code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
---|---|---|---|
# Write five user-supplied names to "nowy_zad_84.txt", then append the
# numbers 1..100, one per line.  Context managers guarantee the file is
# closed (and flushed) even if input() raises mid-way.
with open("nowy_zad_84.txt", "w") as plik:
    print(" Podaj 5 imion")
    for i in range(1, 6):
        imie = input(f" Podaj imie nr {i} ")
        plik.write(f" {imie} \n")

# Re-open in append mode so the numbers follow the five names.
with open("nowy_zad_84.txt", "a") as plik:
    for i in range(1, 101):
        plik.write(f"{i}\n")
|
normal
|
{
"blob_id": "0ac99e2b33f676a99674c9a8e5d9d47c5bce084b",
"index": 5820,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(' Podaj 5 imion')\nfor i in range(1, 6):\n imie = input(f' Podaj imie nr {i} ')\n plik.write(f' {imie} \\n')\nplik.close()\n<mask token>\nfor i in range(1, 101):\n plik.write(str(i))\n plik.write('\\n')\nplik.close()\n",
"step-3": "plik = open('nowy_zad_84.txt', 'w')\nprint(' Podaj 5 imion')\nfor i in range(1, 6):\n imie = input(f' Podaj imie nr {i} ')\n plik.write(f' {imie} \\n')\nplik.close()\nplik = open('nowy_zad_84.txt', 'a')\nfor i in range(1, 101):\n plik.write(str(i))\n plik.write('\\n')\nplik.close()\n",
"step-4": "\r\n\r\nplik=open(\"nowy_zad_84.txt\", \"w\")\r\n\r\nprint(\" Podaj 5 imion\")\r\nfor i in range(1,6):\r\n imie=input(f\" Podaj imie nr {i} \")\r\n # plik.write(imie)\r\n # plik.write(\"\\n\")\r\n plik.write(f\" {imie} \\n\")\r\n\r\nplik.close()\r\n\r\nplik=open(\"nowy_zad_84.txt\", \"a\")\r\n\r\nfor i in range(1,101):\r\n plik.write(str(i))\r\n plik.write(\"\\n\")\r\n\r\nplik.close()\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import numpy as np
import math
import datetime
def multi_strassen(A, B, check_ig=True, check_quad=True, check_pot=True, check_time=True):
    """Multiply two matrices block-wise, using Strassen's formulas on 2x2 tiles.

    Computes C = A @ B, prints C, and returns it together with the elapsed time.

    Parameters:
        A, B: two-dimensional numpy arrays.
        check_ig: require A and B to have identical shapes.
        check_quad: require both matrices to be square.
        check_pot: zero-pad each matrix up to the next 2^n x 2^n size.
        check_time: measure wall-clock execution time.

    Returns:
        (C, tempo): the product trimmed back to the original output shape, and
        a datetime.timedelta (or a placeholder string when check_time is
        False).  Returns (None, None) when any validation check fails.
    """

    def Strassen(matriz_1, matriz_2):
        # Strassen's 7-multiplication scheme for a pair of 2x2 matrices.
        if (matriz_1.shape[0] != 2) or (matriz_1.shape[1] != 2) or (matriz_2.shape[0] != 2) or (matriz_2.shape[1] != 2):
            print("As matrizes devem ser do tipo 2x2")
            return None
        M1 = (matriz_1[0, 0] + matriz_1[1, 1]) * (matriz_2[0, 0] + matriz_2[1, 1])
        M2 = (matriz_1[1, 0] + matriz_1[1, 1]) * matriz_2[0, 0]
        M3 = matriz_1[0, 0] * (matriz_2[0, 1] - matriz_2[1, 1])
        M4 = matriz_1[1, 1] * (matriz_2[1, 0] - matriz_2[0, 0])
        M5 = (matriz_1[0, 0] + matriz_1[0, 1]) * matriz_2[1, 1]
        M6 = (matriz_1[1, 0] - matriz_1[0, 0]) * (matriz_2[0, 0] + matriz_2[0, 1])
        M7 = (matriz_1[0, 1] - matriz_1[1, 1]) * (matriz_2[1, 0] + matriz_2[1, 1])
        Resultado = np.zeros([2, 2])
        Resultado[0, 0] = M1 + M4 - M5 + M7
        Resultado[0, 1] = M3 + M5
        Resultado[1, 0] = M2 + M4
        Resultado[1, 1] = M1 - M2 + M3 + M6
        return Resultado

    def eh_potencia_de_dois(n):
        # True when n is a positive power of two.  The original expression,
        # math.ceil(log2(n) != floor(log2(n))), applied ceil() to a boolean —
        # it worked only by accident; the bit test is exact and float-free.
        return n > 0 and (n & (n - 1)) == 0

    def preenche_potencia(M, nome):
        # Zero-pad M until both of its dimensions are powers of two (and it
        # ends up square), preserving the original rows-first / columns-first
        # padding order.  Returns M unchanged when no padding is needed.
        if eh_potencia_de_dois(M.shape[0]) and eh_potencia_de_dois(M.shape[1]):
            return M
        print(f"A matriz {nome} será modificada, acrescentando-se zeros para que torne-se uma matriz do tipo 2^n x 2^n")
        linhas = 2 ** math.ceil(math.log2(M.shape[0])) - M.shape[0]    # missing rows to next power of two
        colunas = 2 ** math.ceil(math.log2(M.shape[1])) - M.shape[1]  # missing columns to next power of two
        if linhas > colunas:
            M = np.vstack((M, np.zeros([linhas, M.shape[1]])))
            M = np.hstack((M, np.zeros([M.shape[0], M.shape[0] - M.shape[1]])))
        else:  # colunas >= linhas
            M = np.hstack((M, np.zeros([M.shape[0], colunas])))
            M = np.vstack((M, np.zeros([M.shape[1] - M.shape[0], M.shape[1]])))
        return M

    if check_time:
        inicio = datetime.datetime.now()

    # Part 1: validation of the pre-established conditions.
    # Dimensionality is checked FIRST: the original code indexed A.shape[1]
    # before this check and raised IndexError on 1-D input instead of
    # printing the intended error message.
    if (len(A.shape) != 2) or (len(B.shape) != 2):
        print("Erro: As matrizes devem ser bidimensionais")
        return None, None
    if A.shape[1] != B.shape[0]:
        print("Erro: Não é possível realizar a multiplicação C = A * B com as matrizes fornecidas")
        return None, None
    if check_ig and A.shape != B.shape:
        print("Erro: As matrizes devem possuir mesmas dimensões")
        return None, None
    if check_quad and (A.shape[0] != A.shape[1] or B.shape[0] != B.shape[1]):
        print("Erro: As matrizes devem ser ambas quadradas")
        return None, None

    # Remember the caller-visible output size before any padding happens.
    linhas_saida, colunas_saida = A.shape[0], B.shape[1]

    if check_pot:
        A = preenche_potencia(A, "A")
        B = preenche_potencia(B, "B")

    # Block multiplication: standard triple loop over 2x2 tiles, each tile
    # product computed with Strassen's formulas.
    D = np.zeros_like(A)
    for i in range(0, A.shape[0], 2):
        for j in range(0, B.shape[1], 2):
            soma = 0
            for k in range(0, A.shape[1], 2):
                soma = soma + Strassen(A[i:i + 2, k:k + 2], B[k:k + 2, j:j + 2])
            D[i:i + 2, j:j + 2] = soma
    C = D[0:linhas_saida, 0:colunas_saida]  # trim the padding back off
    print(C)
    if check_time:
        tempo = datetime.datetime.now() - inicio
    else:
        tempo = "Tempo não calculado"  # fixed typo: was "calcualdo"
    return C, tempo
|
normal
|
{
"blob_id": "6707723b3d0b42271e49c08c639afc9103066dc7",
"index": 4679,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef multi_strassen(A, B, check_ig=True, check_quad=True, check_pot=True,\n check_time=True):\n\n def Strassen(matriz_1, matriz_2):\n if matriz_1.shape[0] != 2 or matriz_1.shape[1] != 2 or matriz_2.shape[0\n ] != 2 or matriz_2.shape[1] != 2:\n print('As matrizes devem ser do tipo 2x2')\n return None\n M1 = (matriz_1[0, 0] + matriz_1[1, 1]) * (matriz_2[0, 0] + matriz_2\n [1, 1])\n M2 = (matriz_1[1, 0] + matriz_1[1, 1]) * matriz_2[0, 0]\n M3 = matriz_1[0, 0] * (matriz_2[0, 1] - matriz_2[1, 1])\n M4 = matriz_1[1, 1] * (matriz_2[1, 0] - matriz_2[0, 0])\n M5 = (matriz_1[0, 0] + matriz_1[0, 1]) * matriz_2[1, 1]\n M6 = (matriz_1[1, 0] - matriz_1[0, 0]) * (matriz_2[0, 0] + matriz_2\n [0, 1])\n M7 = (matriz_1[0, 1] - matriz_1[1, 1]) * (matriz_2[1, 0] + matriz_2\n [1, 1])\n Resultado = np.zeros([2, 2])\n Resultado[0, 0] = M1 + M4 - M5 + M7\n Resultado[0, 1] = M3 + M5\n Resultado[1, 0] = M2 + M4\n Resultado[1, 1] = M1 - M2 + M3 + M6\n return Resultado\n if check_time:\n inicio = datetime.datetime.now()\n C = np.zeros([A.shape[0], B.shape[1]])\n if A.shape[1] != B.shape[0]:\n print(\n 'Erro: Não é possível realizar a multiplicação C = A * B com as matrizes fornecidas'\n )\n return None, None\n if len(A.shape) != 2 or len(B.shape) != 2:\n print('Erro: As matrizes devem ser bidimensionais')\n return None, None\n if check_ig:\n if A.shape != B.shape:\n print('Erro: As matrizes devem possuir mesmas dimensões')\n return None, None\n if check_quad:\n if A.shape[0] - A.shape[1] != 0 or B.shape[0] - B.shape[1] != 0:\n print('Erro: As matrizes devem ser ambas quadradas')\n return None, None\n if check_pot:\n if math.ceil(math.log2(A.shape[0]) != math.floor(math.log2(A.shape[0]))\n ) or math.ceil(math.log2(A.shape[1]) != math.floor(math.log2(A.\n shape[1]))):\n print(\n 'A matriz A será modificada, acrescentando-se zeros para que torne-se uma matriz do tipo 2^n x 2^n'\n )\n linhas = 2 ** math.ceil(math.log2(A.shape[0])) - A.shape[0]\n colunas = 2 ** 
math.ceil(math.log2(A.shape[1])) - A.shape[1]\n if linhas > colunas:\n matriz_auxiliar = np.zeros([linhas, A.shape[1]])\n A = np.vstack((A, matriz_auxiliar))\n matriz_auxiliar = np.zeros([A.shape[0], A.shape[0] - A.\n shape[1]])\n A = np.hstack((A, matriz_auxiliar))\n elif colunas >= linhas:\n matriz_auxiliar = np.zeros([A.shape[0], colunas])\n A = np.hstack((A, matriz_auxiliar))\n matriz_auxiliar = np.zeros([A.shape[1] - A.shape[0], A.\n shape[1]])\n A = np.vstack((A, matriz_auxiliar))\n if math.ceil(math.log2(B.shape[0]) != math.floor(math.log2(B.shape[0]))\n ) or math.ceil(math.log2(B.shape[1]) != math.floor(math.log2(B.\n shape[1]))):\n print(\n 'A matriz B será modificada, acrescentando-se zeros para que torne-se uma matriz do tipo 2^n x 2^n'\n )\n linhas = 2 ** math.ceil(math.log2(B.shape[0])) - B.shape[0]\n colunas = 2 ** math.ceil(math.log2(B.shape[1])) - B.shape[1]\n if linhas > colunas:\n matriz_auxiliar = np.zeros([linhas, B.shape[1]])\n B = np.vstack((B, matriz_auxiliar))\n matriz_auxiliar = np.zeros([B.shape[0], B.shape[0] - B.\n shape[1]])\n B = np.hstack((B, matriz_auxiliar))\n elif colunas >= linhas:\n matriz_auxiliar = np.zeros([B.shape[0], colunas])\n B = np.hstack((B, matriz_auxiliar))\n matriz_auxiliar = np.zeros([B.shape[1] - B.shape[0], B.\n shape[1]])\n B = np.vstack((B, matriz_auxiliar))\n D = np.zeros_like(A)\n for i in range(0, A.shape[0], 2):\n for j in range(0, B.shape[1], 2):\n soma = 0\n for k in range(0, A.shape[1], 2):\n soma = soma + Strassen(A[i:i + 2, k:k + 2], B[k:k + 2, j:j + 2]\n )\n D[i:i + 2, j:j + 2] = soma\n C = D[0:C.shape[0], 0:C.shape[1]]\n print(C)\n if check_time:\n fim = datetime.datetime.now()\n tempo = fim - inicio\n else:\n tempo = 'Tempo não calcualdo'\n return C, tempo\n",
"step-3": "import numpy as np\nimport math\nimport datetime\n\n\ndef multi_strassen(A, B, check_ig=True, check_quad=True, check_pot=True,\n check_time=True):\n\n def Strassen(matriz_1, matriz_2):\n if matriz_1.shape[0] != 2 or matriz_1.shape[1] != 2 or matriz_2.shape[0\n ] != 2 or matriz_2.shape[1] != 2:\n print('As matrizes devem ser do tipo 2x2')\n return None\n M1 = (matriz_1[0, 0] + matriz_1[1, 1]) * (matriz_2[0, 0] + matriz_2\n [1, 1])\n M2 = (matriz_1[1, 0] + matriz_1[1, 1]) * matriz_2[0, 0]\n M3 = matriz_1[0, 0] * (matriz_2[0, 1] - matriz_2[1, 1])\n M4 = matriz_1[1, 1] * (matriz_2[1, 0] - matriz_2[0, 0])\n M5 = (matriz_1[0, 0] + matriz_1[0, 1]) * matriz_2[1, 1]\n M6 = (matriz_1[1, 0] - matriz_1[0, 0]) * (matriz_2[0, 0] + matriz_2\n [0, 1])\n M7 = (matriz_1[0, 1] - matriz_1[1, 1]) * (matriz_2[1, 0] + matriz_2\n [1, 1])\n Resultado = np.zeros([2, 2])\n Resultado[0, 0] = M1 + M4 - M5 + M7\n Resultado[0, 1] = M3 + M5\n Resultado[1, 0] = M2 + M4\n Resultado[1, 1] = M1 - M2 + M3 + M6\n return Resultado\n if check_time:\n inicio = datetime.datetime.now()\n C = np.zeros([A.shape[0], B.shape[1]])\n if A.shape[1] != B.shape[0]:\n print(\n 'Erro: Não é possível realizar a multiplicação C = A * B com as matrizes fornecidas'\n )\n return None, None\n if len(A.shape) != 2 or len(B.shape) != 2:\n print('Erro: As matrizes devem ser bidimensionais')\n return None, None\n if check_ig:\n if A.shape != B.shape:\n print('Erro: As matrizes devem possuir mesmas dimensões')\n return None, None\n if check_quad:\n if A.shape[0] - A.shape[1] != 0 or B.shape[0] - B.shape[1] != 0:\n print('Erro: As matrizes devem ser ambas quadradas')\n return None, None\n if check_pot:\n if math.ceil(math.log2(A.shape[0]) != math.floor(math.log2(A.shape[0]))\n ) or math.ceil(math.log2(A.shape[1]) != math.floor(math.log2(A.\n shape[1]))):\n print(\n 'A matriz A será modificada, acrescentando-se zeros para que torne-se uma matriz do tipo 2^n x 2^n'\n )\n linhas = 2 ** math.ceil(math.log2(A.shape[0])) - 
A.shape[0]\n colunas = 2 ** math.ceil(math.log2(A.shape[1])) - A.shape[1]\n if linhas > colunas:\n matriz_auxiliar = np.zeros([linhas, A.shape[1]])\n A = np.vstack((A, matriz_auxiliar))\n matriz_auxiliar = np.zeros([A.shape[0], A.shape[0] - A.\n shape[1]])\n A = np.hstack((A, matriz_auxiliar))\n elif colunas >= linhas:\n matriz_auxiliar = np.zeros([A.shape[0], colunas])\n A = np.hstack((A, matriz_auxiliar))\n matriz_auxiliar = np.zeros([A.shape[1] - A.shape[0], A.\n shape[1]])\n A = np.vstack((A, matriz_auxiliar))\n if math.ceil(math.log2(B.shape[0]) != math.floor(math.log2(B.shape[0]))\n ) or math.ceil(math.log2(B.shape[1]) != math.floor(math.log2(B.\n shape[1]))):\n print(\n 'A matriz B será modificada, acrescentando-se zeros para que torne-se uma matriz do tipo 2^n x 2^n'\n )\n linhas = 2 ** math.ceil(math.log2(B.shape[0])) - B.shape[0]\n colunas = 2 ** math.ceil(math.log2(B.shape[1])) - B.shape[1]\n if linhas > colunas:\n matriz_auxiliar = np.zeros([linhas, B.shape[1]])\n B = np.vstack((B, matriz_auxiliar))\n matriz_auxiliar = np.zeros([B.shape[0], B.shape[0] - B.\n shape[1]])\n B = np.hstack((B, matriz_auxiliar))\n elif colunas >= linhas:\n matriz_auxiliar = np.zeros([B.shape[0], colunas])\n B = np.hstack((B, matriz_auxiliar))\n matriz_auxiliar = np.zeros([B.shape[1] - B.shape[0], B.\n shape[1]])\n B = np.vstack((B, matriz_auxiliar))\n D = np.zeros_like(A)\n for i in range(0, A.shape[0], 2):\n for j in range(0, B.shape[1], 2):\n soma = 0\n for k in range(0, A.shape[1], 2):\n soma = soma + Strassen(A[i:i + 2, k:k + 2], B[k:k + 2, j:j + 2]\n )\n D[i:i + 2, j:j + 2] = soma\n C = D[0:C.shape[0], 0:C.shape[1]]\n print(C)\n if check_time:\n fim = datetime.datetime.now()\n tempo = fim - inicio\n else:\n tempo = 'Tempo não calcualdo'\n return C, tempo\n",
"step-4": "import numpy as np\nimport math\nimport datetime\n\ndef multi_strassen(A,B, check_ig = True, check_quad = True, check_pot = True, check_time = True):\n \n def Strassen(matriz_1,matriz_2): # Função do algoritmo de Strassen para multiplicação de matrizes do tipo 2x2\n if (matriz_1.shape[0] != 2) or (matriz_1.shape[1] != 2) or (matriz_2.shape[0] != 2) or (matriz_2.shape[1] != 2):\n print(\"As matrizes devem ser do tipo 2x2\")\n return None\n\n M1 = (matriz_1[0,0] + matriz_1[1,1]) * (matriz_2[0,0] + matriz_2[1,1])\n M2 = (matriz_1[1,0] + matriz_1[1,1]) * matriz_2[0,0]\n M3 = matriz_1[0,0] * (matriz_2[0,1] - matriz_2[1,1])\n M4 = matriz_1[1,1] * (matriz_2[1,0] - matriz_2[0,0])\n M5 = (matriz_1[0,0] + matriz_1 [0,1]) * matriz_2[1,1]\n M6 = (matriz_1[1,0] - matriz_1[0,0]) * (matriz_2[0,0] + matriz_2[0,1])\n M7 = (matriz_1[0,1] - matriz_1[1,1]) * (matriz_2[1,0] + matriz_2[1,1])\n\n Resultado = np.zeros([2,2])\n Resultado[0,0] = M1 + M4 - M5 + M7\n Resultado[0,1] = M3 + M5\n Resultado[1,0] = M2 + M4\n Resultado[1,1] = M1 - M2 + M3 + M6\n\n return Resultado\n if check_time:\n inicio = datetime.datetime.now()\n \n C = np.zeros([A.shape[0],B.shape[1]]) #Guarda o tamanho original da matriz multiplicada\n\n #Parte 1: Checagem das condições pré-estabelecidas\n if (A.shape[1] != B.shape[0]):\n print(\"Erro: Não é possível realizar a multiplicação C = A * B com as matrizes fornecidas\")\n return None, None\n\n if (len(A.shape) != 2) or (len(B.shape) != 2): #Checa a dimensão da matriz\n print(\"Erro: As matrizes devem ser bidimensionais\")\n return None, None\n\n if check_ig:\n \n if (A.shape != B.shape): #Checa se as matrizes possuem mesma dimensão\n print(\"Erro: As matrizes devem possuir mesmas dimensões\")\n return None, None\n \n if check_quad:\n \n if ((A.shape[0] - A.shape[1]) != 0) or ((B.shape[0] - B.shape[1]) != 0): #Checa se as matrizes são quadradas\n print(\"Erro: As matrizes devem ser ambas quadradas\")\n return None, None\n\n if check_pot:\n \n if 
(math.ceil(math.log2(A.shape[0]) != math.floor(math.log2(A.shape[0])))) or (math.ceil(math.log2(A.shape[1]) != math.floor(math.log2(A.shape[1])))):\n print(\"A matriz A será modificada, acrescentando-se zeros para que torne-se uma matriz do tipo 2^n x 2^n\")\n linhas = 2**math.ceil(math.log2(A.shape[0])) - A.shape[0] #Calcula quantas linhas faltam para um quadrado de dois\n colunas = 2**math.ceil(math.log2(A.shape[1])) - A.shape[1] #Calcula quantas colunas faltam para um quadrado de dois\n if linhas > colunas:\n matriz_auxiliar = np.zeros([linhas,A.shape[1]])\n A = np.vstack((A,matriz_auxiliar))\n matriz_auxiliar = np.zeros([A.shape[0],A.shape[0]-A.shape[1]])\n A = np.hstack((A,matriz_auxiliar))\n\n elif colunas >= linhas:\n matriz_auxiliar = np.zeros([A.shape[0],colunas])\n A = np.hstack((A,matriz_auxiliar))\n matriz_auxiliar = np.zeros([A.shape[1]-A.shape[0],A.shape[1]])\n A = np.vstack((A,matriz_auxiliar))\n\n if (math.ceil(math.log2(B.shape[0]) != math.floor(math.log2(B.shape[0])))) or (math.ceil(math.log2(B.shape[1]) != math.floor(math.log2(B.shape[1])))):\n print(\"A matriz B será modificada, acrescentando-se zeros para que torne-se uma matriz do tipo 2^n x 2^n\")\n linhas = 2**math.ceil(math.log2(B.shape[0])) - B.shape[0] #Calcula quantas linhas faltam para um quadrado de dois\n colunas = 2**math.ceil(math.log2(B.shape[1])) - B.shape[1] #Calcula quantas colunas faltam para um quadrado de dois\n if linhas > colunas:\n matriz_auxiliar = np.zeros([linhas,B.shape[1]])\n B = np.vstack((B,matriz_auxiliar))\n matriz_auxiliar = np.zeros([B.shape[0],B.shape[0]-B.shape[1]])\n B = np.hstack((B,matriz_auxiliar))\n\n elif colunas >= linhas:\n matriz_auxiliar = np.zeros([B.shape[0],colunas])\n B = np.hstack((B,matriz_auxiliar))\n matriz_auxiliar = np.zeros([B.shape[1]-B.shape[0],B.shape[1]])\n B = np.vstack((B,matriz_auxiliar))\n\n\n #Multiplicação de fato das matrizes\n \n D = np.zeros_like(A)\n for i in range(0,A.shape[0],2):\n for j in range(0,B.shape[1],2):\n soma = 
0\n for k in range(0,A.shape[1],2):\n soma = soma + Strassen(A[i:i+2,k:k+2],B[k:k+2,j:j+2])\n D[i:i+2,j:j+2] = soma\n C = D[0:C.shape[0],0:C.shape[1]]\n print (C)\n if check_time:\n fim = datetime.datetime.now()\n tempo = fim - inicio\n #print(\"Tempo de execução = \", fim - inicio)\n else:\n tempo = \"Tempo não calcualdo\"\n \n return C, tempo\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django import forms
from django.forms import widgets
# Fallback category value used when a product fits no specific group.
PRODUCT_OTHER_CHOICE = 'other'

# (value, human-readable label) pairs feeding the category <select> widget.
PRODUCT_CATEGORY_CHOICES = (
    (PRODUCT_OTHER_CHOICE, 'Разное'),
    ('food', 'Еда'),
    ('drink', 'Вода'),
    ('cloth', 'Одежда'),
    ('electronics', 'Электроника'),
)
class ProductForm(forms.Form):
    """Form for creating or editing a single product entry."""

    name = forms.CharField(label='Наименование', max_length=100)
    description = forms.CharField(
        label='Описание',
        max_length=2000,
        required=True,
        widget=forms.Textarea,
    )
    category = forms.ChoiceField(
        label='Категория',
        required=False,
        choices=PRODUCT_CATEGORY_CHOICES,
        widget=forms.Select,
    )
    amount = forms.IntegerField(label='Остаток', min_value=0)
    price = forms.DecimalField(label='Цена', max_digits=7, decimal_places=2)
class FindForm(forms.Form):
    """Single-field search form for looking products up by name."""

    name = forms.CharField(label='Наименование', max_length=100)
|
normal
|
{
"blob_id": "e8a024796b6426e572571e46030678e90c537229",
"index": 7549,
"step-1": "<mask token>\n\n\nclass ProductForm(forms.Form):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass FindForm(forms.Form):\n name = forms.CharField(max_length=100, label='Наименование')\n",
"step-2": "<mask token>\n\n\nclass ProductForm(forms.Form):\n name = forms.CharField(max_length=100, label='Наименование')\n description = forms.CharField(max_length=2000, required=True, label=\n 'Описание', widget=forms.Textarea)\n category = forms.ChoiceField(required=False, widget=forms.Select,\n choices=PRODUCT_CATEGORY_CHOICES, label='Категория')\n amount = forms.IntegerField(min_value=0, label='Остаток')\n price = forms.DecimalField(max_digits=7, decimal_places=2, label='Цена')\n\n\nclass FindForm(forms.Form):\n name = forms.CharField(max_length=100, label='Наименование')\n",
"step-3": "<mask token>\nPRODUCT_OTHER_CHOICE = 'other'\nPRODUCT_CATEGORY_CHOICES = (PRODUCT_OTHER_CHOICE, 'Разное'), ('food', 'Еда'), (\n 'drink', 'Вода'), ('cloth', 'Одежда'), ('electronics', 'Электроника')\n\n\nclass ProductForm(forms.Form):\n name = forms.CharField(max_length=100, label='Наименование')\n description = forms.CharField(max_length=2000, required=True, label=\n 'Описание', widget=forms.Textarea)\n category = forms.ChoiceField(required=False, widget=forms.Select,\n choices=PRODUCT_CATEGORY_CHOICES, label='Категория')\n amount = forms.IntegerField(min_value=0, label='Остаток')\n price = forms.DecimalField(max_digits=7, decimal_places=2, label='Цена')\n\n\nclass FindForm(forms.Form):\n name = forms.CharField(max_length=100, label='Наименование')\n",
"step-4": "from django import forms\nfrom django.forms import widgets\nPRODUCT_OTHER_CHOICE = 'other'\nPRODUCT_CATEGORY_CHOICES = (PRODUCT_OTHER_CHOICE, 'Разное'), ('food', 'Еда'), (\n 'drink', 'Вода'), ('cloth', 'Одежда'), ('electronics', 'Электроника')\n\n\nclass ProductForm(forms.Form):\n name = forms.CharField(max_length=100, label='Наименование')\n description = forms.CharField(max_length=2000, required=True, label=\n 'Описание', widget=forms.Textarea)\n category = forms.ChoiceField(required=False, widget=forms.Select,\n choices=PRODUCT_CATEGORY_CHOICES, label='Категория')\n amount = forms.IntegerField(min_value=0, label='Остаток')\n price = forms.DecimalField(max_digits=7, decimal_places=2, label='Цена')\n\n\nclass FindForm(forms.Form):\n name = forms.CharField(max_length=100, label='Наименование')\n",
"step-5": "from django import forms\nfrom django.forms import widgets\n# from product.models import PRODUCT_OTHER_CHOICE, PRODUCT_CATEGORY_CHOICES\n\nPRODUCT_OTHER_CHOICE = 'other'\nPRODUCT_CATEGORY_CHOICES = (\n (PRODUCT_OTHER_CHOICE, 'Разное'),\n ('food', 'Еда'),\n ('drink', 'Вода'),\n ('cloth', 'Одежда'),\n ('electronics', 'Электроника')\n)\n\nclass ProductForm(forms.Form):\n name = forms.CharField(max_length=100, label='Наименование')\n description = forms.CharField(max_length=2000, required=True, label='Описание', widget=forms.Textarea)\n category = forms.ChoiceField(required=False, widget=forms.Select, choices=PRODUCT_CATEGORY_CHOICES, label='Категория')\n amount = forms.IntegerField(min_value=0, label='Остаток')\n price = forms.DecimalField(max_digits=7, decimal_places=2, label='Цена')\n\nclass FindForm(forms.Form):\n name = forms.CharField(max_length=100, label='Наименование')\n\n\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
def test_burst_evolved():
"""Test burst() in EvolvedCluster"""
cluster = p22.EvolvedCluster('..#\n#..\n...')
assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean
cluster.burst()
assert cluster.virus.direction == p22.Directions.left
assert cluster.virus.pos == p22.Position(1, 0)
assert cluster.infected[p22.Position(1, 1)] == p22.State.Weakened
assert cluster.infected[cluster.virus.pos] == p22.State.Infected
prev_pos = cluster.virus.pos
cluster.burst()
assert cluster.virus.direction == p22.Directions.up
assert cluster.virus.pos == p22.Position(0, 0)
assert cluster.infected[prev_pos] == p22.State.Flagged
assert cluster.infected[cluster.virus.pos] == p22.State.Clean
@pytest.mark.skip(reason='too slow to test')
def test_solve_b():
"""Tests for solve_b()"""
print('\nTesting solve_b()')
assert p22.solve_b(100, '..#\n#..\n...') == 26
assert p22.solve_b(10000000, '..#\n#..\n...') == 2511944
def test_solve_a0():
"""Tests for solve_a0()"""
print('\nTesting solve_a0()')
assert p22.solve_a0(7, '..#\n#..\n...') == 5
assert p22.solve_a0(70, '..#\n#..\n...') == 41
assert p22.solve_a0(10000, '..#\n#..\n...') == 5587
def test_solve_b0():
"""Tests for solve_b0()"""
print('\nTesting solve_b0()')
assert p22.solve_b0(100, '..#\n#..\n...') == 26
assert p22.solve_b0(10000000, '..#\n#..\n...') == 2511944
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_solve_a():
"""Tests for solve_b()"""
print('\nTesting solve_a()')
assert p22.solve_a(7, '..#\n#..\n...') == 5
assert p22.solve_a(70, '..#\n#..\n...') == 41
assert p22.solve_a(10000, '..#\n#..\n...') == 5587
def test_burst_evolved():
"""Test burst() in EvolvedCluster"""
cluster = p22.EvolvedCluster('..#\n#..\n...')
assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean
cluster.burst()
assert cluster.virus.direction == p22.Directions.left
assert cluster.virus.pos == p22.Position(1, 0)
assert cluster.infected[p22.Position(1, 1)] == p22.State.Weakened
assert cluster.infected[cluster.virus.pos] == p22.State.Infected
prev_pos = cluster.virus.pos
cluster.burst()
assert cluster.virus.direction == p22.Directions.up
assert cluster.virus.pos == p22.Position(0, 0)
assert cluster.infected[prev_pos] == p22.State.Flagged
assert cluster.infected[cluster.virus.pos] == p22.State.Clean
@pytest.mark.skip(reason='too slow to test')
def test_solve_b():
"""Tests for solve_b()"""
print('\nTesting solve_b()')
assert p22.solve_b(100, '..#\n#..\n...') == 26
assert p22.solve_b(10000000, '..#\n#..\n...') == 2511944
def test_solve_a0():
"""Tests for solve_a0()"""
print('\nTesting solve_a0()')
assert p22.solve_a0(7, '..#\n#..\n...') == 5
assert p22.solve_a0(70, '..#\n#..\n...') == 41
assert p22.solve_a0(10000, '..#\n#..\n...') == 5587
def test_solve_b0():
"""Tests for solve_b0()"""
print('\nTesting solve_b0()')
assert p22.solve_b0(100, '..#\n#..\n...') == 26
assert p22.solve_b0(10000000, '..#\n#..\n...') == 2511944
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_burst():
"""Test burst() in Cluster"""
print('\nTesting burst()')
cluster = p22.Cluster('..#\n#..\n...')
assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean
cluster.burst()
assert cluster.virus.direction == p22.Directions.left
assert cluster.virus.pos == p22.Position(1, 0)
assert cluster.infected[p22.Position(1, 1)] == p22.State.Infected
assert cluster.infected[cluster.virus.pos] == p22.State.Infected
prev_pos = cluster.virus.pos
cluster.burst()
assert cluster.virus.direction == p22.Directions.up
assert cluster.virus.pos == p22.Position(0, 0)
assert cluster.infected[prev_pos] == p22.State.Clean
for _ in range(4):
assert cluster.infected[cluster.virus.pos] == p22.State.Clean
prev_pos = cluster.virus.pos
cluster.burst()
assert cluster.infected[prev_pos] == p22.State.Infected
assert cluster.virus.pos == p22.Position(0, 0)
prev_pos = cluster.virus.pos
cluster.burst()
assert cluster.virus.direction == p22.Directions.right
assert cluster.virus.pos == p22.Position(0, 1)
assert cluster.infected[prev_pos] == p22.State.Clean
assert cluster.infections_caused == 5
def test_solve_a():
"""Tests for solve_b()"""
print('\nTesting solve_a()')
assert p22.solve_a(7, '..#\n#..\n...') == 5
assert p22.solve_a(70, '..#\n#..\n...') == 41
assert p22.solve_a(10000, '..#\n#..\n...') == 5587
def test_burst_evolved():
"""Test burst() in EvolvedCluster"""
cluster = p22.EvolvedCluster('..#\n#..\n...')
assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean
cluster.burst()
assert cluster.virus.direction == p22.Directions.left
assert cluster.virus.pos == p22.Position(1, 0)
assert cluster.infected[p22.Position(1, 1)] == p22.State.Weakened
assert cluster.infected[cluster.virus.pos] == p22.State.Infected
prev_pos = cluster.virus.pos
cluster.burst()
assert cluster.virus.direction == p22.Directions.up
assert cluster.virus.pos == p22.Position(0, 0)
assert cluster.infected[prev_pos] == p22.State.Flagged
assert cluster.infected[cluster.virus.pos] == p22.State.Clean
@pytest.mark.skip(reason='too slow to test')
def test_solve_b():
"""Tests for solve_b()"""
print('\nTesting solve_b()')
assert p22.solve_b(100, '..#\n#..\n...') == 26
assert p22.solve_b(10000000, '..#\n#..\n...') == 2511944
def test_solve_a0():
"""Tests for solve_a0()"""
print('\nTesting solve_a0()')
assert p22.solve_a0(7, '..#\n#..\n...') == 5
assert p22.solve_a0(70, '..#\n#..\n...') == 41
assert p22.solve_a0(10000, '..#\n#..\n...') == 5587
def test_solve_b0():
"""Tests for solve_b0()"""
print('\nTesting solve_b0()')
assert p22.solve_b0(100, '..#\n#..\n...') == 26
assert p22.solve_b0(10000000, '..#\n#..\n...') == 2511944
<|reserved_special_token_1|>
import pytest
import problem22 as p22
def test_burst():
"""Test burst() in Cluster"""
print('\nTesting burst()')
cluster = p22.Cluster('..#\n#..\n...')
assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean
cluster.burst()
assert cluster.virus.direction == p22.Directions.left
assert cluster.virus.pos == p22.Position(1, 0)
assert cluster.infected[p22.Position(1, 1)] == p22.State.Infected
assert cluster.infected[cluster.virus.pos] == p22.State.Infected
prev_pos = cluster.virus.pos
cluster.burst()
assert cluster.virus.direction == p22.Directions.up
assert cluster.virus.pos == p22.Position(0, 0)
assert cluster.infected[prev_pos] == p22.State.Clean
for _ in range(4):
assert cluster.infected[cluster.virus.pos] == p22.State.Clean
prev_pos = cluster.virus.pos
cluster.burst()
assert cluster.infected[prev_pos] == p22.State.Infected
assert cluster.virus.pos == p22.Position(0, 0)
prev_pos = cluster.virus.pos
cluster.burst()
assert cluster.virus.direction == p22.Directions.right
assert cluster.virus.pos == p22.Position(0, 1)
assert cluster.infected[prev_pos] == p22.State.Clean
assert cluster.infections_caused == 5
def test_solve_a():
"""Tests for solve_b()"""
print('\nTesting solve_a()')
assert p22.solve_a(7, '..#\n#..\n...') == 5
assert p22.solve_a(70, '..#\n#..\n...') == 41
assert p22.solve_a(10000, '..#\n#..\n...') == 5587
def test_burst_evolved():
"""Test burst() in EvolvedCluster"""
cluster = p22.EvolvedCluster('..#\n#..\n...')
assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean
cluster.burst()
assert cluster.virus.direction == p22.Directions.left
assert cluster.virus.pos == p22.Position(1, 0)
assert cluster.infected[p22.Position(1, 1)] == p22.State.Weakened
assert cluster.infected[cluster.virus.pos] == p22.State.Infected
prev_pos = cluster.virus.pos
cluster.burst()
assert cluster.virus.direction == p22.Directions.up
assert cluster.virus.pos == p22.Position(0, 0)
assert cluster.infected[prev_pos] == p22.State.Flagged
assert cluster.infected[cluster.virus.pos] == p22.State.Clean
@pytest.mark.skip(reason='too slow to test')
def test_solve_b():
"""Tests for solve_b()"""
print('\nTesting solve_b()')
assert p22.solve_b(100, '..#\n#..\n...') == 26
assert p22.solve_b(10000000, '..#\n#..\n...') == 2511944
def test_solve_a0():
"""Tests for solve_a0()"""
print('\nTesting solve_a0()')
assert p22.solve_a0(7, '..#\n#..\n...') == 5
assert p22.solve_a0(70, '..#\n#..\n...') == 41
assert p22.solve_a0(10000, '..#\n#..\n...') == 5587
def test_solve_b0():
"""Tests for solve_b0()"""
print('\nTesting solve_b0()')
assert p22.solve_b0(100, '..#\n#..\n...') == 26
assert p22.solve_b0(10000000, '..#\n#..\n...') == 2511944
<|reserved_special_token_1|>
import pytest
import problem22 as p22
def test_burst():
    """Test burst() in Cluster.

    Steps the part-A virus through the 3x3 example grid, checking the
    virus direction, its position, and the touched cell's state after
    each burst, and the total infection count at the end.
    """
    print('\nTesting burst()')
    cluster = p22.Cluster('..#\n#..\n...')
    # Initial grid parsed from the map: only (0,2) and (1,0) are infected.
    assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected
    assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected
    assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean
    cluster.burst()
    # Burst 1: the clean start cell (1,1) is now infected; the virus faces
    # left and sits on the already-infected cell (1,0).
    assert cluster.virus.direction == p22.Directions.left
    assert cluster.virus.pos == p22.Position(1,0)
    assert cluster.infected[p22.Position(1,1)] == p22.State.Infected
    assert cluster.infected[cluster.virus.pos] == p22.State.Infected
    prev_pos = cluster.virus.pos
    cluster.burst()
    assert cluster.virus.direction == p22.Directions.up # turned right
    assert cluster.virus.pos == p22.Position(0, 0) # moved up
    assert cluster.infected[prev_pos] == p22.State.Clean # cleaned
    # four times in a row finds clean and infects
    for _ in range(4):
        assert cluster.infected[cluster.virus.pos] == p22.State.Clean
        prev_pos = cluster.virus.pos
        cluster.burst()
        assert cluster.infected[prev_pos] == p22.State.Infected
    # After those four bursts the virus has circled back to (0,0).
    assert cluster.virus.pos == p22.Position(0, 0)
    prev_pos = cluster.virus.pos
    cluster.burst()
    # Final burst lands on an infected cell: it is cleaned and the virus
    # ends up facing right at (0,1).
    assert cluster.virus.direction == p22.Directions.right
    assert cluster.virus.pos == p22.Position(0, 1)
    assert cluster.infected[prev_pos] == p22.State.Clean
    # 1 infection in burst 1 + 4 in the loop = 5 total (matches AoC example).
    assert cluster.infections_caused == 5
def test_solve_a():
"""Tests for solve_b()"""
print('\nTesting solve_a()')
assert p22.solve_a(7, '..#\n#..\n...') == 5
assert p22.solve_a(70, '..#\n#..\n...') == 41
assert p22.solve_a(10000, '..#\n#..\n...') == 5587
def test_burst_evolved():
"""Test burst() in EvolvedCluster"""
cluster = p22.EvolvedCluster('..#\n#..\n...')
assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected
assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean
cluster.burst()
assert cluster.virus.direction == p22.Directions.left
assert cluster.virus.pos == p22.Position(1,0)
assert cluster.infected[p22.Position(1,1)] == p22.State.Weakened
assert cluster.infected[cluster.virus.pos] == p22.State.Infected
prev_pos = cluster.virus.pos
cluster.burst()
assert cluster.virus.direction == p22.Directions.up
assert cluster.virus.pos == p22.Position(0,0)
assert cluster.infected[prev_pos] == p22.State.Flagged
assert cluster.infected[cluster.virus.pos] == p22.State.Clean
@pytest.mark.skip(reason="too slow to test")
def test_solve_b():
"""Tests for solve_b()"""
print('\nTesting solve_b()')
assert p22.solve_b(100, '..#\n#..\n...') == 26
assert p22.solve_b(10000000, '..#\n#..\n...') == 2511944
def test_solve_a0():
"""Tests for solve_a0()"""
print('\nTesting solve_a0()')
assert p22.solve_a0(7, '..#\n#..\n...') == 5
assert p22.solve_a0(70, '..#\n#..\n...') == 41
assert p22.solve_a0(10000, '..#\n#..\n...') == 5587
def test_solve_b0():
"""Tests for solve_b0()"""
print('\nTesting solve_b0()')
assert p22.solve_b0(100, '..#\n#..\n...') == 26
assert p22.solve_b0(10000000, '..#\n#..\n...') == 2511944
|
flexible
|
{
"blob_id": "f0a3778e74d113a5de778fa17ec321c6680c56c2",
"index": 1143,
"step-1": "<mask token>\n\n\ndef test_burst_evolved():\n \"\"\"Test burst() in EvolvedCluster\"\"\"\n cluster = p22.EvolvedCluster('..#\\n#..\\n...')\n assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.left\n assert cluster.virus.pos == p22.Position(1, 0)\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Weakened\n assert cluster.infected[cluster.virus.pos] == p22.State.Infected\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.up\n assert cluster.virus.pos == p22.Position(0, 0)\n assert cluster.infected[prev_pos] == p22.State.Flagged\n assert cluster.infected[cluster.virus.pos] == p22.State.Clean\n\n\[email protected](reason='too slow to test')\ndef test_solve_b():\n \"\"\"Tests for solve_b()\"\"\"\n print('\\nTesting solve_b()')\n assert p22.solve_b(100, '..#\\n#..\\n...') == 26\n assert p22.solve_b(10000000, '..#\\n#..\\n...') == 2511944\n\n\ndef test_solve_a0():\n \"\"\"Tests for solve_a0()\"\"\"\n print('\\nTesting solve_a0()')\n assert p22.solve_a0(7, '..#\\n#..\\n...') == 5\n assert p22.solve_a0(70, '..#\\n#..\\n...') == 41\n assert p22.solve_a0(10000, '..#\\n#..\\n...') == 5587\n\n\ndef test_solve_b0():\n \"\"\"Tests for solve_b0()\"\"\"\n print('\\nTesting solve_b0()')\n assert p22.solve_b0(100, '..#\\n#..\\n...') == 26\n assert p22.solve_b0(10000000, '..#\\n#..\\n...') == 2511944\n",
"step-2": "<mask token>\n\n\ndef test_solve_a():\n \"\"\"Tests for solve_b()\"\"\"\n print('\\nTesting solve_a()')\n assert p22.solve_a(7, '..#\\n#..\\n...') == 5\n assert p22.solve_a(70, '..#\\n#..\\n...') == 41\n assert p22.solve_a(10000, '..#\\n#..\\n...') == 5587\n\n\ndef test_burst_evolved():\n \"\"\"Test burst() in EvolvedCluster\"\"\"\n cluster = p22.EvolvedCluster('..#\\n#..\\n...')\n assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.left\n assert cluster.virus.pos == p22.Position(1, 0)\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Weakened\n assert cluster.infected[cluster.virus.pos] == p22.State.Infected\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.up\n assert cluster.virus.pos == p22.Position(0, 0)\n assert cluster.infected[prev_pos] == p22.State.Flagged\n assert cluster.infected[cluster.virus.pos] == p22.State.Clean\n\n\[email protected](reason='too slow to test')\ndef test_solve_b():\n \"\"\"Tests for solve_b()\"\"\"\n print('\\nTesting solve_b()')\n assert p22.solve_b(100, '..#\\n#..\\n...') == 26\n assert p22.solve_b(10000000, '..#\\n#..\\n...') == 2511944\n\n\ndef test_solve_a0():\n \"\"\"Tests for solve_a0()\"\"\"\n print('\\nTesting solve_a0()')\n assert p22.solve_a0(7, '..#\\n#..\\n...') == 5\n assert p22.solve_a0(70, '..#\\n#..\\n...') == 41\n assert p22.solve_a0(10000, '..#\\n#..\\n...') == 5587\n\n\ndef test_solve_b0():\n \"\"\"Tests for solve_b0()\"\"\"\n print('\\nTesting solve_b0()')\n assert p22.solve_b0(100, '..#\\n#..\\n...') == 26\n assert p22.solve_b0(10000000, '..#\\n#..\\n...') == 2511944\n",
"step-3": "<mask token>\n\n\ndef test_burst():\n \"\"\"Test burst() in Cluster\"\"\"\n print('\\nTesting burst()')\n cluster = p22.Cluster('..#\\n#..\\n...')\n assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.left\n assert cluster.virus.pos == p22.Position(1, 0)\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Infected\n assert cluster.infected[cluster.virus.pos] == p22.State.Infected\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.up\n assert cluster.virus.pos == p22.Position(0, 0)\n assert cluster.infected[prev_pos] == p22.State.Clean\n for _ in range(4):\n assert cluster.infected[cluster.virus.pos] == p22.State.Clean\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.infected[prev_pos] == p22.State.Infected\n assert cluster.virus.pos == p22.Position(0, 0)\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.right\n assert cluster.virus.pos == p22.Position(0, 1)\n assert cluster.infected[prev_pos] == p22.State.Clean\n assert cluster.infections_caused == 5\n\n\ndef test_solve_a():\n \"\"\"Tests for solve_b()\"\"\"\n print('\\nTesting solve_a()')\n assert p22.solve_a(7, '..#\\n#..\\n...') == 5\n assert p22.solve_a(70, '..#\\n#..\\n...') == 41\n assert p22.solve_a(10000, '..#\\n#..\\n...') == 5587\n\n\ndef test_burst_evolved():\n \"\"\"Test burst() in EvolvedCluster\"\"\"\n cluster = p22.EvolvedCluster('..#\\n#..\\n...')\n assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.left\n assert cluster.virus.pos == 
p22.Position(1, 0)\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Weakened\n assert cluster.infected[cluster.virus.pos] == p22.State.Infected\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.up\n assert cluster.virus.pos == p22.Position(0, 0)\n assert cluster.infected[prev_pos] == p22.State.Flagged\n assert cluster.infected[cluster.virus.pos] == p22.State.Clean\n\n\[email protected](reason='too slow to test')\ndef test_solve_b():\n \"\"\"Tests for solve_b()\"\"\"\n print('\\nTesting solve_b()')\n assert p22.solve_b(100, '..#\\n#..\\n...') == 26\n assert p22.solve_b(10000000, '..#\\n#..\\n...') == 2511944\n\n\ndef test_solve_a0():\n \"\"\"Tests for solve_a0()\"\"\"\n print('\\nTesting solve_a0()')\n assert p22.solve_a0(7, '..#\\n#..\\n...') == 5\n assert p22.solve_a0(70, '..#\\n#..\\n...') == 41\n assert p22.solve_a0(10000, '..#\\n#..\\n...') == 5587\n\n\ndef test_solve_b0():\n \"\"\"Tests for solve_b0()\"\"\"\n print('\\nTesting solve_b0()')\n assert p22.solve_b0(100, '..#\\n#..\\n...') == 26\n assert p22.solve_b0(10000000, '..#\\n#..\\n...') == 2511944\n",
"step-4": "import pytest\nimport problem22 as p22\n\n\ndef test_burst():\n \"\"\"Test burst() in Cluster\"\"\"\n print('\\nTesting burst()')\n cluster = p22.Cluster('..#\\n#..\\n...')\n assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.left\n assert cluster.virus.pos == p22.Position(1, 0)\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Infected\n assert cluster.infected[cluster.virus.pos] == p22.State.Infected\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.up\n assert cluster.virus.pos == p22.Position(0, 0)\n assert cluster.infected[prev_pos] == p22.State.Clean\n for _ in range(4):\n assert cluster.infected[cluster.virus.pos] == p22.State.Clean\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.infected[prev_pos] == p22.State.Infected\n assert cluster.virus.pos == p22.Position(0, 0)\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.right\n assert cluster.virus.pos == p22.Position(0, 1)\n assert cluster.infected[prev_pos] == p22.State.Clean\n assert cluster.infections_caused == 5\n\n\ndef test_solve_a():\n \"\"\"Tests for solve_b()\"\"\"\n print('\\nTesting solve_a()')\n assert p22.solve_a(7, '..#\\n#..\\n...') == 5\n assert p22.solve_a(70, '..#\\n#..\\n...') == 41\n assert p22.solve_a(10000, '..#\\n#..\\n...') == 5587\n\n\ndef test_burst_evolved():\n \"\"\"Test burst() in EvolvedCluster\"\"\"\n cluster = p22.EvolvedCluster('..#\\n#..\\n...')\n assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.left\n assert 
cluster.virus.pos == p22.Position(1, 0)\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Weakened\n assert cluster.infected[cluster.virus.pos] == p22.State.Infected\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.up\n assert cluster.virus.pos == p22.Position(0, 0)\n assert cluster.infected[prev_pos] == p22.State.Flagged\n assert cluster.infected[cluster.virus.pos] == p22.State.Clean\n\n\[email protected](reason='too slow to test')\ndef test_solve_b():\n \"\"\"Tests for solve_b()\"\"\"\n print('\\nTesting solve_b()')\n assert p22.solve_b(100, '..#\\n#..\\n...') == 26\n assert p22.solve_b(10000000, '..#\\n#..\\n...') == 2511944\n\n\ndef test_solve_a0():\n \"\"\"Tests for solve_a0()\"\"\"\n print('\\nTesting solve_a0()')\n assert p22.solve_a0(7, '..#\\n#..\\n...') == 5\n assert p22.solve_a0(70, '..#\\n#..\\n...') == 41\n assert p22.solve_a0(10000, '..#\\n#..\\n...') == 5587\n\n\ndef test_solve_b0():\n \"\"\"Tests for solve_b0()\"\"\"\n print('\\nTesting solve_b0()')\n assert p22.solve_b0(100, '..#\\n#..\\n...') == 26\n assert p22.solve_b0(10000000, '..#\\n#..\\n...') == 2511944\n",
"step-5": "import pytest\nimport problem22 as p22\n\ndef test_burst():\n \"\"\"Test burst() in Cluster\"\"\"\n print('\\nTesting burst()')\n cluster = p22.Cluster('..#\\n#..\\n...')\n assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.left\n assert cluster.virus.pos == p22.Position(1,0)\n assert cluster.infected[p22.Position(1,1)] == p22.State.Infected\n assert cluster.infected[cluster.virus.pos] == p22.State.Infected\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.up # turned right\n assert cluster.virus.pos == p22.Position(0, 0) # moved up\n assert cluster.infected[prev_pos] == p22.State.Clean # cleaned\n # four times in a row finds clean and infects\n\n for _ in range(4):\n assert cluster.infected[cluster.virus.pos] == p22.State.Clean\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.infected[prev_pos] == p22.State.Infected\n assert cluster.virus.pos == p22.Position(0, 0)\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.right\n assert cluster.virus.pos == p22.Position(0, 1)\n assert cluster.infected[prev_pos] == p22.State.Clean\n assert cluster.infections_caused == 5\n\ndef test_solve_a():\n \"\"\"Tests for solve_b()\"\"\"\n print('\\nTesting solve_a()')\n assert p22.solve_a(7, '..#\\n#..\\n...') == 5\n assert p22.solve_a(70, '..#\\n#..\\n...') == 41\n assert p22.solve_a(10000, '..#\\n#..\\n...') == 5587\n\ndef test_burst_evolved():\n \"\"\"Test burst() in EvolvedCluster\"\"\"\n cluster = p22.EvolvedCluster('..#\\n#..\\n...')\n assert cluster.infected[p22.Position(0, 2)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 0)] == p22.State.Infected\n assert cluster.infected[p22.Position(1, 1)] == p22.State.Clean\n 
cluster.burst()\n assert cluster.virus.direction == p22.Directions.left\n assert cluster.virus.pos == p22.Position(1,0)\n assert cluster.infected[p22.Position(1,1)] == p22.State.Weakened\n assert cluster.infected[cluster.virus.pos] == p22.State.Infected\n prev_pos = cluster.virus.pos\n cluster.burst()\n assert cluster.virus.direction == p22.Directions.up\n assert cluster.virus.pos == p22.Position(0,0)\n assert cluster.infected[prev_pos] == p22.State.Flagged\n assert cluster.infected[cluster.virus.pos] == p22.State.Clean\n\n\[email protected](reason=\"too slow to test\")\ndef test_solve_b():\n \"\"\"Tests for solve_b()\"\"\"\n print('\\nTesting solve_b()')\n assert p22.solve_b(100, '..#\\n#..\\n...') == 26\n assert p22.solve_b(10000000, '..#\\n#..\\n...') == 2511944\n\ndef test_solve_a0():\n \"\"\"Tests for solve_a0()\"\"\"\n print('\\nTesting solve_a0()')\n assert p22.solve_a0(7, '..#\\n#..\\n...') == 5\n assert p22.solve_a0(70, '..#\\n#..\\n...') == 41\n assert p22.solve_a0(10000, '..#\\n#..\\n...') == 5587\n\ndef test_solve_b0():\n \"\"\"Tests for solve_b0()\"\"\"\n print('\\nTesting solve_b0()')\n assert p22.solve_b0(100, '..#\\n#..\\n...') == 26\n assert p22.solve_b0(10000000, '..#\\n#..\\n...') == 2511944",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
class Publisher:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class TopicPublisher(Publisher):
def __init__(self, exchange_name, channel, routing_key=None):
super().__init__(exchange_name, channel, routing_key)
def reconnect(self, channel):
self.exchange = kombu.Exchange(self.exchange_name, type='topic',
durable=True)
self.channel = channel
self._producer = kombu.Producer(exchange=self.exchange, channel=
channel, serializer='json', routing_key=self.routing_key)
class PublisherFactory:
def __init__(self, channel):
self.channel = channel
def get_publisher(self, key):
publisher = None
logger.debug('routing_key: %s' % key)
if key == 'nokkhum_compute.update_status':
routing_key = 'nokkhum_compute.update_status'
publisher = Publisher('nokkunm_compute.update_status', self.
channel, routing_key)
return publisher
else:
import fnmatch
import re
regex = fnmatch.translate('nokkhum_compute.*.rpc_*')
reobj = re.compile(regex)
if reobj.match(key):
routing_key = key
if 'nokkhum_compute.*.rpc_response' in routing_key:
publisher = TopicPublisher('nokkunm_compute.compute_rpc',
self.channel, routing_key)
elif 'nokkhum_compute.*.rpc_request':
publisher = TopicPublisher('nokkunm_compute.rpc', self.
channel, routing_key)
return publisher
return publisher
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Publisher:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def queue_declare(self, routing_key):
if routing_key is None:
return
if routing_key in self.routing_key_list:
return
self.routing_key_list.append(routing_key)
queue = queues.QueueFactory().get_queue(self.exchange, routing_key)
if queue:
queue(self.channel).declare()
<|reserved_special_token_0|>
def drop_routing_key(self, routing_key):
logger.debug('drop_routing_key: %s' % routing_key)
if routing_key in self.routing_key_list:
self.routing_key_list.remove(routing_key)
class TopicPublisher(Publisher):
def __init__(self, exchange_name, channel, routing_key=None):
super().__init__(exchange_name, channel, routing_key)
def reconnect(self, channel):
self.exchange = kombu.Exchange(self.exchange_name, type='topic',
durable=True)
self.channel = channel
self._producer = kombu.Producer(exchange=self.exchange, channel=
channel, serializer='json', routing_key=self.routing_key)
class PublisherFactory:
def __init__(self, channel):
self.channel = channel
def get_publisher(self, key):
publisher = None
logger.debug('routing_key: %s' % key)
if key == 'nokkhum_compute.update_status':
routing_key = 'nokkhum_compute.update_status'
publisher = Publisher('nokkunm_compute.update_status', self.
channel, routing_key)
return publisher
else:
import fnmatch
import re
regex = fnmatch.translate('nokkhum_compute.*.rpc_*')
reobj = re.compile(regex)
if reobj.match(key):
routing_key = key
if 'nokkhum_compute.*.rpc_response' in routing_key:
publisher = TopicPublisher('nokkunm_compute.compute_rpc',
self.channel, routing_key)
elif 'nokkhum_compute.*.rpc_request':
publisher = TopicPublisher('nokkunm_compute.rpc', self.
channel, routing_key)
return publisher
return publisher
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Publisher:
def __init__(self, exchange_name, channel, routing_key=None):
self.exchange_name = exchange_name
self._producer = None
self.exchange = None
self.channel = channel
self.routing_key_list = []
self.routing_key = routing_key
self.reconnect(channel)
def reconnect(self, channel):
cc.acquire()
self.exchange = kombu.Exchange(self.exchange_name, type='direct',
durable=True)
self.channel = channel
try:
self._producer = kombu.Producer(exchange=self.exchange, channel
=channel, serializer='json', routing_key=self.routing_key)
if self.routing_key:
self.queue_declare(self.routing_key)
except Exception as e:
logger.exception(e)
cc.release()
def queue_declare(self, routing_key):
if routing_key is None:
return
if routing_key in self.routing_key_list:
return
self.routing_key_list.append(routing_key)
queue = queues.QueueFactory().get_queue(self.exchange, routing_key)
if queue:
queue(self.channel).declare()
def send(self, message, routing_key=None):
result = False
cc.acquire()
try:
self._producer.publish(message, routing_key=routing_key)
result = True
except Exception as e:
logger.exception(e)
logger.debug('wait for connection')
cc.release()
return result
def drop_routing_key(self, routing_key):
logger.debug('drop_routing_key: %s' % routing_key)
if routing_key in self.routing_key_list:
self.routing_key_list.remove(routing_key)
class TopicPublisher(Publisher):
def __init__(self, exchange_name, channel, routing_key=None):
super().__init__(exchange_name, channel, routing_key)
def reconnect(self, channel):
self.exchange = kombu.Exchange(self.exchange_name, type='topic',
durable=True)
self.channel = channel
self._producer = kombu.Producer(exchange=self.exchange, channel=
channel, serializer='json', routing_key=self.routing_key)
class PublisherFactory:
def __init__(self, channel):
self.channel = channel
def get_publisher(self, key):
publisher = None
logger.debug('routing_key: %s' % key)
if key == 'nokkhum_compute.update_status':
routing_key = 'nokkhum_compute.update_status'
publisher = Publisher('nokkunm_compute.update_status', self.
channel, routing_key)
return publisher
else:
import fnmatch
import re
regex = fnmatch.translate('nokkhum_compute.*.rpc_*')
reobj = re.compile(regex)
if reobj.match(key):
routing_key = key
if 'nokkhum_compute.*.rpc_response' in routing_key:
publisher = TopicPublisher('nokkunm_compute.compute_rpc',
self.channel, routing_key)
elif 'nokkhum_compute.*.rpc_request':
publisher = TopicPublisher('nokkunm_compute.rpc', self.
channel, routing_key)
return publisher
return publisher
<|reserved_special_token_1|>
<|reserved_special_token_0|>
logger = logging.getLogger(__name__)
<|reserved_special_token_0|>
cc = threading.Condition()
class Publisher:
def __init__(self, exchange_name, channel, routing_key=None):
self.exchange_name = exchange_name
self._producer = None
self.exchange = None
self.channel = channel
self.routing_key_list = []
self.routing_key = routing_key
self.reconnect(channel)
def reconnect(self, channel):
cc.acquire()
self.exchange = kombu.Exchange(self.exchange_name, type='direct',
durable=True)
self.channel = channel
try:
self._producer = kombu.Producer(exchange=self.exchange, channel
=channel, serializer='json', routing_key=self.routing_key)
if self.routing_key:
self.queue_declare(self.routing_key)
except Exception as e:
logger.exception(e)
cc.release()
def queue_declare(self, routing_key):
if routing_key is None:
return
if routing_key in self.routing_key_list:
return
self.routing_key_list.append(routing_key)
queue = queues.QueueFactory().get_queue(self.exchange, routing_key)
if queue:
queue(self.channel).declare()
def send(self, message, routing_key=None):
result = False
cc.acquire()
try:
self._producer.publish(message, routing_key=routing_key)
result = True
except Exception as e:
logger.exception(e)
logger.debug('wait for connection')
cc.release()
return result
def drop_routing_key(self, routing_key):
logger.debug('drop_routing_key: %s' % routing_key)
if routing_key in self.routing_key_list:
self.routing_key_list.remove(routing_key)
class TopicPublisher(Publisher):
def __init__(self, exchange_name, channel, routing_key=None):
super().__init__(exchange_name, channel, routing_key)
def reconnect(self, channel):
self.exchange = kombu.Exchange(self.exchange_name, type='topic',
durable=True)
self.channel = channel
self._producer = kombu.Producer(exchange=self.exchange, channel=
channel, serializer='json', routing_key=self.routing_key)
class PublisherFactory:
def __init__(self, channel):
self.channel = channel
def get_publisher(self, key):
publisher = None
logger.debug('routing_key: %s' % key)
if key == 'nokkhum_compute.update_status':
routing_key = 'nokkhum_compute.update_status'
publisher = Publisher('nokkunm_compute.update_status', self.
channel, routing_key)
return publisher
else:
import fnmatch
import re
regex = fnmatch.translate('nokkhum_compute.*.rpc_*')
reobj = re.compile(regex)
if reobj.match(key):
routing_key = key
if 'nokkhum_compute.*.rpc_response' in routing_key:
publisher = TopicPublisher('nokkunm_compute.compute_rpc',
self.channel, routing_key)
elif 'nokkhum_compute.*.rpc_request':
publisher = TopicPublisher('nokkunm_compute.rpc', self.
channel, routing_key)
return publisher
return publisher
<|reserved_special_token_1|>
'''
Created on Dec 23, 2011
@author: boatkrap
'''
import kombu
from kombu.common import maybe_declare
from . import queues
import logging
logger = logging.getLogger(__name__)
import threading
cc = threading.Condition()
class Publisher:
def __init__(self, exchange_name, channel, routing_key=None):
self.exchange_name = exchange_name
self._producer = None
self.exchange = None
self.channel = channel
self.routing_key_list = []
self.routing_key = routing_key
self.reconnect(channel)
def reconnect(self, channel):
cc.acquire()
self.exchange = kombu.Exchange(
self.exchange_name, type="direct", durable=True)
self.channel = channel
try:
self._producer = kombu.Producer(exchange=self.exchange,
channel=channel, serializer="json",
routing_key=self.routing_key)
if self.routing_key:
self.queue_declare(self.routing_key)
except Exception as e:
logger.exception(e)
cc.release()
def queue_declare(self, routing_key):
if routing_key is None:
return
if routing_key in self.routing_key_list:
return
self.routing_key_list.append(routing_key)
queue = queues.QueueFactory().get_queue(self.exchange, routing_key)
if queue:
queue(self.channel).declare()
def send(self, message, routing_key=None):
result = False
cc.acquire()
try:
self._producer.publish(message, routing_key=routing_key)
result = True
except Exception as e:
logger.exception(e)
logger.debug("wait for connection")
cc.release()
return result
def drop_routing_key(self, routing_key):
logger.debug("drop_routing_key: %s" % routing_key)
if routing_key in self.routing_key_list:
self.routing_key_list.remove(routing_key)
class TopicPublisher(Publisher):
def __init__(self, exchange_name, channel, routing_key=None):
super().__init__(exchange_name, channel, routing_key)
def reconnect(self, channel):
self.exchange = kombu.Exchange(
self.exchange_name, type="topic", durable=True)
self.channel = channel
self._producer = kombu.Producer(exchange=self.exchange,
channel=channel, serializer="json",
routing_key=self.routing_key)
class PublisherFactory:
def __init__(self, channel):
self.channel = channel
def get_publisher(self, key):
publisher = None
logger.debug("routing_key: %s" % key)
if key == "nokkhum_compute.update_status":
routing_key = "nokkhum_compute.update_status"
publisher = Publisher(
"nokkunm_compute.update_status", self.channel, routing_key)
return publisher
else:
import fnmatch
import re
regex = fnmatch.translate('nokkhum_compute.*.rpc_*')
reobj = re.compile(regex)
if reobj.match(key):
routing_key = key
if "nokkhum_compute.*.rpc_response" in routing_key:
publisher = TopicPublisher(
"nokkunm_compute.compute_rpc", self.channel, routing_key)
elif "nokkhum_compute.*.rpc_request":
publisher = TopicPublisher(
"nokkunm_compute.rpc", self.channel, routing_key)
# logger.debug("get pub: %s"%publisher)
return publisher
return publisher
|
flexible
|
{
"blob_id": "8205541dcdd4627a535b14c6775f04b80e7c0d15",
"index": 3354,
"step-1": "<mask token>\n\n\nclass Publisher:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass TopicPublisher(Publisher):\n\n def __init__(self, exchange_name, channel, routing_key=None):\n super().__init__(exchange_name, channel, routing_key)\n\n def reconnect(self, channel):\n self.exchange = kombu.Exchange(self.exchange_name, type='topic',\n durable=True)\n self.channel = channel\n self._producer = kombu.Producer(exchange=self.exchange, channel=\n channel, serializer='json', routing_key=self.routing_key)\n\n\nclass PublisherFactory:\n\n def __init__(self, channel):\n self.channel = channel\n\n def get_publisher(self, key):\n publisher = None\n logger.debug('routing_key: %s' % key)\n if key == 'nokkhum_compute.update_status':\n routing_key = 'nokkhum_compute.update_status'\n publisher = Publisher('nokkunm_compute.update_status', self.\n channel, routing_key)\n return publisher\n else:\n import fnmatch\n import re\n regex = fnmatch.translate('nokkhum_compute.*.rpc_*')\n reobj = re.compile(regex)\n if reobj.match(key):\n routing_key = key\n if 'nokkhum_compute.*.rpc_response' in routing_key:\n publisher = TopicPublisher('nokkunm_compute.compute_rpc',\n self.channel, routing_key)\n elif 'nokkhum_compute.*.rpc_request':\n publisher = TopicPublisher('nokkunm_compute.rpc', self.\n channel, routing_key)\n return publisher\n return publisher\n",
"step-2": "<mask token>\n\n\nclass Publisher:\n <mask token>\n <mask token>\n\n def queue_declare(self, routing_key):\n if routing_key is None:\n return\n if routing_key in self.routing_key_list:\n return\n self.routing_key_list.append(routing_key)\n queue = queues.QueueFactory().get_queue(self.exchange, routing_key)\n if queue:\n queue(self.channel).declare()\n <mask token>\n\n def drop_routing_key(self, routing_key):\n logger.debug('drop_routing_key: %s' % routing_key)\n if routing_key in self.routing_key_list:\n self.routing_key_list.remove(routing_key)\n\n\nclass TopicPublisher(Publisher):\n\n def __init__(self, exchange_name, channel, routing_key=None):\n super().__init__(exchange_name, channel, routing_key)\n\n def reconnect(self, channel):\n self.exchange = kombu.Exchange(self.exchange_name, type='topic',\n durable=True)\n self.channel = channel\n self._producer = kombu.Producer(exchange=self.exchange, channel=\n channel, serializer='json', routing_key=self.routing_key)\n\n\nclass PublisherFactory:\n\n def __init__(self, channel):\n self.channel = channel\n\n def get_publisher(self, key):\n publisher = None\n logger.debug('routing_key: %s' % key)\n if key == 'nokkhum_compute.update_status':\n routing_key = 'nokkhum_compute.update_status'\n publisher = Publisher('nokkunm_compute.update_status', self.\n channel, routing_key)\n return publisher\n else:\n import fnmatch\n import re\n regex = fnmatch.translate('nokkhum_compute.*.rpc_*')\n reobj = re.compile(regex)\n if reobj.match(key):\n routing_key = key\n if 'nokkhum_compute.*.rpc_response' in routing_key:\n publisher = TopicPublisher('nokkunm_compute.compute_rpc',\n self.channel, routing_key)\n elif 'nokkhum_compute.*.rpc_request':\n publisher = TopicPublisher('nokkunm_compute.rpc', self.\n channel, routing_key)\n return publisher\n return publisher\n",
"step-3": "<mask token>\n\n\nclass Publisher:\n\n def __init__(self, exchange_name, channel, routing_key=None):\n self.exchange_name = exchange_name\n self._producer = None\n self.exchange = None\n self.channel = channel\n self.routing_key_list = []\n self.routing_key = routing_key\n self.reconnect(channel)\n\n def reconnect(self, channel):\n cc.acquire()\n self.exchange = kombu.Exchange(self.exchange_name, type='direct',\n durable=True)\n self.channel = channel\n try:\n self._producer = kombu.Producer(exchange=self.exchange, channel\n =channel, serializer='json', routing_key=self.routing_key)\n if self.routing_key:\n self.queue_declare(self.routing_key)\n except Exception as e:\n logger.exception(e)\n cc.release()\n\n def queue_declare(self, routing_key):\n if routing_key is None:\n return\n if routing_key in self.routing_key_list:\n return\n self.routing_key_list.append(routing_key)\n queue = queues.QueueFactory().get_queue(self.exchange, routing_key)\n if queue:\n queue(self.channel).declare()\n\n def send(self, message, routing_key=None):\n result = False\n cc.acquire()\n try:\n self._producer.publish(message, routing_key=routing_key)\n result = True\n except Exception as e:\n logger.exception(e)\n logger.debug('wait for connection')\n cc.release()\n return result\n\n def drop_routing_key(self, routing_key):\n logger.debug('drop_routing_key: %s' % routing_key)\n if routing_key in self.routing_key_list:\n self.routing_key_list.remove(routing_key)\n\n\nclass TopicPublisher(Publisher):\n\n def __init__(self, exchange_name, channel, routing_key=None):\n super().__init__(exchange_name, channel, routing_key)\n\n def reconnect(self, channel):\n self.exchange = kombu.Exchange(self.exchange_name, type='topic',\n durable=True)\n self.channel = channel\n self._producer = kombu.Producer(exchange=self.exchange, channel=\n channel, serializer='json', routing_key=self.routing_key)\n\n\nclass PublisherFactory:\n\n def __init__(self, channel):\n self.channel = channel\n\n def 
get_publisher(self, key):\n publisher = None\n logger.debug('routing_key: %s' % key)\n if key == 'nokkhum_compute.update_status':\n routing_key = 'nokkhum_compute.update_status'\n publisher = Publisher('nokkunm_compute.update_status', self.\n channel, routing_key)\n return publisher\n else:\n import fnmatch\n import re\n regex = fnmatch.translate('nokkhum_compute.*.rpc_*')\n reobj = re.compile(regex)\n if reobj.match(key):\n routing_key = key\n if 'nokkhum_compute.*.rpc_response' in routing_key:\n publisher = TopicPublisher('nokkunm_compute.compute_rpc',\n self.channel, routing_key)\n elif 'nokkhum_compute.*.rpc_request':\n publisher = TopicPublisher('nokkunm_compute.rpc', self.\n channel, routing_key)\n return publisher\n return publisher\n",
"step-4": "<mask token>\nlogger = logging.getLogger(__name__)\n<mask token>\ncc = threading.Condition()\n\n\nclass Publisher:\n\n def __init__(self, exchange_name, channel, routing_key=None):\n self.exchange_name = exchange_name\n self._producer = None\n self.exchange = None\n self.channel = channel\n self.routing_key_list = []\n self.routing_key = routing_key\n self.reconnect(channel)\n\n def reconnect(self, channel):\n cc.acquire()\n self.exchange = kombu.Exchange(self.exchange_name, type='direct',\n durable=True)\n self.channel = channel\n try:\n self._producer = kombu.Producer(exchange=self.exchange, channel\n =channel, serializer='json', routing_key=self.routing_key)\n if self.routing_key:\n self.queue_declare(self.routing_key)\n except Exception as e:\n logger.exception(e)\n cc.release()\n\n def queue_declare(self, routing_key):\n if routing_key is None:\n return\n if routing_key in self.routing_key_list:\n return\n self.routing_key_list.append(routing_key)\n queue = queues.QueueFactory().get_queue(self.exchange, routing_key)\n if queue:\n queue(self.channel).declare()\n\n def send(self, message, routing_key=None):\n result = False\n cc.acquire()\n try:\n self._producer.publish(message, routing_key=routing_key)\n result = True\n except Exception as e:\n logger.exception(e)\n logger.debug('wait for connection')\n cc.release()\n return result\n\n def drop_routing_key(self, routing_key):\n logger.debug('drop_routing_key: %s' % routing_key)\n if routing_key in self.routing_key_list:\n self.routing_key_list.remove(routing_key)\n\n\nclass TopicPublisher(Publisher):\n\n def __init__(self, exchange_name, channel, routing_key=None):\n super().__init__(exchange_name, channel, routing_key)\n\n def reconnect(self, channel):\n self.exchange = kombu.Exchange(self.exchange_name, type='topic',\n durable=True)\n self.channel = channel\n self._producer = kombu.Producer(exchange=self.exchange, channel=\n channel, serializer='json', routing_key=self.routing_key)\n\n\nclass 
PublisherFactory:\n\n def __init__(self, channel):\n self.channel = channel\n\n def get_publisher(self, key):\n publisher = None\n logger.debug('routing_key: %s' % key)\n if key == 'nokkhum_compute.update_status':\n routing_key = 'nokkhum_compute.update_status'\n publisher = Publisher('nokkunm_compute.update_status', self.\n channel, routing_key)\n return publisher\n else:\n import fnmatch\n import re\n regex = fnmatch.translate('nokkhum_compute.*.rpc_*')\n reobj = re.compile(regex)\n if reobj.match(key):\n routing_key = key\n if 'nokkhum_compute.*.rpc_response' in routing_key:\n publisher = TopicPublisher('nokkunm_compute.compute_rpc',\n self.channel, routing_key)\n elif 'nokkhum_compute.*.rpc_request':\n publisher = TopicPublisher('nokkunm_compute.rpc', self.\n channel, routing_key)\n return publisher\n return publisher\n",
"step-5": "'''\nCreated on Dec 23, 2011\n\n@author: boatkrap\n'''\n\nimport kombu\nfrom kombu.common import maybe_declare\n\nfrom . import queues\n\nimport logging\nlogger = logging.getLogger(__name__)\n\nimport threading\ncc = threading.Condition()\n\n\nclass Publisher:\n\n def __init__(self, exchange_name, channel, routing_key=None):\n\n self.exchange_name = exchange_name\n self._producer = None\n\n self.exchange = None\n self.channel = channel\n self.routing_key_list = []\n self.routing_key = routing_key\n self.reconnect(channel)\n\n def reconnect(self, channel):\n cc.acquire()\n self.exchange = kombu.Exchange(\n self.exchange_name, type=\"direct\", durable=True)\n self.channel = channel\n try:\n self._producer = kombu.Producer(exchange=self.exchange,\n channel=channel, serializer=\"json\",\n routing_key=self.routing_key)\n\n if self.routing_key:\n self.queue_declare(self.routing_key)\n except Exception as e:\n logger.exception(e)\n\n cc.release()\n\n def queue_declare(self, routing_key):\n if routing_key is None:\n return\n\n if routing_key in self.routing_key_list:\n return\n\n self.routing_key_list.append(routing_key)\n\n queue = queues.QueueFactory().get_queue(self.exchange, routing_key)\n if queue:\n\n queue(self.channel).declare()\n\n def send(self, message, routing_key=None):\n result = False\n cc.acquire()\n try:\n self._producer.publish(message, routing_key=routing_key)\n result = True\n except Exception as e:\n logger.exception(e)\n logger.debug(\"wait for connection\")\n cc.release()\n return result\n\n def drop_routing_key(self, routing_key):\n logger.debug(\"drop_routing_key: %s\" % routing_key)\n if routing_key in self.routing_key_list:\n self.routing_key_list.remove(routing_key)\n\n\nclass TopicPublisher(Publisher):\n\n def __init__(self, exchange_name, channel, routing_key=None):\n super().__init__(exchange_name, channel, routing_key)\n\n def reconnect(self, channel):\n self.exchange = kombu.Exchange(\n self.exchange_name, type=\"topic\", 
durable=True)\n self.channel = channel\n self._producer = kombu.Producer(exchange=self.exchange,\n channel=channel, serializer=\"json\",\n routing_key=self.routing_key)\n\n\nclass PublisherFactory:\n\n def __init__(self, channel):\n self.channel = channel\n\n def get_publisher(self, key):\n\n publisher = None\n logger.debug(\"routing_key: %s\" % key)\n if key == \"nokkhum_compute.update_status\":\n routing_key = \"nokkhum_compute.update_status\"\n publisher = Publisher(\n \"nokkunm_compute.update_status\", self.channel, routing_key)\n\n return publisher\n\n else:\n import fnmatch\n import re\n regex = fnmatch.translate('nokkhum_compute.*.rpc_*')\n reobj = re.compile(regex)\n if reobj.match(key):\n routing_key = key\n\n if \"nokkhum_compute.*.rpc_response\" in routing_key:\n publisher = TopicPublisher(\n \"nokkunm_compute.compute_rpc\", self.channel, routing_key)\n elif \"nokkhum_compute.*.rpc_request\":\n publisher = TopicPublisher(\n \"nokkunm_compute.rpc\", self.channel, routing_key)\n # logger.debug(\"get pub: %s\"%publisher)\n return publisher\n\n return publisher\n",
"step-ids": [
7,
9,
12,
13,
15
]
}
|
[
7,
9,
12,
13,
15
] |
def pin():
print('wqeqwwqe')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def pin():
print('wqeqwwqe')
<|reserved_special_token_0|>
window.title('爱你吆')
window.geometry('400x400+800+200')
window.protocol('WM_DELETE_WINDOW')
<|reserved_special_token_0|>
label.grid(row=10, column=10)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def pin():
print('wqeqwwqe')
<|reserved_special_token_0|>
window = Tk()
window.title('爱你吆')
window.geometry('400x400+800+200')
window.protocol('WM_DELETE_WINDOW')
label = Label(window, text='hey,小姐姐', font=('微软雅黑', 15))
label.grid(row=10, column=10)
window = mainloop()
<|reserved_special_token_1|>
def pin():
print('wqeqwwqe')
from tkinter import *
from tkinter import messagebox
from PIL import Image
from PIL import ImageTk
window = Tk()
window.title('爱你吆')
window.geometry('400x400+800+200')
window.protocol('WM_DELETE_WINDOW')
label = Label(window, text='hey,小姐姐', font=('微软雅黑', 15))
label.grid(row=10, column=10)
window = mainloop()
<|reserved_special_token_1|>
def pin():
print('wqeqwwqe')
from tkinter import *
from tkinter import messagebox
from PIL import Image
from PIL import ImageTk
window = Tk() #创建一个窗口
window.title('爱你吆') #定义窗口标题
window.geometry('400x400+800+200') #定义窗口大小 窗口显示位置
# window.protocol('WM_DELETE_WINDOW', pin) #摧毁窗口,引到另一个函数命令
window.protocol('WM_DELETE_WINDOW')
##############
label = Label(window, text='hey,小姐姐', font=("微软雅黑", 15))
# text 窗口文本 font 设置字体 fg设置字体颜色
label.grid(row=10, column=10) # 网格布局 显示位置
################# 人
window=mainloop()
|
flexible
|
{
"blob_id": "55c9fe8caf1983f22d5a752574f590fa129e8017",
"index": 1443,
"step-1": "def pin():\n print('wqeqwwqe')\n\n\n<mask token>\n",
"step-2": "def pin():\n print('wqeqwwqe')\n\n\n<mask token>\nwindow.title('爱你吆')\nwindow.geometry('400x400+800+200')\nwindow.protocol('WM_DELETE_WINDOW')\n<mask token>\nlabel.grid(row=10, column=10)\n<mask token>\n",
"step-3": "def pin():\n print('wqeqwwqe')\n\n\n<mask token>\nwindow = Tk()\nwindow.title('爱你吆')\nwindow.geometry('400x400+800+200')\nwindow.protocol('WM_DELETE_WINDOW')\nlabel = Label(window, text='hey,小姐姐', font=('微软雅黑', 15))\nlabel.grid(row=10, column=10)\nwindow = mainloop()\n",
"step-4": "def pin():\n print('wqeqwwqe')\n\n\nfrom tkinter import *\nfrom tkinter import messagebox\nfrom PIL import Image\nfrom PIL import ImageTk\nwindow = Tk()\nwindow.title('爱你吆')\nwindow.geometry('400x400+800+200')\nwindow.protocol('WM_DELETE_WINDOW')\nlabel = Label(window, text='hey,小姐姐', font=('微软雅黑', 15))\nlabel.grid(row=10, column=10)\nwindow = mainloop()\n",
"step-5": "def pin():\n print('wqeqwwqe')\n\n\n\nfrom tkinter import *\nfrom tkinter import messagebox\nfrom PIL import Image\nfrom PIL import ImageTk\nwindow = Tk() #创建一个窗口\nwindow.title('爱你吆') #定义窗口标题\nwindow.geometry('400x400+800+200') #定义窗口大小 窗口显示位置\n# window.protocol('WM_DELETE_WINDOW', pin) #摧毁窗口,引到另一个函数命令\nwindow.protocol('WM_DELETE_WINDOW')\n##############\nlabel = Label(window, text='hey,小姐姐', font=(\"微软雅黑\", 15))\n# text 窗口文本 font 设置字体 fg设置字体颜色\nlabel.grid(row=10, column=10) # 网格布局 显示位置\n################# 人\nwindow=mainloop()",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
bot = telebot.TeleBot(os.environ.get('TELEGRAM_ACCESS_TOCKEN', 'TOKEN'))
<|reserved_special_token_1|>
import os
import telebot
bot = telebot.TeleBot(os.environ.get('TELEGRAM_ACCESS_TOCKEN', 'TOKEN'))
|
flexible
|
{
"blob_id": "ce7b7980d1e93f23e7e3ef048ddadc0c779ef9ce",
"index": 7981,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nbot = telebot.TeleBot(os.environ.get('TELEGRAM_ACCESS_TOCKEN', 'TOKEN'))\n",
"step-3": "import os\nimport telebot\nbot = telebot.TeleBot(os.environ.get('TELEGRAM_ACCESS_TOCKEN', 'TOKEN'))\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import pytest
import numpy as np
from GSPA_DMC import SymmetrizeWfn as symm
def test_swap():
cds = np.load('h3o_data/ffinal_h3o.npy')
dws = np.load('h3o_data/ffinal_h3o_dw.npy')
cds = cds[:10]
a = symm.swap_two_atoms(cds, dws, atm_1=1, atm_2=2)
b = symm.swap_group(cds, dws, atm_list_1=[0, 1], atm_list_2=[2, 3])
assert True
|
normal
|
{
"blob_id": "4ecd756b94b0cbab47a8072e9bccf26e2dd716d0",
"index": 7833,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_swap():\n cds = np.load('h3o_data/ffinal_h3o.npy')\n dws = np.load('h3o_data/ffinal_h3o_dw.npy')\n cds = cds[:10]\n a = symm.swap_two_atoms(cds, dws, atm_1=1, atm_2=2)\n b = symm.swap_group(cds, dws, atm_list_1=[0, 1], atm_list_2=[2, 3])\n assert True\n",
"step-3": "import pytest\nimport numpy as np\nfrom GSPA_DMC import SymmetrizeWfn as symm\n\n\ndef test_swap():\n cds = np.load('h3o_data/ffinal_h3o.npy')\n dws = np.load('h3o_data/ffinal_h3o_dw.npy')\n cds = cds[:10]\n a = symm.swap_two_atoms(cds, dws, atm_1=1, atm_2=2)\n b = symm.swap_group(cds, dws, atm_list_1=[0, 1], atm_list_2=[2, 3])\n assert True\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#coding=utf-8
from __future__ import division
import os
def judgeReported(evi, content):
for item in evi['reported']:
flag = content.find(item)
if flag > 0:
return 'Y'
for item in evi['properly']['neg']:
flag = content.find(item)
if flag > 0:
return 'Y'
return 'N'
def judgeConducted(evi, content):
for item in evi['conducted']:
flag = content.find(item)
if flag > 0:
return 'N'
ran_flag = 'N'
for item in evi['reported']:
flag = content.find(item)
if flag > 0:
ran_flag = 'Y'
for item in evi['properly']['neg']:
flag = content.find(item)
if flag > 0 and ran_flag == 'N':
return 'N'
return 'Y'
def judgeDescribedOld(paper_id, evi, content):
score = {}
for k in content.keys():
score[k] = 1
excld = ['.',',',':','#','!','(',')','"','?']
for e in excld:
content[k] = content[k].replace(e, '')
for word in content[k].split():
if word in evi.keys():
score[k] *= evi[word]
else:
score[k] *= 0.1
ranking = [(score[key], key) for key in score.keys()]
ranking.sort()
ranking.reverse()
for kk in score.keys():
if score[kk] > 2000:
return 'Y'
return 'N'
def judgeDescribed(evi, content):
phrase = evi['properly']['pos'] + evi['properly']['neg']
for item in phrase:
flag = content.find(item)
if flag > 0:
return 'Y'
return 'N'
def judgeProperly(evi, content):
for p in evi['neg']:
if content.find(p) > 0:
print 'Not done properly:\t' + p
return 'N'
for q in evi['pos']:
if content.find(q) > 0:
print 'Done properly:\t' + q
return 'Y'
return 'N'
def getJudgement(paper_id, content_string, evidence):
result = {}
result['reported'] = judgeReported(evidence, content_string)
if result['reported'] == 'N':
result['conducted'] = 'N'
result['described'] = 'N'
result['properly'] = 'N'
result['result'] = 'E'
return result
else:
result['conducted'] = judgeConducted(evidence, content_string)
if result['conducted'] == 'N':
result['described'] = 'N'
result['properly'] = 'N'
result['result'] = 'D'
return result
else:
result['described'] = judgeDescribed(evidence, content_string)
if result['described'] == 'N':
result['properly'] = 'N'
result['result'] = 'C'
return result
else:
result['properly'] = judgeProperly(evidence['properly'], content_string)
if result['properly'] == 'N':
result['result'] = 'B'
else:
result['result'] = 'A'
return result
|
normal
|
{
"blob_id": "064f535b7ea0f1e4a09bdf830021f17d175beda7",
"index": 4422,
"step-1": "#coding=utf-8\n\nfrom __future__ import division\nimport os\n \ndef judgeReported(evi, content):\n for item in evi['reported']:\n flag = content.find(item)\n if flag > 0:\n return 'Y'\n for item in evi['properly']['neg']:\n flag = content.find(item)\n if flag > 0:\n return 'Y'\n return 'N'\n\ndef judgeConducted(evi, content):\n for item in evi['conducted']:\n flag = content.find(item)\n if flag > 0:\n return 'N'\n ran_flag = 'N'\n for item in evi['reported']:\n flag = content.find(item)\n if flag > 0:\n ran_flag = 'Y'\n for item in evi['properly']['neg']:\n flag = content.find(item)\n if flag > 0 and ran_flag == 'N':\n return 'N'\n return 'Y'\n \ndef judgeDescribedOld(paper_id, evi, content):\n score = {}\n for k in content.keys():\n score[k] = 1\n excld = ['.',',',':','#','!','(',')','\"','?']\n for e in excld:\n content[k] = content[k].replace(e, '')\n for word in content[k].split():\n if word in evi.keys():\n score[k] *= evi[word]\n else:\n score[k] *= 0.1\n ranking = [(score[key], key) for key in score.keys()]\n ranking.sort()\n ranking.reverse()\n \n for kk in score.keys():\n if score[kk] > 2000:\n return 'Y' \n return 'N'\n\ndef judgeDescribed(evi, content):\n phrase = evi['properly']['pos'] + evi['properly']['neg']\n for item in phrase:\n flag = content.find(item)\n if flag > 0:\n return 'Y'\n return 'N'\n \n\ndef judgeProperly(evi, content):\n for p in evi['neg']:\n if content.find(p) > 0:\n print 'Not done properly:\\t' + p\n return 'N'\n for q in evi['pos']:\n if content.find(q) > 0:\n print 'Done properly:\\t' + q\n return 'Y'\n return 'N'\n \ndef getJudgement(paper_id, content_string, evidence):\n\n result = {}\n result['reported'] = judgeReported(evidence, content_string)\n if result['reported'] == 'N':\n result['conducted'] = 'N'\n result['described'] = 'N'\n result['properly'] = 'N'\n result['result'] = 'E'\n return result\n else:\n result['conducted'] = judgeConducted(evidence, content_string)\n if result['conducted'] == 'N':\n 
result['described'] = 'N'\n result['properly'] = 'N'\n result['result'] = 'D'\n return result\n else:\n result['described'] = judgeDescribed(evidence, content_string)\n if result['described'] == 'N':\n result['properly'] = 'N'\n result['result'] = 'C'\n return result\n else:\n result['properly'] = judgeProperly(evidence['properly'], content_string)\n if result['properly'] == 'N':\n result['result'] = 'B'\n else:\n result['result'] = 'A'\n return result\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class __module_protocol__(typing.Protocol):
cmdline: org.cogroo.gc.cmdline.__module_protocol__
<|reserved_special_token_1|>
import org.cogroo.gc.cmdline
import typing
class __module_protocol__(typing.Protocol):
cmdline: org.cogroo.gc.cmdline.__module_protocol__
<|reserved_special_token_1|>
import org.cogroo.gc.cmdline
import typing
class __module_protocol__(typing.Protocol):
# A module protocol which reflects the result of ``jp.JPackage("org.cogroo.gc")``.
cmdline: org.cogroo.gc.cmdline.__module_protocol__
|
flexible
|
{
"blob_id": "f615e7bbfa9179d0bfb321242cd8df4ae7b48993",
"index": 3181,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass __module_protocol__(typing.Protocol):\n cmdline: org.cogroo.gc.cmdline.__module_protocol__\n",
"step-3": "import org.cogroo.gc.cmdline\nimport typing\n\n\nclass __module_protocol__(typing.Protocol):\n cmdline: org.cogroo.gc.cmdline.__module_protocol__\n",
"step-4": "import org.cogroo.gc.cmdline\nimport typing\n\n\nclass __module_protocol__(typing.Protocol):\n # A module protocol which reflects the result of ``jp.JPackage(\"org.cogroo.gc\")``.\n\n cmdline: org.cogroo.gc.cmdline.__module_protocol__\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import numpy as np
import matplotlib.pyplot as plt
def cos_Taylor2(x, n):
s = 0
a = 1
for i in range(0, n+1):
s = s+a
a = -a*x**2 / ((2*i+1)*(2*i+2))
return s, abs(a)
vcos = np.vectorize(cos_Taylor2)
def cos_two_terms(x):
s = 0
a = 1
s = s+a
a = -a*x**2 / ((2*0+1)*(2*0+2))
s = s + a
a = -a*x**2 / ((2*1+1)*(2*1+2))
s = s + a
a = -a*x**2 / ((2*2+1)*(2*2+2))
return s, abs(a)
def test_cos_Taylor():
x = 0.63
tol = 1e-14
s_expected, a_expected = cos_two_terms(x)
s_computed, a_computed = cos_Taylor2(x,2)
success1 = abs(s_computed - s_expected) < tol
success2 = abs(a_computed - a_expected) < tol
success = success1 and success2
message = 'Output is different from expected!'
assert success, message
test_cos_Taylor()
x = np.linspace(-5,5,100)
n = [0,2,4,6]
for i in n:
y = vcos(x, i)
plt.plot(x, y[0], label='n = %g' % i)
y = np.cos(x)
plt.plot(x, y, 'b-', label = 'expected')
plt.ylim(-1.1,1.1)
plt.legend()
plt.savefig('cos_Taylor_series_diffeq.png')
plt.show()
'''
Terminal> cos_Taylor_series_diffeq.py"
Process finished with exit code 0
'''
|
normal
|
{
"blob_id": "fb0dcb641dfb379751264dc0b18007f5d058d379",
"index": 3520,
"step-1": "<mask token>\n\n\ndef cos_two_terms(x):\n s = 0\n a = 1\n s = s + a\n a = -a * x ** 2 / ((2 * 0 + 1) * (2 * 0 + 2))\n s = s + a\n a = -a * x ** 2 / ((2 * 1 + 1) * (2 * 1 + 2))\n s = s + a\n a = -a * x ** 2 / ((2 * 2 + 1) * (2 * 2 + 2))\n return s, abs(a)\n\n\ndef test_cos_Taylor():\n x = 0.63\n tol = 1e-14\n s_expected, a_expected = cos_two_terms(x)\n s_computed, a_computed = cos_Taylor2(x, 2)\n success1 = abs(s_computed - s_expected) < tol\n success2 = abs(a_computed - a_expected) < tol\n success = success1 and success2\n message = 'Output is different from expected!'\n assert success, message\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef cos_Taylor2(x, n):\n s = 0\n a = 1\n for i in range(0, n + 1):\n s = s + a\n a = -a * x ** 2 / ((2 * i + 1) * (2 * i + 2))\n return s, abs(a)\n\n\n<mask token>\n\n\ndef cos_two_terms(x):\n s = 0\n a = 1\n s = s + a\n a = -a * x ** 2 / ((2 * 0 + 1) * (2 * 0 + 2))\n s = s + a\n a = -a * x ** 2 / ((2 * 1 + 1) * (2 * 1 + 2))\n s = s + a\n a = -a * x ** 2 / ((2 * 2 + 1) * (2 * 2 + 2))\n return s, abs(a)\n\n\ndef test_cos_Taylor():\n x = 0.63\n tol = 1e-14\n s_expected, a_expected = cos_two_terms(x)\n s_computed, a_computed = cos_Taylor2(x, 2)\n success1 = abs(s_computed - s_expected) < tol\n success2 = abs(a_computed - a_expected) < tol\n success = success1 and success2\n message = 'Output is different from expected!'\n assert success, message\n\n\ntest_cos_Taylor()\n<mask token>\nfor i in n:\n y = vcos(x, i)\n plt.plot(x, y[0], label='n = %g' % i)\n<mask token>\nplt.plot(x, y, 'b-', label='expected')\nplt.ylim(-1.1, 1.1)\nplt.legend()\nplt.savefig('cos_Taylor_series_diffeq.png')\nplt.show()\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef cos_Taylor2(x, n):\n s = 0\n a = 1\n for i in range(0, n + 1):\n s = s + a\n a = -a * x ** 2 / ((2 * i + 1) * (2 * i + 2))\n return s, abs(a)\n\n\nvcos = np.vectorize(cos_Taylor2)\n\n\ndef cos_two_terms(x):\n s = 0\n a = 1\n s = s + a\n a = -a * x ** 2 / ((2 * 0 + 1) * (2 * 0 + 2))\n s = s + a\n a = -a * x ** 2 / ((2 * 1 + 1) * (2 * 1 + 2))\n s = s + a\n a = -a * x ** 2 / ((2 * 2 + 1) * (2 * 2 + 2))\n return s, abs(a)\n\n\ndef test_cos_Taylor():\n x = 0.63\n tol = 1e-14\n s_expected, a_expected = cos_two_terms(x)\n s_computed, a_computed = cos_Taylor2(x, 2)\n success1 = abs(s_computed - s_expected) < tol\n success2 = abs(a_computed - a_expected) < tol\n success = success1 and success2\n message = 'Output is different from expected!'\n assert success, message\n\n\ntest_cos_Taylor()\nx = np.linspace(-5, 5, 100)\nn = [0, 2, 4, 6]\nfor i in n:\n y = vcos(x, i)\n plt.plot(x, y[0], label='n = %g' % i)\ny = np.cos(x)\nplt.plot(x, y, 'b-', label='expected')\nplt.ylim(-1.1, 1.1)\nplt.legend()\nplt.savefig('cos_Taylor_series_diffeq.png')\nplt.show()\n<mask token>\n",
"step-4": "import numpy as np\nimport matplotlib.pyplot as plt\n\n\ndef cos_Taylor2(x, n):\n s = 0\n a = 1\n for i in range(0, n + 1):\n s = s + a\n a = -a * x ** 2 / ((2 * i + 1) * (2 * i + 2))\n return s, abs(a)\n\n\nvcos = np.vectorize(cos_Taylor2)\n\n\ndef cos_two_terms(x):\n s = 0\n a = 1\n s = s + a\n a = -a * x ** 2 / ((2 * 0 + 1) * (2 * 0 + 2))\n s = s + a\n a = -a * x ** 2 / ((2 * 1 + 1) * (2 * 1 + 2))\n s = s + a\n a = -a * x ** 2 / ((2 * 2 + 1) * (2 * 2 + 2))\n return s, abs(a)\n\n\ndef test_cos_Taylor():\n x = 0.63\n tol = 1e-14\n s_expected, a_expected = cos_two_terms(x)\n s_computed, a_computed = cos_Taylor2(x, 2)\n success1 = abs(s_computed - s_expected) < tol\n success2 = abs(a_computed - a_expected) < tol\n success = success1 and success2\n message = 'Output is different from expected!'\n assert success, message\n\n\ntest_cos_Taylor()\nx = np.linspace(-5, 5, 100)\nn = [0, 2, 4, 6]\nfor i in n:\n y = vcos(x, i)\n plt.plot(x, y[0], label='n = %g' % i)\ny = np.cos(x)\nplt.plot(x, y, 'b-', label='expected')\nplt.ylim(-1.1, 1.1)\nplt.legend()\nplt.savefig('cos_Taylor_series_diffeq.png')\nplt.show()\n<mask token>\n",
"step-5": "import numpy as np\nimport matplotlib.pyplot as plt\n\n\ndef cos_Taylor2(x, n):\n s = 0\n a = 1\n for i in range(0, n+1):\n s = s+a\n a = -a*x**2 / ((2*i+1)*(2*i+2))\n return s, abs(a)\nvcos = np.vectorize(cos_Taylor2)\n\n\ndef cos_two_terms(x):\n s = 0\n a = 1\n s = s+a\n a = -a*x**2 / ((2*0+1)*(2*0+2))\n s = s + a\n a = -a*x**2 / ((2*1+1)*(2*1+2))\n s = s + a\n a = -a*x**2 / ((2*2+1)*(2*2+2))\n return s, abs(a)\n\n\ndef test_cos_Taylor():\n x = 0.63\n tol = 1e-14\n s_expected, a_expected = cos_two_terms(x)\n s_computed, a_computed = cos_Taylor2(x,2)\n success1 = abs(s_computed - s_expected) < tol\n success2 = abs(a_computed - a_expected) < tol\n success = success1 and success2\n message = 'Output is different from expected!'\n assert success, message\ntest_cos_Taylor()\n\n\nx = np.linspace(-5,5,100)\nn = [0,2,4,6]\nfor i in n:\n y = vcos(x, i)\n plt.plot(x, y[0], label='n = %g' % i)\ny = np.cos(x)\nplt.plot(x, y, 'b-', label = 'expected')\nplt.ylim(-1.1,1.1)\nplt.legend()\nplt.savefig('cos_Taylor_series_diffeq.png')\nplt.show()\n\n\n'''\nTerminal> cos_Taylor_series_diffeq.py\"\n\nProcess finished with exit code 0\n'''",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
str="mama"
stringlength=len(str)
slicedString=str[stringlength::-1]
print (slicedString)
|
normal
|
{
"blob_id": "5c80561a3344c0240e59500e5dadc1f1ef7f380e",
"index": 7687,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(slicedString)\n",
"step-3": "str = 'mama'\nstringlength = len(str)\nslicedString = str[stringlength::-1]\nprint(slicedString)\n",
"step-4": "str=\"mama\"\r\nstringlength=len(str)\r\nslicedString=str[stringlength::-1]\r\nprint (slicedString)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
print('Enter string:')
<|reserved_special_token_0|>
for i in a:
if i in vowels:
v += 1
elif i in consonants:
c += 1
elif i in digits:
d += 1
elif i in whitespace:
ws += 1
print(v, c, d, ws)
<|reserved_special_token_1|>
print('Enter string:')
s = input()
a = s.lower()
vowels = 'aeiou'
consonants = 'bcdfghjklmnpqrstvwxyz'
digits = '1234567890'
whitespace = ' '
c = 0
v = 0
d = 0
ws = 0
for i in a:
if i in vowels:
v += 1
elif i in consonants:
c += 1
elif i in digits:
d += 1
elif i in whitespace:
ws += 1
print(v, c, d, ws)
<|reserved_special_token_1|>
print("Enter string:")
s=input()
a = s.lower()
vowels = "aeiou"
consonants = "bcdfghjklmnpqrstvwxyz"
digits = "1234567890"
whitespace = " "
c = 0
v = 0
d = 0
ws= 0
for i in a:
if i in vowels:
v+=1
elif i in consonants:
c+=1
elif i in digits:
d+=1
elif i in whitespace:
ws+=1
print(v,c,d,ws)
|
flexible
|
{
"blob_id": "088c77e090d444e7057a91cac606995fb523c8ef",
"index": 3079,
"step-1": "<mask token>\n",
"step-2": "print('Enter string:')\n<mask token>\nfor i in a:\n if i in vowels:\n v += 1\n elif i in consonants:\n c += 1\n elif i in digits:\n d += 1\n elif i in whitespace:\n ws += 1\nprint(v, c, d, ws)\n",
"step-3": "print('Enter string:')\ns = input()\na = s.lower()\nvowels = 'aeiou'\nconsonants = 'bcdfghjklmnpqrstvwxyz'\ndigits = '1234567890'\nwhitespace = ' '\nc = 0\nv = 0\nd = 0\nws = 0\nfor i in a:\n if i in vowels:\n v += 1\n elif i in consonants:\n c += 1\n elif i in digits:\n d += 1\n elif i in whitespace:\n ws += 1\nprint(v, c, d, ws)\n",
"step-4": "print(\"Enter string:\")\ns=input()\na = s.lower()\n\n\nvowels = \"aeiou\"\nconsonants = \"bcdfghjklmnpqrstvwxyz\"\ndigits = \"1234567890\"\nwhitespace = \" \"\n\nc = 0\nv = 0\nd = 0\nws= 0\n\nfor i in a:\n if i in vowels:\n v+=1\n elif i in consonants:\n c+=1\n elif i in digits:\n d+=1\n elif i in whitespace:\n ws+=1\n\nprint(v,c,d,ws)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""
Program file: DataParser.py.
This program parses and returns a dataset for a plotting program
"""
from sys import exit
from csv import Sniffer, DictReader
class DataParser:
"""
Summary: parses a data file, and returns list of the filtered data.
Instances:
1. accepted_records
2. ignored_records
Methods:
1. valid_value
2. create_reader
3. create_dataset
4. get_dataset
"""
def __init__(self, csvfile, data_centers):
"""DataParser constructor."""
self.accepted_records = []
self.ignored_records = []
with open(csvfile, 'r') as file:
# Creates a reader object for later data manipulation
reader = self.create_reader(file)
# Resetting read/write pointer to beginning of file
file.seek(0)
# Creating list for graphing data center's dataset
self.create_dataset(reader, data_centers)
def valid_value(self, number):
"""
Summary: Checks that value is a valid positive number.
Description: Accepts positive whole and decimal numbers.
"""
try:
# Checking that entered value can be converted to a float.
# Excludes letters and symbols.
float(number)
# Checking that validated number is nonnegative.
if float(number) > 0:
return True
return False
except ValueError:
return False
def create_reader(self, csvfile):
"""
Summary: Validates a csv file, returns a DictReader object.
Description: Takes one argument: "data" (Should be a csv file)
"""
# Determines the dialect of the csv file for processing
file_dialect = Sniffer().sniff(csvfile.read(1024))
# Resets the read/write pointer within the file
csvfile.seek(0)
# Checks to see that the csv file imported has a header row,
# that will be used for later parsing.
if not Sniffer().has_header(csvfile.read(1024)):
print('Imported csv file lacks header row')
exit()
# Resets the read/write pointer within the file
csvfile.seek(0)
# Creates a DictReader object with the csvfile provided, and the
# dialect object to define the parameters of the reader instance.
reader = DictReader(csvfile, dialect=file_dialect)
# Return DictReader object
return reader
def create_dataset(self, reader=None, data_centers=None):
"""
Summary: Creates a dataset of dcs and their respective times, values.
Arguments: 'reader' defines a reader object used to read a csv file.
'dataCenters' is a list containing data center names that are to be
graphed.
"""
for row in reader:
# Checking that the 'DC' matches one defined in "data_centers" list
if row.get('DC') in data_centers:
# Validating DC's value is a positive nonnegative number.
if not self.valid_value(row.get('Value')):
# Archiving ignored records for later analysis
self.ignored_records.append(row)
else:
self.accepted_records.append(
[
row.get('DC'),
float(row.get('Time')),
float(row.get('Value'))
]
)
def get_dataset(self):
"""Getter for accepted_records list."""
return self.accepted_records
|
normal
|
{
"blob_id": "af609f1558276bab96477d3a2c61d813b9dd3d82",
"index": 9660,
"step-1": "<mask token>\n\n\nclass DataParser:\n <mask token>\n\n def __init__(self, csvfile, data_centers):\n \"\"\"DataParser constructor.\"\"\"\n self.accepted_records = []\n self.ignored_records = []\n with open(csvfile, 'r') as file:\n reader = self.create_reader(file)\n file.seek(0)\n self.create_dataset(reader, data_centers)\n\n def valid_value(self, number):\n \"\"\"\n Summary: Checks that value is a valid positive number.\n\n Description: Accepts positive whole and decimal numbers.\n \"\"\"\n try:\n float(number)\n if float(number) > 0:\n return True\n return False\n except ValueError:\n return False\n\n def create_reader(self, csvfile):\n \"\"\"\n Summary: Validates a csv file, returns a DictReader object.\n\n Description: Takes one argument: \"data\" (Should be a csv file)\n \"\"\"\n file_dialect = Sniffer().sniff(csvfile.read(1024))\n csvfile.seek(0)\n if not Sniffer().has_header(csvfile.read(1024)):\n print('Imported csv file lacks header row')\n exit()\n csvfile.seek(0)\n reader = DictReader(csvfile, dialect=file_dialect)\n return reader\n <mask token>\n\n def get_dataset(self):\n \"\"\"Getter for accepted_records list.\"\"\"\n return self.accepted_records\n",
"step-2": "<mask token>\n\n\nclass DataParser:\n <mask token>\n\n def __init__(self, csvfile, data_centers):\n \"\"\"DataParser constructor.\"\"\"\n self.accepted_records = []\n self.ignored_records = []\n with open(csvfile, 'r') as file:\n reader = self.create_reader(file)\n file.seek(0)\n self.create_dataset(reader, data_centers)\n\n def valid_value(self, number):\n \"\"\"\n Summary: Checks that value is a valid positive number.\n\n Description: Accepts positive whole and decimal numbers.\n \"\"\"\n try:\n float(number)\n if float(number) > 0:\n return True\n return False\n except ValueError:\n return False\n\n def create_reader(self, csvfile):\n \"\"\"\n Summary: Validates a csv file, returns a DictReader object.\n\n Description: Takes one argument: \"data\" (Should be a csv file)\n \"\"\"\n file_dialect = Sniffer().sniff(csvfile.read(1024))\n csvfile.seek(0)\n if not Sniffer().has_header(csvfile.read(1024)):\n print('Imported csv file lacks header row')\n exit()\n csvfile.seek(0)\n reader = DictReader(csvfile, dialect=file_dialect)\n return reader\n\n def create_dataset(self, reader=None, data_centers=None):\n \"\"\"\n Summary: Creates a dataset of dcs and their respective times, values.\n\n Arguments: 'reader' defines a reader object used to read a csv file.\n 'dataCenters' is a list containing data center names that are to be\n graphed.\n \"\"\"\n for row in reader:\n if row.get('DC') in data_centers:\n if not self.valid_value(row.get('Value')):\n self.ignored_records.append(row)\n else:\n self.accepted_records.append([row.get('DC'), float(row.\n get('Time')), float(row.get('Value'))])\n\n def get_dataset(self):\n \"\"\"Getter for accepted_records list.\"\"\"\n return self.accepted_records\n",
"step-3": "<mask token>\n\n\nclass DataParser:\n \"\"\"\n Summary: parses a data file, and returns list of the filtered data.\n\n Instances:\n 1. accepted_records\n 2. ignored_records\n\n Methods:\n 1. valid_value\n 2. create_reader\n 3. create_dataset\n 4. get_dataset\n \"\"\"\n\n def __init__(self, csvfile, data_centers):\n \"\"\"DataParser constructor.\"\"\"\n self.accepted_records = []\n self.ignored_records = []\n with open(csvfile, 'r') as file:\n reader = self.create_reader(file)\n file.seek(0)\n self.create_dataset(reader, data_centers)\n\n def valid_value(self, number):\n \"\"\"\n Summary: Checks that value is a valid positive number.\n\n Description: Accepts positive whole and decimal numbers.\n \"\"\"\n try:\n float(number)\n if float(number) > 0:\n return True\n return False\n except ValueError:\n return False\n\n def create_reader(self, csvfile):\n \"\"\"\n Summary: Validates a csv file, returns a DictReader object.\n\n Description: Takes one argument: \"data\" (Should be a csv file)\n \"\"\"\n file_dialect = Sniffer().sniff(csvfile.read(1024))\n csvfile.seek(0)\n if not Sniffer().has_header(csvfile.read(1024)):\n print('Imported csv file lacks header row')\n exit()\n csvfile.seek(0)\n reader = DictReader(csvfile, dialect=file_dialect)\n return reader\n\n def create_dataset(self, reader=None, data_centers=None):\n \"\"\"\n Summary: Creates a dataset of dcs and their respective times, values.\n\n Arguments: 'reader' defines a reader object used to read a csv file.\n 'dataCenters' is a list containing data center names that are to be\n graphed.\n \"\"\"\n for row in reader:\n if row.get('DC') in data_centers:\n if not self.valid_value(row.get('Value')):\n self.ignored_records.append(row)\n else:\n self.accepted_records.append([row.get('DC'), float(row.\n get('Time')), float(row.get('Value'))])\n\n def get_dataset(self):\n \"\"\"Getter for accepted_records list.\"\"\"\n return self.accepted_records\n",
"step-4": "<mask token>\nfrom sys import exit\nfrom csv import Sniffer, DictReader\n\n\nclass DataParser:\n \"\"\"\n Summary: parses a data file, and returns list of the filtered data.\n\n Instances:\n 1. accepted_records\n 2. ignored_records\n\n Methods:\n 1. valid_value\n 2. create_reader\n 3. create_dataset\n 4. get_dataset\n \"\"\"\n\n def __init__(self, csvfile, data_centers):\n \"\"\"DataParser constructor.\"\"\"\n self.accepted_records = []\n self.ignored_records = []\n with open(csvfile, 'r') as file:\n reader = self.create_reader(file)\n file.seek(0)\n self.create_dataset(reader, data_centers)\n\n def valid_value(self, number):\n \"\"\"\n Summary: Checks that value is a valid positive number.\n\n Description: Accepts positive whole and decimal numbers.\n \"\"\"\n try:\n float(number)\n if float(number) > 0:\n return True\n return False\n except ValueError:\n return False\n\n def create_reader(self, csvfile):\n \"\"\"\n Summary: Validates a csv file, returns a DictReader object.\n\n Description: Takes one argument: \"data\" (Should be a csv file)\n \"\"\"\n file_dialect = Sniffer().sniff(csvfile.read(1024))\n csvfile.seek(0)\n if not Sniffer().has_header(csvfile.read(1024)):\n print('Imported csv file lacks header row')\n exit()\n csvfile.seek(0)\n reader = DictReader(csvfile, dialect=file_dialect)\n return reader\n\n def create_dataset(self, reader=None, data_centers=None):\n \"\"\"\n Summary: Creates a dataset of dcs and their respective times, values.\n\n Arguments: 'reader' defines a reader object used to read a csv file.\n 'dataCenters' is a list containing data center names that are to be\n graphed.\n \"\"\"\n for row in reader:\n if row.get('DC') in data_centers:\n if not self.valid_value(row.get('Value')):\n self.ignored_records.append(row)\n else:\n self.accepted_records.append([row.get('DC'), float(row.\n get('Time')), float(row.get('Value'))])\n\n def get_dataset(self):\n \"\"\"Getter for accepted_records list.\"\"\"\n return 
self.accepted_records\n",
"step-5": "\"\"\"\nProgram file: DataParser.py.\n\nThis program parses and returns a dataset for a plotting program\n\"\"\"\n\nfrom sys import exit\nfrom csv import Sniffer, DictReader\n\n\nclass DataParser:\n \"\"\"\n Summary: parses a data file, and returns list of the filtered data.\n\n Instances:\n 1. accepted_records\n 2. ignored_records\n\n Methods:\n 1. valid_value\n 2. create_reader\n 3. create_dataset\n 4. get_dataset\n \"\"\"\n\n def __init__(self, csvfile, data_centers):\n \"\"\"DataParser constructor.\"\"\"\n self.accepted_records = []\n self.ignored_records = []\n\n with open(csvfile, 'r') as file:\n # Creates a reader object for later data manipulation\n reader = self.create_reader(file)\n\n # Resetting read/write pointer to beginning of file\n file.seek(0)\n\n # Creating list for graphing data center's dataset\n self.create_dataset(reader, data_centers)\n\n def valid_value(self, number):\n \"\"\"\n Summary: Checks that value is a valid positive number.\n\n Description: Accepts positive whole and decimal numbers.\n \"\"\"\n try:\n # Checking that entered value can be converted to a float.\n # Excludes letters and symbols.\n float(number)\n\n # Checking that validated number is nonnegative.\n if float(number) > 0:\n return True\n return False\n except ValueError:\n return False\n\n def create_reader(self, csvfile):\n \"\"\"\n Summary: Validates a csv file, returns a DictReader object.\n\n Description: Takes one argument: \"data\" (Should be a csv file)\n \"\"\"\n # Determines the dialect of the csv file for processing\n file_dialect = Sniffer().sniff(csvfile.read(1024))\n\n # Resets the read/write pointer within the file\n csvfile.seek(0)\n\n # Checks to see that the csv file imported has a header row,\n # that will be used for later parsing.\n if not Sniffer().has_header(csvfile.read(1024)):\n print('Imported csv file lacks header row')\n exit()\n\n # Resets the read/write pointer within the file\n csvfile.seek(0)\n\n # Creates a DictReader object 
with the csvfile provided, and the\n # dialect object to define the parameters of the reader instance.\n reader = DictReader(csvfile, dialect=file_dialect)\n\n # Return DictReader object\n return reader\n\n def create_dataset(self, reader=None, data_centers=None):\n \"\"\"\n Summary: Creates a dataset of dcs and their respective times, values.\n\n Arguments: 'reader' defines a reader object used to read a csv file.\n 'dataCenters' is a list containing data center names that are to be\n graphed.\n \"\"\"\n for row in reader:\n # Checking that the 'DC' matches one defined in \"data_centers\" list\n if row.get('DC') in data_centers:\n # Validating DC's value is a positive nonnegative number.\n if not self.valid_value(row.get('Value')):\n # Archiving ignored records for later analysis\n self.ignored_records.append(row)\n else:\n self.accepted_records.append(\n [\n row.get('DC'),\n float(row.get('Time')),\n float(row.get('Value'))\n ]\n )\n\n def get_dataset(self):\n \"\"\"Getter for accepted_records list.\"\"\"\n return self.accepted_records\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
import numpy as np
from flask import Flask,request,render_template
import pickle
from werkzeug.serving import run_simple
app=Flask(__name__,template_folder='template')
model=pickle.load(open("model.pkl",'rb'))
@app.route('/')
def home():
return render_template('index.html')
@app.route('/predict',methods=['POST'])
def predict():
arr=[int(x) for x in request.form.values()]
arr2=[np.array(arr)]
output=model.predict(arr2)
# o2=round(output)
return render_template('index.html',prediction_text=output)
if __name__ == "__main__":
run_simple('localhost',8001,app,use_reloader=False)
|
normal
|
{
"blob_id": "02b760b16cdcd42f8d8d7222b439da87fb8076a3",
"index": 4959,
"step-1": "<mask token>\n\n\[email protected]('/predict', methods=['POST'])\ndef predict():\n arr = [int(x) for x in request.form.values()]\n arr2 = [np.array(arr)]\n output = model.predict(arr2)\n return render_template('index.html', prediction_text=output)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/')\ndef home():\n return render_template('index.html')\n\n\[email protected]('/predict', methods=['POST'])\ndef predict():\n arr = [int(x) for x in request.form.values()]\n arr2 = [np.array(arr)]\n output = model.predict(arr2)\n return render_template('index.html', prediction_text=output)\n\n\nif __name__ == '__main__':\n run_simple('localhost', 8001, app, use_reloader=False)\n",
"step-3": "<mask token>\napp = Flask(__name__, template_folder='template')\nmodel = pickle.load(open('model.pkl', 'rb'))\n\n\[email protected]('/')\ndef home():\n return render_template('index.html')\n\n\[email protected]('/predict', methods=['POST'])\ndef predict():\n arr = [int(x) for x in request.form.values()]\n arr2 = [np.array(arr)]\n output = model.predict(arr2)\n return render_template('index.html', prediction_text=output)\n\n\nif __name__ == '__main__':\n run_simple('localhost', 8001, app, use_reloader=False)\n",
"step-4": "import numpy as np\nfrom flask import Flask, request, render_template\nimport pickle\nfrom werkzeug.serving import run_simple\napp = Flask(__name__, template_folder='template')\nmodel = pickle.load(open('model.pkl', 'rb'))\n\n\[email protected]('/')\ndef home():\n return render_template('index.html')\n\n\[email protected]('/predict', methods=['POST'])\ndef predict():\n arr = [int(x) for x in request.form.values()]\n arr2 = [np.array(arr)]\n output = model.predict(arr2)\n return render_template('index.html', prediction_text=output)\n\n\nif __name__ == '__main__':\n run_simple('localhost', 8001, app, use_reloader=False)\n",
"step-5": "import numpy as np\r\nfrom flask import Flask,request,render_template\r\nimport pickle\r\nfrom werkzeug.serving import run_simple\r\n\r\napp=Flask(__name__,template_folder='template')\r\nmodel=pickle.load(open(\"model.pkl\",'rb'))\r\n\r\n\r\[email protected]('/')\r\ndef home():\r\n return render_template('index.html')\r\n\r\[email protected]('/predict',methods=['POST'])\r\ndef predict():\r\n arr=[int(x) for x in request.form.values()]\r\n arr2=[np.array(arr)]\r\n output=model.predict(arr2)\r\n # o2=round(output)\r\n return render_template('index.html',prediction_text=output)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n run_simple('localhost',8001,app,use_reloader=False)",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def generate_parameters(seed):
np.random.seed(seed)
out = {}
out['nfeatures'] = np.random.randint(3, 25)
out['lr'] = float(loguniform.rvs(0.001, 0.01, size=1))
out['gamma'] = np.random.uniform(0.75, 0.05)
out['penalty'] = float(loguniform.rvs(1e-05, 0.1, size=1))
out['batch'] = np.random.choice([32, 64])
return out
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def generate_parameters(seed):
np.random.seed(seed)
out = {}
out['nfeatures'] = np.random.randint(3, 25)
out['lr'] = float(loguniform.rvs(0.001, 0.01, size=1))
out['gamma'] = np.random.uniform(0.75, 0.05)
out['penalty'] = float(loguniform.rvs(1e-05, 0.1, size=1))
out['batch'] = np.random.choice([32, 64])
return out
if __name__ == '__main__':
out = generate_parameters(int(sys.argv[1]))
out_str = ('--nfeatures {} --lr {} --gamma {} --penalty {} --batch {}'.
format(out['nfeatures'], out['lr'], out['gamma'], out['penalty'],
out['batch']))
print(out_str)
<|reserved_special_token_1|>
import numpy as np
from scipy.stats import loguniform
import sys
def generate_parameters(seed):
np.random.seed(seed)
out = {}
out['nfeatures'] = np.random.randint(3, 25)
out['lr'] = float(loguniform.rvs(0.001, 0.01, size=1))
out['gamma'] = np.random.uniform(0.75, 0.05)
out['penalty'] = float(loguniform.rvs(1e-05, 0.1, size=1))
out['batch'] = np.random.choice([32, 64])
return out
if __name__ == '__main__':
out = generate_parameters(int(sys.argv[1]))
out_str = ('--nfeatures {} --lr {} --gamma {} --penalty {} --batch {}'.
format(out['nfeatures'], out['lr'], out['gamma'], out['penalty'],
out['batch']))
print(out_str)
<|reserved_special_token_1|>
import numpy as np
from scipy.stats import loguniform
import sys
def generate_parameters(seed):
np.random.seed(seed)
out={}
out['nfeatures'] = np.random.randint(3, 25)
out['lr'] = float(loguniform.rvs(0.001, 0.01, size=1))
out['gamma'] = np.random.uniform(0.75, 0.05)
out['penalty'] = float(loguniform.rvs(0.00001, 0.1, size=1))
out['batch'] = np.random.choice([32,64])
return out
if __name__ == '__main__':
out = generate_parameters(int(sys.argv[1]))
out_str = '--nfeatures {} --lr {} --gamma {} --penalty {} --batch {}'.format(out['nfeatures'], out['lr'], out['gamma'], out['penalty'], out['batch'])
print(out_str)
|
flexible
|
{
"blob_id": "7571e86be1077ae0f7ae542824cfcaaa2949dc83",
"index": 8731,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef generate_parameters(seed):\n np.random.seed(seed)\n out = {}\n out['nfeatures'] = np.random.randint(3, 25)\n out['lr'] = float(loguniform.rvs(0.001, 0.01, size=1))\n out['gamma'] = np.random.uniform(0.75, 0.05)\n out['penalty'] = float(loguniform.rvs(1e-05, 0.1, size=1))\n out['batch'] = np.random.choice([32, 64])\n return out\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef generate_parameters(seed):\n np.random.seed(seed)\n out = {}\n out['nfeatures'] = np.random.randint(3, 25)\n out['lr'] = float(loguniform.rvs(0.001, 0.01, size=1))\n out['gamma'] = np.random.uniform(0.75, 0.05)\n out['penalty'] = float(loguniform.rvs(1e-05, 0.1, size=1))\n out['batch'] = np.random.choice([32, 64])\n return out\n\n\nif __name__ == '__main__':\n out = generate_parameters(int(sys.argv[1]))\n out_str = ('--nfeatures {} --lr {} --gamma {} --penalty {} --batch {}'.\n format(out['nfeatures'], out['lr'], out['gamma'], out['penalty'],\n out['batch']))\n print(out_str)\n",
"step-4": "import numpy as np\nfrom scipy.stats import loguniform\nimport sys\n\n\ndef generate_parameters(seed):\n np.random.seed(seed)\n out = {}\n out['nfeatures'] = np.random.randint(3, 25)\n out['lr'] = float(loguniform.rvs(0.001, 0.01, size=1))\n out['gamma'] = np.random.uniform(0.75, 0.05)\n out['penalty'] = float(loguniform.rvs(1e-05, 0.1, size=1))\n out['batch'] = np.random.choice([32, 64])\n return out\n\n\nif __name__ == '__main__':\n out = generate_parameters(int(sys.argv[1]))\n out_str = ('--nfeatures {} --lr {} --gamma {} --penalty {} --batch {}'.\n format(out['nfeatures'], out['lr'], out['gamma'], out['penalty'],\n out['batch']))\n print(out_str)\n",
"step-5": "import numpy as np\nfrom scipy.stats import loguniform\nimport sys\n\ndef generate_parameters(seed):\n np.random.seed(seed)\n out={}\n out['nfeatures'] = np.random.randint(3, 25)\n out['lr'] = float(loguniform.rvs(0.001, 0.01, size=1))\n out['gamma'] = np.random.uniform(0.75, 0.05)\n out['penalty'] = float(loguniform.rvs(0.00001, 0.1, size=1))\n out['batch'] = np.random.choice([32,64])\n return out\n\nif __name__ == '__main__':\n out = generate_parameters(int(sys.argv[1]))\n out_str = '--nfeatures {} --lr {} --gamma {} --penalty {} --batch {}'.format(out['nfeatures'], out['lr'], out['gamma'], out['penalty'], out['batch'])\n print(out_str)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class Solution:
def toGoatLatin(self, S: str) -> str:
def exchange(str2):
if str2[0] in "aeiou":
str2 = str2+"ma"
else:
str2 = str2[1:]+str2[0]+"ma"
list2 = S.split(" ")
for i in list2:
res.append(exchange(i))
for i in res:
if __name__ == "__main__":
s = Solution()
str2 = "I speak Goat Latin"
print(s.toGoatLatin(str2))
|
normal
|
{
"blob_id": "398c28265e61831ba65b4ae2a785e57c0fa5b6d2",
"index": 8311,
"step-1": "\n\n\nclass Solution:\n def toGoatLatin(self, S: str) -> str:\n \n def exchange(str2):\n if str2[0] in \"aeiou\":\n str2 = str2+\"ma\"\n else:\n str2 = str2[1:]+str2[0]+\"ma\"\n\n list2 = S.split(\" \")\n\n for i in list2:\n res.append(exchange(i))\n\n\n for i in res:\n \n\nif __name__ == \"__main__\":\n s = Solution()\n str2 = \"I speak Goat Latin\"\n print(s.toGoatLatin(str2))\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app.config_from_object('task.config')
if __name__ == '__main__':
app.start()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app = Celery('task', include=['task.tasks'])
app.config_from_object('task.config')
if __name__ == '__main__':
app.start()
<|reserved_special_token_1|>
from celery import Celery
app = Celery('task', include=['task.tasks'])
app.config_from_object('task.config')
if __name__ == '__main__':
app.start()
<|reserved_special_token_1|>
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from celery import Celery
app = Celery('task', include=['task.tasks'])
app.config_from_object('task.config')
if __name__ == '__main__':
app.start()
|
flexible
|
{
"blob_id": "68d9f77f91a13c73373c323ef0edbe18af9990a3",
"index": 4321,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp.config_from_object('task.config')\nif __name__ == '__main__':\n app.start()\n",
"step-3": "<mask token>\napp = Celery('task', include=['task.tasks'])\napp.config_from_object('task.config')\nif __name__ == '__main__':\n app.start()\n",
"step-4": "from celery import Celery\napp = Celery('task', include=['task.tasks'])\napp.config_from_object('task.config')\nif __name__ == '__main__':\n app.start()\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\nfrom celery import Celery\n\napp = Celery('task', include=['task.tasks'])\n\napp.config_from_object('task.config')\n\nif __name__ == '__main__':\n app.start()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def call_func(some_func, argument):
return some_func(argument)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def call_func(some_func, argument):
return some_func(argument)
def main(argument):
"""docstring"""
return call_func(say_hi, argument)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def say_hi(argument):
return f'Hello {argument}'
def call_func(some_func, argument):
return some_func(argument)
def main(argument):
"""docstring"""
return call_func(say_hi, argument)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def say_hi(argument):
return f'Hello {argument}'
def call_func(some_func, argument):
return some_func(argument)
def main(argument):
"""docstring"""
return call_func(say_hi, argument)
if __name__ == '__main__':
print(main(1))
<|reserved_special_token_1|>
def say_hi(argument):
return f"Hello {argument}"
def call_func(some_func, argument):
return some_func(argument)
def main(argument):
"""docstring"""
return call_func(say_hi, argument)
if __name__ == "__main__":
print(main(1))
|
flexible
|
{
"blob_id": "2a3c3112122dee5574a1569155287ea3e5f8c7b2",
"index": 6120,
"step-1": "<mask token>\n\n\ndef call_func(some_func, argument):\n return some_func(argument)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef call_func(some_func, argument):\n return some_func(argument)\n\n\ndef main(argument):\n \"\"\"docstring\"\"\"\n return call_func(say_hi, argument)\n\n\n<mask token>\n",
"step-3": "def say_hi(argument):\n return f'Hello {argument}'\n\n\ndef call_func(some_func, argument):\n return some_func(argument)\n\n\ndef main(argument):\n \"\"\"docstring\"\"\"\n return call_func(say_hi, argument)\n\n\n<mask token>\n",
"step-4": "def say_hi(argument):\n return f'Hello {argument}'\n\n\ndef call_func(some_func, argument):\n return some_func(argument)\n\n\ndef main(argument):\n \"\"\"docstring\"\"\"\n return call_func(say_hi, argument)\n\n\nif __name__ == '__main__':\n print(main(1))\n",
"step-5": "def say_hi(argument):\n return f\"Hello {argument}\"\n\ndef call_func(some_func, argument):\n return some_func(argument)\n\ndef main(argument):\n \"\"\"docstring\"\"\"\n return call_func(say_hi, argument)\n\nif __name__ == \"__main__\":\n print(main(1))",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def tanh(x):
return np.tanh(x)
def tanh_deriv(x):
return 1.0 - np.tanh(x) * np.tanh(x)
<|reserved_special_token_0|>
class NeuralNetwork:
def __init__(self, layers, activation='tanh'):
"""
:param layers: A list containing the number of units in each layer.
Should be at least two values
:param activation: The activation function to be used. Can be
"logistic" or "tanh"
"""
if activation == 'logistic':
self.activation = logistic
self.activation_deriv = logistic_derivative
elif activation == 'tanh':
self.activation = tanh
self.activation_deriv = tanh_deriv
self.weights = []
for i in range(1, len(layers) - 1):
print(i)
self.weights.append((2 * np.random.random((layers[i - 1] + 1,
layers[i] + 1)) - 1) * 0.25)
self.weights.append((2 * np.random.random((layers[i] + 1,
layers[i + 1])) - 1) * 0.25)
def fit(self, X, y, learning_rate=0.2, epochs=10000):
X = np.atleast_2d(X)
print(X)
print(X.shape)
temp = np.ones([X.shape[0], X.shape[1] + 1])
temp[:, 0:-1] = X
X = temp
print(X)
y = np.array(y)
print(y)
for k in range(epochs):
i = np.random.randint(X.shape[0])
a = [X[i]]
for l in range(len(self.weights)):
a.append(self.activation(np.dot(a[l], self.weights[l])))
error = y[i] - a[-1]
deltas = [error * self.activation_deriv(a[-1])]
for l in range(len(a) - 2, 0, -1):
deltas.append(deltas[-1].dot(self.weights[l].T) * self.
activation_deriv(a[l]))
deltas.reverse()
for i in range(len(self.weights)):
layer = np.atleast_2d(a[i])
delta = np.atleast_2d(deltas[i])
self.weights[i] += learning_rate * layer.T.dot(delta)
def predict(self, x):
x = np.array(x)
temp = np.ones(x.shape[0] + 1)
temp[0:-1] = x
a = temp
for l in range(0, len(self.weights)):
a = self.activation(np.dot(a, self.weights[l]))
return a
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def tanh(x):
return np.tanh(x)
def tanh_deriv(x):
return 1.0 - np.tanh(x) * np.tanh(x)
def logistic(x):
return 1 / (1 + np.exp(-x))
def logistic_derivative(x):
return logistic(x) * (1 - logistic(x))
class NeuralNetwork:
def __init__(self, layers, activation='tanh'):
"""
:param layers: A list containing the number of units in each layer.
Should be at least two values
:param activation: The activation function to be used. Can be
"logistic" or "tanh"
"""
if activation == 'logistic':
self.activation = logistic
self.activation_deriv = logistic_derivative
elif activation == 'tanh':
self.activation = tanh
self.activation_deriv = tanh_deriv
self.weights = []
for i in range(1, len(layers) - 1):
print(i)
self.weights.append((2 * np.random.random((layers[i - 1] + 1,
layers[i] + 1)) - 1) * 0.25)
self.weights.append((2 * np.random.random((layers[i] + 1,
layers[i + 1])) - 1) * 0.25)
def fit(self, X, y, learning_rate=0.2, epochs=10000):
X = np.atleast_2d(X)
print(X)
print(X.shape)
temp = np.ones([X.shape[0], X.shape[1] + 1])
temp[:, 0:-1] = X
X = temp
print(X)
y = np.array(y)
print(y)
for k in range(epochs):
i = np.random.randint(X.shape[0])
a = [X[i]]
for l in range(len(self.weights)):
a.append(self.activation(np.dot(a[l], self.weights[l])))
error = y[i] - a[-1]
deltas = [error * self.activation_deriv(a[-1])]
for l in range(len(a) - 2, 0, -1):
deltas.append(deltas[-1].dot(self.weights[l].T) * self.
activation_deriv(a[l]))
deltas.reverse()
for i in range(len(self.weights)):
layer = np.atleast_2d(a[i])
delta = np.atleast_2d(deltas[i])
self.weights[i] += learning_rate * layer.T.dot(delta)
def predict(self, x):
x = np.array(x)
temp = np.ones(x.shape[0] + 1)
temp[0:-1] = x
a = temp
for l in range(0, len(self.weights)):
a = self.activation(np.dot(a, self.weights[l]))
return a
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def tanh(x):
return np.tanh(x)
def tanh_deriv(x):
return 1.0 - np.tanh(x) * np.tanh(x)
def logistic(x):
return 1 / (1 + np.exp(-x))
def logistic_derivative(x):
return logistic(x) * (1 - logistic(x))
class NeuralNetwork:
def __init__(self, layers, activation='tanh'):
"""
:param layers: A list containing the number of units in each layer.
Should be at least two values
:param activation: The activation function to be used. Can be
"logistic" or "tanh"
"""
if activation == 'logistic':
self.activation = logistic
self.activation_deriv = logistic_derivative
elif activation == 'tanh':
self.activation = tanh
self.activation_deriv = tanh_deriv
self.weights = []
for i in range(1, len(layers) - 1):
print(i)
self.weights.append((2 * np.random.random((layers[i - 1] + 1,
layers[i] + 1)) - 1) * 0.25)
self.weights.append((2 * np.random.random((layers[i] + 1,
layers[i + 1])) - 1) * 0.25)
def fit(self, X, y, learning_rate=0.2, epochs=10000):
X = np.atleast_2d(X)
print(X)
print(X.shape)
temp = np.ones([X.shape[0], X.shape[1] + 1])
temp[:, 0:-1] = X
X = temp
print(X)
y = np.array(y)
print(y)
for k in range(epochs):
i = np.random.randint(X.shape[0])
a = [X[i]]
for l in range(len(self.weights)):
a.append(self.activation(np.dot(a[l], self.weights[l])))
error = y[i] - a[-1]
deltas = [error * self.activation_deriv(a[-1])]
for l in range(len(a) - 2, 0, -1):
deltas.append(deltas[-1].dot(self.weights[l].T) * self.
activation_deriv(a[l]))
deltas.reverse()
for i in range(len(self.weights)):
layer = np.atleast_2d(a[i])
delta = np.atleast_2d(deltas[i])
self.weights[i] += learning_rate * layer.T.dot(delta)
def predict(self, x):
x = np.array(x)
temp = np.ones(x.shape[0] + 1)
temp[0:-1] = x
a = temp
for l in range(0, len(self.weights)):
a = self.activation(np.dot(a, self.weights[l]))
return a
<|reserved_special_token_0|>
nn.fit(X, y)
for i in [[0, 0], [0, 1], [1, 0], [1, 1]]:
print(i, nn.predict(i))
<|reserved_special_token_1|>
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.datasets import load_digits
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.preprocessing import LabelBinarizer
def tanh(x):
return np.tanh(x)
def tanh_deriv(x):
return 1.0 - np.tanh(x) * np.tanh(x)
def logistic(x):
return 1 / (1 + np.exp(-x))
def logistic_derivative(x):
return logistic(x) * (1 - logistic(x))
class NeuralNetwork:
def __init__(self, layers, activation='tanh'):
"""
:param layers: A list containing the number of units in each layer.
Should be at least two values
:param activation: The activation function to be used. Can be
"logistic" or "tanh"
"""
if activation == 'logistic':
self.activation = logistic
self.activation_deriv = logistic_derivative
elif activation == 'tanh':
self.activation = tanh
self.activation_deriv = tanh_deriv
self.weights = []
for i in range(1, len(layers) - 1):
print(i)
self.weights.append((2 * np.random.random((layers[i - 1] + 1,
layers[i] + 1)) - 1) * 0.25)
self.weights.append((2 * np.random.random((layers[i] + 1,
layers[i + 1])) - 1) * 0.25)
def fit(self, X, y, learning_rate=0.2, epochs=10000):
X = np.atleast_2d(X)
print(X)
print(X.shape)
temp = np.ones([X.shape[0], X.shape[1] + 1])
temp[:, 0:-1] = X
X = temp
print(X)
y = np.array(y)
print(y)
for k in range(epochs):
i = np.random.randint(X.shape[0])
a = [X[i]]
for l in range(len(self.weights)):
a.append(self.activation(np.dot(a[l], self.weights[l])))
error = y[i] - a[-1]
deltas = [error * self.activation_deriv(a[-1])]
for l in range(len(a) - 2, 0, -1):
deltas.append(deltas[-1].dot(self.weights[l].T) * self.
activation_deriv(a[l]))
deltas.reverse()
for i in range(len(self.weights)):
layer = np.atleast_2d(a[i])
delta = np.atleast_2d(deltas[i])
self.weights[i] += learning_rate * layer.T.dot(delta)
def predict(self, x):
x = np.array(x)
temp = np.ones(x.shape[0] + 1)
temp[0:-1] = x
a = temp
for l in range(0, len(self.weights)):
a = self.activation(np.dot(a, self.weights[l]))
return a
nn = NeuralNetwork([2, 2, 1], 'tanh')
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([0, 1, 1, 0])
nn.fit(X, y)
for i in [[0, 0], [0, 1], [1, 0], [1, 1]]:
print(i, nn.predict(i))
<|reserved_special_token_1|>
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.datasets import load_digits
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.preprocessing import LabelBinarizer
def tanh(x):
return np.tanh(x)
def tanh_deriv(x):
return 1.0 - np.tanh(x) * np.tanh(x)
def logistic(x):
return 1 / (1 + np.exp(-x))
def logistic_derivative(x):
return logistic(x) * (1 - logistic(x))
class NeuralNetwork:
    """Fully connected feed-forward network trained by stochastic backprop."""

    def __init__(self, layers, activation='tanh'):
        """
        :param layers: A list containing the number of units in each layer.
        Should be at least two values
        :param activation: The activation function to be used. Can be
        "logistic" or "tanh"
        """
        if activation == 'logistic':
            self.activation = logistic
            self.activation_deriv = logistic_derivative
        elif activation == 'tanh':
            self.activation = tanh
            self.activation_deriv = tanh_deriv

        # One weight matrix per layer transition, initialized in [-0.25, 0.25).
        # Hidden layers carry an extra bias unit (+1); the output layer does not.
        # BUG FIX: the original appended the output-layer weights inside the
        # loop, duplicating and mis-ordering them for networks with more than
        # one hidden layer.
        self.weights = []
        for i in range(1, len(layers) - 1):
            self.weights.append((2 * np.random.random(
                (layers[i - 1] + 1, layers[i] + 1)) - 1) * 0.25)
        self.weights.append((2 * np.random.random(
            (layers[-2] + 1, layers[-1])) - 1) * 0.25)

    def fit(self, X, y, learning_rate=0.2, epochs=10000):
        """Train with SGD: one randomly chosen sample per epoch.

        :param X: 2-D array-like of training samples
        :param y: targets, one per sample
        :param learning_rate: gradient-descent step size
        :param epochs: number of single-sample update steps
        """
        X = np.atleast_2d(X)
        # Append a constant bias column of ones to every sample.
        temp = np.ones([X.shape[0], X.shape[1] + 1])
        temp[:, 0:-1] = X
        X = temp
        y = np.array(y)

        for k in range(epochs):
            # Pick one training sample at random; a[l] holds layer-l activations.
            i = np.random.randint(X.shape[0])
            a = [X[i]]
            for l in range(len(self.weights)):
                a.append(self.activation(np.dot(a[l], self.weights[l])))

            # Output-layer error and its delta.
            error = y[i] - a[-1]
            deltas = [error * self.activation_deriv(a[-1])]

            # Back-propagate deltas from the last hidden layer down to layer 1.
            for l in range(len(a) - 2, 0, -1):
                deltas.append(deltas[-1].dot(self.weights[l].T) *
                              self.activation_deriv(a[l]))
            deltas.reverse()

            # Gradient-descent weight update (loop index renamed from `i`,
            # which shadowed the sample index above).
            for j in range(len(self.weights)):
                layer = np.atleast_2d(a[j])
                delta = np.atleast_2d(deltas[j])
                self.weights[j] += learning_rate * layer.T.dot(delta)

    def predict(self, x):
        """Forward-pass a single sample (bias appended) and return the output."""
        x = np.array(x)
        temp = np.ones(x.shape[0] + 1)
        temp[0:-1] = x
        a = temp
        for l in range(0, len(self.weights)):
            a = self.activation(np.dot(a, self.weights[l]))
        return a
# Demo: train a 2-2-1 tanh network on XOR and print its predictions.
nn = NeuralNetwork([2, 2, 1], 'tanh')
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([0, 1, 1, 0])
nn.fit(X, y)
for sample in ([0, 0], [0, 1], [1, 0], [1, 1]):
    print(sample, nn.predict(sample))
# digits = load_digits()
# X = digits.data
# y = digits.target
# X -= X.min() # normalize the values to bring them into the range 0-1
# X /= X.max()
#
# nn = NeuralNetwork([64, 100, 10], 'logistic')
# X_train, X_test, y_train, y_test = train_test_split(X, y)
# labels_train = LabelBinarizer().fit_transform(y_train)
# labels_test = LabelBinarizer().fit_transform(y_test)
# print("start fitting")
# nn.fit(X_train, labels_train, epochs=3000)
# predictions = []
# for i in range(X_test.shape[0]):
# o = nn.predict(X_test[i])
# predictions.append(np.argmax(o))
# print(confusion_matrix(y_test, predictions))
# print(classification_report(y_test, predictions))
|
flexible
|
{
"blob_id": "a6a5fddb8e1eda4cc8e9c79ad83019f55d149a80",
"index": 2988,
"step-1": "<mask token>\n\n\ndef tanh(x):\n return np.tanh(x)\n\n\ndef tanh_deriv(x):\n return 1.0 - np.tanh(x) * np.tanh(x)\n\n\n<mask token>\n\n\nclass NeuralNetwork:\n\n def __init__(self, layers, activation='tanh'):\n \"\"\"\n :param layers: A list containing the number of units in each layer.\n Should be at least two values\n :param activation: The activation function to be used. Can be\n \"logistic\" or \"tanh\"\n \"\"\"\n if activation == 'logistic':\n self.activation = logistic\n self.activation_deriv = logistic_derivative\n elif activation == 'tanh':\n self.activation = tanh\n self.activation_deriv = tanh_deriv\n self.weights = []\n for i in range(1, len(layers) - 1):\n print(i)\n self.weights.append((2 * np.random.random((layers[i - 1] + 1, \n layers[i] + 1)) - 1) * 0.25)\n self.weights.append((2 * np.random.random((layers[i] + 1,\n layers[i + 1])) - 1) * 0.25)\n\n def fit(self, X, y, learning_rate=0.2, epochs=10000):\n X = np.atleast_2d(X)\n print(X)\n print(X.shape)\n temp = np.ones([X.shape[0], X.shape[1] + 1])\n temp[:, 0:-1] = X\n X = temp\n print(X)\n y = np.array(y)\n print(y)\n for k in range(epochs):\n i = np.random.randint(X.shape[0])\n a = [X[i]]\n for l in range(len(self.weights)):\n a.append(self.activation(np.dot(a[l], self.weights[l])))\n error = y[i] - a[-1]\n deltas = [error * self.activation_deriv(a[-1])]\n for l in range(len(a) - 2, 0, -1):\n deltas.append(deltas[-1].dot(self.weights[l].T) * self.\n activation_deriv(a[l]))\n deltas.reverse()\n for i in range(len(self.weights)):\n layer = np.atleast_2d(a[i])\n delta = np.atleast_2d(deltas[i])\n self.weights[i] += learning_rate * layer.T.dot(delta)\n\n def predict(self, x):\n x = np.array(x)\n temp = np.ones(x.shape[0] + 1)\n temp[0:-1] = x\n a = temp\n for l in range(0, len(self.weights)):\n a = self.activation(np.dot(a, self.weights[l]))\n return a\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef tanh(x):\n return np.tanh(x)\n\n\ndef tanh_deriv(x):\n return 1.0 - np.tanh(x) * np.tanh(x)\n\n\ndef logistic(x):\n return 1 / (1 + np.exp(-x))\n\n\ndef logistic_derivative(x):\n return logistic(x) * (1 - logistic(x))\n\n\nclass NeuralNetwork:\n\n def __init__(self, layers, activation='tanh'):\n \"\"\"\n :param layers: A list containing the number of units in each layer.\n Should be at least two values\n :param activation: The activation function to be used. Can be\n \"logistic\" or \"tanh\"\n \"\"\"\n if activation == 'logistic':\n self.activation = logistic\n self.activation_deriv = logistic_derivative\n elif activation == 'tanh':\n self.activation = tanh\n self.activation_deriv = tanh_deriv\n self.weights = []\n for i in range(1, len(layers) - 1):\n print(i)\n self.weights.append((2 * np.random.random((layers[i - 1] + 1, \n layers[i] + 1)) - 1) * 0.25)\n self.weights.append((2 * np.random.random((layers[i] + 1,\n layers[i + 1])) - 1) * 0.25)\n\n def fit(self, X, y, learning_rate=0.2, epochs=10000):\n X = np.atleast_2d(X)\n print(X)\n print(X.shape)\n temp = np.ones([X.shape[0], X.shape[1] + 1])\n temp[:, 0:-1] = X\n X = temp\n print(X)\n y = np.array(y)\n print(y)\n for k in range(epochs):\n i = np.random.randint(X.shape[0])\n a = [X[i]]\n for l in range(len(self.weights)):\n a.append(self.activation(np.dot(a[l], self.weights[l])))\n error = y[i] - a[-1]\n deltas = [error * self.activation_deriv(a[-1])]\n for l in range(len(a) - 2, 0, -1):\n deltas.append(deltas[-1].dot(self.weights[l].T) * self.\n activation_deriv(a[l]))\n deltas.reverse()\n for i in range(len(self.weights)):\n layer = np.atleast_2d(a[i])\n delta = np.atleast_2d(deltas[i])\n self.weights[i] += learning_rate * layer.T.dot(delta)\n\n def predict(self, x):\n x = np.array(x)\n temp = np.ones(x.shape[0] + 1)\n temp[0:-1] = x\n a = temp\n for l in range(0, len(self.weights)):\n a = self.activation(np.dot(a, self.weights[l]))\n return a\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef tanh(x):\n return np.tanh(x)\n\n\ndef tanh_deriv(x):\n return 1.0 - np.tanh(x) * np.tanh(x)\n\n\ndef logistic(x):\n return 1 / (1 + np.exp(-x))\n\n\ndef logistic_derivative(x):\n return logistic(x) * (1 - logistic(x))\n\n\nclass NeuralNetwork:\n\n def __init__(self, layers, activation='tanh'):\n \"\"\"\n :param layers: A list containing the number of units in each layer.\n Should be at least two values\n :param activation: The activation function to be used. Can be\n \"logistic\" or \"tanh\"\n \"\"\"\n if activation == 'logistic':\n self.activation = logistic\n self.activation_deriv = logistic_derivative\n elif activation == 'tanh':\n self.activation = tanh\n self.activation_deriv = tanh_deriv\n self.weights = []\n for i in range(1, len(layers) - 1):\n print(i)\n self.weights.append((2 * np.random.random((layers[i - 1] + 1, \n layers[i] + 1)) - 1) * 0.25)\n self.weights.append((2 * np.random.random((layers[i] + 1,\n layers[i + 1])) - 1) * 0.25)\n\n def fit(self, X, y, learning_rate=0.2, epochs=10000):\n X = np.atleast_2d(X)\n print(X)\n print(X.shape)\n temp = np.ones([X.shape[0], X.shape[1] + 1])\n temp[:, 0:-1] = X\n X = temp\n print(X)\n y = np.array(y)\n print(y)\n for k in range(epochs):\n i = np.random.randint(X.shape[0])\n a = [X[i]]\n for l in range(len(self.weights)):\n a.append(self.activation(np.dot(a[l], self.weights[l])))\n error = y[i] - a[-1]\n deltas = [error * self.activation_deriv(a[-1])]\n for l in range(len(a) - 2, 0, -1):\n deltas.append(deltas[-1].dot(self.weights[l].T) * self.\n activation_deriv(a[l]))\n deltas.reverse()\n for i in range(len(self.weights)):\n layer = np.atleast_2d(a[i])\n delta = np.atleast_2d(deltas[i])\n self.weights[i] += learning_rate * layer.T.dot(delta)\n\n def predict(self, x):\n x = np.array(x)\n temp = np.ones(x.shape[0] + 1)\n temp[0:-1] = x\n a = temp\n for l in range(0, len(self.weights)):\n a = self.activation(np.dot(a, self.weights[l]))\n return a\n\n\n<mask token>\nnn.fit(X, 
y)\nfor i in [[0, 0], [0, 1], [1, 0], [1, 1]]:\n print(i, nn.predict(i))\n",
"step-4": "import numpy as np\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.datasets import load_digits\nfrom sklearn.metrics import confusion_matrix, classification_report\nfrom sklearn.preprocessing import LabelBinarizer\n\n\ndef tanh(x):\n return np.tanh(x)\n\n\ndef tanh_deriv(x):\n return 1.0 - np.tanh(x) * np.tanh(x)\n\n\ndef logistic(x):\n return 1 / (1 + np.exp(-x))\n\n\ndef logistic_derivative(x):\n return logistic(x) * (1 - logistic(x))\n\n\nclass NeuralNetwork:\n\n def __init__(self, layers, activation='tanh'):\n \"\"\"\n :param layers: A list containing the number of units in each layer.\n Should be at least two values\n :param activation: The activation function to be used. Can be\n \"logistic\" or \"tanh\"\n \"\"\"\n if activation == 'logistic':\n self.activation = logistic\n self.activation_deriv = logistic_derivative\n elif activation == 'tanh':\n self.activation = tanh\n self.activation_deriv = tanh_deriv\n self.weights = []\n for i in range(1, len(layers) - 1):\n print(i)\n self.weights.append((2 * np.random.random((layers[i - 1] + 1, \n layers[i] + 1)) - 1) * 0.25)\n self.weights.append((2 * np.random.random((layers[i] + 1,\n layers[i + 1])) - 1) * 0.25)\n\n def fit(self, X, y, learning_rate=0.2, epochs=10000):\n X = np.atleast_2d(X)\n print(X)\n print(X.shape)\n temp = np.ones([X.shape[0], X.shape[1] + 1])\n temp[:, 0:-1] = X\n X = temp\n print(X)\n y = np.array(y)\n print(y)\n for k in range(epochs):\n i = np.random.randint(X.shape[0])\n a = [X[i]]\n for l in range(len(self.weights)):\n a.append(self.activation(np.dot(a[l], self.weights[l])))\n error = y[i] - a[-1]\n deltas = [error * self.activation_deriv(a[-1])]\n for l in range(len(a) - 2, 0, -1):\n deltas.append(deltas[-1].dot(self.weights[l].T) * self.\n activation_deriv(a[l]))\n deltas.reverse()\n for i in range(len(self.weights)):\n layer = np.atleast_2d(a[i])\n delta = np.atleast_2d(deltas[i])\n self.weights[i] += learning_rate * layer.T.dot(delta)\n\n def 
predict(self, x):\n x = np.array(x)\n temp = np.ones(x.shape[0] + 1)\n temp[0:-1] = x\n a = temp\n for l in range(0, len(self.weights)):\n a = self.activation(np.dot(a, self.weights[l]))\n return a\n\n\nnn = NeuralNetwork([2, 2, 1], 'tanh')\nX = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])\ny = np.array([0, 1, 1, 0])\nnn.fit(X, y)\nfor i in [[0, 0], [0, 1], [1, 0], [1, 1]]:\n print(i, nn.predict(i))\n",
"step-5": "import numpy as np\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.datasets import load_digits\nfrom sklearn.metrics import confusion_matrix, classification_report\nfrom sklearn.preprocessing import LabelBinarizer\n\n\ndef tanh(x):\n return np.tanh(x)\n\n\ndef tanh_deriv(x):\n return 1.0 - np.tanh(x) * np.tanh(x)\n\n\ndef logistic(x):\n return 1 / (1 + np.exp(-x))\n\n\ndef logistic_derivative(x):\n return logistic(x) * (1 - logistic(x))\n\n\nclass NeuralNetwork:\n def __init__(self, layers, activation='tanh'):\n \"\"\"\n :param layers: A list containing the number of units in each layer.\n Should be at least two values\n :param activation: The activation function to be used. Can be\n \"logistic\" or \"tanh\"\n \"\"\"\n if activation == 'logistic':\n self.activation = logistic\n self.activation_deriv = logistic_derivative\n elif activation == 'tanh':\n self.activation = tanh\n self.activation_deriv = tanh_deriv\n\n self.weights = []\n for i in range(1, len(layers) - 1):\n print(i)\n self.weights.append((2 * np.random.random((layers[i - 1] + 1, layers[i] + 1)) - 1) * 0.25)\n self.weights.append((2 * np.random.random((layers[i] + 1, layers[i + 1])) - 1) * 0.25)\n # print(self.weights)\n\n def fit(self, X, y, learning_rate=0.2, epochs=10000):\n # 一. 
给X数据加一列1,相当于后续的偏置所乘的数\n X = np.atleast_2d(X)\n print(X)\n print(X.shape)\n temp = np.ones([X.shape[0], X.shape[1] + 1])\n # print(temp)\n temp[:, 0:-1] = X # adding the bias unit to the input layer\n X = temp\n print(X)\n y = np.array(y)\n print(y)\n\n # 迭代epochs次\n for k in range(epochs):\n # 随机挑选X的一行,i为行号,a为这一行数据,为输入层数据\n i = np.random.randint(X.shape[0])\n a = [X[i]]\n\n # a为每层的值,a[0]为第一层输入层数据,a[1]为第二层输出层数据,a[-1]为最后一层输出层数据\n for l in range(len(self.weights)):\n # 计算每层的结果\n\n a.append(self.activation(np.dot(a[l], self.weights[l])))\n\n # Computer the error at the top layer\n # print(a)\n error = y[i] - a[-1]\n\n # For output layer, Err calculation (delta is updated error)\n deltas = [error * self.activation_deriv(a[-1])]\n\n # Staring backprobagation\n for l in range(len(a) - 2, 0, -1): # we need to begin at the second to last layer\n # Compute the updated error (i,e, deltas) for each node going from top layer to input layer\n deltas.append(deltas[-1].dot(self.weights[l].T) * self.activation_deriv(a[l]))\n deltas.reverse()\n # print(deltas)\n for i in range(len(self.weights)):\n layer = np.atleast_2d(a[i])\n delta = np.atleast_2d(deltas[i])\n self.weights[i] += learning_rate * layer.T.dot(delta)\n\n def predict(self, x):\n x = np.array(x)\n temp = np.ones(x.shape[0] + 1)\n temp[0:-1] = x\n a = temp\n for l in range(0, len(self.weights)):\n a = self.activation(np.dot(a, self.weights[l]))\n return a\n\n\nnn = NeuralNetwork([2, 2, 1], 'tanh')\nX = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])\ny = np.array([0, 1, 1, 0])\nnn.fit(X, y)\nfor i in [[0, 0], [0, 1], [1, 0], [1, 1]]:\n print(i, nn.predict(i))\n\n# digits = load_digits()\n# X = digits.data\n# y = digits.target\n# X -= X.min() # normalize the values to bring them into the range 0-1\n# X /= X.max()\n#\n# nn = NeuralNetwork([64, 100, 10], 'logistic')\n# X_train, X_test, y_train, y_test = train_test_split(X, y)\n# labels_train = LabelBinarizer().fit_transform(y_train)\n# labels_test = 
LabelBinarizer().fit_transform(y_test)\n# print(\"start fitting\")\n# nn.fit(X_train, labels_train, epochs=3000)\n# predictions = []\n# for i in range(X_test.shape[0]):\n# o = nn.predict(X_test[i])\n# predictions.append(np.argmax(o))\n# print(confusion_matrix(y_test, predictions))\n# print(classification_report(y_test, predictions))\n",
"step-ids": [
6,
8,
9,
11,
12
]
}
|
[
6,
8,
9,
11,
12
] |
<|reserved_special_token_0|>
class Chunk_CleanSentences(Resource):
<|reserved_special_token_0|>
parser.add_argument('text', type=str, required=True, help=
'გთხოვთ შეიყვანოთ სწორი წინადადება')
def get(self):
data = Chunk_CleanSentences.parser.parse_args()
text = data['text']
sentences = sent_tokenize(text)
clean_sentences = []
for sent in sentences:
clear_sentence = tokenizer.tokenize(sent)
clean_sentences.append(clear_sentence)
for word in clean_sentences:
tagged_sent = nltk.pos_tag(word)
chunkGram = 'Chunk: {<VB.?>*<NNP>?} '
chuckParser = nltk.RegexpParser(chunkGram)
chunked = chuckParser.parse(tagged_sent)
chunked.draw()
return {'clean_sentences': clean_sentences}
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Chunk_CleanSentences(Resource):
    """REST resource that cleans and chunk-parses free text.

    Splits the submitted text into sentences, strips punctuation from each,
    POS-tags the words, draws a chunk tree (verbs / proper nouns) per
    sentence, and returns the cleaned token lists.
    """

    parser = reqparse.RequestParser()
    parser.add_argument('text', type=str, required=True,
                        help='გთხოვთ შეიყვანოთ სწორი წინადადება')

    def get(self):
        args = Chunk_CleanSentences.parser.parse_args()
        clean_sentences = [tokenizer.tokenize(sentence)
                           for sentence in sent_tokenize(args['text'])]
        for words in clean_sentences:
            tagged = nltk.pos_tag(words)
            grammar = 'Chunk: {<VB.?>*<NNP>?} '
            tree = nltk.RegexpParser(grammar).parse(tagged)
            # NOTE(review): opens a GUI window per sentence -- blocks the request.
            tree.draw()
        return {'clean_sentences': clean_sentences}
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Regex tokenizer matching runs of word characters, i.e. it drops punctuation.
tokenizer = nltk.RegexpTokenizer('\\w+')
class Chunk_CleanSentences(Resource):
    """REST resource that cleans and chunk-parses free text.

    Splits the submitted text into sentences, strips punctuation from each,
    POS-tags the words, draws a chunk tree (verbs / proper nouns) per
    sentence, and returns the cleaned token lists.
    """

    parser = reqparse.RequestParser()
    parser.add_argument('text', type=str, required=True,
                        help='გთხოვთ შეიყვანოთ სწორი წინადადება')

    def get(self):
        args = Chunk_CleanSentences.parser.parse_args()
        clean_sentences = [tokenizer.tokenize(sentence)
                           for sentence in sent_tokenize(args['text'])]
        for words in clean_sentences:
            tagged = nltk.pos_tag(words)
            grammar = 'Chunk: {<VB.?>*<NNP>?} '
            tree = nltk.RegexpParser(grammar).parse(tagged)
            # NOTE(review): opens a GUI window per sentence -- blocks the request.
            tree.draw()
        return {'clean_sentences': clean_sentences}
<|reserved_special_token_1|>
from flask_restful import Resource, reqparse
import nltk
from nltk.tokenize import sent_tokenize
# Regex tokenizer matching runs of word characters, i.e. it drops punctuation.
tokenizer = nltk.RegexpTokenizer('\\w+')
class Chunk_CleanSentences(Resource):
    """REST resource that cleans and chunk-parses free text.

    Splits the submitted text into sentences, strips punctuation from each,
    POS-tags the words, draws a chunk tree (verbs / proper nouns) per
    sentence, and returns the cleaned token lists.
    """

    parser = reqparse.RequestParser()
    parser.add_argument('text', type=str, required=True,
                        help='გთხოვთ შეიყვანოთ სწორი წინადადება')

    def get(self):
        args = Chunk_CleanSentences.parser.parse_args()
        clean_sentences = [tokenizer.tokenize(sentence)
                           for sentence in sent_tokenize(args['text'])]
        for words in clean_sentences:
            tagged = nltk.pos_tag(words)
            grammar = 'Chunk: {<VB.?>*<NNP>?} '
            tree = nltk.RegexpParser(grammar).parse(tagged)
            # NOTE(review): opens a GUI window per sentence -- blocks the request.
            tree.draw()
        return {'clean_sentences': clean_sentences}
<|reserved_special_token_1|>
from flask_restful import Resource, reqparse
import nltk
from nltk.tokenize import sent_tokenize
# Regex tokenizer matching runs of word characters, i.e. it drops punctuation.
tokenizer = nltk.RegexpTokenizer(r"\w+")
# CLASS DESCRIPTION:
    # Divides the text into sentences, strips punctuation marks, and builds a chunk tree for each sentence
    # Highlights proper nouns and verbs
    # added: Temuri Kitoshvili
class Chunk_CleanSentences(Resource):
    """REST resource that cleans and chunk-parses free text.

    Splits the submitted text into sentences, strips punctuation from each,
    POS-tags the words, draws a chunk tree (verbs / proper nouns) per
    sentence, and returns the cleaned token lists.
    """

    parser = reqparse.RequestParser()
    parser.add_argument('text', type=str, required=True,
                        help='გთხოვთ შეიყვანოთ სწორი წინადადება')

    def get(self):
        args = Chunk_CleanSentences.parser.parse_args()
        clean_sentences = [tokenizer.tokenize(sentence)
                           for sentence in sent_tokenize(args['text'])]
        for words in clean_sentences:
            tagged = nltk.pos_tag(words)
            grammar = 'Chunk: {<VB.?>*<NNP>?} '
            tree = nltk.RegexpParser(grammar).parse(tagged)
            # NOTE(review): opens a GUI window per sentence -- blocks the request.
            tree.draw()
        return {'clean_sentences': clean_sentences}
|
flexible
|
{
"blob_id": "6d042a2035eab579193452e4dc44c425125d9515",
"index": 9402,
"step-1": "<mask token>\n\n\nclass Chunk_CleanSentences(Resource):\n <mask token>\n parser.add_argument('text', type=str, required=True, help=\n 'გთხოვთ შეიყვანოთ სწორი წინადადება')\n\n def get(self):\n data = Chunk_CleanSentences.parser.parse_args()\n text = data['text']\n sentences = sent_tokenize(text)\n clean_sentences = []\n for sent in sentences:\n clear_sentence = tokenizer.tokenize(sent)\n clean_sentences.append(clear_sentence)\n for word in clean_sentences:\n tagged_sent = nltk.pos_tag(word)\n chunkGram = 'Chunk: {<VB.?>*<NNP>?} '\n chuckParser = nltk.RegexpParser(chunkGram)\n chunked = chuckParser.parse(tagged_sent)\n chunked.draw()\n return {'clean_sentences': clean_sentences}\n",
"step-2": "<mask token>\n\n\nclass Chunk_CleanSentences(Resource):\n parser = reqparse.RequestParser()\n parser.add_argument('text', type=str, required=True, help=\n 'გთხოვთ შეიყვანოთ სწორი წინადადება')\n\n def get(self):\n data = Chunk_CleanSentences.parser.parse_args()\n text = data['text']\n sentences = sent_tokenize(text)\n clean_sentences = []\n for sent in sentences:\n clear_sentence = tokenizer.tokenize(sent)\n clean_sentences.append(clear_sentence)\n for word in clean_sentences:\n tagged_sent = nltk.pos_tag(word)\n chunkGram = 'Chunk: {<VB.?>*<NNP>?} '\n chuckParser = nltk.RegexpParser(chunkGram)\n chunked = chuckParser.parse(tagged_sent)\n chunked.draw()\n return {'clean_sentences': clean_sentences}\n",
"step-3": "<mask token>\ntokenizer = nltk.RegexpTokenizer('\\\\w+')\n\n\nclass Chunk_CleanSentences(Resource):\n parser = reqparse.RequestParser()\n parser.add_argument('text', type=str, required=True, help=\n 'გთხოვთ შეიყვანოთ სწორი წინადადება')\n\n def get(self):\n data = Chunk_CleanSentences.parser.parse_args()\n text = data['text']\n sentences = sent_tokenize(text)\n clean_sentences = []\n for sent in sentences:\n clear_sentence = tokenizer.tokenize(sent)\n clean_sentences.append(clear_sentence)\n for word in clean_sentences:\n tagged_sent = nltk.pos_tag(word)\n chunkGram = 'Chunk: {<VB.?>*<NNP>?} '\n chuckParser = nltk.RegexpParser(chunkGram)\n chunked = chuckParser.parse(tagged_sent)\n chunked.draw()\n return {'clean_sentences': clean_sentences}\n",
"step-4": "from flask_restful import Resource, reqparse\nimport nltk\nfrom nltk.tokenize import sent_tokenize\ntokenizer = nltk.RegexpTokenizer('\\\\w+')\n\n\nclass Chunk_CleanSentences(Resource):\n parser = reqparse.RequestParser()\n parser.add_argument('text', type=str, required=True, help=\n 'გთხოვთ შეიყვანოთ სწორი წინადადება')\n\n def get(self):\n data = Chunk_CleanSentences.parser.parse_args()\n text = data['text']\n sentences = sent_tokenize(text)\n clean_sentences = []\n for sent in sentences:\n clear_sentence = tokenizer.tokenize(sent)\n clean_sentences.append(clear_sentence)\n for word in clean_sentences:\n tagged_sent = nltk.pos_tag(word)\n chunkGram = 'Chunk: {<VB.?>*<NNP>?} '\n chuckParser = nltk.RegexpParser(chunkGram)\n chunked = chuckParser.parse(tagged_sent)\n chunked.draw()\n return {'clean_sentences': clean_sentences}\n",
"step-5": "from flask_restful import Resource, reqparse\nimport nltk\nfrom nltk.tokenize import sent_tokenize\ntokenizer = nltk.RegexpTokenizer(r\"\\w+\")\n\n# CLASS DESCRIPTION:\n # Devides and clears the sentence of punctuation marks and builds a dependency tree on each sentence\n # Allocates its own names and verbs\n # added: Temuri Kitoshvili\n\nclass Chunk_CleanSentences(Resource):\n parser = reqparse.RequestParser()\n parser.add_argument('text',\n type=str,\n required=True,\n help=\"გთხოვთ შეიყვანოთ სწორი წინადადება\")\n\n def get(self):\n data = Chunk_CleanSentences.parser.parse_args()\n text = data['text']\n\n sentences = sent_tokenize(text)\n clean_sentences = []\n\n for sent in sentences:\n clear_sentence = tokenizer.tokenize(sent)\n clean_sentences.append(clear_sentence)\n\n for word in clean_sentences:\n tagged_sent = nltk.pos_tag(word)\n chunkGram = r\"\"\"Chunk: {<VB.?>*<NNP>?} \"\"\"\n chuckParser = nltk.RegexpParser(chunkGram)\n chunked = chuckParser.parse(tagged_sent)\n\n chunked.draw()\n\n return {\"clean_sentences\": clean_sentences}\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Project root: absolute path two directory levels above this file.
BASEDIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
# Data directory under the project root, and the main YAML data file.
DATA_DIR = os.path.join(BASEDIR, 'data')
DATA_FILE = os.path.join(DATA_DIR, 'data.yaml')
<|reserved_special_token_1|>
import os
# Absolute path of the project root (two levels above this file).
BASEDIR = os.path.abspath(
    os.path.dirname(os.path.dirname(__file__)))
# Data directory and the YAML data file inside it.
DATA_DIR = os.path.join(BASEDIR, 'data')
DATA_FILE = os.path.join(DATA_DIR, 'data.yaml')
<|reserved_special_token_1|>
import os
# __file__: the current file
# os.path.dirname(): the directory containing a path
# os.path.abspath(): absolute path of a file/directory
# os.path.join(): join path components
# Project root path
# Project root: two directory levels above this file, as an absolute path.
BASEDIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))

# Directory that holds data files, and the main YAML data file.
DATA_DIR = os.path.join(BASEDIR, "data")
DATA_FILE = os.path.join(DATA_DIR, 'data.yaml')
|
flexible
|
{
"blob_id": "7a793c2081032745ae58f92a4572954333742dfd",
"index": 3943,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nBASEDIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))\nDATA_DIR = os.path.join(BASEDIR, 'data')\nDATA_FILE = os.path.join(DATA_DIR, 'data.yaml')\n",
"step-3": "import os\nBASEDIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))\nDATA_DIR = os.path.join(BASEDIR, 'data')\nDATA_FILE = os.path.join(DATA_DIR, 'data.yaml')\n",
"step-4": "import os\n\n# __file__: 当前文件\n# os.path.dirname(): 所在目录\n# os.path.abspath(): 当前文件/目录的绝对路径\n# os.path.join(): 路径连接\n\n# 项目路径\nBASEDIR = os.path.abspath(\n os.path.dirname(\n os.path.dirname(\n __file__)))\n\n# 数据文件目录\nDATA_DIR = os.path.join(BASEDIR, \"data\")\nDATA_FILE = os.path.join(DATA_DIR, 'data.yaml')",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Server(Socket):
def __init__(self):
super(Server, self).__init__()
print('server listening')
self.users = []
def set_up(self):
self.bind(('192.168.0.109', 1337))
self.listen(0)
self.accept_sockets()
def send_data(self, data):
for user in self.users:
try:
user.send(data)
except ConnectionResetError:
self.users.pop(self.users.index(user))
pass
<|reserved_special_token_0|>
def accept_sockets(self):
while True:
user_socket, address = self.accept()
print(f'User <{address[0]}> connected!')
self.users.append(user_socket)
print(len(self.users))
listen_accepted_user = threading.Thread(target=self.
listen_socket, args=(user_socket,))
listen_accepted_user.start()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Server(Socket):
    """Broadcast chat server: accepts clients and rebroadcasts every message."""

    def __init__(self):
        super(Server, self).__init__()
        print('server listening')
        # Sockets of currently connected clients.
        self.users = []

    def set_up(self):
        # Bind to the hard-coded address, then block forever accepting clients.
        self.bind(('192.168.0.109', 1337))
        self.listen(0)
        self.accept_sockets()

    def send_data(self, data):
        # Broadcast to every client, dropping dead connections.
        # NOTE(review): pops from self.users while iterating it, which can
        # skip the client that follows a removed one.
        for user in self.users:
            try:
                user.send(data)
            except ConnectionResetError:
                self.users.pop(self.users.index(user))
                pass

    def listen_socket(self, listened_socket=None):
        # Per-client receive loop; disconnects after more than five "empty"
        # messages (payload minus trailing CR/LF decodes to '').
        countForDel = 0
        while True:
            data = listened_socket.recv(2048)
            if data.decode('utf-8')[0:-2] == '':
                countForDel += 1
            if countForDel > 5:
                print('deleting user: Antispam')
                self.users.pop(self.users.index(listened_socket))
                # NOTE(review): this only kills the listener thread; the
                # client socket itself is never closed.
                raise ConnectionResetError
            print(f'User sent {data}')
            self.send_data(data)

    def accept_sockets(self):
        # Accept clients forever; one listener thread per connection.
        while True:
            user_socket, address = self.accept()
            print(f'User <{address[0]}> connected!')
            self.users.append(user_socket)
            print(len(self.users))
            listen_accepted_user = threading.Thread(target=self.
                listen_socket, args=(user_socket,))
            listen_accepted_user.start()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Server(Socket):
    """TCP chat server: every received message is rebroadcast to all clients."""

    def __init__(self):
        super(Server, self).__init__()
        print('server listening')
        self.users = []  # connected client sockets

    def set_up(self):
        # Bind, listen, then block forever accepting clients.
        self.bind(('192.168.0.109', 1337))
        self.listen(0)
        self.accept_sockets()

    def send_data(self, data):
        # Broadcast to every client; drop clients whose connection reset.
        # NOTE(review): pops from self.users while iterating it, which can
        # skip the element after a removed one.
        for user in self.users:
            try:
                user.send(data)
            except ConnectionResetError:
                self.users.pop(self.users.index(user))

    def listen_socket(self, listened_socket=None):
        """Receive loop for one client; rebroadcasts each message."""
        empty_count = 0
        while True:
            data = listened_socket.recv(2048)
            if data.decode('utf-8')[0:-2] == '':
                empty_count += 1
            if empty_count > 5:
                # Too many empty messages: treat as spam and disconnect.
                print('deleting user: Antispam')
                self.users.pop(self.users.index(listened_socket))
                raise ConnectionResetError
            print(f'User sent {data}')
            self.send_data(data)

    def accept_sockets(self):
        """Accept clients forever, spawning one listener thread per client."""
        while True:
            user_socket, address = self.accept()
            print(f'User <{address[0]}> connected!')
            self.users.append(user_socket)
            print(len(self.users))
            worker = threading.Thread(target=self.listen_socket,
                                      args=(user_socket,))
            worker.start()
# Entry point: start the chat server (blocks accepting clients forever).
if __name__ == '__main__':
    server = Server()
    server.set_up()
<|reserved_special_token_1|>
from Socket import Socket
import threading
class Server(Socket):
    """TCP chat server: every received message is rebroadcast to all clients."""

    def __init__(self):
        super(Server, self).__init__()
        print('server listening')
        self.users = []  # connected client sockets

    def set_up(self):
        # Bind, listen, then block forever accepting clients.
        self.bind(('192.168.0.109', 1337))
        self.listen(0)
        self.accept_sockets()

    def send_data(self, data):
        # Broadcast to every client; drop clients whose connection reset.
        # NOTE(review): pops from self.users while iterating it, which can
        # skip the element after a removed one.
        for user in self.users:
            try:
                user.send(data)
            except ConnectionResetError:
                self.users.pop(self.users.index(user))

    def listen_socket(self, listened_socket=None):
        """Receive loop for one client; rebroadcasts each message."""
        empty_count = 0
        while True:
            data = listened_socket.recv(2048)
            if data.decode('utf-8')[0:-2] == '':
                empty_count += 1
            if empty_count > 5:
                # Too many empty messages: treat as spam and disconnect.
                print('deleting user: Antispam')
                self.users.pop(self.users.index(listened_socket))
                raise ConnectionResetError
            print(f'User sent {data}')
            self.send_data(data)

    def accept_sockets(self):
        """Accept clients forever, spawning one listener thread per client."""
        while True:
            user_socket, address = self.accept()
            print(f'User <{address[0]}> connected!')
            self.users.append(user_socket)
            print(len(self.users))
            worker = threading.Thread(target=self.listen_socket,
                                      args=(user_socket,))
            worker.start()
# Entry point: start the chat server (blocks accepting clients forever).
if __name__ == '__main__':
    server = Server()
    server.set_up()
<|reserved_special_token_1|>
from Socket import Socket
import threading
class Server(Socket):
    """Broadcast chat server built on the project's Socket wrapper.

    Accepts TCP clients, spawns one listener thread per client, and
    rebroadcasts every received message to all connected clients.
    """

    def __init__(self):
        super(Server, self).__init__()
        print("server listening")
        self.users = []  # sockets of currently connected clients

    def set_up(self, host="192.168.0.109", port=1337):
        """Bind to *host*:*port* and accept clients forever (blocking).

        The address is now parameterized; the defaults keep the old behavior.
        """
        self.bind((host, port))
        self.listen(0)
        self.accept_sockets()

    def send_data(self, data):
        """Broadcast *data* to every client, dropping dead connections.

        Iterates over a snapshot of self.users: the original popped from the
        list while iterating it, which skips the client that follows each
        removed one.
        """
        for user in list(self.users):
            try:
                user.send(data)
            except ConnectionResetError:
                self.users.remove(user)

    def listen_socket(self, listened_socket=None):
        """Per-client receive loop; rebroadcasts each message.

        Disconnects a client after more than five "empty" messages
        (payload minus the trailing CR/LF decodes to '') -- a crude
        anti-spam measure.
        """
        countForDel = 0
        while True:
            data = listened_socket.recv(2048)
            if data.decode("utf-8")[0:-2] == '':
                countForDel += 1
            if countForDel > 5:
                print("deleting user: Antispam")
                self.users.pop(self.users.index(listened_socket))
                # NOTE(review): raising here only kills this thread; the
                # client socket itself is never closed.
                raise ConnectionResetError

            print(f"User sent {data}")
            self.send_data(data)

    def accept_sockets(self):
        """Accept clients forever, one listener thread per connection."""
        while True:
            user_socket, address = self.accept()
            print(f"User <{address[0]}> connected!")
            self.users.append(user_socket)  # register the new client
            print(len(self.users))

            listen_accepted_user = threading.Thread(
                target=self.listen_socket,
                args=(user_socket,))
            listen_accepted_user.start()
# Entry point: start the chat server (blocks accepting clients forever).
if __name__ == '__main__':
    server = Server()
    server.set_up()
|
flexible
|
{
"blob_id": "2027904401e5be7b1c95eebec3a1e6a88c25660c",
"index": 9338,
"step-1": "<mask token>\n\n\nclass Server(Socket):\n\n def __init__(self):\n super(Server, self).__init__()\n print('server listening')\n self.users = []\n\n def set_up(self):\n self.bind(('192.168.0.109', 1337))\n self.listen(0)\n self.accept_sockets()\n\n def send_data(self, data):\n for user in self.users:\n try:\n user.send(data)\n except ConnectionResetError:\n self.users.pop(self.users.index(user))\n pass\n <mask token>\n\n def accept_sockets(self):\n while True:\n user_socket, address = self.accept()\n print(f'User <{address[0]}> connected!')\n self.users.append(user_socket)\n print(len(self.users))\n listen_accepted_user = threading.Thread(target=self.\n listen_socket, args=(user_socket,))\n listen_accepted_user.start()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Server(Socket):\n\n def __init__(self):\n super(Server, self).__init__()\n print('server listening')\n self.users = []\n\n def set_up(self):\n self.bind(('192.168.0.109', 1337))\n self.listen(0)\n self.accept_sockets()\n\n def send_data(self, data):\n for user in self.users:\n try:\n user.send(data)\n except ConnectionResetError:\n self.users.pop(self.users.index(user))\n pass\n\n def listen_socket(self, listened_socket=None):\n countForDel = 0\n while True:\n data = listened_socket.recv(2048)\n if data.decode('utf-8')[0:-2] == '':\n countForDel += 1\n if countForDel > 5:\n print('deleting user: Antispam')\n self.users.pop(self.users.index(listened_socket))\n raise ConnectionResetError\n print(f'User sent {data}')\n self.send_data(data)\n\n def accept_sockets(self):\n while True:\n user_socket, address = self.accept()\n print(f'User <{address[0]}> connected!')\n self.users.append(user_socket)\n print(len(self.users))\n listen_accepted_user = threading.Thread(target=self.\n listen_socket, args=(user_socket,))\n listen_accepted_user.start()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Server(Socket):\n\n def __init__(self):\n super(Server, self).__init__()\n print('server listening')\n self.users = []\n\n def set_up(self):\n self.bind(('192.168.0.109', 1337))\n self.listen(0)\n self.accept_sockets()\n\n def send_data(self, data):\n for user in self.users:\n try:\n user.send(data)\n except ConnectionResetError:\n self.users.pop(self.users.index(user))\n pass\n\n def listen_socket(self, listened_socket=None):\n countForDel = 0\n while True:\n data = listened_socket.recv(2048)\n if data.decode('utf-8')[0:-2] == '':\n countForDel += 1\n if countForDel > 5:\n print('deleting user: Antispam')\n self.users.pop(self.users.index(listened_socket))\n raise ConnectionResetError\n print(f'User sent {data}')\n self.send_data(data)\n\n def accept_sockets(self):\n while True:\n user_socket, address = self.accept()\n print(f'User <{address[0]}> connected!')\n self.users.append(user_socket)\n print(len(self.users))\n listen_accepted_user = threading.Thread(target=self.\n listen_socket, args=(user_socket,))\n listen_accepted_user.start()\n\n\nif __name__ == '__main__':\n server = Server()\n server.set_up()\n",
"step-4": "from Socket import Socket\nimport threading\n\n\nclass Server(Socket):\n\n def __init__(self):\n super(Server, self).__init__()\n print('server listening')\n self.users = []\n\n def set_up(self):\n self.bind(('192.168.0.109', 1337))\n self.listen(0)\n self.accept_sockets()\n\n def send_data(self, data):\n for user in self.users:\n try:\n user.send(data)\n except ConnectionResetError:\n self.users.pop(self.users.index(user))\n pass\n\n def listen_socket(self, listened_socket=None):\n countForDel = 0\n while True:\n data = listened_socket.recv(2048)\n if data.decode('utf-8')[0:-2] == '':\n countForDel += 1\n if countForDel > 5:\n print('deleting user: Antispam')\n self.users.pop(self.users.index(listened_socket))\n raise ConnectionResetError\n print(f'User sent {data}')\n self.send_data(data)\n\n def accept_sockets(self):\n while True:\n user_socket, address = self.accept()\n print(f'User <{address[0]}> connected!')\n self.users.append(user_socket)\n print(len(self.users))\n listen_accepted_user = threading.Thread(target=self.\n listen_socket, args=(user_socket,))\n listen_accepted_user.start()\n\n\nif __name__ == '__main__':\n server = Server()\n server.set_up()\n",
"step-5": "from Socket import Socket\nimport threading\n\nclass Server(Socket):\n def __init__(self):\n super(Server, self).__init__()\n\n print(\"server listening\")\n\n self.users = []\n\n def set_up(self):\n self.bind((\"192.168.0.109\", 1337))\n self.listen(0)\n self.accept_sockets()\n\n def send_data(self, data):\n for user in self.users:\n try:\n user.send(data)\n except ConnectionResetError:\n self.users.pop(self.users.index(user))\n pass\n\n def listen_socket(self, listened_socket=None):\n countForDel = 0\n while True:\n data = listened_socket.recv(2048)\n if data.decode(\"utf-8\")[0:-2] == '':\n countForDel += 1\n if countForDel > 5:\n print(\"deleting user: Antispam\")\n self.users.pop(self.users.index(listened_socket))\n raise ConnectionResetError\n \n print(f\"User sent {data}\")\n self.send_data(data)\n\n def accept_sockets(self):\n while True:\n user_socket, address = self.accept()\n print(f\"User <{address[0]}> connected!\")\n self.users.append(user_socket) # добавляется юзер\n print(len(self.users))\n\n listen_accepted_user = threading.Thread(\n target=self.listen_socket,\n args=(user_socket,))\n\n listen_accepted_user.start()\n\n\nif __name__ == '__main__':\n server = Server()\n server.set_up()\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
def get_subset(A, k, n):
a_list = [i for i in A]
if len(a_list) == k:
result.append(a_list)
return
s_num = max(a_list) + 1 if a_list else 1
for i in range(s_num, n + 1):
a_list.append(i)
get_subset(a_list, k, n)
a_list.remove(i)
<|reserved_special_token_0|>
def main():
subset_algor(7, 3)
for i in range(len(result)):
print(result[i], ' Rank: ', i)
print(len(result))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_subset(A, k, n):
a_list = [i for i in A]
if len(a_list) == k:
result.append(a_list)
return
s_num = max(a_list) + 1 if a_list else 1
for i in range(s_num, n + 1):
a_list.append(i)
get_subset(a_list, k, n)
a_list.remove(i)
def subset_algor(n, k):
V = []
get_subset(V, k, n)
def main():
subset_algor(7, 3)
for i in range(len(result)):
print(result[i], ' Rank: ', i)
print(len(result))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_subset(A, k, n):
a_list = [i for i in A]
if len(a_list) == k:
result.append(a_list)
return
s_num = max(a_list) + 1 if a_list else 1
for i in range(s_num, n + 1):
a_list.append(i)
get_subset(a_list, k, n)
a_list.remove(i)
def subset_algor(n, k):
V = []
get_subset(V, k, n)
def main():
subset_algor(7, 3)
for i in range(len(result)):
print(result[i], ' Rank: ', i)
print(len(result))
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
result = []
def get_subset(A, k, n):
a_list = [i for i in A]
if len(a_list) == k:
result.append(a_list)
return
s_num = max(a_list) + 1 if a_list else 1
for i in range(s_num, n + 1):
a_list.append(i)
get_subset(a_list, k, n)
a_list.remove(i)
def subset_algor(n, k):
V = []
get_subset(V, k, n)
def main():
subset_algor(7, 3)
for i in range(len(result)):
print(result[i], ' Rank: ', i)
print(len(result))
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
"""
k-element subsets of the set [n]
3-element subsets of the set [6]
123
"""
result = []
def get_subset(A, k, n):
a_list = [i for i in A]
if len(a_list) == k:
result.append(a_list)
return
s_num = max(a_list)+1 if a_list else 1
for i in range(s_num, n+1):
a_list.append(i)
get_subset(a_list, k, n)
a_list.remove(i)
def subset_algor(n, k):
V = []
get_subset(V, k, n)
def main():
# subset_algor(int(input()), int(input()))
subset_algor(7, 3)
for i in range(len(result)):
print(result[i], " Rank: ", i)
print(len(result))
if __name__ == "__main__":
main()
|
flexible
|
{
"blob_id": "d48353caa07d3bfa003ea9354b411fe0c79591db",
"index": 2725,
"step-1": "<mask token>\n\n\ndef get_subset(A, k, n):\n a_list = [i for i in A]\n if len(a_list) == k:\n result.append(a_list)\n return\n s_num = max(a_list) + 1 if a_list else 1\n for i in range(s_num, n + 1):\n a_list.append(i)\n get_subset(a_list, k, n)\n a_list.remove(i)\n\n\n<mask token>\n\n\ndef main():\n subset_algor(7, 3)\n for i in range(len(result)):\n print(result[i], ' Rank: ', i)\n print(len(result))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_subset(A, k, n):\n a_list = [i for i in A]\n if len(a_list) == k:\n result.append(a_list)\n return\n s_num = max(a_list) + 1 if a_list else 1\n for i in range(s_num, n + 1):\n a_list.append(i)\n get_subset(a_list, k, n)\n a_list.remove(i)\n\n\ndef subset_algor(n, k):\n V = []\n get_subset(V, k, n)\n\n\ndef main():\n subset_algor(7, 3)\n for i in range(len(result)):\n print(result[i], ' Rank: ', i)\n print(len(result))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_subset(A, k, n):\n a_list = [i for i in A]\n if len(a_list) == k:\n result.append(a_list)\n return\n s_num = max(a_list) + 1 if a_list else 1\n for i in range(s_num, n + 1):\n a_list.append(i)\n get_subset(a_list, k, n)\n a_list.remove(i)\n\n\ndef subset_algor(n, k):\n V = []\n get_subset(V, k, n)\n\n\ndef main():\n subset_algor(7, 3)\n for i in range(len(result)):\n print(result[i], ' Rank: ', i)\n print(len(result))\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nresult = []\n\n\ndef get_subset(A, k, n):\n a_list = [i for i in A]\n if len(a_list) == k:\n result.append(a_list)\n return\n s_num = max(a_list) + 1 if a_list else 1\n for i in range(s_num, n + 1):\n a_list.append(i)\n get_subset(a_list, k, n)\n a_list.remove(i)\n\n\ndef subset_algor(n, k):\n V = []\n get_subset(V, k, n)\n\n\ndef main():\n subset_algor(7, 3)\n for i in range(len(result)):\n print(result[i], ' Rank: ', i)\n print(len(result))\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "\"\"\"\nk-element subsets of the set [n]\n3-element subsets of the set [6]\n\n123\n\"\"\"\n\nresult = []\n\n\ndef get_subset(A, k, n):\n a_list = [i for i in A]\n if len(a_list) == k:\n result.append(a_list)\n return\n s_num = max(a_list)+1 if a_list else 1\n for i in range(s_num, n+1):\n a_list.append(i)\n get_subset(a_list, k, n)\n a_list.remove(i)\n\n\ndef subset_algor(n, k):\n V = []\n get_subset(V, k, n)\n\n\ndef main():\n # subset_algor(int(input()), int(input()))\n subset_algor(7, 3)\n\n for i in range(len(result)):\n print(result[i], \" Rank: \", i)\n print(len(result))\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
ciscoLwappClRoamMIB.setRevisions(('2010-01-29 00:00', '2006-04-11 00:00'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts:
ciscoLwappClRoamMIB.setRevisionsDescriptions((
'Deprecated following attributes:- clcrDot11aMinRssi, clcrDot11aHysteresis, clcrDot11aAdaptiveScanThreshold, clcrDot11aTransitionTime, clcrDot11bMinRssi, clcrDot11bHysteresis, clcrDot11bAdaptiveScanThreshold, clcrDot11bTransitionTime. clcrMIBCompliance, ciscoLwappClRoamDot11aRfParamsGroup, ciscoLwappClRoamDot11bRfParamsGroup Added following attributes:- clcrDot11aMinRssiV2, clcrDot11aHysteresisV2, clcrDot11aAdaptiveScanThresholdV2, clcrDot11aTransitionTimeV2, clcrDot11bMinRssiV2, clcrDot11bHysteresisV2, clcrDot11bAdaptiveScanThresholdV2, clcrDot11bTransitionTimeV2. clcrMIBComplianceRev1, ciscoLwappClRoamDot11aRfParamsGroupSup1, ciscoLwappClRoamDot11bRfParamsGroupSup1'
, 'Initial version of this MIB module.'))
if mibBuilder.loadTexts:
ciscoLwappClRoamMIB.setLastUpdated('201001290000Z')
if mibBuilder.loadTexts:
ciscoLwappClRoamMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts:
ciscoLwappClRoamMIB.setContactInfo(
'Cisco Systems, Customer Service Postal: 170 West Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS Email: [email protected]'
)
if mibBuilder.loadTexts:
ciscoLwappClRoamMIB.setDescription(
"This MIB is intended to be implemented on all those devices operating as Central controllers, that terminate the Light Weight Access Point Protocol tunnel from Cisco Light-weight LWAPP Access Points. Information provided by this MIB is for CCX related features as specified in the CCX specifications. This MIB covers roaming RF parameters for CCX clients. The relationship between CC and the LWAPP APs can be depicted as follows: +......+ +......+ +......+ + + + + + + + CC + + CC + + CC + + + + + + + +......+ +......+ +......+ .. . . .. . . . . . . . . . . . . . . . . . . +......+ +......+ +......+ +......+ + + + + + + + + + AP + + AP + + AP + + AP + + + + + + + + + +......+ +......+ +......+ +......+ . . . . . . . . . . . . . . . . . . . +......+ +......+ +......+ +......+ + + + + + + + + + MN + + MN + + MN + + MN + + + + + + + + + +......+ +......+ +......+ +......+ The LWAPP tunnel exists between the controller and the APs. The MNs communicate with the APs through the protocol defined by the 802.11 standard. LWAPP APs, upon bootup, discover and join one of the controllers and the controller pushes the configuration, that includes the WLAN parameters, to the LWAPP APs. The APs then encapsulate all the 802.11 frames from wireless clients inside LWAPP frames and forward the LWAPP frames to the controller. GLOSSARY Access Point ( AP ) An entity that contains an 802.11 medium access control ( MAC ) and physical layer ( PHY ) interface and provides access to the distribution services via the wireless medium for associated clients. LWAPP APs encapsulate all the 802.11 frames in LWAPP frames and sends them to the controller to which it is logically connected. Basic Service Set ( BSS ) The IEEE 802.11 BSS of an AP comprises of the stations directly associating with the AP. Central Controller ( CC ) The central entity that terminates the LWAPP protocol tunnel from the LWAPP APs. Throughout this MIB, this entity is also referred to as 'controller'. 
Cisco Compatible eXtensions (CCX) Wireless LAN Access Points (APs) manufactured by Cisco Systems have features and capabilities beyond those in related standards (e.g., IEEE 802.11 suite of standards ,Wi-Fi recommendations by WECA, 802.1X security suite,etc). A number of features provide higher performance.For example, Cisco AP transmits a specific Information Element, which the clients adapt to for enhanced performance. Similarly, a number of features are implemented by means of proprietary Information Elements, which Cisco clients use in specific ways to carry out tasks above and beyond the standard. Other examples of feature categories are roaming and power saving. Client Roaming A client may decide to reassociate with another AP for reasons of its own choosing. The decision of whether or not to use the information contained in the AP list is up to the discretion of the implementor, as long as the roam time requirement is met. Light Weight Access Point Protocol ( LWAPP ) This is a generic protocol that defines the communication between the Access Points and the Central Controller. Mobile Node ( MN ) A roaming 802.11 wireless device in a wireless network associated with an access point. Mobile Node and client are used interchangeably. REFERENCE [1] Wireless LAN Medium Access Control ( MAC ) and Physical Layer ( PHY ) Specifications [2] Draft-obara-capwap-lwapp-00.txt, IETF Light Weight Access Point Protocol"
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11aMode.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11aMode.setDescription(
'This object represents how the controller chooses the values of the RF parameters needed to manage roaming in 802.11a networks.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11aMinRssi.setStatus('deprecated')
if mibBuilder.loadTexts:
clcrDot11aMinRssi.setDescription(
"This object indicates the Minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal. This object is superceded by clcrDot11aMinRssiV2"
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11aHysteresis.setStatus('deprecated')
if mibBuilder.loadTexts:
clcrDot11aHysteresis.setDescription(
'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs. This object is superceded by clcrDot11aHysteresisV2'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11aAdaptiveScanThreshold.setStatus('deprecated')
if mibBuilder.loadTexts:
clcrDot11aAdaptiveScanThreshold.setDescription(
'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11aTransitionTime. This object is superceded by clcrDot11aAdaptiveScanThresholdV2'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11aTransitionTime.setStatus('deprecated')
if mibBuilder.loadTexts:
clcrDot11aTransitionTime.setDescription(
'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client?s associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second. This object is superceded by clcrDot11aTransitionTimeV2'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11aMinRssiV2.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11aMinRssiV2.setDescription(
"This object indicates the Minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal."
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11aHysteresisV2.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11aHysteresisV2.setDescription(
'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11aAdaptiveScanThresholdV2.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11aAdaptiveScanThresholdV2.setDescription(
'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11aTransitionTime.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11aTransitionTimeV2.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11aTransitionTimeV2.setDescription(
'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the clients associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11bMode.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11bMode.setDescription(
'This object represents how the controller chooses the values of the RF parameters needed to manage roaming in 802.11b/g networks.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11bMinRssi.setStatus('deprecated')
if mibBuilder.loadTexts:
clcrDot11bMinRssi.setDescription(
"This object indicates the minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal. This object is superceded by clcrDot11bMinRssiV2"
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11bHysteresis.setStatus('deprecated')
if mibBuilder.loadTexts:
clcrDot11bHysteresis.setDescription(
'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs. This object is superceded by clcrDot11bHysteresisV2'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11bAdaptiveScanThreshold.setStatus('deprecated')
if mibBuilder.loadTexts:
clcrDot11bAdaptiveScanThreshold.setDescription(
'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11bTransitionTime. This object is superceded by clcrDot11bAdaptiveScanThresholdV2'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11bTransitionTime.setStatus('deprecated')
if mibBuilder.loadTexts:
clcrDot11bTransitionTime.setDescription(
'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client is associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second. This object is superceded by clcrDot11bTransitionTimeV2'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11bMinRssiV2.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11bMinRssiV2.setDescription(
"This object indicates the minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal."
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11bHysteresisV2.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11bHysteresisV2.setDescription(
'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11bAdaptiveScanThresholdV2.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11bAdaptiveScanThresholdV2.setDescription(
'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11bTransitionTime.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11bTransitionTimeV2.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11bTransitionTimeV2.setDescription(
'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client is associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrRoamReasonReportTable.setStatus('current')
if mibBuilder.loadTexts:
clcrRoamReasonReportTable.setDescription(
'This table provides the reasons for CCX clients roaming from one AP to another. When a CCX client associates to an AP, it will always send an IAPP information packet to the new AP listing the characteristics of the previous AP. An entry is added to this table when a roam reason report is sent by a CCX client when it roams to a new AP.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrRoamReasonReportEntry.setStatus('current')
if mibBuilder.loadTexts:
clcrRoamReasonReportEntry.setDescription(
'Each entry corresponds to the roam reason report sent by a CCX client to the new AP to which client associates.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrRoamClientMacAddress.setStatus('current')
if mibBuilder.loadTexts:
clcrRoamClientMacAddress.setDescription(
'This object indicates the mac address of the client which has roamed to a new AP.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrRoamClientTimeStamp.setStatus('current')
if mibBuilder.loadTexts:
clcrRoamClientTimeStamp.setDescription(
"This object indicates the time instance at which this report was received by the new AP, to which client roamed to. This represents number of seconds elapsed since 00:00:00 on January 1, 1970, Coordinated Universal Time (UTC). So a value of '1131362704' means 'Mon Nov 7 16:55:04 2005'."
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrRoamNewApMacAddress.setStatus('current')
if mibBuilder.loadTexts:
clcrRoamNewApMacAddress.setDescription(
'This object indicates the mac address of the current AP to which client has roamed to. This AP receives the roam reason report.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrRoamPrevApMacAddress.setStatus('current')
if mibBuilder.loadTexts:
clcrRoamPrevApMacAddress.setDescription(
'This object indicates the mac address of the previous AP to which client was associated.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrRoamPrevApChannel.setStatus('current')
if mibBuilder.loadTexts:
clcrRoamPrevApChannel.setDescription(
'This object indicates the channel number at which the client was associated to the previous AP.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrRoamPrevApSsid.setStatus('current')
if mibBuilder.loadTexts:
clcrRoamPrevApSsid.setDescription(
'This object indicates the SSID at which the client was associated to the previous AP.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrRoamDisassocTimeInterval.setStatus('current')
if mibBuilder.loadTexts:
clcrRoamDisassocTimeInterval.setDescription(
'This object indicates the time elapsed since the client disassociated, in hundredth of a second.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrRoamReason.setStatus('current')
if mibBuilder.loadTexts:
clcrRoamReason.setDescription(
"This object indicates the reason for a client to roam to a new AP. The semantics are as follows. clcrUnspecified - The reason is not known or can't be found. clcrPoorLink - Normal roam due to poor link (excessive retries, too much interference, RSSI too low, etc.) clcrLoadBalancing - Normal roam due to load balancing clcrInsufficientCapacity - Roaming occured due to the insufficient capacity on the previous AP (TSPEC rejected) clcrDirectedRoam - Roaming is directed by the 802.11 wireless Infrastructure clcrFirstAssociation - This is the first association to a particular WLAN clcrRoamingIn - Roaming in from cellular or other WAN clcrRoamingOut - Roaming out to cellular or other WAN clcrBetterAp - Normal roam due to better AP found clcrDisassociated - Deauthenticated or Disassociated from the previous AP."
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11StatsTable.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11StatsTable.setDescription(
'This table populates the statistics collected when the client roamed in the WLAN. There exists a row in this table for each conceptual row in cLApDot11IfTable that represents a dot11 interface of an AP.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11StatsEntry.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11StatsEntry.setDescription(
'Each entry represents a conceptual row in clcrDot11StatsTable and corresponds to the roam reason report sent by a CCX client to the new AP which the client associates to.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11NeighborRequestRx.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11NeighborRequestRx.setDescription(
'This object indicates the count of the number of requests received from an E2E client for neighbor updates.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11NeighborReplySent.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11NeighborReplySent.setDescription(
'This object indicates the count of the number of replies sent to the client in reply to the request for neighbor updates received from the client.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11RoamReasonReportRx.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11RoamReasonReportRx.setDescription(
'This object reports the count of the number of roam reason reports received from CCX clients.'
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
clcrDot11BcastUpdatesSent.setStatus('current')
if mibBuilder.loadTexts:
clcrDot11BcastUpdatesSent.setDescription(
'This object indicates the count of the number of broadcast neighbor updates sent by an AP.'
)
<|reserved_special_token_0|>
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
clcrMIBCompliance = clcrMIBCompliance.setStatus('deprecated')
if mibBuilder.loadTexts:
clcrMIBCompliance.setDescription(
'The compliance statement for the SNMP entities that implement the ciscoLwappRoamMIB module.'
)
<|reserved_special_token_0|>
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
clcrMIBComplianceRev1 = clcrMIBComplianceRev1.setStatus('current')
if mibBuilder.loadTexts:
clcrMIBComplianceRev1.setDescription(
'The compliance statement for the SNMP entities that implement the ciscoLwappRoamMIB module.'
)
<|reserved_special_token_0|>
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoLwappClRoamDot11aRfParamsGroup = (ciscoLwappClRoamDot11aRfParamsGroup
.setStatus('deprecated'))
if mibBuilder.loadTexts:
ciscoLwappClRoamDot11aRfParamsGroup.setDescription(
'This collection of objects represent the radio parameters for the 802.11a networks.'
)
<|reserved_special_token_0|>
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoLwappClRoamDot11bRfParamsGroup = (ciscoLwappClRoamDot11bRfParamsGroup
.setStatus('deprecated'))
if mibBuilder.loadTexts:
ciscoLwappClRoamDot11bRfParamsGroup.setDescription(
'This collection of objects represent the radio parameters for the 802.11b/g bands.'
)
<|reserved_special_token_0|>
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoLwappClRoamroamReasonGroup = (ciscoLwappClRoamroamReasonGroup.
setStatus('current'))
if mibBuilder.loadTexts:
ciscoLwappClRoamroamReasonGroup.setDescription(
'This collection of objects provide the reasons for clients roaming between APs.'
)
<|reserved_special_token_0|>
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoLwappClRoamroamingStatsGroup = (ciscoLwappClRoamroamingStatsGroup.
setStatus('current'))
if mibBuilder.loadTexts:
ciscoLwappClRoamroamingStatsGroup.setDescription(
'This collection of objects provide the counters related to roaming.')
<|reserved_special_token_0|>
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoLwappClRoamDot11aRfParamsGroupSup1 = (
ciscoLwappClRoamDot11aRfParamsGroupSup1.setStatus('current'))
if mibBuilder.loadTexts:
ciscoLwappClRoamDot11aRfParamsGroupSup1.setDescription(
'This collection of objects represent the radio parameters for the 802.11a networks.'
)
<|reserved_special_token_0|>
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoLwappClRoamDot11bRfParamsGroupSup1 = (
ciscoLwappClRoamDot11bRfParamsGroupSup1.setStatus('current'))
if mibBuilder.loadTexts:
ciscoLwappClRoamDot11bRfParamsGroupSup1.setDescription(
'This collection of objects represent the radio parameters for the 802.11b/g bands.'
)
mibBuilder.exportSymbols('CISCO-LWAPP-CLIENT-ROAMING-MIB',
clcrDot11aMinRssi=clcrDot11aMinRssi, clcrRoamClientMacAddress=
clcrRoamClientMacAddress, ciscoLwappClRoamroamingStatsGroup=
ciscoLwappClRoamroamingStatsGroup, clcrDot11bTransitionTimeV2=
clcrDot11bTransitionTimeV2, clcrRoamNewApMacAddress=
clcrRoamNewApMacAddress, clcrMIBCompliance=clcrMIBCompliance,
clcrRoamDot11aRfParamConfig=clcrRoamDot11aRfParamConfig,
clcrDot11BcastUpdatesSent=clcrDot11BcastUpdatesSent, clcrRoamPrevApSsid
=clcrRoamPrevApSsid, clcrMIBComplianceRev1=clcrMIBComplianceRev1,
clcrDot11bHysteresisV2=clcrDot11bHysteresisV2,
ciscoLwappClRoamMIBConform=ciscoLwappClRoamMIBConform,
clcrDot11aTransitionTime=clcrDot11aTransitionTime, clcrDot11aHysteresis
=clcrDot11aHysteresis, ciscoLwappClRoamDot11bRfParamsGroupSup1=
ciscoLwappClRoamDot11bRfParamsGroupSup1, PYSNMP_MODULE_ID=
ciscoLwappClRoamMIB, clcrDot11bHysteresis=clcrDot11bHysteresis,
clcrDot11StatsEntry=clcrDot11StatsEntry, clcrRoamDisassocTimeInterval=
clcrRoamDisassocTimeInterval, ciscoLwappClRoamDot11aRfParamsGroupSup1=
ciscoLwappClRoamDot11aRfParamsGroupSup1,
clcrDot11bAdaptiveScanThreshold=clcrDot11bAdaptiveScanThreshold,
clcrDot11NeighborRequestRx=clcrDot11NeighborRequestRx,
clcrRoamClientTimeStamp=clcrRoamClientTimeStamp, clcrRoamReason=
clcrRoamReason, clcrDot11bMode=clcrDot11bMode,
clcrDot11aAdaptiveScanThreshold=clcrDot11aAdaptiveScanThreshold,
clcrDot11RoamReasonReportRx=clcrDot11RoamReasonReportRx,
clcrDot11bAdaptiveScanThresholdV2=clcrDot11bAdaptiveScanThresholdV2,
ciscoLwappClRoamDot11bRfParamsGroup=ciscoLwappClRoamDot11bRfParamsGroup,
ciscoLwappClRoamMIBNotifs=ciscoLwappClRoamMIBNotifs,
clcrRoamReasonReportTable=clcrRoamReasonReportTable,
clcrDot11aMinRssiV2=clcrDot11aMinRssiV2, ciscoLwappClRoamMIBObjects=
ciscoLwappClRoamMIBObjects, clcrDot11NeighborReplySent=
clcrDot11NeighborReplySent, clcrDot11aAdaptiveScanThresholdV2=
clcrDot11aAdaptiveScanThresholdV2, ciscoLwappClRoamroamReasonGroup=
ciscoLwappClRoamroamReasonGroup, clcrDot11StatsTable=
clcrDot11StatsTable, clcrRoamDot11Stats=clcrRoamDot11Stats,
clcrRoamDot11bRfParamConfig=clcrRoamDot11bRfParamConfig,
clcrDot11bMinRssi=clcrDot11bMinRssi, clcrRoamReasonReport=
clcrRoamReasonReport, clcrRoamPrevApMacAddress=clcrRoamPrevApMacAddress,
ciscoLwappClRoamDot11aRfParamsGroup=ciscoLwappClRoamDot11aRfParamsGroup,
clcrRoamReasonReportEntry=clcrRoamReasonReportEntry,
ciscoLwappClRoamMIBGroups=ciscoLwappClRoamMIBGroups,
clcrDot11bMinRssiV2=clcrDot11bMinRssiV2, ciscoLwappClRoamMIBCompliances
=ciscoLwappClRoamMIBCompliances, clcrDot11aMode=clcrDot11aMode,
clcrDot11aTransitionTimeV2=clcrDot11aTransitionTimeV2,
clcrRoamPrevApChannel=clcrRoamPrevApChannel, clcrDot11bTransitionTime=
clcrDot11bTransitionTime, ciscoLwappClRoamMIB=ciscoLwappClRoamMIB,
clcrDot11aHysteresisV2=clcrDot11aHysteresisV2)
<|reserved_special_token_1|>
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols('ASN1',
'ObjectIdentifier', 'OctetString', 'Integer')
NamedValues, = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion,
ConstraintsIntersection, ValueRangeConstraint) = (mibBuilder.
importSymbols('ASN1-REFINEMENT', 'ValueSizeConstraint',
'SingleValueConstraint', 'ConstraintsUnion', 'ConstraintsIntersection',
'ValueRangeConstraint'))
cLApDot11IfSlotId, cLApSysMacAddress = mibBuilder.importSymbols(
'CISCO-LWAPP-AP-MIB', 'cLApDot11IfSlotId', 'cLApSysMacAddress')
CLDot11RfParamMode, CLDot11Channel = mibBuilder.importSymbols(
'CISCO-LWAPP-TC-MIB', 'CLDot11RfParamMode', 'CLDot11Channel')
ciscoMgmt, = mibBuilder.importSymbols('CISCO-SMI', 'ciscoMgmt')
ObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols(
'SNMPv2-CONF', 'ObjectGroup', 'NotificationGroup', 'ModuleCompliance')
(Integer32, IpAddress, MibIdentifier, NotificationType, TimeTicks, Bits,
ObjectIdentity, Counter64, ModuleIdentity, iso, Gauge32, MibScalar,
MibTable, MibTableRow, MibTableColumn, Counter32, Unsigned32) = (mibBuilder
.importSymbols('SNMPv2-SMI', 'Integer32', 'IpAddress', 'MibIdentifier',
'NotificationType', 'TimeTicks', 'Bits', 'ObjectIdentity', 'Counter64',
'ModuleIdentity', 'iso', 'Gauge32', 'MibScalar', 'MibTable',
'MibTableRow', 'MibTableColumn', 'Counter32', 'Unsigned32'))
DisplayString, MacAddress, TextualConvention, TimeInterval = (mibBuilder.
importSymbols('SNMPv2-TC', 'DisplayString', 'MacAddress',
'TextualConvention', 'TimeInterval'))
ciscoLwappClRoamMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 523))
ciscoLwappClRoamMIB.setRevisions(('2010-01-29 00:00', '2006-04-11 00:00'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts:
ciscoLwappClRoamMIB.setRevisionsDescriptions((
'Deprecated following attributes:- clcrDot11aMinRssi, clcrDot11aHysteresis, clcrDot11aAdaptiveScanThreshold, clcrDot11aTransitionTime, clcrDot11bMinRssi, clcrDot11bHysteresis, clcrDot11bAdaptiveScanThreshold, clcrDot11bTransitionTime. clcrMIBCompliance, ciscoLwappClRoamDot11aRfParamsGroup, ciscoLwappClRoamDot11bRfParamsGroup Added following attributes:- clcrDot11aMinRssiV2, clcrDot11aHysteresisV2, clcrDot11aAdaptiveScanThresholdV2, clcrDot11aTransitionTimeV2, clcrDot11bMinRssiV2, clcrDot11bHysteresisV2, clcrDot11bAdaptiveScanThresholdV2, clcrDot11bTransitionTimeV2. clcrMIBComplianceRev1, ciscoLwappClRoamDot11aRfParamsGroupSup1, ciscoLwappClRoamDot11bRfParamsGroupSup1'
, 'Initial version of this MIB module.'))
if mibBuilder.loadTexts:
ciscoLwappClRoamMIB.setLastUpdated('201001290000Z')
if mibBuilder.loadTexts:
ciscoLwappClRoamMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts:
ciscoLwappClRoamMIB.setContactInfo(
'Cisco Systems, Customer Service Postal: 170 West Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS Email: [email protected]'
)
if mibBuilder.loadTexts:
ciscoLwappClRoamMIB.setDescription(
"This MIB is intended to be implemented on all those devices operating as Central controllers, that terminate the Light Weight Access Point Protocol tunnel from Cisco Light-weight LWAPP Access Points. Information provided by this MIB is for CCX related features as specified in the CCX specifications. This MIB covers roaming RF parameters for CCX clients. The relationship between CC and the LWAPP APs can be depicted as follows: +......+ +......+ +......+ + + + + + + + CC + + CC + + CC + + + + + + + +......+ +......+ +......+ .. . . .. . . . . . . . . . . . . . . . . . . +......+ +......+ +......+ +......+ + + + + + + + + + AP + + AP + + AP + + AP + + + + + + + + + +......+ +......+ +......+ +......+ . . . . . . . . . . . . . . . . . . . +......+ +......+ +......+ +......+ + + + + + + + + + MN + + MN + + MN + + MN + + + + + + + + + +......+ +......+ +......+ +......+ The LWAPP tunnel exists between the controller and the APs. The MNs communicate with the APs through the protocol defined by the 802.11 standard. LWAPP APs, upon bootup, discover and join one of the controllers and the controller pushes the configuration, that includes the WLAN parameters, to the LWAPP APs. The APs then encapsulate all the 802.11 frames from wireless clients inside LWAPP frames and forward the LWAPP frames to the controller. GLOSSARY Access Point ( AP ) An entity that contains an 802.11 medium access control ( MAC ) and physical layer ( PHY ) interface and provides access to the distribution services via the wireless medium for associated clients. LWAPP APs encapsulate all the 802.11 frames in LWAPP frames and sends them to the controller to which it is logically connected. Basic Service Set ( BSS ) The IEEE 802.11 BSS of an AP comprises of the stations directly associating with the AP. Central Controller ( CC ) The central entity that terminates the LWAPP protocol tunnel from the LWAPP APs. Throughout this MIB, this entity is also referred to as 'controller'. 
Cisco Compatible eXtensions (CCX) Wireless LAN Access Points (APs) manufactured by Cisco Systems have features and capabilities beyond those in related standards (e.g., IEEE 802.11 suite of standards ,Wi-Fi recommendations by WECA, 802.1X security suite,etc). A number of features provide higher performance.For example, Cisco AP transmits a specific Information Element, which the clients adapt to for enhanced performance. Similarly, a number of features are implemented by means of proprietary Information Elements, which Cisco clients use in specific ways to carry out tasks above and beyond the standard. Other examples of feature categories are roaming and power saving. Client Roaming A client may decide to reassociate with another AP for reasons of its own choosing. The decision of whether or not to use the information contained in the AP list is up to the discretion of the implementor, as long as the roam time requirement is met. Light Weight Access Point Protocol ( LWAPP ) This is a generic protocol that defines the communication between the Access Points and the Central Controller. Mobile Node ( MN ) A roaming 802.11 wireless device in a wireless network associated with an access point. Mobile Node and client are used interchangeably. REFERENCE [1] Wireless LAN Medium Access Control ( MAC ) and Physical Layer ( PHY ) Specifications [2] Draft-obara-capwap-lwapp-00.txt, IETF Light Weight Access Point Protocol"
)
# --- MIB subtree layout ----------------------------------------------------
# Top-level branches under ciscoLwappClRoamMIB (…9.9.523):
#   .0 notifications, .1 objects, .2 conformance.
# Under the objects branch (.1): .1 = 802.11a RF roaming parameters,
# .2 = 802.11b/g RF roaming parameters, .3 = roam reason reports,
# .4 = per-radio roaming statistics.
ciscoLwappClRoamMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 0))
ciscoLwappClRoamMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1))
ciscoLwappClRoamMIBConform = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 2))
clcrRoamDot11aRfParamConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1)
    )
clcrRoamDot11bRfParamConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2)
    )
clcrRoamReasonReport = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3))
clcrRoamDot11Stats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4))
# --- 802.11a RF roaming parameters (original, deprecated set) --------------
# Read-write scalars under …523.1.1.  All but clcrDot11aMode carry
# setStatus('deprecated') and their descriptions name the V2 replacements
# defined further below (wider value ranges).
clcrDot11aMode = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 1),
    CLDot11RfParamMode().clone('default')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11aMode.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11aMode.setDescription(
        'This object represents how the controller chooses the values of the RF parameters needed to manage roaming in 802.11a networks.'
        )
# Minimum association RSSI, constrained to -90..-80 dBm, default -85.
clcrDot11aMinRssi = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 2),
    Integer32().subtype(subtypeSpec=ValueRangeConstraint(-90, -80)).clone(-85)
    ).setUnits('dBm').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11aMinRssi.setStatus('deprecated')
if mibBuilder.loadTexts:
    clcrDot11aMinRssi.setDescription(
        "This object indicates the Minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal. This object is superceded by clcrDot11aMinRssiV2"
        )
# Roaming hysteresis, 2..4 dB, default 2.
clcrDot11aHysteresis = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 3),
    Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 4)).clone(2)
    ).setUnits('dB').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11aHysteresis.setStatus('deprecated')
if mibBuilder.loadTexts:
    clcrDot11aHysteresis.setDescription(
        'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs. This object is superceded by clcrDot11aHysteresisV2'
        )
# Adaptive scan threshold, -77..-70 dBm, default -72.
clcrDot11aAdaptiveScanThreshold = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1,
    1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-77, -70)).
    clone(-72)).setUnits('dBm').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11aAdaptiveScanThreshold.setStatus('deprecated')
if mibBuilder.loadTexts:
    clcrDot11aAdaptiveScanThreshold.setDescription(
        'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11aTransitionTime. This object is superceded by clcrDot11aAdaptiveScanThresholdV2'
        )
# Transition time, 100..10000 hundredths of a second, default 500.
clcrDot11aTransitionTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 5),
    TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(100, 10000)).
    clone(500)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11aTransitionTime.setStatus('deprecated')
if mibBuilder.loadTexts:
    clcrDot11aTransitionTime.setDescription(
        'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client?s associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second. This object is superceded by clcrDot11aTransitionTimeV2'
        )
# --- 802.11a RF roaming parameters (V2 replacements, current) --------------
# Same semantics as the deprecated scalars above but with widened value
# ranges (-255..255 / 0..255 / 0..10000) and no clone() defaults.
clcrDot11aMinRssiV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 6),
    Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))).setUnits(
    'dBm').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11aMinRssiV2.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11aMinRssiV2.setDescription(
        "This object indicates the Minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal."
        )
clcrDot11aHysteresisV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 7),
    Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setUnits(
    'dB').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11aHysteresisV2.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11aHysteresisV2.setDescription(
        'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs.'
        )
clcrDot11aAdaptiveScanThresholdV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523,
    1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))
    ).setUnits('dBm').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11aAdaptiveScanThresholdV2.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11aAdaptiveScanThresholdV2.setDescription(
        'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11aTransitionTime.'
        )
clcrDot11aTransitionTimeV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1,
    9), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(0, 10000))
    ).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11aTransitionTimeV2.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11aTransitionTimeV2.setDescription(
        'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the clients associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second.'
        )
# --- 802.11b/g RF roaming parameters (original, deprecated set) ------------
# Mirror of the 802.11a scalars above, rooted at …523.1.2; same constraints
# and defaults, with descriptions naming the V2 replacements below.
clcrDot11bMode = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 1),
    CLDot11RfParamMode().clone('default')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11bMode.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11bMode.setDescription(
        'This object represents how the controller chooses the values of the RF parameters needed to manage roaming in 802.11b/g networks.'
        )
clcrDot11bMinRssi = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 2),
    Integer32().subtype(subtypeSpec=ValueRangeConstraint(-90, -80)).clone(-85)
    ).setUnits('dBm').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11bMinRssi.setStatus('deprecated')
if mibBuilder.loadTexts:
    clcrDot11bMinRssi.setDescription(
        "This object indicates the minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal. This object is superceded by clcrDot11bMinRssiV2"
        )
clcrDot11bHysteresis = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 3),
    Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 4)).clone(2)
    ).setUnits('dB').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11bHysteresis.setStatus('deprecated')
if mibBuilder.loadTexts:
    clcrDot11bHysteresis.setDescription(
        'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs. This object is superceded by clcrDot11bHysteresisV2'
        )
clcrDot11bAdaptiveScanThreshold = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1,
    2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-77, -70)).
    clone(-72)).setUnits('dBm').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11bAdaptiveScanThreshold.setStatus('deprecated')
if mibBuilder.loadTexts:
    clcrDot11bAdaptiveScanThreshold.setDescription(
        'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11bTransitionTime. This object is superceded by clcrDot11bAdaptiveScanThresholdV2'
        )
clcrDot11bTransitionTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 5),
    TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(100, 10000)).
    clone(500)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11bTransitionTime.setStatus('deprecated')
if mibBuilder.loadTexts:
    clcrDot11bTransitionTime.setDescription(
        'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client is associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second. This object is superceded by clcrDot11bTransitionTimeV2'
        )
# --- 802.11b/g RF roaming parameters (V2 replacements, current) ------------
# Mirror of the 802.11a V2 scalars, rooted at …523.1.2.6-9, with the same
# widened value ranges.
# NOTE(review): the b-side V2 descriptions still reference the 11a object
# clcrDot11aAdaptiveScanThreshold — that text comes from the original MIB
# source and is reproduced verbatim.
clcrDot11bMinRssiV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 6),
    Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))).setUnits(
    'dBm').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11bMinRssiV2.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11bMinRssiV2.setDescription(
        "This object indicates the minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal."
        )
clcrDot11bHysteresisV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 7),
    Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setUnits(
    'dB').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11bHysteresisV2.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11bHysteresisV2.setDescription(
        'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs.'
        )
clcrDot11bAdaptiveScanThresholdV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523,
    1, 2, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))
    ).setUnits('dBm').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11bAdaptiveScanThresholdV2.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11bAdaptiveScanThresholdV2.setDescription(
        'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11bTransitionTime.'
        )
clcrDot11bTransitionTimeV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2,
    9), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(0, 10000))
    ).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    clcrDot11bTransitionTimeV2.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11bTransitionTimeV2.setDescription(
        'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client is associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second.'
        )
# --- Roam reason report table (…523.1.3.1) ---------------------------------
# One row per roam-reason report received from a CCX client; rows are indexed
# by (clcrRoamClientMacAddress, clcrRoamClientTimeStamp).  The two index
# columns are not-accessible (no setMaxAccess call); the remaining columns
# are read-only.
clcrRoamReasonReportTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1))
if mibBuilder.loadTexts:
    clcrRoamReasonReportTable.setStatus('current')
if mibBuilder.loadTexts:
    clcrRoamReasonReportTable.setDescription(
        'This table provides the reasons for CCX clients roaming from one AP to another. When a CCX client associates to an AP, it will always send an IAPP information packet to the new AP listing the characteristics of the previous AP. An entry is added to this table when a roam reason report is sent by a CCX client when it roams to a new AP.'
        )
clcrRoamReasonReportEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3,
    1, 1)).setIndexNames((0, 'CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrRoamClientMacAddress'), (0, 'CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrRoamClientTimeStamp'))
if mibBuilder.loadTexts:
    clcrRoamReasonReportEntry.setStatus('current')
if mibBuilder.loadTexts:
    clcrRoamReasonReportEntry.setDescription(
        'Each entry corresponds to the roam reason report sent by a CCX client to the new AP to which client associates.'
        )
# Index column 1: roaming client's MAC address.
clcrRoamClientMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1,
    3, 1, 1, 1), MacAddress())
if mibBuilder.loadTexts:
    clcrRoamClientMacAddress.setStatus('current')
if mibBuilder.loadTexts:
    clcrRoamClientMacAddress.setDescription(
        'This object indicates the mac address of the client which has roamed to a new AP.'
        )
# Index column 2: epoch-seconds timestamp of the report.
clcrRoamClientTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3,
    1, 1, 2), TimeTicks())
if mibBuilder.loadTexts:
    clcrRoamClientTimeStamp.setStatus('current')
if mibBuilder.loadTexts:
    clcrRoamClientTimeStamp.setDescription(
        "This object indicates the time instance at which this report was received by the new AP, to which client roamed to. This represents number of seconds elapsed since 00:00:00 on January 1, 1970, Coordinated Universal Time (UTC). So a value of '1131362704' means 'Mon Nov 7 16:55:04 2005'."
        )
clcrRoamNewApMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3,
    1, 1, 3), MacAddress()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    clcrRoamNewApMacAddress.setStatus('current')
if mibBuilder.loadTexts:
    clcrRoamNewApMacAddress.setDescription(
        'This object indicates the mac address of the current AP to which client has roamed to. This AP receives the roam reason report.'
        )
clcrRoamPrevApMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1,
    3, 1, 1, 4), MacAddress()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    clcrRoamPrevApMacAddress.setStatus('current')
if mibBuilder.loadTexts:
    clcrRoamPrevApMacAddress.setDescription(
        'This object indicates the mac address of the previous AP to which client was associated.'
        )
clcrRoamPrevApChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3,
    1, 1, 5), CLDot11Channel()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    clcrRoamPrevApChannel.setStatus('current')
if mibBuilder.loadTexts:
    clcrRoamPrevApChannel.setDescription(
        'This object indicates the channel number at which the client was associated to the previous AP.'
        )
# SSID limited to 1..32 octets, matching 802.11 SSID length limits.
clcrRoamPrevApSsid = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1,
    1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))
    ).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    clcrRoamPrevApSsid.setStatus('current')
if mibBuilder.loadTexts:
    clcrRoamPrevApSsid.setDescription(
        'This object indicates the SSID at which the client was associated to the previous AP.'
        )
clcrRoamDisassocTimeInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523,
    1, 3, 1, 1, 7), TimeInterval()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    clcrRoamDisassocTimeInterval.setStatus('current')
if mibBuilder.loadTexts:
    clcrRoamDisassocTimeInterval.setDescription(
        'This object indicates the time elapsed since the client disassociated, in hundredth of a second.'
        )
# Enumerated roam reason (0..9); named values defined inline below.
clcrRoamReason = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 8
    ), Integer32().subtype(subtypeSpec=ConstraintsUnion(
    SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues
    =NamedValues(('clcrUnspecified', 0), ('clcrPoorLink', 1), (
    'clcrLoadBalancing', 2), ('clcrInsufficientCapacity', 3), (
    'clcrDirectedRoam', 4), ('clcrFirstAssociation', 5), ('clcrRoamingIn',
    6), ('clcrRoamingOut', 7), ('clcrBetterAp', 8), ('clcrDisassociated', 9)))
    ).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    clcrRoamReason.setStatus('current')
if mibBuilder.loadTexts:
    clcrRoamReason.setDescription(
        "This object indicates the reason for a client to roam to a new AP. The semantics are as follows. clcrUnspecified - The reason is not known or can't be found. clcrPoorLink - Normal roam due to poor link (excessive retries, too much interference, RSSI too low, etc.) clcrLoadBalancing - Normal roam due to load balancing clcrInsufficientCapacity - Roaming occured due to the insufficient capacity on the previous AP (TSPEC rejected) clcrDirectedRoam - Roaming is directed by the 802.11 wireless Infrastructure clcrFirstAssociation - This is the first association to a particular WLAN clcrRoamingIn - Roaming in from cellular or other WAN clcrRoamingOut - Roaming out to cellular or other WAN clcrBetterAp - Normal roam due to better AP found clcrDisassociated - Deauthenticated or Disassociated from the previous AP."
        )
# --- Per-radio roaming statistics table (…523.1.4.1) -----------------------
# Read-only Counter32 columns, indexed by the AP's MAC address and dot11
# slot id imported from CISCO-LWAPP-AP-MIB (one row per AP radio).
clcrDot11StatsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1))
if mibBuilder.loadTexts:
    clcrDot11StatsTable.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11StatsTable.setDescription(
        'This table populates the statistics collected when the client roamed in the WLAN. There exists a row in this table for each conceptual row in cLApDot11IfTable that represents a dot11 interface of an AP.'
        )
clcrDot11StatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1, 1)
    ).setIndexNames((0, 'CISCO-LWAPP-AP-MIB', 'cLApSysMacAddress'), (0,
    'CISCO-LWAPP-AP-MIB', 'cLApDot11IfSlotId'))
if mibBuilder.loadTexts:
    clcrDot11StatsEntry.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11StatsEntry.setDescription(
        'Each entry represents a conceptual row in clcrDot11StatsTable and corresponds to the roam reason report sent by a CCX client to the new AP which the client associates to.'
        )
clcrDot11NeighborRequestRx = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1,
    4, 1, 1, 1), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    clcrDot11NeighborRequestRx.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11NeighborRequestRx.setDescription(
        'This object indicates the count of the number of requests received from an E2E client for neighbor updates.'
        )
clcrDot11NeighborReplySent = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1,
    4, 1, 1, 2), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    clcrDot11NeighborReplySent.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11NeighborReplySent.setDescription(
        'This object indicates the count of the number of replies sent to the client in reply to the request for neighbor updates received from the client.'
        )
clcrDot11RoamReasonReportRx = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523,
    1, 4, 1, 1, 3), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    clcrDot11RoamReasonReportRx.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11RoamReasonReportRx.setDescription(
        'This object reports the count of the number of roam reason reports received from CCX clients.'
        )
clcrDot11BcastUpdatesSent = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1,
    4, 1, 1, 4), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    clcrDot11BcastUpdatesSent.setStatus('current')
if mibBuilder.loadTexts:
    clcrDot11BcastUpdatesSent.setDescription(
        'This object indicates the count of the number of broadcast neighbor updates sent by an AP.'
        )
# --- Conformance: compliance statements (…523.2.1) -------------------------
# clcrMIBCompliance (deprecated) references the original object groups;
# clcrMIBComplianceRev1 (current) references the *Sup1 groups that carry the
# V2 scalars.  The setStatus() calls rebind the name and are gated on
# pysnmp > 4.4.0, where ModuleCompliance supports status.
ciscoLwappClRoamMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523,
    2, 1))
ciscoLwappClRoamMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2))
clcrMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 1, 1)
    ).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'ciscoLwappClRoamDot11aRfParamsGroup'), (
    'CISCO-LWAPP-CLIENT-ROAMING-MIB', 'ciscoLwappClRoamDot11bRfParamsGroup'
    ), ('CISCO-LWAPP-CLIENT-ROAMING-MIB', 'ciscoLwappClRoamroamReasonGroup'
    ), ('CISCO-LWAPP-CLIENT-ROAMING-MIB', 'ciscoLwappClRoamroamingStatsGroup'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    clcrMIBCompliance = clcrMIBCompliance.setStatus('deprecated')
if mibBuilder.loadTexts:
    clcrMIBCompliance.setDescription(
        'The compliance statement for the SNMP entities that implement the ciscoLwappRoamMIB module.'
        )
clcrMIBComplianceRev1 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 1, 2)
    ).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'ciscoLwappClRoamDot11aRfParamsGroupSup1'), (
    'CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'ciscoLwappClRoamDot11bRfParamsGroupSup1'), (
    'CISCO-LWAPP-CLIENT-ROAMING-MIB', 'ciscoLwappClRoamroamReasonGroup'), (
    'CISCO-LWAPP-CLIENT-ROAMING-MIB', 'ciscoLwappClRoamroamingStatsGroup'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    clcrMIBComplianceRev1 = clcrMIBComplianceRev1.setStatus('current')
if mibBuilder.loadTexts:
    clcrMIBComplianceRev1.setDescription(
        'The compliance statement for the SNMP entities that implement the ciscoLwappRoamMIB module.'
        )
# --- Conformance: object groups (…523.2.2) ---------------------------------
# Groups 1-2 (deprecated) collect the original a/b RF scalars; groups 3-4
# (current) collect the roam-reason columns and roaming counters; groups 5-6
# (*Sup1, current) collect the V2 RF scalars.  Status setters rebind the
# group objects and are gated on pysnmp > 4.4.0.
ciscoLwappClRoamDot11aRfParamsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9,
    523, 2, 2, 1)).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11aMode'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11aMinRssi'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11aHysteresis'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11aAdaptiveScanThreshold'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11aTransitionTime'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoLwappClRoamDot11aRfParamsGroup = (ciscoLwappClRoamDot11aRfParamsGroup
        .setStatus('deprecated'))
if mibBuilder.loadTexts:
    ciscoLwappClRoamDot11aRfParamsGroup.setDescription(
        'This collection of objects represent the radio parameters for the 802.11a networks.'
        )
ciscoLwappClRoamDot11bRfParamsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9,
    523, 2, 2, 2)).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11bMode'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11bMinRssi'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11bHysteresis'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11bAdaptiveScanThreshold'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11bTransitionTime'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoLwappClRoamDot11bRfParamsGroup = (ciscoLwappClRoamDot11bRfParamsGroup
        .setStatus('deprecated'))
if mibBuilder.loadTexts:
    ciscoLwappClRoamDot11bRfParamsGroup.setDescription(
        'This collection of objects represent the radio parameters for the 802.11b/g bands.'
        )
ciscoLwappClRoamroamReasonGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523,
    2, 2, 3)).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrRoamNewApMacAddress'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrRoamPrevApMacAddress'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrRoamPrevApChannel'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrRoamPrevApSsid'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrRoamDisassocTimeInterval'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrRoamReason'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoLwappClRoamroamReasonGroup = (ciscoLwappClRoamroamReasonGroup.
        setStatus('current'))
if mibBuilder.loadTexts:
    ciscoLwappClRoamroamReasonGroup.setDescription(
        'This collection of objects provide the reasons for clients roaming between APs.'
        )
ciscoLwappClRoamroamingStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9,
    523, 2, 2, 4)).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11NeighborRequestRx'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11NeighborReplySent'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11RoamReasonReportRx'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11BcastUpdatesSent'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoLwappClRoamroamingStatsGroup = (ciscoLwappClRoamroamingStatsGroup.
        setStatus('current'))
if mibBuilder.loadTexts:
    ciscoLwappClRoamroamingStatsGroup.setDescription(
        'This collection of objects provide the counters related to roaming.')
ciscoLwappClRoamDot11aRfParamsGroupSup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 9,
    9, 523, 2, 2, 5)).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11aMode'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11aMinRssiV2'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11aHysteresisV2'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11aAdaptiveScanThresholdV2'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11aTransitionTimeV2'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoLwappClRoamDot11aRfParamsGroupSup1 = (
        ciscoLwappClRoamDot11aRfParamsGroupSup1.setStatus('current'))
if mibBuilder.loadTexts:
    ciscoLwappClRoamDot11aRfParamsGroupSup1.setDescription(
        'This collection of objects represent the radio parameters for the 802.11a networks.'
        )
ciscoLwappClRoamDot11bRfParamsGroupSup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 9,
    9, 523, 2, 2, 6)).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11bMode'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11bMinRssiV2'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11bHysteresisV2'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11bAdaptiveScanThresholdV2'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    'clcrDot11bTransitionTimeV2'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoLwappClRoamDot11bRfParamsGroupSup1 = (
        ciscoLwappClRoamDot11bRfParamsGroupSup1.setStatus('current'))
if mibBuilder.loadTexts:
    ciscoLwappClRoamDot11bRfParamsGroupSup1.setDescription(
        'This collection of objects represent the radio parameters for the 802.11b/g bands.'
        )
# --- Symbol export ---------------------------------------------------------
# Publishes every MIB object defined above to the MibBuilder under the module
# name 'CISCO-LWAPP-CLIENT-ROAMING-MIB' so other MIB modules and the SNMP
# engine can resolve them.  PYSNMP_MODULE_ID identifies the ModuleIdentity.
mibBuilder.exportSymbols('CISCO-LWAPP-CLIENT-ROAMING-MIB',
    clcrDot11aMinRssi=clcrDot11aMinRssi, clcrRoamClientMacAddress=
    clcrRoamClientMacAddress, ciscoLwappClRoamroamingStatsGroup=
    ciscoLwappClRoamroamingStatsGroup, clcrDot11bTransitionTimeV2=
    clcrDot11bTransitionTimeV2, clcrRoamNewApMacAddress=
    clcrRoamNewApMacAddress, clcrMIBCompliance=clcrMIBCompliance,
    clcrRoamDot11aRfParamConfig=clcrRoamDot11aRfParamConfig,
    clcrDot11BcastUpdatesSent=clcrDot11BcastUpdatesSent, clcrRoamPrevApSsid
    =clcrRoamPrevApSsid, clcrMIBComplianceRev1=clcrMIBComplianceRev1,
    clcrDot11bHysteresisV2=clcrDot11bHysteresisV2,
    ciscoLwappClRoamMIBConform=ciscoLwappClRoamMIBConform,
    clcrDot11aTransitionTime=clcrDot11aTransitionTime, clcrDot11aHysteresis
    =clcrDot11aHysteresis, ciscoLwappClRoamDot11bRfParamsGroupSup1=
    ciscoLwappClRoamDot11bRfParamsGroupSup1, PYSNMP_MODULE_ID=
    ciscoLwappClRoamMIB, clcrDot11bHysteresis=clcrDot11bHysteresis,
    clcrDot11StatsEntry=clcrDot11StatsEntry, clcrRoamDisassocTimeInterval=
    clcrRoamDisassocTimeInterval, ciscoLwappClRoamDot11aRfParamsGroupSup1=
    ciscoLwappClRoamDot11aRfParamsGroupSup1,
    clcrDot11bAdaptiveScanThreshold=clcrDot11bAdaptiveScanThreshold,
    clcrDot11NeighborRequestRx=clcrDot11NeighborRequestRx,
    clcrRoamClientTimeStamp=clcrRoamClientTimeStamp, clcrRoamReason=
    clcrRoamReason, clcrDot11bMode=clcrDot11bMode,
    clcrDot11aAdaptiveScanThreshold=clcrDot11aAdaptiveScanThreshold,
    clcrDot11RoamReasonReportRx=clcrDot11RoamReasonReportRx,
    clcrDot11bAdaptiveScanThresholdV2=clcrDot11bAdaptiveScanThresholdV2,
    ciscoLwappClRoamDot11bRfParamsGroup=ciscoLwappClRoamDot11bRfParamsGroup,
    ciscoLwappClRoamMIBNotifs=ciscoLwappClRoamMIBNotifs,
    clcrRoamReasonReportTable=clcrRoamReasonReportTable,
    clcrDot11aMinRssiV2=clcrDot11aMinRssiV2, ciscoLwappClRoamMIBObjects=
    ciscoLwappClRoamMIBObjects, clcrDot11NeighborReplySent=
    clcrDot11NeighborReplySent, clcrDot11aAdaptiveScanThresholdV2=
    clcrDot11aAdaptiveScanThresholdV2, ciscoLwappClRoamroamReasonGroup=
    ciscoLwappClRoamroamReasonGroup, clcrDot11StatsTable=
    clcrDot11StatsTable, clcrRoamDot11Stats=clcrRoamDot11Stats,
    clcrRoamDot11bRfParamConfig=clcrRoamDot11bRfParamConfig,
    clcrDot11bMinRssi=clcrDot11bMinRssi, clcrRoamReasonReport=
    clcrRoamReasonReport, clcrRoamPrevApMacAddress=clcrRoamPrevApMacAddress,
    ciscoLwappClRoamDot11aRfParamsGroup=ciscoLwappClRoamDot11aRfParamsGroup,
    clcrRoamReasonReportEntry=clcrRoamReasonReportEntry,
    ciscoLwappClRoamMIBGroups=ciscoLwappClRoamMIBGroups,
    clcrDot11bMinRssiV2=clcrDot11bMinRssiV2, ciscoLwappClRoamMIBCompliances
    =ciscoLwappClRoamMIBCompliances, clcrDot11aMode=clcrDot11aMode,
    clcrDot11aTransitionTimeV2=clcrDot11aTransitionTimeV2,
    clcrRoamPrevApChannel=clcrRoamPrevApChannel, clcrDot11bTransitionTime=
    clcrDot11bTransitionTime, ciscoLwappClRoamMIB=ciscoLwappClRoamMIB,
    clcrDot11aHysteresisV2=clcrDot11aHysteresisV2)
# NOTE(review): a stray concatenation/tokenizer artifact was here; a second,
# complete copy of this generated MIB module follows below.
#
# PySNMP MIB module CISCO-LWAPP-CLIENT-ROAMING-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-LWAPP-CLIENT-ROAMING-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:04:56 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueRangeConstraint")
cLApDot11IfSlotId, cLApSysMacAddress = mibBuilder.importSymbols("CISCO-LWAPP-AP-MIB", "cLApDot11IfSlotId", "cLApSysMacAddress")
CLDot11RfParamMode, CLDot11Channel = mibBuilder.importSymbols("CISCO-LWAPP-TC-MIB", "CLDot11RfParamMode", "CLDot11Channel")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
ObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "NotificationGroup", "ModuleCompliance")
Integer32, IpAddress, MibIdentifier, NotificationType, TimeTicks, Bits, ObjectIdentity, Counter64, ModuleIdentity, iso, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "IpAddress", "MibIdentifier", "NotificationType", "TimeTicks", "Bits", "ObjectIdentity", "Counter64", "ModuleIdentity", "iso", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "Unsigned32")
DisplayString, MacAddress, TextualConvention, TimeInterval = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "MacAddress", "TextualConvention", "TimeInterval")
# --- Module identity -------------------------------------------------------
# CISCO-LWAPP-CLIENT-ROAMING-MIB lives under the Cisco enterprise management
# arc: 1.3.6.1.4.1.9.9.523.  `mibBuilder` is injected by pysnmp's MibBuilder
# when this generated module is loaded.
ciscoLwappClRoamMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 523))
ciscoLwappClRoamMIB.setRevisions(('2010-01-29 00:00', '2006-04-11 00:00',))
# setRevisionsDescriptions() exists only on newer pysnmp builds, hence the
# version guard around the call.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: ciscoLwappClRoamMIB.setRevisionsDescriptions(('Deprecated following attributes:- clcrDot11aMinRssi, clcrDot11aHysteresis, clcrDot11aAdaptiveScanThreshold, clcrDot11aTransitionTime, clcrDot11bMinRssi, clcrDot11bHysteresis, clcrDot11bAdaptiveScanThreshold, clcrDot11bTransitionTime. clcrMIBCompliance, ciscoLwappClRoamDot11aRfParamsGroup, ciscoLwappClRoamDot11bRfParamsGroup Added following attributes:- clcrDot11aMinRssiV2, clcrDot11aHysteresisV2, clcrDot11aAdaptiveScanThresholdV2, clcrDot11aTransitionTimeV2, clcrDot11bMinRssiV2, clcrDot11bHysteresisV2, clcrDot11bAdaptiveScanThresholdV2, clcrDot11bTransitionTimeV2. clcrMIBComplianceRev1, ciscoLwappClRoamDot11aRfParamsGroupSup1, ciscoLwappClRoamDot11bRfParamsGroupSup1', 'Initial version of this MIB module.',))
if mibBuilder.loadTexts: ciscoLwappClRoamMIB.setLastUpdated('201001290000Z')
if mibBuilder.loadTexts: ciscoLwappClRoamMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoLwappClRoamMIB.setContactInfo('Cisco Systems, Customer Service Postal: 170 West Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS Email: [email protected]')
if mibBuilder.loadTexts: ciscoLwappClRoamMIB.setDescription("This MIB is intended to be implemented on all those devices operating as Central controllers, that terminate the Light Weight Access Point Protocol tunnel from Cisco Light-weight LWAPP Access Points. Information provided by this MIB is for CCX related features as specified in the CCX specifications. This MIB covers roaming RF parameters for CCX clients. The relationship between CC and the LWAPP APs can be depicted as follows: +......+ +......+ +......+ + + + + + + + CC + + CC + + CC + + + + + + + +......+ +......+ +......+ .. . . .. . . . . . . . . . . . . . . . . . . +......+ +......+ +......+ +......+ + + + + + + + + + AP + + AP + + AP + + AP + + + + + + + + + +......+ +......+ +......+ +......+ . . . . . . . . . . . . . . . . . . . +......+ +......+ +......+ +......+ + + + + + + + + + MN + + MN + + MN + + MN + + + + + + + + + +......+ +......+ +......+ +......+ The LWAPP tunnel exists between the controller and the APs. The MNs communicate with the APs through the protocol defined by the 802.11 standard. LWAPP APs, upon bootup, discover and join one of the controllers and the controller pushes the configuration, that includes the WLAN parameters, to the LWAPP APs. The APs then encapsulate all the 802.11 frames from wireless clients inside LWAPP frames and forward the LWAPP frames to the controller. GLOSSARY Access Point ( AP ) An entity that contains an 802.11 medium access control ( MAC ) and physical layer ( PHY ) interface and provides access to the distribution services via the wireless medium for associated clients. LWAPP APs encapsulate all the 802.11 frames in LWAPP frames and sends them to the controller to which it is logically connected. Basic Service Set ( BSS ) The IEEE 802.11 BSS of an AP comprises of the stations directly associating with the AP. Central Controller ( CC ) The central entity that terminates the LWAPP protocol tunnel from the LWAPP APs. Throughout this MIB, this entity is also referred to as 'controller'. Cisco Compatible eXtensions (CCX) Wireless LAN Access Points (APs) manufactured by Cisco Systems have features and capabilities beyond those in related standards (e.g., IEEE 802.11 suite of standards ,Wi-Fi recommendations by WECA, 802.1X security suite,etc). A number of features provide higher performance.For example, Cisco AP transmits a specific Information Element, which the clients adapt to for enhanced performance. Similarly, a number of features are implemented by means of proprietary Information Elements, which Cisco clients use in specific ways to carry out tasks above and beyond the standard. Other examples of feature categories are roaming and power saving. Client Roaming A client may decide to reassociate with another AP for reasons of its own choosing. The decision of whether or not to use the information contained in the AP list is up to the discretion of the implementor, as long as the roam time requirement is met. Light Weight Access Point Protocol ( LWAPP ) This is a generic protocol that defines the communication between the Access Points and the Central Controller. Mobile Node ( MN ) A roaming 802.11 wireless device in a wireless network associated with an access point. Mobile Node and client are used interchangeably. REFERENCE [1] Wireless LAN Medium Access Control ( MAC ) and Physical Layer ( PHY ) Specifications [2] Draft-obara-capwap-lwapp-00.txt, IETF Light Weight Access Point Protocol")
# --- Top-level OID layout --------------------------------------------------
# Standard three subtrees under the module OID (notifs / objects / conform),
# plus the four object sub-groups: 802.11a RF params, 802.11b RF params,
# roam-reason reports, and roaming statistics.
ciscoLwappClRoamMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 0))
ciscoLwappClRoamMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1))
ciscoLwappClRoamMIBConform = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 2))
clcrRoamDot11aRfParamConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1))
clcrRoamDot11bRfParamConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2))
clcrRoamReasonReport = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3))
clcrRoamDot11Stats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4))
# --- 802.11a roaming RF parameters (scalars, read-write) -------------------
# The original MinRssi/Hysteresis/ScanThreshold/TransitionTime scalars are
# marked 'deprecated' and superseded by the *V2 objects below, which widen
# the permitted value ranges.
clcrDot11aMode = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 1), CLDot11RfParamMode().clone('default')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11aMode.setStatus('current')
if mibBuilder.loadTexts: clcrDot11aMode.setDescription('This object represents how the controller chooses the values of the RF parameters needed to manage roaming in 802.11a networks.')
# Deprecated: range -90..-80 dBm, default -85.
clcrDot11aMinRssi = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-90, -80)).clone(-85)).setUnits('dBm').setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11aMinRssi.setStatus('deprecated')
if mibBuilder.loadTexts: clcrDot11aMinRssi.setDescription("This object indicates the Minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal. This object is superceded by clcrDot11aMinRssiV2")
# Deprecated: range 2..4 dB, default 2.
clcrDot11aHysteresis = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 4)).clone(2)).setUnits('dB').setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11aHysteresis.setStatus('deprecated')
if mibBuilder.loadTexts: clcrDot11aHysteresis.setDescription('This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs. This object is superceded by clcrDot11aHysteresisV2')
# Deprecated: range -77..-70 dBm, default -72.
clcrDot11aAdaptiveScanThreshold = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-77, -70)).clone(-72)).setUnits('dBm').setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11aAdaptiveScanThreshold.setStatus('deprecated')
if mibBuilder.loadTexts: clcrDot11aAdaptiveScanThreshold.setDescription('This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11aTransitionTime. This object is superceded by clcrDot11aAdaptiveScanThresholdV2')
# Deprecated: 100..10000 (hundredths of a second), default 500.
clcrDot11aTransitionTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 5), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(100, 10000)).clone(500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11aTransitionTime.setStatus('deprecated')
if mibBuilder.loadTexts: clcrDot11aTransitionTime.setDescription('This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client?s associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second. This object is superceded by clcrDot11aTransitionTimeV2')
# Current replacements: wider -255..255 / 0..255 / 0..10000 ranges, no defaults.
clcrDot11aMinRssiV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))).setUnits('dBm').setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11aMinRssiV2.setStatus('current')
if mibBuilder.loadTexts: clcrDot11aMinRssiV2.setDescription("This object indicates the Minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal.")
clcrDot11aHysteresisV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setUnits('dB').setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11aHysteresisV2.setStatus('current')
if mibBuilder.loadTexts: clcrDot11aHysteresisV2.setDescription('This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs.')
clcrDot11aAdaptiveScanThresholdV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))).setUnits('dBm').setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11aAdaptiveScanThresholdV2.setStatus('current')
if mibBuilder.loadTexts: clcrDot11aAdaptiveScanThresholdV2.setDescription('This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11aTransitionTime.')
clcrDot11aTransitionTimeV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 9), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(0, 10000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11aTransitionTimeV2.setStatus('current')
if mibBuilder.loadTexts: clcrDot11aTransitionTimeV2.setDescription('This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the clients associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second.')
# --- 802.11b/g roaming RF parameters (scalars, read-write) -----------------
# Mirrors the 802.11a group above: deprecated narrow-range scalars followed
# by their current *V2 replacements with widened value ranges.
clcrDot11bMode = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 1), CLDot11RfParamMode().clone('default')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11bMode.setStatus('current')
if mibBuilder.loadTexts: clcrDot11bMode.setDescription('This object represents how the controller chooses the values of the RF parameters needed to manage roaming in 802.11b/g networks.')
# Deprecated: range -90..-80 dBm, default -85.
clcrDot11bMinRssi = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-90, -80)).clone(-85)).setUnits('dBm').setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11bMinRssi.setStatus('deprecated')
if mibBuilder.loadTexts: clcrDot11bMinRssi.setDescription("This object indicates the minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal. This object is superceded by clcrDot11bMinRssiV2")
# Deprecated: range 2..4 dB, default 2.
clcrDot11bHysteresis = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 4)).clone(2)).setUnits('dB').setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11bHysteresis.setStatus('deprecated')
if mibBuilder.loadTexts: clcrDot11bHysteresis.setDescription('This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs. This object is superceded by clcrDot11bHysteresisV2')
# Deprecated: range -77..-70 dBm, default -72.
clcrDot11bAdaptiveScanThreshold = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-77, -70)).clone(-72)).setUnits('dBm').setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11bAdaptiveScanThreshold.setStatus('deprecated')
if mibBuilder.loadTexts: clcrDot11bAdaptiveScanThreshold.setDescription('This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11bTransitionTime. This object is superceded by clcrDot11bAdaptiveScanThresholdV2')
# Deprecated: 100..10000 (hundredths of a second), default 500.
clcrDot11bTransitionTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 5), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(100, 10000)).clone(500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11bTransitionTime.setStatus('deprecated')
if mibBuilder.loadTexts: clcrDot11bTransitionTime.setDescription('This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client is associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second. This object is superceded by clcrDot11bTransitionTimeV2')
# Current replacements: wider -255..255 / 0..255 / 0..10000 ranges, no defaults.
clcrDot11bMinRssiV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))).setUnits('dBm').setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11bMinRssiV2.setStatus('current')
if mibBuilder.loadTexts: clcrDot11bMinRssiV2.setDescription("This object indicates the minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal.")
clcrDot11bHysteresisV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setUnits('dB').setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11bHysteresisV2.setStatus('current')
if mibBuilder.loadTexts: clcrDot11bHysteresisV2.setDescription('This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs.')
clcrDot11bAdaptiveScanThresholdV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))).setUnits('dBm').setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11bAdaptiveScanThresholdV2.setStatus('current')
if mibBuilder.loadTexts: clcrDot11bAdaptiveScanThresholdV2.setDescription('This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11bTransitionTime.')
clcrDot11bTransitionTimeV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 9), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(0, 10000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: clcrDot11bTransitionTimeV2.setStatus('current')
if mibBuilder.loadTexts: clcrDot11bTransitionTimeV2.setDescription('This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client is associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second.')
# --- Roam-reason report table ----------------------------------------------
# One row per roam-reason report sent by a CCX client when it associates to
# a new AP.  Indexed by (client MAC, report timestamp); the first two index
# columns are not-accessible, the remaining columns are read-only.
clcrRoamReasonReportTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1), )
if mibBuilder.loadTexts: clcrRoamReasonReportTable.setStatus('current')
if mibBuilder.loadTexts: clcrRoamReasonReportTable.setDescription('This table provides the reasons for CCX clients roaming from one AP to another. When a CCX client associates to an AP, it will always send an IAPP information packet to the new AP listing the characteristics of the previous AP. An entry is added to this table when a roam reason report is sent by a CCX client when it roams to a new AP.')
clcrRoamReasonReportEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1), ).setIndexNames((0, "CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrRoamClientMacAddress"), (0, "CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrRoamClientTimeStamp"))
if mibBuilder.loadTexts: clcrRoamReasonReportEntry.setStatus('current')
if mibBuilder.loadTexts: clcrRoamReasonReportEntry.setDescription('Each entry corresponds to the roam reason report sent by a CCX client to the new AP to which client associates.')
# Index column: roaming client's MAC address.
clcrRoamClientMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 1), MacAddress())
if mibBuilder.loadTexts: clcrRoamClientMacAddress.setStatus('current')
if mibBuilder.loadTexts: clcrRoamClientMacAddress.setDescription('This object indicates the mac address of the client which has roamed to a new AP.')
# Index column: Unix-epoch timestamp of report reception.
clcrRoamClientTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 2), TimeTicks())
if mibBuilder.loadTexts: clcrRoamClientTimeStamp.setStatus('current')
if mibBuilder.loadTexts: clcrRoamClientTimeStamp.setDescription("This object indicates the time instance at which this report was received by the new AP, to which client roamed to. This represents number of seconds elapsed since 00:00:00 on January 1, 1970, Coordinated Universal Time (UTC). So a value of '1131362704' means 'Mon Nov 7 16:55:04 2005'.")
clcrRoamNewApMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 3), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clcrRoamNewApMacAddress.setStatus('current')
if mibBuilder.loadTexts: clcrRoamNewApMacAddress.setDescription('This object indicates the mac address of the current AP to which client has roamed to. This AP receives the roam reason report.')
clcrRoamPrevApMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 4), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clcrRoamPrevApMacAddress.setStatus('current')
if mibBuilder.loadTexts: clcrRoamPrevApMacAddress.setDescription('This object indicates the mac address of the previous AP to which client was associated.')
clcrRoamPrevApChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 5), CLDot11Channel()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clcrRoamPrevApChannel.setStatus('current')
if mibBuilder.loadTexts: clcrRoamPrevApChannel.setDescription('This object indicates the channel number at which the client was associated to the previous AP.')
clcrRoamPrevApSsid = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: clcrRoamPrevApSsid.setStatus('current')
if mibBuilder.loadTexts: clcrRoamPrevApSsid.setDescription('This object indicates the SSID at which the client was associated to the previous AP.')
clcrRoamDisassocTimeInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 7), TimeInterval()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clcrRoamDisassocTimeInterval.setStatus('current')
if mibBuilder.loadTexts: clcrRoamDisassocTimeInterval.setDescription('This object indicates the time elapsed since the client disassociated, in hundredth of a second.')
# Enumerated roam reason (0..9); semantics spelled out in the description.
clcrRoamReason = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("clcrUnspecified", 0), ("clcrPoorLink", 1), ("clcrLoadBalancing", 2), ("clcrInsufficientCapacity", 3), ("clcrDirectedRoam", 4), ("clcrFirstAssociation", 5), ("clcrRoamingIn", 6), ("clcrRoamingOut", 7), ("clcrBetterAp", 8), ("clcrDisassociated", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: clcrRoamReason.setStatus('current')
if mibBuilder.loadTexts: clcrRoamReason.setDescription("This object indicates the reason for a client to roam to a new AP. The semantics are as follows. clcrUnspecified - The reason is not known or can't be found. clcrPoorLink - Normal roam due to poor link (excessive retries, too much interference, RSSI too low, etc.) clcrLoadBalancing - Normal roam due to load balancing clcrInsufficientCapacity - Roaming occured due to the insufficient capacity on the previous AP (TSPEC rejected) clcrDirectedRoam - Roaming is directed by the 802.11 wireless Infrastructure clcrFirstAssociation - This is the first association to a particular WLAN clcrRoamingIn - Roaming in from cellular or other WAN clcrRoamingOut - Roaming out to cellular or other WAN clcrBetterAp - Normal roam due to better AP found clcrDisassociated - Deauthenticated or Disassociated from the previous AP.")
# --- Roaming statistics table ----------------------------------------------
# Per-radio roaming counters; rows are indexed by the AP's system MAC and
# dot11 slot id imported from CISCO-LWAPP-AP-MIB.  All counters read-only.
clcrDot11StatsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1), )
if mibBuilder.loadTexts: clcrDot11StatsTable.setStatus('current')
if mibBuilder.loadTexts: clcrDot11StatsTable.setDescription('This table populates the statistics collected when the client roamed in the WLAN. There exists a row in this table for each conceptual row in cLApDot11IfTable that represents a dot11 interface of an AP.')
clcrDot11StatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1, 1), ).setIndexNames((0, "CISCO-LWAPP-AP-MIB", "cLApSysMacAddress"), (0, "CISCO-LWAPP-AP-MIB", "cLApDot11IfSlotId"))
if mibBuilder.loadTexts: clcrDot11StatsEntry.setStatus('current')
if mibBuilder.loadTexts: clcrDot11StatsEntry.setDescription('Each entry represents a conceptual row in clcrDot11StatsTable and corresponds to the roam reason report sent by a CCX client to the new AP which the client associates to.')
clcrDot11NeighborRequestRx = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clcrDot11NeighborRequestRx.setStatus('current')
if mibBuilder.loadTexts: clcrDot11NeighborRequestRx.setDescription('This object indicates the count of the number of requests received from an E2E client for neighbor updates.')
clcrDot11NeighborReplySent = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clcrDot11NeighborReplySent.setStatus('current')
if mibBuilder.loadTexts: clcrDot11NeighborReplySent.setDescription('This object indicates the count of the number of replies sent to the client in reply to the request for neighbor updates received from the client.')
clcrDot11RoamReasonReportRx = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clcrDot11RoamReasonReportRx.setStatus('current')
if mibBuilder.loadTexts: clcrDot11RoamReasonReportRx.setDescription('This object reports the count of the number of roam reason reports received from CCX clients.')
clcrDot11BcastUpdatesSent = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clcrDot11BcastUpdatesSent.setStatus('current')
if mibBuilder.loadTexts: clcrDot11BcastUpdatesSent.setDescription('This object indicates the count of the number of broadcast neighbor updates sent by an AP.')
# --- Conformance: compliance statements and object groups ------------------
# clcrMIBCompliance (deprecated) references the original RF-params groups;
# clcrMIBComplianceRev1 (current) references the *Sup1 groups that cover the
# V2 objects.  setStatus() calls are guarded because returning self from
# setStatus only works on newer pysnmp builds.
ciscoLwappClRoamMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 1))
ciscoLwappClRoamMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2))
clcrMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 1, 1)).setObjects(("CISCO-LWAPP-CLIENT-ROAMING-MIB", "ciscoLwappClRoamDot11aRfParamsGroup"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "ciscoLwappClRoamDot11bRfParamsGroup"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "ciscoLwappClRoamroamReasonGroup"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "ciscoLwappClRoamroamingStatsGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    clcrMIBCompliance = clcrMIBCompliance.setStatus('deprecated')
if mibBuilder.loadTexts: clcrMIBCompliance.setDescription('The compliance statement for the SNMP entities that implement the ciscoLwappRoamMIB module.')
clcrMIBComplianceRev1 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 1, 2)).setObjects(("CISCO-LWAPP-CLIENT-ROAMING-MIB", "ciscoLwappClRoamDot11aRfParamsGroupSup1"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "ciscoLwappClRoamDot11bRfParamsGroupSup1"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "ciscoLwappClRoamroamReasonGroup"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "ciscoLwappClRoamroamingStatsGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    clcrMIBComplianceRev1 = clcrMIBComplianceRev1.setStatus('current')
if mibBuilder.loadTexts: clcrMIBComplianceRev1.setDescription('The compliance statement for the SNMP entities that implement the ciscoLwappRoamMIB module.')
# Deprecated group: original (narrow-range) 802.11a RF parameters.
ciscoLwappClRoamDot11aRfParamsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2, 1)).setObjects(("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11aMode"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11aMinRssi"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11aHysteresis"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11aAdaptiveScanThreshold"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11aTransitionTime"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoLwappClRoamDot11aRfParamsGroup = ciscoLwappClRoamDot11aRfParamsGroup.setStatus('deprecated')
if mibBuilder.loadTexts: ciscoLwappClRoamDot11aRfParamsGroup.setDescription('This collection of objects represent the radio parameters for the 802.11a networks.')
# Deprecated group: original (narrow-range) 802.11b/g RF parameters.
ciscoLwappClRoamDot11bRfParamsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2, 2)).setObjects(("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11bMode"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11bMinRssi"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11bHysteresis"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11bAdaptiveScanThreshold"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11bTransitionTime"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoLwappClRoamDot11bRfParamsGroup = ciscoLwappClRoamDot11bRfParamsGroup.setStatus('deprecated')
if mibBuilder.loadTexts: ciscoLwappClRoamDot11bRfParamsGroup.setDescription('This collection of objects represent the radio parameters for the 802.11b/g bands.')
# Current group: roam-reason report columns.
ciscoLwappClRoamroamReasonGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2, 3)).setObjects(("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrRoamNewApMacAddress"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrRoamPrevApMacAddress"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrRoamPrevApChannel"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrRoamPrevApSsid"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrRoamDisassocTimeInterval"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrRoamReason"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoLwappClRoamroamReasonGroup = ciscoLwappClRoamroamReasonGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoLwappClRoamroamReasonGroup.setDescription('This collection of objects provide the reasons for clients roaming between APs.')
# Current group: roaming statistics counters.
ciscoLwappClRoamroamingStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2, 4)).setObjects(("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11NeighborRequestRx"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11NeighborReplySent"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11RoamReasonReportRx"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11BcastUpdatesSent"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoLwappClRoamroamingStatsGroup = ciscoLwappClRoamroamingStatsGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoLwappClRoamroamingStatsGroup.setDescription('This collection of objects provide the counters related to roaming.')
# Current group: 802.11a V2 RF parameters (replaces Dot11aRfParamsGroup).
ciscoLwappClRoamDot11aRfParamsGroupSup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2, 5)).setObjects(("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11aMode"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11aMinRssiV2"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11aHysteresisV2"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11aAdaptiveScanThresholdV2"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11aTransitionTimeV2"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoLwappClRoamDot11aRfParamsGroupSup1 = ciscoLwappClRoamDot11aRfParamsGroupSup1.setStatus('current')
if mibBuilder.loadTexts: ciscoLwappClRoamDot11aRfParamsGroupSup1.setDescription('This collection of objects represent the radio parameters for the 802.11a networks.')
# Current group: 802.11b/g V2 RF parameters (replaces Dot11bRfParamsGroup).
ciscoLwappClRoamDot11bRfParamsGroupSup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2, 6)).setObjects(("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11bMode"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11bMinRssiV2"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11bHysteresisV2"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11bAdaptiveScanThresholdV2"), ("CISCO-LWAPP-CLIENT-ROAMING-MIB", "clcrDot11bTransitionTimeV2"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoLwappClRoamDot11bRfParamsGroupSup1 = ciscoLwappClRoamDot11bRfParamsGroupSup1.setStatus('current')
if mibBuilder.loadTexts: ciscoLwappClRoamDot11bRfParamsGroupSup1.setDescription('This collection of objects represent the radio parameters for the 802.11b/g bands.')
mibBuilder.exportSymbols("CISCO-LWAPP-CLIENT-ROAMING-MIB", clcrDot11aMinRssi=clcrDot11aMinRssi, clcrRoamClientMacAddress=clcrRoamClientMacAddress, ciscoLwappClRoamroamingStatsGroup=ciscoLwappClRoamroamingStatsGroup, clcrDot11bTransitionTimeV2=clcrDot11bTransitionTimeV2, clcrRoamNewApMacAddress=clcrRoamNewApMacAddress, clcrMIBCompliance=clcrMIBCompliance, clcrRoamDot11aRfParamConfig=clcrRoamDot11aRfParamConfig, clcrDot11BcastUpdatesSent=clcrDot11BcastUpdatesSent, clcrRoamPrevApSsid=clcrRoamPrevApSsid, clcrMIBComplianceRev1=clcrMIBComplianceRev1, clcrDot11bHysteresisV2=clcrDot11bHysteresisV2, ciscoLwappClRoamMIBConform=ciscoLwappClRoamMIBConform, clcrDot11aTransitionTime=clcrDot11aTransitionTime, clcrDot11aHysteresis=clcrDot11aHysteresis, ciscoLwappClRoamDot11bRfParamsGroupSup1=ciscoLwappClRoamDot11bRfParamsGroupSup1, PYSNMP_MODULE_ID=ciscoLwappClRoamMIB, clcrDot11bHysteresis=clcrDot11bHysteresis, clcrDot11StatsEntry=clcrDot11StatsEntry, clcrRoamDisassocTimeInterval=clcrRoamDisassocTimeInterval, ciscoLwappClRoamDot11aRfParamsGroupSup1=ciscoLwappClRoamDot11aRfParamsGroupSup1, clcrDot11bAdaptiveScanThreshold=clcrDot11bAdaptiveScanThreshold, clcrDot11NeighborRequestRx=clcrDot11NeighborRequestRx, clcrRoamClientTimeStamp=clcrRoamClientTimeStamp, clcrRoamReason=clcrRoamReason, clcrDot11bMode=clcrDot11bMode, clcrDot11aAdaptiveScanThreshold=clcrDot11aAdaptiveScanThreshold, clcrDot11RoamReasonReportRx=clcrDot11RoamReasonReportRx, clcrDot11bAdaptiveScanThresholdV2=clcrDot11bAdaptiveScanThresholdV2, ciscoLwappClRoamDot11bRfParamsGroup=ciscoLwappClRoamDot11bRfParamsGroup, ciscoLwappClRoamMIBNotifs=ciscoLwappClRoamMIBNotifs, clcrRoamReasonReportTable=clcrRoamReasonReportTable, clcrDot11aMinRssiV2=clcrDot11aMinRssiV2, ciscoLwappClRoamMIBObjects=ciscoLwappClRoamMIBObjects, clcrDot11NeighborReplySent=clcrDot11NeighborReplySent, clcrDot11aAdaptiveScanThresholdV2=clcrDot11aAdaptiveScanThresholdV2, ciscoLwappClRoamroamReasonGroup=ciscoLwappClRoamroamReasonGroup, 
clcrDot11StatsTable=clcrDot11StatsTable, clcrRoamDot11Stats=clcrRoamDot11Stats, clcrRoamDot11bRfParamConfig=clcrRoamDot11bRfParamConfig, clcrDot11bMinRssi=clcrDot11bMinRssi, clcrRoamReasonReport=clcrRoamReasonReport, clcrRoamPrevApMacAddress=clcrRoamPrevApMacAddress, ciscoLwappClRoamDot11aRfParamsGroup=ciscoLwappClRoamDot11aRfParamsGroup, clcrRoamReasonReportEntry=clcrRoamReasonReportEntry, ciscoLwappClRoamMIBGroups=ciscoLwappClRoamMIBGroups, clcrDot11bMinRssiV2=clcrDot11bMinRssiV2, ciscoLwappClRoamMIBCompliances=ciscoLwappClRoamMIBCompliances, clcrDot11aMode=clcrDot11aMode, clcrDot11aTransitionTimeV2=clcrDot11aTransitionTimeV2, clcrRoamPrevApChannel=clcrRoamPrevApChannel, clcrDot11bTransitionTime=clcrDot11bTransitionTime, ciscoLwappClRoamMIB=ciscoLwappClRoamMIB, clcrDot11aHysteresisV2=clcrDot11aHysteresisV2)
|
flexible
|
{
"blob_id": "76fbe055b53af9321cc0d57a210cfffe9188f800",
"index": 6531,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nciscoLwappClRoamMIB.setRevisions(('2010-01-29 00:00', '2006-04-11 00:00'))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n if mibBuilder.loadTexts:\n ciscoLwappClRoamMIB.setRevisionsDescriptions((\n 'Deprecated following attributes:- clcrDot11aMinRssi, clcrDot11aHysteresis, clcrDot11aAdaptiveScanThreshold, clcrDot11aTransitionTime, clcrDot11bMinRssi, clcrDot11bHysteresis, clcrDot11bAdaptiveScanThreshold, clcrDot11bTransitionTime. clcrMIBCompliance, ciscoLwappClRoamDot11aRfParamsGroup, ciscoLwappClRoamDot11bRfParamsGroup Added following attributes:- clcrDot11aMinRssiV2, clcrDot11aHysteresisV2, clcrDot11aAdaptiveScanThresholdV2, clcrDot11aTransitionTimeV2, clcrDot11bMinRssiV2, clcrDot11bHysteresisV2, clcrDot11bAdaptiveScanThresholdV2, clcrDot11bTransitionTimeV2. clcrMIBComplianceRev1, ciscoLwappClRoamDot11aRfParamsGroupSup1, ciscoLwappClRoamDot11bRfParamsGroupSup1'\n , 'Initial version of this MIB module.'))\nif mibBuilder.loadTexts:\n ciscoLwappClRoamMIB.setLastUpdated('201001290000Z')\nif mibBuilder.loadTexts:\n ciscoLwappClRoamMIB.setOrganization('Cisco Systems, Inc.')\nif mibBuilder.loadTexts:\n ciscoLwappClRoamMIB.setContactInfo(\n 'Cisco Systems, Customer Service Postal: 170 West Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS Email: [email protected]'\n )\nif mibBuilder.loadTexts:\n ciscoLwappClRoamMIB.setDescription(\n \"This MIB is intended to be implemented on all those devices operating as Central controllers, that terminate the Light Weight Access Point Protocol tunnel from Cisco Light-weight LWAPP Access Points. Information provided by this MIB is for CCX related features as specified in the CCX specifications. This MIB covers roaming RF parameters for CCX clients. The relationship between CC and the LWAPP APs can be depicted as follows: +......+ +......+ +......+ + + + + + + + CC + + CC + + CC + + + + + + + +......+ +......+ +......+ .. . . .. . . . . . . . . . . . . . . . . . . 
+......+ +......+ +......+ +......+ + + + + + + + + + AP + + AP + + AP + + AP + + + + + + + + + +......+ +......+ +......+ +......+ . . . . . . . . . . . . . . . . . . . +......+ +......+ +......+ +......+ + + + + + + + + + MN + + MN + + MN + + MN + + + + + + + + + +......+ +......+ +......+ +......+ The LWAPP tunnel exists between the controller and the APs. The MNs communicate with the APs through the protocol defined by the 802.11 standard. LWAPP APs, upon bootup, discover and join one of the controllers and the controller pushes the configuration, that includes the WLAN parameters, to the LWAPP APs. The APs then encapsulate all the 802.11 frames from wireless clients inside LWAPP frames and forward the LWAPP frames to the controller. GLOSSARY Access Point ( AP ) An entity that contains an 802.11 medium access control ( MAC ) and physical layer ( PHY ) interface and provides access to the distribution services via the wireless medium for associated clients. LWAPP APs encapsulate all the 802.11 frames in LWAPP frames and sends them to the controller to which it is logically connected. Basic Service Set ( BSS ) The IEEE 802.11 BSS of an AP comprises of the stations directly associating with the AP. Central Controller ( CC ) The central entity that terminates the LWAPP protocol tunnel from the LWAPP APs. Throughout this MIB, this entity is also referred to as 'controller'. Cisco Compatible eXtensions (CCX) Wireless LAN Access Points (APs) manufactured by Cisco Systems have features and capabilities beyond those in related standards (e.g., IEEE 802.11 suite of standards ,Wi-Fi recommendations by WECA, 802.1X security suite,etc). A number of features provide higher performance.For example, Cisco AP transmits a specific Information Element, which the clients adapt to for enhanced performance. 
Similarly, a number of features are implemented by means of proprietary Information Elements, which Cisco clients use in specific ways to carry out tasks above and beyond the standard. Other examples of feature categories are roaming and power saving. Client Roaming A client may decide to reassociate with another AP for reasons of its own choosing. The decision of whether or not to use the information contained in the AP list is up to the discretion of the implementor, as long as the roam time requirement is met. Light Weight Access Point Protocol ( LWAPP ) This is a generic protocol that defines the communication between the Access Points and the Central Controller. Mobile Node ( MN ) A roaming 802.11 wireless device in a wireless network associated with an access point. Mobile Node and client are used interchangeably. REFERENCE [1] Wireless LAN Medium Access Control ( MAC ) and Physical Layer ( PHY ) Specifications [2] Draft-obara-capwap-lwapp-00.txt, IETF Light Weight Access Point Protocol\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11aMode.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11aMode.setDescription(\n 'This object represents how the controller chooses the values of the RF parameters needed to manage roaming in 802.11a networks.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11aMinRssi.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11aMinRssi.setDescription(\n \"This object indicates the Minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal. 
This object is superceded by clcrDot11aMinRssiV2\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11aHysteresis.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11aHysteresis.setDescription(\n 'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs. This object is superceded by clcrDot11aHysteresisV2'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11aAdaptiveScanThreshold.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11aAdaptiveScanThreshold.setDescription(\n 'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11aTransitionTime. This object is superceded by clcrDot11aAdaptiveScanThresholdV2'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11aTransitionTime.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11aTransitionTime.setDescription(\n 'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client?s associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second. This object is superceded by clcrDot11aTransitionTimeV2'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11aMinRssiV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11aMinRssiV2.setDescription(\n \"This object indicates the Minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. 
If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal.\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11aHysteresisV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11aHysteresisV2.setDescription(\n 'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11aAdaptiveScanThresholdV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11aAdaptiveScanThresholdV2.setDescription(\n 'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11aTransitionTime.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11aTransitionTimeV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11aTransitionTimeV2.setDescription(\n 'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the clients associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. 
The time is expressed in 100th of a second.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11bMode.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11bMode.setDescription(\n 'This object represents how the controller chooses the values of the RF parameters needed to manage roaming in 802.11b/g networks.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11bMinRssi.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11bMinRssi.setDescription(\n \"This object indicates the minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal. This object is superceded by clcrDot11bMinRssiV2\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11bHysteresis.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11bHysteresis.setDescription(\n 'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs. This object is superceded by clcrDot11bHysteresisV2'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11bAdaptiveScanThreshold.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11bAdaptiveScanThreshold.setDescription(\n 'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11bTransitionTime. 
This object is superceded by clcrDot11bAdaptiveScanThresholdV2'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11bTransitionTime.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11bTransitionTime.setDescription(\n 'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client is associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second. This object is superceded by clcrDot11bTransitionTimeV2'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11bMinRssiV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11bMinRssiV2.setDescription(\n \"This object indicates the minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal.\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11bHysteresisV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11bHysteresisV2.setDescription(\n 'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. 
The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11bAdaptiveScanThresholdV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11bAdaptiveScanThresholdV2.setDescription(\n 'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11bTransitionTime.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11bTransitionTimeV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11bTransitionTimeV2.setDescription(\n 'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client is associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrRoamReasonReportTable.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamReasonReportTable.setDescription(\n 'This table provides the reasons for CCX clients roaming from one AP to another. When a CCX client associates to an AP, it will always send an IAPP information packet to the new AP listing the characteristics of the previous AP. 
An entry is added to this table when a roam reason report is sent by a CCX client when it roams to a new AP.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrRoamReasonReportEntry.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamReasonReportEntry.setDescription(\n 'Each entry corresponds to the roam reason report sent by a CCX client to the new AP to which client associates.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrRoamClientMacAddress.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamClientMacAddress.setDescription(\n 'This object indicates the mac address of the client which has roamed to a new AP.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrRoamClientTimeStamp.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamClientTimeStamp.setDescription(\n \"This object indicates the time instance at which this report was received by the new AP, to which client roamed to. This represents number of seconds elapsed since 00:00:00 on January 1, 1970, Coordinated Universal Time (UTC). So a value of '1131362704' means 'Mon Nov 7 16:55:04 2005'.\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrRoamNewApMacAddress.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamNewApMacAddress.setDescription(\n 'This object indicates the mac address of the current AP to which client has roamed to. 
This AP receives the roam reason report.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrRoamPrevApMacAddress.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamPrevApMacAddress.setDescription(\n 'This object indicates the mac address of the previous AP to which client was associated.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrRoamPrevApChannel.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamPrevApChannel.setDescription(\n 'This object indicates the channel number at which the client was associated to the previous AP.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrRoamPrevApSsid.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamPrevApSsid.setDescription(\n 'This object indicates the SSID at which the client was associated to the previous AP.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrRoamDisassocTimeInterval.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamDisassocTimeInterval.setDescription(\n 'This object indicates the time elapsed since the client disassociated, in hundredth of a second.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrRoamReason.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamReason.setDescription(\n \"This object indicates the reason for a client to roam to a new AP. The semantics are as follows. clcrUnspecified - The reason is not known or can't be found. clcrPoorLink - Normal roam due to poor link (excessive retries, too much interference, RSSI too low, etc.) 
clcrLoadBalancing - Normal roam due to load balancing clcrInsufficientCapacity - Roaming occured due to the insufficient capacity on the previous AP (TSPEC rejected) clcrDirectedRoam - Roaming is directed by the 802.11 wireless Infrastructure clcrFirstAssociation - This is the first association to a particular WLAN clcrRoamingIn - Roaming in from cellular or other WAN clcrRoamingOut - Roaming out to cellular or other WAN clcrBetterAp - Normal roam due to better AP found clcrDisassociated - Deauthenticated or Disassociated from the previous AP.\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11StatsTable.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11StatsTable.setDescription(\n 'This table populates the statistics collected when the client roamed in the WLAN. There exists a row in this table for each conceptual row in cLApDot11IfTable that represents a dot11 interface of an AP.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11StatsEntry.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11StatsEntry.setDescription(\n 'Each entry represents a conceptual row in clcrDot11StatsTable and corresponds to the roam reason report sent by a CCX client to the new AP which the client associates to.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11NeighborRequestRx.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11NeighborRequestRx.setDescription(\n 'This object indicates the count of the number of requests received from an E2E client for neighbor updates.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11NeighborReplySent.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11NeighborReplySent.setDescription(\n 'This object indicates the count of the number of replies sent to the client in reply to the request for neighbor updates received from the client.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11RoamReasonReportRx.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11RoamReasonReportRx.setDescription(\n 
'This object reports the count of the number of roam reason reports received from CCX clients.'\n )\n<mask token>\nif mibBuilder.loadTexts:\n clcrDot11BcastUpdatesSent.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11BcastUpdatesSent.setDescription(\n 'This object indicates the count of the number of broadcast neighbor updates sent by an AP.'\n )\n<mask token>\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n clcrMIBCompliance = clcrMIBCompliance.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrMIBCompliance.setDescription(\n 'The compliance statement for the SNMP entities that implement the ciscoLwappRoamMIB module.'\n )\n<mask token>\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n clcrMIBComplianceRev1 = clcrMIBComplianceRev1.setStatus('current')\nif mibBuilder.loadTexts:\n clcrMIBComplianceRev1.setDescription(\n 'The compliance statement for the SNMP entities that implement the ciscoLwappRoamMIB module.'\n )\n<mask token>\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamDot11aRfParamsGroup = (ciscoLwappClRoamDot11aRfParamsGroup\n .setStatus('deprecated'))\nif mibBuilder.loadTexts:\n ciscoLwappClRoamDot11aRfParamsGroup.setDescription(\n 'This collection of objects represent the radio parameters for the 802.11a networks.'\n )\n<mask token>\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamDot11bRfParamsGroup = (ciscoLwappClRoamDot11bRfParamsGroup\n .setStatus('deprecated'))\nif mibBuilder.loadTexts:\n ciscoLwappClRoamDot11bRfParamsGroup.setDescription(\n 'This collection of objects represent the radio parameters for the 802.11b/g bands.'\n )\n<mask token>\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamroamReasonGroup = (ciscoLwappClRoamroamReasonGroup.\n setStatus('current'))\nif mibBuilder.loadTexts:\n ciscoLwappClRoamroamReasonGroup.setDescription(\n 'This collection of objects provide the reasons for clients roaming between APs.'\n 
)\n<mask token>\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamroamingStatsGroup = (ciscoLwappClRoamroamingStatsGroup.\n setStatus('current'))\nif mibBuilder.loadTexts:\n ciscoLwappClRoamroamingStatsGroup.setDescription(\n 'This collection of objects provide the counters related to roaming.')\n<mask token>\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamDot11aRfParamsGroupSup1 = (\n ciscoLwappClRoamDot11aRfParamsGroupSup1.setStatus('current'))\nif mibBuilder.loadTexts:\n ciscoLwappClRoamDot11aRfParamsGroupSup1.setDescription(\n 'This collection of objects represent the radio parameters for the 802.11a networks.'\n )\n<mask token>\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamDot11bRfParamsGroupSup1 = (\n ciscoLwappClRoamDot11bRfParamsGroupSup1.setStatus('current'))\nif mibBuilder.loadTexts:\n ciscoLwappClRoamDot11bRfParamsGroupSup1.setDescription(\n 'This collection of objects represent the radio parameters for the 802.11b/g bands.'\n )\nmibBuilder.exportSymbols('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n clcrDot11aMinRssi=clcrDot11aMinRssi, clcrRoamClientMacAddress=\n clcrRoamClientMacAddress, ciscoLwappClRoamroamingStatsGroup=\n ciscoLwappClRoamroamingStatsGroup, clcrDot11bTransitionTimeV2=\n clcrDot11bTransitionTimeV2, clcrRoamNewApMacAddress=\n clcrRoamNewApMacAddress, clcrMIBCompliance=clcrMIBCompliance,\n clcrRoamDot11aRfParamConfig=clcrRoamDot11aRfParamConfig,\n clcrDot11BcastUpdatesSent=clcrDot11BcastUpdatesSent, clcrRoamPrevApSsid\n =clcrRoamPrevApSsid, clcrMIBComplianceRev1=clcrMIBComplianceRev1,\n clcrDot11bHysteresisV2=clcrDot11bHysteresisV2,\n ciscoLwappClRoamMIBConform=ciscoLwappClRoamMIBConform,\n clcrDot11aTransitionTime=clcrDot11aTransitionTime, clcrDot11aHysteresis\n =clcrDot11aHysteresis, ciscoLwappClRoamDot11bRfParamsGroupSup1=\n ciscoLwappClRoamDot11bRfParamsGroupSup1, PYSNMP_MODULE_ID=\n ciscoLwappClRoamMIB, clcrDot11bHysteresis=clcrDot11bHysteresis,\n 
clcrDot11StatsEntry=clcrDot11StatsEntry, clcrRoamDisassocTimeInterval=\n clcrRoamDisassocTimeInterval, ciscoLwappClRoamDot11aRfParamsGroupSup1=\n ciscoLwappClRoamDot11aRfParamsGroupSup1,\n clcrDot11bAdaptiveScanThreshold=clcrDot11bAdaptiveScanThreshold,\n clcrDot11NeighborRequestRx=clcrDot11NeighborRequestRx,\n clcrRoamClientTimeStamp=clcrRoamClientTimeStamp, clcrRoamReason=\n clcrRoamReason, clcrDot11bMode=clcrDot11bMode,\n clcrDot11aAdaptiveScanThreshold=clcrDot11aAdaptiveScanThreshold,\n clcrDot11RoamReasonReportRx=clcrDot11RoamReasonReportRx,\n clcrDot11bAdaptiveScanThresholdV2=clcrDot11bAdaptiveScanThresholdV2,\n ciscoLwappClRoamDot11bRfParamsGroup=ciscoLwappClRoamDot11bRfParamsGroup,\n ciscoLwappClRoamMIBNotifs=ciscoLwappClRoamMIBNotifs,\n clcrRoamReasonReportTable=clcrRoamReasonReportTable,\n clcrDot11aMinRssiV2=clcrDot11aMinRssiV2, ciscoLwappClRoamMIBObjects=\n ciscoLwappClRoamMIBObjects, clcrDot11NeighborReplySent=\n clcrDot11NeighborReplySent, clcrDot11aAdaptiveScanThresholdV2=\n clcrDot11aAdaptiveScanThresholdV2, ciscoLwappClRoamroamReasonGroup=\n ciscoLwappClRoamroamReasonGroup, clcrDot11StatsTable=\n clcrDot11StatsTable, clcrRoamDot11Stats=clcrRoamDot11Stats,\n clcrRoamDot11bRfParamConfig=clcrRoamDot11bRfParamConfig,\n clcrDot11bMinRssi=clcrDot11bMinRssi, clcrRoamReasonReport=\n clcrRoamReasonReport, clcrRoamPrevApMacAddress=clcrRoamPrevApMacAddress,\n ciscoLwappClRoamDot11aRfParamsGroup=ciscoLwappClRoamDot11aRfParamsGroup,\n clcrRoamReasonReportEntry=clcrRoamReasonReportEntry,\n ciscoLwappClRoamMIBGroups=ciscoLwappClRoamMIBGroups,\n clcrDot11bMinRssiV2=clcrDot11bMinRssiV2, ciscoLwappClRoamMIBCompliances\n =ciscoLwappClRoamMIBCompliances, clcrDot11aMode=clcrDot11aMode,\n clcrDot11aTransitionTimeV2=clcrDot11aTransitionTimeV2,\n clcrRoamPrevApChannel=clcrRoamPrevApChannel, clcrDot11bTransitionTime=\n clcrDot11bTransitionTime, ciscoLwappClRoamMIB=ciscoLwappClRoamMIB,\n clcrDot11aHysteresisV2=clcrDot11aHysteresisV2)\n",
"step-3": "ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols('ASN1',\n 'ObjectIdentifier', 'OctetString', 'Integer')\nNamedValues, = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')\n(ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion,\n ConstraintsIntersection, ValueRangeConstraint) = (mibBuilder.\n importSymbols('ASN1-REFINEMENT', 'ValueSizeConstraint',\n 'SingleValueConstraint', 'ConstraintsUnion', 'ConstraintsIntersection',\n 'ValueRangeConstraint'))\ncLApDot11IfSlotId, cLApSysMacAddress = mibBuilder.importSymbols(\n 'CISCO-LWAPP-AP-MIB', 'cLApDot11IfSlotId', 'cLApSysMacAddress')\nCLDot11RfParamMode, CLDot11Channel = mibBuilder.importSymbols(\n 'CISCO-LWAPP-TC-MIB', 'CLDot11RfParamMode', 'CLDot11Channel')\nciscoMgmt, = mibBuilder.importSymbols('CISCO-SMI', 'ciscoMgmt')\nObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols(\n 'SNMPv2-CONF', 'ObjectGroup', 'NotificationGroup', 'ModuleCompliance')\n(Integer32, IpAddress, MibIdentifier, NotificationType, TimeTicks, Bits,\n ObjectIdentity, Counter64, ModuleIdentity, iso, Gauge32, MibScalar,\n MibTable, MibTableRow, MibTableColumn, Counter32, Unsigned32) = (mibBuilder\n .importSymbols('SNMPv2-SMI', 'Integer32', 'IpAddress', 'MibIdentifier',\n 'NotificationType', 'TimeTicks', 'Bits', 'ObjectIdentity', 'Counter64',\n 'ModuleIdentity', 'iso', 'Gauge32', 'MibScalar', 'MibTable',\n 'MibTableRow', 'MibTableColumn', 'Counter32', 'Unsigned32'))\nDisplayString, MacAddress, TextualConvention, TimeInterval = (mibBuilder.\n importSymbols('SNMPv2-TC', 'DisplayString', 'MacAddress',\n 'TextualConvention', 'TimeInterval'))\nciscoLwappClRoamMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 523))\nciscoLwappClRoamMIB.setRevisions(('2010-01-29 00:00', '2006-04-11 00:00'))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n if mibBuilder.loadTexts:\n ciscoLwappClRoamMIB.setRevisionsDescriptions((\n 'Deprecated following attributes:- clcrDot11aMinRssi, 
clcrDot11aHysteresis, clcrDot11aAdaptiveScanThreshold, clcrDot11aTransitionTime, clcrDot11bMinRssi, clcrDot11bHysteresis, clcrDot11bAdaptiveScanThreshold, clcrDot11bTransitionTime. clcrMIBCompliance, ciscoLwappClRoamDot11aRfParamsGroup, ciscoLwappClRoamDot11bRfParamsGroup Added following attributes:- clcrDot11aMinRssiV2, clcrDot11aHysteresisV2, clcrDot11aAdaptiveScanThresholdV2, clcrDot11aTransitionTimeV2, clcrDot11bMinRssiV2, clcrDot11bHysteresisV2, clcrDot11bAdaptiveScanThresholdV2, clcrDot11bTransitionTimeV2. clcrMIBComplianceRev1, ciscoLwappClRoamDot11aRfParamsGroupSup1, ciscoLwappClRoamDot11bRfParamsGroupSup1'\n , 'Initial version of this MIB module.'))\nif mibBuilder.loadTexts:\n ciscoLwappClRoamMIB.setLastUpdated('201001290000Z')\nif mibBuilder.loadTexts:\n ciscoLwappClRoamMIB.setOrganization('Cisco Systems, Inc.')\nif mibBuilder.loadTexts:\n ciscoLwappClRoamMIB.setContactInfo(\n 'Cisco Systems, Customer Service Postal: 170 West Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS Email: [email protected]'\n )\nif mibBuilder.loadTexts:\n ciscoLwappClRoamMIB.setDescription(\n \"This MIB is intended to be implemented on all those devices operating as Central controllers, that terminate the Light Weight Access Point Protocol tunnel from Cisco Light-weight LWAPP Access Points. Information provided by this MIB is for CCX related features as specified in the CCX specifications. This MIB covers roaming RF parameters for CCX clients. The relationship between CC and the LWAPP APs can be depicted as follows: +......+ +......+ +......+ + + + + + + + CC + + CC + + CC + + + + + + + +......+ +......+ +......+ .. . . .. . . . . . . . . . . . . . . . . . . +......+ +......+ +......+ +......+ + + + + + + + + + AP + + AP + + AP + + AP + + + + + + + + + +......+ +......+ +......+ +......+ . . . . . . . . . . . . . . . . . . . 
+......+ +......+ +......+ +......+ + + + + + + + + + MN + + MN + + MN + + MN + + + + + + + + + +......+ +......+ +......+ +......+ The LWAPP tunnel exists between the controller and the APs. The MNs communicate with the APs through the protocol defined by the 802.11 standard. LWAPP APs, upon bootup, discover and join one of the controllers and the controller pushes the configuration, that includes the WLAN parameters, to the LWAPP APs. The APs then encapsulate all the 802.11 frames from wireless clients inside LWAPP frames and forward the LWAPP frames to the controller. GLOSSARY Access Point ( AP ) An entity that contains an 802.11 medium access control ( MAC ) and physical layer ( PHY ) interface and provides access to the distribution services via the wireless medium for associated clients. LWAPP APs encapsulate all the 802.11 frames in LWAPP frames and sends them to the controller to which it is logically connected. Basic Service Set ( BSS ) The IEEE 802.11 BSS of an AP comprises of the stations directly associating with the AP. Central Controller ( CC ) The central entity that terminates the LWAPP protocol tunnel from the LWAPP APs. Throughout this MIB, this entity is also referred to as 'controller'. Cisco Compatible eXtensions (CCX) Wireless LAN Access Points (APs) manufactured by Cisco Systems have features and capabilities beyond those in related standards (e.g., IEEE 802.11 suite of standards ,Wi-Fi recommendations by WECA, 802.1X security suite,etc). A number of features provide higher performance.For example, Cisco AP transmits a specific Information Element, which the clients adapt to for enhanced performance. Similarly, a number of features are implemented by means of proprietary Information Elements, which Cisco clients use in specific ways to carry out tasks above and beyond the standard. Other examples of feature categories are roaming and power saving. 
Client Roaming A client may decide to reassociate with another AP for reasons of its own choosing. The decision of whether or not to use the information contained in the AP list is up to the discretion of the implementor, as long as the roam time requirement is met. Light Weight Access Point Protocol ( LWAPP ) This is a generic protocol that defines the communication between the Access Points and the Central Controller. Mobile Node ( MN ) A roaming 802.11 wireless device in a wireless network associated with an access point. Mobile Node and client are used interchangeably. REFERENCE [1] Wireless LAN Medium Access Control ( MAC ) and Physical Layer ( PHY ) Specifications [2] Draft-obara-capwap-lwapp-00.txt, IETF Light Weight Access Point Protocol\"\n )\nciscoLwappClRoamMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 0))\nciscoLwappClRoamMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1))\nciscoLwappClRoamMIBConform = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 2))\nclcrRoamDot11aRfParamConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1)\n )\nclcrRoamDot11bRfParamConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2)\n )\nclcrRoamReasonReport = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3))\nclcrRoamDot11Stats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4))\nclcrDot11aMode = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 1),\n CLDot11RfParamMode().clone('default')).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11aMode.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11aMode.setDescription(\n 'This object represents how the controller chooses the values of the RF parameters needed to manage roaming in 802.11a networks.'\n )\nclcrDot11aMinRssi = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 2),\n Integer32().subtype(subtypeSpec=ValueRangeConstraint(-90, -80)).clone(-85)\n ).setUnits('dBm').setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11aMinRssi.setStatus('deprecated')\nif mibBuilder.loadTexts:\n 
clcrDot11aMinRssi.setDescription(\n \"This object indicates the Minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal. This object is superceded by clcrDot11aMinRssiV2\"\n )\nclcrDot11aHysteresis = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 3),\n Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 4)).clone(2)\n ).setUnits('dB').setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11aHysteresis.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11aHysteresis.setDescription(\n 'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs. This object is superceded by clcrDot11aHysteresisV2'\n )\nclcrDot11aAdaptiveScanThreshold = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1,\n 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-77, -70)).\n clone(-72)).setUnits('dBm').setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11aAdaptiveScanThreshold.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11aAdaptiveScanThreshold.setDescription(\n 'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11aTransitionTime. 
This object is superceded by clcrDot11aAdaptiveScanThresholdV2'\n )\nclcrDot11aTransitionTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 5),\n TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(100, 10000)).\n clone(500)).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11aTransitionTime.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11aTransitionTime.setDescription(\n 'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client?s associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second. This object is superceded by clcrDot11aTransitionTimeV2'\n )\nclcrDot11aMinRssiV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 6),\n Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))).setUnits(\n 'dBm').setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11aMinRssiV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11aMinRssiV2.setDescription(\n \"This object indicates the Minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal.\"\n )\nclcrDot11aHysteresisV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 7),\n Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setUnits(\n 'dB').setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11aHysteresisV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11aHysteresisV2.setDescription(\n 'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. 
The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs.'\n )\nclcrDot11aAdaptiveScanThresholdV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523,\n 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))\n ).setUnits('dBm').setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11aAdaptiveScanThresholdV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11aAdaptiveScanThresholdV2.setDescription(\n 'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11aTransitionTime.'\n )\nclcrDot11aTransitionTimeV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, \n 9), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(0, 10000))\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11aTransitionTimeV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11aTransitionTimeV2.setDescription(\n 'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the clients associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. 
The time is expressed in 100th of a second.'\n )\nclcrDot11bMode = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 1),\n CLDot11RfParamMode().clone('default')).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11bMode.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11bMode.setDescription(\n 'This object represents how the controller chooses the values of the RF parameters needed to manage roaming in 802.11b/g networks.'\n )\nclcrDot11bMinRssi = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 2),\n Integer32().subtype(subtypeSpec=ValueRangeConstraint(-90, -80)).clone(-85)\n ).setUnits('dBm').setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11bMinRssi.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11bMinRssi.setDescription(\n \"This object indicates the minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal. This object is superceded by clcrDot11bMinRssiV2\"\n )\nclcrDot11bHysteresis = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 3),\n Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 4)).clone(2)\n ).setUnits('dB').setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11bHysteresis.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11bHysteresis.setDescription(\n 'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs. 
This object is superceded by clcrDot11bHysteresisV2'\n )\nclcrDot11bAdaptiveScanThreshold = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1,\n 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-77, -70)).\n clone(-72)).setUnits('dBm').setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11bAdaptiveScanThreshold.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11bAdaptiveScanThreshold.setDescription(\n 'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11bTransitionTime. This object is superceded by clcrDot11bAdaptiveScanThresholdV2'\n )\nclcrDot11bTransitionTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 5),\n TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(100, 10000)).\n clone(500)).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11bTransitionTime.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrDot11bTransitionTime.setDescription(\n 'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client is associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second. This object is superceded by clcrDot11bTransitionTimeV2'\n )\nclcrDot11bMinRssiV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 6),\n Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))).setUnits(\n 'dBm').setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11bMinRssiV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11bMinRssiV2.setDescription(\n \"This object indicates the minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. 
If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal.\"\n )\nclcrDot11bHysteresisV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 7),\n Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setUnits(\n 'dB').setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11bHysteresisV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11bHysteresisV2.setDescription(\n 'This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs.'\n )\nclcrDot11bAdaptiveScanThresholdV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523,\n 1, 2, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))\n ).setUnits('dBm').setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11bAdaptiveScanThresholdV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11bAdaptiveScanThresholdV2.setDescription(\n 'This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11bTransitionTime.'\n )\nclcrDot11bTransitionTimeV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, \n 9), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(0, 10000))\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n clcrDot11bTransitionTimeV2.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11bTransitionTimeV2.setDescription(\n 'This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client is associated AP is below the adaptive scan threshold 
configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second.'\n )\nclcrRoamReasonReportTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1))\nif mibBuilder.loadTexts:\n clcrRoamReasonReportTable.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamReasonReportTable.setDescription(\n 'This table provides the reasons for CCX clients roaming from one AP to another. When a CCX client associates to an AP, it will always send an IAPP information packet to the new AP listing the characteristics of the previous AP. An entry is added to this table when a roam reason report is sent by a CCX client when it roams to a new AP.'\n )\nclcrRoamReasonReportEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3,\n 1, 1)).setIndexNames((0, 'CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrRoamClientMacAddress'), (0, 'CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrRoamClientTimeStamp'))\nif mibBuilder.loadTexts:\n clcrRoamReasonReportEntry.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamReasonReportEntry.setDescription(\n 'Each entry corresponds to the roam reason report sent by a CCX client to the new AP to which client associates.'\n )\nclcrRoamClientMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, \n 3, 1, 1, 1), MacAddress())\nif mibBuilder.loadTexts:\n clcrRoamClientMacAddress.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamClientMacAddress.setDescription(\n 'This object indicates the mac address of the client which has roamed to a new AP.'\n )\nclcrRoamClientTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3,\n 1, 1, 2), TimeTicks())\nif mibBuilder.loadTexts:\n clcrRoamClientTimeStamp.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamClientTimeStamp.setDescription(\n \"This object indicates the time instance at which this report was received by the new AP, to which client roamed to. This represents number of seconds elapsed since 00:00:00 on January 1, 1970, Coordinated Universal Time (UTC). 
So a value of '1131362704' means 'Mon Nov 7 16:55:04 2005'.\"\n )\nclcrRoamNewApMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3,\n 1, 1, 3), MacAddress()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n clcrRoamNewApMacAddress.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamNewApMacAddress.setDescription(\n 'This object indicates the mac address of the current AP to which client has roamed to. This AP receives the roam reason report.'\n )\nclcrRoamPrevApMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, \n 3, 1, 1, 4), MacAddress()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n clcrRoamPrevApMacAddress.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamPrevApMacAddress.setDescription(\n 'This object indicates the mac address of the previous AP to which client was associated.'\n )\nclcrRoamPrevApChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, \n 1, 1, 5), CLDot11Channel()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n clcrRoamPrevApChannel.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamPrevApChannel.setDescription(\n 'This object indicates the channel number at which the client was associated to the previous AP.'\n )\nclcrRoamPrevApSsid = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, \n 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))\n ).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n clcrRoamPrevApSsid.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamPrevApSsid.setDescription(\n 'This object indicates the SSID at which the client was associated to the previous AP.'\n )\nclcrRoamDisassocTimeInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523,\n 1, 3, 1, 1, 7), TimeInterval()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n clcrRoamDisassocTimeInterval.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamDisassocTimeInterval.setDescription(\n 'This object indicates the time elapsed since the client disassociated, in hundredth of a 
second.'\n )\nclcrRoamReason = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 8\n ), Integer32().subtype(subtypeSpec=ConstraintsUnion(\n SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues\n =NamedValues(('clcrUnspecified', 0), ('clcrPoorLink', 1), (\n 'clcrLoadBalancing', 2), ('clcrInsufficientCapacity', 3), (\n 'clcrDirectedRoam', 4), ('clcrFirstAssociation', 5), ('clcrRoamingIn', \n 6), ('clcrRoamingOut', 7), ('clcrBetterAp', 8), ('clcrDisassociated', 9)))\n ).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n clcrRoamReason.setStatus('current')\nif mibBuilder.loadTexts:\n clcrRoamReason.setDescription(\n \"This object indicates the reason for a client to roam to a new AP. The semantics are as follows. clcrUnspecified - The reason is not known or can't be found. clcrPoorLink - Normal roam due to poor link (excessive retries, too much interference, RSSI too low, etc.) clcrLoadBalancing - Normal roam due to load balancing clcrInsufficientCapacity - Roaming occured due to the insufficient capacity on the previous AP (TSPEC rejected) clcrDirectedRoam - Roaming is directed by the 802.11 wireless Infrastructure clcrFirstAssociation - This is the first association to a particular WLAN clcrRoamingIn - Roaming in from cellular or other WAN clcrRoamingOut - Roaming out to cellular or other WAN clcrBetterAp - Normal roam due to better AP found clcrDisassociated - Deauthenticated or Disassociated from the previous AP.\"\n )\nclcrDot11StatsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1))\nif mibBuilder.loadTexts:\n clcrDot11StatsTable.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11StatsTable.setDescription(\n 'This table populates the statistics collected when the client roamed in the WLAN. 
There exists a row in this table for each conceptual row in cLApDot11IfTable that represents a dot11 interface of an AP.'\n )\nclcrDot11StatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1, 1)\n ).setIndexNames((0, 'CISCO-LWAPP-AP-MIB', 'cLApSysMacAddress'), (0,\n 'CISCO-LWAPP-AP-MIB', 'cLApDot11IfSlotId'))\nif mibBuilder.loadTexts:\n clcrDot11StatsEntry.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11StatsEntry.setDescription(\n 'Each entry represents a conceptual row in clcrDot11StatsTable and corresponds to the roam reason report sent by a CCX client to the new AP which the client associates to.'\n )\nclcrDot11NeighborRequestRx = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1,\n 4, 1, 1, 1), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n clcrDot11NeighborRequestRx.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11NeighborRequestRx.setDescription(\n 'This object indicates the count of the number of requests received from an E2E client for neighbor updates.'\n )\nclcrDot11NeighborReplySent = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1,\n 4, 1, 1, 2), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n clcrDot11NeighborReplySent.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11NeighborReplySent.setDescription(\n 'This object indicates the count of the number of replies sent to the client in reply to the request for neighbor updates received from the client.'\n )\nclcrDot11RoamReasonReportRx = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, \n 1, 4, 1, 1, 3), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n clcrDot11RoamReasonReportRx.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11RoamReasonReportRx.setDescription(\n 'This object reports the count of the number of roam reason reports received from CCX clients.'\n )\nclcrDot11BcastUpdatesSent = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1,\n 4, 1, 1, 4), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n 
clcrDot11BcastUpdatesSent.setStatus('current')\nif mibBuilder.loadTexts:\n clcrDot11BcastUpdatesSent.setDescription(\n 'This object indicates the count of the number of broadcast neighbor updates sent by an AP.'\n )\nciscoLwappClRoamMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523,\n 2, 1))\nciscoLwappClRoamMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2))\nclcrMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 1, 1)\n ).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'ciscoLwappClRoamDot11aRfParamsGroup'), (\n 'CISCO-LWAPP-CLIENT-ROAMING-MIB', 'ciscoLwappClRoamDot11bRfParamsGroup'\n ), ('CISCO-LWAPP-CLIENT-ROAMING-MIB', 'ciscoLwappClRoamroamReasonGroup'\n ), ('CISCO-LWAPP-CLIENT-ROAMING-MIB', 'ciscoLwappClRoamroamingStatsGroup'))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n clcrMIBCompliance = clcrMIBCompliance.setStatus('deprecated')\nif mibBuilder.loadTexts:\n clcrMIBCompliance.setDescription(\n 'The compliance statement for the SNMP entities that implement the ciscoLwappRoamMIB module.'\n )\nclcrMIBComplianceRev1 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 1, 2)\n ).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'ciscoLwappClRoamDot11aRfParamsGroupSup1'), (\n 'CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'ciscoLwappClRoamDot11bRfParamsGroupSup1'), (\n 'CISCO-LWAPP-CLIENT-ROAMING-MIB', 'ciscoLwappClRoamroamReasonGroup'), (\n 'CISCO-LWAPP-CLIENT-ROAMING-MIB', 'ciscoLwappClRoamroamingStatsGroup'))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n clcrMIBComplianceRev1 = clcrMIBComplianceRev1.setStatus('current')\nif mibBuilder.loadTexts:\n clcrMIBComplianceRev1.setDescription(\n 'The compliance statement for the SNMP entities that implement the ciscoLwappRoamMIB module.'\n )\nciscoLwappClRoamDot11aRfParamsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, \n 523, 2, 2, 1)).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11aMode'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11aMinRssi'), 
('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11aHysteresis'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11aAdaptiveScanThreshold'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11aTransitionTime'))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamDot11aRfParamsGroup = (ciscoLwappClRoamDot11aRfParamsGroup\n .setStatus('deprecated'))\nif mibBuilder.loadTexts:\n ciscoLwappClRoamDot11aRfParamsGroup.setDescription(\n 'This collection of objects represent the radio parameters for the 802.11a networks.'\n )\nciscoLwappClRoamDot11bRfParamsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, \n 523, 2, 2, 2)).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11bMode'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11bMinRssi'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11bHysteresis'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11bAdaptiveScanThreshold'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11bTransitionTime'))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamDot11bRfParamsGroup = (ciscoLwappClRoamDot11bRfParamsGroup\n .setStatus('deprecated'))\nif mibBuilder.loadTexts:\n ciscoLwappClRoamDot11bRfParamsGroup.setDescription(\n 'This collection of objects represent the radio parameters for the 802.11b/g bands.'\n )\nciscoLwappClRoamroamReasonGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523,\n 2, 2, 3)).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrRoamNewApMacAddress'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrRoamPrevApMacAddress'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrRoamPrevApChannel'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrRoamPrevApSsid'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrRoamDisassocTimeInterval'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrRoamReason'))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamroamReasonGroup = (ciscoLwappClRoamroamReasonGroup.\n setStatus('current'))\nif mibBuilder.loadTexts:\n 
ciscoLwappClRoamroamReasonGroup.setDescription(\n 'This collection of objects provide the reasons for clients roaming between APs.'\n )\nciscoLwappClRoamroamingStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, \n 523, 2, 2, 4)).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11NeighborRequestRx'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11NeighborReplySent'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11RoamReasonReportRx'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11BcastUpdatesSent'))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamroamingStatsGroup = (ciscoLwappClRoamroamingStatsGroup.\n setStatus('current'))\nif mibBuilder.loadTexts:\n ciscoLwappClRoamroamingStatsGroup.setDescription(\n 'This collection of objects provide the counters related to roaming.')\nciscoLwappClRoamDot11aRfParamsGroupSup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 9,\n 9, 523, 2, 2, 5)).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11aMode'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11aMinRssiV2'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11aHysteresisV2'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11aAdaptiveScanThresholdV2'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11aTransitionTimeV2'))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamDot11aRfParamsGroupSup1 = (\n ciscoLwappClRoamDot11aRfParamsGroupSup1.setStatus('current'))\nif mibBuilder.loadTexts:\n ciscoLwappClRoamDot11aRfParamsGroupSup1.setDescription(\n 'This collection of objects represent the radio parameters for the 802.11a networks.'\n )\nciscoLwappClRoamDot11bRfParamsGroupSup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 9,\n 9, 523, 2, 2, 6)).setObjects(('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11bMode'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11bMinRssiV2'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11bHysteresisV2'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 'clcrDot11bAdaptiveScanThresholdV2'), ('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n 
'clcrDot11bTransitionTimeV2'))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamDot11bRfParamsGroupSup1 = (\n ciscoLwappClRoamDot11bRfParamsGroupSup1.setStatus('current'))\nif mibBuilder.loadTexts:\n ciscoLwappClRoamDot11bRfParamsGroupSup1.setDescription(\n 'This collection of objects represent the radio parameters for the 802.11b/g bands.'\n )\nmibBuilder.exportSymbols('CISCO-LWAPP-CLIENT-ROAMING-MIB',\n clcrDot11aMinRssi=clcrDot11aMinRssi, clcrRoamClientMacAddress=\n clcrRoamClientMacAddress, ciscoLwappClRoamroamingStatsGroup=\n ciscoLwappClRoamroamingStatsGroup, clcrDot11bTransitionTimeV2=\n clcrDot11bTransitionTimeV2, clcrRoamNewApMacAddress=\n clcrRoamNewApMacAddress, clcrMIBCompliance=clcrMIBCompliance,\n clcrRoamDot11aRfParamConfig=clcrRoamDot11aRfParamConfig,\n clcrDot11BcastUpdatesSent=clcrDot11BcastUpdatesSent, clcrRoamPrevApSsid\n =clcrRoamPrevApSsid, clcrMIBComplianceRev1=clcrMIBComplianceRev1,\n clcrDot11bHysteresisV2=clcrDot11bHysteresisV2,\n ciscoLwappClRoamMIBConform=ciscoLwappClRoamMIBConform,\n clcrDot11aTransitionTime=clcrDot11aTransitionTime, clcrDot11aHysteresis\n =clcrDot11aHysteresis, ciscoLwappClRoamDot11bRfParamsGroupSup1=\n ciscoLwappClRoamDot11bRfParamsGroupSup1, PYSNMP_MODULE_ID=\n ciscoLwappClRoamMIB, clcrDot11bHysteresis=clcrDot11bHysteresis,\n clcrDot11StatsEntry=clcrDot11StatsEntry, clcrRoamDisassocTimeInterval=\n clcrRoamDisassocTimeInterval, ciscoLwappClRoamDot11aRfParamsGroupSup1=\n ciscoLwappClRoamDot11aRfParamsGroupSup1,\n clcrDot11bAdaptiveScanThreshold=clcrDot11bAdaptiveScanThreshold,\n clcrDot11NeighborRequestRx=clcrDot11NeighborRequestRx,\n clcrRoamClientTimeStamp=clcrRoamClientTimeStamp, clcrRoamReason=\n clcrRoamReason, clcrDot11bMode=clcrDot11bMode,\n clcrDot11aAdaptiveScanThreshold=clcrDot11aAdaptiveScanThreshold,\n clcrDot11RoamReasonReportRx=clcrDot11RoamReasonReportRx,\n clcrDot11bAdaptiveScanThresholdV2=clcrDot11bAdaptiveScanThresholdV2,\n 
ciscoLwappClRoamDot11bRfParamsGroup=ciscoLwappClRoamDot11bRfParamsGroup,\n ciscoLwappClRoamMIBNotifs=ciscoLwappClRoamMIBNotifs,\n clcrRoamReasonReportTable=clcrRoamReasonReportTable,\n clcrDot11aMinRssiV2=clcrDot11aMinRssiV2, ciscoLwappClRoamMIBObjects=\n ciscoLwappClRoamMIBObjects, clcrDot11NeighborReplySent=\n clcrDot11NeighborReplySent, clcrDot11aAdaptiveScanThresholdV2=\n clcrDot11aAdaptiveScanThresholdV2, ciscoLwappClRoamroamReasonGroup=\n ciscoLwappClRoamroamReasonGroup, clcrDot11StatsTable=\n clcrDot11StatsTable, clcrRoamDot11Stats=clcrRoamDot11Stats,\n clcrRoamDot11bRfParamConfig=clcrRoamDot11bRfParamConfig,\n clcrDot11bMinRssi=clcrDot11bMinRssi, clcrRoamReasonReport=\n clcrRoamReasonReport, clcrRoamPrevApMacAddress=clcrRoamPrevApMacAddress,\n ciscoLwappClRoamDot11aRfParamsGroup=ciscoLwappClRoamDot11aRfParamsGroup,\n clcrRoamReasonReportEntry=clcrRoamReasonReportEntry,\n ciscoLwappClRoamMIBGroups=ciscoLwappClRoamMIBGroups,\n clcrDot11bMinRssiV2=clcrDot11bMinRssiV2, ciscoLwappClRoamMIBCompliances\n =ciscoLwappClRoamMIBCompliances, clcrDot11aMode=clcrDot11aMode,\n clcrDot11aTransitionTimeV2=clcrDot11aTransitionTimeV2,\n clcrRoamPrevApChannel=clcrRoamPrevApChannel, clcrDot11bTransitionTime=\n clcrDot11bTransitionTime, ciscoLwappClRoamMIB=ciscoLwappClRoamMIB,\n clcrDot11aHysteresisV2=clcrDot11aHysteresisV2)\n",
"step-4": "#\n# PySNMP MIB module CISCO-LWAPP-CLIENT-ROAMING-MIB (http://snmplabs.com/pysmi)\n# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-LWAPP-CLIENT-ROAMING-MIB\n# Produced by pysmi-0.3.4 at Wed May 1 12:04:56 2019\n# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4\n# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) \n#\nObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols(\"ASN1\", \"ObjectIdentifier\", \"OctetString\", \"Integer\")\nNamedValues, = mibBuilder.importSymbols(\"ASN1-ENUMERATION\", \"NamedValues\")\nValueSizeConstraint, SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols(\"ASN1-REFINEMENT\", \"ValueSizeConstraint\", \"SingleValueConstraint\", \"ConstraintsUnion\", \"ConstraintsIntersection\", \"ValueRangeConstraint\")\ncLApDot11IfSlotId, cLApSysMacAddress = mibBuilder.importSymbols(\"CISCO-LWAPP-AP-MIB\", \"cLApDot11IfSlotId\", \"cLApSysMacAddress\")\nCLDot11RfParamMode, CLDot11Channel = mibBuilder.importSymbols(\"CISCO-LWAPP-TC-MIB\", \"CLDot11RfParamMode\", \"CLDot11Channel\")\nciscoMgmt, = mibBuilder.importSymbols(\"CISCO-SMI\", \"ciscoMgmt\")\nObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols(\"SNMPv2-CONF\", \"ObjectGroup\", \"NotificationGroup\", \"ModuleCompliance\")\nInteger32, IpAddress, MibIdentifier, NotificationType, TimeTicks, Bits, ObjectIdentity, Counter64, ModuleIdentity, iso, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, Unsigned32 = mibBuilder.importSymbols(\"SNMPv2-SMI\", \"Integer32\", \"IpAddress\", \"MibIdentifier\", \"NotificationType\", \"TimeTicks\", \"Bits\", \"ObjectIdentity\", \"Counter64\", \"ModuleIdentity\", \"iso\", \"Gauge32\", \"MibScalar\", \"MibTable\", \"MibTableRow\", \"MibTableColumn\", \"Counter32\", \"Unsigned32\")\nDisplayString, MacAddress, TextualConvention, TimeInterval = mibBuilder.importSymbols(\"SNMPv2-TC\", 
\"DisplayString\", \"MacAddress\", \"TextualConvention\", \"TimeInterval\")\nciscoLwappClRoamMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 523))\nciscoLwappClRoamMIB.setRevisions(('2010-01-29 00:00', '2006-04-11 00:00',))\n\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n if mibBuilder.loadTexts: ciscoLwappClRoamMIB.setRevisionsDescriptions(('Deprecated following attributes:- clcrDot11aMinRssi, clcrDot11aHysteresis, clcrDot11aAdaptiveScanThreshold, clcrDot11aTransitionTime, clcrDot11bMinRssi, clcrDot11bHysteresis, clcrDot11bAdaptiveScanThreshold, clcrDot11bTransitionTime. clcrMIBCompliance, ciscoLwappClRoamDot11aRfParamsGroup, ciscoLwappClRoamDot11bRfParamsGroup Added following attributes:- clcrDot11aMinRssiV2, clcrDot11aHysteresisV2, clcrDot11aAdaptiveScanThresholdV2, clcrDot11aTransitionTimeV2, clcrDot11bMinRssiV2, clcrDot11bHysteresisV2, clcrDot11bAdaptiveScanThresholdV2, clcrDot11bTransitionTimeV2. clcrMIBComplianceRev1, ciscoLwappClRoamDot11aRfParamsGroupSup1, ciscoLwappClRoamDot11bRfParamsGroupSup1', 'Initial version of this MIB module.',))\nif mibBuilder.loadTexts: ciscoLwappClRoamMIB.setLastUpdated('201001290000Z')\nif mibBuilder.loadTexts: ciscoLwappClRoamMIB.setOrganization('Cisco Systems, Inc.')\nif mibBuilder.loadTexts: ciscoLwappClRoamMIB.setContactInfo('Cisco Systems, Customer Service Postal: 170 West Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS Email: [email protected]')\nif mibBuilder.loadTexts: ciscoLwappClRoamMIB.setDescription(\"This MIB is intended to be implemented on all those devices operating as Central controllers, that terminate the Light Weight Access Point Protocol tunnel from Cisco Light-weight LWAPP Access Points. Information provided by this MIB is for CCX related features as specified in the CCX specifications. This MIB covers roaming RF parameters for CCX clients. 
The relationship between CC and the LWAPP APs can be depicted as follows: +......+ +......+ +......+ + + + + + + + CC + + CC + + CC + + + + + + + +......+ +......+ +......+ .. . . .. . . . . . . . . . . . . . . . . . . +......+ +......+ +......+ +......+ + + + + + + + + + AP + + AP + + AP + + AP + + + + + + + + + +......+ +......+ +......+ +......+ . . . . . . . . . . . . . . . . . . . +......+ +......+ +......+ +......+ + + + + + + + + + MN + + MN + + MN + + MN + + + + + + + + + +......+ +......+ +......+ +......+ The LWAPP tunnel exists between the controller and the APs. The MNs communicate with the APs through the protocol defined by the 802.11 standard. LWAPP APs, upon bootup, discover and join one of the controllers and the controller pushes the configuration, that includes the WLAN parameters, to the LWAPP APs. The APs then encapsulate all the 802.11 frames from wireless clients inside LWAPP frames and forward the LWAPP frames to the controller. GLOSSARY Access Point ( AP ) An entity that contains an 802.11 medium access control ( MAC ) and physical layer ( PHY ) interface and provides access to the distribution services via the wireless medium for associated clients. LWAPP APs encapsulate all the 802.11 frames in LWAPP frames and sends them to the controller to which it is logically connected. Basic Service Set ( BSS ) The IEEE 802.11 BSS of an AP comprises of the stations directly associating with the AP. Central Controller ( CC ) The central entity that terminates the LWAPP protocol tunnel from the LWAPP APs. Throughout this MIB, this entity is also referred to as 'controller'. Cisco Compatible eXtensions (CCX) Wireless LAN Access Points (APs) manufactured by Cisco Systems have features and capabilities beyond those in related standards (e.g., IEEE 802.11 suite of standards ,Wi-Fi recommendations by WECA, 802.1X security suite,etc). 
A number of features provide higher performance.For example, Cisco AP transmits a specific Information Element, which the clients adapt to for enhanced performance. Similarly, a number of features are implemented by means of proprietary Information Elements, which Cisco clients use in specific ways to carry out tasks above and beyond the standard. Other examples of feature categories are roaming and power saving. Client Roaming A client may decide to reassociate with another AP for reasons of its own choosing. The decision of whether or not to use the information contained in the AP list is up to the discretion of the implementor, as long as the roam time requirement is met. Light Weight Access Point Protocol ( LWAPP ) This is a generic protocol that defines the communication between the Access Points and the Central Controller. Mobile Node ( MN ) A roaming 802.11 wireless device in a wireless network associated with an access point. Mobile Node and client are used interchangeably. REFERENCE [1] Wireless LAN Medium Access Control ( MAC ) and Physical Layer ( PHY ) Specifications [2] Draft-obara-capwap-lwapp-00.txt, IETF Light Weight Access Point Protocol\")\nciscoLwappClRoamMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 0))\nciscoLwappClRoamMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1))\nciscoLwappClRoamMIBConform = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 2))\nclcrRoamDot11aRfParamConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1))\nclcrRoamDot11bRfParamConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2))\nclcrRoamReasonReport = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3))\nclcrRoamDot11Stats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4))\nclcrDot11aMode = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 1), CLDot11RfParamMode().clone('default')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11aMode.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11aMode.setDescription('This object represents 
how the controller chooses the values of the RF parameters needed to manage roaming in 802.11a networks.')\nclcrDot11aMinRssi = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-90, -80)).clone(-85)).setUnits('dBm').setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11aMinRssi.setStatus('deprecated')\nif mibBuilder.loadTexts: clcrDot11aMinRssi.setDescription(\"This object indicates the Minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal. This object is superceded by clcrDot11aMinRssiV2\")\nclcrDot11aHysteresis = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 4)).clone(2)).setUnits('dB').setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11aHysteresis.setStatus('deprecated')\nif mibBuilder.loadTexts: clcrDot11aHysteresis.setDescription('This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs. 
This object is superceded by clcrDot11aHysteresisV2')\nclcrDot11aAdaptiveScanThreshold = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-77, -70)).clone(-72)).setUnits('dBm').setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11aAdaptiveScanThreshold.setStatus('deprecated')\nif mibBuilder.loadTexts: clcrDot11aAdaptiveScanThreshold.setDescription('This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11aTransitionTime. This object is superceded by clcrDot11aAdaptiveScanThresholdV2')\nclcrDot11aTransitionTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 5), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(100, 10000)).clone(500)).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11aTransitionTime.setStatus('deprecated')\nif mibBuilder.loadTexts: clcrDot11aTransitionTime.setDescription('This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client?s associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second. This object is superceded by clcrDot11aTransitionTimeV2')\nclcrDot11aMinRssiV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))).setUnits('dBm').setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11aMinRssiV2.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11aMinRssiV2.setDescription(\"This object indicates the Minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. 
If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal.\")\nclcrDot11aHysteresisV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setUnits('dB').setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11aHysteresisV2.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11aHysteresisV2.setDescription('This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs.')\nclcrDot11aAdaptiveScanThresholdV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))).setUnits('dBm').setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11aAdaptiveScanThresholdV2.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11aAdaptiveScanThresholdV2.setDescription('This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11aTransitionTime.')\nclcrDot11aTransitionTimeV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 1, 9), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(0, 10000))).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11aTransitionTimeV2.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11aTransitionTimeV2.setDescription('This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the clients associated AP is below the adaptive scan threshold configured through 
clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second.')\nclcrDot11bMode = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 1), CLDot11RfParamMode().clone('default')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11bMode.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11bMode.setDescription('This object represents how the controller chooses the values of the RF parameters needed to manage roaming in 802.11b/g networks.')\nclcrDot11bMinRssi = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-90, -80)).clone(-85)).setUnits('dBm').setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11bMinRssi.setStatus('deprecated')\nif mibBuilder.loadTexts: clcrDot11bMinRssi.setDescription(\"This object indicates the minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal. This object is superceded by clcrDot11bMinRssiV2\")\nclcrDot11bHysteresis = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 4)).clone(2)).setUnits('dB').setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11bHysteresis.setStatus('deprecated')\nif mibBuilder.loadTexts: clcrDot11bHysteresis.setDescription('This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs. 
This object is superceded by clcrDot11bHysteresisV2')\nclcrDot11bAdaptiveScanThreshold = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-77, -70)).clone(-72)).setUnits('dBm').setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11bAdaptiveScanThreshold.setStatus('deprecated')\nif mibBuilder.loadTexts: clcrDot11bAdaptiveScanThreshold.setDescription('This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11bTransitionTime. This object is superceded by clcrDot11bAdaptiveScanThresholdV2')\nclcrDot11bTransitionTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 5), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(100, 10000)).clone(500)).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11bTransitionTime.setStatus('deprecated')\nif mibBuilder.loadTexts: clcrDot11bTransitionTime.setDescription('This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client is associated AP is below the adaptive scan threshold configured through clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second. This object is superceded by clcrDot11bTransitionTimeV2')\nclcrDot11bMinRssiV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))).setUnits('dBm').setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11bMinRssiV2.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11bMinRssiV2.setDescription(\"This object indicates the minimum Received Signal Strength Indication (RSSI) in dBm required to associate with the AP. It also defines the edge of coverage for the BSS. 
If the client's average received signal power dips below this threshold, clients must have roamed to another AP with a stronger signal.\")\nclcrDot11bHysteresisV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setUnits('dB').setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11bHysteresisV2.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11bHysteresisV2.setDescription('This object indicates how much stronger the signal strength (dB) of a neighbor AP must be, in order for the client to roam to it. The use of roaming hysteresis is intended to reduce the amount of clients roaming back and forth between BSSs if the client is physically located on or near the border between two BSSs.')\nclcrDot11bAdaptiveScanThresholdV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-255, 255))).setUnits('dBm').setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11bAdaptiveScanThresholdV2.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11bAdaptiveScanThresholdV2.setDescription('This object configures the threshold for the strength of the signals received(RSSI) from an AP, as seen by an associated client, below which the client must be able to roam to a neighbor AP within the specified Transition Time configured through clcrDot11bTransitionTime.')\nclcrDot11bTransitionTimeV2 = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 2, 9), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(0, 10000))).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: clcrDot11bTransitionTimeV2.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11bTransitionTimeV2.setDescription('This object configures the maximum time duration permitted for the client to detect a suitable neighbor AP to roam to and to complete the roam, whenever the RSSI from the client is associated AP is below the adaptive scan threshold configured through 
clcrDot11aAdaptiveScanThreshold. The time is expressed in 100th of a second.')\nclcrRoamReasonReportTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1), )\nif mibBuilder.loadTexts: clcrRoamReasonReportTable.setStatus('current')\nif mibBuilder.loadTexts: clcrRoamReasonReportTable.setDescription('This table provides the reasons for CCX clients roaming from one AP to another. When a CCX client associates to an AP, it will always send an IAPP information packet to the new AP listing the characteristics of the previous AP. An entry is added to this table when a roam reason report is sent by a CCX client when it roams to a new AP.')\nclcrRoamReasonReportEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1), ).setIndexNames((0, \"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrRoamClientMacAddress\"), (0, \"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrRoamClientTimeStamp\"))\nif mibBuilder.loadTexts: clcrRoamReasonReportEntry.setStatus('current')\nif mibBuilder.loadTexts: clcrRoamReasonReportEntry.setDescription('Each entry corresponds to the roam reason report sent by a CCX client to the new AP to which client associates.')\nclcrRoamClientMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 1), MacAddress())\nif mibBuilder.loadTexts: clcrRoamClientMacAddress.setStatus('current')\nif mibBuilder.loadTexts: clcrRoamClientMacAddress.setDescription('This object indicates the mac address of the client which has roamed to a new AP.')\nclcrRoamClientTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 2), TimeTicks())\nif mibBuilder.loadTexts: clcrRoamClientTimeStamp.setStatus('current')\nif mibBuilder.loadTexts: clcrRoamClientTimeStamp.setDescription(\"This object indicates the time instance at which this report was received by the new AP, to which client roamed to. This represents number of seconds elapsed since 00:00:00 on January 1, 1970, Coordinated Universal Time (UTC). 
So a value of '1131362704' means 'Mon Nov 7 16:55:04 2005'.\")\nclcrRoamNewApMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 3), MacAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: clcrRoamNewApMacAddress.setStatus('current')\nif mibBuilder.loadTexts: clcrRoamNewApMacAddress.setDescription('This object indicates the mac address of the current AP to which client has roamed to. This AP receives the roam reason report.')\nclcrRoamPrevApMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 4), MacAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: clcrRoamPrevApMacAddress.setStatus('current')\nif mibBuilder.loadTexts: clcrRoamPrevApMacAddress.setDescription('This object indicates the mac address of the previous AP to which client was associated.')\nclcrRoamPrevApChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 5), CLDot11Channel()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: clcrRoamPrevApChannel.setStatus('current')\nif mibBuilder.loadTexts: clcrRoamPrevApChannel.setDescription('This object indicates the channel number at which the client was associated to the previous AP.')\nclcrRoamPrevApSsid = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: clcrRoamPrevApSsid.setStatus('current')\nif mibBuilder.loadTexts: clcrRoamPrevApSsid.setDescription('This object indicates the SSID at which the client was associated to the previous AP.')\nclcrRoamDisassocTimeInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 3, 1, 1, 7), TimeInterval()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: clcrRoamDisassocTimeInterval.setStatus('current')\nif mibBuilder.loadTexts: clcrRoamDisassocTimeInterval.setDescription('This object indicates the time elapsed since the client disassociated, in hundredth of a second.')\nclcrRoamReason = MibTableColumn((1, 3, 6, 1, 4, 
1, 9, 9, 523, 1, 3, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues((\"clcrUnspecified\", 0), (\"clcrPoorLink\", 1), (\"clcrLoadBalancing\", 2), (\"clcrInsufficientCapacity\", 3), (\"clcrDirectedRoam\", 4), (\"clcrFirstAssociation\", 5), (\"clcrRoamingIn\", 6), (\"clcrRoamingOut\", 7), (\"clcrBetterAp\", 8), (\"clcrDisassociated\", 9)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: clcrRoamReason.setStatus('current')\nif mibBuilder.loadTexts: clcrRoamReason.setDescription(\"This object indicates the reason for a client to roam to a new AP. The semantics are as follows. clcrUnspecified - The reason is not known or can't be found. clcrPoorLink - Normal roam due to poor link (excessive retries, too much interference, RSSI too low, etc.) clcrLoadBalancing - Normal roam due to load balancing clcrInsufficientCapacity - Roaming occured due to the insufficient capacity on the previous AP (TSPEC rejected) clcrDirectedRoam - Roaming is directed by the 802.11 wireless Infrastructure clcrFirstAssociation - This is the first association to a particular WLAN clcrRoamingIn - Roaming in from cellular or other WAN clcrRoamingOut - Roaming out to cellular or other WAN clcrBetterAp - Normal roam due to better AP found clcrDisassociated - Deauthenticated or Disassociated from the previous AP.\")\nclcrDot11StatsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1), )\nif mibBuilder.loadTexts: clcrDot11StatsTable.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11StatsTable.setDescription('This table populates the statistics collected when the client roamed in the WLAN. 
There exists a row in this table for each conceptual row in cLApDot11IfTable that represents a dot11 interface of an AP.')\nclcrDot11StatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1, 1), ).setIndexNames((0, \"CISCO-LWAPP-AP-MIB\", \"cLApSysMacAddress\"), (0, \"CISCO-LWAPP-AP-MIB\", \"cLApDot11IfSlotId\"))\nif mibBuilder.loadTexts: clcrDot11StatsEntry.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11StatsEntry.setDescription('Each entry represents a conceptual row in clcrDot11StatsTable and corresponds to the roam reason report sent by a CCX client to the new AP which the client associates to.')\nclcrDot11NeighborRequestRx = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: clcrDot11NeighborRequestRx.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11NeighborRequestRx.setDescription('This object indicates the count of the number of requests received from an E2E client for neighbor updates.')\nclcrDot11NeighborReplySent = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: clcrDot11NeighborReplySent.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11NeighborReplySent.setDescription('This object indicates the count of the number of replies sent to the client in reply to the request for neighbor updates received from the client.')\nclcrDot11RoamReasonReportRx = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1, 1, 3), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: clcrDot11RoamReasonReportRx.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11RoamReasonReportRx.setDescription('This object reports the count of the number of roam reason reports received from CCX clients.')\nclcrDot11BcastUpdatesSent = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 523, 1, 4, 1, 1, 4), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: 
clcrDot11BcastUpdatesSent.setStatus('current')\nif mibBuilder.loadTexts: clcrDot11BcastUpdatesSent.setDescription('This object indicates the count of the number of broadcast neighbor updates sent by an AP.')\nciscoLwappClRoamMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 1))\nciscoLwappClRoamMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2))\nclcrMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 1, 1)).setObjects((\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"ciscoLwappClRoamDot11aRfParamsGroup\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"ciscoLwappClRoamDot11bRfParamsGroup\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"ciscoLwappClRoamroamReasonGroup\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"ciscoLwappClRoamroamingStatsGroup\"))\n\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n clcrMIBCompliance = clcrMIBCompliance.setStatus('deprecated')\nif mibBuilder.loadTexts: clcrMIBCompliance.setDescription('The compliance statement for the SNMP entities that implement the ciscoLwappRoamMIB module.')\nclcrMIBComplianceRev1 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 1, 2)).setObjects((\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"ciscoLwappClRoamDot11aRfParamsGroupSup1\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"ciscoLwappClRoamDot11bRfParamsGroupSup1\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"ciscoLwappClRoamroamReasonGroup\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"ciscoLwappClRoamroamingStatsGroup\"))\n\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n clcrMIBComplianceRev1 = clcrMIBComplianceRev1.setStatus('current')\nif mibBuilder.loadTexts: clcrMIBComplianceRev1.setDescription('The compliance statement for the SNMP entities that implement the ciscoLwappRoamMIB module.')\nciscoLwappClRoamDot11aRfParamsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2, 1)).setObjects((\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11aMode\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11aMinRssi\"), 
(\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11aHysteresis\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11aAdaptiveScanThreshold\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11aTransitionTime\"))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamDot11aRfParamsGroup = ciscoLwappClRoamDot11aRfParamsGroup.setStatus('deprecated')\nif mibBuilder.loadTexts: ciscoLwappClRoamDot11aRfParamsGroup.setDescription('This collection of objects represent the radio parameters for the 802.11a networks.')\nciscoLwappClRoamDot11bRfParamsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2, 2)).setObjects((\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11bMode\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11bMinRssi\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11bHysteresis\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11bAdaptiveScanThreshold\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11bTransitionTime\"))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamDot11bRfParamsGroup = ciscoLwappClRoamDot11bRfParamsGroup.setStatus('deprecated')\nif mibBuilder.loadTexts: ciscoLwappClRoamDot11bRfParamsGroup.setDescription('This collection of objects represent the radio parameters for the 802.11b/g bands.')\nciscoLwappClRoamroamReasonGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2, 3)).setObjects((\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrRoamNewApMacAddress\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrRoamPrevApMacAddress\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrRoamPrevApChannel\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrRoamPrevApSsid\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrRoamDisassocTimeInterval\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrRoamReason\"))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamroamReasonGroup = ciscoLwappClRoamroamReasonGroup.setStatus('current')\nif mibBuilder.loadTexts: ciscoLwappClRoamroamReasonGroup.setDescription('This 
collection of objects provide the reasons for clients roaming between APs.')\nciscoLwappClRoamroamingStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2, 4)).setObjects((\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11NeighborRequestRx\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11NeighborReplySent\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11RoamReasonReportRx\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11BcastUpdatesSent\"))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamroamingStatsGroup = ciscoLwappClRoamroamingStatsGroup.setStatus('current')\nif mibBuilder.loadTexts: ciscoLwappClRoamroamingStatsGroup.setDescription('This collection of objects provide the counters related to roaming.')\nciscoLwappClRoamDot11aRfParamsGroupSup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2, 5)).setObjects((\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11aMode\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11aMinRssiV2\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11aHysteresisV2\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11aAdaptiveScanThresholdV2\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11aTransitionTimeV2\"))\nif getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamDot11aRfParamsGroupSup1 = ciscoLwappClRoamDot11aRfParamsGroupSup1.setStatus('current')\nif mibBuilder.loadTexts: ciscoLwappClRoamDot11aRfParamsGroupSup1.setDescription('This collection of objects represent the radio parameters for the 802.11a networks.')\nciscoLwappClRoamDot11bRfParamsGroupSup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 523, 2, 2, 6)).setObjects((\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11bMode\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11bMinRssiV2\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11bHysteresisV2\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11bAdaptiveScanThresholdV2\"), (\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", \"clcrDot11bTransitionTimeV2\"))\nif getattr(mibBuilder, 
'version', (0, 0, 0)) > (4, 4, 0):\n ciscoLwappClRoamDot11bRfParamsGroupSup1 = ciscoLwappClRoamDot11bRfParamsGroupSup1.setStatus('current')\nif mibBuilder.loadTexts: ciscoLwappClRoamDot11bRfParamsGroupSup1.setDescription('This collection of objects represent the radio parameters for the 802.11b/g bands.')\nmibBuilder.exportSymbols(\"CISCO-LWAPP-CLIENT-ROAMING-MIB\", clcrDot11aMinRssi=clcrDot11aMinRssi, clcrRoamClientMacAddress=clcrRoamClientMacAddress, ciscoLwappClRoamroamingStatsGroup=ciscoLwappClRoamroamingStatsGroup, clcrDot11bTransitionTimeV2=clcrDot11bTransitionTimeV2, clcrRoamNewApMacAddress=clcrRoamNewApMacAddress, clcrMIBCompliance=clcrMIBCompliance, clcrRoamDot11aRfParamConfig=clcrRoamDot11aRfParamConfig, clcrDot11BcastUpdatesSent=clcrDot11BcastUpdatesSent, clcrRoamPrevApSsid=clcrRoamPrevApSsid, clcrMIBComplianceRev1=clcrMIBComplianceRev1, clcrDot11bHysteresisV2=clcrDot11bHysteresisV2, ciscoLwappClRoamMIBConform=ciscoLwappClRoamMIBConform, clcrDot11aTransitionTime=clcrDot11aTransitionTime, clcrDot11aHysteresis=clcrDot11aHysteresis, ciscoLwappClRoamDot11bRfParamsGroupSup1=ciscoLwappClRoamDot11bRfParamsGroupSup1, PYSNMP_MODULE_ID=ciscoLwappClRoamMIB, clcrDot11bHysteresis=clcrDot11bHysteresis, clcrDot11StatsEntry=clcrDot11StatsEntry, clcrRoamDisassocTimeInterval=clcrRoamDisassocTimeInterval, ciscoLwappClRoamDot11aRfParamsGroupSup1=ciscoLwappClRoamDot11aRfParamsGroupSup1, clcrDot11bAdaptiveScanThreshold=clcrDot11bAdaptiveScanThreshold, clcrDot11NeighborRequestRx=clcrDot11NeighborRequestRx, clcrRoamClientTimeStamp=clcrRoamClientTimeStamp, clcrRoamReason=clcrRoamReason, clcrDot11bMode=clcrDot11bMode, clcrDot11aAdaptiveScanThreshold=clcrDot11aAdaptiveScanThreshold, clcrDot11RoamReasonReportRx=clcrDot11RoamReasonReportRx, clcrDot11bAdaptiveScanThresholdV2=clcrDot11bAdaptiveScanThresholdV2, ciscoLwappClRoamDot11bRfParamsGroup=ciscoLwappClRoamDot11bRfParamsGroup, ciscoLwappClRoamMIBNotifs=ciscoLwappClRoamMIBNotifs, 
clcrRoamReasonReportTable=clcrRoamReasonReportTable, clcrDot11aMinRssiV2=clcrDot11aMinRssiV2, ciscoLwappClRoamMIBObjects=ciscoLwappClRoamMIBObjects, clcrDot11NeighborReplySent=clcrDot11NeighborReplySent, clcrDot11aAdaptiveScanThresholdV2=clcrDot11aAdaptiveScanThresholdV2, ciscoLwappClRoamroamReasonGroup=ciscoLwappClRoamroamReasonGroup, clcrDot11StatsTable=clcrDot11StatsTable, clcrRoamDot11Stats=clcrRoamDot11Stats, clcrRoamDot11bRfParamConfig=clcrRoamDot11bRfParamConfig, clcrDot11bMinRssi=clcrDot11bMinRssi, clcrRoamReasonReport=clcrRoamReasonReport, clcrRoamPrevApMacAddress=clcrRoamPrevApMacAddress, ciscoLwappClRoamDot11aRfParamsGroup=ciscoLwappClRoamDot11aRfParamsGroup, clcrRoamReasonReportEntry=clcrRoamReasonReportEntry, ciscoLwappClRoamMIBGroups=ciscoLwappClRoamMIBGroups, clcrDot11bMinRssiV2=clcrDot11bMinRssiV2, ciscoLwappClRoamMIBCompliances=ciscoLwappClRoamMIBCompliances, clcrDot11aMode=clcrDot11aMode, clcrDot11aTransitionTimeV2=clcrDot11aTransitionTimeV2, clcrRoamPrevApChannel=clcrRoamPrevApChannel, clcrDot11bTransitionTime=clcrDot11bTransitionTime, ciscoLwappClRoamMIB=ciscoLwappClRoamMIB, clcrDot11aHysteresisV2=clcrDot11aHysteresisV2)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# A list of functions/classes to be imported when a user does `from swarmpose import *`.
__all__ = ['Swarmpose']
|
normal
|
{
"blob_id": "e375501e6b815530e61af9181d4cade83d4588ca",
"index": 8762,
"step-1": "<mask token>\n",
"step-2": "__all__ = ['Swarmpose']\n",
"step-3": "#a list of functions/Classes to be inported when a user imports * from swarmpose\n__all__ = ['Swarmpose']",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# Generated by Django 3.1.2 on 2020-10-21 21:00
from django.db import migrations, models
class Migration(migrations.Migration):
    """Retune ``Endpoint`` model fields after migration 0002.

    Gives ``frequency_in_minutes`` and ``status_code`` float defaults, and
    lets ``last_check``, ``response_text`` and ``test_pattern`` be blank/null
    so endpoints can be registered before their first check runs.
    """

    # Must be applied after the previous auto-generated monitoring migration.
    dependencies = [
        ('monitoring', '0002_auto_20201021_0027'),
    ]

    operations = [
        migrations.AlterField(
            model_name='endpoint',
            name='frequency_in_minutes',
            field=models.FloatField(default=30),
        ),
        migrations.AlterField(
            model_name='endpoint',
            name='last_check',
            field=models.DateTimeField(blank=True, default=None, null=True),
        ),
        migrations.AlterField(
            model_name='endpoint',
            name='response_text',
            field=models.TextField(blank=True, default=None, null=True),
        ),
        migrations.AlterField(
            model_name='endpoint',
            name='status_code',
            field=models.FloatField(default=200),
        ),
        migrations.AlterField(
            model_name='endpoint',
            name='test_pattern',
            field=models.CharField(blank=True,
                                   default=None,
                                   help_text='If left blank sys will only ping',
                                   max_length=100,
                                   null=True),
        ),
    ]
|
normal
|
{
"blob_id": "20f56ff484321a7d623cead4315e5a6b3b0653a7",
"index": 2720,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('monitoring', '0002_auto_20201021_0027')]\n operations = [migrations.AlterField(model_name='endpoint', name=\n 'frequency_in_minutes', field=models.FloatField(default=30)),\n migrations.AlterField(model_name='endpoint', name='last_check',\n field=models.DateTimeField(blank=True, default=None, null=True)),\n migrations.AlterField(model_name='endpoint', name='response_text',\n field=models.TextField(blank=True, default=None, null=True)),\n migrations.AlterField(model_name='endpoint', name='status_code',\n field=models.FloatField(default=200)), migrations.AlterField(\n model_name='endpoint', name='test_pattern', field=models.CharField(\n blank=True, default=None, help_text=\n 'If left blank sys will only ping', max_length=100, null=True))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('monitoring', '0002_auto_20201021_0027')]\n operations = [migrations.AlterField(model_name='endpoint', name=\n 'frequency_in_minutes', field=models.FloatField(default=30)),\n migrations.AlterField(model_name='endpoint', name='last_check',\n field=models.DateTimeField(blank=True, default=None, null=True)),\n migrations.AlterField(model_name='endpoint', name='response_text',\n field=models.TextField(blank=True, default=None, null=True)),\n migrations.AlterField(model_name='endpoint', name='status_code',\n field=models.FloatField(default=200)), migrations.AlterField(\n model_name='endpoint', name='test_pattern', field=models.CharField(\n blank=True, default=None, help_text=\n 'If left blank sys will only ping', max_length=100, null=True))]\n",
"step-5": "# Generated by Django 3.1.2 on 2020-10-21 21:00\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('monitoring', '0002_auto_20201021_0027'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='endpoint',\n name='frequency_in_minutes',\n field=models.FloatField(default=30),\n ),\n migrations.AlterField(\n model_name='endpoint',\n name='last_check',\n field=models.DateTimeField(blank=True, default=None, null=True),\n ),\n migrations.AlterField(\n model_name='endpoint',\n name='response_text',\n field=models.TextField(blank=True, default=None, null=True),\n ),\n migrations.AlterField(\n model_name='endpoint',\n name='status_code',\n field=models.FloatField(default=200),\n ),\n migrations.AlterField(\n model_name='endpoint',\n name='test_pattern',\n field=models.CharField(blank=True,\n default=None,\n help_text='If left blank sys will only ping',\n max_length=100,\n null=True),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
@app.route('/')
def hello_world():
return '你好'
@app.route('/test1/<name>')
def test1(name):
return '你好,%s' % name
@app.route('/test2/<int:id>')
def test2(id):
return '你好,%d' % id
<|reserved_special_token_0|>
@app.route('/test/register')
def register():
return render_template('test/register.html')
@app.route('/result', methods=['POST', 'GET'])
def result():
if request.method == 'POST':
result = request.form
return render_template('test/result.html', result=result)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.route('/')
def hello_world():
return '你好'
@app.route('/test1/<name>')
def test1(name):
return '你好,%s' % name
@app.route('/test2/<int:id>')
def test2(id):
return '你好,%d' % id
@app.route('/index1')
def index2():
time = datetime.date.today()
name = ['小新', '小英', '小红']
task = {'任务': '打扫卫生', '时间': '3小时'}
return render_template('index.html', var=time, list=name, task=task)
@app.route('/test/register')
def register():
return render_template('test/register.html')
@app.route('/result', methods=['POST', 'GET'])
def result():
if request.method == 'POST':
result = request.form
return render_template('test/result.html', result=result)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.route('/')
def hello_world():
return '你好'
@app.route('/test1/<name>')
def test1(name):
return '你好,%s' % name
@app.route('/test2/<int:id>')
def test2(id):
return '你好,%d' % id
@app.route('/index1')
def index2():
time = datetime.date.today()
name = ['小新', '小英', '小红']
task = {'任务': '打扫卫生', '时间': '3小时'}
return render_template('index.html', var=time, list=name, task=task)
@app.route('/test/register')
def register():
return render_template('test/register.html')
@app.route('/result', methods=['POST', 'GET'])
def result():
if request.method == 'POST':
result = request.form
return render_template('test/result.html', result=result)
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
import time
import datetime
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def hello_world():
return '你好'
@app.route('/test1/<name>')
def test1(name):
return '你好,%s' % name
@app.route('/test2/<int:id>')
def test2(id):
return '你好,%d' % id
@app.route('/index1')
def index2():
time = datetime.date.today()
name = ['小新', '小英', '小红']
task = {'任务': '打扫卫生', '时间': '3小时'}
return render_template('index.html', var=time, list=name, task=task)
@app.route('/test/register')
def register():
return render_template('test/register.html')
@app.route('/result', methods=['POST', 'GET'])
def result():
if request.method == 'POST':
result = request.form
return render_template('test/result.html', result=result)
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# @Time    : 2020/6/26 11:02
# @Author  : Ella
# @File    : app.py
# @Software: PyCharm
import time
import datetime

from flask import Flask,render_template,request #render_template renders Jinja2 templates
app = Flask(__name__) #the WSGI application object

#Route resolution: match the path the user visits to the corresponding view function.
@app.route('/')
def hello_world():
    return '你好'

#Capture a string parameter from the URL path.
@app.route('/test1/<name>')
def test1(name):
    return '你好,%s'%name

#Capture an integer parameter from the URL path (a float converter also exists).
@app.route('/test2/<int:id>')
def test2(id):
    return '你好,%d'%id

#Return a rendered page to the user (earlier version, kept for reference):
# @app.route('/index1')
# def index1():
#     return render_template("index.html")

#Pass variables of several kinds into the template.
@app.route('/index1')
def index2():
    time = datetime.date.today() #plain scalar value
    name = ['小新','小英','小红'] #list
    task = {"任务":"打扫卫生","时间":"3小时"} #dict
    return render_template("index.html",var = time,list = name,task = task)

#Form submission page.
@app.route('/test/register')
def register():
    return render_template("test/register.html")

#Route that receives the submitted form; methods must include POST.
@app.route('/result',methods = ['POST','GET'])
def result():
    if request.method == 'POST':
        result = request.form
        return render_template("test/result.html",result = result)

if __name__ == '__main__':
    app.run(debug=True)
|
flexible
|
{
"blob_id": "d68bd9c90a106a9eac767607ad77bdd84d0f18d2",
"index": 1006,
"step-1": "<mask token>\n\n\[email protected]('/')\ndef hello_world():\n return '你好'\n\n\[email protected]('/test1/<name>')\ndef test1(name):\n return '你好,%s' % name\n\n\[email protected]('/test2/<int:id>')\ndef test2(id):\n return '你好,%d' % id\n\n\n<mask token>\n\n\[email protected]('/test/register')\ndef register():\n return render_template('test/register.html')\n\n\[email protected]('/result', methods=['POST', 'GET'])\ndef result():\n if request.method == 'POST':\n result = request.form\n return render_template('test/result.html', result=result)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/')\ndef hello_world():\n return '你好'\n\n\[email protected]('/test1/<name>')\ndef test1(name):\n return '你好,%s' % name\n\n\[email protected]('/test2/<int:id>')\ndef test2(id):\n return '你好,%d' % id\n\n\[email protected]('/index1')\ndef index2():\n time = datetime.date.today()\n name = ['小新', '小英', '小红']\n task = {'任务': '打扫卫生', '时间': '3小时'}\n return render_template('index.html', var=time, list=name, task=task)\n\n\[email protected]('/test/register')\ndef register():\n return render_template('test/register.html')\n\n\[email protected]('/result', methods=['POST', 'GET'])\ndef result():\n if request.method == 'POST':\n result = request.form\n return render_template('test/result.html', result=result)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\[email protected]('/')\ndef hello_world():\n return '你好'\n\n\[email protected]('/test1/<name>')\ndef test1(name):\n return '你好,%s' % name\n\n\[email protected]('/test2/<int:id>')\ndef test2(id):\n return '你好,%d' % id\n\n\[email protected]('/index1')\ndef index2():\n time = datetime.date.today()\n name = ['小新', '小英', '小红']\n task = {'任务': '打扫卫生', '时间': '3小时'}\n return render_template('index.html', var=time, list=name, task=task)\n\n\[email protected]('/test/register')\ndef register():\n return render_template('test/register.html')\n\n\[email protected]('/result', methods=['POST', 'GET'])\ndef result():\n if request.method == 'POST':\n result = request.form\n return render_template('test/result.html', result=result)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "import time\nimport datetime\nfrom flask import Flask, render_template, request\napp = Flask(__name__)\n\n\[email protected]('/')\ndef hello_world():\n return '你好'\n\n\[email protected]('/test1/<name>')\ndef test1(name):\n return '你好,%s' % name\n\n\[email protected]('/test2/<int:id>')\ndef test2(id):\n return '你好,%d' % id\n\n\[email protected]('/index1')\ndef index2():\n time = datetime.date.today()\n name = ['小新', '小英', '小红']\n task = {'任务': '打扫卫生', '时间': '3小时'}\n return render_template('index.html', var=time, list=name, task=task)\n\n\[email protected]('/test/register')\ndef register():\n return render_template('test/register.html')\n\n\[email protected]('/result', methods=['POST', 'GET'])\ndef result():\n if request.method == 'POST':\n result = request.form\n return render_template('test/result.html', result=result)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "#-*- coding = utf-8-*-\n#@Time : 2020/6/26 11:02\n#@Author :Ella\n#@File :app.py\n#@Software : PyCharm\n\nimport time\nimport datetime\n\nfrom flask import Flask,render_template,request #render_template渲染模板\napp = Flask(__name__) #初始化的对象\n\n#路由解析,通过用户访问的路径,匹配想要的函数\[email protected]('/')\ndef hello_world():\n return '你好'\n\n#通过访问路径,获取用户的字符串参数\[email protected]('/test1/<name>')\ndef test1(name):\n return '你好,%s'%name\n\n#通过访问路径,获取用户的整形参数 此外,还有float类型\[email protected]('/test2/<int:id>')\ndef test2(id):\n return '你好,%d'%id\n\n#返回给用户渲染后的网页文件\n# @app.route('/index1')\n# def index1():\n# return render_template(\"index.html\")\n\n#向页面传递变量\[email protected]('/index1')\ndef index2():\n time = datetime.date.today() #普通变量\n name = ['小新','小英','小红'] #列表类型\n task = {\"任务\":\"打扫卫生\",\"时间\":\"3小时\"} #字典类型\n return render_template(\"index.html\",var = time,list = name,task = task)\n\n#表单提交\[email protected]('/test/register')\ndef register():\n return render_template(\"test/register.html\")\n\n#接受表单提交的路由,需要指定methods为post\[email protected]('/result',methods = ['POST','GET'])\ndef result():\n if request.method == 'POST':\n result = request.form\n return render_template(\"test/result.html\",result = result)\n\nif __name__ == '__main__':\n app.run(debug=True)",
"step-ids": [
5,
6,
7,
9,
10
]
}
|
[
5,
6,
7,
9,
10
] |
import abc
import hashlib
import hmac
from typing import Any, Dict
from urllib.parse import urlencode
class IceCubedClientABC(abc.ABC):
    """Abstract interface shared by ICE3X API clients."""

    @property
    @abc.abstractmethod
    def _has_auth_details(self) -> bool:
        """Whether both API credentials are present."""

    @abc.abstractmethod
    def sign(self, params: Dict[str, Any]) -> str:
        """Produce an authentication signature for *params*."""


class IceCubedClientBase(IceCubedClientABC):
    """Common plumbing (credential storage and request signing) for ICE3X clients."""

    BASE_URI = "https://ice3x.com/api/v1/"

    def __init__(self, api_key: str = None, secret: str = None) -> None:
        """Instantiate the client.

        Args:
            api_key: An ICE3X public API key.
            secret: An ICE3X private API key.
        """
        self.api_key = api_key
        self.secret = secret

    @property
    def _has_auth_details(self) -> bool:
        """Return True when both the API key and the secret have been provided."""
        return self.secret is not None and self.api_key is not None

    def sign(self, params: Dict[str, Any]) -> str:
        """Sign a dict of query params for private API calls.

        Args:
            params: A dict of query params.

        Returns:
            A sha512-signed payload as a hex digest.
        """
        assert self.secret is not None, "A client secret is required to sign requests."

        payload = urlencode(params).encode()
        digest = hmac.new(self.secret.encode(), payload, hashlib.sha512)

        return digest.hexdigest()
|
normal
|
{
"blob_id": "8bd918896fb72c89a622ba4e18666bb90755cafd",
"index": 4545,
"step-1": "<mask token>\n\n\nclass IceCubedClientBase(IceCubedClientABC):\n BASE_URI = 'https://ice3x.com/api/v1/'\n\n def __init__(self, api_key: str=None, secret: str=None) ->None:\n \"\"\"Instantiate the client\n\n Args:\n api_key: An ICE3X public API key\n secret: An ICE3X private API key\n \"\"\"\n self.api_key = api_key\n self.secret = secret\n\n @property\n def _has_auth_details(self) ->bool:\n \"\"\"Internal helper function which checks that an API key and secret have been provided\"\"\"\n return all([self.secret is not None, self.api_key is not None])\n\n def sign(self, params: Dict[str, Any]) ->str:\n \"\"\"Sign a dict of query params for private API calls\n\n Args:\n params: A dict of query params\n\n Returns:\n A sha512 signed payload\n \"\"\"\n assert self.secret is not None, 'A client secret is required to sign requests.'\n query = urlencode(params)\n signature = hmac.new(self.secret.encode(), query.encode(), hashlib.\n sha512)\n return signature.hexdigest()\n",
"step-2": "<mask token>\n\n\nclass IceCubedClientABC(abc.ABC):\n <mask token>\n <mask token>\n\n\nclass IceCubedClientBase(IceCubedClientABC):\n BASE_URI = 'https://ice3x.com/api/v1/'\n\n def __init__(self, api_key: str=None, secret: str=None) ->None:\n \"\"\"Instantiate the client\n\n Args:\n api_key: An ICE3X public API key\n secret: An ICE3X private API key\n \"\"\"\n self.api_key = api_key\n self.secret = secret\n\n @property\n def _has_auth_details(self) ->bool:\n \"\"\"Internal helper function which checks that an API key and secret have been provided\"\"\"\n return all([self.secret is not None, self.api_key is not None])\n\n def sign(self, params: Dict[str, Any]) ->str:\n \"\"\"Sign a dict of query params for private API calls\n\n Args:\n params: A dict of query params\n\n Returns:\n A sha512 signed payload\n \"\"\"\n assert self.secret is not None, 'A client secret is required to sign requests.'\n query = urlencode(params)\n signature = hmac.new(self.secret.encode(), query.encode(), hashlib.\n sha512)\n return signature.hexdigest()\n",
"step-3": "<mask token>\n\n\nclass IceCubedClientABC(abc.ABC):\n <mask token>\n\n @abc.abstractmethod\n def sign(self, params: Dict[str, Any]) ->str:\n pass\n\n\nclass IceCubedClientBase(IceCubedClientABC):\n BASE_URI = 'https://ice3x.com/api/v1/'\n\n def __init__(self, api_key: str=None, secret: str=None) ->None:\n \"\"\"Instantiate the client\n\n Args:\n api_key: An ICE3X public API key\n secret: An ICE3X private API key\n \"\"\"\n self.api_key = api_key\n self.secret = secret\n\n @property\n def _has_auth_details(self) ->bool:\n \"\"\"Internal helper function which checks that an API key and secret have been provided\"\"\"\n return all([self.secret is not None, self.api_key is not None])\n\n def sign(self, params: Dict[str, Any]) ->str:\n \"\"\"Sign a dict of query params for private API calls\n\n Args:\n params: A dict of query params\n\n Returns:\n A sha512 signed payload\n \"\"\"\n assert self.secret is not None, 'A client secret is required to sign requests.'\n query = urlencode(params)\n signature = hmac.new(self.secret.encode(), query.encode(), hashlib.\n sha512)\n return signature.hexdigest()\n",
"step-4": "import abc\nimport hashlib\nimport hmac\nfrom typing import Any, Dict\nfrom urllib.parse import urlencode\n\n\nclass IceCubedClientABC(abc.ABC):\n\n @abc.abstractproperty\n def _has_auth_details(self) ->bool:\n pass\n\n @abc.abstractmethod\n def sign(self, params: Dict[str, Any]) ->str:\n pass\n\n\nclass IceCubedClientBase(IceCubedClientABC):\n BASE_URI = 'https://ice3x.com/api/v1/'\n\n def __init__(self, api_key: str=None, secret: str=None) ->None:\n \"\"\"Instantiate the client\n\n Args:\n api_key: An ICE3X public API key\n secret: An ICE3X private API key\n \"\"\"\n self.api_key = api_key\n self.secret = secret\n\n @property\n def _has_auth_details(self) ->bool:\n \"\"\"Internal helper function which checks that an API key and secret have been provided\"\"\"\n return all([self.secret is not None, self.api_key is not None])\n\n def sign(self, params: Dict[str, Any]) ->str:\n \"\"\"Sign a dict of query params for private API calls\n\n Args:\n params: A dict of query params\n\n Returns:\n A sha512 signed payload\n \"\"\"\n assert self.secret is not None, 'A client secret is required to sign requests.'\n query = urlencode(params)\n signature = hmac.new(self.secret.encode(), query.encode(), hashlib.\n sha512)\n return signature.hexdigest()\n",
"step-5": "import abc\nimport hashlib\nimport hmac\nfrom typing import Any, Dict\nfrom urllib.parse import urlencode\n\n\nclass IceCubedClientABC(abc.ABC):\n @abc.abstractproperty\n def _has_auth_details(self) -> bool:\n pass\n\n @abc.abstractmethod\n def sign(self, params: Dict[str, Any]) -> str:\n pass\n\n\nclass IceCubedClientBase(IceCubedClientABC):\n BASE_URI = \"https://ice3x.com/api/v1/\"\n\n def __init__(self, api_key: str = None, secret: str = None) -> None:\n \"\"\"Instantiate the client\n\n Args:\n api_key: An ICE3X public API key\n secret: An ICE3X private API key\n \"\"\"\n\n self.api_key = api_key\n self.secret = secret\n\n @property\n def _has_auth_details(self) -> bool:\n \"\"\"Internal helper function which checks that an API key and secret have been provided\"\"\"\n\n return all([self.secret is not None, self.api_key is not None])\n\n def sign(self, params: Dict[str, Any]) -> str:\n \"\"\"Sign a dict of query params for private API calls\n\n Args:\n params: A dict of query params\n\n Returns:\n A sha512 signed payload\n \"\"\"\n\n assert self.secret is not None, \"A client secret is required to sign requests.\"\n\n query = urlencode(params)\n signature = hmac.new(self.secret.encode(), query.encode(), hashlib.sha512)\n\n return signature.hexdigest()\n",
"step-ids": [
5,
6,
7,
9,
10
]
}
|
[
5,
6,
7,
9,
10
] |
# To add a new cell, type '# %%'
# To add a new markdown cell, type '# %% [markdown]'
# %% [markdown]
# ### Bài tập 1.
# - <ins>Yêu cầu</ins>: Ý tưởng cơ bản của thuật toán ``Support Vector Machine`` (``SVM``) là gì? Ý tưởng của thuật toán biên mềm (``soft margin``) ``SVM``. Nêu ý nghĩa của siêu tham số ``C`` trong bài toán cực tiểu hàm mất mát.
#
# 1. Ý tưởng cơ bản của SVM là đưa toàn bộ dataset vào không gian nhiều chiều (n chiều), từ đó tìm ra mặt phẳng thích hợp nhất (hyperplane) để phân chia
# 2. Support Vector Machine thuần (hard margin) thì gặp hai vấn đề chính đó là nó chỉ hoạt động trên dataset ``Linearly Separable`` và thứ 2 đó là nó khá nhạy cảm với biến nhiễu (sensitive to noise). Để tránh vấn đề này, chúng ta cần sử dụng một mô hình linh hoạt
# hơn. Nhiệm vụ của nó là tìm được mặt phẳng vẫn phân loại tốt nhưng chấp nhận sai lệch ở một mức độ chấp nhận được.
# 3. Tham số `C` là hằng số dương giúp cân đối độ lớn của margin và sự hy sinh của các điểm nằm trong vùng không an toàn. Khi $C = \infty $ hoặc rất lớn, Soft Margin SVM trở thành Hard Margin SVM.
# %% [markdown]
# ### Bài tập 2.
# - <ins>Yêu cầu</ins>: Sử dụng mô hình ``SVM`` thuộc thư viện ``sklearn`` để xây dựng mô hình phân loại dựa trên tập dữ liệu huấn luyện ``X_train``, ``y_train``. Hãy nhận xét về tỉ lệ nhãn ``0`` và ``1`` trong bộ dữ liệu đã cho như đoạn code bên dưới. Hãy thử thay đổi giá trị của tham số ``C`` và nhận xét các độ đo ``Recall``, ``Precision``, ``F1-score``, và ``Accuracy`` của mô hình thu được trên tập dữ liệu kiểm tra ``X_test``, ``y_test``.
# - Nguồn tham khảo dữ liệu ``thyroid_sick.csv``: https://archive.ics.uci.edu/ml/datasets/Thyroid+Disease
# %%
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn import preprocessing
from sklearn.metrics import classification_report, confusion_matrix, accuracy_score
import itertools
import numpy as np

# %%
# Load the thyroid-disease dataset; every column except 'classes' is a feature
# and 'classes' is the binary target.
df = pd.read_csv('thyroid_sick.csv')
X = df[[column_name for column_name in df.columns if column_name != 'classes']]
y = df[['classes']]
# Standardize features to zero mean / unit variance: SVMs are scale sensitive.
X = preprocessing.StandardScaler().fit(X).transform(X.astype(float))
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1, random_state=42)

# %%
# Label distribution per class (inspect the output to judge the 0/1 balance).
df.pivot_table(index=['classes'], aggfunc='size')

# %%
from sklearn import svm

# Sweep the soft-margin penalty C and record test accuracy for each candidate.
C_parameter = 10.5
C_values = np.arange(0.5, C_parameter, 0.5)
mean_acc = []
for n in C_values:
    # Train model and predict (ravel the single-column target to a 1-D array).
    clf = svm.SVC(C=n).fit(X_train, y_train.values.ravel())
    yhat = clf.predict(X_test)
    cnf_matrix = confusion_matrix(y_test, yhat)
    mean_acc.append(float(accuracy_score(y_test, yhat)))
    print("Result with C = " + str(n))
    np.set_printoptions(precision=2)
    print(classification_report(y_test, yhat))

# Report the C that actually achieved the best accuracy.  (The previous code
# printed the loop's *last* C value regardless of which one was best.)
best_idx = int(np.argmax(mean_acc))
print("The best accuracy was with", mean_acc[best_idx], "with C=", C_values[best_idx])
# %% [markdown]
# ### Bài tập 3.
# - <ins>Yêu cầu</ins>: Ý tưởng của hàm ``kernel`` $K(\dots, \dots)$ là gì? Khi nào chúng ta áp dụng hàm ``kernel``? Chúng ta có cần biết biểu thức của hàm $\Phi(x)$ không?
# 1. Kernel SVM là việc đi tìm một hàm số biến đổi dữ liệu $x$ từ không gian feature ban đầu thành dữ liệu trong một không gian mới bằng hàm số $\Phi(\mathbf{x})$. Hàm số này cần thoả mãn mục đích đó là tronng không gian mới, dữ liệu giữa hai classes là phân biệt tuyến tính hoặc gần như phần biệt tuyến tính.
# 2. Chúng ta áp dụng hàm ``kernel`` khi dữ liệu không phân biệt tuyến tính, Với dữ liệu gần phân biệt tuyến tính, linear và poly kernels cho kết quả tốt hơn.
# 3.
# %% [markdown]
# ### Bài tập 4.
# - <ins>Yêu cầu</ins>: Cho điểm dữ liệu trong không gian hai chiều $x = [x_1, x_2]^T$ và hàm biến đổi sang không gian năm chiều $\Phi(x) = [1, \sqrt{2}x_1, \sqrt{2}x_2, x_1^2, \sqrt{2}x_1x_2, x_2^2]^T$. Hãy tính hàm ``kernel`` $K(a, b)$.
#
# \begin{eqnarray}
# \Phi(\mathbf{x})^T\Phi(\mathbf{z}) &=& [1, \sqrt{2} x_1, \sqrt{2} x_2, x_1^2, \sqrt{2} x_1x_2, x_2^2] [1, \sqrt{2} z_1, \sqrt{2} z_2, z_1^2, \sqrt{2} z_1z_2, z_2^2]^T \\
# &=& 1 + 2x_1z_1 + 2x_2z_2 + x_1^2x_2^2 + 2x_1z_1x_2z_2 + x_2^2z_2^2 \\
# &=& (1 + x_1z_1 + x_2z_2)^2 = (1 + \mathbf{x}^T\mathbf{z})^2 = k(\mathbf{x}, \mathbf{z})
# \end{eqnarray}
# %% [markdown]
# ### Bài tập 5.
# - <ins>Yêu cầu</ins>: Giả sử bạn dùng bộ phân loại ``SVM`` với hàm ``kernel`` (radial basis function) ``RBF`` cho tập huấn luyện và thấy mô hình phân loại chưa tốt. Để cải thiện, bạn sẽ giảm hay tăng tham số $\gamma$ trong công thức hàm ``kernel``, tham số ``C`` trong hàm mất mát.
# %% [markdown]
# ### Bài tập 6. (Exercise 9 trang 174, Chapter 5: Support Vector Machines)
# - <ins>Yêu cầu</ins>: Huấn luyện một bộ phân lớp ``SVM`` dựa trên bộ dữ liệu ``MNIST`` (dùng để phân loại hình ảnh các ký tự số có cùng kích thước). Bởi vì bộ phân loại ``SVM`` là bộ phân lớp nhị phân, chúng ta sẽ cần sử dụng chiến thuật ``one-versus-the-rest`` để phân loại tất cả ``10`` ký tự số (trong thực tế chúng ta chỉ dùng chiến thuật ``one-versus-one`` trong các trường hợp dữ liệu nhỏ). Bạn hãy báo cáo độ chính xác (``accuracy``) của mô hình đã huấn luyện trên tập test.
# %%
import numpy as np
from sklearn.svm import LinearSVC
from sklearn.datasets import fetch_openml
from sklearn.metrics import accuracy_score
from sklearn.preprocessing import StandardScaler

# Download MNIST: 70,000 28x28 digit images flattened to 784 features.
mnist = fetch_openml('mnist_784', version=1, cache=True)

# %%
X = mnist["data"]
y = mnist["target"].astype(np.uint8)
# MNIST ships pre-shuffled with the conventional 60k/10k train/test split.
X_train = X[:60000]
y_train = y[:60000]
X_test = X[60000:]
y_test = y[60000:]

# LinearSVC handles the 10 classes with a one-versus-rest strategy by default.
lin_clf = LinearSVC(random_state=42)
lin_clf.fit(X_train, y_train)
y_pred = lin_clf.predict(X_train)
accuracy_score(y_train, y_pred)

# %%
y_test_predict = lin_clf.predict(X_test)
accuracy_score(y_test, y_test_predict)

# %%
Scaler = StandardScaler()
X_train_scaled = Scaler.fit_transform(X_train.astype(np.float32))
# Only *transform* the test set: fitting the scaler on test data would leak
# test-set statistics into preprocessing.
X_test_scaled = Scaler.transform(X_test.astype(np.float32))

# %%
# Retrain on the scaled features so training and prediction use the same
# representation (previously the model was refit on unscaled data but then
# asked to predict scaled data).
lin_clf = LinearSVC(random_state=42)
lin_clf.fit(X_train_scaled, y_train)
y_pred = lin_clf.predict(X_train_scaled)
accuracy_score(y_train, y_pred)

# %%
y_test_predict = lin_clf.predict(X_test_scaled)
accuracy_score(y_test, y_test_predict)

from sklearn.model_selection import RandomizedSearchCV
from scipy.stats import reciprocal, uniform
from sklearn.svm import SVC

# Randomized search over an RBF-kernel SVC on a small subsample (a kernel SVM
# on the full 60k training set would be far too slow).
svm_clf = SVC(random_state=42)
param_distributions = {"gamma": reciprocal(0.001, 0.1), "C": uniform(1, 10)}
rnd_search_cv = RandomizedSearchCV(svm_clf, param_distributions, n_iter=10, verbose=2, cv=3)
rnd_search_cv.fit(X_train_scaled[:1000], y_train[:1000])
# %% [markdown]
# ### Bài tập 7. (Exercise 10 trang 174, Chapter 5: Support Vector Machines)
# - <ins>Yêu cầu</ins>: Hãy huấn luyện một mô hình hồi quy tuyến tính với dữ liệu giá nhà ``California housing dataset``.
# %%
from sklearn.datasets import fetch_california_housing
from sklearn.model_selection import train_test_split

# Load the California housing regression dataset and hold out 20% for testing.
housing = fetch_california_housing()
X, y = housing["data"], housing["target"]
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42
)
|
normal
|
{
"blob_id": "1b1b646a75fe2ff8d54e66d025b60bde0c9ed2d6",
"index": 9361,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndf.pivot_table(index=['classes'], aggfunc='size')\n<mask token>\nfor n in np.arange(0.5, C_parameter, 0.5):\n clf = svm.SVC(C=n).fit(X_train, y_train)\n yhat = clf.predict(X_test)\n cnf_matrix = confusion_matrix(y_test, yhat)\n mean_acc.append(float(accuracy_score(y_test, yhat)))\n print('Result with C = ' + str(n))\n np.set_printoptions(precision=2)\n print(classification_report(y_test, yhat))\nprint('The best accuracy was with', max(mean_acc), 'with C=', n)\n<mask token>\nlin_clf.fit(X_train, y_train)\n<mask token>\naccuracy_score(y_train, y_pred)\n<mask token>\naccuracy_score(y_test, y_test_predict)\n<mask token>\nlin_clf.fit(X_train, y_train)\n<mask token>\naccuracy_score(y_train, y_pred)\n<mask token>\naccuracy_score(y_test, y_test_predict)\n<mask token>\nrnd_search_cv.fit(X_train_scaled[:1000], y_train[:1000])\n<mask token>\n",
"step-3": "<mask token>\ndf = pd.read_csv('thyroid_sick.csv')\nX = df[[column_name for column_name in df.columns if column_name != 'classes']]\ny = df[['classes']]\nX = preprocessing.StandardScaler().fit(X).transform(X.astype(float))\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1,\n random_state=42)\ndf.pivot_table(index=['classes'], aggfunc='size')\n<mask token>\nC_parameter = 10.5\nmean_acc = []\nfor n in np.arange(0.5, C_parameter, 0.5):\n clf = svm.SVC(C=n).fit(X_train, y_train)\n yhat = clf.predict(X_test)\n cnf_matrix = confusion_matrix(y_test, yhat)\n mean_acc.append(float(accuracy_score(y_test, yhat)))\n print('Result with C = ' + str(n))\n np.set_printoptions(precision=2)\n print(classification_report(y_test, yhat))\nprint('The best accuracy was with', max(mean_acc), 'with C=', n)\n<mask token>\nmnist = fetch_openml('mnist_784', version=1, cache=True)\nX = mnist['data']\ny = mnist['target'].astype(np.uint8)\nX_train = X[:60000]\ny_train = y[:60000]\nX_test = X[60000:]\ny_test = y[60000:]\nlin_clf = LinearSVC(random_state=42)\nlin_clf.fit(X_train, y_train)\ny_pred = lin_clf.predict(X_train)\naccuracy_score(y_train, y_pred)\ny_test_predict = lin_clf.predict(X_test)\naccuracy_score(y_test, y_test_predict)\nScaler = StandardScaler()\nX_train_scaled = Scaler.fit_transform(X_train.astype(np.float32))\nX_test_scaled = Scaler.fit_transform(X_test.astype(np.float32))\nlin_clf = LinearSVC(random_state=42)\nlin_clf.fit(X_train, y_train)\ny_pred = lin_clf.predict(X_train)\naccuracy_score(y_train, y_pred)\ny_test_predict = lin_clf.predict(X_test_scaled)\naccuracy_score(y_test, y_test_predict)\n<mask token>\nparam_distributions = {'gamma': reciprocal(0.001, 0.1), 'C': uniform(1, 10)}\nrnd_search_cv = RandomizedSearchCV(svm_clf, param_distributions, n_iter=10,\n verbose=2, cv=3)\nrnd_search_cv.fit(X_train_scaled[:1000], y_train[:1000])\n<mask token>\nhousing = fetch_california_housing()\nX = housing['data']\ny = housing['target']\nX_train, 
X_test, y_train, y_test = train_test_split(X, y, test_size=0.2,\n random_state=42)\n",
"step-4": "import pandas as pd\nfrom sklearn.model_selection import train_test_split\nfrom sklearn import preprocessing\nfrom sklearn.metrics import classification_report, confusion_matrix, accuracy_score\nimport itertools\nimport numpy as np\nfrom sklearn import preprocessing\ndf = pd.read_csv('thyroid_sick.csv')\nX = df[[column_name for column_name in df.columns if column_name != 'classes']]\ny = df[['classes']]\nX = preprocessing.StandardScaler().fit(X).transform(X.astype(float))\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1,\n random_state=42)\ndf.pivot_table(index=['classes'], aggfunc='size')\nfrom sklearn import svm\nC_parameter = 10.5\nmean_acc = []\nfor n in np.arange(0.5, C_parameter, 0.5):\n clf = svm.SVC(C=n).fit(X_train, y_train)\n yhat = clf.predict(X_test)\n cnf_matrix = confusion_matrix(y_test, yhat)\n mean_acc.append(float(accuracy_score(y_test, yhat)))\n print('Result with C = ' + str(n))\n np.set_printoptions(precision=2)\n print(classification_report(y_test, yhat))\nprint('The best accuracy was with', max(mean_acc), 'with C=', n)\nimport numpy as np\nfrom sklearn.svm import LinearSVC\nfrom sklearn.datasets import fetch_openml\nfrom sklearn.metrics import accuracy_score\nfrom sklearn.preprocessing import StandardScaler\nmnist = fetch_openml('mnist_784', version=1, cache=True)\nX = mnist['data']\ny = mnist['target'].astype(np.uint8)\nX_train = X[:60000]\ny_train = y[:60000]\nX_test = X[60000:]\ny_test = y[60000:]\nlin_clf = LinearSVC(random_state=42)\nlin_clf.fit(X_train, y_train)\ny_pred = lin_clf.predict(X_train)\naccuracy_score(y_train, y_pred)\ny_test_predict = lin_clf.predict(X_test)\naccuracy_score(y_test, y_test_predict)\nScaler = StandardScaler()\nX_train_scaled = Scaler.fit_transform(X_train.astype(np.float32))\nX_test_scaled = Scaler.fit_transform(X_test.astype(np.float32))\nlin_clf = LinearSVC(random_state=42)\nlin_clf.fit(X_train, y_train)\ny_pred = lin_clf.predict(X_train)\naccuracy_score(y_train, 
y_pred)\ny_test_predict = lin_clf.predict(X_test_scaled)\naccuracy_score(y_test, y_test_predict)\nfrom sklearn.model_selection import RandomizedSearchCV\nfrom scipy.stats import reciproca, uniform\nfrom sklearn.model_selection import RandomizedSearchCV\nfrom scipy.stats import reciprocal, uniform\nfrom sklearn.svm import SVC\nparam_distributions = {'gamma': reciprocal(0.001, 0.1), 'C': uniform(1, 10)}\nrnd_search_cv = RandomizedSearchCV(svm_clf, param_distributions, n_iter=10,\n verbose=2, cv=3)\nrnd_search_cv.fit(X_train_scaled[:1000], y_train[:1000])\nfrom sklearn.datasets import fetch_california_housing\nfrom sklearn.model_selection import train_test_split\nhousing = fetch_california_housing()\nX = housing['data']\ny = housing['target']\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2,\n random_state=42)\n",
"step-5": "# To add a new cell, type '# %%'\n# To add a new markdown cell, type '# %% [markdown]'\n# %% [markdown]\n# ### Bài tập 1.\n# - <ins>Yêu cầu</ins>: Ý tưởng cơ bản của thuật toán ``Support Vector Machine`` (``SVM``) là gì? Ý tưởng của thuật toán biên mềm (``soft margin``) ``SVM``. Nêu ý nghĩa của siêu tham số ``C`` trong bài toán cực tiểu hàm mất mát.\n# \n# 1. Ý tưởng cơ bản của SVM là đưa toàn bộ dataset vào không gian nhiều chiều (n chiều), từ đó tìm ra mặt phẳng thích hợp nhất (hyperplane) để phân chia\n# 2. Support Vector Machine thuần (hard margin) thì gặp hai vấn đề chính đó là nó chỉ hoạt động trên dataset ``Linearly Separable`` và thứ 2 đó là nó khá nhạy cảm với biến nhiễu (sensitive to noise). Để tránh vấn đề này, chúng ta cần sử dụng một mô hình linh hoạt \n# hơn. Nhiệm vụ của nó là tìm được mặt phẳng vẫn phân loại tốt nhưng chấp nhận sai lệch ở một mức độ chấp nhận được.\n# 3. Tham số `C` là hằng số dương giúp cân đối độ lớn của margin và sự hy sinh của các điểm nằm trong vùng không an toàn. Khi $C = \\infty $ hoặc rất lớn, Soft Margin SVM trở thành Hard Margin SVM.\n# %% [markdown]\n# ### Bài tập 2.\n# - <ins>Yêu cầu</ins>: Sử dụng mô hình ``SVM`` thuộc thư viện ``sklearn`` để xây dựng mô hình phân loại dựa trên tập dữ liệu huấn luyện ``X_train``, ``y_train``. Hãy nhận xét về tỉ lệ nhãn ``0`` và ``1`` trong bộ dữ liệu đã cho như đoạn code bên dưới. 
Hãy thử thay đổi giá trị của tham số ``C`` và nhận xét các độ đo ``Recall``, ``Precison``, ``F1-score``, và ``Accuracy`` của mô hình thu được trên tập dữ liệu kiểm tra ``X_test``, ``y_test``.\n# - Nguồn tham khảo dữ liệu ``thyroid_sick.csv``: https://archive.ics.uci.edu/ml/datasets/Thyroid+Disease\n\n# %%\nimport pandas as pd\nfrom sklearn.model_selection import train_test_split\nfrom sklearn import preprocessing\nfrom sklearn.metrics import classification_report, confusion_matrix, accuracy_score\nimport itertools\nimport numpy as np\nfrom sklearn import preprocessing\n\n# %%\ndf = pd.read_csv('thyroid_sick.csv')\nX = df[[column_name for column_name in df.columns if column_name != 'classes']]\ny = df[['classes']]\nX = preprocessing.StandardScaler().fit(X).transform(X.astype(float))\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1, random_state=42)\n\n\n# %%\ndf.pivot_table(index =['classes'], aggfunc='size')\n\n# %% [markdown]\n# * Nhận xét:\n\n# %%\nfrom sklearn import svm\nC_parameter = 10.5\nmean_acc = []\nfor n in np.arange(0.5, C_parameter, 0.5):\n \n #Train Model and Predict \n clf = svm.SVC(C=n).fit(X_train,y_train)\n yhat=clf.predict(X_test)\n cnf_matrix = confusion_matrix(y_test, yhat)\n\n mean_acc.append(float(accuracy_score(y_test, yhat)))\n print(\"Result with C = \" + str(n))\n np.set_printoptions(precision=2)\n print (classification_report(y_test, yhat))\n\nprint( \"The best accuracy was with\", max(mean_acc), \"with C=\", n)\n\n# %% [markdown]\n# ### Bài tập 3.\n# - <ins>Yêu cầu</ins>: Ý tưởng của hàm ``kernel`` $K(\\dots, \\dots)$ là gì? Khi nào chúng ta áp dụng hàm ``kernel``? Chúng ta có cần biết biểu thức của hàm $\\Phi(x)$ không?\n# 1. Kernel SVM là việc đi tìm một hàm số biến đổi dữ liệu $x$ từ không gian feature ban đầu thành dữ liệu trong một không gian mới bằng hàm số $\\Phi(\\mathbf{x})$. 
Hàm số này cần thoả mãn mục đích đó là tronng không gian mới, dữ liệu giữa hai classes là phân biệt tuyến tính hoặc gần như phần biệt tuyến tính.\n# 2. Chúng ta áp dụng hàm ``kernel`` khi dữ liệu không phân biệt tuyến tính, Với dữ liệu gần phân biệt tuyến tính, linear và poly kernels cho kết quả tốt hơn.\n# 3.\n# %% [markdown]\n# ### Bài tập 4.\n# - <ins>Yêu cầu</ins>: Cho điểm dữ liệu trong không gian hai chiều $x = [x_1, x_2]^T$ và hàm biến đổi sang không gian năm chiều $\\Phi(x) = [1, \\sqrt{2}x_1, \\sqrt{2}x_2, x_1^2, \\sqrt{2}x_1x_2, x_2^2]^T$. Hãy tính hàm ``kernel`` $K(a, b)$.\n# \n# \\begin{eqnarray}\n# \\Phi(\\mathbf{x})^T\\Phi(\\mathbf{z}) &=& [1, \\sqrt{2} x_1, \\sqrt{2} x_2, x_1^2, \\sqrt{2} x_1x_2, x_2^2] [1, \\sqrt{2} z_1, \\sqrt{2} z_2, z_1^2, \\sqrt{2} z_1z_2, z_2^2]^T \\\\\n# &=& 1 + 2x_1z_1 + 2x_2z_2 + x_1^2x_2^2 + 2x_1z_1x_2z_2 + x_2^2z_2^2 \\\\\n# &=& (1 + x_1z_1 + x_2z_2)^2 = (1 + \\mathbf{x}^T\\mathbf{z})^2 = k(\\mathbf{x}, \\mathbf{z})\n# \\end{eqnarray}\n# %% [markdown]\n# ### Bài tập 5.\n# - <ins>Yêu cầu</ins>: Giả sử bạn dùng bộ phân loại ``SVM`` với hàm ``kernel`` (radial basis function) ``RBF`` cho tập huấn luyện và thấy mô hình phân loại chưa tốt. Để cải thiện, bạn sẽ giảm hay tăng tham số $\\gamma$ trong công thức hàm ``kernel``, tham số ``C`` trong hàm mất mát.\n# %% [markdown]\n# ### Bài tập 6. (Exercise 9 trang 174, Chapter 5: Support Vector Machines)\n# - <ins>Yêu cầu</ins>: Huấn luyện một bộ phân lớp ``SVM`` dựa trên bộ dữ liệu ``MNIST`` (dùng để phân loại hình ảnh các ký tự số có cùng kích thước). Bởi vì bộ phân loại ``SVM`` là bộ phân lớp nhị phân, chúng ta sẽ cần sử dụng chiến thuật ``one-versus-the-rest`` để phân loại tất cả ``10`` ký tự số (trong thực tế chúng ta chỉ dùng chiến thuật ``one-versus-one`` trong các trường hợp dữ liệu nhỏ). 
Bạn hãy báo cáo độ chính xác (``accuracy``) của mô hình đã huấn luyện trên tập test.\n\n# %%\nimport numpy as np\nfrom sklearn.svm import LinearSVC\nfrom sklearn.datasets import fetch_openml\nfrom sklearn.metrics import accuracy_score\nfrom sklearn.preprocessing import StandardScaler\nmnist = fetch_openml('mnist_784', version=1, cache=True)\n\n# %%\nX = mnist[\"data\"]\ny = mnist[\"target\"].astype(np.uint8)\n\nX_train = X[:60000]\ny_train = y[:60000]\nX_test = X[60000:]\ny_test = y[60000:]\nlin_clf = LinearSVC(random_state=42)\nlin_clf.fit(X_train, y_train)\ny_pred = lin_clf.predict(X_train)\naccuracy_score(y_train, y_pred)\n\n# %%\ny_test_predict =lin_clf.predict(X_test)\naccuracy_score(y_test, y_test_predict)\n# %%\nScaler = StandardScaler()\nX_train_scaled = Scaler.fit_transform(X_train.astype(np.float32))\nX_test_scaled = Scaler.fit_transform(X_test.astype(np.float32))\n# %%\nlin_clf = LinearSVC(random_state =42)\nlin_clf.fit(X_train, y_train)\ny_pred = lin_clf.predict(X_train)\naccuracy_score(y_train, y_pred)\n# %%\ny_test_predict = lin_clf.predict(X_test_scaled)\naccuracy_score(y_test, y_test_predict)\nfrom sklearn.model_selection import RandomizedSearchCV\nfrom scipy.stats import reciproca, uniform\nfrom sklearn.model_selection import RandomizedSearchCV\nfrom scipy.stats import reciprocal, uniform\nfrom sklearn.svm import SVC\n\nparam_distributions = {\"gamma\": reciprocal(0.001, 0.1), \"C\": uniform(1, 10)}\nrnd_search_cv = RandomizedSearchCV(svm_clf, param_distributions, n_iter=10, verbose=2, cv=3)\nrnd_search_cv.fit(X_train_scaled[:1000], y_train[:1000])\n\n# %% [markdown]\n# ### Bài tập 7. 
(Exercise 10 trang 174, Chapter 5: Support Vector Machines)\n# - <ins>Yêu cầu</ins>: Hãy huấn luyện một mô hình hồi quy tuyến tính với dữ liệu giá nhà ``California housing dataset``.\n\n# %%\nfrom sklearn.datasets import fetch_california_housing\nfrom sklearn.model_selection import train_test_split\n\n\nhousing = fetch_california_housing()\nX = housing[\"data\"]\ny = housing[\"target\"]\n\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
def maxProduct(self, A):
size= len(A)
if size==1:
return A[0]
Max=[A[0]]
Min=[A[0]]
for i in range(1,size):
Max.append(max(max(Max[i-1]*A[i],Min[i-1]*A[i]),A[i]))
Min.append(min(min(Max[i-1]*A[i],Min[i-1]*A[i]),A[i]))
tmax=Max[0]
for i in range(0,size):
if Max[i]>tmax:
tmax=Max[i]
return tmax
|
normal
|
{
"blob_id": "1fafbc1e415b5089afcd2976d4f0dc2aa1c5a144",
"index": 1077,
"step-1": " def maxProduct(self, A):\n size= len(A)\n if size==1:\n return A[0]\n Max=[A[0]]\n Min=[A[0]]\n for i in range(1,size):\n Max.append(max(max(Max[i-1]*A[i],Min[i-1]*A[i]),A[i]))\n Min.append(min(min(Max[i-1]*A[i],Min[i-1]*A[i]),A[i]))\n tmax=Max[0]\n for i in range(0,size):\n if Max[i]>tmax:\n tmax=Max[i]\n return tmax\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AlertMailModel(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AlertMailModel(models.Model):
receipient_mail = models.EmailField()
host_mail = models.EmailField()
host_smtpaddress = models.CharField(max_length=25)
mail_host_password = models.CharField(max_length=200)
use_tls = models.BooleanField(default=False)
port = models.CharField(max_length=3, default=25)
<|reserved_special_token_1|>
from django.db import models
class AlertMailModel(models.Model):
receipient_mail = models.EmailField()
host_mail = models.EmailField()
host_smtpaddress = models.CharField(max_length=25)
mail_host_password = models.CharField(max_length=200)
use_tls = models.BooleanField(default=False)
port = models.CharField(max_length=3, default=25)
<|reserved_special_token_1|>
from django.db import models
# Create your models here.
class AlertMailModel(models.Model):
receipient_mail = models.EmailField()
host_mail = models.EmailField()
host_smtpaddress = models.CharField(max_length=25)
mail_host_password = models.CharField(max_length=200)
use_tls=models.BooleanField(default=False)
port=models.CharField(max_length=3,default=25)
|
flexible
|
{
"blob_id": "2872c86294037b4585158e7ff6db414ba7ab90cc",
"index": 1814,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass AlertMailModel(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass AlertMailModel(models.Model):\n receipient_mail = models.EmailField()\n host_mail = models.EmailField()\n host_smtpaddress = models.CharField(max_length=25)\n mail_host_password = models.CharField(max_length=200)\n use_tls = models.BooleanField(default=False)\n port = models.CharField(max_length=3, default=25)\n",
"step-4": "from django.db import models\n\n\nclass AlertMailModel(models.Model):\n receipient_mail = models.EmailField()\n host_mail = models.EmailField()\n host_smtpaddress = models.CharField(max_length=25)\n mail_host_password = models.CharField(max_length=200)\n use_tls = models.BooleanField(default=False)\n port = models.CharField(max_length=3, default=25)\n",
"step-5": "from django.db import models\n\n# Create your models here.\nclass AlertMailModel(models.Model):\n receipient_mail = models.EmailField()\n host_mail = models.EmailField()\n host_smtpaddress = models.CharField(max_length=25)\n mail_host_password = models.CharField(max_length=200)\n use_tls=models.BooleanField(default=False)\n port=models.CharField(max_length=3,default=25)\n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test(data):
actions.navigate(data.env.url + 'tabs/')
actions.send_keys('#title', 'lorem ipsum')
actions.click('#goButtonCustom')
actions.assert_amount_of_windows(2)
actions.close_window_by_partial_title('lorem')
golem_steps.assert_last_step_message(
"Close window by partial title 'lorem'")
actions.assert_amount_of_windows(1)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
description = 'close_window_by_partial_title action'
def test(data):
actions.navigate(data.env.url + 'tabs/')
actions.send_keys('#title', 'lorem ipsum')
actions.click('#goButtonCustom')
actions.assert_amount_of_windows(2)
actions.close_window_by_partial_title('lorem')
golem_steps.assert_last_step_message(
"Close window by partial title 'lorem'")
actions.assert_amount_of_windows(1)
<|reserved_special_token_1|>
from golem import actions
from projects.golem_integration.pages import golem_steps
description = 'close_window_by_partial_title action'
def test(data):
actions.navigate(data.env.url + 'tabs/')
actions.send_keys('#title', 'lorem ipsum')
actions.click('#goButtonCustom')
actions.assert_amount_of_windows(2)
actions.close_window_by_partial_title('lorem')
golem_steps.assert_last_step_message(
"Close window by partial title 'lorem'")
actions.assert_amount_of_windows(1)
|
flexible
|
{
"blob_id": "8fe45332ce09195beabb24c8cbb56868c564ded4",
"index": 2132,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test(data):\n actions.navigate(data.env.url + 'tabs/')\n actions.send_keys('#title', 'lorem ipsum')\n actions.click('#goButtonCustom')\n actions.assert_amount_of_windows(2)\n actions.close_window_by_partial_title('lorem')\n golem_steps.assert_last_step_message(\n \"Close window by partial title 'lorem'\")\n actions.assert_amount_of_windows(1)\n",
"step-3": "<mask token>\ndescription = 'close_window_by_partial_title action'\n\n\ndef test(data):\n actions.navigate(data.env.url + 'tabs/')\n actions.send_keys('#title', 'lorem ipsum')\n actions.click('#goButtonCustom')\n actions.assert_amount_of_windows(2)\n actions.close_window_by_partial_title('lorem')\n golem_steps.assert_last_step_message(\n \"Close window by partial title 'lorem'\")\n actions.assert_amount_of_windows(1)\n",
"step-4": "from golem import actions\nfrom projects.golem_integration.pages import golem_steps\ndescription = 'close_window_by_partial_title action'\n\n\ndef test(data):\n actions.navigate(data.env.url + 'tabs/')\n actions.send_keys('#title', 'lorem ipsum')\n actions.click('#goButtonCustom')\n actions.assert_amount_of_windows(2)\n actions.close_window_by_partial_title('lorem')\n golem_steps.assert_last_step_message(\n \"Close window by partial title 'lorem'\")\n actions.assert_amount_of_windows(1)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
def koodrinate(kraj, kraji):
for ime, x, y in kraji:
if ime == kraj:
return x, y
kraji = {
'Brežice': (68.66, 7.04),
'Lenart': (85.20, 78.75),
'Rateče': (-65.04, 70.04),
'Ljutomer': (111.26, 71.82),
'Rogaška Slatina': (71.00, 42.00),
'Ribnica': (7.10, -10.50),
'Dutovlje': (-56.80, -6.93),
'Lokve': (-57.94, 19.32),
'Vinica': (43.81, -38.43),
'Brtonigla': (-71.00, -47.25),
'Kanal': (-71.00, 26.25),
'Črnomelj': (39.05, -27.93),
'Trbovlje': (29.61, 35.07),
'Beltinci': (114.81, 80.54),
'Domžale': (-2.34, 31.50)
}
def koodrinate(kraj, kraji):
return kraji.get(kraj)
napis = "KRNEKI"
vrednost = {"I": 1, "V": 5, "X": 10, "L": 50, "C": 100, "D": 500, "M": 1000}
vsota = 0
for c in napis:
vsota += vrednost.get(c, 0)
print(sum(vrednost.get(c, 0) for c in napis))
for c in napis:
if c == "I":
vsota += 1
elif c == "V":
vsota += 5
elif c == "X":
vsota += 10
elif c == "L":
vsota += 50
elif c == "C":
vsota += 100
elif c == "D":
vsota += 500
elif c == "M":
vsota += 1000
|
normal
|
{
"blob_id": "2cfc1bea6dd1571eff67c3f49b2a1899560c7ba7",
"index": 3469,
"step-1": "def koodrinate(kraj, kraji):\n for ime, x, y in kraji:\n if ime == kraj:\n return x, y\n\n\n<mask token>\n",
"step-2": "def koodrinate(kraj, kraji):\n for ime, x, y in kraji:\n if ime == kraj:\n return x, y\n\n\n<mask token>\n\n\ndef koodrinate(kraj, kraji):\n return kraji.get(kraj)\n\n\n<mask token>\n",
"step-3": "def koodrinate(kraj, kraji):\n for ime, x, y in kraji:\n if ime == kraj:\n return x, y\n\n\n<mask token>\n\n\ndef koodrinate(kraj, kraji):\n return kraji.get(kraj)\n\n\n<mask token>\nfor c in napis:\n vsota += vrednost.get(c, 0)\nprint(sum(vrednost.get(c, 0) for c in napis))\nfor c in napis:\n if c == 'I':\n vsota += 1\n elif c == 'V':\n vsota += 5\n elif c == 'X':\n vsota += 10\n elif c == 'L':\n vsota += 50\n elif c == 'C':\n vsota += 100\n elif c == 'D':\n vsota += 500\n elif c == 'M':\n vsota += 1000\n",
"step-4": "def koodrinate(kraj, kraji):\n for ime, x, y in kraji:\n if ime == kraj:\n return x, y\n\n\nkraji = {'Brežice': (68.66, 7.04), 'Lenart': (85.2, 78.75), 'Rateče': (-\n 65.04, 70.04), 'Ljutomer': (111.26, 71.82), 'Rogaška Slatina': (71.0, \n 42.0), 'Ribnica': (7.1, -10.5), 'Dutovlje': (-56.8, -6.93), 'Lokve': (-\n 57.94, 19.32), 'Vinica': (43.81, -38.43), 'Brtonigla': (-71.0, -47.25),\n 'Kanal': (-71.0, 26.25), 'Črnomelj': (39.05, -27.93), 'Trbovlje': (\n 29.61, 35.07), 'Beltinci': (114.81, 80.54), 'Domžale': (-2.34, 31.5)}\n\n\ndef koodrinate(kraj, kraji):\n return kraji.get(kraj)\n\n\nnapis = 'KRNEKI'\nvrednost = {'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000}\nvsota = 0\nfor c in napis:\n vsota += vrednost.get(c, 0)\nprint(sum(vrednost.get(c, 0) for c in napis))\nfor c in napis:\n if c == 'I':\n vsota += 1\n elif c == 'V':\n vsota += 5\n elif c == 'X':\n vsota += 10\n elif c == 'L':\n vsota += 50\n elif c == 'C':\n vsota += 100\n elif c == 'D':\n vsota += 500\n elif c == 'M':\n vsota += 1000\n",
"step-5": "def koodrinate(kraj, kraji):\n for ime, x, y in kraji:\n if ime == kraj:\n return x, y\n\n\nkraji = {\n 'Brežice': (68.66, 7.04),\n 'Lenart': (85.20, 78.75),\n 'Rateče': (-65.04, 70.04),\n 'Ljutomer': (111.26, 71.82),\n 'Rogaška Slatina': (71.00, 42.00),\n 'Ribnica': (7.10, -10.50),\n 'Dutovlje': (-56.80, -6.93),\n 'Lokve': (-57.94, 19.32),\n 'Vinica': (43.81, -38.43),\n 'Brtonigla': (-71.00, -47.25),\n 'Kanal': (-71.00, 26.25),\n 'Črnomelj': (39.05, -27.93),\n 'Trbovlje': (29.61, 35.07),\n 'Beltinci': (114.81, 80.54),\n 'Domžale': (-2.34, 31.50)\n}\n\n\ndef koodrinate(kraj, kraji):\n return kraji.get(kraj)\n\n\nnapis = \"KRNEKI\"\n\nvrednost = {\"I\": 1, \"V\": 5, \"X\": 10, \"L\": 50, \"C\": 100, \"D\": 500, \"M\": 1000}\n\nvsota = 0\n\nfor c in napis:\n vsota += vrednost.get(c, 0)\n\nprint(sum(vrednost.get(c, 0) for c in napis))\n\nfor c in napis:\n if c == \"I\":\n vsota += 1\n elif c == \"V\":\n vsota += 5\n elif c == \"X\":\n vsota += 10\n elif c == \"L\":\n vsota += 50\n elif c == \"C\":\n vsota += 100\n elif c == \"D\":\n vsota += 500\n elif c == \"M\":\n vsota += 1000\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def cnct_cb():
print('Connected: ')
<|reserved_special_token_0|>
def _funCb(ACT):
result = instance.read()
if result.is_valid():
strTemp = '%.2f' % result.temperature
strHumi = '%.2f' % result.humidity
oled.yell2('Temp=' + strTemp, 'Humi=' + strHumi)
blynk.virtual_write(1, strTemp)
blynk.virtual_write(2, strHumi)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def cnct_cb():
print('Connected: ')
blynk.on_connect(cnct_cb)
def _funCb(ACT):
result = instance.read()
if result.is_valid():
strTemp = '%.2f' % result.temperature
strHumi = '%.2f' % result.humidity
oled.yell2('Temp=' + strTemp, 'Humi=' + strHumi)
blynk.virtual_write(1, strTemp)
blynk.virtual_write(2, strHumi)
blynk.Ticker(_funCb, 140, False)
blynk.gpio_auto('button')
blynk.run()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
instance = dht22.DHT22(pin=4)
token = '---token---'
blynk = Blynk(token)
def cnct_cb():
print('Connected: ')
blynk.on_connect(cnct_cb)
def _funCb(ACT):
result = instance.read()
if result.is_valid():
strTemp = '%.2f' % result.temperature
strHumi = '%.2f' % result.humidity
oled.yell2('Temp=' + strTemp, 'Humi=' + strHumi)
blynk.virtual_write(1, strTemp)
blynk.virtual_write(2, strHumi)
blynk.Ticker(_funCb, 140, False)
blynk.gpio_auto('button')
blynk.run()
<|reserved_special_token_1|>
from DHT_Python import dht22
from oled96 import oled
from PiBlynk import Blynk
instance = dht22.DHT22(pin=4)
token = '---token---'
blynk = Blynk(token)
def cnct_cb():
print('Connected: ')
blynk.on_connect(cnct_cb)
def _funCb(ACT):
result = instance.read()
if result.is_valid():
strTemp = '%.2f' % result.temperature
strHumi = '%.2f' % result.humidity
oled.yell2('Temp=' + strTemp, 'Humi=' + strHumi)
blynk.virtual_write(1, strTemp)
blynk.virtual_write(2, strHumi)
blynk.Ticker(_funCb, 140, False)
blynk.gpio_auto('button')
blynk.run()
<|reserved_special_token_1|>
from DHT_Python import dht22
from oled96 import oled
from PiBlynk import Blynk
# read data using pin 4
instance = dht22.DHT22(pin=4)
token = "---token---"
blynk = Blynk(token)
def cnct_cb():
print ("Connected: ")
blynk.on_connect(cnct_cb)
def _funCb(ACT):
result = instance.read()
if result.is_valid():
strTemp=("%.2f" % result.temperature)
strHumi=("%.2f" % result.humidity)
# Show temperature and humidity on OLED
oled.yell2("Temp="+strTemp,"Humi="+strHumi)
blynk.virtual_write(1,strTemp) # User Virtual port V1
blynk.virtual_write(2,strHumi) # User Virtual port V2
blynk.Ticker(_funCb, 140, False) # ~2 Hz
blynk.gpio_auto("button")
blynk.run()
|
flexible
|
{
"blob_id": "e95ebb2aa6526e3bf3789da17d144e71cdb49aca",
"index": 2712,
"step-1": "<mask token>\n\n\ndef cnct_cb():\n print('Connected: ')\n\n\n<mask token>\n\n\ndef _funCb(ACT):\n result = instance.read()\n if result.is_valid():\n strTemp = '%.2f' % result.temperature\n strHumi = '%.2f' % result.humidity\n oled.yell2('Temp=' + strTemp, 'Humi=' + strHumi)\n blynk.virtual_write(1, strTemp)\n blynk.virtual_write(2, strHumi)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef cnct_cb():\n print('Connected: ')\n\n\nblynk.on_connect(cnct_cb)\n\n\ndef _funCb(ACT):\n result = instance.read()\n if result.is_valid():\n strTemp = '%.2f' % result.temperature\n strHumi = '%.2f' % result.humidity\n oled.yell2('Temp=' + strTemp, 'Humi=' + strHumi)\n blynk.virtual_write(1, strTemp)\n blynk.virtual_write(2, strHumi)\n\n\nblynk.Ticker(_funCb, 140, False)\nblynk.gpio_auto('button')\nblynk.run()\n",
"step-3": "<mask token>\ninstance = dht22.DHT22(pin=4)\ntoken = '---token---'\nblynk = Blynk(token)\n\n\ndef cnct_cb():\n print('Connected: ')\n\n\nblynk.on_connect(cnct_cb)\n\n\ndef _funCb(ACT):\n result = instance.read()\n if result.is_valid():\n strTemp = '%.2f' % result.temperature\n strHumi = '%.2f' % result.humidity\n oled.yell2('Temp=' + strTemp, 'Humi=' + strHumi)\n blynk.virtual_write(1, strTemp)\n blynk.virtual_write(2, strHumi)\n\n\nblynk.Ticker(_funCb, 140, False)\nblynk.gpio_auto('button')\nblynk.run()\n",
"step-4": "from DHT_Python import dht22\nfrom oled96 import oled\nfrom PiBlynk import Blynk\ninstance = dht22.DHT22(pin=4)\ntoken = '---token---'\nblynk = Blynk(token)\n\n\ndef cnct_cb():\n print('Connected: ')\n\n\nblynk.on_connect(cnct_cb)\n\n\ndef _funCb(ACT):\n result = instance.read()\n if result.is_valid():\n strTemp = '%.2f' % result.temperature\n strHumi = '%.2f' % result.humidity\n oled.yell2('Temp=' + strTemp, 'Humi=' + strHumi)\n blynk.virtual_write(1, strTemp)\n blynk.virtual_write(2, strHumi)\n\n\nblynk.Ticker(_funCb, 140, False)\nblynk.gpio_auto('button')\nblynk.run()\n",
"step-5": "from DHT_Python import dht22\nfrom oled96 import oled \nfrom PiBlynk import Blynk\n\n# read data using pin 4\ninstance = dht22.DHT22(pin=4)\n\ntoken = \"---token---\"\nblynk = Blynk(token)\ndef cnct_cb():\n\tprint (\"Connected: \")\n\t\nblynk.on_connect(cnct_cb)\n\ndef _funCb(ACT):\n\tresult = instance.read()\n\tif result.is_valid():\n\t\tstrTemp=(\"%.2f\" % result.temperature)\n\t\tstrHumi=(\"%.2f\" % result.humidity)\n\t\t# Show temperature and humidity on OLED\n\t\toled.yell2(\"Temp=\"+strTemp,\"Humi=\"+strHumi) \n\t\tblynk.virtual_write(1,strTemp) # User Virtual port V1\n\t\tblynk.virtual_write(2,strHumi) # User Virtual port V2\nblynk.Ticker(_funCb, 140, False) # ~2 Hz\n\nblynk.gpio_auto(\"button\")\n\nblynk.run()\n\n\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import json
import os
from lib.create import create_server, create_user
os.chdir(r'/home/niko/data/Marvin')
def edit_user_stats(server_id: str, user_id: str, stat: str, datas):
create_user(server_id, user_id)
if os.path.isfile("Server/{}/user.json".format(server_id)):
with open("Server/{}/user.json".format(server_id), 'r') as fp:
data = json.load(fp)
data[user_id][stat] = datas
with open("Server/{}/user.json".format(server_id, user_id), 'w') as fp:
json.dump(data, fp, indent=4)
def set_message(server_id: str, name: str, message_id: str):
create_server(server_id)
with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8') as fp:
data = json.load(fp)
if name in data:
data[name]['message'] = message_id
with open('Server/{}/ticket.json'.format(server_id), "w+") as fp:
json.dump(data, fp, indent=4)
else:
return False
def set_log(server_id: str, name: str, channel_id: str):
create_server(server_id)
with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8') as fp:
data = json.load(fp)
if name in data:
data[name]['log'] = channel_id
with open('Server/{}/ticket.json'.format(server_id), "w+") as fp:
json.dump(data, fp, indent=4)
else:
return False
def set_category(server_id: str, name: str, category_id: str):
create_server(server_id)
with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8') as fp:
data = json.load(fp)
if name in data:
data[name]['category'] = category_id
with open('Server/{}/ticket.json'.format(server_id), "w+") as fp:
json.dump(data, fp, indent=4)
else:
return False
def set_count(server_id: str, name: str):
create_server(server_id)
with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8') as fp:
data = json.load(fp)
if name in data:
count = data[name]['ticket']
data[name]['ticket'] = count + 1
with open('Server/{}/ticket.json'.format(server_id), "w+") as fp:
json.dump(data, fp, indent=4)
else:
return False
def edit_setting(server_id: str, vari: str, new):
    """Overwrite setting *vari* with *new*; return False when *vari* is unknown."""
    create_server(server_id)
    path = 'Server/{}/settings.json'.format(server_id)
    with open(path, encoding='utf-8') as fp:
        data = json.load(fp)
    if vari not in data:
        return False
    data[vari] = new
    with open(path, "w+") as fp:
        json.dump(data, fp, indent=4)
|
normal
|
{
"blob_id": "e6d506dd45e72ee7f0162a884981ee1156153d3d",
"index": 8661,
"step-1": "<mask token>\n\n\ndef edit_user_stats(server_id: str, user_id: str, stat: str, datas):\n create_user(server_id, user_id)\n if os.path.isfile('Server/{}/user.json'.format(server_id)):\n with open('Server/{}/user.json'.format(server_id), 'r') as fp:\n data = json.load(fp)\n data[user_id][stat] = datas\n with open('Server/{}/user.json'.format(server_id, user_id), 'w') as fp:\n json.dump(data, fp, indent=4)\n\n\ndef set_message(server_id: str, name: str, message_id: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n data[name]['message'] = message_id\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\ndef set_log(server_id: str, name: str, channel_id: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n data[name]['log'] = channel_id\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\n<mask token>\n\n\ndef set_count(server_id: str, name: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n count = data[name]['ticket']\n data[name]['ticket'] = count + 1\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\ndef edit_setting(server_id: str, vari: str, new):\n create_server(server_id)\n with open('Server/{}/settings.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if vari in data:\n data[vari] = new\n with open('Server/{}/settings.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n",
"step-2": "<mask token>\n\n\ndef edit_user_stats(server_id: str, user_id: str, stat: str, datas):\n create_user(server_id, user_id)\n if os.path.isfile('Server/{}/user.json'.format(server_id)):\n with open('Server/{}/user.json'.format(server_id), 'r') as fp:\n data = json.load(fp)\n data[user_id][stat] = datas\n with open('Server/{}/user.json'.format(server_id, user_id), 'w') as fp:\n json.dump(data, fp, indent=4)\n\n\ndef set_message(server_id: str, name: str, message_id: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n data[name]['message'] = message_id\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\ndef set_log(server_id: str, name: str, channel_id: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n data[name]['log'] = channel_id\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\ndef set_category(server_id: str, name: str, category_id: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n data[name]['category'] = category_id\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\ndef set_count(server_id: str, name: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n count = data[name]['ticket']\n data[name]['ticket'] = count + 1\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\ndef edit_setting(server_id: str, vari: str, new):\n 
create_server(server_id)\n with open('Server/{}/settings.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if vari in data:\n data[vari] = new\n with open('Server/{}/settings.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n",
"step-3": "<mask token>\nos.chdir('/home/niko/data/Marvin')\n\n\ndef edit_user_stats(server_id: str, user_id: str, stat: str, datas):\n create_user(server_id, user_id)\n if os.path.isfile('Server/{}/user.json'.format(server_id)):\n with open('Server/{}/user.json'.format(server_id), 'r') as fp:\n data = json.load(fp)\n data[user_id][stat] = datas\n with open('Server/{}/user.json'.format(server_id, user_id), 'w') as fp:\n json.dump(data, fp, indent=4)\n\n\ndef set_message(server_id: str, name: str, message_id: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n data[name]['message'] = message_id\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\ndef set_log(server_id: str, name: str, channel_id: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n data[name]['log'] = channel_id\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\ndef set_category(server_id: str, name: str, category_id: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n data[name]['category'] = category_id\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\ndef set_count(server_id: str, name: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n count = data[name]['ticket']\n data[name]['ticket'] = count + 1\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\ndef 
edit_setting(server_id: str, vari: str, new):\n create_server(server_id)\n with open('Server/{}/settings.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if vari in data:\n data[vari] = new\n with open('Server/{}/settings.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n",
"step-4": "import json\nimport os\nfrom lib.create import create_server, create_user\nos.chdir('/home/niko/data/Marvin')\n\n\ndef edit_user_stats(server_id: str, user_id: str, stat: str, datas):\n create_user(server_id, user_id)\n if os.path.isfile('Server/{}/user.json'.format(server_id)):\n with open('Server/{}/user.json'.format(server_id), 'r') as fp:\n data = json.load(fp)\n data[user_id][stat] = datas\n with open('Server/{}/user.json'.format(server_id, user_id), 'w') as fp:\n json.dump(data, fp, indent=4)\n\n\ndef set_message(server_id: str, name: str, message_id: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n data[name]['message'] = message_id\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\ndef set_log(server_id: str, name: str, channel_id: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n data[name]['log'] = channel_id\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\ndef set_category(server_id: str, name: str, category_id: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n data[name]['category'] = category_id\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n\n\ndef set_count(server_id: str, name: str):\n create_server(server_id)\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if name in data:\n count = data[name]['ticket']\n data[name]['ticket'] = count + 1\n with open('Server/{}/ticket.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, 
indent=4)\n else:\n return False\n\n\ndef edit_setting(server_id: str, vari: str, new):\n create_server(server_id)\n with open('Server/{}/settings.json'.format(server_id), encoding='utf-8'\n ) as fp:\n data = json.load(fp)\n if vari in data:\n data[vari] = new\n with open('Server/{}/settings.json'.format(server_id), 'w+') as fp:\n json.dump(data, fp, indent=4)\n else:\n return False\n",
"step-5": "import json\r\nimport os\r\n\r\nfrom lib.create import create_server, create_user\r\n\r\nos.chdir(r'/home/niko/data/Marvin')\r\n\r\n\r\ndef edit_user_stats(server_id: str, user_id: str, stat: str, datas):\r\n create_user(server_id, user_id)\r\n if os.path.isfile(\"Server/{}/user.json\".format(server_id)):\r\n with open(\"Server/{}/user.json\".format(server_id), 'r') as fp:\r\n data = json.load(fp)\r\n data[user_id][stat] = datas\r\n with open(\"Server/{}/user.json\".format(server_id, user_id), 'w') as fp:\r\n json.dump(data, fp, indent=4)\r\n\r\n\r\ndef set_message(server_id: str, name: str, message_id: str):\r\n create_server(server_id)\r\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8') as fp:\r\n data = json.load(fp)\r\n if name in data:\r\n data[name]['message'] = message_id\r\n with open('Server/{}/ticket.json'.format(server_id), \"w+\") as fp:\r\n json.dump(data, fp, indent=4)\r\n else:\r\n return False\r\n\r\n\r\ndef set_log(server_id: str, name: str, channel_id: str):\r\n create_server(server_id)\r\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8') as fp:\r\n data = json.load(fp)\r\n if name in data:\r\n data[name]['log'] = channel_id\r\n with open('Server/{}/ticket.json'.format(server_id), \"w+\") as fp:\r\n json.dump(data, fp, indent=4)\r\n else:\r\n return False\r\n\r\n\r\ndef set_category(server_id: str, name: str, category_id: str):\r\n create_server(server_id)\r\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8') as fp:\r\n data = json.load(fp)\r\n if name in data:\r\n data[name]['category'] = category_id\r\n with open('Server/{}/ticket.json'.format(server_id), \"w+\") as fp:\r\n json.dump(data, fp, indent=4)\r\n else:\r\n return False\r\n\r\n\r\ndef set_count(server_id: str, name: str):\r\n create_server(server_id)\r\n with open('Server/{}/ticket.json'.format(server_id), encoding='utf-8') as fp:\r\n data = json.load(fp)\r\n if name in data:\r\n count = 
data[name]['ticket']\r\n data[name]['ticket'] = count + 1\r\n with open('Server/{}/ticket.json'.format(server_id), \"w+\") as fp:\r\n json.dump(data, fp, indent=4)\r\n else:\r\n return False\r\n\r\n\r\ndef edit_setting(server_id: str, vari: str, new):\r\n create_server(server_id)\r\n with open('Server/{}/settings.json'.format(server_id), encoding='utf-8') as fp:\r\n data = json.load(fp)\r\n if vari in data:\r\n data[vari] = new\r\n with open('Server/{}/settings.json'.format(server_id), \"w+\") as fp:\r\n json.dump(data, fp, indent=4)\r\n else:\r\n return False\r\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
class GameOfLife:
    """Conway's Game of Life stepped in place using two bits per cell.

    Bit 0 of each cell holds the current state, bit 1 the next state;
    after every cell has been evaluated the board is shifted right so
    bit 1 becomes the new current state.
    """

    @staticmethod
    def simulate(board):
        """Advance *board* (list of lists of 0/1 ints) by one generation, in place."""
        rows, cols = len(board), len(board[0])
        for row in range(rows):
            for col in range(cols):
                ones = GameOfLife.countOnes(board, row, col)
                # Standard B3/S23 rules: a live cell survives with 2 or 3
                # live neighbors; a dead cell is born with exactly 3.
                # Record the outcome in bit 1 without touching bit 0.
                if board[row][col] & 1:
                    if ones == 2 or ones == 3:
                        board[row][col] |= 2
                elif ones == 3:
                    board[row][col] |= 2
        for row in range(rows):
            for col in range(cols):
                board[row][col] >>= 1  # promote the next state to current

    @staticmethod
    def countOnes(board, row, col):
        """Return the number of live neighbors of cell (row, col)."""
        total = 0
        for dr in (-1, 0, 1):
            for dc in (-1, 0, 1):
                if dr or dc:  # skip the cell itself
                    total += GameOfLife.isOne(board, row + dr, col + dc)
        return total

    @staticmethod
    def isOne(board, row, col):
        """Return bit 0 of the cell, or 0 when (row, col) is off the board."""
        if row < 0 or row >= len(board):
            return 0
        # BUG FIX: the column bound must use the row width, not the row
        # count.  The original compared col against len(board), which
        # raises IndexError (or miscounts) on non-square boards.
        if col < 0 or col >= len(board[0]):
            return 0
        return board[row][col] & 1
|
normal
|
{
"blob_id": "862c5794a4da794678de419f053ae15b11bca6e7",
"index": 7453,
"step-1": "class GameOfLife:\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "class GameOfLife:\n\n @staticmethod\n def simulate(board):\n for row in range(len(board)):\n for col in range(len(board[0])):\n ones = GameOfLife.countOnes(board, row, col)\n if board[row][col] and (ones == 2 or ones == 3):\n board[row][col] |= 2\n elif not board[row][col] and ones == 3:\n board[row][col] |= 2\n for row in range(len(board)):\n for col in range(len(board[0])):\n board[row][col] >>= 1\n <mask token>\n <mask token>\n",
"step-3": "class GameOfLife:\n\n @staticmethod\n def simulate(board):\n for row in range(len(board)):\n for col in range(len(board[0])):\n ones = GameOfLife.countOnes(board, row, col)\n if board[row][col] and (ones == 2 or ones == 3):\n board[row][col] |= 2\n elif not board[row][col] and ones == 3:\n board[row][col] |= 2\n for row in range(len(board)):\n for col in range(len(board[0])):\n board[row][col] >>= 1\n\n @staticmethod\n def countOnes(board, row, col):\n total = 0\n total += GameOfLife.isOne(board, row - 1, col - 1)\n total += GameOfLife.isOne(board, row - 1, col)\n total += GameOfLife.isOne(board, row - 1, col + 1)\n total += GameOfLife.isOne(board, row, col - 1)\n total += GameOfLife.isOne(board, row, col + 1)\n total += GameOfLife.isOne(board, row + 1, col - 1)\n total += GameOfLife.isOne(board, row + 1, col)\n total += GameOfLife.isOne(board, row + 1, col + 1)\n return total\n <mask token>\n",
"step-4": "class GameOfLife:\n\n @staticmethod\n def simulate(board):\n for row in range(len(board)):\n for col in range(len(board[0])):\n ones = GameOfLife.countOnes(board, row, col)\n if board[row][col] and (ones == 2 or ones == 3):\n board[row][col] |= 2\n elif not board[row][col] and ones == 3:\n board[row][col] |= 2\n for row in range(len(board)):\n for col in range(len(board[0])):\n board[row][col] >>= 1\n\n @staticmethod\n def countOnes(board, row, col):\n total = 0\n total += GameOfLife.isOne(board, row - 1, col - 1)\n total += GameOfLife.isOne(board, row - 1, col)\n total += GameOfLife.isOne(board, row - 1, col + 1)\n total += GameOfLife.isOne(board, row, col - 1)\n total += GameOfLife.isOne(board, row, col + 1)\n total += GameOfLife.isOne(board, row + 1, col - 1)\n total += GameOfLife.isOne(board, row + 1, col)\n total += GameOfLife.isOne(board, row + 1, col + 1)\n return total\n\n @staticmethod\n def isOne(board, row, col):\n if row >= len(board) or row < 0:\n return 0\n if col >= len(board) or col < 0:\n return 0\n return board[row][col] & 1\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
import json
import os, django
# Django must be configured BEFORE importing dangjiansite.models below;
# these two lines bootstrap the ORM for standalone (non-manage.py) use.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dangjianyun.settings")# project settings module of the dangjianyun project
django.setup()
from dangjiansite.djfuncs import *
# NOTE(review): os is imported twice (also above); harmless but redundant.
import os
import datetime
import requests
import time
import urllib3
import base64
import csv
import random
from bs4 import BeautifulSoup
from dangjiansite.models import *
class Runner():
# def __init__(self, appid='TJZHDJ01', username='024549', password='Aa1234'):
    def __init__(self, appid='TJZHDJ01', username='', password=''):
        """Log in to the dangjianwang mobile API and pre-fetch the work lists.

        Construction performs network I/O: a login (self.token) plus three
        page-list fetches, so it raises whatever requests raises when the
        service is unreachable.

        :param appid: app-installation id sent with every request
        :param username: account name used for login
        :param password: account password used for login
        """
        urllib3.disable_warnings()# silence the SSL warnings caused by verify=False requests
        # Captured once here; the write*2File helpers reuse it, so all log
        # entries carry the construction timestamp rather than "now".
        self.currentTime = datetime.datetime.now().strftime("%H:%M:%S")
        self.username = username
        self.password = password
        # NOTE(review): .format(username) has no placeholder in these three
        # literals, so the username is silently dropped — confirm whether
        # per-user directories (e.g. './lib/{}/') were intended.
        self.thumbedFilePath = './lib/'.format(username)
        self.logFilePath = './log/'.format(username)
        self.errFilePath = './err/'.format(username)
        # self.thumbedFileList = self.getThumbFromFile()
        self.thumbedFileList = []
        self.debug = True
        self.session = requests.session()
        self.appid = appid# presumably the id of the app install on this device — verify on another device
        # Mimic the Android client so the API accepts our requests.
        self.headers ={
            'User-Agent': 'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)',
            'header_version': '80',
            'system': 'android',
            'Connection': 'Keep-Alive',
            'Host': 'mapi.dangjianwang.com',
        }
        self.token = self.getToken()
        time.sleep(0.1)
        # (title, id) pairs of pages that can receive a "thumb" comment.
        self.thumbPageList = self.getPages(urls=[
            'https://mapi.dangjianwang.com/v3_1/Learn/List',
            'https://mapi.dangjianwang.com/v3_1/Activities/List',
            'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'
        ])
        self.thumbPages = [i[1] for i in self.thumbPageList]
        time.sleep(0.1)
        self.helpPageList = self.getPages(urls=['https://mapi.dangjianwang.com/v3_1/Help/List', ])
        self.helpPages = [i[1] for i in self.helpPageList]
        self.helpResults = {}
        time.sleep(0.1)
        self.studyPageList = self.getPagesII(urls=['https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])
        self.studyPages = [i[1] for i in self.studyPageList]
        time.sleep(0.1)
        # Per-run bookkeeping of what was processed and what came back.
        self.studyRsults = {}
        self.thumbedPages = []
        self.thumbResults = {}
        self.helpedPages = []
        self.multiThumbed = []# consider flushing these to a file at the end
        self.viewsResults = []
        self.examC19Info = []
        self.examlist = []
        self.qaList = []
def getCurrentTime(self):
return datetime.datetime.now().strftime("%H:%M:%S")
def writeErr2File(self, err):
path = self.logFilePath
fullPath = '{}{}err.txt'.format(path, self.username)
if not os.path.exists(path):
os.mkdir(path)
with open(fullPath, 'a') as f:
f.write('{}:{}\n'.format(self.currentTime, err))
print('err已经写入{}'.format(fullPath))
def writeLog2File(self, log):
path = self.logFilePath
fullPath = '{}{}logs.txt'.format(path, self.username)
if not os.path.exists(path):
os.mkdir(path)
with open(fullPath, 'a') as f:
f.write('{}:{}\n'.format(self.currentTime, log))
print('log已经写入{}'.format(fullPath))
def writeThumb2File(self, id):
path = self.thumbedFilePath
fullPath = '{}{}thumbs.txt'.format(path, self.username)
if not os.path.exists(path):
os.mkdir(path)
with open(fullPath, 'a') as f:
f.write(',{}'.format(id))
print('点赞记录已经写入{}'.format(fullPath))
def getThumbFromFile(self):
'''
:return: 文件中id组成的列表
'''
path = self.thumbedFilePath
inFileList = []
fullPath = '{}{}thumbs.txt'.format(path, self.username)
if not os.path.exists(fullPath):
return fullPath
with open(fullPath, 'r') as f:
inFileList.extend(list(set(f.readlines()[0].split(','))))
# print('getThumbFormFile', inFileList)
with open(fullPath, 'w') as f1:
f1.write(','.join(sorted(inFileList)))
return inFileList
    def getExcuteTimes(self):
        """Work out how many of each daily task still needs to run.

        Reads today's credit detail (values look like '3/10', i.e.
        earned/maximum points) and converts the remaining points into
        execution counts.  Returns a dict with keys 'thumb', 'help',
        'view', 'exam' and 'study'.
        """
        excuteTimes = {}
        credInfo = self.getCredItinfo()
        print(credInfo)
        currentScore = credInfo[0]  # total points earned today (unused below)
        # Thumb/comment runs: 1 point each, 10 points max per day.
        thumbScore = credInfo[1]['信息评论'].split('/')[0]
        thumbExcuteTimes = 10 - int(thumbScore)
        excuteTimes.update({'thumb': thumbExcuteTimes})
        # Help-plaza answers: 1 point each, 2 points max.
        helpScore = credInfo[1]['互助广场回答'].split('/')[0]
        helpExctuteTimes = 2 - int(helpScore)
        excuteTimes.update({'help': helpExctuteTimes})
        # Viewpoint posts: 2 points each, 4 points max.
        viewScore = credInfo[1]['党员视角发布'].split('/')[0]
        viewExcuteTimes = int((4 - int(viewScore)) / 2)
        excuteTimes.update({'view': viewExcuteTimes})
        # Online quizzes: 2 points each, 4 points max.
        examScore = credInfo[1]['在线知识竞答'].split('/')[0]
        examExcuteTimes = int((4 - int(examScore)) / 2)
        excuteTimes.update({'exam': examExcuteTimes})
        # One study run is needed while either the reading or the
        # reflection points are below their daily maximum.
        flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]['在线阅读学习资料'].split('/')[0])
        flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]['学习资料写体会'].split('/')[0])
        # NOTE(review): examExcuteTimes is reused here for the study count;
        # harmless, but a separate name would be clearer.
        examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0
        excuteTimes.update({'study': examExcuteTimes})
        return excuteTimes
def getToken(self):
'''
获得一个连接的token
每个连接都需要使用到
:return:
'''
data = {
'appid': self.appid,
'username': self.username,
'password': self.password,
}
longinurl = 'https://mapi.dangjianwang.com/v3_1/login'
r = self.session.post(url=longinurl, data=data, verify=False)
rjson = r.json()
# print(type(rjson))
# print(rjson)
if rjson['code'] == '200':
return rjson['token']
else:
print('token 获得失败')
return None
def getRJson(self, url):
data={
'token': self.token,
'appid': self.appid
}
return self.session.post(url=url, data=data, verify=False).json()
def getUserInfo(self):
'''
获得一大串用户的信息,暂时没用
:return:
'''
infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'
return self.getRJson(url=infoUrl)
def getCredItinfoToday(self):
'''
获得人员当前的得分等级参数
:return:
'''
creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'
info = self.getRJson(url=creditInfourl)
fullScore = info['data']['full']
gainScore = info['data']['gain']
currentLevel = info['data']['level']
username = info['data']['name']
ret = {
'fullScore': fullScore,
'gainScore': gainScore,
'currentLevel': currentLevel,
'username': username,
}
return ret
def getCredItinfo(self):
'''
获得用户的今日积分状态
可用来判断是否需要再继续流程
数据如下
('35', [('连续登录', '3/3'), ('手机端登录', '2/2'), ('信息评论', '10/10'), ('党员视角发布', '4/4'), ('互助广场回答', '2/2'), ('学习资料写体会', '5/5'), ('在线阅读学习资料', '5/5'), ('在线知识竞答', '4/4')])
:return:(haved_credit, credit_detail)
'''
creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'
haved_credit = 0
credit_detail = {}
info = self.getRJson(url=creditInfourl)
for k, v in info.items():
if k == 'data':
for k2, v2 in v.items():
if k2 == 'haved_credit':
haved_credit = v2
if k2 == 'credit_detail':
for i in v2:
credit_detail.update({i['title']: i['score']})
return (haved_credit, credit_detail)
def getPages(self, urls):
pages = []
for url in urls:
data = self.getRJson(url=url)
for k, v in data.items():
if k == 'data':
for i in v:
# pages.append({'pageId': i['id'], 'pageTitle': i['title']})
# pages.append(i['id'])
pages.append((i['title'], i['id']))
return pages
def getPagesII(self, urls):
def getRJson(url):
data = {
'token': self.token,
'appid': self.appid,
'type_id': '791',
'page_index': '1',
}
return self.session.post(url=url, data=data, verify=False).json()
pages = []
for url in urls:
data = getRJson(url=url)
for k, v in data.items():
# print(k, v)
if k == 'data':
for i in v:
# pages.append({'pageId': i['id'], 'pageTitle': i['title']})
# pages.append(i['id'])
pages.append((i['name'], i['id']))
return pages
    def doThumb(self, id):
        """Post a canned comment ("thumb") on the page with this id.

        On success returns (detail, thumbInfo) where detail is a printable
        summary and thumbInfo is {'title', 'reply'}; on all other paths it
        falls through to the logging tail and returns None.  Code '1003'
        means the token expired; it is refreshed for the next attempt.
        """
        # Canned replies — runtime strings sent to the server as-is.
        contents = [
            '关注',
            '关注!',
            '关注!!']
        data = {
            'id': id,
            'comment': random.choice(contents),
            'token': self.token,
            'appid': self.appid,
        }
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'
        rjson = self.session.post(url=commitUrl,
                                  data=data,
                                  verify=False).json()
        print(rjson)
        if rjson['code'] == '1003':
            # Token expired: refresh; the caller is expected to retry later.
            self.token = self.getToken()
        elif rjson['code'] == '200':
            result = rjson['msg']
            if result == '操作成功':
                self.thumbedPages.append(id)
                # print(self.thumbPageList)
                # print(len(self.thumbPageList), len(list(set(self.thumbPageList))))
                for i in list(set(self.thumbPageList)):
                    if id == i[1]:
                        temp = {'title': i[0]}
                        self.thumbResults.update(temp)
                        log = '信息点赞:\n主题: {}\n提交:{}'.format(i[0], data['comment'])
                        detail = '{} 主题:{}\n回复:{}\n'.format(self.getCurrentTime(), i[0], data['comment'])
                        # write2File comes from dangjiansite.djfuncs (star import).
                        write2File(self, './results/', 'result.txt', log)
                        thumbInfo = {'title': i[0], 'reply': data['comment']}
                        self.thumbPages.remove(id)
                        self.writeThumb2File(id=id)
                        return (detail, thumbInfo)
        elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':
            # Server-side rate limit ("commenting too fast"): back off.
            print('因评论过快,等待一段时间')
            time.sleep(20)
        else:
            print('rjson', rjson)
            # self.multiThumbed.append(id)
            # NOTE(review): id is only appended to thumbedPages on the
            # success path, so this remove() raises ValueError when the id
            # is absent — confirm whether self.thumbPages was meant instead.
            self.thumbedPages.remove(id)# drop on failure too, otherwise it keeps being selected
            self.writeThumb2File(id=id)
        log = '点赞:{}'.format(rjson)
        self.writeLog2File(log)
        print(log)
        time.sleep(10)
    def doHelp(self, id, callback=None):
        """Answer the help-plaza question with this id using a canned reply.

        Returns (detail, log, helpInfo).  detail/helpInfo keep their empty
        defaults unless the API reports success AND the id is found in
        self.helpPageList; log is always the generic trailing summary.
        The callback parameter is currently unused.
        """
        detail = ''
        helpInfo = None
        log = ''
        # Canned answers — runtime strings sent to the server as-is.
        content = [
            '把党的政治建设摆在首位!',
            '不忘初心,牢记使命!',
            '发展史第一要务,人才是第一资源,创新是第一动力。',
            '要把党的领导贯彻到依法治国全过程和各方面',
            '毫不动摇坚持中国共产党领导',]
        data = {
            'id': id,
            'content': random.choice(content),
            'token': self.token,
            'appid': self.appid,
        }
        print(data)
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'
        rjson = self.session.post(url=commitUrl,
                                  data=data,
                                  verify=False).json()
        if rjson['code'] == '200':
            result = rjson['msg']
            if result == '操作成功':
                self.helpedPages.append(id)
                self.helpPages.remove(id)
                # Record the success into helpResults / the results file.
                for i in self.helpPageList:
                    if id == i[1]:
                        curTime = self.getCurrentTime()
                        # print('('*88)
                        # print(curTime)
                        # NOTE(review): id[0] is the first character of the
                        # id string; i[0] (the page title) was probably
                        # intended — confirm.
                        self.helpResults.update({'title': id[0]})
                        # NOTE(review): assumes the API echoes the comment
                        # back as rjson['comment']; a missing key raises
                        # KeyError here — confirm against a live response.
                        log = '互助:\n主题: {}\n提交内容: {}'.format(i[0], rjson['comment'])
                        write2File(self, './results/', 'result.txt', log)
                        # # write to the database
                        detail = '{} 主题: {}\n提交内容: {}\n'.format(curTime, i[0], rjson['comment'].strip())
                        helpInfo = {'title': i[0], 'reply': rjson['comment']}
            else:
                pass
        else:
            pass
        log = '帮助:{}'.format(rjson)
        self.writeLog2File(log)
        print(log)
        return (detail, log, helpInfo)
    def doView(self):
        """Publish a canned "party member viewpoint" post.

        Returns (detail, publicContent) built from the API echo.
        NOTE(review): rjson['data']['content'] is read unconditionally
        below, so a non-200 response without a 'data' payload raises
        KeyError — confirm the API always echoes the content.
        """
        # Canned post bodies — runtime strings sent to the server as-is.
        content = [
            '全面的小康,覆盖的人口要全面,是惠及全体人民的小康。',
            '不忘初心,牢记使命,坚持终身学习!']
        data = {
            'content': random.choice(content),
            'token': self.token,
            'appid': self.appid,
        }
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'
        rjson = self.session.post(url=commitUrl,
                                  data=data,
                                  verify=False).json()
        if rjson['code'] == '200':
            result = rjson['msg']
            if result == '操作成功':
                # Only a success marker is kept; the posted id is not recorded.
                self.viewsResults.append(1)
                # self.viewsResults.append(id)
            else:
                pass
        log = '党员视角:{}'.format(rjson)
        detail = '{} 党员视角:\n发布内容:{}\n'.format(self.getCurrentTime(), rjson['data']['content'])
        publicContent = rjson['data']['content']
        # print(detail)
        # self.writeLog2File(log)
        # print('党员视角'*12)
        # print(id)
        # print(log)
        # print('党员视角' * 12)
        return (detail, publicContent)
    def doStudy(self, mid):
        """Simulate reading study material *mid* and submit a reflection.

        Flow: three bookkeeping POSTs (post1-post3), a GET that scrapes a
        paragraph from the material page to use as the reflection text, a
        ~5-minute wait so the server-side read timer is satisfied, then the
        RecordFeeling and ReadTime submissions.  The accepted reflection is
        merged into self.studyRsults.
        """
        interval = 60 * 5 + 5  # seconds the material must appear to be open
        def post1():
            # Check the collection status of this material (bookkeeping call).
            data = {
                'mid': mid,
                'token': self.token,
                'appid': self.appid,
            }
            commitUrl = 'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus'
            rjson = self.session.post(url=commitUrl,
                                      data=data,
                                      verify=False).json()
            # print(rjson)
            log = '学习post1:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        def post2():
            # Validate the session token (bookkeeping call).
            data = {
                'token': self.token,
                'appid': self.appid,
            }
            commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'
            rjson = self.session.post(url=commitUrl,
                                      data=data,
                                      verify=False).json()
            # print(rjson)
            log = '学习post2:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        def post3():
            # Query how many "feelings" this material has (bookkeeping call).
            data = {
                'mid': mid,
                'token': self.token,
                'appid': self.appid,
            }
            commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum'
            rjson = self.session.post(url=commitUrl,
                                      data=data,
                                      verify=False).json()
            # print(rjson)
            log = '学习post3:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        def get1():
            # Scrape the material page and return one random <p> paragraph
            # (capped at 200 chars) to submit as the reflection text.
            url = 'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'.format(self.token, mid)
            rjson = self.session.get(url=url)
            text = rjson.content
            soup = BeautifulSoup(text, 'html.parser')
            retContents = []
            for div in soup.find_all('p'):
                p = div.text.strip()
                retContents.append(p if 100 > len(p) < 200 else p[0:200])
            # NOTE(review): raises IndexError when the page has no <p> tags
            # (random.choice on an empty list) — confirm pages always have them.
            return random.choice(retContents)
        def recordFeeling(content=None):
            # Submit the reflection.  On code '1120' (content rejected as a
            # duplicate) the call recurses with an extra random sentence
            # appended until the server accepts it.  Returns
            # {'content': ...} on success, None on any other error code.
            if not content:
                content = '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,' \
                          '是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。' \
                          '邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'
            data = {
                'mid': mid,
                'token': self.token,
                'appid': self.appid,
                'content': content
            }
            commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling'
            rjson = self.session.post(url=commitUrl,
                                      data=data,
                                      verify=False).json()
            # print(rjson)
            log = '学习recordFeeling:{}'.format(rjson)
            self.writeLog2File(log)
            print('in recordFeeling')
            print(log)
            if rjson['code'] == '200':
                return {'content': content}
            elif rjson['code'] == '1120':
                addtion = [
                    '我们必须坚定不移,任何时候任何情况下都不能动摇',
                    '人民有信心,国家才有未来,国家才有力量。',
                    '新时代,属于自强不息、勇于创造的奋斗者。',
                    '民主政治建设有序推进,依法治市迈出新步伐。',
                    '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。',
                ]
                return recordFeeling(content= '{}\n{}'.format(content, random.choice(addtion)))
            else:
                return None
        # record the submitted reflection
        def readTime():
            # Report the accumulated reading time so the server credits the read.
            data = {
                'mid': mid,
                'token': self.token,
                'appid': self.appid,
                'time': interval,
            }
            commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'
            rjson = self.session.post(url=commitUrl,
                                      data=data,
                                      verify=False).json()
            # print(rjson)
            log = '学习readTime:{}'.format(rjson)
            # self.studyRsults.update({'学习readTime', rjson})
            self.writeLog2File(log)
            print(log)
        post1()
        time.sleep(1)
        post2()
        time.sleep(1)
        post3()
        time.sleep(1)
        content = get1()
        time.sleep(1)
        # time.sleep(interval)
        count = 0
        print('开始学习请稍后')
        # Tick once per second (instead of one long sleep) so progress can
        # be printed every 30 seconds.
        for i in range(interval):
            count += 1
            # print(i + 1)
            if count % 30 == 0:
                print('已用时{}秒'.format(count))
            time.sleep(1)
        # time.sleep(5)
        print('填写的学习体会', content)
        # NOTE(review): recordFeeling can return None on an unexpected error
        # code, which would make this update() raise TypeError — confirm.
        self.studyRsults.update(recordFeeling(content=content))
        time.sleep(1)
        readTime()
        time.sleep(1)
        pass
    def doExam(self):
        """Take one random online quiz and hand the paper in.

        Steps: fetch the quiz list (result unused), fetch the question-bank
        list (cached into self.examlist / self.examC19Info), request a
        random paper from bank '6', look each question's answer up in the
        local Qa table (Django ORM), then submit everything to
        exam/handpaper.
        """
        ids = []
        data = {
            'page': '1',
            'page_size': '20',
            'token': self.token,
            'appid': self.appid,
        }
        examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'
        rjson = self.session.post(url=examlistUrl,
                                  data=data,
                                  verify=False).json()
        # print(rjson)
        # for i in rjson['data']:
        #     print(i)
        time.sleep(0.3)
        #########################################################
        print('*' * 99)
        data = {
            'page': '1',
            'page_size': '20',
            'token': self.token,
            'appid': self.appid,
        }
        banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'
        rjson = self.session.post(url=banklistUrl,
                                  data=data,
                                  verify=False).json()
        # print(rjson)
        for i in rjson['data']:
            tem = (i['bank_name'], i['id'])
            self.examlist.append(tem)
            # Remember the 19th-Congress single-choice bank separately.
            if i['bank_name'] == '十九大报告100题(单选)':
            # if i['bank_num'] == '65':
                temp = {
                    'title': i['bank_name'],
                    'detail': i['detail'],
                    'id': i['id'],
                }
                self.examC19Info.append(temp)
        # print(self.examC19Info)
        # print(self.examlist)
        time.sleep(0.3)
        #########################################################
        print('*' * 99)
        # Request a randomly generated paper from bank '6'.
        data = {
            'bank': '6',
            'token': self.token,
            'appid': self.appid,
        }
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'
        rjson = self.session.post(url=commitUrl,
                                  data=data,
                                  verify=False).json()
        # print(rjson)
        aa = rjson['data']
        paper = aa['id']  # paper id, required when handing the answers in
        for i in aa['questions']:
            temp = {'id': i['id'], 'content': i['content']}
            ids.append(temp)
        #########################################################
        print('*' * 99)
        time.sleep(0.5)
        # Answer the questions and hand the paper in.
        answers = []
        # First look the answers up.
        for i in ids:
            # Answer comes from the local database (Django model Qa).
            # NOTE(review): [0] raises IndexError when no Qa row matches the
            # question text — confirm the table is complete.
            correctAnswer = Qa.objects.filter(question__contains=i['content'])[0]
            answerText = correctAnswer.answerText
            answer = correctAnswer.answer
            # Alternative: read the answer from a file instead.
            # answerText = getAnswer(i['content'])[2]
            # answer = getAnswer(i['content'])[1]
            temp = {'index': i['id'], 'answer': answer}
            qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}
            self.qaList.append(qa)
            print(qa, i['content'])
            answers.append(temp)
            time.sleep(1)
        hdata = {
            'token': self.token,
            'appid': self.appid,
            'paper': paper,
            'answers': json.dumps(answers),
            # 'answers': [{'answer': 'A', 'index': '639'}, {'answer': 'A', 'index': '639'}],
        }
        # print('hdata:', hdata)
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'
        rjson = self.session.post(url=commitUrl,
                                  data=hdata,
                                  verify=False).json()
        print(rjson)
        print(self.examlist)
        print(self.examC19Info)
        print(self.qaList)
def getAnswerInfo(self):
'''
获得答题的结果与正确率
:return:
'''
data = {
'token': self.token,
'appid': self.appid,
'page_size': '20',
'page_index': 'page_index',
}
commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'
rjson = self.session.post(url=commitUrl,
data=data,
verify=False).json()
print(rjson)
'''
https://mapi.dangjianwang.com/v3_1/exam/randexam 答题地址 主id是交卷的paper 这里要获取到questions里的id 等于回答问题中的index
appid TJZHDJ01
bank 6
token 5jTY47PbPZ0KdUprwmfJVfH4cX23tyDcV25XrEYkWVvElH3YjJpIb1JCDwq_
https://mapi.dangjianwang.com/v3_1/exam/handpaper 交卷的连接
appid TJZHDJ01
answers [{"index":"635","answer":"D"},{"index":"640","answer":"C"},{"index":"641","answer":"B"},{"index":"665","answer":"B"},{"index":"670","answer":"B"},{"index":"673","answer":"B"},{"index":"677","answer":"C"},{"index":"682","answer":"B"},{"index":"684","answer":"C"},{"index":"690","answer":"A"}]
token 5jTY47PbPZ0KdUprwmfJVfH4cX23tyDcV25XrEYkWVvElH3YjJpIb1JCDwq_
paper 4565894
https://mapi.dangjianwang.com/v3_1/exam/banklist 获得答题情况的连接
appid TJZHDJ01
page_size 20
token 5jTY47PbPZxXeRxlkzScAPWidyvssy3TBD5Y9UYiCQnMmCfa2pRNb1JCDwq_
page_index 1
--------------------------------------------------
https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList 学习的id列表
appid TJZHDJ01
page_size 20
type_id 791
token 5jTY47PbPZJbeh9ixjfOUvaoI3604SrSAz5Zokt3DAmfz3qIis4Yb1JCDwq_
page_index 1
下面是针对791id列表中的访问地址
https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus
post1:
appid TJZHDJ01
mid 9729
token 5jTY47PbPZoOKEUwlDCaAKWqICGwt3_OVzlVpk5yW1bMyS_M3J5Db1JCDwq_
post2:
https://mapi.dangjianwang.com/v3_1/Login/CheckToken
appid TJZHDJ01
token 5jTY47PbPZoOKEUwlDCaAKWqICGwt3_OVzlVpk5yW1bMyS_M3J5Db1JCDwq_
post3:
https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum
appid TJZHDJ01
mid 9729
token 5jTY47PbPZoOKEUwlDCaAKWqICGwt3_OVzlVpk5yW1bMyS_M3J5Db1JCDwq_
get1 https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={} 获得页面
post 发表体会
https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling
appid TJZHDJ01
content 伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。
mid 9729
token 5jTY47PbPckOdUlllmfOCaCvcy7ls3rSVmxRoE0gDg3EmyrYi5Ucb1JCDwq_
post 结束学习
https://mapi.dangjianwang.com/v3_1/Study/ReadTime
appid TJZHDJ01
time 362
mid 9729
token 5jTY47PbPckOdUlllmfOCaCvcy7ls3rSVmxRoE0gDg3EmyrYi5Ucb1JCDwq_
---------------------------------------
https://mapi.dangjianwang.com/v3_1/Help/List 这里获得帮助id
https://mapi.dangjianwang.com/v3_1/Help/PostComment 提交评论的地址
appid TJZHDJ01
content 不忘初心,牢记使命!
id 55984
token 5jTY47PbPcpZe0s1xDLKAqKoIimx6SnSVjcApB92DF3Nmy/djZ1Nb1JCDwq_
把党的政治建设摆在首位!
不忘初心,牢记使命!
-------------------------------
发布的内容
https://mapi.dangjianwang.com/v3_1/Viewpoint/Create
appid TJZHDJ01
content 不忘初心牢记使命
token 5jTY47PbPZ9deR5rkTXIB/b/fymw5HvbAj9R900gDArNnXqE1s9Kb1JCDwq_
不忘初心,牢记使命,坚持终身学习!
全面的小康,覆盖的人口要全面,是惠及全体人民的小康。
-----------------------------
点赞错误
{'msg': '重复评论过多,请您修改后重新提交。', 'code': '500'}
'''
|
normal
|
{
"blob_id": "55a26eb2625acb201677f5ff50fde809402c9b93",
"index": 2630,
"step-1": "<mask token>\n\n\nclass Runner:\n\n def __init__(self, appid='TJZHDJ01', username='', password=''):\n urllib3.disable_warnings()\n self.currentTime = datetime.datetime.now().strftime('%H:%M:%S')\n self.username = username\n self.password = password\n self.thumbedFilePath = './lib/'.format(username)\n self.logFilePath = './log/'.format(username)\n self.errFilePath = './err/'.format(username)\n self.thumbedFileList = []\n self.debug = True\n self.session = requests.session()\n self.appid = appid\n self.headers = {'User-Agent':\n 'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)'\n , 'header_version': '80', 'system': 'android', 'Connection':\n 'Keep-Alive', 'Host': 'mapi.dangjianwang.com'}\n self.token = self.getToken()\n time.sleep(0.1)\n self.thumbPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Learn/List',\n 'https://mapi.dangjianwang.com/v3_1/Activities/List',\n 'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'])\n self.thumbPages = [i[1] for i in self.thumbPageList]\n time.sleep(0.1)\n self.helpPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Help/List'])\n self.helpPages = [i[1] for i in self.helpPageList]\n self.helpResults = {}\n time.sleep(0.1)\n self.studyPageList = self.getPagesII(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])\n self.studyPages = [i[1] for i in self.studyPageList]\n time.sleep(0.1)\n self.studyRsults = {}\n self.thumbedPages = []\n self.thumbResults = {}\n self.helpedPages = []\n self.multiThumbed = []\n self.viewsResults = []\n self.examC19Info = []\n self.examlist = []\n self.qaList = []\n <mask token>\n\n def writeErr2File(self, err):\n path = self.logFilePath\n fullPath = '{}{}err.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, err))\n print('err已经写入{}'.format(fullPath))\n\n def writeLog2File(self, log):\n path = 
self.logFilePath\n fullPath = '{}{}logs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, log))\n print('log已经写入{}'.format(fullPath))\n <mask token>\n\n def getThumbFromFile(self):\n \"\"\"\n\n :return: 文件中id组成的列表\n \"\"\"\n path = self.thumbedFilePath\n inFileList = []\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(fullPath):\n return fullPath\n with open(fullPath, 'r') as f:\n inFileList.extend(list(set(f.readlines()[0].split(','))))\n with open(fullPath, 'w') as f1:\n f1.write(','.join(sorted(inFileList)))\n return inFileList\n\n def getExcuteTimes(self):\n \"\"\"\n 返回点赞等自动执行的次数的字典\n :return:\n \"\"\"\n excuteTimes = {}\n credInfo = self.getCredItinfo()\n print(credInfo)\n currentScore = credInfo[0]\n thumbScore = credInfo[1]['信息评论'].split('/')[0]\n thumbExcuteTimes = 10 - int(thumbScore)\n excuteTimes.update({'thumb': thumbExcuteTimes})\n helpScore = credInfo[1]['互助广场回答'].split('/')[0]\n helpExctuteTimes = 2 - int(helpScore)\n excuteTimes.update({'help': helpExctuteTimes})\n viewScore = credInfo[1]['党员视角发布'].split('/')[0]\n viewExcuteTimes = int((4 - int(viewScore)) / 2)\n excuteTimes.update({'view': viewExcuteTimes})\n examScore = credInfo[1]['在线知识竞答'].split('/')[0]\n examExcuteTimes = int((4 - int(examScore)) / 2)\n excuteTimes.update({'exam': examExcuteTimes})\n flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]\n ['在线阅读学习资料'].split('/')[0])\n flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]\n ['学习资料写体会'].split('/')[0])\n examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0\n excuteTimes.update({'study': examExcuteTimes})\n return excuteTimes\n\n def getToken(self):\n \"\"\"\n 获得一个连接的token\n 每个连接都需要使用到\n :return:\n \"\"\"\n data = {'appid': self.appid, 'username': self.username, 'password':\n self.password}\n longinurl = 'https://mapi.dangjianwang.com/v3_1/login'\n r = 
self.session.post(url=longinurl, data=data, verify=False)\n rjson = r.json()\n if rjson['code'] == '200':\n return rjson['token']\n else:\n print('token 获得失败')\n return None\n\n def getRJson(self, url):\n data = {'token': self.token, 'appid': self.appid}\n return self.session.post(url=url, data=data, verify=False).json()\n\n def getUserInfo(self):\n \"\"\"\n 获得一大串用户的信息,暂时没用\n :return:\n \"\"\"\n infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'\n return self.getRJson(url=infoUrl)\n\n def getCredItinfoToday(self):\n \"\"\"\n 获得人员当前的得分等级参数\n :return:\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n info = self.getRJson(url=creditInfourl)\n fullScore = info['data']['full']\n gainScore = info['data']['gain']\n currentLevel = info['data']['level']\n username = info['data']['name']\n ret = {'fullScore': fullScore, 'gainScore': gainScore,\n 'currentLevel': currentLevel, 'username': username}\n return ret\n <mask token>\n\n def getPages(self, urls):\n pages = []\n for url in urls:\n data = self.getRJson(url=url)\n for k, v in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['title'], i['id']))\n return pages\n\n def getPagesII(self, urls):\n\n def getRJson(url):\n data = {'token': self.token, 'appid': self.appid, 'type_id':\n '791', 'page_index': '1'}\n return self.session.post(url=url, data=data, verify=False).json()\n pages = []\n for url in urls:\n data = getRJson(url=url)\n for k, v in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['name'], i['id']))\n return pages\n\n def doThumb(self, id):\n \"\"\"\n 点赞函数,操作与id对应的页面\n 每次记录对应的信息到文件\n :return:\n \"\"\"\n contents = ['关注', '关注!', '关注!!']\n data = {'id': id, 'comment': random.choice(contents), 'token': self\n .token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n print(rjson)\n if rjson['code'] == '1003':\n self.token = 
self.getToken()\n elif rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.thumbedPages.append(id)\n for i in list(set(self.thumbPageList)):\n if id == i[1]:\n temp = {'title': i[0]}\n self.thumbResults.update(temp)\n log = '信息点赞:\\n主题: {}\\n提交:{}'.format(i[0], data[\n 'comment'])\n detail = '{} 主题:{}\\n回复:{}\\n'.format(self.\n getCurrentTime(), i[0], data['comment'])\n write2File(self, './results/', 'result.txt', log)\n thumbInfo = {'title': i[0], 'reply': data['comment']}\n self.thumbPages.remove(id)\n self.writeThumb2File(id=id)\n return detail, thumbInfo\n elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':\n print('因评论过快,等待一段时间')\n time.sleep(20)\n else:\n print('rjson', rjson)\n self.thumbedPages.remove(id)\n self.writeThumb2File(id=id)\n log = '点赞:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n time.sleep(10)\n\n def doHelp(self, id, callback=None):\n \"\"\"\n 互助功能\n :param id:\n :return:\n \"\"\"\n detail = ''\n helpInfo = None\n log = ''\n content = ['把党的政治建设摆在首位!', '不忘初心,牢记使命!', '发展史第一要务,人才是第一资源,创新是第一动力。',\n '要把党的领导贯彻到依法治国全过程和各方面', '毫不动摇坚持中国共产党领导']\n data = {'id': id, 'content': random.choice(content), 'token': self.\n token, 'appid': self.appid}\n print(data)\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.helpedPages.append(id)\n self.helpPages.remove(id)\n for i in self.helpPageList:\n if id == i[1]:\n curTime = self.getCurrentTime()\n self.helpResults.update({'title': id[0]})\n log = '互助:\\n主题: {}\\n提交内容: {}'.format(i[0], rjson[\n 'comment'])\n write2File(self, './results/', 'result.txt', log)\n detail = '{} 主题: {}\\n提交内容: {}\\n'.format(curTime, i[\n 0], rjson['comment'].strip())\n helpInfo = {'title': i[0], 'reply': rjson['comment']}\n else:\n pass\n else:\n pass\n log = '帮助:{}'.format(rjson)\n self.writeLog2File(log)\n 
print(log)\n return detail, log, helpInfo\n\n def doView(self):\n \"\"\"\n 党员视角发布功能\n\n :return:\n \"\"\"\n content = ['全面的小康,覆盖的人口要全面,是惠及全体人民的小康。', '不忘初心,牢记使命,坚持终身学习!']\n data = {'content': random.choice(content), 'token': self.token,\n 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.viewsResults.append(1)\n else:\n pass\n log = '党员视角:{}'.format(rjson)\n detail = '{} 党员视角:\\n发布内容:{}\\n'.format(self.getCurrentTime(), rjson[\n 'data']['content'])\n publicContent = rjson['data']['content']\n return detail, publicContent\n\n def doStudy(self, mid):\n \"\"\"\n 前三个post函数的响应的三个请求\n get用来获得填写的内容\n 最后一个post是学习完离开并检测时间的函数如果成功说明该次学习成功。\n :param mid:\n :return:\n \"\"\"\n interval = 60 * 5 + 5\n\n def post1():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post1:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post2():\n data = {'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post2:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post3():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post3:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def get1():\n url = (\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'\n .format(self.token, mid))\n rjson = self.session.get(url=url)\n text = rjson.content\n 
soup = BeautifulSoup(text, 'html.parser')\n retContents = []\n for div in soup.find_all('p'):\n p = div.text.strip()\n retContents.append(p if 100 > len(p) < 200 else p[0:200])\n return random.choice(retContents)\n\n def recordFeeling(content=None):\n if not content:\n content = (\n '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'\n )\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'content': content}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习recordFeeling:{}'.format(rjson)\n self.writeLog2File(log)\n print('in recordFeeling')\n print(log)\n if rjson['code'] == '200':\n return {'content': content}\n elif rjson['code'] == '1120':\n addtion = ['我们必须坚定不移,任何时候任何情况下都不能动摇',\n '人民有信心,国家才有未来,国家才有力量。', '新时代,属于自强不息、勇于创造的奋斗者。',\n '民主政治建设有序推进,依法治市迈出新步伐。', '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。']\n return recordFeeling(content='{}\\n{}'.format(content,\n random.choice(addtion)))\n else:\n return None\n\n def readTime():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'time': interval}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习readTime:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n post1()\n time.sleep(1)\n post2()\n time.sleep(1)\n post3()\n time.sleep(1)\n content = get1()\n time.sleep(1)\n count = 0\n print('开始学习请稍后')\n for i in range(interval):\n count += 1\n if count % 30 == 0:\n print('已用时{}秒'.format(count))\n time.sleep(1)\n print('填写的学习体会', content)\n self.studyRsults.update(recordFeeling(content=content))\n time.sleep(1)\n readTime()\n time.sleep(1)\n pass\n\n def doExam(self):\n \"\"\"\n\n :param self:\n :return:\n \"\"\"\n ids = []\n data = {'page': '1', 'page_size': '20', 'token': 
self.token,\n 'appid': self.appid}\n examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'\n rjson = self.session.post(url=examlistUrl, data=data, verify=False\n ).json()\n time.sleep(0.3)\n print('*' * 99)\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'\n rjson = self.session.post(url=banklistUrl, data=data, verify=False\n ).json()\n for i in rjson['data']:\n tem = i['bank_name'], i['id']\n self.examlist.append(tem)\n if i['bank_name'] == '十九大报告100题(单选)':\n temp = {'title': i['bank_name'], 'detail': i['detail'],\n 'id': i['id']}\n self.examC19Info.append(temp)\n time.sleep(0.3)\n print('*' * 99)\n data = {'bank': '6', 'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n aa = rjson['data']\n paper = aa['id']\n for i in aa['questions']:\n temp = {'id': i['id'], 'content': i['content']}\n ids.append(temp)\n print('*' * 99)\n time.sleep(0.5)\n answers = []\n for i in ids:\n correctAnswer = Qa.objects.filter(question__contains=i['content'])[\n 0]\n answerText = correctAnswer.answerText\n answer = correctAnswer.answer\n temp = {'index': i['id'], 'answer': answer}\n qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}\n self.qaList.append(qa)\n print(qa, i['content'])\n answers.append(temp)\n time.sleep(1)\n hdata = {'token': self.token, 'appid': self.appid, 'paper': paper,\n 'answers': json.dumps(answers)}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'\n rjson = self.session.post(url=commitUrl, data=hdata, verify=False\n ).json()\n print(rjson)\n print(self.examlist)\n print(self.examC19Info)\n print(self.qaList)\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Runner:\n\n def __init__(self, appid='TJZHDJ01', username='', password=''):\n urllib3.disable_warnings()\n self.currentTime = datetime.datetime.now().strftime('%H:%M:%S')\n self.username = username\n self.password = password\n self.thumbedFilePath = './lib/'.format(username)\n self.logFilePath = './log/'.format(username)\n self.errFilePath = './err/'.format(username)\n self.thumbedFileList = []\n self.debug = True\n self.session = requests.session()\n self.appid = appid\n self.headers = {'User-Agent':\n 'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)'\n , 'header_version': '80', 'system': 'android', 'Connection':\n 'Keep-Alive', 'Host': 'mapi.dangjianwang.com'}\n self.token = self.getToken()\n time.sleep(0.1)\n self.thumbPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Learn/List',\n 'https://mapi.dangjianwang.com/v3_1/Activities/List',\n 'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'])\n self.thumbPages = [i[1] for i in self.thumbPageList]\n time.sleep(0.1)\n self.helpPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Help/List'])\n self.helpPages = [i[1] for i in self.helpPageList]\n self.helpResults = {}\n time.sleep(0.1)\n self.studyPageList = self.getPagesII(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])\n self.studyPages = [i[1] for i in self.studyPageList]\n time.sleep(0.1)\n self.studyRsults = {}\n self.thumbedPages = []\n self.thumbResults = {}\n self.helpedPages = []\n self.multiThumbed = []\n self.viewsResults = []\n self.examC19Info = []\n self.examlist = []\n self.qaList = []\n <mask token>\n\n def writeErr2File(self, err):\n path = self.logFilePath\n fullPath = '{}{}err.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, err))\n print('err已经写入{}'.format(fullPath))\n\n def writeLog2File(self, log):\n path = 
self.logFilePath\n fullPath = '{}{}logs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, log))\n print('log已经写入{}'.format(fullPath))\n\n def writeThumb2File(self, id):\n path = self.thumbedFilePath\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write(',{}'.format(id))\n print('点赞记录已经写入{}'.format(fullPath))\n\n def getThumbFromFile(self):\n \"\"\"\n\n :return: 文件中id组成的列表\n \"\"\"\n path = self.thumbedFilePath\n inFileList = []\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(fullPath):\n return fullPath\n with open(fullPath, 'r') as f:\n inFileList.extend(list(set(f.readlines()[0].split(','))))\n with open(fullPath, 'w') as f1:\n f1.write(','.join(sorted(inFileList)))\n return inFileList\n\n def getExcuteTimes(self):\n \"\"\"\n 返回点赞等自动执行的次数的字典\n :return:\n \"\"\"\n excuteTimes = {}\n credInfo = self.getCredItinfo()\n print(credInfo)\n currentScore = credInfo[0]\n thumbScore = credInfo[1]['信息评论'].split('/')[0]\n thumbExcuteTimes = 10 - int(thumbScore)\n excuteTimes.update({'thumb': thumbExcuteTimes})\n helpScore = credInfo[1]['互助广场回答'].split('/')[0]\n helpExctuteTimes = 2 - int(helpScore)\n excuteTimes.update({'help': helpExctuteTimes})\n viewScore = credInfo[1]['党员视角发布'].split('/')[0]\n viewExcuteTimes = int((4 - int(viewScore)) / 2)\n excuteTimes.update({'view': viewExcuteTimes})\n examScore = credInfo[1]['在线知识竞答'].split('/')[0]\n examExcuteTimes = int((4 - int(examScore)) / 2)\n excuteTimes.update({'exam': examExcuteTimes})\n flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]\n ['在线阅读学习资料'].split('/')[0])\n flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]\n ['学习资料写体会'].split('/')[0])\n examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0\n excuteTimes.update({'study': examExcuteTimes})\n 
return excuteTimes\n\n def getToken(self):\n \"\"\"\n 获得一个连接的token\n 每个连接都需要使用到\n :return:\n \"\"\"\n data = {'appid': self.appid, 'username': self.username, 'password':\n self.password}\n longinurl = 'https://mapi.dangjianwang.com/v3_1/login'\n r = self.session.post(url=longinurl, data=data, verify=False)\n rjson = r.json()\n if rjson['code'] == '200':\n return rjson['token']\n else:\n print('token 获得失败')\n return None\n\n def getRJson(self, url):\n data = {'token': self.token, 'appid': self.appid}\n return self.session.post(url=url, data=data, verify=False).json()\n\n def getUserInfo(self):\n \"\"\"\n 获得一大串用户的信息,暂时没用\n :return:\n \"\"\"\n infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'\n return self.getRJson(url=infoUrl)\n\n def getCredItinfoToday(self):\n \"\"\"\n 获得人员当前的得分等级参数\n :return:\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n info = self.getRJson(url=creditInfourl)\n fullScore = info['data']['full']\n gainScore = info['data']['gain']\n currentLevel = info['data']['level']\n username = info['data']['name']\n ret = {'fullScore': fullScore, 'gainScore': gainScore,\n 'currentLevel': currentLevel, 'username': username}\n return ret\n\n def getCredItinfo(self):\n \"\"\"\n 获得用户的今日积分状态\n 可用来判断是否需要再继续流程\n 数据如下\n ('35', [('连续登录', '3/3'), ('手机端登录', '2/2'), ('信息评论', '10/10'), ('党员视角发布', '4/4'), ('互助广场回答', '2/2'), ('学习资料写体会', '5/5'), ('在线阅读学习资料', '5/5'), ('在线知识竞答', '4/4')])\n :return:(haved_credit, credit_detail)\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n haved_credit = 0\n credit_detail = {}\n info = self.getRJson(url=creditInfourl)\n for k, v in info.items():\n if k == 'data':\n for k2, v2 in v.items():\n if k2 == 'haved_credit':\n haved_credit = v2\n if k2 == 'credit_detail':\n for i in v2:\n credit_detail.update({i['title']: i['score']})\n return haved_credit, credit_detail\n\n def getPages(self, urls):\n pages = []\n for url in urls:\n data = self.getRJson(url=url)\n for k, v 
in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['title'], i['id']))\n return pages\n\n def getPagesII(self, urls):\n\n def getRJson(url):\n data = {'token': self.token, 'appid': self.appid, 'type_id':\n '791', 'page_index': '1'}\n return self.session.post(url=url, data=data, verify=False).json()\n pages = []\n for url in urls:\n data = getRJson(url=url)\n for k, v in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['name'], i['id']))\n return pages\n\n def doThumb(self, id):\n \"\"\"\n 点赞函数,操作与id对应的页面\n 每次记录对应的信息到文件\n :return:\n \"\"\"\n contents = ['关注', '关注!', '关注!!']\n data = {'id': id, 'comment': random.choice(contents), 'token': self\n .token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n print(rjson)\n if rjson['code'] == '1003':\n self.token = self.getToken()\n elif rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.thumbedPages.append(id)\n for i in list(set(self.thumbPageList)):\n if id == i[1]:\n temp = {'title': i[0]}\n self.thumbResults.update(temp)\n log = '信息点赞:\\n主题: {}\\n提交:{}'.format(i[0], data[\n 'comment'])\n detail = '{} 主题:{}\\n回复:{}\\n'.format(self.\n getCurrentTime(), i[0], data['comment'])\n write2File(self, './results/', 'result.txt', log)\n thumbInfo = {'title': i[0], 'reply': data['comment']}\n self.thumbPages.remove(id)\n self.writeThumb2File(id=id)\n return detail, thumbInfo\n elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':\n print('因评论过快,等待一段时间')\n time.sleep(20)\n else:\n print('rjson', rjson)\n self.thumbedPages.remove(id)\n self.writeThumb2File(id=id)\n log = '点赞:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n time.sleep(10)\n\n def doHelp(self, id, callback=None):\n \"\"\"\n 互助功能\n :param id:\n :return:\n \"\"\"\n detail = ''\n helpInfo = None\n log = ''\n content = ['把党的政治建设摆在首位!', '不忘初心,牢记使命!', 
'发展史第一要务,人才是第一资源,创新是第一动力。',\n '要把党的领导贯彻到依法治国全过程和各方面', '毫不动摇坚持中国共产党领导']\n data = {'id': id, 'content': random.choice(content), 'token': self.\n token, 'appid': self.appid}\n print(data)\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.helpedPages.append(id)\n self.helpPages.remove(id)\n for i in self.helpPageList:\n if id == i[1]:\n curTime = self.getCurrentTime()\n self.helpResults.update({'title': id[0]})\n log = '互助:\\n主题: {}\\n提交内容: {}'.format(i[0], rjson[\n 'comment'])\n write2File(self, './results/', 'result.txt', log)\n detail = '{} 主题: {}\\n提交内容: {}\\n'.format(curTime, i[\n 0], rjson['comment'].strip())\n helpInfo = {'title': i[0], 'reply': rjson['comment']}\n else:\n pass\n else:\n pass\n log = '帮助:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n return detail, log, helpInfo\n\n def doView(self):\n \"\"\"\n 党员视角发布功能\n\n :return:\n \"\"\"\n content = ['全面的小康,覆盖的人口要全面,是惠及全体人民的小康。', '不忘初心,牢记使命,坚持终身学习!']\n data = {'content': random.choice(content), 'token': self.token,\n 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.viewsResults.append(1)\n else:\n pass\n log = '党员视角:{}'.format(rjson)\n detail = '{} 党员视角:\\n发布内容:{}\\n'.format(self.getCurrentTime(), rjson[\n 'data']['content'])\n publicContent = rjson['data']['content']\n return detail, publicContent\n\n def doStudy(self, mid):\n \"\"\"\n 前三个post函数的响应的三个请求\n get用来获得填写的内容\n 最后一个post是学习完离开并检测时间的函数如果成功说明该次学习成功。\n :param mid:\n :return:\n \"\"\"\n interval = 60 * 5 + 5\n\n def post1():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus')\n 
rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post1:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post2():\n data = {'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post2:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post3():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post3:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def get1():\n url = (\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'\n .format(self.token, mid))\n rjson = self.session.get(url=url)\n text = rjson.content\n soup = BeautifulSoup(text, 'html.parser')\n retContents = []\n for div in soup.find_all('p'):\n p = div.text.strip()\n retContents.append(p if 100 > len(p) < 200 else p[0:200])\n return random.choice(retContents)\n\n def recordFeeling(content=None):\n if not content:\n content = (\n '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'\n )\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'content': content}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习recordFeeling:{}'.format(rjson)\n self.writeLog2File(log)\n print('in recordFeeling')\n print(log)\n if rjson['code'] == '200':\n return {'content': content}\n elif rjson['code'] == '1120':\n addtion = ['我们必须坚定不移,任何时候任何情况下都不能动摇',\n '人民有信心,国家才有未来,国家才有力量。', '新时代,属于自强不息、勇于创造的奋斗者。',\n '民主政治建设有序推进,依法治市迈出新步伐。', '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。']\n return 
recordFeeling(content='{}\\n{}'.format(content,\n random.choice(addtion)))\n else:\n return None\n\n def readTime():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'time': interval}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习readTime:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n post1()\n time.sleep(1)\n post2()\n time.sleep(1)\n post3()\n time.sleep(1)\n content = get1()\n time.sleep(1)\n count = 0\n print('开始学习请稍后')\n for i in range(interval):\n count += 1\n if count % 30 == 0:\n print('已用时{}秒'.format(count))\n time.sleep(1)\n print('填写的学习体会', content)\n self.studyRsults.update(recordFeeling(content=content))\n time.sleep(1)\n readTime()\n time.sleep(1)\n pass\n\n def doExam(self):\n \"\"\"\n\n :param self:\n :return:\n \"\"\"\n ids = []\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'\n rjson = self.session.post(url=examlistUrl, data=data, verify=False\n ).json()\n time.sleep(0.3)\n print('*' * 99)\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'\n rjson = self.session.post(url=banklistUrl, data=data, verify=False\n ).json()\n for i in rjson['data']:\n tem = i['bank_name'], i['id']\n self.examlist.append(tem)\n if i['bank_name'] == '十九大报告100题(单选)':\n temp = {'title': i['bank_name'], 'detail': i['detail'],\n 'id': i['id']}\n self.examC19Info.append(temp)\n time.sleep(0.3)\n print('*' * 99)\n data = {'bank': '6', 'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n aa = rjson['data']\n paper = aa['id']\n for i in aa['questions']:\n temp = {'id': i['id'], 'content': i['content']}\n 
ids.append(temp)\n print('*' * 99)\n time.sleep(0.5)\n answers = []\n for i in ids:\n correctAnswer = Qa.objects.filter(question__contains=i['content'])[\n 0]\n answerText = correctAnswer.answerText\n answer = correctAnswer.answer\n temp = {'index': i['id'], 'answer': answer}\n qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}\n self.qaList.append(qa)\n print(qa, i['content'])\n answers.append(temp)\n time.sleep(1)\n hdata = {'token': self.token, 'appid': self.appid, 'paper': paper,\n 'answers': json.dumps(answers)}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'\n rjson = self.session.post(url=commitUrl, data=hdata, verify=False\n ).json()\n print(rjson)\n print(self.examlist)\n print(self.examC19Info)\n print(self.qaList)\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Runner:\n\n def __init__(self, appid='TJZHDJ01', username='', password=''):\n urllib3.disable_warnings()\n self.currentTime = datetime.datetime.now().strftime('%H:%M:%S')\n self.username = username\n self.password = password\n self.thumbedFilePath = './lib/'.format(username)\n self.logFilePath = './log/'.format(username)\n self.errFilePath = './err/'.format(username)\n self.thumbedFileList = []\n self.debug = True\n self.session = requests.session()\n self.appid = appid\n self.headers = {'User-Agent':\n 'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)'\n , 'header_version': '80', 'system': 'android', 'Connection':\n 'Keep-Alive', 'Host': 'mapi.dangjianwang.com'}\n self.token = self.getToken()\n time.sleep(0.1)\n self.thumbPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Learn/List',\n 'https://mapi.dangjianwang.com/v3_1/Activities/List',\n 'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'])\n self.thumbPages = [i[1] for i in self.thumbPageList]\n time.sleep(0.1)\n self.helpPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Help/List'])\n self.helpPages = [i[1] for i in self.helpPageList]\n self.helpResults = {}\n time.sleep(0.1)\n self.studyPageList = self.getPagesII(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])\n self.studyPages = [i[1] for i in self.studyPageList]\n time.sleep(0.1)\n self.studyRsults = {}\n self.thumbedPages = []\n self.thumbResults = {}\n self.helpedPages = []\n self.multiThumbed = []\n self.viewsResults = []\n self.examC19Info = []\n self.examlist = []\n self.qaList = []\n <mask token>\n\n def writeErr2File(self, err):\n path = self.logFilePath\n fullPath = '{}{}err.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, err))\n print('err已经写入{}'.format(fullPath))\n\n def writeLog2File(self, log):\n path = 
self.logFilePath\n fullPath = '{}{}logs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, log))\n print('log已经写入{}'.format(fullPath))\n\n def writeThumb2File(self, id):\n path = self.thumbedFilePath\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write(',{}'.format(id))\n print('点赞记录已经写入{}'.format(fullPath))\n\n def getThumbFromFile(self):\n \"\"\"\n\n :return: 文件中id组成的列表\n \"\"\"\n path = self.thumbedFilePath\n inFileList = []\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(fullPath):\n return fullPath\n with open(fullPath, 'r') as f:\n inFileList.extend(list(set(f.readlines()[0].split(','))))\n with open(fullPath, 'w') as f1:\n f1.write(','.join(sorted(inFileList)))\n return inFileList\n\n def getExcuteTimes(self):\n \"\"\"\n 返回点赞等自动执行的次数的字典\n :return:\n \"\"\"\n excuteTimes = {}\n credInfo = self.getCredItinfo()\n print(credInfo)\n currentScore = credInfo[0]\n thumbScore = credInfo[1]['信息评论'].split('/')[0]\n thumbExcuteTimes = 10 - int(thumbScore)\n excuteTimes.update({'thumb': thumbExcuteTimes})\n helpScore = credInfo[1]['互助广场回答'].split('/')[0]\n helpExctuteTimes = 2 - int(helpScore)\n excuteTimes.update({'help': helpExctuteTimes})\n viewScore = credInfo[1]['党员视角发布'].split('/')[0]\n viewExcuteTimes = int((4 - int(viewScore)) / 2)\n excuteTimes.update({'view': viewExcuteTimes})\n examScore = credInfo[1]['在线知识竞答'].split('/')[0]\n examExcuteTimes = int((4 - int(examScore)) / 2)\n excuteTimes.update({'exam': examExcuteTimes})\n flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]\n ['在线阅读学习资料'].split('/')[0])\n flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]\n ['学习资料写体会'].split('/')[0])\n examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0\n excuteTimes.update({'study': examExcuteTimes})\n 
return excuteTimes\n\n def getToken(self):\n \"\"\"\n 获得一个连接的token\n 每个连接都需要使用到\n :return:\n \"\"\"\n data = {'appid': self.appid, 'username': self.username, 'password':\n self.password}\n longinurl = 'https://mapi.dangjianwang.com/v3_1/login'\n r = self.session.post(url=longinurl, data=data, verify=False)\n rjson = r.json()\n if rjson['code'] == '200':\n return rjson['token']\n else:\n print('token 获得失败')\n return None\n\n def getRJson(self, url):\n data = {'token': self.token, 'appid': self.appid}\n return self.session.post(url=url, data=data, verify=False).json()\n\n def getUserInfo(self):\n \"\"\"\n 获得一大串用户的信息,暂时没用\n :return:\n \"\"\"\n infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'\n return self.getRJson(url=infoUrl)\n\n def getCredItinfoToday(self):\n \"\"\"\n 获得人员当前的得分等级参数\n :return:\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n info = self.getRJson(url=creditInfourl)\n fullScore = info['data']['full']\n gainScore = info['data']['gain']\n currentLevel = info['data']['level']\n username = info['data']['name']\n ret = {'fullScore': fullScore, 'gainScore': gainScore,\n 'currentLevel': currentLevel, 'username': username}\n return ret\n\n def getCredItinfo(self):\n \"\"\"\n 获得用户的今日积分状态\n 可用来判断是否需要再继续流程\n 数据如下\n ('35', [('连续登录', '3/3'), ('手机端登录', '2/2'), ('信息评论', '10/10'), ('党员视角发布', '4/4'), ('互助广场回答', '2/2'), ('学习资料写体会', '5/5'), ('在线阅读学习资料', '5/5'), ('在线知识竞答', '4/4')])\n :return:(haved_credit, credit_detail)\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n haved_credit = 0\n credit_detail = {}\n info = self.getRJson(url=creditInfourl)\n for k, v in info.items():\n if k == 'data':\n for k2, v2 in v.items():\n if k2 == 'haved_credit':\n haved_credit = v2\n if k2 == 'credit_detail':\n for i in v2:\n credit_detail.update({i['title']: i['score']})\n return haved_credit, credit_detail\n\n def getPages(self, urls):\n pages = []\n for url in urls:\n data = self.getRJson(url=url)\n for k, v 
in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['title'], i['id']))\n return pages\n\n def getPagesII(self, urls):\n\n def getRJson(url):\n data = {'token': self.token, 'appid': self.appid, 'type_id':\n '791', 'page_index': '1'}\n return self.session.post(url=url, data=data, verify=False).json()\n pages = []\n for url in urls:\n data = getRJson(url=url)\n for k, v in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['name'], i['id']))\n return pages\n\n def doThumb(self, id):\n \"\"\"\n 点赞函数,操作与id对应的页面\n 每次记录对应的信息到文件\n :return:\n \"\"\"\n contents = ['关注', '关注!', '关注!!']\n data = {'id': id, 'comment': random.choice(contents), 'token': self\n .token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n print(rjson)\n if rjson['code'] == '1003':\n self.token = self.getToken()\n elif rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.thumbedPages.append(id)\n for i in list(set(self.thumbPageList)):\n if id == i[1]:\n temp = {'title': i[0]}\n self.thumbResults.update(temp)\n log = '信息点赞:\\n主题: {}\\n提交:{}'.format(i[0], data[\n 'comment'])\n detail = '{} 主题:{}\\n回复:{}\\n'.format(self.\n getCurrentTime(), i[0], data['comment'])\n write2File(self, './results/', 'result.txt', log)\n thumbInfo = {'title': i[0], 'reply': data['comment']}\n self.thumbPages.remove(id)\n self.writeThumb2File(id=id)\n return detail, thumbInfo\n elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':\n print('因评论过快,等待一段时间')\n time.sleep(20)\n else:\n print('rjson', rjson)\n self.thumbedPages.remove(id)\n self.writeThumb2File(id=id)\n log = '点赞:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n time.sleep(10)\n\n def doHelp(self, id, callback=None):\n \"\"\"\n 互助功能\n :param id:\n :return:\n \"\"\"\n detail = ''\n helpInfo = None\n log = ''\n content = ['把党的政治建设摆在首位!', '不忘初心,牢记使命!', 
'发展史第一要务,人才是第一资源,创新是第一动力。',\n '要把党的领导贯彻到依法治国全过程和各方面', '毫不动摇坚持中国共产党领导']\n data = {'id': id, 'content': random.choice(content), 'token': self.\n token, 'appid': self.appid}\n print(data)\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.helpedPages.append(id)\n self.helpPages.remove(id)\n for i in self.helpPageList:\n if id == i[1]:\n curTime = self.getCurrentTime()\n self.helpResults.update({'title': id[0]})\n log = '互助:\\n主题: {}\\n提交内容: {}'.format(i[0], rjson[\n 'comment'])\n write2File(self, './results/', 'result.txt', log)\n detail = '{} 主题: {}\\n提交内容: {}\\n'.format(curTime, i[\n 0], rjson['comment'].strip())\n helpInfo = {'title': i[0], 'reply': rjson['comment']}\n else:\n pass\n else:\n pass\n log = '帮助:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n return detail, log, helpInfo\n\n def doView(self):\n \"\"\"\n 党员视角发布功能\n\n :return:\n \"\"\"\n content = ['全面的小康,覆盖的人口要全面,是惠及全体人民的小康。', '不忘初心,牢记使命,坚持终身学习!']\n data = {'content': random.choice(content), 'token': self.token,\n 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.viewsResults.append(1)\n else:\n pass\n log = '党员视角:{}'.format(rjson)\n detail = '{} 党员视角:\\n发布内容:{}\\n'.format(self.getCurrentTime(), rjson[\n 'data']['content'])\n publicContent = rjson['data']['content']\n return detail, publicContent\n\n def doStudy(self, mid):\n \"\"\"\n 前三个post函数的响应的三个请求\n get用来获得填写的内容\n 最后一个post是学习完离开并检测时间的函数如果成功说明该次学习成功。\n :param mid:\n :return:\n \"\"\"\n interval = 60 * 5 + 5\n\n def post1():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus')\n 
rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post1:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post2():\n data = {'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post2:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post3():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post3:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def get1():\n url = (\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'\n .format(self.token, mid))\n rjson = self.session.get(url=url)\n text = rjson.content\n soup = BeautifulSoup(text, 'html.parser')\n retContents = []\n for div in soup.find_all('p'):\n p = div.text.strip()\n retContents.append(p if 100 > len(p) < 200 else p[0:200])\n return random.choice(retContents)\n\n def recordFeeling(content=None):\n if not content:\n content = (\n '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'\n )\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'content': content}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习recordFeeling:{}'.format(rjson)\n self.writeLog2File(log)\n print('in recordFeeling')\n print(log)\n if rjson['code'] == '200':\n return {'content': content}\n elif rjson['code'] == '1120':\n addtion = ['我们必须坚定不移,任何时候任何情况下都不能动摇',\n '人民有信心,国家才有未来,国家才有力量。', '新时代,属于自强不息、勇于创造的奋斗者。',\n '民主政治建设有序推进,依法治市迈出新步伐。', '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。']\n return 
recordFeeling(content='{}\\n{}'.format(content,\n random.choice(addtion)))\n else:\n return None\n\n def readTime():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'time': interval}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习readTime:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n post1()\n time.sleep(1)\n post2()\n time.sleep(1)\n post3()\n time.sleep(1)\n content = get1()\n time.sleep(1)\n count = 0\n print('开始学习请稍后')\n for i in range(interval):\n count += 1\n if count % 30 == 0:\n print('已用时{}秒'.format(count))\n time.sleep(1)\n print('填写的学习体会', content)\n self.studyRsults.update(recordFeeling(content=content))\n time.sleep(1)\n readTime()\n time.sleep(1)\n pass\n\n def doExam(self):\n \"\"\"\n\n :param self:\n :return:\n \"\"\"\n ids = []\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'\n rjson = self.session.post(url=examlistUrl, data=data, verify=False\n ).json()\n time.sleep(0.3)\n print('*' * 99)\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'\n rjson = self.session.post(url=banklistUrl, data=data, verify=False\n ).json()\n for i in rjson['data']:\n tem = i['bank_name'], i['id']\n self.examlist.append(tem)\n if i['bank_name'] == '十九大报告100题(单选)':\n temp = {'title': i['bank_name'], 'detail': i['detail'],\n 'id': i['id']}\n self.examC19Info.append(temp)\n time.sleep(0.3)\n print('*' * 99)\n data = {'bank': '6', 'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n aa = rjson['data']\n paper = aa['id']\n for i in aa['questions']:\n temp = {'id': i['id'], 'content': i['content']}\n 
ids.append(temp)\n print('*' * 99)\n time.sleep(0.5)\n answers = []\n for i in ids:\n correctAnswer = Qa.objects.filter(question__contains=i['content'])[\n 0]\n answerText = correctAnswer.answerText\n answer = correctAnswer.answer\n temp = {'index': i['id'], 'answer': answer}\n qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}\n self.qaList.append(qa)\n print(qa, i['content'])\n answers.append(temp)\n time.sleep(1)\n hdata = {'token': self.token, 'appid': self.appid, 'paper': paper,\n 'answers': json.dumps(answers)}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'\n rjson = self.session.post(url=commitUrl, data=hdata, verify=False\n ).json()\n print(rjson)\n print(self.examlist)\n print(self.examC19Info)\n print(self.qaList)\n\n def getAnswerInfo(self):\n \"\"\"\n 获得答题的结果与正确率\n :return:\n \"\"\"\n data = {'token': self.token, 'appid': self.appid, 'page_size': '20',\n 'page_index': 'page_index'}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n print(rjson)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Runner:\n\n def __init__(self, appid='TJZHDJ01', username='', password=''):\n urllib3.disable_warnings()\n self.currentTime = datetime.datetime.now().strftime('%H:%M:%S')\n self.username = username\n self.password = password\n self.thumbedFilePath = './lib/'.format(username)\n self.logFilePath = './log/'.format(username)\n self.errFilePath = './err/'.format(username)\n self.thumbedFileList = []\n self.debug = True\n self.session = requests.session()\n self.appid = appid\n self.headers = {'User-Agent':\n 'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)'\n , 'header_version': '80', 'system': 'android', 'Connection':\n 'Keep-Alive', 'Host': 'mapi.dangjianwang.com'}\n self.token = self.getToken()\n time.sleep(0.1)\n self.thumbPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Learn/List',\n 'https://mapi.dangjianwang.com/v3_1/Activities/List',\n 'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'])\n self.thumbPages = [i[1] for i in self.thumbPageList]\n time.sleep(0.1)\n self.helpPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Help/List'])\n self.helpPages = [i[1] for i in self.helpPageList]\n self.helpResults = {}\n time.sleep(0.1)\n self.studyPageList = self.getPagesII(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])\n self.studyPages = [i[1] for i in self.studyPageList]\n time.sleep(0.1)\n self.studyRsults = {}\n self.thumbedPages = []\n self.thumbResults = {}\n self.helpedPages = []\n self.multiThumbed = []\n self.viewsResults = []\n self.examC19Info = []\n self.examlist = []\n self.qaList = []\n\n def getCurrentTime(self):\n return datetime.datetime.now().strftime('%H:%M:%S')\n\n def writeErr2File(self, err):\n path = self.logFilePath\n fullPath = '{}{}err.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, err))\n 
print('err已经写入{}'.format(fullPath))\n\n def writeLog2File(self, log):\n path = self.logFilePath\n fullPath = '{}{}logs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, log))\n print('log已经写入{}'.format(fullPath))\n\n def writeThumb2File(self, id):\n path = self.thumbedFilePath\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write(',{}'.format(id))\n print('点赞记录已经写入{}'.format(fullPath))\n\n def getThumbFromFile(self):\n \"\"\"\n\n :return: 文件中id组成的列表\n \"\"\"\n path = self.thumbedFilePath\n inFileList = []\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(fullPath):\n return fullPath\n with open(fullPath, 'r') as f:\n inFileList.extend(list(set(f.readlines()[0].split(','))))\n with open(fullPath, 'w') as f1:\n f1.write(','.join(sorted(inFileList)))\n return inFileList\n\n def getExcuteTimes(self):\n \"\"\"\n 返回点赞等自动执行的次数的字典\n :return:\n \"\"\"\n excuteTimes = {}\n credInfo = self.getCredItinfo()\n print(credInfo)\n currentScore = credInfo[0]\n thumbScore = credInfo[1]['信息评论'].split('/')[0]\n thumbExcuteTimes = 10 - int(thumbScore)\n excuteTimes.update({'thumb': thumbExcuteTimes})\n helpScore = credInfo[1]['互助广场回答'].split('/')[0]\n helpExctuteTimes = 2 - int(helpScore)\n excuteTimes.update({'help': helpExctuteTimes})\n viewScore = credInfo[1]['党员视角发布'].split('/')[0]\n viewExcuteTimes = int((4 - int(viewScore)) / 2)\n excuteTimes.update({'view': viewExcuteTimes})\n examScore = credInfo[1]['在线知识竞答'].split('/')[0]\n examExcuteTimes = int((4 - int(examScore)) / 2)\n excuteTimes.update({'exam': examExcuteTimes})\n flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]\n ['在线阅读学习资料'].split('/')[0])\n flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]\n ['学习资料写体会'].split('/')[0])\n examExcuteTimes = 1 if flag != 
0 or flag1 != 0 else 0\n excuteTimes.update({'study': examExcuteTimes})\n return excuteTimes\n\n def getToken(self):\n \"\"\"\n 获得一个连接的token\n 每个连接都需要使用到\n :return:\n \"\"\"\n data = {'appid': self.appid, 'username': self.username, 'password':\n self.password}\n longinurl = 'https://mapi.dangjianwang.com/v3_1/login'\n r = self.session.post(url=longinurl, data=data, verify=False)\n rjson = r.json()\n if rjson['code'] == '200':\n return rjson['token']\n else:\n print('token 获得失败')\n return None\n\n def getRJson(self, url):\n data = {'token': self.token, 'appid': self.appid}\n return self.session.post(url=url, data=data, verify=False).json()\n\n def getUserInfo(self):\n \"\"\"\n 获得一大串用户的信息,暂时没用\n :return:\n \"\"\"\n infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'\n return self.getRJson(url=infoUrl)\n\n def getCredItinfoToday(self):\n \"\"\"\n 获得人员当前的得分等级参数\n :return:\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n info = self.getRJson(url=creditInfourl)\n fullScore = info['data']['full']\n gainScore = info['data']['gain']\n currentLevel = info['data']['level']\n username = info['data']['name']\n ret = {'fullScore': fullScore, 'gainScore': gainScore,\n 'currentLevel': currentLevel, 'username': username}\n return ret\n\n def getCredItinfo(self):\n \"\"\"\n 获得用户的今日积分状态\n 可用来判断是否需要再继续流程\n 数据如下\n ('35', [('连续登录', '3/3'), ('手机端登录', '2/2'), ('信息评论', '10/10'), ('党员视角发布', '4/4'), ('互助广场回答', '2/2'), ('学习资料写体会', '5/5'), ('在线阅读学习资料', '5/5'), ('在线知识竞答', '4/4')])\n :return:(haved_credit, credit_detail)\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n haved_credit = 0\n credit_detail = {}\n info = self.getRJson(url=creditInfourl)\n for k, v in info.items():\n if k == 'data':\n for k2, v2 in v.items():\n if k2 == 'haved_credit':\n haved_credit = v2\n if k2 == 'credit_detail':\n for i in v2:\n credit_detail.update({i['title']: i['score']})\n return haved_credit, credit_detail\n\n def getPages(self, 
urls):\n pages = []\n for url in urls:\n data = self.getRJson(url=url)\n for k, v in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['title'], i['id']))\n return pages\n\n def getPagesII(self, urls):\n\n def getRJson(url):\n data = {'token': self.token, 'appid': self.appid, 'type_id':\n '791', 'page_index': '1'}\n return self.session.post(url=url, data=data, verify=False).json()\n pages = []\n for url in urls:\n data = getRJson(url=url)\n for k, v in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['name'], i['id']))\n return pages\n\n def doThumb(self, id):\n \"\"\"\n 点赞函数,操作与id对应的页面\n 每次记录对应的信息到文件\n :return:\n \"\"\"\n contents = ['关注', '关注!', '关注!!']\n data = {'id': id, 'comment': random.choice(contents), 'token': self\n .token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n print(rjson)\n if rjson['code'] == '1003':\n self.token = self.getToken()\n elif rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.thumbedPages.append(id)\n for i in list(set(self.thumbPageList)):\n if id == i[1]:\n temp = {'title': i[0]}\n self.thumbResults.update(temp)\n log = '信息点赞:\\n主题: {}\\n提交:{}'.format(i[0], data[\n 'comment'])\n detail = '{} 主题:{}\\n回复:{}\\n'.format(self.\n getCurrentTime(), i[0], data['comment'])\n write2File(self, './results/', 'result.txt', log)\n thumbInfo = {'title': i[0], 'reply': data['comment']}\n self.thumbPages.remove(id)\n self.writeThumb2File(id=id)\n return detail, thumbInfo\n elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':\n print('因评论过快,等待一段时间')\n time.sleep(20)\n else:\n print('rjson', rjson)\n self.thumbedPages.remove(id)\n self.writeThumb2File(id=id)\n log = '点赞:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n time.sleep(10)\n\n def doHelp(self, id, callback=None):\n \"\"\"\n 互助功能\n :param id:\n :return:\n \"\"\"\n detail = ''\n helpInfo = 
None\n log = ''\n content = ['把党的政治建设摆在首位!', '不忘初心,牢记使命!', '发展史第一要务,人才是第一资源,创新是第一动力。',\n '要把党的领导贯彻到依法治国全过程和各方面', '毫不动摇坚持中国共产党领导']\n data = {'id': id, 'content': random.choice(content), 'token': self.\n token, 'appid': self.appid}\n print(data)\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.helpedPages.append(id)\n self.helpPages.remove(id)\n for i in self.helpPageList:\n if id == i[1]:\n curTime = self.getCurrentTime()\n self.helpResults.update({'title': id[0]})\n log = '互助:\\n主题: {}\\n提交内容: {}'.format(i[0], rjson[\n 'comment'])\n write2File(self, './results/', 'result.txt', log)\n detail = '{} 主题: {}\\n提交内容: {}\\n'.format(curTime, i[\n 0], rjson['comment'].strip())\n helpInfo = {'title': i[0], 'reply': rjson['comment']}\n else:\n pass\n else:\n pass\n log = '帮助:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n return detail, log, helpInfo\n\n def doView(self):\n \"\"\"\n 党员视角发布功能\n\n :return:\n \"\"\"\n content = ['全面的小康,覆盖的人口要全面,是惠及全体人民的小康。', '不忘初心,牢记使命,坚持终身学习!']\n data = {'content': random.choice(content), 'token': self.token,\n 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.viewsResults.append(1)\n else:\n pass\n log = '党员视角:{}'.format(rjson)\n detail = '{} 党员视角:\\n发布内容:{}\\n'.format(self.getCurrentTime(), rjson[\n 'data']['content'])\n publicContent = rjson['data']['content']\n return detail, publicContent\n\n def doStudy(self, mid):\n \"\"\"\n 前三个post函数的响应的三个请求\n get用来获得填写的内容\n 最后一个post是学习完离开并检测时间的函数如果成功说明该次学习成功。\n :param mid:\n :return:\n \"\"\"\n interval = 60 * 5 + 5\n\n def post1():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 
'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post1:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post2():\n data = {'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post2:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post3():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post3:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def get1():\n url = (\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'\n .format(self.token, mid))\n rjson = self.session.get(url=url)\n text = rjson.content\n soup = BeautifulSoup(text, 'html.parser')\n retContents = []\n for div in soup.find_all('p'):\n p = div.text.strip()\n retContents.append(p if 100 > len(p) < 200 else p[0:200])\n return random.choice(retContents)\n\n def recordFeeling(content=None):\n if not content:\n content = (\n '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'\n )\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'content': content}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习recordFeeling:{}'.format(rjson)\n self.writeLog2File(log)\n print('in recordFeeling')\n print(log)\n if rjson['code'] == '200':\n return {'content': content}\n elif rjson['code'] == '1120':\n addtion = ['我们必须坚定不移,任何时候任何情况下都不能动摇',\n '人民有信心,国家才有未来,国家才有力量。', '新时代,属于自强不息、勇于创造的奋斗者。',\n 
'民主政治建设有序推进,依法治市迈出新步伐。', '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。']\n return recordFeeling(content='{}\\n{}'.format(content,\n random.choice(addtion)))\n else:\n return None\n\n def readTime():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'time': interval}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习readTime:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n post1()\n time.sleep(1)\n post2()\n time.sleep(1)\n post3()\n time.sleep(1)\n content = get1()\n time.sleep(1)\n count = 0\n print('开始学习请稍后')\n for i in range(interval):\n count += 1\n if count % 30 == 0:\n print('已用时{}秒'.format(count))\n time.sleep(1)\n print('填写的学习体会', content)\n self.studyRsults.update(recordFeeling(content=content))\n time.sleep(1)\n readTime()\n time.sleep(1)\n pass\n\n def doExam(self):\n \"\"\"\n\n :param self:\n :return:\n \"\"\"\n ids = []\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'\n rjson = self.session.post(url=examlistUrl, data=data, verify=False\n ).json()\n time.sleep(0.3)\n print('*' * 99)\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'\n rjson = self.session.post(url=banklistUrl, data=data, verify=False\n ).json()\n for i in rjson['data']:\n tem = i['bank_name'], i['id']\n self.examlist.append(tem)\n if i['bank_name'] == '十九大报告100题(单选)':\n temp = {'title': i['bank_name'], 'detail': i['detail'],\n 'id': i['id']}\n self.examC19Info.append(temp)\n time.sleep(0.3)\n print('*' * 99)\n data = {'bank': '6', 'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n aa = rjson['data']\n paper = aa['id']\n for i in 
aa['questions']:\n temp = {'id': i['id'], 'content': i['content']}\n ids.append(temp)\n print('*' * 99)\n time.sleep(0.5)\n answers = []\n for i in ids:\n correctAnswer = Qa.objects.filter(question__contains=i['content'])[\n 0]\n answerText = correctAnswer.answerText\n answer = correctAnswer.answer\n temp = {'index': i['id'], 'answer': answer}\n qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}\n self.qaList.append(qa)\n print(qa, i['content'])\n answers.append(temp)\n time.sleep(1)\n hdata = {'token': self.token, 'appid': self.appid, 'paper': paper,\n 'answers': json.dumps(answers)}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'\n rjson = self.session.post(url=commitUrl, data=hdata, verify=False\n ).json()\n print(rjson)\n print(self.examlist)\n print(self.examC19Info)\n print(self.qaList)\n\n def getAnswerInfo(self):\n \"\"\"\n 获得答题的结果与正确率\n :return:\n \"\"\"\n data = {'token': self.token, 'appid': self.appid, 'page_size': '20',\n 'page_index': 'page_index'}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n print(rjson)\n\n\n<mask token>\n",
"step-5": "import json\r\nimport os, django\r\n\r\n\r\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"dangjianyun.settings\")# project_name 项目名称\r\ndjango.setup()\r\nfrom dangjiansite.djfuncs import *\r\nimport os\r\nimport datetime\r\nimport requests\r\nimport time\r\nimport urllib3\r\nimport base64\r\nimport csv\r\nimport random\r\nfrom bs4 import BeautifulSoup\r\nfrom dangjiansite.models import *\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass Runner():\r\n\r\n # def __init__(self, appid='TJZHDJ01', username='024549', password='Aa1234'):\r\n def __init__(self, appid='TJZHDJ01', username='', password=''):\r\n urllib3.disable_warnings()#屏蔽ssl告警\r\n self.currentTime = datetime.datetime.now().strftime(\"%H:%M:%S\")\r\n self.username = username\r\n self.password = password\r\n self.thumbedFilePath = './lib/'.format(username)\r\n self.logFilePath = './log/'.format(username)\r\n self.errFilePath = './err/'.format(username)\r\n # self.thumbedFileList = self.getThumbFromFile()\r\n self.thumbedFileList = []\r\n self.debug = True\r\n self.session = requests.session()\r\n self.appid = appid#应该是本设备安装app的id 等换个设备试一下就知道了\r\n self.headers ={\r\n 'User-Agent': 'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)',\r\n 'header_version': '80',\r\n 'system': 'android',\r\n 'Connection': 'Keep-Alive',\r\n 'Host': 'mapi.dangjianwang.com',\r\n }\r\n self.token = self.getToken()\r\n time.sleep(0.1)\r\n self.thumbPageList = self.getPages(urls=[\r\n 'https://mapi.dangjianwang.com/v3_1/Learn/List',\r\n 'https://mapi.dangjianwang.com/v3_1/Activities/List',\r\n 'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'\r\n ])\r\n self.thumbPages = [i[1] for i in self.thumbPageList]\r\n time.sleep(0.1)\r\n self.helpPageList = self.getPages(urls=['https://mapi.dangjianwang.com/v3_1/Help/List', ])\r\n self.helpPages = [i[1] for i in self.helpPageList]\r\n self.helpResults = {}\r\n time.sleep(0.1)\r\n self.studyPageList = 
self.getPagesII(urls=['https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])\r\n self.studyPages = [i[1] for i in self.studyPageList]\r\n time.sleep(0.1)\r\n self.studyRsults = {}\r\n self.thumbedPages = []\r\n self.thumbResults = {}\r\n self.helpedPages = []\r\n self.multiThumbed = []#考虑最后要写入文件之中\r\n self.viewsResults = []\r\n self.examC19Info = []\r\n self.examlist = []\r\n self.qaList = []\r\n\r\n def getCurrentTime(self):\r\n return datetime.datetime.now().strftime(\"%H:%M:%S\")\r\n\r\n\r\n def writeErr2File(self, err):\r\n path = self.logFilePath\r\n fullPath = '{}{}err.txt'.format(path, self.username)\r\n if not os.path.exists(path):\r\n os.mkdir(path)\r\n with open(fullPath, 'a') as f:\r\n f.write('{}:{}\\n'.format(self.currentTime, err))\r\n print('err已经写入{}'.format(fullPath))\r\n\r\n def writeLog2File(self, log):\r\n path = self.logFilePath\r\n fullPath = '{}{}logs.txt'.format(path, self.username)\r\n if not os.path.exists(path):\r\n os.mkdir(path)\r\n with open(fullPath, 'a') as f:\r\n f.write('{}:{}\\n'.format(self.currentTime, log))\r\n print('log已经写入{}'.format(fullPath))\r\n\r\n def writeThumb2File(self, id):\r\n path = self.thumbedFilePath\r\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\r\n if not os.path.exists(path):\r\n os.mkdir(path)\r\n with open(fullPath, 'a') as f:\r\n f.write(',{}'.format(id))\r\n print('点赞记录已经写入{}'.format(fullPath))\r\n\r\n def getThumbFromFile(self):\r\n '''\r\n\r\n :return: 文件中id组成的列表\r\n '''\r\n path = self.thumbedFilePath\r\n inFileList = []\r\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\r\n if not os.path.exists(fullPath):\r\n return fullPath\r\n with open(fullPath, 'r') as f:\r\n inFileList.extend(list(set(f.readlines()[0].split(','))))\r\n # print('getThumbFormFile', inFileList)\r\n with open(fullPath, 'w') as f1:\r\n f1.write(','.join(sorted(inFileList)))\r\n return inFileList\r\n\r\n def getExcuteTimes(self):\r\n '''\r\n 返回点赞等自动执行的次数的字典\r\n :return:\r\n '''\r\n excuteTimes = {}\r\n\r\n 
credInfo = self.getCredItinfo()\r\n print(credInfo)\r\n currentScore = credInfo[0]\r\n\r\n # 点赞次数\r\n thumbScore = credInfo[1]['信息评论'].split('/')[0]\r\n thumbExcuteTimes = 10 - int(thumbScore)\r\n excuteTimes.update({'thumb': thumbExcuteTimes})\r\n # 帮助次数\r\n helpScore = credInfo[1]['互助广场回答'].split('/')[0]\r\n helpExctuteTimes = 2 - int(helpScore)\r\n excuteTimes.update({'help': helpExctuteTimes})\r\n # 党员视角发布次数\r\n viewScore = credInfo[1]['党员视角发布'].split('/')[0]\r\n viewExcuteTimes = int((4 - int(viewScore)) / 2)\r\n excuteTimes.update({'view': viewExcuteTimes})\r\n # 在线知识竞答次数\r\n examScore = credInfo[1]['在线知识竞答'].split('/')[0]\r\n examExcuteTimes = int((4 - int(examScore)) / 2)\r\n excuteTimes.update({'exam': examExcuteTimes})\r\n # 学习次数\r\n flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]['在线阅读学习资料'].split('/')[0])\r\n flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]['学习资料写体会'].split('/')[0])\r\n examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0\r\n excuteTimes.update({'study': examExcuteTimes})\r\n\r\n return excuteTimes\r\n\r\n def getToken(self):\r\n '''\r\n 获得一个连接的token\r\n 每个连接都需要使用到\r\n :return:\r\n '''\r\n data = {\r\n 'appid': self.appid,\r\n 'username': self.username,\r\n 'password': self.password,\r\n }\r\n longinurl = 'https://mapi.dangjianwang.com/v3_1/login'\r\n\r\n r = self.session.post(url=longinurl, data=data, verify=False)\r\n rjson = r.json()\r\n # print(type(rjson))\r\n # print(rjson)\r\n\r\n if rjson['code'] == '200':\r\n return rjson['token']\r\n else:\r\n print('token 获得失败')\r\n return None\r\n\r\n def getRJson(self, url):\r\n data={\r\n 'token': self.token,\r\n 'appid': self.appid\r\n }\r\n\r\n return self.session.post(url=url, data=data, verify=False).json()\r\n\r\n def getUserInfo(self):\r\n '''\r\n 获得一大串用户的信息,暂时没用\r\n :return:\r\n '''\r\n infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'\r\n return self.getRJson(url=infoUrl)\r\n\r\n def getCredItinfoToday(self):\r\n '''\r\n 
获得人员当前的得分等级参数\r\n :return:\r\n '''\r\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\r\n info = self.getRJson(url=creditInfourl)\r\n fullScore = info['data']['full']\r\n gainScore = info['data']['gain']\r\n currentLevel = info['data']['level']\r\n username = info['data']['name']\r\n ret = {\r\n 'fullScore': fullScore,\r\n 'gainScore': gainScore,\r\n 'currentLevel': currentLevel,\r\n 'username': username,\r\n }\r\n return ret\r\n\r\n\r\n def getCredItinfo(self):\r\n '''\r\n 获得用户的今日积分状态\r\n 可用来判断是否需要再继续流程\r\n 数据如下\r\n ('35', [('连续登录', '3/3'), ('手机端登录', '2/2'), ('信息评论', '10/10'), ('党员视角发布', '4/4'), ('互助广场回答', '2/2'), ('学习资料写体会', '5/5'), ('在线阅读学习资料', '5/5'), ('在线知识竞答', '4/4')])\r\n :return:(haved_credit, credit_detail)\r\n '''\r\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\r\n haved_credit = 0\r\n credit_detail = {}\r\n\r\n info = self.getRJson(url=creditInfourl)\r\n for k, v in info.items():\r\n if k == 'data':\r\n for k2, v2 in v.items():\r\n if k2 == 'haved_credit':\r\n haved_credit = v2\r\n if k2 == 'credit_detail':\r\n for i in v2:\r\n credit_detail.update({i['title']: i['score']})\r\n\r\n return (haved_credit, credit_detail)\r\n\r\n def getPages(self, urls):\r\n pages = []\r\n for url in urls:\r\n data = self.getRJson(url=url)\r\n for k, v in data.items():\r\n if k == 'data':\r\n for i in v:\r\n # pages.append({'pageId': i['id'], 'pageTitle': i['title']})\r\n # pages.append(i['id'])\r\n pages.append((i['title'], i['id']))\r\n\r\n return pages\r\n\r\n def getPagesII(self, urls):\r\n def getRJson(url):\r\n data = {\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n 'type_id': '791',\r\n 'page_index': '1',\r\n }\r\n\r\n return self.session.post(url=url, data=data, verify=False).json()\r\n pages = []\r\n for url in urls:\r\n data = getRJson(url=url)\r\n for k, v in data.items():\r\n # print(k, v)\r\n if k == 'data':\r\n for i in v:\r\n # pages.append({'pageId': i['id'], 'pageTitle': i['title']})\r\n # 
pages.append(i['id'])\r\n pages.append((i['name'], i['id']))\r\n\r\n return pages\r\n\r\n def doThumb(self, id):\r\n '''\r\n 点赞函数,操作与id对应的页面\r\n 每次记录对应的信息到文件\r\n :return:\r\n '''\r\n contents = [\r\n '关注',\r\n '关注!',\r\n '关注!!']\r\n data = {\r\n 'id': id,\r\n 'comment': random.choice(contents),\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n print(rjson)\r\n if rjson['code'] == '1003':\r\n self.token = self.getToken()\r\n elif rjson['code'] == '200':\r\n result = rjson['msg']\r\n if result == '操作成功':\r\n self.thumbedPages.append(id)\r\n # print(self.thumbPageList)\r\n # print(len(self.thumbPageList), len(list(set(self.thumbPageList))))\r\n\r\n for i in list(set(self.thumbPageList)):\r\n if id == i[1]:\r\n temp = {'title': i[0]}\r\n self.thumbResults.update(temp)\r\n log = '信息点赞:\\n主题: {}\\n提交:{}'.format(i[0], data['comment'])\r\n detail = '{} 主题:{}\\n回复:{}\\n'.format(self.getCurrentTime(), i[0], data['comment'])\r\n write2File(self, './results/', 'result.txt', log)\r\n thumbInfo = {'title': i[0], 'reply': data['comment']}\r\n\r\n self.thumbPages.remove(id)\r\n self.writeThumb2File(id=id)\r\n\r\n return (detail, thumbInfo)\r\n elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':\r\n print('因评论过快,等待一段时间')\r\n time.sleep(20)\r\n else:\r\n print('rjson', rjson)\r\n # self.multiThumbed.append(id)\r\n self.thumbedPages.remove(id)#不成功的时候也要去掉不然总会选到\r\n self.writeThumb2File(id=id)\r\n log = '点赞:{}'.format(rjson)\r\n self.writeLog2File(log)\r\n print(log)\r\n time.sleep(10)\r\n\r\n\r\n def doHelp(self, id, callback=None):\r\n '''\r\n 互助功能\r\n :param id:\r\n :return:\r\n '''\r\n detail = ''\r\n helpInfo = None\r\n log = ''\r\n content = [\r\n '把党的政治建设摆在首位!',\r\n '不忘初心,牢记使命!',\r\n '发展史第一要务,人才是第一资源,创新是第一动力。',\r\n '要把党的领导贯彻到依法治国全过程和各方面',\r\n '毫不动摇坚持中国共产党领导',]\r\n data = {\r\n 'id': id,\r\n 'content': 
random.choice(content),\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n print(data)\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n\r\n\r\n\r\n if rjson['code'] == '200':\r\n result = rjson['msg']\r\n if result == '操作成功':\r\n self.helpedPages.append(id)\r\n self.helpPages.remove(id)\r\n #记录成功的到result\r\n for i in self.helpPageList:\r\n if id == i[1]:\r\n curTime = self.getCurrentTime()\r\n # print('('*88)\r\n # print(curTime)\r\n self.helpResults.update({'title': id[0]})\r\n log = '互助:\\n主题: {}\\n提交内容: {}'.format(i[0], rjson['comment'])\r\n write2File(self, './results/', 'result.txt', log)\r\n # #写入数据库\r\n detail = '{} 主题: {}\\n提交内容: {}\\n'.format(curTime, i[0], rjson['comment'].strip())\r\n helpInfo = {'title': i[0], 'reply': rjson['comment']}\r\n else:\r\n pass\r\n else:\r\n pass\r\n\r\n log = '帮助:{}'.format(rjson)\r\n self.writeLog2File(log)\r\n print(log)\r\n return (detail, log, helpInfo)\r\n\r\n def doView(self):\r\n '''\r\n 党员视角发布功能\r\n\r\n :return:\r\n '''\r\n\r\n content = [\r\n '全面的小康,覆盖的人口要全面,是惠及全体人民的小康。',\r\n '不忘初心,牢记使命,坚持终身学习!']\r\n data = {\r\n 'content': random.choice(content),\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n if rjson['code'] == '200':\r\n result = rjson['msg']\r\n if result == '操作成功':\r\n self.viewsResults.append(1)\r\n # self.viewsResults.append(id)\r\n else:\r\n pass\r\n\r\n log = '党员视角:{}'.format(rjson)\r\n detail = '{} 党员视角:\\n发布内容:{}\\n'.format(self.getCurrentTime(), rjson['data']['content'])\r\n publicContent = rjson['data']['content']\r\n # print(detail)\r\n # self.writeLog2File(log)\r\n # print('党员视角'*12)\r\n # print(id)\r\n # print(log)\r\n # print('党员视角' * 12)\r\n return (detail, publicContent)\r\n\r\n def doStudy(self, mid):\r\n '''\r\n 
前三个post函数的响应的三个请求\r\n get用来获得填写的内容\r\n 最后一个post是学习完离开并检测时间的函数如果成功说明该次学习成功。\r\n :param mid:\r\n :return:\r\n '''\r\n interval = 60 * 5 + 5\r\n def post1():\r\n data = {\r\n 'mid': mid,\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n log = '学习post1:{}'.format(rjson)\r\n self.writeLog2File(log)\r\n print(log)\r\n def post2():\r\n data = {\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n log = '学习post2:{}'.format(rjson)\r\n self.writeLog2File(log)\r\n print(log)\r\n def post3():\r\n data = {\r\n 'mid': mid,\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n log = '学习post3:{}'.format(rjson)\r\n self.writeLog2File(log)\r\n print(log)\r\n\r\n def get1():\r\n url = 'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'.format(self.token, mid)\r\n rjson = self.session.get(url=url)\r\n text = rjson.content\r\n soup = BeautifulSoup(text, 'html.parser')\r\n retContents = []\r\n for div in soup.find_all('p'):\r\n p = div.text.strip()\r\n retContents.append(p if 100 > len(p) < 200 else p[0:200])\r\n return random.choice(retContents)\r\n\r\n def recordFeeling(content=None):\r\n if not content:\r\n content = '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,' \\\r\n '是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。' \\\r\n '邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'\r\n data = {\r\n 'mid': mid,\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n 'content': content\r\n }\r\n\r\n 
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n log = '学习recordFeeling:{}'.format(rjson)\r\n self.writeLog2File(log)\r\n print('in recordFeeling')\r\n print(log)\r\n\r\n if rjson['code'] == '200':\r\n return {'content': content}\r\n elif rjson['code'] == '1120':\r\n addtion = [\r\n '我们必须坚定不移,任何时候任何情况下都不能动摇',\r\n '人民有信心,国家才有未来,国家才有力量。',\r\n '新时代,属于自强不息、勇于创造的奋斗者。',\r\n '民主政治建设有序推进,依法治市迈出新步伐。',\r\n '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。',\r\n\r\n ]\r\n return recordFeeling(content= '{}\\n{}'.format(content, random.choice(addtion)))\r\n else:\r\n return None\r\n #记录回复的心得\r\n\r\n\r\n def readTime():\r\n data = {\r\n 'mid': mid,\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n 'time': interval,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n log = '学习readTime:{}'.format(rjson)\r\n # self.studyRsults.update({'学习readTime', rjson})\r\n self.writeLog2File(log)\r\n print(log)\r\n\r\n\r\n\r\n post1()\r\n time.sleep(1)\r\n post2()\r\n time.sleep(1)\r\n post3()\r\n time.sleep(1)\r\n content = get1()\r\n time.sleep(1)\r\n # time.sleep(interval)\r\n count = 0\r\n print('开始学习请稍后')\r\n for i in range(interval):\r\n count += 1\r\n # print(i + 1)\r\n if count % 30 == 0:\r\n print('已用时{}秒'.format(count))\r\n time.sleep(1)\r\n # time.sleep(5)\r\n print('填写的学习体会', content)\r\n self.studyRsults.update(recordFeeling(content=content))\r\n time.sleep(1)\r\n readTime()\r\n time.sleep(1)\r\n pass\r\n\r\n def doExam(self):\r\n '''\r\n\r\n :param self:\r\n :return:\r\n '''\r\n ids = []\r\n data = {\r\n 'page': '1',\r\n 'page_size': '20',\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'\r\n rjson = self.session.post(url=examlistUrl,\r\n data=data,\r\n 
verify=False).json()\r\n # print(rjson)\r\n # for i in rjson['data']:\r\n # print(i)\r\n time.sleep(0.3)\r\n #########################################################\r\n print('*' * 99)\r\n data = {\r\n 'page': '1',\r\n 'page_size': '20',\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'\r\n rjson = self.session.post(url=banklistUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n for i in rjson['data']:\r\n tem = (i['bank_name'], i['id'])\r\n self.examlist.append(tem)\r\n if i['bank_name'] == '十九大报告100题(单选)':\r\n # if i['bank_num'] == '65':\r\n temp = {\r\n 'title': i['bank_name'],\r\n 'detail': i['detail'],\r\n 'id': i['id'],\r\n }\r\n self.examC19Info.append(temp)\r\n # print(self.examC19Info)\r\n # print(self.examlist)\r\n time.sleep(0.3)\r\n #########################################################\r\n print('*' * 99)\r\n data = {\r\n 'bank': '6',\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n aa = rjson['data']\r\n paper = aa['id']\r\n for i in aa['questions']:\r\n temp = {'id': i['id'], 'content': i['content']}\r\n ids.append(temp)\r\n\r\n #########################################################\r\n print('*' * 99)\r\n time.sleep(0.5)\r\n # 以下答题交卷\r\n\r\n answers = []\r\n # 先得到答案\r\n\r\n\r\n for i in ids:\r\n # 丛书据库获得答案\r\n correctAnswer = Qa.objects.filter(question__contains=i['content'])[0]\r\n answerText = correctAnswer.answerText\r\n answer = correctAnswer.answer\r\n #从文键获得答案\r\n # answerText = getAnswer(i['content'])[2]\r\n # answer = getAnswer(i['content'])[1]\r\n temp = {'index': i['id'], 'answer': answer}\r\n qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}\r\n self.qaList.append(qa)\r\n print(qa, i['content'])\r\n answers.append(temp)\r\n time.sleep(1)\r\n 
hdata = {\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n 'paper': paper,\r\n 'answers': json.dumps(answers),\r\n # 'answers': [{'answer': 'A', 'index': '639'}, {'answer': 'A', 'index': '639'}],\r\n }\r\n # print('hdata:', hdata)\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'\r\n rjson = self.session.post(url=commitUrl,\r\n data=hdata,\r\n verify=False).json()\r\n print(rjson)\r\n print(self.examlist)\r\n print(self.examC19Info)\r\n print(self.qaList)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n def getAnswerInfo(self):\r\n '''\r\n 获得答题的结果与正确率\r\n :return:\r\n '''\r\n data = {\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n 'page_size': '20',\r\n 'page_index': 'page_index',\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n print(rjson)\r\n\r\n\r\n'''\r\n\r\nhttps://mapi.dangjianwang.com/v3_1/exam/randexam 答题地址 主id是交卷的paper 这里要获取到questions里的id 等于回答问题中的index \r\nappid\tTJZHDJ01\r\nbank\t6\r\ntoken\t5jTY47PbPZ0KdUprwmfJVfH4cX23tyDcV25XrEYkWVvElH3YjJpIb1JCDwq_\r\n\r\nhttps://mapi.dangjianwang.com/v3_1/exam/handpaper 交卷的连接\r\nappid\tTJZHDJ01\r\nanswers\t[{\"index\":\"635\",\"answer\":\"D\"},{\"index\":\"640\",\"answer\":\"C\"},{\"index\":\"641\",\"answer\":\"B\"},{\"index\":\"665\",\"answer\":\"B\"},{\"index\":\"670\",\"answer\":\"B\"},{\"index\":\"673\",\"answer\":\"B\"},{\"index\":\"677\",\"answer\":\"C\"},{\"index\":\"682\",\"answer\":\"B\"},{\"index\":\"684\",\"answer\":\"C\"},{\"index\":\"690\",\"answer\":\"A\"}]\r\ntoken\t5jTY47PbPZ0KdUprwmfJVfH4cX23tyDcV25XrEYkWVvElH3YjJpIb1JCDwq_\r\npaper\t4565894\r\n\r\nhttps://mapi.dangjianwang.com/v3_1/exam/banklist 获得答题情况的连接\r\n\r\nappid\tTJZHDJ01\r\npage_size\t20\r\ntoken\t5jTY47PbPZxXeRxlkzScAPWidyvssy3TBD5Y9UYiCQnMmCfa2pRNb1JCDwq_\r\npage_index\t1\r\n\r\n\r\n\r\n\r\n--------------------------------------------------\r\nhttps://mapi.dangjianwang.com/v3_1/Study/MaterialCollList 
学习的id列表\r\nappid\tTJZHDJ01\r\npage_size\t20\r\ntype_id\t791\r\ntoken\t5jTY47PbPZJbeh9ixjfOUvaoI3604SrSAz5Zokt3DAmfz3qIis4Yb1JCDwq_\r\npage_index\t1\r\n\r\n下面是针对791id列表中的访问地址\r\nhttps://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus\r\n\r\npost1:\r\nappid\tTJZHDJ01\r\nmid\t9729\r\ntoken\t5jTY47PbPZoOKEUwlDCaAKWqICGwt3_OVzlVpk5yW1bMyS_M3J5Db1JCDwq_\r\npost2:\r\n\r\nhttps://mapi.dangjianwang.com/v3_1/Login/CheckToken\r\nappid\tTJZHDJ01\r\ntoken\t5jTY47PbPZoOKEUwlDCaAKWqICGwt3_OVzlVpk5yW1bMyS_M3J5Db1JCDwq_\r\n\r\npost3:\r\nhttps://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum\r\nappid\tTJZHDJ01\r\nmid\t9729\r\ntoken\t5jTY47PbPZoOKEUwlDCaAKWqICGwt3_OVzlVpk5yW1bMyS_M3J5Db1JCDwq_\r\n\r\nget1 https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={} 获得页面\r\n\r\n\r\n\r\npost 发表体会\r\nhttps://mapi.dangjianwang.com/v3_1/Study/RecordFeeling\r\nappid\tTJZHDJ01\r\ncontent\t 伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。\r\nmid\t9729\r\ntoken\t5jTY47PbPckOdUlllmfOCaCvcy7ls3rSVmxRoE0gDg3EmyrYi5Ucb1JCDwq_\r\n\r\npost 结束学习 \r\nhttps://mapi.dangjianwang.com/v3_1/Study/ReadTime\r\nappid\tTJZHDJ01\r\ntime\t362\r\nmid\t9729\r\ntoken\t5jTY47PbPckOdUlllmfOCaCvcy7ls3rSVmxRoE0gDg3EmyrYi5Ucb1JCDwq_\r\n\r\n\r\n---------------------------------------\r\n\r\nhttps://mapi.dangjianwang.com/v3_1/Help/List 这里获得帮助id\r\nhttps://mapi.dangjianwang.com/v3_1/Help/PostComment 
提交评论的地址\r\n\r\n\r\nappid\tTJZHDJ01\r\ncontent\t不忘初心,牢记使命!\r\nid\t55984\r\ntoken\t5jTY47PbPcpZe0s1xDLKAqKoIimx6SnSVjcApB92DF3Nmy/djZ1Nb1JCDwq_\r\n\r\n把党的政治建设摆在首位!\r\n不忘初心,牢记使命!\r\n\r\n-------------------------------\r\n\r\n发布的内容\r\nhttps://mapi.dangjianwang.com/v3_1/Viewpoint/Create\r\n\r\nappid\tTJZHDJ01\r\ncontent\t不忘初心牢记使命\r\ntoken\t5jTY47PbPZ9deR5rkTXIB/b/fymw5HvbAj9R900gDArNnXqE1s9Kb1JCDwq_\r\n\r\n\r\n不忘初心,牢记使命,坚持终身学习!\r\n全面的小康,覆盖的人口要全面,是惠及全体人民的小康。\r\n\r\n-----------------------------\r\n点赞错误\r\n{'msg': '重复评论过多,请您修改后重新提交。', 'code': '500'}\r\n'''",
"step-ids": [
17,
19,
20,
21,
24
]
}
|
[
17,
19,
20,
21,
24
] |
"""GI on fast."""
import logging
from mpf.core.utility_functions import Util
from mpf.platforms.interfaces.gi_platform_interface import GIPlatformInterface
class FASTGIString(GIPlatformInterface):
    """A FAST GI (general illumination) string in a WPC machine.

    TODO: Need to implement the enable_relay and control which strings are
    dimmable.
    """

    def __init__(self, number, sender):
        """Initialise GI string.

        Args:
            number: Address of this GI string on the FAST bus (hex string).
            sender: Callable used to send a command string to the hardware.
        """
        self.log = logging.getLogger('FASTGIString.0x' + str(number))
        self.number = number
        self.send = sender

    def off(self):
        """Turn off GI string."""
        self.log.debug("Turning Off GI String")
        self.send('GI:' + self.number + ',00')

    def on(self, brightness=255):
        """Turn on GI string.

        Args:
            brightness: Desired brightness 0-255; values above 255 are
                clamped to 255 (full on).
        """
        # Clamp to the 8-bit range the hardware accepts.
        brightness = min(brightness, 255)

        self.log.debug("Turning On GI String to brightness %s", brightness)
        self.send('GI:{},{}'.format(self.number,
                                    Util.int_to_hex_string(brightness)))
|
normal
|
{
"blob_id": "91cf6d08be2ad86c08de4dd48b2f35dedc55b4bb",
"index": 2177,
"step-1": "<mask token>\n\n\nclass FASTGIString(GIPlatformInterface):\n <mask token>\n\n def __init__(self, number, sender):\n \"\"\"Initialise GI string.\n\n TODO: Need to implement the enable_relay and control which strings are\n dimmable.\n \"\"\"\n self.log = logging.getLogger('FASTGIString.0x' + str(number))\n self.number = number\n self.send = sender\n <mask token>\n\n def on(self, brightness=255):\n \"\"\"Turn on GI string.\"\"\"\n if brightness >= 255:\n brightness = 255\n self.log.debug('Turning On GI String to brightness %s', brightness)\n self.send('GI:{},{}'.format(self.number, Util.int_to_hex_string(\n brightness)))\n",
"step-2": "<mask token>\n\n\nclass FASTGIString(GIPlatformInterface):\n <mask token>\n\n def __init__(self, number, sender):\n \"\"\"Initialise GI string.\n\n TODO: Need to implement the enable_relay and control which strings are\n dimmable.\n \"\"\"\n self.log = logging.getLogger('FASTGIString.0x' + str(number))\n self.number = number\n self.send = sender\n\n def off(self):\n \"\"\"Turn off GI string.\"\"\"\n self.log.debug('Turning Off GI String')\n self.send('GI:' + self.number + ',00')\n\n def on(self, brightness=255):\n \"\"\"Turn on GI string.\"\"\"\n if brightness >= 255:\n brightness = 255\n self.log.debug('Turning On GI String to brightness %s', brightness)\n self.send('GI:{},{}'.format(self.number, Util.int_to_hex_string(\n brightness)))\n",
"step-3": "<mask token>\n\n\nclass FASTGIString(GIPlatformInterface):\n \"\"\"A FAST GI string in a WPC machine.\"\"\"\n\n def __init__(self, number, sender):\n \"\"\"Initialise GI string.\n\n TODO: Need to implement the enable_relay and control which strings are\n dimmable.\n \"\"\"\n self.log = logging.getLogger('FASTGIString.0x' + str(number))\n self.number = number\n self.send = sender\n\n def off(self):\n \"\"\"Turn off GI string.\"\"\"\n self.log.debug('Turning Off GI String')\n self.send('GI:' + self.number + ',00')\n\n def on(self, brightness=255):\n \"\"\"Turn on GI string.\"\"\"\n if brightness >= 255:\n brightness = 255\n self.log.debug('Turning On GI String to brightness %s', brightness)\n self.send('GI:{},{}'.format(self.number, Util.int_to_hex_string(\n brightness)))\n",
"step-4": "<mask token>\nimport logging\nfrom mpf.core.utility_functions import Util\nfrom mpf.platforms.interfaces.gi_platform_interface import GIPlatformInterface\n\n\nclass FASTGIString(GIPlatformInterface):\n \"\"\"A FAST GI string in a WPC machine.\"\"\"\n\n def __init__(self, number, sender):\n \"\"\"Initialise GI string.\n\n TODO: Need to implement the enable_relay and control which strings are\n dimmable.\n \"\"\"\n self.log = logging.getLogger('FASTGIString.0x' + str(number))\n self.number = number\n self.send = sender\n\n def off(self):\n \"\"\"Turn off GI string.\"\"\"\n self.log.debug('Turning Off GI String')\n self.send('GI:' + self.number + ',00')\n\n def on(self, brightness=255):\n \"\"\"Turn on GI string.\"\"\"\n if brightness >= 255:\n brightness = 255\n self.log.debug('Turning On GI String to brightness %s', brightness)\n self.send('GI:{},{}'.format(self.number, Util.int_to_hex_string(\n brightness)))\n",
"step-5": "\"\"\"GI on fast.\"\"\"\nimport logging\n\nfrom mpf.core.utility_functions import Util\nfrom mpf.platforms.interfaces.gi_platform_interface import GIPlatformInterface\n\n\nclass FASTGIString(GIPlatformInterface):\n\n \"\"\"A FAST GI string in a WPC machine.\"\"\"\n\n def __init__(self, number, sender):\n \"\"\"Initialise GI string.\n\n TODO: Need to implement the enable_relay and control which strings are\n dimmable.\n \"\"\"\n self.log = logging.getLogger('FASTGIString.0x' + str(number))\n self.number = number\n self.send = sender\n\n def off(self):\n \"\"\"Turn off GI string.\"\"\"\n self.log.debug(\"Turning Off GI String\")\n self.send('GI:' + self.number + ',00')\n\n def on(self, brightness=255):\n \"\"\"Turn on GI string.\"\"\"\n if brightness >= 255:\n brightness = 255\n\n self.log.debug(\"Turning On GI String to brightness %s\", brightness)\n # self.send('GI:' + self.number + ',' + Util.int_to_hex_string(brightness))\n\n self.send('GI:{},{}'.format(self.number,\n Util.int_to_hex_string(brightness)))\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
"""
Exercise 3 from the Python tutorial Part 1 on:
https://codeandwork.github.io/courses/prep/pythonTutorial1.html
"""
import math
def triangle_area(a, b, c):
    """Return the area of a triangle with side lengths a, b, c.

    Uses Heron's formula in the symmetric product form
    area = (1/4) * sqrt((a+b+c)(-a+b+c)(a-b+c)(a+b-c)).
    """
    return (1 / 4) * math.sqrt(
        (a + b + c) * (-a + b + c) * (a - b + c) * (a + b - c))


def main():
    """Prompt for the three side lengths and print the triangle's area."""
    print("Give the length of each side in order to compute the area of a triangle.")
    len_a = float(input("Give the length of side A:"))
    len_b = float(input("Give the length of side B:"))
    len_c = float(input("Give the length of side C:"))
    print("The triangle area is:", triangle_area(len_a, len_b, len_c))


if __name__ == "__main__":
    main()
|
normal
|
{
"blob_id": "398cb05218a9772a0b62fdfbacc465b26427827d",
"index": 2854,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(\n 'Give the length of each side in order to compute the area of a triangle.')\n<mask token>\nprint('The triangle area is:', triangleArea)\n",
"step-3": "<mask token>\nprint(\n 'Give the length of each side in order to compute the area of a triangle.')\nlenA = float(input('Give the length of side A:'))\nlenB = float(input('Give the length of side B:'))\nlenC = float(input('Give the length of side C:'))\ntriangleArea = 1 / 4 * math.sqrt((lenA + lenB + lenC) * (-lenA + lenB +\n lenC) * (lenA - lenB + lenC) * (lenA + lenB - lenC))\nprint('The triangle area is:', triangleArea)\n",
"step-4": "<mask token>\nimport math\nprint(\n 'Give the length of each side in order to compute the area of a triangle.')\nlenA = float(input('Give the length of side A:'))\nlenB = float(input('Give the length of side B:'))\nlenC = float(input('Give the length of side C:'))\ntriangleArea = 1 / 4 * math.sqrt((lenA + lenB + lenC) * (-lenA + lenB +\n lenC) * (lenA - lenB + lenC) * (lenA + lenB - lenC))\nprint('The triangle area is:', triangleArea)\n",
"step-5": "\"\"\"\n Exercise 3 from the Python tutorial Part 1 on:\n https://codeandwork.github.io/courses/prep/pythonTutorial1.html\n\"\"\"\n\nimport math\n\nprint(\"Give the length of each side in order to compute the area of a triangle.\")\nlenA = float(input(\"Give the length of side A:\"))\nlenB = float(input(\"Give the length of side B:\"))\nlenC = float(input(\"Give the length of side C:\"))\n\ntriangleArea = (1/4) * math.sqrt((lenA+lenB+lenC) * (-lenA+lenB+lenC) * (lenA-lenB+lenC) * (lenA+lenB-lenC))\n\nprint(\"The triangle area is:\", triangleArea)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import sys
import os
import numpy as np
import math
sys.path.append("../")
from sir.improveagent import *
import numpy as np
import numpy.linalg as la
import matplotlib.pyplot as plt
#from sklearn.neighbors import BallTree
from scipy.spatial import KDTree
from scipy.spatial import cKDTree
from scipy.spatial.distance import pdist
import networkx as nx
# NOTE(review): this module-level Person() instance is never used below —
# presumably a leftover smoke test that the import works; consider removing.
p = Person()
def run_Simulation2(k, N=100, T=10, start=1, p=0.5, q=0.08, startcenter=False, startcorner=False):
    """Run the discrete agent-based SIR simulation and plot s/i/r over time.

    Args:
        k: Probability that an infected person recovers at each time step.
        N: Population size.
        T: Number of time steps to simulate.
        start: Number of initially infected people.
        p: Step size of each person's random move per time step.
        q: Infection radius — susceptible people within distance q of an
            infected person become infected.
        startcenter: If True, relocate the initially infected to the center.
        startcorner: If True, relocate the initially infected to the corner.
    """
    recover = [0]
    infect = [start]
    suspect = [N - start]
    pop = [Person() for _ in range(N)]
    # Infect the first `start` people; optionally reposition them.
    for i in range(start):
        pop[i].get_infected()
    if startcenter:
        resetcenter(start, pop)
    if startcorner:
        resetcorner(start, pop)
    np.random.seed(10)  # fixed seed for reproducible runs
    for _ in range(T):
        # Everyone takes a random step of size p.
        for person in pop:
            person.movepos(p)
        X = calculatedistance(pop)
        tree = cKDTree(X)
        # Each infected person infects all susceptible neighbours within q.
        for j in range(N):
            if pop[j].is_infected():
                inds = tree.query_ball_point(np.array([X[j]]), q)[0]
                for l in inds:
                    if pop[l].is_willinfected():
                        pop[l].get_infected()
        # Infected people recover independently with probability k.
        for person in pop:
            if person.is_infected() and np.random.rand() < k:
                person.get_recovered()
        recover.append(count_recover(pop))
        infect.append(count_infect(pop))
        suspect.append(count_suspectial(pop))
    newrecover = [r / N for r in recover]
    newsuspect = [s / N for s in suspect]
    newinfect = [c / N for c in infect]
    plt.plot(range(T + 1), newrecover, label="r: percentage of removed ")
    plt.plot(range(T + 1), newsuspect, label="s: percentage of susceptible")
    plt.plot(range(T + 1), newinfect, label="i: percentage of infected")
    plt.xlabel("T")
    plt.ylabel("percentage")
    plt.title("Percentage of Population, Discrete")
    plt.legend()
    plt.show()
# Demo run: 20,000 people, 30 steps, 10 initially infected, recovery
# probability 0.6; p and q keep their default values.
run_Simulation2(0.6,N=20000,T = 30,start=10)
def checkinfectb(k, N, T, start=1, p=0.5, q=0.08, startcenter=False, startcorner=False):
    """Run the discrete SIR simulation and return summary infection fractions.

    Same dynamics as run_Simulation2, but without plotting.

    Args:
        k: Probability that an infected person recovers at each time step.
        N: Population size.
        T: Number of time steps to simulate.
        start: Number of initially infected people.
        p: Step size of each person's random move per time step.
        q: Infection radius.
        startcenter: If True, relocate the initially infected to the center.
        startcorner: If True, relocate the initially infected to the corner.

    Returns:
        np.array([fraction_ever_infected, fraction_currently_infected]),
        where "ever infected" counts both the infected and the recovered.
    """
    pop = [Person() for _ in range(N)]
    np.random.seed(10)
    for i in range(start):
        pop[i].get_infected()
    if startcenter:
        resetcenter(start, pop)
    if startcorner:
        resetcorner(start, pop)
    # NOTE(review): seeding a second time is redundant unless the reset
    # helpers consume random numbers; kept to preserve the original stream.
    np.random.seed(10)
    for _ in range(T):
        for person in pop:
            person.movepos(p)
        X = calculatedistance(pop)
        tree = cKDTree(X)
        for j in range(N):
            if pop[j].is_infected():
                inds = tree.query_ball_point(np.array([X[j]]), q)[0]
                for l in inds:
                    if pop[l].is_willinfected():
                        pop[l].get_infected()
        for person in pop:
            if person.is_infected() and np.random.rand() < k:
                person.get_recovered()
    return np.array([(count_infect(pop) + count_recover(pop)) / N,
                     count_infect(pop) / N])
def plotcenterrange():
    """Plot the total infected fraction vs move size p, starting from center.

    Sweeps p over 0.02-0.08 (step 0.02) then 0.1-0.9 (step 0.1) with a
    population of 20000, 30 time steps and 200 initially infected people
    placed at the center; q is chosen so the infection disc covers an
    expected two people.
    """
    # Combine the fine (0.02-0.08) and coarse (0.1-0.9) sweeps into one grid.
    plist = np.hstack((np.arange(0.02, 0.1, 0.02), np.arange(0.1, 1, 0.1)))
    infectlist = [
        checkinfectb(0.5, 20000, 30, 200, p=i,
                     q=np.sqrt(2 / (20000 * math.pi)), startcenter=True)[0]
        for i in plist
    ]
    plt.plot(plist, infectlist)
    # The original set an intermediate title ("centerplot") that was
    # immediately overwritten; only the final title is kept.
    plt.xlabel("p")
    plt.ylabel("total number of individuals infected")
    plt.title("Total Number of Individuals Infected vs p")
    plt.show()
# Run the center-seeded sweep defined above.
plotcenterrange()
"""
def plotrandomcornerrange():
    plist1 = np.arange(0.02,0.1,0.02)
    plist = np.arange(0.1,1,0.1)
    infectlist = []
    infectlist2 = []
    infectlist3 = []
    for i in plist1:
        infectlist.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)),startcorner=True)[0])
        infectlist2.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)))[0])
        infectlist3.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)),startcenter = True)[0])
    for i in plist:
        infectlist.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)),startcorner=True)[0])
        infectlist2.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)))[0])
        infectlist3.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)),startcenter = True)[0])
    plt.plot(np.hstack((plist1,plist)),infectlist,label = "corner")
    plt.plot(np.hstack((plist1,plist)),infectlist2,label = "random")
    plt.plot(np.hstack((plist1,plist)),infectlist3,label = "center")
    plt.title("Change from random corner center")
    plt.xlabel("change of p")
    plt.ylabel("change of total infected people")
    plt.legend()
    plt.show()
"""
#plotrandomcornerrange()
#no need for us to use this function
# Compare the epidemic size at p = 0.05 for the three seeding strategies
# (random / corner / center).  The interaction radius is the same in all
# three runs, so compute it once.
_q005 = np.sqrt(2 / (20000 * math.pi))
valuecorner = checkinfectb(0.5, 20000, 30, 200, p=0.05, q=_q005, startcorner=True)[0]
valuecenter = checkinfectb(0.5, 20000, 30, 200, p=0.05, q=_q005, startcenter=True)[0]
valuerandom = checkinfectb(0.5, 20000, 30, 200, p=0.05, q=_q005)[0]
print(f"p = 0.05, starting randomly, the total infected number is {valuerandom}")
print(f"p = 0.05, starting from corner, the total infected number is {valuecorner}")
print(f"p = 0.05, starting from center, the total infected number is {valuecenter}")
|
normal
|
{
"blob_id": "92317996f884befd646138cd3a3dc3f8345679f4",
"index": 2122,
"step-1": "<mask token>\n\n\ndef run_Simulation2(k, N=100, T=10, start=1, p=0.5, q=0.08, startcenter=\n False, startcorner=False):\n \"\"\"\n run the simulation for the pop\n \"\"\"\n recover = [0]\n infect = [start]\n suspect = [N - start]\n pop = [Person() for i in range(N)]\n for i in range(start):\n pop[i].get_infected()\n if startcenter:\n resetcenter(start, pop)\n if startcorner:\n resetcorner(start, pop)\n np.random.seed(10)\n for i in range(T):\n for j in range(N):\n pop[j].movepos(p)\n X = calculatedistance(pop)\n tree = cKDTree(X)\n for j in range(N):\n if pop[j].is_infected():\n addvalue = np.array([X[j]])\n inds = tree.query_ball_point(addvalue, q)\n inds = inds[0]\n for l in inds:\n if pop[l].is_willinfected():\n pop[l].get_infected()\n for j in range(N):\n if pop[j].is_infected():\n if np.random.rand() < k:\n pop[j].get_recovered()\n recover.append(count_recover(pop))\n infect.append(count_infect(pop))\n suspect.append(count_suspectial(pop))\n newrecover = [(i / N) for i in recover]\n newsuspect = [(s / N) for s in suspect]\n newinfect = [(i / N) for i in infect]\n plt.plot(range(T + 1), newrecover, label='r: percentage of removed ')\n plt.plot(range(T + 1), newsuspect, label='s: percentage of susceptible')\n plt.plot(range(T + 1), newinfect, label='i: percentage of infected')\n plt.xlabel('T')\n plt.ylabel('percentage')\n plt.title('Percentage of Population, Discrete')\n plt.legend()\n plt.show()\n\n\n<mask token>\n\n\ndef checkinfectb(k, N, T, start=1, p=0.5, q=0.08, startcenter=False,\n startcorner=False):\n \"\"\"\n we use this function for checking the total infected people\n \"\"\"\n recover = [0]\n infect = [start]\n suspect = [N - start]\n pop = [Person() for i in range(N)]\n np.random.seed(10)\n for i in range(start):\n pop[i].get_infected()\n if startcenter:\n resetcenter(start, pop)\n if startcorner:\n resetcorner(start, pop)\n np.random.seed(10)\n for i in range(T):\n for j in range(N):\n pop[j].movepos(p)\n X = calculatedistance(pop)\n 
tree = cKDTree(X)\n for j in range(N):\n if pop[j].is_infected():\n addvalue = np.array([X[j]])\n inds = tree.query_ball_point(addvalue, q)\n inds = inds[0]\n for l in inds:\n if pop[l].is_willinfected():\n pop[l].get_infected()\n for j in range(N):\n if pop[j].is_infected():\n if np.random.rand() < k:\n pop[j].get_recovered()\n return np.array([(count_infect(pop) + count_recover(pop)) / N, \n count_infect(pop) / N])\n\n\ndef plotcenterrange():\n \"\"\"\n show how the total infected people i change with p start from center\n \"\"\"\n plist1 = np.arange(0.02, 0.1, 0.02)\n plist = np.arange(0.1, 1, 0.1)\n infectlist = []\n for i in plist1:\n infectlist.append(checkinfectb(0.5, 20000, 30, 200, p=i, q=np.sqrt(\n 2 / (20000 * math.pi)), startcenter=True)[0])\n for i in plist:\n infectlist.append(checkinfectb(0.5, 20000, 30, 200, p=i, q=np.sqrt(\n 2 / (20000 * math.pi)), startcenter=True)[0])\n plt.plot(np.hstack((plist1, plist)), infectlist)\n plt.title('centerplot')\n plt.xlabel('p')\n plt.ylabel('total number of individuals infected')\n plt.title('Total Number of Individuals Infected vs p')\n plt.show()\n\n\n<mask token>\n",
"step-2": "<mask token>\nsys.path.append('../')\n<mask token>\n\n\ndef run_Simulation2(k, N=100, T=10, start=1, p=0.5, q=0.08, startcenter=\n False, startcorner=False):\n \"\"\"\n run the simulation for the pop\n \"\"\"\n recover = [0]\n infect = [start]\n suspect = [N - start]\n pop = [Person() for i in range(N)]\n for i in range(start):\n pop[i].get_infected()\n if startcenter:\n resetcenter(start, pop)\n if startcorner:\n resetcorner(start, pop)\n np.random.seed(10)\n for i in range(T):\n for j in range(N):\n pop[j].movepos(p)\n X = calculatedistance(pop)\n tree = cKDTree(X)\n for j in range(N):\n if pop[j].is_infected():\n addvalue = np.array([X[j]])\n inds = tree.query_ball_point(addvalue, q)\n inds = inds[0]\n for l in inds:\n if pop[l].is_willinfected():\n pop[l].get_infected()\n for j in range(N):\n if pop[j].is_infected():\n if np.random.rand() < k:\n pop[j].get_recovered()\n recover.append(count_recover(pop))\n infect.append(count_infect(pop))\n suspect.append(count_suspectial(pop))\n newrecover = [(i / N) for i in recover]\n newsuspect = [(s / N) for s in suspect]\n newinfect = [(i / N) for i in infect]\n plt.plot(range(T + 1), newrecover, label='r: percentage of removed ')\n plt.plot(range(T + 1), newsuspect, label='s: percentage of susceptible')\n plt.plot(range(T + 1), newinfect, label='i: percentage of infected')\n plt.xlabel('T')\n plt.ylabel('percentage')\n plt.title('Percentage of Population, Discrete')\n plt.legend()\n plt.show()\n\n\nrun_Simulation2(0.6, N=20000, T=30, start=10)\n\n\ndef checkinfectb(k, N, T, start=1, p=0.5, q=0.08, startcenter=False,\n startcorner=False):\n \"\"\"\n we use this function for checking the total infected people\n \"\"\"\n recover = [0]\n infect = [start]\n suspect = [N - start]\n pop = [Person() for i in range(N)]\n np.random.seed(10)\n for i in range(start):\n pop[i].get_infected()\n if startcenter:\n resetcenter(start, pop)\n if startcorner:\n resetcorner(start, pop)\n np.random.seed(10)\n for i in range(T):\n 
for j in range(N):\n pop[j].movepos(p)\n X = calculatedistance(pop)\n tree = cKDTree(X)\n for j in range(N):\n if pop[j].is_infected():\n addvalue = np.array([X[j]])\n inds = tree.query_ball_point(addvalue, q)\n inds = inds[0]\n for l in inds:\n if pop[l].is_willinfected():\n pop[l].get_infected()\n for j in range(N):\n if pop[j].is_infected():\n if np.random.rand() < k:\n pop[j].get_recovered()\n return np.array([(count_infect(pop) + count_recover(pop)) / N, \n count_infect(pop) / N])\n\n\ndef plotcenterrange():\n \"\"\"\n show how the total infected people i change with p start from center\n \"\"\"\n plist1 = np.arange(0.02, 0.1, 0.02)\n plist = np.arange(0.1, 1, 0.1)\n infectlist = []\n for i in plist1:\n infectlist.append(checkinfectb(0.5, 20000, 30, 200, p=i, q=np.sqrt(\n 2 / (20000 * math.pi)), startcenter=True)[0])\n for i in plist:\n infectlist.append(checkinfectb(0.5, 20000, 30, 200, p=i, q=np.sqrt(\n 2 / (20000 * math.pi)), startcenter=True)[0])\n plt.plot(np.hstack((plist1, plist)), infectlist)\n plt.title('centerplot')\n plt.xlabel('p')\n plt.ylabel('total number of individuals infected')\n plt.title('Total Number of Individuals Infected vs p')\n plt.show()\n\n\nplotcenterrange()\n<mask token>\nprint('p = 0.05, starting randomly, the total infected number is ' + str(\n valuerandom))\nprint('p = 0.05, starting from corner, the total infected number is ' + str\n (valuecorner))\nprint('p = 0.05, starting from center, the total infected number is ' + str\n (valuecenter))\n",
"step-3": "<mask token>\nsys.path.append('../')\n<mask token>\np = Person()\n\n\ndef run_Simulation2(k, N=100, T=10, start=1, p=0.5, q=0.08, startcenter=\n False, startcorner=False):\n \"\"\"\n run the simulation for the pop\n \"\"\"\n recover = [0]\n infect = [start]\n suspect = [N - start]\n pop = [Person() for i in range(N)]\n for i in range(start):\n pop[i].get_infected()\n if startcenter:\n resetcenter(start, pop)\n if startcorner:\n resetcorner(start, pop)\n np.random.seed(10)\n for i in range(T):\n for j in range(N):\n pop[j].movepos(p)\n X = calculatedistance(pop)\n tree = cKDTree(X)\n for j in range(N):\n if pop[j].is_infected():\n addvalue = np.array([X[j]])\n inds = tree.query_ball_point(addvalue, q)\n inds = inds[0]\n for l in inds:\n if pop[l].is_willinfected():\n pop[l].get_infected()\n for j in range(N):\n if pop[j].is_infected():\n if np.random.rand() < k:\n pop[j].get_recovered()\n recover.append(count_recover(pop))\n infect.append(count_infect(pop))\n suspect.append(count_suspectial(pop))\n newrecover = [(i / N) for i in recover]\n newsuspect = [(s / N) for s in suspect]\n newinfect = [(i / N) for i in infect]\n plt.plot(range(T + 1), newrecover, label='r: percentage of removed ')\n plt.plot(range(T + 1), newsuspect, label='s: percentage of susceptible')\n plt.plot(range(T + 1), newinfect, label='i: percentage of infected')\n plt.xlabel('T')\n plt.ylabel('percentage')\n plt.title('Percentage of Population, Discrete')\n plt.legend()\n plt.show()\n\n\nrun_Simulation2(0.6, N=20000, T=30, start=10)\n\n\ndef checkinfectb(k, N, T, start=1, p=0.5, q=0.08, startcenter=False,\n startcorner=False):\n \"\"\"\n we use this function for checking the total infected people\n \"\"\"\n recover = [0]\n infect = [start]\n suspect = [N - start]\n pop = [Person() for i in range(N)]\n np.random.seed(10)\n for i in range(start):\n pop[i].get_infected()\n if startcenter:\n resetcenter(start, pop)\n if startcorner:\n resetcorner(start, pop)\n np.random.seed(10)\n for i in 
range(T):\n for j in range(N):\n pop[j].movepos(p)\n X = calculatedistance(pop)\n tree = cKDTree(X)\n for j in range(N):\n if pop[j].is_infected():\n addvalue = np.array([X[j]])\n inds = tree.query_ball_point(addvalue, q)\n inds = inds[0]\n for l in inds:\n if pop[l].is_willinfected():\n pop[l].get_infected()\n for j in range(N):\n if pop[j].is_infected():\n if np.random.rand() < k:\n pop[j].get_recovered()\n return np.array([(count_infect(pop) + count_recover(pop)) / N, \n count_infect(pop) / N])\n\n\ndef plotcenterrange():\n \"\"\"\n show how the total infected people i change with p start from center\n \"\"\"\n plist1 = np.arange(0.02, 0.1, 0.02)\n plist = np.arange(0.1, 1, 0.1)\n infectlist = []\n for i in plist1:\n infectlist.append(checkinfectb(0.5, 20000, 30, 200, p=i, q=np.sqrt(\n 2 / (20000 * math.pi)), startcenter=True)[0])\n for i in plist:\n infectlist.append(checkinfectb(0.5, 20000, 30, 200, p=i, q=np.sqrt(\n 2 / (20000 * math.pi)), startcenter=True)[0])\n plt.plot(np.hstack((plist1, plist)), infectlist)\n plt.title('centerplot')\n plt.xlabel('p')\n plt.ylabel('total number of individuals infected')\n plt.title('Total Number of Individuals Infected vs p')\n plt.show()\n\n\nplotcenterrange()\n<mask token>\nvaluecorner = checkinfectb(0.5, 20000, 30, 200, p=0.05, q=np.sqrt(2 / (\n 20000 * math.pi)), startcorner=True)[0]\nvaluecenter = checkinfectb(0.5, 20000, 30, 200, p=0.05, q=np.sqrt(2 / (\n 20000 * math.pi)), startcenter=True)[0]\nvaluerandom = checkinfectb(0.5, 20000, 30, 200, p=0.05, q=np.sqrt(2 / (\n 20000 * math.pi)))[0]\nprint('p = 0.05, starting randomly, the total infected number is ' + str(\n valuerandom))\nprint('p = 0.05, starting from corner, the total infected number is ' + str\n (valuecorner))\nprint('p = 0.05, starting from center, the total infected number is ' + str\n (valuecenter))\n",
"step-4": "import sys\nimport os\nimport numpy as np\nimport math\nsys.path.append('../')\nfrom sir.improveagent import *\nimport numpy as np\nimport numpy.linalg as la\nimport matplotlib.pyplot as plt\nfrom scipy.spatial import KDTree\nfrom scipy.spatial import cKDTree\nfrom scipy.spatial.distance import pdist\nimport networkx as nx\np = Person()\n\n\ndef run_Simulation2(k, N=100, T=10, start=1, p=0.5, q=0.08, startcenter=\n False, startcorner=False):\n \"\"\"\n run the simulation for the pop\n \"\"\"\n recover = [0]\n infect = [start]\n suspect = [N - start]\n pop = [Person() for i in range(N)]\n for i in range(start):\n pop[i].get_infected()\n if startcenter:\n resetcenter(start, pop)\n if startcorner:\n resetcorner(start, pop)\n np.random.seed(10)\n for i in range(T):\n for j in range(N):\n pop[j].movepos(p)\n X = calculatedistance(pop)\n tree = cKDTree(X)\n for j in range(N):\n if pop[j].is_infected():\n addvalue = np.array([X[j]])\n inds = tree.query_ball_point(addvalue, q)\n inds = inds[0]\n for l in inds:\n if pop[l].is_willinfected():\n pop[l].get_infected()\n for j in range(N):\n if pop[j].is_infected():\n if np.random.rand() < k:\n pop[j].get_recovered()\n recover.append(count_recover(pop))\n infect.append(count_infect(pop))\n suspect.append(count_suspectial(pop))\n newrecover = [(i / N) for i in recover]\n newsuspect = [(s / N) for s in suspect]\n newinfect = [(i / N) for i in infect]\n plt.plot(range(T + 1), newrecover, label='r: percentage of removed ')\n plt.plot(range(T + 1), newsuspect, label='s: percentage of susceptible')\n plt.plot(range(T + 1), newinfect, label='i: percentage of infected')\n plt.xlabel('T')\n plt.ylabel('percentage')\n plt.title('Percentage of Population, Discrete')\n plt.legend()\n plt.show()\n\n\nrun_Simulation2(0.6, N=20000, T=30, start=10)\n\n\ndef checkinfectb(k, N, T, start=1, p=0.5, q=0.08, startcenter=False,\n startcorner=False):\n \"\"\"\n we use this function for checking the total infected people\n \"\"\"\n recover = 
[0]\n infect = [start]\n suspect = [N - start]\n pop = [Person() for i in range(N)]\n np.random.seed(10)\n for i in range(start):\n pop[i].get_infected()\n if startcenter:\n resetcenter(start, pop)\n if startcorner:\n resetcorner(start, pop)\n np.random.seed(10)\n for i in range(T):\n for j in range(N):\n pop[j].movepos(p)\n X = calculatedistance(pop)\n tree = cKDTree(X)\n for j in range(N):\n if pop[j].is_infected():\n addvalue = np.array([X[j]])\n inds = tree.query_ball_point(addvalue, q)\n inds = inds[0]\n for l in inds:\n if pop[l].is_willinfected():\n pop[l].get_infected()\n for j in range(N):\n if pop[j].is_infected():\n if np.random.rand() < k:\n pop[j].get_recovered()\n return np.array([(count_infect(pop) + count_recover(pop)) / N, \n count_infect(pop) / N])\n\n\ndef plotcenterrange():\n \"\"\"\n show how the total infected people i change with p start from center\n \"\"\"\n plist1 = np.arange(0.02, 0.1, 0.02)\n plist = np.arange(0.1, 1, 0.1)\n infectlist = []\n for i in plist1:\n infectlist.append(checkinfectb(0.5, 20000, 30, 200, p=i, q=np.sqrt(\n 2 / (20000 * math.pi)), startcenter=True)[0])\n for i in plist:\n infectlist.append(checkinfectb(0.5, 20000, 30, 200, p=i, q=np.sqrt(\n 2 / (20000 * math.pi)), startcenter=True)[0])\n plt.plot(np.hstack((plist1, plist)), infectlist)\n plt.title('centerplot')\n plt.xlabel('p')\n plt.ylabel('total number of individuals infected')\n plt.title('Total Number of Individuals Infected vs p')\n plt.show()\n\n\nplotcenterrange()\n<mask token>\nvaluecorner = checkinfectb(0.5, 20000, 30, 200, p=0.05, q=np.sqrt(2 / (\n 20000 * math.pi)), startcorner=True)[0]\nvaluecenter = checkinfectb(0.5, 20000, 30, 200, p=0.05, q=np.sqrt(2 / (\n 20000 * math.pi)), startcenter=True)[0]\nvaluerandom = checkinfectb(0.5, 20000, 30, 200, p=0.05, q=np.sqrt(2 / (\n 20000 * math.pi)))[0]\nprint('p = 0.05, starting randomly, the total infected number is ' + str(\n valuerandom))\nprint('p = 0.05, starting from corner, the total infected number is ' 
+ str\n (valuecorner))\nprint('p = 0.05, starting from center, the total infected number is ' + str\n (valuecenter))\n",
"step-5": "import sys\nimport os\nimport numpy as np\nimport math\nsys.path.append(\"../\")\nfrom sir.improveagent import *\nimport numpy as np\nimport numpy.linalg as la\nimport matplotlib.pyplot as plt\n#from sklearn.neighbors import BallTree\nfrom scipy.spatial import KDTree\nfrom scipy.spatial import cKDTree\nfrom scipy.spatial.distance import pdist\nimport networkx as nx\n\np = Person()\n\ndef run_Simulation2(k,N=100,T=10,start = 1,p=0.5,q=0.08,startcenter = False,startcorner=False):\n \"\"\"\n run the simulation for the pop\n \"\"\"\n recover = [0]\n infect = [start]\n suspect = [N-start]\n pop = [Person() for i in range(N)]\n ##we need to change the code for the case start people infected\n for i in range(start):\n pop[i].get_infected();\n if(startcenter):\n resetcenter(start,pop)\n if(startcorner):\n resetcorner(start,pop)\n np.random.seed(10)\n for i in range(T):\n for j in range(N):\n pop[j].movepos(p)\n X = calculatedistance(pop)\n tree = cKDTree(X)\n for j in range(N):\n if pop[j].is_infected():\n addvalue = np.array([X[j]])\n inds = tree.query_ball_point(addvalue, q)\n inds = inds[0]\n #may have problem here\n for l in inds:\n if pop[l].is_willinfected():\n pop[l].get_infected()\n\n for j in range(N):\n if pop[j].is_infected():\n if np.random.rand()< k:\n pop[j].get_recovered()\n\n recover.append(count_recover(pop))\n infect.append(count_infect(pop))\n suspect.append(count_suspectial(pop))\n newrecover = [i/N for i in recover]\n newsuspect = [s/N for s in suspect]\n newinfect = [i/N for i in infect]\n plt.plot(range(T+1),newrecover,label = \"r: percentage of removed \")\n plt.plot(range(T+1),newsuspect,label = \"s: percentage of susceptible\")\n plt.plot(range(T+1),newinfect,label = \"i: percentage of infected\")\n plt.xlabel(\"T\")\n plt.ylabel(\"percentage\")\n plt.title(\"Percentage of Population, Discrete\")\n plt.legend()\n plt.show()\n\n\n#We run a simulation here,use the default value of p and q\nrun_Simulation2(0.6,N=20000,T = 
30,start=10)\n\ndef checkinfectb(k,N,T,start=1,p=0.5,q=0.08,startcenter = False,startcorner=False):\n \"\"\"\n we use this function for checking the total infected people\n \"\"\"\n recover = [0]\n infect = [start]\n suspect = [N-start]\n pop = [Person() for i in range(N)]\n np.random.seed(10)\n for i in range(start):\n pop[i].get_infected();\n if(startcenter):\n resetcenter(start,pop)\n if(startcorner):\n resetcorner(start,pop)\n np.random.seed(10)\n for i in range(T):\n for j in range(N):\n pop[j].movepos(p)\n X = calculatedistance(pop)\n tree = cKDTree(X)\n for j in range(N):\n if pop[j].is_infected():\n addvalue = np.array([X[j]])\n inds = tree.query_ball_point(addvalue, q)\n inds = inds[0]\n for l in inds:\n if pop[l].is_willinfected():\n pop[l].get_infected()\n for j in range(N):\n if pop[j].is_infected():\n if np.random.rand()<k:\n pop[j].get_recovered()\n return np.array([(count_infect(pop)+count_recover(pop))/N,count_infect(pop)/N])\n\n\n\ndef plotcenterrange():\n \"\"\"\n show how the total infected people i change with p start from center\n \"\"\"\n plist1 = np.arange(0.02,0.1,0.02)\n plist = np.arange(0.1,1,0.1)\n infectlist = []\n for i in plist1:\n infectlist.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)),startcenter=True)[0])\n for i in plist:\n infectlist.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)),startcenter=True)[0])\n plt.plot(np.hstack((plist1,plist)),infectlist)\n plt.title(\"centerplot\")\n plt.xlabel(\"p\")\n plt.ylabel(\"total number of individuals infected\")\n plt.title(\"Total Number of Individuals Infected vs p\")\n plt.show()\n\nplotcenterrange()\n\n\n\n\"\"\"\ndef plotrandomcornerrange():\n\n plist1 = np.arange(0.02,0.1,0.02)\n plist = np.arange(0.1,1,0.1)\n infectlist = []\n infectlist2 = []\n infectlist3 = []\n for i in plist1:\n infectlist.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)),startcorner=True)[0])\n 
infectlist2.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)))[0])\n infectlist3.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)),startcenter = True)[0])\n for i in plist:\n infectlist.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)),startcorner=True)[0])\n infectlist2.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)))[0])\n infectlist3.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)),startcenter = True)[0])\n plt.plot(np.hstack((plist1,plist)),infectlist,label = \"corner\")\n plt.plot(np.hstack((plist1,plist)),infectlist2,label = \"random\")\n plt.plot(np.hstack((plist1,plist)),infectlist3,label = \"center\")\n plt.title(\"Change from random corner center\")\n plt.xlabel(\"change of p\")\n plt.ylabel(\"change of total infected people\")\n plt.legend()\n plt.show()\n\n\"\"\"\n#plotrandomcornerrange()\n#no need for us to use this function\n\nvaluecorner = checkinfectb(0.5,20000,30,200,p = 0.05,q = np.sqrt(2/(20000*math.pi)),startcorner=True)[0]\nvaluecenter = checkinfectb(0.5,20000,30,200,p = 0.05,q = np.sqrt(2/(20000*math.pi)),startcenter=True)[0]\nvaluerandom = checkinfectb(0.5,20000,30,200,p = 0.05,q = np.sqrt(2/(20000*math.pi)))[0]\nprint(\"p = 0.05, starting randomly, the total infected number is \"+ str(valuerandom))\nprint(\"p = 0.05, starting from corner, the total infected number is \"+ str(valuecorner))\nprint(\"p = 0.05, starting from center, the total infected number is \"+ str(valuecenter))\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'Admin.ui'
#
# Created by: PyQt5 UI code generator 5.12
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from qtpandas.views.DataTableView import DataTableWidget
from qtpandas.models.DataFrameModel import DataFrameModel
import pandas as pd
class Ui_Admin(object):
    """Auto-generated (pyuic5) UI definition for the admin back-office window.

    Builds, on an ``Admin`` window, a row of navigation buttons, a qtpandas
    ``DataTableWidget`` for viewing/editing a DataFrame, and a row of
    save/reset/refresh buttons.  The file header warns this class is
    regenerated from ``Admin.ui``; hand edits will be lost on regeneration.
    """

    def setupUi(self, Admin):
        """Build the widget tree and attach it to *Admin*.

        *Admin* is used like a QMainWindow here (setCentralWidget,
        setMenuBar, setStatusBar are called on it).
        """
        Admin.setObjectName("Admin")
        Admin.resize(679, 490)
        # Central widget with a vertical layout: button row / table / button row.
        self.centralwidget = QtWidgets.QWidget(Admin)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)
        self.verticalLayout.setObjectName("verticalLayout")
        # --- Top navigation row: menu / user / order management, back. ---
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.menu_btn = QtWidgets.QPushButton(self.centralwidget)
        # The generated code recreates the same 15pt font for every button.
        font = QtGui.QFont()
        font.setPointSize(15)
        self.menu_btn.setFont(font)
        self.menu_btn.setObjectName("menu_btn")
        self.horizontalLayout.addWidget(self.menu_btn)
        self.user_btn = QtWidgets.QPushButton(self.centralwidget)
        font = QtGui.QFont()
        font.setPointSize(15)
        self.user_btn.setFont(font)
        self.user_btn.setObjectName("user_btn")
        self.horizontalLayout.addWidget(self.user_btn)
        self.order_btn = QtWidgets.QPushButton(self.centralwidget)
        font = QtGui.QFont()
        font.setPointSize(15)
        self.order_btn.setFont(font)
        self.order_btn.setObjectName("order_btn")
        self.horizontalLayout.addWidget(self.order_btn)
        self.back = QtWidgets.QPushButton(self.centralwidget)
        font = QtGui.QFont()
        font.setPointSize(15)
        self.back.setFont(font)
        self.back.setObjectName("back")
        self.horizontalLayout.addWidget(self.back)
        self.verticalLayout.addLayout(self.horizontalLayout)
        # --- Middle: editable DataFrame table (qtpandas widget). ---
        self.infoTable = DataTableWidget(self.centralwidget)
        self.infoTable.setObjectName("infoTable")
        self.verticalLayout.addWidget(self.infoTable)
        # --- Bottom row: save / reset-to-initial / refresh. ---
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.save = QtWidgets.QPushButton(self.centralwidget)
        font = QtGui.QFont()
        font.setPointSize(15)
        self.save.setFont(font)
        self.save.setObjectName("save")
        self.horizontalLayout_2.addWidget(self.save)
        self.original = QtWidgets.QPushButton(self.centralwidget)
        font = QtGui.QFont()
        font.setPointSize(15)
        self.original.setFont(font)
        self.original.setObjectName("original")
        self.horizontalLayout_2.addWidget(self.original)
        self.fresh = QtWidgets.QPushButton(self.centralwidget)
        font = QtGui.QFont()
        font.setPointSize(15)
        self.fresh.setFont(font)
        self.fresh.setObjectName("fresh")
        self.horizontalLayout_2.addWidget(self.fresh)
        self.verticalLayout.addLayout(self.horizontalLayout_2)
        Admin.setCentralWidget(self.centralwidget)
        # --- Menu bar and status bar. ---
        self.menubar = QtWidgets.QMenuBar(Admin)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 679, 23))
        self.menubar.setObjectName("menubar")
        self.menu_4 = QtWidgets.QMenu(self.menubar)
        self.menu_4.setObjectName("menu_4")
        Admin.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(Admin)
        self.statusbar.setObjectName("statusbar")
        Admin.setStatusBar(self.statusbar)
        # Actions declared in the .ui file.  Note: only ``help`` is actually
        # attached to a menu below; the others are created but unattached
        # here — presumably wired up (or vestigial) elsewhere; TODO confirm.
        self.update1 = QtWidgets.QAction(Admin)
        self.update1.setObjectName("update1")
        self.add = QtWidgets.QAction(Admin)
        self.add.setObjectName("add")
        self.update2 = QtWidgets.QAction(Admin)
        self.update2.setObjectName("update2")
        self.delete_2 = QtWidgets.QAction(Admin)
        self.delete_2.setObjectName("delete_2")
        self.delete_3 = QtWidgets.QAction(Admin)
        self.delete_3.setObjectName("delete_3")
        self.add_2 = QtWidgets.QAction(Admin)
        self.add_2.setObjectName("add_2")
        self.help = QtWidgets.QAction(Admin)
        self.help.setObjectName("help")
        self.actionAllEmpty = QtWidgets.QAction(Admin)
        self.actionAllEmpty.setObjectName("actionAllEmpty")
        self.menu_4.addAction(self.help)
        self.menubar.addAction(self.menu_4.menuAction())
        # Apply all user-visible strings, then auto-connect on_<object>_<signal> slots.
        self.retranslateUi(Admin)
        QtCore.QMetaObject.connectSlotsByName(Admin)
        # Back the qtpandas table with an (initially empty) DataFrameModel.
        self.model=DataFrameModel()
        self.infoTable.setModel(self.model)

    def retranslateUi(self, Admin):
        """Set every user-visible string (Chinese UI labels) via Qt translate."""
        _translate = QtCore.QCoreApplication.translate
        Admin.setWindowTitle(_translate("Admin", "后台管理界面"))
        self.menu_btn.setText(_translate("Admin", "菜单管理"))
        self.user_btn.setText(_translate("Admin", "用户管理"))
        self.order_btn.setText(_translate("Admin", "订单信息"))
        self.back.setText(_translate("Admin", "返回登录"))
        self.save.setText(_translate("Admin", "保存数据"))
        self.original.setText(_translate("Admin", "初始化"))
        self.fresh.setText(_translate("Admin", "刷新"))
        self.menu_4.setTitle(_translate("Admin", "帮助"))
        self.update1.setText(_translate("Admin", "update"))
        self.add.setText(_translate("Admin", "add"))
        self.update2.setText(_translate("Admin", "update"))
        self.delete_2.setText(_translate("Admin", "delete"))
        self.delete_3.setText(_translate("Admin", "delete"))
        self.add_2.setText(_translate("Admin", "add"))
        self.help.setText(_translate("Admin", "help"))
        self.actionAllEmpty.setText(_translate("Admin", "AllEmpty"))
|
normal
|
{
"blob_id": "5e2a8e95af88a582b6e760a53dfd41f880d66963",
"index": 2670,
"step-1": "<mask token>\n\n\nclass Ui_Admin(object):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Ui_Admin(object):\n\n def setupUi(self, Admin):\n Admin.setObjectName('Admin')\n Admin.resize(679, 490)\n self.centralwidget = QtWidgets.QWidget(Admin)\n self.centralwidget.setObjectName('centralwidget')\n self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)\n self.verticalLayout.setObjectName('verticalLayout')\n self.horizontalLayout = QtWidgets.QHBoxLayout()\n self.horizontalLayout.setObjectName('horizontalLayout')\n self.menu_btn = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.menu_btn.setFont(font)\n self.menu_btn.setObjectName('menu_btn')\n self.horizontalLayout.addWidget(self.menu_btn)\n self.user_btn = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.user_btn.setFont(font)\n self.user_btn.setObjectName('user_btn')\n self.horizontalLayout.addWidget(self.user_btn)\n self.order_btn = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.order_btn.setFont(font)\n self.order_btn.setObjectName('order_btn')\n self.horizontalLayout.addWidget(self.order_btn)\n self.back = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.back.setFont(font)\n self.back.setObjectName('back')\n self.horizontalLayout.addWidget(self.back)\n self.verticalLayout.addLayout(self.horizontalLayout)\n self.infoTable = DataTableWidget(self.centralwidget)\n self.infoTable.setObjectName('infoTable')\n self.verticalLayout.addWidget(self.infoTable)\n self.horizontalLayout_2 = QtWidgets.QHBoxLayout()\n self.horizontalLayout_2.setObjectName('horizontalLayout_2')\n self.save = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.save.setFont(font)\n self.save.setObjectName('save')\n self.horizontalLayout_2.addWidget(self.save)\n self.original = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n 
self.original.setFont(font)\n self.original.setObjectName('original')\n self.horizontalLayout_2.addWidget(self.original)\n self.fresh = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.fresh.setFont(font)\n self.fresh.setObjectName('fresh')\n self.horizontalLayout_2.addWidget(self.fresh)\n self.verticalLayout.addLayout(self.horizontalLayout_2)\n Admin.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(Admin)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 679, 23))\n self.menubar.setObjectName('menubar')\n self.menu_4 = QtWidgets.QMenu(self.menubar)\n self.menu_4.setObjectName('menu_4')\n Admin.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(Admin)\n self.statusbar.setObjectName('statusbar')\n Admin.setStatusBar(self.statusbar)\n self.update1 = QtWidgets.QAction(Admin)\n self.update1.setObjectName('update1')\n self.add = QtWidgets.QAction(Admin)\n self.add.setObjectName('add')\n self.update2 = QtWidgets.QAction(Admin)\n self.update2.setObjectName('update2')\n self.delete_2 = QtWidgets.QAction(Admin)\n self.delete_2.setObjectName('delete_2')\n self.delete_3 = QtWidgets.QAction(Admin)\n self.delete_3.setObjectName('delete_3')\n self.add_2 = QtWidgets.QAction(Admin)\n self.add_2.setObjectName('add_2')\n self.help = QtWidgets.QAction(Admin)\n self.help.setObjectName('help')\n self.actionAllEmpty = QtWidgets.QAction(Admin)\n self.actionAllEmpty.setObjectName('actionAllEmpty')\n self.menu_4.addAction(self.help)\n self.menubar.addAction(self.menu_4.menuAction())\n self.retranslateUi(Admin)\n QtCore.QMetaObject.connectSlotsByName(Admin)\n self.model = DataFrameModel()\n self.infoTable.setModel(self.model)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Ui_Admin(object):\n\n def setupUi(self, Admin):\n Admin.setObjectName('Admin')\n Admin.resize(679, 490)\n self.centralwidget = QtWidgets.QWidget(Admin)\n self.centralwidget.setObjectName('centralwidget')\n self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)\n self.verticalLayout.setObjectName('verticalLayout')\n self.horizontalLayout = QtWidgets.QHBoxLayout()\n self.horizontalLayout.setObjectName('horizontalLayout')\n self.menu_btn = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.menu_btn.setFont(font)\n self.menu_btn.setObjectName('menu_btn')\n self.horizontalLayout.addWidget(self.menu_btn)\n self.user_btn = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.user_btn.setFont(font)\n self.user_btn.setObjectName('user_btn')\n self.horizontalLayout.addWidget(self.user_btn)\n self.order_btn = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.order_btn.setFont(font)\n self.order_btn.setObjectName('order_btn')\n self.horizontalLayout.addWidget(self.order_btn)\n self.back = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.back.setFont(font)\n self.back.setObjectName('back')\n self.horizontalLayout.addWidget(self.back)\n self.verticalLayout.addLayout(self.horizontalLayout)\n self.infoTable = DataTableWidget(self.centralwidget)\n self.infoTable.setObjectName('infoTable')\n self.verticalLayout.addWidget(self.infoTable)\n self.horizontalLayout_2 = QtWidgets.QHBoxLayout()\n self.horizontalLayout_2.setObjectName('horizontalLayout_2')\n self.save = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.save.setFont(font)\n self.save.setObjectName('save')\n self.horizontalLayout_2.addWidget(self.save)\n self.original = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n 
self.original.setFont(font)\n self.original.setObjectName('original')\n self.horizontalLayout_2.addWidget(self.original)\n self.fresh = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.fresh.setFont(font)\n self.fresh.setObjectName('fresh')\n self.horizontalLayout_2.addWidget(self.fresh)\n self.verticalLayout.addLayout(self.horizontalLayout_2)\n Admin.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(Admin)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 679, 23))\n self.menubar.setObjectName('menubar')\n self.menu_4 = QtWidgets.QMenu(self.menubar)\n self.menu_4.setObjectName('menu_4')\n Admin.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(Admin)\n self.statusbar.setObjectName('statusbar')\n Admin.setStatusBar(self.statusbar)\n self.update1 = QtWidgets.QAction(Admin)\n self.update1.setObjectName('update1')\n self.add = QtWidgets.QAction(Admin)\n self.add.setObjectName('add')\n self.update2 = QtWidgets.QAction(Admin)\n self.update2.setObjectName('update2')\n self.delete_2 = QtWidgets.QAction(Admin)\n self.delete_2.setObjectName('delete_2')\n self.delete_3 = QtWidgets.QAction(Admin)\n self.delete_3.setObjectName('delete_3')\n self.add_2 = QtWidgets.QAction(Admin)\n self.add_2.setObjectName('add_2')\n self.help = QtWidgets.QAction(Admin)\n self.help.setObjectName('help')\n self.actionAllEmpty = QtWidgets.QAction(Admin)\n self.actionAllEmpty.setObjectName('actionAllEmpty')\n self.menu_4.addAction(self.help)\n self.menubar.addAction(self.menu_4.menuAction())\n self.retranslateUi(Admin)\n QtCore.QMetaObject.connectSlotsByName(Admin)\n self.model = DataFrameModel()\n self.infoTable.setModel(self.model)\n\n def retranslateUi(self, Admin):\n _translate = QtCore.QCoreApplication.translate\n Admin.setWindowTitle(_translate('Admin', '后台管理界面'))\n self.menu_btn.setText(_translate('Admin', '菜单管理'))\n self.user_btn.setText(_translate('Admin', '用户管理'))\n self.order_btn.setText(_translate('Admin', 
'订单信息'))\n self.back.setText(_translate('Admin', '返回登录'))\n self.save.setText(_translate('Admin', '保存数据'))\n self.original.setText(_translate('Admin', '初始化'))\n self.fresh.setText(_translate('Admin', '刷新'))\n self.menu_4.setTitle(_translate('Admin', '帮助'))\n self.update1.setText(_translate('Admin', 'update'))\n self.add.setText(_translate('Admin', 'add'))\n self.update2.setText(_translate('Admin', 'update'))\n self.delete_2.setText(_translate('Admin', 'delete'))\n self.delete_3.setText(_translate('Admin', 'delete'))\n self.add_2.setText(_translate('Admin', 'add'))\n self.help.setText(_translate('Admin', 'help'))\n self.actionAllEmpty.setText(_translate('Admin', 'AllEmpty'))\n",
"step-4": "from PyQt5 import QtCore, QtGui, QtWidgets\nfrom qtpandas.views.DataTableView import DataTableWidget\nfrom qtpandas.models.DataFrameModel import DataFrameModel\nimport pandas as pd\n\n\nclass Ui_Admin(object):\n\n def setupUi(self, Admin):\n Admin.setObjectName('Admin')\n Admin.resize(679, 490)\n self.centralwidget = QtWidgets.QWidget(Admin)\n self.centralwidget.setObjectName('centralwidget')\n self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)\n self.verticalLayout.setObjectName('verticalLayout')\n self.horizontalLayout = QtWidgets.QHBoxLayout()\n self.horizontalLayout.setObjectName('horizontalLayout')\n self.menu_btn = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.menu_btn.setFont(font)\n self.menu_btn.setObjectName('menu_btn')\n self.horizontalLayout.addWidget(self.menu_btn)\n self.user_btn = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.user_btn.setFont(font)\n self.user_btn.setObjectName('user_btn')\n self.horizontalLayout.addWidget(self.user_btn)\n self.order_btn = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.order_btn.setFont(font)\n self.order_btn.setObjectName('order_btn')\n self.horizontalLayout.addWidget(self.order_btn)\n self.back = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.back.setFont(font)\n self.back.setObjectName('back')\n self.horizontalLayout.addWidget(self.back)\n self.verticalLayout.addLayout(self.horizontalLayout)\n self.infoTable = DataTableWidget(self.centralwidget)\n self.infoTable.setObjectName('infoTable')\n self.verticalLayout.addWidget(self.infoTable)\n self.horizontalLayout_2 = QtWidgets.QHBoxLayout()\n self.horizontalLayout_2.setObjectName('horizontalLayout_2')\n self.save = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.save.setFont(font)\n 
self.save.setObjectName('save')\n self.horizontalLayout_2.addWidget(self.save)\n self.original = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.original.setFont(font)\n self.original.setObjectName('original')\n self.horizontalLayout_2.addWidget(self.original)\n self.fresh = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.fresh.setFont(font)\n self.fresh.setObjectName('fresh')\n self.horizontalLayout_2.addWidget(self.fresh)\n self.verticalLayout.addLayout(self.horizontalLayout_2)\n Admin.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(Admin)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 679, 23))\n self.menubar.setObjectName('menubar')\n self.menu_4 = QtWidgets.QMenu(self.menubar)\n self.menu_4.setObjectName('menu_4')\n Admin.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(Admin)\n self.statusbar.setObjectName('statusbar')\n Admin.setStatusBar(self.statusbar)\n self.update1 = QtWidgets.QAction(Admin)\n self.update1.setObjectName('update1')\n self.add = QtWidgets.QAction(Admin)\n self.add.setObjectName('add')\n self.update2 = QtWidgets.QAction(Admin)\n self.update2.setObjectName('update2')\n self.delete_2 = QtWidgets.QAction(Admin)\n self.delete_2.setObjectName('delete_2')\n self.delete_3 = QtWidgets.QAction(Admin)\n self.delete_3.setObjectName('delete_3')\n self.add_2 = QtWidgets.QAction(Admin)\n self.add_2.setObjectName('add_2')\n self.help = QtWidgets.QAction(Admin)\n self.help.setObjectName('help')\n self.actionAllEmpty = QtWidgets.QAction(Admin)\n self.actionAllEmpty.setObjectName('actionAllEmpty')\n self.menu_4.addAction(self.help)\n self.menubar.addAction(self.menu_4.menuAction())\n self.retranslateUi(Admin)\n QtCore.QMetaObject.connectSlotsByName(Admin)\n self.model = DataFrameModel()\n self.infoTable.setModel(self.model)\n\n def retranslateUi(self, Admin):\n _translate = QtCore.QCoreApplication.translate\n 
Admin.setWindowTitle(_translate('Admin', '后台管理界面'))\n self.menu_btn.setText(_translate('Admin', '菜单管理'))\n self.user_btn.setText(_translate('Admin', '用户管理'))\n self.order_btn.setText(_translate('Admin', '订单信息'))\n self.back.setText(_translate('Admin', '返回登录'))\n self.save.setText(_translate('Admin', '保存数据'))\n self.original.setText(_translate('Admin', '初始化'))\n self.fresh.setText(_translate('Admin', '刷新'))\n self.menu_4.setTitle(_translate('Admin', '帮助'))\n self.update1.setText(_translate('Admin', 'update'))\n self.add.setText(_translate('Admin', 'add'))\n self.update2.setText(_translate('Admin', 'update'))\n self.delete_2.setText(_translate('Admin', 'delete'))\n self.delete_3.setText(_translate('Admin', 'delete'))\n self.add_2.setText(_translate('Admin', 'add'))\n self.help.setText(_translate('Admin', 'help'))\n self.actionAllEmpty.setText(_translate('Admin', 'AllEmpty'))\n",
"step-5": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'Admin.ui'\n#\n# Created by: PyQt5 UI code generator 5.12\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nfrom qtpandas.views.DataTableView import DataTableWidget\nfrom qtpandas.models.DataFrameModel import DataFrameModel\nimport pandas as pd\n\nclass Ui_Admin(object):\n def setupUi(self, Admin):\n Admin.setObjectName(\"Admin\")\n Admin.resize(679, 490)\n self.centralwidget = QtWidgets.QWidget(Admin)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)\n self.verticalLayout.setObjectName(\"verticalLayout\")\n self.horizontalLayout = QtWidgets.QHBoxLayout()\n self.horizontalLayout.setObjectName(\"horizontalLayout\")\n self.menu_btn = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.menu_btn.setFont(font)\n self.menu_btn.setObjectName(\"menu_btn\")\n self.horizontalLayout.addWidget(self.menu_btn)\n self.user_btn = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.user_btn.setFont(font)\n self.user_btn.setObjectName(\"user_btn\")\n self.horizontalLayout.addWidget(self.user_btn)\n self.order_btn = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.order_btn.setFont(font)\n self.order_btn.setObjectName(\"order_btn\")\n self.horizontalLayout.addWidget(self.order_btn)\n self.back = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.back.setFont(font)\n self.back.setObjectName(\"back\")\n self.horizontalLayout.addWidget(self.back)\n self.verticalLayout.addLayout(self.horizontalLayout)\n self.infoTable = DataTableWidget(self.centralwidget)\n self.infoTable.setObjectName(\"infoTable\")\n self.verticalLayout.addWidget(self.infoTable)\n self.horizontalLayout_2 = 
QtWidgets.QHBoxLayout()\n self.horizontalLayout_2.setObjectName(\"horizontalLayout_2\")\n \n self.save = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.save.setFont(font)\n self.save.setObjectName(\"save\")\n self.horizontalLayout_2.addWidget(self.save)\n \n self.original = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.original.setFont(font)\n self.original.setObjectName(\"original\")\n self.horizontalLayout_2.addWidget(self.original)\n \n self.fresh = QtWidgets.QPushButton(self.centralwidget)\n font = QtGui.QFont()\n font.setPointSize(15)\n self.fresh.setFont(font)\n self.fresh.setObjectName(\"fresh\")\n self.horizontalLayout_2.addWidget(self.fresh)\n self.verticalLayout.addLayout(self.horizontalLayout_2)\n Admin.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(Admin)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 679, 23))\n self.menubar.setObjectName(\"menubar\")\n self.menu_4 = QtWidgets.QMenu(self.menubar)\n self.menu_4.setObjectName(\"menu_4\")\n Admin.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(Admin)\n self.statusbar.setObjectName(\"statusbar\")\n Admin.setStatusBar(self.statusbar)\n self.update1 = QtWidgets.QAction(Admin)\n self.update1.setObjectName(\"update1\")\n self.add = QtWidgets.QAction(Admin)\n self.add.setObjectName(\"add\")\n self.update2 = QtWidgets.QAction(Admin)\n self.update2.setObjectName(\"update2\")\n self.delete_2 = QtWidgets.QAction(Admin)\n self.delete_2.setObjectName(\"delete_2\")\n self.delete_3 = QtWidgets.QAction(Admin)\n self.delete_3.setObjectName(\"delete_3\")\n self.add_2 = QtWidgets.QAction(Admin)\n self.add_2.setObjectName(\"add_2\")\n self.help = QtWidgets.QAction(Admin)\n self.help.setObjectName(\"help\")\n self.actionAllEmpty = QtWidgets.QAction(Admin)\n self.actionAllEmpty.setObjectName(\"actionAllEmpty\")\n self.menu_4.addAction(self.help)\n 
self.menubar.addAction(self.menu_4.menuAction())\n\n self.retranslateUi(Admin)\n QtCore.QMetaObject.connectSlotsByName(Admin)\n \n self.model=DataFrameModel()\n self.infoTable.setModel(self.model)\n\n def retranslateUi(self, Admin):\n _translate = QtCore.QCoreApplication.translate\n Admin.setWindowTitle(_translate(\"Admin\", \"后台管理界面\"))\n self.menu_btn.setText(_translate(\"Admin\", \"菜单管理\"))\n self.user_btn.setText(_translate(\"Admin\", \"用户管理\"))\n self.order_btn.setText(_translate(\"Admin\", \"订单信息\"))\n self.back.setText(_translate(\"Admin\", \"返回登录\"))\n self.save.setText(_translate(\"Admin\", \"保存数据\"))\n self.original.setText(_translate(\"Admin\", \"初始化\"))\n self.fresh.setText(_translate(\"Admin\", \"刷新\"))\n self.menu_4.setTitle(_translate(\"Admin\", \"帮助\"))\n self.update1.setText(_translate(\"Admin\", \"update\"))\n self.add.setText(_translate(\"Admin\", \"add\"))\n self.update2.setText(_translate(\"Admin\", \"update\"))\n self.delete_2.setText(_translate(\"Admin\", \"delete\"))\n self.delete_3.setText(_translate(\"Admin\", \"delete\"))\n self.add_2.setText(_translate(\"Admin\", \"add\"))\n self.help.setText(_translate(\"Admin\", \"help\"))\n self.actionAllEmpty.setText(_translate(\"Admin\", \"AllEmpty\"))\n\n\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import random
from datetime import timedelta
from typing import Union, Type, Tuple, List, Dict
from django import http
from django.test import TestCase, Client
from django.utils import timezone
from exam_web import errors
from exam_web.models import Student, AcademyGroup, uuid_str, ExamSession, \
UserSession, Question, Stage, QuestionType, ExamTicket, ExamStatus
class ApiClient(Client):
    """Django test client bound to a single API endpoint.

    Requests are sent as JSON; if a student is supplied, every request
    carries the student-id cookie the API uses for authentication.
    """

    path: str

    def __init__(self, path: str, student: Student = None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.path, self.student = path, student
        self.headers = {'content_type': 'application/json'}
        if student:
            # Authenticate every request via the student-id cookie.
            self.cookies['student'] = student.id

    def path_params(self, **params):
        # Produce a sibling client with the path placeholders substituted.
        formatted = self.path.format(**params)
        return ApiClient(formatted, self.student)

    def get(self, **kwargs):
        # Keyword arguments become query-string parameters.
        return super().get(self.path, data=kwargs, **self.headers)

    def post(self, **json):
        # Keyword arguments become the JSON request body.
        return super().post(self.path, data=json, **self.headers)

    def __call__(self, **kwargs):
        # Guard against calling the client like a function.
        raise AttributeError('Use `get` or `post` methods instead')
class ApiTestCase(TestCase):
    """Base class for API tests.

    Creates a shared student/group fixture for the whole class and
    provides helpers to build per-test exam objects and to assert the
    API's success/error response envelopes.
    """

    group: AcademyGroup
    student: Student

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.group = AcademyGroup.objects.create(name='test_group')
        cls.student = Student.objects.create(name='test user', group=cls.group)

    @classmethod
    def tearDownClass(cls):
        cls.student.delete()
        cls.group.delete()
        super().tearDownClass()

    def setup_exam_objects(self):
        """Create an exam session, a user session for ``self.student``,
        one question of each type, and a ticket per question."""
        self.session = ExamSession.objects.create(
            start_time=timezone.now(), duration=timedelta(minutes=40))
        self.student_session = UserSession.objects.create(
            student=self.student, exam_session=self.session)
        self.questions = [
            Question.objects.create(
                stage=Stage.first, type=QuestionType.single, max_score=1,
                text='test single question', options=['a', 'b', 'c']
            ),
            Question.objects.create(
                stage=Stage.first, type=QuestionType.multi, max_score=1,
                text='test multi question', options=['a', 'b', 'c']
            ),
            Question.objects.create(
                stage=Stage.second, type=QuestionType.open, max_score=1,
                text='test open question', options=None,
            ),
        ]
        self.tickets = [
            ExamTicket.objects.create(
                student=self.student, session=self.student_session,
                question=question) for question in self.questions
        ]
        self.ticket_map = {x.id: x for x in self.tickets}

    def teardown_exam_objects(self):
        """Remove every object created by :meth:`setup_exam_objects`."""
        for ticket in self.tickets:
            ticket.delete()
        for question in self.questions:
            question.delete()
        self.student_session.delete()
        # Fix: the ExamSession created in setup_exam_objects was never
        # deleted, leaking one row per test that called this teardown.
        self.session.delete()

    def assertResponseSuccess(self, response: http.HttpResponse):
        """Assert a 200 response carrying a ``result`` key; return it."""
        content = response.content.decode()
        self.assertEqual(response.status_code, 200,
                         (response.status_code, content))
        content = response.json()
        self.assertIn('result', content, content)
        return content['result']

    def assertResponseError(
        self, response: http.JsonResponse,
        error: Union[errors.APIError, Type[errors.APIError]] = None
    ) -> Tuple[int, str]:
        """Assert an error response (status >= 400 with an ``error`` key).

        If ``error`` is given — either an ``APIError`` instance or class —
        the response status and message must match it exactly.
        Returns the ``(status_code, error_message)`` pair.
        """
        content = response.json()
        self.assertGreaterEqual(response.status_code, 400,
                                (response.status_code, content))
        self.assertIn('error', content, content)
        if error is not None:
            if isinstance(error, type):
                # An error class was passed: instantiate with defaults.
                error = error()
            self.assertEqual(response.status_code, error.status,
                             (response.status_code, content))
            self.assertEqual(content['error'], error.message,
                             (response.status_code, content))
        return response.status_code, content['error']
class TestAuthorize(ApiTestCase):
    """Tests for the POST /api/authorize endpoint."""

    authorize: ApiClient

    def setUp(self):
        super().setUp()
        self.authorize = ApiClient('/api/authorize')

    def test_authorized(self):
        response = self.authorize.post(token=self.student.id)
        payload = self.assertResponseSuccess(response)
        # The session cookie must be set to the student's id.
        self.assertEqual(response.cookies['student'].value, self.student.id)
        expected = {
            'name': self.student.name,
            'group': self.group.name,
            'id': self.student.id,
        }
        for key, value in expected.items():
            self.assertEqual(payload[key], value)

    def test_authorized_unknown_token(self):
        # A token that matches no student is rejected.
        self.assertResponseError(self.authorize.post(token=uuid_str()),
                                 errors.Unauthorized)

    def test_authorized_invalid_params(self):
        # Missing and non-string tokens are both invalid parameters.
        for bad_payload in ({}, {'token': 12345678}):
            response = self.authorize.post(**bad_payload)
            self.assertResponseError(response,
                                     errors.InvalidParameter('token'))
        # GET is not an allowed method on this endpoint.
        self.assertEqual(self.authorize.get().status_code, 405)
class TestGetExamSessions(ApiTestCase):
    """Tests for the GET /api/exams listing endpoint."""

    get_exams: ApiClient
    session: ExamSession
    student_session: UserSession
    questions: List[Question]
    tickets: List[ExamTicket]

    def setUp(self):
        super().setUp()
        self.get_exams = ApiClient('/api/exams', student=self.student)
        self.setup_exam_objects()

    def tearDown(self):
        self.teardown_exam_objects()
        super().tearDown()

    def test_get_exams_available(self):
        listing = self.assertResponseSuccess(self.get_exams.get())
        self.assertIsInstance(listing, list)
        self.assertEqual(len(listing), 1)
        view = listing[0]
        self.assertEqual(view['started_at'],
                         self.session.start_time.isoformat())
        expected_minutes = self.session.duration.total_seconds() / 60
        self.assertEqual(view['duration'], expected_minutes)
        self.assertEqual(view['checked_in'], False)
        self.assertEqual(view['finished_at'], None)
        self.assertEqual(view['status'], ExamStatus.available.value)
        self.assertEqual(view['score'], None)

    def test_get_exams_check_in(self):
        # A started_at timestamp marks the session as checked in.
        self.student_session.started_at = timezone.now()
        self.student_session.save()
        view = self.assertResponseSuccess(self.get_exams.get())[0]
        self.assertEqual(view['checked_in'], True)

    def test_get_exams_submitted(self):
        finish = timezone.now()
        self.student_session.started_at = timezone.now()
        self.student_session.finished_at = finish
        self.student_session.save()
        view = self.assertResponseSuccess(self.get_exams.get())[0]
        self.assertEqual(view['finished_at'], finish.isoformat())
        self.assertEqual(view['status'], ExamStatus.submitted)
        self.assertEqual(view['score'], None)

    def test_get_exams_non_available(self):
        # Push the start time into the future: exam not yet open.
        self.session.start_time = timezone.now() + self.session.duration
        self.session.save()
        view = self.assertResponseSuccess(self.get_exams.get())[0]
        self.assertEqual(view['started_at'],
                         self.session.start_time.isoformat())
        self.assertEqual(view['finished_at'], None)
        self.assertEqual(view['status'], ExamStatus.not_available)

    def test_get_exams_unauthorized(self):
        # Without the student cookie the listing is forbidden.
        self.get_exams.cookies = {}
        self.assertResponseError(self.get_exams.get(), errors.Unauthorized)
        # POST is not an allowed method on the listing endpoint.
        self.assertEqual(self.get_exams.post().status_code, 405)

    def test_get_exams_score(self):
        for ticket in self.tickets:
            ticket.score = 1.0
            ticket.save()
        view = self.assertResponseSuccess(self.get_exams.get())[0]
        self.assertEqual(view['score'], sum(t.score for t in self.tickets))
        # A single unscored ticket makes the total undefined again.
        self.tickets[0].score = None
        self.tickets[0].save()
        view = self.assertResponseSuccess(self.get_exams.get())[0]
        self.assertEqual(view['score'], None)
class TestGetExamTickets(ApiTestCase):
    """Tests for the POST /api/tickets endpoint."""

    get_exams: ApiClient
    session: ExamSession
    student_session: UserSession
    questions: List[Question]
    tickets: List[ExamTicket]
    ticket_map: Dict[str, ExamTicket]

    def setUp(self):
        super().setUp()
        self.get_exam_questions = \
            ApiClient('/api/tickets', student=self.student)
        self.setup_exam_objects()

    def tearDown(self):
        self.teardown_exam_objects()
        super().tearDown()

    def test_get_exam_questions(self):
        """First request checks the student in and returns the tickets
        ordered by question stage."""
        self.assertFalse(self.student_session.check_in)
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.available)
        self.assertEqual(result['score'], None)
        self.student_session.refresh_from_db()
        self.assertTrue(self.student_session.check_in)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))
        # Tickets must come back sorted by the stage of their question.
        self.assertEqual([x['id'] for x in questions], [
            x.id for x in sorted(self.tickets, key=lambda x: x.question.stage)
        ])
        for question in questions:
            ticket = self.ticket_map[question['id']]
            ticket_question = ticket.question
            self.assertEqual(question.pop('id'), ticket.id)
            view = ticket_question.as_dict
            view.pop('id')
            self.assertEqual(question, view)

    def test_get_exam_questions_already_checked_in(self):
        """A repeated request must not reset the check-in timestamp."""
        self.student_session.check_in = True
        # Bug fix: persist the flag. Without save() the server still read
        # check_in == False from the database and performed a fresh
        # check-in, so this test never exercised the already-checked-in
        # path (compare test_get_exams_check_in, which does save).
        self.student_session.save()
        checkin_date = self.student_session.started_at
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.available)
        self.assertEqual(result['score'], None)
        self.student_session.refresh_from_db()
        self.assertTrue(self.student_session.check_in)
        self.assertEqual(self.student_session.started_at, checkin_date)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))

    def test_get_exam_questions_not_available(self):
        """An exam that has not started yet returns no questions."""
        self.session.start_time += self.session.duration
        self.session.save()
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.not_available)
        self.assertEqual(result['score'], None)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), 0)

    def test_get_exam_questions_submitted(self):
        """A submitted exam echoes the stored answers; scores are hidden
        until every ticket is graded."""
        self.student_session.finished_at = timezone.now()
        self.student_session.save()
        ANSWER = 'answer'
        for ticket in self.tickets:
            ticket.answer = ANSWER
            ticket.save()
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.submitted)
        self.assertEqual(result['score'], None)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))
        for question in questions:
            ticket = self.ticket_map[question['id']]
            ticket.refresh_from_db()
            answer = question.pop('answer')
            self.assertEqual(answer, ticket.answer)
            self.assertEqual(question['score'], None)

    def test_get_exam_questions_submitted_and_scored(self):
        """Once every ticket is scored, per-ticket and total scores are
        exposed."""
        self.student_session.finished_at = timezone.now()
        self.student_session.save()
        ANSWER = 'answer'
        for ticket in self.tickets:
            ticket.answer = ANSWER
            ticket.score = 1.0
            ticket.save()
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.submitted)
        self.assertEqual(result['score'], sum(t.score for t in self.tickets))
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))
        for question in questions:
            ticket = self.ticket_map[question['id']]
            ticket.refresh_from_db()
            self.assertEqual(question['score'], ticket.score)

    def test_get_exam_questions_invalid_params(self):
        """Missing/unknown session ids, missing auth and wrong HTTP
        method are all rejected."""
        self.assertResponseError(self.get_exam_questions.post(),
                                 errors.InvalidParameter('session_id'))
        self.assertResponseError(
            self.get_exam_questions.post(session_id=uuid_str()),
            errors.ExamNotFound)
        self.get_exam_questions.cookies = {}
        self.assertResponseError(
            self.get_exam_questions.post(session_id=self.student_session.id),
            errors.Unauthorized)
        response = self.get_exam_questions.get()
        self.assertEqual(response.status_code, 405)
class TestSubmitExam(ApiTestCase):
    """Tests for the POST /api/submit endpoint."""

    def setUp(self):
        super().setUp()
        self.submit_exam = ApiClient('/api/submit', student=self.student)
        self.setup_exam_objects()

    def tearDown(self):
        self.teardown_exam_objects()
        super().tearDown()

    def _random_answer(self, question):
        # Build a random, type-appropriate answer payload for a question.
        if question.type == QuestionType.single:
            return random.randint(0, len(question.options) - 1)
        if question.type == QuestionType.multi:
            indices = list(range(0, len(question.options)))
            return random.sample(indices,
                                 k=random.randint(0, len(question.options)))
        return 'answer'

    def test_submit_exam(self):
        answers = {ticket.id: self._random_answer(ticket.question)
                   for ticket in self.tickets}
        result = self.assertResponseSuccess(self.submit_exam.post(
            session_id=self.student_session.id, answers=answers))
        self.assertEqual(result, True)
        self.student_session.refresh_from_db()
        self.assertEqual(self.student_session.status, ExamStatus.submitted)
        for ticket in self.tickets:
            ticket.refresh_from_db()
            options = ticket.question.options
            if ticket.question.type == QuestionType.single:
                # A single-choice answer is stored as the option text.
                self.assertEqual(ticket.answer, options[answers[ticket.id]])
            elif ticket.question.type == QuestionType.multi:
                # A multi-choice answer joins the chosen options with ';'.
                chosen = [options[i] for i in sorted(answers[ticket.id])]
                self.assertEqual(ticket.answer, ';'.join(chosen))
            self.assertIsNotNone(ticket.answered_at)

    def test_submit_without_any_answer(self):
        # Submitting an empty answer map still closes the session.
        result = self.assertResponseSuccess(self.submit_exam.post(
            session_id=self.student_session.id, answers={}))
        self.assertEqual(result, True)
        self.student_session.refresh_from_db()
        self.assertEqual(self.student_session.status, ExamStatus.submitted)
        for ticket in self.tickets:
            ticket.refresh_from_db()
            self.assertIsNone(ticket.answered_at)
            self.assertIsNone(ticket.answer)

    def test_submit_partial_answer_errors(self):
        # Bad entries are skipped while valid ones are still recorded.
        ANSWER = 'answer'
        answers = {
            # option index out of range
            self.tickets[0].id: len(self.tickets[0].question.options),
            # wrong answer type for a multi-choice question
            self.tickets[1].id: 0,
            # a valid answer
            self.tickets[2].id: ANSWER,
            # unknown ticket id
            uuid_str(): ANSWER,
            # nonexistent ticket
            self.tickets[2].id + 1: ANSWER,
        }
        result = self.assertResponseSuccess(self.submit_exam.post(
            session_id=self.student_session.id, answers=answers))
        self.assertEqual(result, True)
        self.student_session.refresh_from_db()
        self.assertEqual(self.student_session.status, ExamStatus.submitted)
        for ticket in self.tickets:
            ticket.refresh_from_db()
        self.assertIsNone(self.tickets[0].answer)
        self.assertIsNone(self.tickets[0].answered_at)
        self.assertIsNone(self.tickets[1].answer)
        self.assertIsNone(self.tickets[1].answered_at)
        self.assertEqual(self.tickets[2].answer, ANSWER)
        self.assertIsNotNone(self.tickets[2].answered_at)

    def test_submit_errors(self):
        invalid_session = errors.InvalidParameter('session_id')
        self.assertResponseError(self.submit_exam.post(), invalid_session)
        self.assertResponseError(self.submit_exam.post(session_id=123),
                                 invalid_session)
        self.assertResponseError(self.submit_exam.post(session_id=uuid_str()),
                                 errors.InvalidParameter('answers'))
        self.assertResponseError(
            self.submit_exam.post(session_id=uuid_str(), answers=[]),
            errors.InvalidParameter('answers'))
        self.assertResponseError(
            self.submit_exam.post(session_id=uuid_str(), answers={}),
            errors.ExamNotFound)
        # Shift the window into the future: the exam is not available.
        self.session.start_time += self.session.duration
        self.session.save()
        self.assertResponseError(self.submit_exam.post(
            session_id=self.student_session.id, answers={}),
            errors.ExamNotAvailable)
        # NOTE(review): UserSession is manipulated via ``started_at``
        # everywhere else in this suite; ``start_time`` here may be a
        # no-op attribute — confirm whether ``started_at`` was intended.
        self.student_session.start_time = timezone.now()
        self.student_session.save()
        self.assertResponseError(self.submit_exam.post(
            session_id=self.student_session.id, answers={}),
            errors.ExamNotAvailable)
|
normal
|
{
"blob_id": "44e4151279884ce7c5d5a9e5c82916ce2d3ccbc2",
"index": 9789,
"step-1": "<mask token>\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n <mask token>\n\n 
def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = 
self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: 
len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"step-2": "<mask token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n <mask token>\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n <mask token>\n <mask token>\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n 
self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n 
self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = 
self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = 
self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in 
self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), 
errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"step-3": "<mask token>\n\n\nclass ApiClient(Client):\n path: str\n\n def __init__(self, path: str, student: Student=None, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.student = student\n self.path = path\n self.headers = {'content_type': 'application/json'}\n if student:\n self.cookies['student'] = student.id\n\n def path_params(self, **params):\n return ApiClient(self.path.format(**params), self.student)\n\n def get(self, **kwargs):\n return super().get(self.path, data=kwargs, **self.headers)\n\n def post(self, **json):\n return super().post(self.path, data=json, **self.headers)\n <mask token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(start_time=timezone.now(),\n duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(student=self.\n student, exam_session=self.session)\n self.questions = [Question.objects.create(stage=Stage.first, type=\n QuestionType.single, max_score=1, text='test single question',\n options=['a', 'b', 'c']), Question.objects.create(stage=Stage.\n first, type=QuestionType.multi, max_score=1, text=\n 'test multi question', options=['a', 'b', 'c']), Question.\n objects.create(stage=Stage.second, type=QuestionType.open,\n max_score=1, text='test open question', options=None)]\n self.tickets = [ExamTicket.objects.create(student=self.student,\n session=self.student_session, question=question) for question in\n self.questions]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in 
self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200, (response.status_code,\n content))\n content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(self, response: http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass 
TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n 
self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n 
self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], 
None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = 
self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n 
self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"step-4": "import random\nfrom datetime import timedelta\nfrom typing import Union, Type, Tuple, List, Dict\nfrom django import http\nfrom django.test import TestCase, Client\nfrom django.utils import timezone\nfrom exam_web import errors\nfrom exam_web.models import Student, AcademyGroup, uuid_str, ExamSession, UserSession, Question, Stage, QuestionType, ExamTicket, ExamStatus\n\n\nclass ApiClient(Client):\n path: str\n\n def __init__(self, path: str, student: Student=None, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.student = student\n self.path = path\n self.headers = {'content_type': 'application/json'}\n if student:\n self.cookies['student'] = student.id\n\n def path_params(self, **params):\n return ApiClient(self.path.format(**params), self.student)\n\n def get(self, **kwargs):\n return super().get(self.path, data=kwargs, **self.headers)\n\n def post(self, **json):\n return super().post(self.path, data=json, **self.headers)\n\n def __call__(self, **kwargs):\n raise AttributeError('Use `get` or `post` methods instead')\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(start_time=timezone.now(),\n duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(student=self.\n student, exam_session=self.session)\n self.questions = [Question.objects.create(stage=Stage.first, type=\n QuestionType.single, max_score=1, text='test single question',\n options=['a', 'b', 'c']), Question.objects.create(stage=Stage.\n first, type=QuestionType.multi, max_score=1, text=\n 'test multi question', options=['a', 'b', 'c']), 
Question.\n objects.create(stage=Stage.second, type=QuestionType.open,\n max_score=1, text='test open question', options=None)]\n self.tickets = [ExamTicket.objects.create(student=self.student,\n session=self.student_session, question=question) for question in\n self.questions]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200, (response.status_code,\n content))\n content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(self, response: http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n 
self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n 
self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n 
self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n 
session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n 
super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n 
self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"step-5": "import random\nfrom datetime import timedelta\nfrom typing import Union, Type, Tuple, List, Dict\n\nfrom django import http\nfrom django.test import TestCase, Client\nfrom django.utils import timezone\n\nfrom exam_web import errors\nfrom exam_web.models import Student, AcademyGroup, uuid_str, ExamSession, \\\n UserSession, Question, Stage, QuestionType, ExamTicket, ExamStatus\n\n\nclass ApiClient(Client):\n path: str\n\n def __init__(self, path: str, student: Student = None, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.student = student\n self.path = path\n self.headers = {'content_type': 'application/json'}\n if student:\n self.cookies['student'] = student.id\n\n def path_params(self, **params):\n return ApiClient(self.path.format(**params), self.student)\n\n def get(self, **kwargs):\n return super().get(self.path, data=kwargs, **self.headers)\n\n def post(self, **json):\n return super().post(self.path, data=json, **self.headers)\n\n def __call__(self, **kwargs):\n raise AttributeError('Use `get` or `post` methods instead')\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(\n start_time=timezone.now(), duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(\n student=self.student, exam_session=self.session)\n self.questions = [\n Question.objects.create(\n stage=Stage.first, type=QuestionType.single, max_score=1,\n text='test single question', options=['a', 'b', 'c']\n ),\n Question.objects.create(\n stage=Stage.first, type=QuestionType.multi, max_score=1,\n text='test multi question', options=['a', 
'b', 'c']\n ),\n Question.objects.create(\n stage=Stage.second, type=QuestionType.open, max_score=1,\n text='test open question', options=None,\n ),\n ]\n self.tickets = [\n ExamTicket.objects.create(\n student=self.student, session=self.student_session,\n question=question) for question in self.questions\n ]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200,\n (response.status_code, content))\n content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(\n self, response: http.JsonResponse,\n error: Union[errors.APIError, Type[errors.APIError]] = None\n ) -> Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400,\n (response.status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status,\n (response.status_code, content))\n self.assertEqual(content['error'], error.message,\n (response.status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n\n self.assertEqual(response.cookies['student'].value, self.student.id)\n\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = 
self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(\n user_session['started_at'], self.session.start_time.isoformat())\n self.assertEqual(user_session['duration'],\n self.session.duration.total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n 
self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'],\n self.session.start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'],\n sum(t.score for t in self.tickets))\n\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = \\\n ApiClient('/api/tickets', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n\n result = self.assertResponseSuccess(\n self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n 
self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [\n x.id for x in sorted(self.tickets, key=lambda x: x.question.stage)\n ])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n\n result = self.assertResponseSuccess(\n self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n\n result = self.assertResponseSuccess(\n self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n\n result = self.assertResponseSuccess(\n 
self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n\n result = self.assertResponseSuccess(\n self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(\n self.get_exam_questions.post(session_id=uuid_str()),\n errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(\n self.get_exam_questions.post(session_id=self.student_session.id),\n errors.Unauthorized)\n\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def 
tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = \\\n random.randint(0, len(ticket.question.options)-1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(\n list(range(0, len(ticket.question.options))),\n k=random.randint(0, len(ticket.question.options))\n )\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(\n ticket.answer, ticket.question.options[answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([\n ticket.question.options[x]\n for x in sorted(answers[ticket.id])\n ]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {\n # неверный порядковый индекс ответа\n self.tickets[0].id: len(self.tickets[0].question.options),\n # неверный тип ответа\n self.tickets[1].id: 0,\n # корректный ответ\n self.tickets[2].id: ANSWER,\n # неверный ид билета\n uuid_str(): ANSWER,\n # несуществующий тикет\n self.tickets[2].id + 1: 
ANSWER,\n }\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str()),\n errors.InvalidParameter('answers'))\n self.assertResponseError(\n self.submit_exam.post(session_id=uuid_str(), answers=[]),\n errors.InvalidParameter('answers'))\n self.assertResponseError(\n self.submit_exam.post(session_id=uuid_str(), answers={}),\n errors.ExamNotFound)\n\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(\n session_id=self.student_session.id, answers={}),\n errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(\n session_id=self.student_session.id, answers={}),\n errors.ExamNotAvailable)\n",
"step-ids": [
15,
34,
42,
44,
45
]
}
|
[
15,
34,
42,
44,
45
] |
<|reserved_special_token_0|>
class ProbabilityNetwork:
    """Discrete Bayesian network over boolean nodes labelled 0..n-1.

    Provides two sampling-based approximate-inference methods that estimate
    P(targetNode = True | evidences): rejection sampling (`logicSampling`)
    and likelihood weighting (`weightedLikelihood`).
    """

    def __init__(self, n, edges, probs):
        """
        Args:
            n: number of nodes; nodes are the integers 0..n-1.
            edges: list of (parent, child) pairs; assumed to form a DAG.
            probs: callable (node, values) -> float returning
                P(node = True | parents), where `values` maps node -> bool
                (parents of `node` are guaranteed to be set when called).
        """
        self.nodes = list(range(n))
        self.edges = edges
        self.probs = probs

    def parents(self, node):
        """Return the list of direct parents of `node`."""
        # Bug fix: the original read the free name `edges` (module global /
        # NameError) instead of the instance attribute `self.edges`.
        return [a for a, b in self.edges if b == node]

    def ancestralOrder(self):
        """Return the nodes in a topological (ancestral) order.

        Repeatedly appends every node all of whose parents are already
        placed. Assumes the edge list is acyclic; a cycle would make this
        loop forever.
        """
        order = []
        while len(order) < len(self.nodes):
            for node in self.nodes:
                if node in order:
                    continue
                # Ready once no still-pending parent edge points at it.
                if not any(edge[0] not in order and edge[1] == node
                           for edge in self.edges):
                    order.append(node)
        return order

    def logicSampling(self, evidences, targetNode, niters=10000000):
        """Estimate P(targetNode = True | evidences) by rejection sampling.

        Draws full assignments in ancestral order and discards any sample
        that contradicts the evidence.

        Args:
            evidences: dict node -> bool of observed values.
            targetNode: node whose posterior probability is estimated.
            niters: number of samples to draw.

        Returns:
            Fraction of accepted samples in which targetNode is True.

        Raises:
            ZeroDivisionError: if every sample was rejected (evidence has
                negligible probability for the given niters).
        """
        ancestral = self.ancestralOrder()
        hits = 0
        total = 0
        for _ in range(niters):
            values = {i: None for i in self.nodes}
            consistent = True
            for node in ancestral:
                values[node] = np.random.random() < self.probs(node, values)
                # Reject as soon as the sample contradicts the evidence.
                if node in evidences and evidences[node] != values[node]:
                    consistent = False
                    break
            if not consistent:
                continue
            total += 1
            if values[targetNode]:
                hits += 1
        return hits / total

    def weightedLikelihood(self, evidences, targetNode, niters=10000000):
        """Estimate P(targetNode = True | evidences) by likelihood weighting.

        Evidence nodes are clamped to their observed values; each sample is
        weighted by the likelihood of the evidence given its sampled parents,
        so no sample is ever rejected.

        Args:
            evidences: dict node -> bool of observed values.
            targetNode: node whose posterior probability is estimated.
            niters: number of samples to draw.

        Returns:
            Weighted fraction of samples in which targetNode is True.
        """
        evidenceNodes = evidences.keys()
        # Sample only the non-evidence nodes, keeping ancestral order so a
        # node's parents are always assigned before it is sampled.
        sampleOrder = [node for node in self.ancestralOrder()
                       if node not in evidenceNodes]
        weightedHits = 0.0
        totalWeight = 0.0
        for _ in range(niters):
            values = {i: None for i in sampleOrder}
            values.update(evidences)
            for node in sampleOrder:
                values[node] = np.random.random() < self.probs(node, values)
            # Sample weight = product of the evidence likelihoods.
            # math.prod replaces the original `reduce`, which was never
            # imported by this file (only numpy/math/datetime are).
            weight = math.prod(
                self.probs(i, values) if values[i]
                else 1 - self.probs(i, values)
                for i in evidenceNodes)
            if values[targetNode]:
                weightedHits += weight
            totalWeight += weight
        return weightedHits / totalWeight
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ProbabilityNetwork:
def __init__(self, n, edges, probs):
self.nodes = list(range(n))
self.edges = edges
self.probs = probs
def parents(self, node):
return [a for a, b in edges if b == node]
def ancestralOrder(self):
order = []
while len(order) < len(self.nodes):
for node in self.nodes:
if node in order:
continue
if not any(edge[0] not in order and edge[1] == node for
edge in self.edges):
order.append(node)
return order
def logicSampling(self, evidences, targetNode, niters=10000000):
evidenceNodes = evidences.keys()
ancestralOrder = self.ancestralOrder()
hits = 0
total = 0
for it in range(niters):
fail = False
values = dict([[i, None] for i in self.nodes])
for node in ancestralOrder:
pNode = self.probs(node, values)
nodeValue = np.random.random() < pNode
values[node] = nodeValue
if node in evidences and evidences[node] != values[node]:
fail = True
break
if fail:
continue
total += 1
if values[targetNode]:
hits += 1
return hits / total
def weightedLikelihood(self, evidences, targetNode, niters=10000000):
evidenceNodes = evidences.keys()
ancestralOrder = [node for node in self.ancestralOrder() if node not in
evidenceNodes]
cumsumHit = 0
cumsumTotal = 0
hits = 0
for it in range(niters):
values = dict([[i, None] for i in ancestralOrder])
for evNode in evidenceNodes:
values[evNode] = evidences[evNode]
for node in ancestralOrder:
pNode = self.probs(node, values)
nodeValue = np.random.random() < pNode
values[node] = nodeValue
currProb = reduce(lambda x, y: x * y, [(self.probs(i, values) if
values[i] else 1 - self.probs(i, values)) for i in
evidenceNodes])
if values[targetNode]:
cumsumHit += currProb
cumsumTotal += currProb
return cumsumHit / cumsumTotal
<|reserved_special_token_0|>
def probs(node, evidences):
if node == 0:
return 0.3
elif node == 1:
if evidences[0]:
return 0.9
else:
return 0.2
elif node == 2:
if evidences[0]:
return 0.75
else:
return 0.25
elif node == 3:
if evidences[1]:
return 0.6
else:
return 0.1
elif node == 4:
if evidences[1] and evidences[2]:
return 0.8
elif evidences[1] and not evidences[2]:
return 0.6
elif not evidences[1] and evidences[2]:
return 0.5
else:
return 0
elif node == 5:
if evidences[2]:
return 0.4
else:
return 0.1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ProbabilityNetwork:
def __init__(self, n, edges, probs):
self.nodes = list(range(n))
self.edges = edges
self.probs = probs
def parents(self, node):
return [a for a, b in edges if b == node]
def ancestralOrder(self):
order = []
while len(order) < len(self.nodes):
for node in self.nodes:
if node in order:
continue
if not any(edge[0] not in order and edge[1] == node for
edge in self.edges):
order.append(node)
return order
def logicSampling(self, evidences, targetNode, niters=10000000):
evidenceNodes = evidences.keys()
ancestralOrder = self.ancestralOrder()
hits = 0
total = 0
for it in range(niters):
fail = False
values = dict([[i, None] for i in self.nodes])
for node in ancestralOrder:
pNode = self.probs(node, values)
nodeValue = np.random.random() < pNode
values[node] = nodeValue
if node in evidences and evidences[node] != values[node]:
fail = True
break
if fail:
continue
total += 1
if values[targetNode]:
hits += 1
return hits / total
def weightedLikelihood(self, evidences, targetNode, niters=10000000):
evidenceNodes = evidences.keys()
ancestralOrder = [node for node in self.ancestralOrder() if node not in
evidenceNodes]
cumsumHit = 0
cumsumTotal = 0
hits = 0
for it in range(niters):
values = dict([[i, None] for i in ancestralOrder])
for evNode in evidenceNodes:
values[evNode] = evidences[evNode]
for node in ancestralOrder:
pNode = self.probs(node, values)
nodeValue = np.random.random() < pNode
values[node] = nodeValue
currProb = reduce(lambda x, y: x * y, [(self.probs(i, values) if
values[i] else 1 - self.probs(i, values)) for i in
evidenceNodes])
if values[targetNode]:
cumsumHit += currProb
cumsumTotal += currProb
return cumsumHit / cumsumTotal
edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 4), (2, 5)]
def probs(node, evidences):
if node == 0:
return 0.3
elif node == 1:
if evidences[0]:
return 0.9
else:
return 0.2
elif node == 2:
if evidences[0]:
return 0.75
else:
return 0.25
elif node == 3:
if evidences[1]:
return 0.6
else:
return 0.1
elif node == 4:
if evidences[1] and evidences[2]:
return 0.8
elif evidences[1] and not evidences[2]:
return 0.6
elif not evidences[1] and evidences[2]:
return 0.5
else:
return 0
elif node == 5:
if evidences[2]:
return 0.4
else:
return 0.1
pn = ProbabilityNetwork(6, edges, probs)
evidences = dict([[3, True], [4, True], [5, False]])
print(pn.logicSampling(evidences, 0))
print(pn.weightedLikelihood(evidences, 0))
<|reserved_special_token_1|>
import numpy as np
from functools import reduce
class ProbabilityNetwork:
def __init__(self, n, edges, probs):
self.nodes = list(range(n))
self.edges = edges
self.probs = probs
def parents(self, node):
return [a for a, b in edges if b == node]
def ancestralOrder(self):
order = []
while len(order) < len(self.nodes):
for node in self.nodes:
if node in order:
continue
if not any(edge[0] not in order and edge[1] == node for
edge in self.edges):
order.append(node)
return order
def logicSampling(self, evidences, targetNode, niters=10000000):
evidenceNodes = evidences.keys()
ancestralOrder = self.ancestralOrder()
hits = 0
total = 0
for it in range(niters):
fail = False
values = dict([[i, None] for i in self.nodes])
for node in ancestralOrder:
pNode = self.probs(node, values)
nodeValue = np.random.random() < pNode
values[node] = nodeValue
if node in evidences and evidences[node] != values[node]:
fail = True
break
if fail:
continue
total += 1
if values[targetNode]:
hits += 1
return hits / total
def weightedLikelihood(self, evidences, targetNode, niters=10000000):
evidenceNodes = evidences.keys()
ancestralOrder = [node for node in self.ancestralOrder() if node not in
evidenceNodes]
cumsumHit = 0
cumsumTotal = 0
hits = 0
for it in range(niters):
values = dict([[i, None] for i in ancestralOrder])
for evNode in evidenceNodes:
values[evNode] = evidences[evNode]
for node in ancestralOrder:
pNode = self.probs(node, values)
nodeValue = np.random.random() < pNode
values[node] = nodeValue
currProb = reduce(lambda x, y: x * y, [(self.probs(i, values) if
values[i] else 1 - self.probs(i, values)) for i in
evidenceNodes])
if values[targetNode]:
cumsumHit += currProb
cumsumTotal += currProb
return cumsumHit / cumsumTotal
edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 4), (2, 5)]
def probs(node, evidences):
if node == 0:
return 0.3
elif node == 1:
if evidences[0]:
return 0.9
else:
return 0.2
elif node == 2:
if evidences[0]:
return 0.75
else:
return 0.25
elif node == 3:
if evidences[1]:
return 0.6
else:
return 0.1
elif node == 4:
if evidences[1] and evidences[2]:
return 0.8
elif evidences[1] and not evidences[2]:
return 0.6
elif not evidences[1] and evidences[2]:
return 0.5
else:
return 0
elif node == 5:
if evidences[2]:
return 0.4
else:
return 0.1
pn = ProbabilityNetwork(6, edges, probs)
evidences = dict([[3, True], [4, True], [5, False]])
print(pn.logicSampling(evidences, 0))
print(pn.weightedLikelihood(evidences, 0))
<|reserved_special_token_1|>
#YET TO COMMENT.
import numpy as np
from functools import reduce
class ProbabilityNetwork:
    """A Bayesian network over boolean nodes with approximate inference.

    Nodes are the integers ``0..n-1``.  ``edges`` is a list of
    (parent, child) pairs and ``probs(node, values)`` must return
    P(node=True | parents), reading the parents' sampled booleans out of
    the ``values`` dict.
    """

    def __init__(self, n, edges, probs):
        self.nodes = list(range(n))
        self.edges = edges
        self.probs = probs

    def parents(self, node):
        """Return the parent nodes of *node*."""
        # Fixed: the original read a module-level ``edges`` global instead
        # of this instance's edge list, which breaks any network whose
        # edges differ from that global (or raises NameError without it).
        return [a for a, b in self.edges if b == node]

    def ancestralOrder(self):
        """Return a topological order: every parent precedes its children."""
        order = []
        while len(order) < len(self.nodes):
            for node in self.nodes:
                if node in order:
                    continue
                # Append a node once none of its parents is still unplaced.
                if not any(edge[0] not in order and edge[1] == node
                           for edge in self.edges):
                    order.append(node)
        return order

    def logicSampling(self, evidences, targetNode, niters=10000000):
        """Estimate P(targetNode=True | evidences) by rejection sampling.

        Samples the whole network in ancestral order and discards any
        sample that contradicts ``evidences`` (a ``{node: bool}`` dict).
        Raises ZeroDivisionError if every sample is rejected (evidence
        with probability ~0, or ``niters`` too small).
        """
        ancestralOrder = self.ancestralOrder()
        hits = 0
        total = 0
        for it in range(niters):
            fail = False
            values = dict([[i, None] for i in self.nodes])
            for node in ancestralOrder:
                pNode = self.probs(node, values)
                values[node] = np.random.random() < pNode
                if node in evidences and evidences[node] != values[node]:
                    fail = True  # sample contradicts the evidence: reject
                    break
            if fail:
                continue
            total += 1
            if values[targetNode]:
                hits += 1
        return hits / total

    def weightedLikelihood(self, evidences, targetNode, niters=10000000):
        """Estimate P(targetNode=True | evidences) by likelihood weighting.

        Evidence nodes are clamped to their observed values; only the
        remaining nodes are sampled.  Each sample is weighted by the
        likelihood of the evidence given the sampled values, avoiding the
        rejections of :meth:`logicSampling`.
        """
        evidenceNodes = evidences.keys()
        ancestralOrder = [node for node in self.ancestralOrder()
                          if node not in evidenceNodes]
        cumsumHit = 0
        cumsumTotal = 0
        for it in range(niters):
            values = dict([[i, None] for i in ancestralOrder])
            for evNode in evidenceNodes:
                values[evNode] = evidences[evNode]
            for node in ancestralOrder:
                pNode = self.probs(node, values)
                values[node] = np.random.random() < pNode
            # Sample weight = product over evidence nodes of the
            # probability of their observed value.
            currProb = reduce(lambda x, y: x * y,
                              [self.probs(i, values) if values[i]
                               else 1 - self.probs(i, values)
                               for i in evidenceNodes])
            if values[targetNode]:
                cumsumHit += currProb
            cumsumTotal += currProb
        return cumsumHit / cumsumTotal
# Topology of the 6-node example network as (parent, child) pairs.
edges=[(0,1),(0,2),(1,3),(1,4),(2,4),(2,5)]
def probs(node, evidences):
    """Conditional probability table for the 6-node example network.

    Returns P(node=True) given its parents' boolean values, looked up by
    parent index in *evidences*.  Node 0 is the (parentless) root.
    """
    if node == 0:
        return 0.3
    if node == 1:
        return 0.9 if evidences[0] else 0.2
    if node == 2:
        return 0.75 if evidences[0] else 0.25
    if node == 3:
        return 0.6 if evidences[1] else 0.1
    if node == 4:
        # Two parents (1 and 2): four table entries.
        if evidences[1]:
            return 0.8 if evidences[2] else 0.6
        return 0.5 if evidences[2] else 0
    if node == 5:
        return 0.4 if evidences[2] else 0.1
# Build the example network and estimate P(node 0 = True | nodes 3 and 4
# observed True, node 5 observed False) with both sampling schemes.
# NOTE: each call runs the default 10,000,000 samples, so this is slow.
pn=ProbabilityNetwork(6, edges, probs)
evidences=dict([[3,True],[4,True],[5,False]])
print(pn.logicSampling(evidences, 0))
print(pn.weightedLikelihood(evidences,0))
|
flexible
|
{
"blob_id": "24fa41f916b54345e4647354f972bd22e130decf",
"index": 4016,
"step-1": "<mask token>\n\n\nclass ProbabilityNetwork:\n\n def __init__(self, n, edges, probs):\n self.nodes = list(range(n))\n self.edges = edges\n self.probs = probs\n\n def parents(self, node):\n return [a for a, b in edges if b == node]\n\n def ancestralOrder(self):\n order = []\n while len(order) < len(self.nodes):\n for node in self.nodes:\n if node in order:\n continue\n if not any(edge[0] not in order and edge[1] == node for\n edge in self.edges):\n order.append(node)\n return order\n\n def logicSampling(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = self.ancestralOrder()\n hits = 0\n total = 0\n for it in range(niters):\n fail = False\n values = dict([[i, None] for i in self.nodes])\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n if node in evidences and evidences[node] != values[node]:\n fail = True\n break\n if fail:\n continue\n total += 1\n if values[targetNode]:\n hits += 1\n return hits / total\n\n def weightedLikelihood(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = [node for node in self.ancestralOrder() if node not in\n evidenceNodes]\n cumsumHit = 0\n cumsumTotal = 0\n hits = 0\n for it in range(niters):\n values = dict([[i, None] for i in ancestralOrder])\n for evNode in evidenceNodes:\n values[evNode] = evidences[evNode]\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n currProb = reduce(lambda x, y: x * y, [(self.probs(i, values) if\n values[i] else 1 - self.probs(i, values)) for i in\n evidenceNodes])\n if values[targetNode]:\n cumsumHit += currProb\n cumsumTotal += currProb\n return cumsumHit / cumsumTotal\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ProbabilityNetwork:\n\n def __init__(self, n, edges, probs):\n self.nodes = list(range(n))\n self.edges = edges\n self.probs = probs\n\n def parents(self, node):\n return [a for a, b in edges if b == node]\n\n def ancestralOrder(self):\n order = []\n while len(order) < len(self.nodes):\n for node in self.nodes:\n if node in order:\n continue\n if not any(edge[0] not in order and edge[1] == node for\n edge in self.edges):\n order.append(node)\n return order\n\n def logicSampling(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = self.ancestralOrder()\n hits = 0\n total = 0\n for it in range(niters):\n fail = False\n values = dict([[i, None] for i in self.nodes])\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n if node in evidences and evidences[node] != values[node]:\n fail = True\n break\n if fail:\n continue\n total += 1\n if values[targetNode]:\n hits += 1\n return hits / total\n\n def weightedLikelihood(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = [node for node in self.ancestralOrder() if node not in\n evidenceNodes]\n cumsumHit = 0\n cumsumTotal = 0\n hits = 0\n for it in range(niters):\n values = dict([[i, None] for i in ancestralOrder])\n for evNode in evidenceNodes:\n values[evNode] = evidences[evNode]\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n currProb = reduce(lambda x, y: x * y, [(self.probs(i, values) if\n values[i] else 1 - self.probs(i, values)) for i in\n evidenceNodes])\n if values[targetNode]:\n cumsumHit += currProb\n cumsumTotal += currProb\n return cumsumHit / cumsumTotal\n\n\n<mask token>\n\n\ndef probs(node, evidences):\n if node == 0:\n return 0.3\n elif node == 1:\n if evidences[0]:\n return 0.9\n else:\n return 0.2\n elif node 
== 2:\n if evidences[0]:\n return 0.75\n else:\n return 0.25\n elif node == 3:\n if evidences[1]:\n return 0.6\n else:\n return 0.1\n elif node == 4:\n if evidences[1] and evidences[2]:\n return 0.8\n elif evidences[1] and not evidences[2]:\n return 0.6\n elif not evidences[1] and evidences[2]:\n return 0.5\n else:\n return 0\n elif node == 5:\n if evidences[2]:\n return 0.4\n else:\n return 0.1\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ProbabilityNetwork:\n\n def __init__(self, n, edges, probs):\n self.nodes = list(range(n))\n self.edges = edges\n self.probs = probs\n\n def parents(self, node):\n return [a for a, b in edges if b == node]\n\n def ancestralOrder(self):\n order = []\n while len(order) < len(self.nodes):\n for node in self.nodes:\n if node in order:\n continue\n if not any(edge[0] not in order and edge[1] == node for\n edge in self.edges):\n order.append(node)\n return order\n\n def logicSampling(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = self.ancestralOrder()\n hits = 0\n total = 0\n for it in range(niters):\n fail = False\n values = dict([[i, None] for i in self.nodes])\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n if node in evidences and evidences[node] != values[node]:\n fail = True\n break\n if fail:\n continue\n total += 1\n if values[targetNode]:\n hits += 1\n return hits / total\n\n def weightedLikelihood(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = [node for node in self.ancestralOrder() if node not in\n evidenceNodes]\n cumsumHit = 0\n cumsumTotal = 0\n hits = 0\n for it in range(niters):\n values = dict([[i, None] for i in ancestralOrder])\n for evNode in evidenceNodes:\n values[evNode] = evidences[evNode]\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n currProb = reduce(lambda x, y: x * y, [(self.probs(i, values) if\n values[i] else 1 - self.probs(i, values)) for i in\n evidenceNodes])\n if values[targetNode]:\n cumsumHit += currProb\n cumsumTotal += currProb\n return cumsumHit / cumsumTotal\n\n\nedges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 4), (2, 5)]\n\n\ndef probs(node, evidences):\n if node == 0:\n return 0.3\n elif node == 1:\n if evidences[0]:\n 
return 0.9\n else:\n return 0.2\n elif node == 2:\n if evidences[0]:\n return 0.75\n else:\n return 0.25\n elif node == 3:\n if evidences[1]:\n return 0.6\n else:\n return 0.1\n elif node == 4:\n if evidences[1] and evidences[2]:\n return 0.8\n elif evidences[1] and not evidences[2]:\n return 0.6\n elif not evidences[1] and evidences[2]:\n return 0.5\n else:\n return 0\n elif node == 5:\n if evidences[2]:\n return 0.4\n else:\n return 0.1\n\n\npn = ProbabilityNetwork(6, edges, probs)\nevidences = dict([[3, True], [4, True], [5, False]])\nprint(pn.logicSampling(evidences, 0))\nprint(pn.weightedLikelihood(evidences, 0))\n",
"step-4": "import numpy as np\nfrom functools import reduce\n\n\nclass ProbabilityNetwork:\n\n def __init__(self, n, edges, probs):\n self.nodes = list(range(n))\n self.edges = edges\n self.probs = probs\n\n def parents(self, node):\n return [a for a, b in edges if b == node]\n\n def ancestralOrder(self):\n order = []\n while len(order) < len(self.nodes):\n for node in self.nodes:\n if node in order:\n continue\n if not any(edge[0] not in order and edge[1] == node for\n edge in self.edges):\n order.append(node)\n return order\n\n def logicSampling(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = self.ancestralOrder()\n hits = 0\n total = 0\n for it in range(niters):\n fail = False\n values = dict([[i, None] for i in self.nodes])\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n if node in evidences and evidences[node] != values[node]:\n fail = True\n break\n if fail:\n continue\n total += 1\n if values[targetNode]:\n hits += 1\n return hits / total\n\n def weightedLikelihood(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = [node for node in self.ancestralOrder() if node not in\n evidenceNodes]\n cumsumHit = 0\n cumsumTotal = 0\n hits = 0\n for it in range(niters):\n values = dict([[i, None] for i in ancestralOrder])\n for evNode in evidenceNodes:\n values[evNode] = evidences[evNode]\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n currProb = reduce(lambda x, y: x * y, [(self.probs(i, values) if\n values[i] else 1 - self.probs(i, values)) for i in\n evidenceNodes])\n if values[targetNode]:\n cumsumHit += currProb\n cumsumTotal += currProb\n return cumsumHit / cumsumTotal\n\n\nedges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 4), (2, 5)]\n\n\ndef probs(node, evidences):\n if node == 0:\n return 0.3\n 
elif node == 1:\n if evidences[0]:\n return 0.9\n else:\n return 0.2\n elif node == 2:\n if evidences[0]:\n return 0.75\n else:\n return 0.25\n elif node == 3:\n if evidences[1]:\n return 0.6\n else:\n return 0.1\n elif node == 4:\n if evidences[1] and evidences[2]:\n return 0.8\n elif evidences[1] and not evidences[2]:\n return 0.6\n elif not evidences[1] and evidences[2]:\n return 0.5\n else:\n return 0\n elif node == 5:\n if evidences[2]:\n return 0.4\n else:\n return 0.1\n\n\npn = ProbabilityNetwork(6, edges, probs)\nevidences = dict([[3, True], [4, True], [5, False]])\nprint(pn.logicSampling(evidences, 0))\nprint(pn.weightedLikelihood(evidences, 0))\n",
"step-5": "#YET TO COMMENT.\n\nimport numpy as np\nfrom functools import reduce\n\nclass ProbabilityNetwork:\n def __init__(self,n,edges,probs):\n self.nodes=list(range(n))\n self.edges=edges\n self.probs=probs\n\n def parents(self, node):\n return [a for a,b in edges if b==node]\n\n def ancestralOrder(self):\n order=[]\n while len(order)<len(self.nodes):\n for node in self.nodes:\n if node in order:\n continue\n if not any((edge[0] not in order) and (edge[1]==node) for edge in self.edges):\n order.append(node)\n return order\n\n def logicSampling(self, evidences, targetNode, niters=10000000):\n evidenceNodes=evidences.keys()\n ancestralOrder = self.ancestralOrder()\n hits=0\n total=0\n\n for it in range(niters):\n fail=False\n values=dict([ [i,None] for i in self.nodes]) #True: present. False: not present\n for node in ancestralOrder:\n pNode=self.probs(node, values)\n nodeValue=np.random.random()<pNode\n values[node]=nodeValue\n if node in evidences and evidences[node]!=values[node]:\n fail=True\n break\n\n if fail: continue\n\n #print(values)\n total+=1\n if values[targetNode]:\n hits+=1\n\n return hits/total\n\n def weightedLikelihood(self, evidences, targetNode, niters=10000000):\n evidenceNodes=evidences.keys()\n\n ancestralOrder = [node for node in self.ancestralOrder() if node not in evidenceNodes]\n cumsumHit=0\n cumsumTotal=0\n hits=0\n for it in range(niters):\n values=dict([ [i,None] for i in ancestralOrder]) #True: present. 
False: not present\n for evNode in evidenceNodes:\n values[evNode]=evidences[evNode]\n\n for node in ancestralOrder:\n pNode=self.probs(node, values)\n nodeValue=np.random.random()<pNode\n values[node]=nodeValue\n\n currProb=reduce(lambda x,y:x*y, [self.probs(i,values) if values[i] else 1-self.probs(i,values) for i in evidenceNodes ])\n if values[targetNode]:\n cumsumHit+=currProb\n\n cumsumTotal+=currProb\n\n return cumsumHit/cumsumTotal\n\n\n\nedges=[(0,1),(0,2),(1,3),(1,4),(2,4),(2,5)]\n\ndef probs(node,evidences):\n if node==0: return 0.3\n elif node==1:\n if evidences[0]: return 0.9\n else: return 0.2\n elif node==2:\n if evidences[0]: return 0.75\n else: return 0.25\n elif node==3:\n if evidences[1]: return 0.6\n else: return 0.1\n elif node==4:\n if evidences[1] and evidences[2]: return 0.8\n elif evidences[1] and not evidences[2]: return 0.6\n elif not evidences[1] and evidences[2]: return 0.5\n else: return 0\n elif node==5:\n if evidences[2]: return 0.4\n else: return 0.1\n\npn=ProbabilityNetwork(6, edges, probs)\n\nevidences=dict([[3,True],[4,True],[5,False]])\n\nprint(pn.logicSampling(evidences, 0))\nprint(pn.weightedLikelihood(evidences,0))\n\n\n\n",
"step-ids": [
6,
7,
9,
10,
11
]
}
|
[
6,
7,
9,
10,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
try:
from psycopg2 import connect
except:
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import pgnumpy
import cpgnumpy
from pgnumpy import connect
from pgnumpy import PgNumpy
from pgnumpy import PgInput
from pgnumpy import ArrayWriter
from pgnumpy import ArrayStringifier
from pgnumpy import array2table
from pgnumpy import test
from pgnumpy import test_simple
try:
from psycopg2 import connect
except:
pass
<|reserved_special_token_1|>
"""
Package:
pgnumpy
Description
A class and a set of functions for interacting with a PostgreSql database.
A C++ extension module allows returning results as a NumPy array. Numpy
arrays can also be written to tables.
The workhorse class is called PgNumpy
This class has limited functionality compared to the full Python database
api specification. It can execute arbitrary queries and extract results
into numpy arrays. However, cursors are not yet supported. For getting
results, only the fetchall() command is available, as the goal is always to
extract all rows into a single numpy structure rather than work row by row.
    More generic DB-API compliant packages like psycopg are more suitable when
more flexible operations are needed.
Classes:
PgNumpy:
The class used in all database interactions. This class represents a
database connection and facilitates executing queries and extracting
results. See docs for pgnumpy.PgNumpy for more details.
PgInput:
A class for writing input files for use in a COPY into the database.
ArrayWriter:
Write arrays to a file for input to postgres. This slower version can
be used if recfile is not available.
ArrayStringifier:
Make a string from an array, possibly with brackets indicating
dimensions.
Convenience Functions:
connect:
Create a database connection, returning a PgNumpy object. If conninfo
is None or "" then the "default" connection based on the PGUSER and
PGDATABASE environment variables is used.
array2table:
Write array with fields (a structure) to a postgres table. If the
table does not yet exist it is created with column definitions based on
the input array. If it does exist the data are appended as new rows in
the table.
"""
import pgnumpy
import cpgnumpy
from pgnumpy import connect
from pgnumpy import PgNumpy
from pgnumpy import PgInput
from pgnumpy import ArrayWriter
from pgnumpy import ArrayStringifier
from pgnumpy import array2table
#from pgnumpy import tables
#from pgnumpy import table_exists
#from pgnumpy import describe
from pgnumpy import test
from pgnumpy import test_simple
#from pgnumpy import obliterate
#from pgnumpy import compare_arrays
# attempt to import the connect method from psycopg2
try:
from psycopg2 import connect
except:
pass
|
flexible
|
{
"blob_id": "7e5cf782692d9cfb2718b2efcc83efa2ecb815cd",
"index": 1371,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntry:\n from psycopg2 import connect\nexcept:\n pass\n",
"step-3": "<mask token>\nimport pgnumpy\nimport cpgnumpy\nfrom pgnumpy import connect\nfrom pgnumpy import PgNumpy\nfrom pgnumpy import PgInput\nfrom pgnumpy import ArrayWriter\nfrom pgnumpy import ArrayStringifier\nfrom pgnumpy import array2table\nfrom pgnumpy import test\nfrom pgnumpy import test_simple\ntry:\n from psycopg2 import connect\nexcept:\n pass\n",
"step-4": "\"\"\"\nPackage:\n pgnumpy\nDescription\n\n A class and a set of functions for interacting with a PostgreSql database.\n A C++ extension module allows returning results as a NumPy array. Numpy\n arrays can also be written to tables. \n \n The workhorse class is called PgNumpy\n\n This class has limited functionality compared to the full Python database\n api specification. It can execute arbitrary queries and extract results\n into numpy arrays. However, cursors are not yet supported. For getting\n results, only the fetchall() command is available, as the goal is always to\n extract all rows into a single numpy structure rather than work row by row.\n \n More generic DB-API compliant packges like psycopg are more suitable when\n more flexible operations are needed.\n\nClasses:\n PgNumpy: \n The class used in all database interactions. This class represents a\n database connection and facilitates executing queries and extracting\n results. See docs for pgnumpy.PgNumpy for more details.\n PgInput: \n A class for writing input files for use in a COPY into the database.\n ArrayWriter: \n Write arrays to a file for input to postgres. This slower version can\n be used if recfile is not available.\n ArrayStringifier: \n Make a string from an array, possibly with brackets indicating\n dimensions.\n\n\nConvenience Functions:\n\n connect:\n Create a database connection, returning a PgNumpy object. If conninfo\n is None or \"\" then the \"default\" connection based on the PGUSER and\n PGDATABASE environment variables is used.\n\n array2table:\n Write array with fields (a structure) to a postgres table. If the\n table does not yet exist it is created with column definitions based on\n the input array. If it does exist the data are appended as new rows in\n the table. 
\n\n\"\"\"\n\nimport pgnumpy\nimport cpgnumpy\n\nfrom pgnumpy import connect\nfrom pgnumpy import PgNumpy\nfrom pgnumpy import PgInput\nfrom pgnumpy import ArrayWriter\nfrom pgnumpy import ArrayStringifier\nfrom pgnumpy import array2table\n\n#from pgnumpy import tables\n#from pgnumpy import table_exists\n#from pgnumpy import describe\nfrom pgnumpy import test\nfrom pgnumpy import test_simple\n#from pgnumpy import obliterate\n#from pgnumpy import compare_arrays\n\n# attempt to import the connect method from psycopg2\ntry:\n from psycopg2 import connect\nexcept:\n pass\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python3
"""ds-viz entry point: parse command-line arguments and run the visualiser."""
import sys
from argparse import ArgumentParser
from arg_checks import IsFile, MinInt
from visualisation import Visualisation
parser = ArgumentParser(description="Visualises DS simulations")
# The order of arguments in descending order of file frequency is: config, failures, log.
# This should be the preferable order when using ds-viz via command-line.
# However, failure-free simulations should also be supported, so the failure argument is optional
parser.add_argument("config", action=IsFile,
                    help="configuration file used in simulation")
parser.add_argument("log", action=IsFile,
                    help="simulation log file to visualise")
parser.add_argument("-f", "--failures", metavar="RESOURCE_FAILURES", action=IsFile,
                    help="resource-failures file from simulation")
# Display tuning knobs.  MinInt presumably rejects values below min_int
# (see arg_checks) — confirm against its implementation.
parser.add_argument("-c", "--core_height", type=int, default=8, action=MinInt, min_int=1,
                    help="set core height, minimum value of 1")
parser.add_argument("-s", "--scale", type=int, default=sys.maxsize, action=MinInt,
                    help="set scaling factor of visualisation")
parser.add_argument("-w", "--width", type=int, default=1, action=MinInt, min_int=1,
                    help="set visualisation width as a multiple of window width, minimum value of 1")
args = parser.parse_args()
# Build the visualisation from the parsed files/settings and start it.
viz = Visualisation(args.config, args.failures, args.log, args.core_height, args.scale, args.width)
viz.run()
|
normal
|
{
"blob_id": "1f953b20ff0eb868c2fbff367fafa8b651617e64",
"index": 6131,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nparser.add_argument('config', action=IsFile, help=\n 'configuration file used in simulation')\nparser.add_argument('log', action=IsFile, help=\n 'simulation log file to visualise')\nparser.add_argument('-f', '--failures', metavar='RESOURCE_FAILURES', action\n =IsFile, help='resource-failures file from simulation')\nparser.add_argument('-c', '--core_height', type=int, default=8, action=\n MinInt, min_int=1, help='set core height, minimum value of 1')\nparser.add_argument('-s', '--scale', type=int, default=sys.maxsize, action=\n MinInt, help='set scaling factor of visualisation')\nparser.add_argument('-w', '--width', type=int, default=1, action=MinInt,\n min_int=1, help=\n 'set visualisation width as a multiple of window width, minimum value of 1'\n )\n<mask token>\nviz.run()\n",
"step-3": "<mask token>\nparser = ArgumentParser(description='Visualises DS simulations')\nparser.add_argument('config', action=IsFile, help=\n 'configuration file used in simulation')\nparser.add_argument('log', action=IsFile, help=\n 'simulation log file to visualise')\nparser.add_argument('-f', '--failures', metavar='RESOURCE_FAILURES', action\n =IsFile, help='resource-failures file from simulation')\nparser.add_argument('-c', '--core_height', type=int, default=8, action=\n MinInt, min_int=1, help='set core height, minimum value of 1')\nparser.add_argument('-s', '--scale', type=int, default=sys.maxsize, action=\n MinInt, help='set scaling factor of visualisation')\nparser.add_argument('-w', '--width', type=int, default=1, action=MinInt,\n min_int=1, help=\n 'set visualisation width as a multiple of window width, minimum value of 1'\n )\nargs = parser.parse_args()\nviz = Visualisation(args.config, args.failures, args.log, args.core_height,\n args.scale, args.width)\nviz.run()\n",
"step-4": "import sys\nfrom argparse import ArgumentParser\nfrom arg_checks import IsFile, MinInt\nfrom visualisation import Visualisation\nparser = ArgumentParser(description='Visualises DS simulations')\nparser.add_argument('config', action=IsFile, help=\n 'configuration file used in simulation')\nparser.add_argument('log', action=IsFile, help=\n 'simulation log file to visualise')\nparser.add_argument('-f', '--failures', metavar='RESOURCE_FAILURES', action\n =IsFile, help='resource-failures file from simulation')\nparser.add_argument('-c', '--core_height', type=int, default=8, action=\n MinInt, min_int=1, help='set core height, minimum value of 1')\nparser.add_argument('-s', '--scale', type=int, default=sys.maxsize, action=\n MinInt, help='set scaling factor of visualisation')\nparser.add_argument('-w', '--width', type=int, default=1, action=MinInt,\n min_int=1, help=\n 'set visualisation width as a multiple of window width, minimum value of 1'\n )\nargs = parser.parse_args()\nviz = Visualisation(args.config, args.failures, args.log, args.core_height,\n args.scale, args.width)\nviz.run()\n",
"step-5": "#!/usr/bin/env python3\n\nimport sys\nfrom argparse import ArgumentParser\n\nfrom arg_checks import IsFile, MinInt\nfrom visualisation import Visualisation\n\nparser = ArgumentParser(description=\"Visualises DS simulations\")\n\n# The order of arguments in descending order of file frequency is: config, failures, log.\n# This should be the preferable order when using ds-viz via command-line.\n# However, failure-free simulations should also be supported, so the failure argument is optional\nparser.add_argument(\"config\", action=IsFile,\n help=\"configuration file used in simulation\")\nparser.add_argument(\"log\", action=IsFile,\n help=\"simulation log file to visualise\")\nparser.add_argument(\"-f\", \"--failures\", metavar=\"RESOURCE_FAILURES\", action=IsFile,\n help=\"resource-failures file from simulation\")\nparser.add_argument(\"-c\", \"--core_height\", type=int, default=8, action=MinInt, min_int=1,\n help=\"set core height, minimum value of 1\")\nparser.add_argument(\"-s\", \"--scale\", type=int, default=sys.maxsize, action=MinInt,\n help=\"set scaling factor of visualisation\")\nparser.add_argument(\"-w\", \"--width\", type=int, default=1, action=MinInt, min_int=1,\n help=\"set visualisation width as a multiple of window width, minimum value of 1\")\nargs = parser.parse_args()\n\nviz = Visualisation(args.config, args.failures, args.log, args.core_height, args.scale, args.width)\nviz.run()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
a = [66.25, 333, 333, 1, 1234.5]
a.index(333)
print(a)
|
normal
|
{
"blob_id": "7aba77137b96071101078c38c1c9397bf837d92a",
"index": 1378,
"step-1": "<mask token>\n",
"step-2": "<mask token>\na.index(333)\nprint(a)\n",
"step-3": "a = [66.25, 333, 333, 1, 1234.5]\na.index(333)\nprint(a)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def fall_asleep(record: WarcRecord):
current_uri: str = record.target_uri
start_time = str(datetime.now())
process_id = str(os.getpid())
print('@@1 falling asleep in process {} at {} processing {}'.format(
process_id, start_time, current_uri))
time.sleep(5)
end_time = str(datetime.now())
print('@@2 awakening in process {} at {} processing {}'.format(
process_id, end_time, current_uri))
return process_id, current_uri
<|reserved_special_token_0|>
def quick_print(processid_uri: (int, str)) ->(int, int):
new_process_id = str(os.getpid())
timepoint = str(datetime.now())
print('@@4 map2 in process {} at {} processing {}'.format(
new_process_id, timepoint, processid_uri[1]))
return processid_uri[0], new_process_id
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def fall_asleep(record: WarcRecord):
current_uri: str = record.target_uri
start_time = str(datetime.now())
process_id = str(os.getpid())
print('@@1 falling asleep in process {} at {} processing {}'.format(
process_id, start_time, current_uri))
time.sleep(5)
end_time = str(datetime.now())
print('@@2 awakening in process {} at {} processing {}'.format(
process_id, end_time, current_uri))
return process_id, current_uri
def trivial_filter(processid_uri: (int, str)) ->bool:
new_process_id = str(os.getpid())
timepoint = str(datetime.now())
print('@@3 filter in process {} at {} processing {}'.format(
new_process_id, timepoint, processid_uri[1]))
return True
def quick_print(processid_uri: (int, str)) ->(int, int):
new_process_id = str(os.getpid())
timepoint = str(datetime.now())
print('@@4 map2 in process {} at {} processing {}'.format(
new_process_id, timepoint, processid_uri[1]))
return processid_uri[0], new_process_id
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def fall_asleep(record: WarcRecord):
current_uri: str = record.target_uri
start_time = str(datetime.now())
process_id = str(os.getpid())
print('@@1 falling asleep in process {} at {} processing {}'.format(
process_id, start_time, current_uri))
time.sleep(5)
end_time = str(datetime.now())
print('@@2 awakening in process {} at {} processing {}'.format(
process_id, end_time, current_uri))
return process_id, current_uri
def trivial_filter(processid_uri: (int, str)) ->bool:
new_process_id = str(os.getpid())
timepoint = str(datetime.now())
print('@@3 filter in process {} at {} processing {}'.format(
new_process_id, timepoint, processid_uri[1]))
return True
def quick_print(processid_uri: (int, str)) ->(int, int):
new_process_id = str(os.getpid())
timepoint = str(datetime.now())
print('@@4 map2 in process {} at {} processing {}'.format(
new_process_id, timepoint, processid_uri[1]))
return processid_uri[0], new_process_id
if __name__ == '__main__':
session: SparkSession = create_session(3, 'Wave exploration')
input_warc = (
'/Users/a/Desktop/Buch/CC-MAIN-20191013195541-20191013222541-00000.warc'
)
raw_records = extract_raw_records(input_warc, session)
warc_records = raw_records.flatMap(parse_raw_warc)
process_ids_rdd = warc_records.map(fall_asleep).filter(trivial_filter).map(
quick_print)
distinct_process_ids: List[Tuple[int, int]] = process_ids_rdd.distinct(
).collect()
print(distinct_process_ids)
<|reserved_special_token_1|>
import os
import time
from datetime import datetime
from typing import List, Tuple
from pyspark.sql import SparkSession
from Chapter01.utilities01_py.helper_python import create_session
from Chapter02.utilities02_py.domain_objects import WarcRecord
from Chapter02.utilities02_py.helper_python import extract_raw_records, parse_raw_warc
def fall_asleep(record: WarcRecord):
current_uri: str = record.target_uri
start_time = str(datetime.now())
process_id = str(os.getpid())
print('@@1 falling asleep in process {} at {} processing {}'.format(
process_id, start_time, current_uri))
time.sleep(5)
end_time = str(datetime.now())
print('@@2 awakening in process {} at {} processing {}'.format(
process_id, end_time, current_uri))
return process_id, current_uri
def trivial_filter(processid_uri: (int, str)) ->bool:
new_process_id = str(os.getpid())
timepoint = str(datetime.now())
print('@@3 filter in process {} at {} processing {}'.format(
new_process_id, timepoint, processid_uri[1]))
return True
def quick_print(processid_uri: (int, str)) ->(int, int):
new_process_id = str(os.getpid())
timepoint = str(datetime.now())
print('@@4 map2 in process {} at {} processing {}'.format(
new_process_id, timepoint, processid_uri[1]))
return processid_uri[0], new_process_id
if __name__ == '__main__':
session: SparkSession = create_session(3, 'Wave exploration')
input_warc = (
'/Users/a/Desktop/Buch/CC-MAIN-20191013195541-20191013222541-00000.warc'
)
raw_records = extract_raw_records(input_warc, session)
warc_records = raw_records.flatMap(parse_raw_warc)
process_ids_rdd = warc_records.map(fall_asleep).filter(trivial_filter).map(
quick_print)
distinct_process_ids: List[Tuple[int, int]] = process_ids_rdd.distinct(
).collect()
print(distinct_process_ids)
<|reserved_special_token_1|>
import os
import time
from datetime import datetime
from typing import List, Tuple
from pyspark.sql import SparkSession
from Chapter01.utilities01_py.helper_python import create_session
from Chapter02.utilities02_py.domain_objects import WarcRecord
from Chapter02.utilities02_py.helper_python import extract_raw_records, parse_raw_warc
def fall_asleep(record: WarcRecord):
current_uri: str = record.target_uri
start_time = str(datetime.now())
process_id = str(os.getpid())
print('@@1 falling asleep in process {} at {} processing {}'.format(process_id, start_time, current_uri))
time.sleep(5)
end_time = str(datetime.now())
print('@@2 awakening in process {} at {} processing {}'.format(process_id, end_time, current_uri))
return process_id, current_uri
def trivial_filter(processid_uri: (int, str)) -> bool:
new_process_id = str(os.getpid())
timepoint = str(datetime.now())
print('@@3 filter in process {} at {} processing {}'.format(new_process_id, timepoint, processid_uri[1]))
return True
def quick_print(processid_uri: (int, str)) -> (int, int):
new_process_id = str(os.getpid())
timepoint = str(datetime.now())
print('@@4 map2 in process {} at {} processing {}'.format(new_process_id, timepoint, processid_uri[1]))
return processid_uri[0], new_process_id
if __name__ == "__main__":
session: SparkSession = create_session(3, "Wave exploration")
input_warc = "/Users/a/Desktop/Buch/CC-MAIN-20191013195541-20191013222541-00000.warc" # ToDo: Change path
raw_records = extract_raw_records(input_warc, session)
warc_records = raw_records \
.flatMap(parse_raw_warc)
process_ids_rdd = warc_records\
.map(fall_asleep)\
.filter(trivial_filter)\
.map(quick_print)
distinct_process_ids: List[Tuple[int, int]] = process_ids_rdd.distinct().collect()
print(distinct_process_ids)
|
flexible
|
{
"blob_id": "fccdf75fe83ad8388c12a63555c4132181fd349a",
"index": 1646,
"step-1": "<mask token>\n\n\ndef fall_asleep(record: WarcRecord):\n current_uri: str = record.target_uri\n start_time = str(datetime.now())\n process_id = str(os.getpid())\n print('@@1 falling asleep in process {} at {} processing {}'.format(\n process_id, start_time, current_uri))\n time.sleep(5)\n end_time = str(datetime.now())\n print('@@2 awakening in process {} at {} processing {}'.format(\n process_id, end_time, current_uri))\n return process_id, current_uri\n\n\n<mask token>\n\n\ndef quick_print(processid_uri: (int, str)) ->(int, int):\n new_process_id = str(os.getpid())\n timepoint = str(datetime.now())\n print('@@4 map2 in process {} at {} processing {}'.format(\n new_process_id, timepoint, processid_uri[1]))\n return processid_uri[0], new_process_id\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef fall_asleep(record: WarcRecord):\n current_uri: str = record.target_uri\n start_time = str(datetime.now())\n process_id = str(os.getpid())\n print('@@1 falling asleep in process {} at {} processing {}'.format(\n process_id, start_time, current_uri))\n time.sleep(5)\n end_time = str(datetime.now())\n print('@@2 awakening in process {} at {} processing {}'.format(\n process_id, end_time, current_uri))\n return process_id, current_uri\n\n\ndef trivial_filter(processid_uri: (int, str)) ->bool:\n new_process_id = str(os.getpid())\n timepoint = str(datetime.now())\n print('@@3 filter in process {} at {} processing {}'.format(\n new_process_id, timepoint, processid_uri[1]))\n return True\n\n\ndef quick_print(processid_uri: (int, str)) ->(int, int):\n new_process_id = str(os.getpid())\n timepoint = str(datetime.now())\n print('@@4 map2 in process {} at {} processing {}'.format(\n new_process_id, timepoint, processid_uri[1]))\n return processid_uri[0], new_process_id\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef fall_asleep(record: WarcRecord):\n current_uri: str = record.target_uri\n start_time = str(datetime.now())\n process_id = str(os.getpid())\n print('@@1 falling asleep in process {} at {} processing {}'.format(\n process_id, start_time, current_uri))\n time.sleep(5)\n end_time = str(datetime.now())\n print('@@2 awakening in process {} at {} processing {}'.format(\n process_id, end_time, current_uri))\n return process_id, current_uri\n\n\ndef trivial_filter(processid_uri: (int, str)) ->bool:\n new_process_id = str(os.getpid())\n timepoint = str(datetime.now())\n print('@@3 filter in process {} at {} processing {}'.format(\n new_process_id, timepoint, processid_uri[1]))\n return True\n\n\ndef quick_print(processid_uri: (int, str)) ->(int, int):\n new_process_id = str(os.getpid())\n timepoint = str(datetime.now())\n print('@@4 map2 in process {} at {} processing {}'.format(\n new_process_id, timepoint, processid_uri[1]))\n return processid_uri[0], new_process_id\n\n\nif __name__ == '__main__':\n session: SparkSession = create_session(3, 'Wave exploration')\n input_warc = (\n '/Users/a/Desktop/Buch/CC-MAIN-20191013195541-20191013222541-00000.warc'\n )\n raw_records = extract_raw_records(input_warc, session)\n warc_records = raw_records.flatMap(parse_raw_warc)\n process_ids_rdd = warc_records.map(fall_asleep).filter(trivial_filter).map(\n quick_print)\n distinct_process_ids: List[Tuple[int, int]] = process_ids_rdd.distinct(\n ).collect()\n print(distinct_process_ids)\n",
"step-4": "import os\nimport time\nfrom datetime import datetime\nfrom typing import List, Tuple\nfrom pyspark.sql import SparkSession\nfrom Chapter01.utilities01_py.helper_python import create_session\nfrom Chapter02.utilities02_py.domain_objects import WarcRecord\nfrom Chapter02.utilities02_py.helper_python import extract_raw_records, parse_raw_warc\n\n\ndef fall_asleep(record: WarcRecord):\n current_uri: str = record.target_uri\n start_time = str(datetime.now())\n process_id = str(os.getpid())\n print('@@1 falling asleep in process {} at {} processing {}'.format(\n process_id, start_time, current_uri))\n time.sleep(5)\n end_time = str(datetime.now())\n print('@@2 awakening in process {} at {} processing {}'.format(\n process_id, end_time, current_uri))\n return process_id, current_uri\n\n\ndef trivial_filter(processid_uri: (int, str)) ->bool:\n new_process_id = str(os.getpid())\n timepoint = str(datetime.now())\n print('@@3 filter in process {} at {} processing {}'.format(\n new_process_id, timepoint, processid_uri[1]))\n return True\n\n\ndef quick_print(processid_uri: (int, str)) ->(int, int):\n new_process_id = str(os.getpid())\n timepoint = str(datetime.now())\n print('@@4 map2 in process {} at {} processing {}'.format(\n new_process_id, timepoint, processid_uri[1]))\n return processid_uri[0], new_process_id\n\n\nif __name__ == '__main__':\n session: SparkSession = create_session(3, 'Wave exploration')\n input_warc = (\n '/Users/a/Desktop/Buch/CC-MAIN-20191013195541-20191013222541-00000.warc'\n )\n raw_records = extract_raw_records(input_warc, session)\n warc_records = raw_records.flatMap(parse_raw_warc)\n process_ids_rdd = warc_records.map(fall_asleep).filter(trivial_filter).map(\n quick_print)\n distinct_process_ids: List[Tuple[int, int]] = process_ids_rdd.distinct(\n ).collect()\n print(distinct_process_ids)\n",
"step-5": "import os\nimport time\nfrom datetime import datetime\nfrom typing import List, Tuple\nfrom pyspark.sql import SparkSession\nfrom Chapter01.utilities01_py.helper_python import create_session\nfrom Chapter02.utilities02_py.domain_objects import WarcRecord\nfrom Chapter02.utilities02_py.helper_python import extract_raw_records, parse_raw_warc\n\n\ndef fall_asleep(record: WarcRecord):\n current_uri: str = record.target_uri\n start_time = str(datetime.now())\n process_id = str(os.getpid())\n print('@@1 falling asleep in process {} at {} processing {}'.format(process_id, start_time, current_uri))\n time.sleep(5)\n end_time = str(datetime.now())\n print('@@2 awakening in process {} at {} processing {}'.format(process_id, end_time, current_uri))\n return process_id, current_uri\n\n\ndef trivial_filter(processid_uri: (int, str)) -> bool:\n new_process_id = str(os.getpid())\n timepoint = str(datetime.now())\n print('@@3 filter in process {} at {} processing {}'.format(new_process_id, timepoint, processid_uri[1]))\n return True\n\n\ndef quick_print(processid_uri: (int, str)) -> (int, int):\n new_process_id = str(os.getpid())\n timepoint = str(datetime.now())\n print('@@4 map2 in process {} at {} processing {}'.format(new_process_id, timepoint, processid_uri[1]))\n return processid_uri[0], new_process_id\n\nif __name__ == \"__main__\":\n session: SparkSession = create_session(3, \"Wave exploration\")\n\n input_warc = \"/Users/a/Desktop/Buch/CC-MAIN-20191013195541-20191013222541-00000.warc\" # ToDo: Change path\n raw_records = extract_raw_records(input_warc, session)\n warc_records = raw_records \\\n .flatMap(parse_raw_warc)\n\n process_ids_rdd = warc_records\\\n .map(fall_asleep)\\\n .filter(trivial_filter)\\\n .map(quick_print)\n\n distinct_process_ids: List[Tuple[int, int]] = process_ids_rdd.distinct().collect()\n print(distinct_process_ids)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
#!/usr/bin/env python
# ----------------------------------------------------------
# RJGlass Main Program version 0.2 8/1/07
# ----------------------------------------------------------
# Copyright 2007 Michael LaBrie
#
# This file is part of RJGlass.
#
# RJGlass is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# RJGlass is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ---------------------------------------------------------------
import sys, os, time
#Load the modules needed for RJGlass.
from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
#pygame needed for sound in server_only (so load either way)
import pygame
from pygame.locals import *
from pygame import image
from guage import * #All add on guage functions colors etc.
#This is code to import config file (config.py)
try:
import config
except ImportError:
# We're in a py2exe, so we'll append an element to the (one element)
# sys.path which points to Library.zip, to the directory that contains
# Library.zip, allowing us to import config.py
# Adds one level up from the Library.zip directory to the path, so import will go forward
sys.path.append(os.path.split(sys.path[0])[0])
import config
class screen_c(object):
#This controls what is in each screen.
def __init__(self, x, guage_list=[]):
self.guage_list = [] #list of guages to cycle through.
self.guage_index = 0
self.x = x
self.y = 0
self.width = 512
self.heigth = 768
self.add_guage_list(guage_list)
def add_guage_list(self,glist):
for g in glist:
self.append_guage(guage_dict[g])
def append_guage(self,guage):
self.guage_list.append(guage)
def cycle(self):
self.guage_index +=1
if self.guage_index >= len(self.guage_list):
self.guage_index =0
def cycle_reverse(self):
self.guage_index -=1
if self.guage_index <0:
self.guage_index = len(self.guage_list) -1
def active_guage(self):
return self.guage_list[self.guage_index]
#this is a static function not specificaly for the screen.
#the eventhandlers have references to the screens so it is easier to
#get the guage references by name through this object.
def gauge_by_name(self,name):
return guage_dict[name]
def draw(self, aircraft):
self.guage_active = self.guage_list[self.guage_index]
self.guage_active.draw(aircraft, self.x, self.y)
def InitPyGame():
glutInit(())
pygame.init()
if config.full_screen:
s = pygame.display.set_mode((1024,768), DOUBLEBUF|OPENGL|FULLSCREEN)
else:
s = pygame.display.set_mode((1024,768), DOUBLEBUF|OPENGL)
return s
def InitView(smooth, width, heigth):
global x_s, y_s, scissor
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
glLoadIdentity()
glOrtho(0,width,0.0,heigth,-1.0,1.0)
x_s = width/1024.0
y_s = heigth/768.0
glScalef(x_s, y_s, 1.0)
scissor.x_s = x_s
scissor.y_s = y_s
if smooth:
#Enable Smoothing Antianalising
glEnable(GL_LINE_SMOOTH)
glEnable(GL_BLEND)
#glBlendFunc(GL_SRC_ALPHA, GL_ZERO)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glHint(GL_LINE_SMOOTH_HINT, GL_DONT_CARE)
#glDisable(GL_DEPTH_TEST)
#Clear Screen
#glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
def DisplaySplash(filename, delay, window_x, window_y):
#Display needs to be initialized first.
i = image.load(filename)
splash_image = bitmap_image(i)
#Determine the x and y coords to put in center of screen.
splash_x = (window_x / 2) - (splash_image.w/2)
splash_y = (window_y /2) - (splash_image.h/2)
glRasterPos3f(splash_x,splash_y,0)
glDrawPixels(splash_image.w, splash_image.h, GL_RGBA, GL_UNSIGNED_BYTE, splash_image.tostring)
pygame.display.flip()
time.sleep(delay)
def DrawWindow(left_screen, right_screen):
def divider(): #Dividing vertical white line between instruments
glColor(white)
glLineWidth(2.0)
glBegin(GL_LINES)
glVertex2f(512.0, 0.0)
glVertex2f(512.0, 768.0)
glEnd()
def draw_nodata(x,y): #Draw no data text on screen.
glColor(red)
glLineWidth(5.0)
glPushMatrix()
glTranslatef(x,y,0)
glScalef(0.4,0.4,1.0)
glText("NO SIM DATA", 100)
glPopMatrix()
global count
divider()
#PFD.draw(aircraft_data,250,445)
left_screen.draw(aircraft_data)
#ND.draw(aircraft_data,512+256, 400)
#FMS.draw(aircraft_data,512+256, 0)
right_screen.draw(aircraft_data)
glDisable(GL_SCISSOR_TEST) #Disable any scissoring.
draw_FPS(20,740, aircraft_data.frame_time)
#If Nodata is coming from Flight Sim, show on screen
if aircraft_data.nodata:
draw_nodata(50,500)
count = count +1 #Used for FPS calc
def MainLoop(mode, server_only):
#global window
global starttime
global count
global mode_func, left_screen, right_screen, eventhandler
# Start Event Processing Engine
starttime = time.time() # Used for FPS (Frame Per Second) Calculation
if (server_only):
#Set up correct function for selected mode
mode_func = aircraft_data.get_mode_func(mode)
else:
left_screen = screen_c(256,config.left_screen)
right_screen = screen_c(512+256,config.right_screen)
# left_screen.add_guage_list(config.left_screen)
# right_screen.add_guage_list(config.right_screen)
#Set up correct function for selected mode
mode_func = aircraft_data.get_mode_func(mode, left_screen, right_screen)
#Setup Keyboard
#keys.setup_lists(aircraft_data)
#Inititalize View
#left_screen = screen_c(256, [PFD, ND, FMS])
eventhandler = event_handler.event_handler_c(aircraft_data,FMS, right_screen, left_screen)
#Load textures, and guages that use them
FMS.load_texture()
EICAS1.load_texture()
EICAS2.load_texture()
RADIO.load_texture()
if server_only:
server_loop()
else:
graphic_loop()
def graphic_loop():
#This is the loop for the non server mode. Gauges drawn.
while not (aircraft_data.quit_flag):
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT) #Clear Screen
#Update globaltime
aircraft_data.globaltime = time.time()
globaltime.update(time.time())
DrawWindow(left_screen, right_screen)
pygame.display.flip() #Update screen
mode_func() #Run aircraft mode function, to do all teh calaculations etc.
# Check for keypresses
eventhandler.check_events(pygame.event.get(), globaltime.value)
def server_loop():
#This is the loop for the server only mode. No Guages Drawn
while not (aircraft_data.quit_flag):
#Update globaltime
aircraft_data.globaltime = time.time()
globaltime.update(time.time())
mode_func() #Run aircraft mode function, to do all teh calaculations etc.
time.sleep(0.01) #Throw in some time delay, since no guages are being drawn.
# Check for keypresses
#eventhandler.check_events(pygame.event.get(), globaltime.value)
def Init_Graphics(x,y):
InitPyGame()
InitView(True, x,y)
def Initialize(server_only):
#if server_only True then server will just be run, No Graphics
#Initialize count for FPS calc
global count
count = 0
if (not server_only):
Init_Graphics(config.window_x, config.window_y)
#Draw Splash Screen
if config.splash:
DisplaySplash(config.splash_filename, config.splash_delay, config.window_x, config.window_y)
def ShutDown(mode, server_only):
#Close LogFile
datafile.close()
#Close pygame mixer
pygame.mixer.quit()
#Print average Frames per second on shutdown
print "FPS ", count / (time.time() - starttime)
#Try to kill the thread if it exists. Closes it down on exit
aircraft_data.AP.quit() #only here to close debugging files if present.
if ((mode != config.TEST) & (mode != config.CLIENT)): #If simconnected connected, kill the thread.
aircraft_data.kill_SimConnect()
def CheckArg(arg, mode, server_only, addr):
if 'server' in arg:
server_only = True
elif 'guage' in arg:
server_only = False
if 'client' in arg:
mode = config.CLIENT
elif 'test' in arg:
mode = config.TEST
for a in arg:
if 'addr' in a:
addr = a.split('=')[1]
return mode, server_only, addr
#===========================================================================
#Main program starts here
#===========================================================================
#Check arguments first, and get mode and server_only flags
mode, server_only, addr = CheckArg(sys.argv, config.mode, config.server_only, config.addr)
#config.addr = addr
#print addr
Initialize(server_only)
#Import guage files.
import aircraft #Does all of the aircraft_data
import event_handler #Handles all keyboard commands
import variable
if (not server_only):
import PFD_mod
import ND_mod
import EICAS1_mod
import EICAS2_mod
import FMS_guage
import radio_mod
#Create Guages
aircraft_data = aircraft.data()
variables = variable.variable_c(aircraft_data)
if (not server_only):
PFD = PFD_mod.PFD_Guage()
ND = ND_mod.ND_Guage()
FMS = FMS_guage.FMS_guage_c()
EICAS1 = EICAS1_mod.EICAS1_guage()
EICAS2 = EICAS2_mod.EICAS2_guage()
ND.initialize(aircraft_data)
RADIO = radio_mod.radio_guage()
guage_dict= { "RADIO":RADIO,"PFD":PFD,"ND":ND,"FMS":FMS,
"EICAS1":EICAS1,"EICAS2":EICAS2 }
print "Main Loop"
#Run main, and get window size and operation mode from config file. config.py
MainLoop(mode, server_only)
#===================
# Shuting Down
#===================
ShutDown(mode, server_only)
|
normal
|
{
"blob_id": "aafadcbf946db8ed85e3df48f5411967ec35c318",
"index": 7333,
"step-1": "#!/usr/bin/env python\n# ----------------------------------------------------------\n# RJGlass Main Program version 0.2 8/1/07\n# ----------------------------------------------------------\n# Copyright 2007 Michael LaBrie\n#\n# This file is part of RJGlass.\n#\n# RJGlass is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 3 of the License, or\n# (at your option) any later version.\n\n# RJGlass is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n# ---------------------------------------------------------------\nimport sys, os, time\n\n#Load the modules needed for RJGlass.\n\t\n\t\nfrom OpenGL.GL import *\nfrom OpenGL.GLUT import *\nfrom OpenGL.GLU import *\n\n#pygame needed for sound in server_only (so load either way)\nimport pygame\nfrom pygame.locals import *\nfrom pygame import image\n\t\nfrom guage import * #All add on guage functions colors etc. 
\n\n#This is code to import config file (config.py)\ntry:\n\timport config\nexcept ImportError:\n\t# We're in a py2exe, so we'll append an element to the (one element) \n\t# sys.path which points to Library.zip, to the directory that contains \n\t# Library.zip, allowing us to import config.py\n\t# Adds one level up from the Library.zip directory to the path, so import will go forward\n\tsys.path.append(os.path.split(sys.path[0])[0])\n\timport config\n\n\nclass screen_c(object):\n\t#This controls what is in each screen.\n\tdef __init__(self, x, guage_list=[]):\n\t\tself.guage_list = [] #list of guages to cycle through.\n\t\tself.guage_index = 0\n\t\tself.x = x\n\t\tself.y = 0\n\t\tself.width = 512\n\t\tself.heigth = 768\n\t\tself.add_guage_list(guage_list)\n\t\t\n\tdef add_guage_list(self,glist):\n\t\tfor g in glist:\n\t\t\tself.append_guage(guage_dict[g])\n\t\t\n\tdef append_guage(self,guage):\n\t\tself.guage_list.append(guage)\n\t\t\n\tdef cycle(self):\n\t\tself.guage_index +=1\n\t\tif self.guage_index >= len(self.guage_list):\n\t\t\tself.guage_index =0\n\t\t\t\n\tdef cycle_reverse(self):\n\t\tself.guage_index -=1\n\t\tif self.guage_index <0:\n\t\t\tself.guage_index = len(self.guage_list) -1\n\t\t\t\n\tdef active_guage(self):\n\t\treturn self.guage_list[self.guage_index]\t\t\t\n\t\n\t#this is a static function not specificaly for the screen.\n\t#the eventhandlers have references to the screens so it is easier to\n\t#get the guage references by name through this object.\n\tdef gauge_by_name(self,name):\n\t\treturn guage_dict[name]\n\t\t\t\n\tdef draw(self, aircraft):\n\t\tself.guage_active = self.guage_list[self.guage_index]\n\t\tself.guage_active.draw(aircraft, self.x, self.y)\n\n\ndef InitPyGame():\n\tglutInit(())\n\tpygame.init()\n\tif config.full_screen:\n\t\ts = pygame.display.set_mode((1024,768), DOUBLEBUF|OPENGL|FULLSCREEN)\n\telse:\n\t\ts = pygame.display.set_mode((1024,768), DOUBLEBUF|OPENGL)\n\treturn s\n\t\t\ndef InitView(smooth, width, 
heigth):\n\tglobal x_s, y_s, scissor\n\t\n\tglClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT) \n\tglLoadIdentity()\n\tglOrtho(0,width,0.0,heigth,-1.0,1.0)\n\t\n\tx_s = width/1024.0\n\ty_s = heigth/768.0\n\n\tglScalef(x_s, y_s, 1.0)\n\tscissor.x_s = x_s\n\tscissor.y_s = y_s\n\tif smooth:\n\t\t#Enable Smoothing Antianalising\n\t\tglEnable(GL_LINE_SMOOTH)\n\t\tglEnable(GL_BLEND)\n\t\t#glBlendFunc(GL_SRC_ALPHA, GL_ZERO)\n\t\tglBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)\n\t\tglHint(GL_LINE_SMOOTH_HINT, GL_DONT_CARE)\n\t\t#glDisable(GL_DEPTH_TEST)\n\t#Clear Screen\n\t#glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)\n\t\n\t\ndef DisplaySplash(filename, delay, window_x, window_y):\n\t#Display needs to be initialized first.\n\ti = image.load(filename)\n\tsplash_image = bitmap_image(i)\n\t#Determine the x and y coords to put in center of screen.\n\tsplash_x = (window_x / 2) - (splash_image.w/2)\n\tsplash_y = (window_y /2) - (splash_image.h/2)\n\tglRasterPos3f(splash_x,splash_y,0)\n\tglDrawPixels(splash_image.w, splash_image.h, GL_RGBA, GL_UNSIGNED_BYTE, splash_image.tostring)\n\tpygame.display.flip()\n\ttime.sleep(delay)\n\t\n\n\t\n\ndef DrawWindow(left_screen, right_screen):\n\t\n\tdef divider(): #Dividing vertical white line between instruments\n\t\tglColor(white)\n\t\tglLineWidth(2.0)\n\t\tglBegin(GL_LINES)\n\t\tglVertex2f(512.0, 0.0)\n\t\tglVertex2f(512.0, 768.0)\n\t\tglEnd()\n\t\t\n\tdef draw_nodata(x,y): #Draw no data text on screen.\n\t\tglColor(red)\n\t\tglLineWidth(5.0)\n\t\tglPushMatrix()\n\t\tglTranslatef(x,y,0)\n\t\tglScalef(0.4,0.4,1.0)\n\t\tglText(\"NO SIM DATA\", 100)\n\t\tglPopMatrix()\n\t\t\n\tglobal count\n\tdivider()\n\t#PFD.draw(aircraft_data,250,445)\n\tleft_screen.draw(aircraft_data)\n\t#ND.draw(aircraft_data,512+256, 400)\n\t#FMS.draw(aircraft_data,512+256, 0)\n\tright_screen.draw(aircraft_data)\n\tglDisable(GL_SCISSOR_TEST) #Disable any scissoring.\n\tdraw_FPS(20,740, aircraft_data.frame_time)\n\t#If Nodata is coming from Flight Sim, show on 
screen\n\tif aircraft_data.nodata:\n\t\tdraw_nodata(50,500)\n\t\n\t\n\tcount = count +1 #Used for FPS calc\n\t\ndef MainLoop(mode, server_only):\n\t#global window\n\tglobal starttime\n\tglobal count\n\tglobal mode_func, left_screen, right_screen, eventhandler\n\t# Start Event Processing Engine\t\n\tstarttime = time.time() # Used for FPS (Frame Per Second) Calculation\n\t\n\tif (server_only):\n\t\t#Set up correct function for selected mode\n\t\tmode_func = aircraft_data.get_mode_func(mode)\n\telse:\t\n\t\tleft_screen = screen_c(256,config.left_screen)\n\t\tright_screen = screen_c(512+256,config.right_screen)\n\t#\tleft_screen.add_guage_list(config.left_screen)\n\t#\tright_screen.add_guage_list(config.right_screen)\n\t\t#Set up correct function for selected mode\n\t\tmode_func = aircraft_data.get_mode_func(mode, left_screen, right_screen)\n\t\n\t#Setup Keyboard\n\t#keys.setup_lists(aircraft_data)\n\t#Inititalize View\n\t#left_screen = screen_c(256, [PFD, ND, FMS])\n\t\n\t\teventhandler = event_handler.event_handler_c(aircraft_data,FMS, right_screen, left_screen)\n\t\n\t\t#Load textures, and guages that use them\n\t\tFMS.load_texture()\n\t\tEICAS1.load_texture()\n\t\tEICAS2.load_texture()\n\t\tRADIO.load_texture()\n\t\n\tif server_only:\n\t\tserver_loop()\n\telse:\n\t\tgraphic_loop()\n\ndef graphic_loop():\n\t#This is the loop for the non server mode. Gauges drawn.\n\twhile not (aircraft_data.quit_flag):\n\t\tglClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT) #Clear Screen\t\n\t\t#Update globaltime\n\t\taircraft_data.globaltime = time.time()\n\t\tglobaltime.update(time.time())\n\t\tDrawWindow(left_screen, right_screen)\n\t\tpygame.display.flip() #Update screen\n\t\tmode_func() #Run aircraft mode function, to do all teh calaculations etc.\n\t\t\n\t\t# Check for keypresses\n\t\teventhandler.check_events(pygame.event.get(), globaltime.value)\t\t\ndef server_loop():\n\t#This is the loop for the server only mode. 
No Guages Drawn\n\twhile not (aircraft_data.quit_flag):\n\t\t#Update globaltime\n\t\taircraft_data.globaltime = time.time()\n\t\tglobaltime.update(time.time())\n\t\tmode_func() #Run aircraft mode function, to do all teh calaculations etc.\n\t\ttime.sleep(0.01) #Throw in some time delay, since no guages are being drawn.\n\t\t\n\t\t# Check for keypresses\n\t\t#eventhandler.check_events(pygame.event.get(), globaltime.value)\t\t\n\t\n\t\n\n\ndef Init_Graphics(x,y):\n\tInitPyGame()\n\tInitView(True, x,y)\n\t\ndef Initialize(server_only):\n\t#if server_only True then server will just be run, No Graphics\n\t#Initialize count for FPS calc\n\tglobal count\n\tcount = 0\n\tif (not server_only):\n\t\tInit_Graphics(config.window_x, config.window_y)\n\t\t#Draw Splash Screen\n\t\tif config.splash:\n\t\t\tDisplaySplash(config.splash_filename, config.splash_delay, config.window_x, config.window_y)\n\n\n\t\n\t\ndef ShutDown(mode, server_only):\n\t#Close LogFile\n\tdatafile.close()\n\t#Close pygame mixer\n\tpygame.mixer.quit()\n\t#Print average Frames per second on shutdown\n\tprint \"FPS \", count / (time.time() - starttime)\n\t#Try to kill the thread if it exists. 
Closes it down on exit\t\t\t\t\n\taircraft_data.AP.quit() #only here to close debugging files if present.\n\tif ((mode != config.TEST) & (mode != config.CLIENT)): #If simconnected connected, kill the thread.\n\t\taircraft_data.kill_SimConnect()\n\ndef CheckArg(arg, mode, server_only, addr):\n\tif 'server' in arg:\n\t\tserver_only = True\n\telif 'guage' in arg:\t\n\t\tserver_only = False\n\t\t\n\tif 'client' in arg:\n\t\tmode = config.CLIENT\n\telif 'test' in arg:\n\t\tmode = config.TEST\n\t\n\tfor a in arg:\n\t\tif 'addr' in a:\n\t\t\taddr = a.split('=')[1]\n\t\t\t\t\n\treturn mode, server_only, addr\n\t\n\n#===========================================================================\n#Main program starts here\n#===========================================================================\n#Check arguments first, and get mode and server_only flags\nmode, server_only, addr = CheckArg(sys.argv, config.mode, config.server_only, config.addr)\n#config.addr = addr\n#print addr\nInitialize(server_only)\n#Import guage files.\nimport aircraft #Does all of the aircraft_data\nimport event_handler #Handles all keyboard commands\nimport variable\n\t\nif (not server_only):\n\timport PFD_mod\n\timport ND_mod\n\timport EICAS1_mod\n\timport EICAS2_mod\n\timport FMS_guage\n\timport radio_mod\n\n#Create Guages\n\naircraft_data = aircraft.data()\nvariables = variable.variable_c(aircraft_data)\n\nif (not server_only):\n\tPFD = PFD_mod.PFD_Guage()\n\tND = ND_mod.ND_Guage()\n\tFMS = FMS_guage.FMS_guage_c()\n\tEICAS1 = EICAS1_mod.EICAS1_guage()\n\tEICAS2 = EICAS2_mod.EICAS2_guage()\n\tND.initialize(aircraft_data)\n\tRADIO = radio_mod.radio_guage()\n\t\n\tguage_dict= { \"RADIO\":RADIO,\"PFD\":PFD,\"ND\":ND,\"FMS\":FMS,\n\t\t\"EICAS1\":EICAS1,\"EICAS2\":EICAS2 }\n\t\t\n\nprint \"Main Loop\"\n#Run main, and get window size and operation mode from config file. config.py\nMainLoop(mode, server_only)\n#===================\n# Shuting Down\n#===================\nShutDown(mode, server_only)\t\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class TestCategory(TestCase):
def test_str(self):
category = Category(name='Test Category')
self.assertEquals(str(category), 'Test Category')
class TestTag(TestCase):
def test_str(self):
tag = Tag(name='Test Tag')
self.assertEquals(str(tag), 'Test Tag')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestPost(TestCase):
<|reserved_special_token_0|>
class TestCategory(TestCase):
def test_str(self):
category = Category(name='Test Category')
self.assertEquals(str(category), 'Test Category')
class TestTag(TestCase):
def test_str(self):
tag = Tag(name='Test Tag')
self.assertEquals(str(tag), 'Test Tag')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestPost(TestCase):
def test_str(self):
my_title = Post(title='This is a basic title for a basic test case')
self.assertEquals(str(my_title),
'This is a basic title for a basic test case')
class TestCategory(TestCase):
def test_str(self):
category = Category(name='Test Category')
self.assertEquals(str(category), 'Test Category')
class TestTag(TestCase):
def test_str(self):
tag = Tag(name='Test Tag')
self.assertEquals(str(tag), 'Test Tag')
<|reserved_special_token_1|>
from django.test import TestCase
from .models import Post, Category, Tag
class TestPost(TestCase):
def test_str(self):
my_title = Post(title='This is a basic title for a basic test case')
self.assertEquals(str(my_title),
'This is a basic title for a basic test case')
class TestCategory(TestCase):
def test_str(self):
category = Category(name='Test Category')
self.assertEquals(str(category), 'Test Category')
class TestTag(TestCase):
def test_str(self):
tag = Tag(name='Test Tag')
self.assertEquals(str(tag), 'Test Tag')
<|reserved_special_token_1|>
from django.test import TestCase
from .models import Post, Category, Tag
# Create your tests here.
class TestPost(TestCase):
def test_str(self):
my_title = Post(title='This is a basic title for a basic test case')
self.assertEquals(str(my_title), 'This is a basic title for a basic test case')
class TestCategory(TestCase):
def test_str(self):
category = Category(name='Test Category')
self.assertEquals(str(category), 'Test Category')
class TestTag(TestCase):
def test_str(self):
tag = Tag(name='Test Tag')
self.assertEquals(str(tag), 'Test Tag')
|
flexible
|
{
"blob_id": "825c9510b055c0fa570f577b1c9616e8bde9c98b",
"index": 7653,
"step-1": "<mask token>\n\n\nclass TestCategory(TestCase):\n\n def test_str(self):\n category = Category(name='Test Category')\n self.assertEquals(str(category), 'Test Category')\n\n\nclass TestTag(TestCase):\n\n def test_str(self):\n tag = Tag(name='Test Tag')\n self.assertEquals(str(tag), 'Test Tag')\n",
"step-2": "<mask token>\n\n\nclass TestPost(TestCase):\n <mask token>\n\n\nclass TestCategory(TestCase):\n\n def test_str(self):\n category = Category(name='Test Category')\n self.assertEquals(str(category), 'Test Category')\n\n\nclass TestTag(TestCase):\n\n def test_str(self):\n tag = Tag(name='Test Tag')\n self.assertEquals(str(tag), 'Test Tag')\n",
"step-3": "<mask token>\n\n\nclass TestPost(TestCase):\n\n def test_str(self):\n my_title = Post(title='This is a basic title for a basic test case')\n self.assertEquals(str(my_title),\n 'This is a basic title for a basic test case')\n\n\nclass TestCategory(TestCase):\n\n def test_str(self):\n category = Category(name='Test Category')\n self.assertEquals(str(category), 'Test Category')\n\n\nclass TestTag(TestCase):\n\n def test_str(self):\n tag = Tag(name='Test Tag')\n self.assertEquals(str(tag), 'Test Tag')\n",
"step-4": "from django.test import TestCase\nfrom .models import Post, Category, Tag\n\n\nclass TestPost(TestCase):\n\n def test_str(self):\n my_title = Post(title='This is a basic title for a basic test case')\n self.assertEquals(str(my_title),\n 'This is a basic title for a basic test case')\n\n\nclass TestCategory(TestCase):\n\n def test_str(self):\n category = Category(name='Test Category')\n self.assertEquals(str(category), 'Test Category')\n\n\nclass TestTag(TestCase):\n\n def test_str(self):\n tag = Tag(name='Test Tag')\n self.assertEquals(str(tag), 'Test Tag')\n",
"step-5": "from django.test import TestCase\n\nfrom .models import Post, Category, Tag\n\n# Create your tests here.\n\nclass TestPost(TestCase):\n\n def test_str(self):\n my_title = Post(title='This is a basic title for a basic test case')\n self.assertEquals(str(my_title), 'This is a basic title for a basic test case')\n\nclass TestCategory(TestCase):\n\n def test_str(self):\n category = Category(name='Test Category')\n self.assertEquals(str(category), 'Test Category')\n\nclass TestTag(TestCase):\n\n def test_str(self):\n tag = Tag(name='Test Tag')\n self.assertEquals(str(tag), 'Test Tag')\n\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
# project/tests/test_tmdb.py
import unittest
import json
from project.server import db
from project.server.models import Tmdb
from project.tests.base import BaseTestCase
class TestTmdb(BaseTestCase):
"""
Testing if we have the good responses from the api
"""
def test_discover(self):
""" Testing the TMDB API discover endpoint """
response = Tmdb.discover()
self.assertTrue(int(response.status_code) == 200)
data = response.json()
self.assertTrue(isinstance(data['results'], list))
# TODO check if all the shows are in the good format (can be from_dict/to_dict)
def test_search(self):
""" Testing the TMDB API search endpoint """
response = Tmdb.search('ozark')
self.assertTrue(int(response.status_code) == 200)
data = response.json()
self.assertTrue(isinstance(data['results'], list))
# TODO check if all the shows are in the good format (can be from_dict/to_dict)
def test_detail(self):
""" Testing the TMDB API get show """
response = Tmdb.detail(69740)
self.assertTrue(int(response.status_code) == 200)
data = response.json()
self.assertTrue(data['id'])
self.assertTrue(data['name'])
# TODO check if all the shows are in the good format (can be from_dict/to_dict)
def test_similar(self):
""" Testing the TMDB API similar endpoint """
response = Tmdb.similar(69740)
self.assertTrue(int(response.status_code) == 200)
data = response.json()
self.assertTrue(isinstance(data['results'], list))
# TODO check if all the shows are in the good format (can be from_dict/to_dict)
def test_seasons(self):
""" Testing the TMDB API seasons endpoint """
response = Tmdb.season(tmdb_show_id = 69740, season_number = 1)
self.assertTrue(int(response.status_code) == 200)
data = response.json()
self.assertTrue(isinstance(data['episodes'], list))
# TODO check if all the shows are in the good format (can be from_dict/to_dict)
if __name__ == '__main__':
unittest.main()
|
normal
|
{
"blob_id": "9e9403ea1c128e07803d080b337003055759c5ae",
"index": 4507,
"step-1": "<mask token>\n\n\nclass TestTmdb(BaseTestCase):\n <mask token>\n\n def test_discover(self):\n \"\"\" Testing the TMDB API discover endpoint \"\"\"\n response = Tmdb.discover()\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n <mask token>\n <mask token>\n\n def test_similar(self):\n \"\"\" Testing the TMDB API similar endpoint \"\"\"\n response = Tmdb.similar(69740)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n\n def test_seasons(self):\n \"\"\" Testing the TMDB API seasons endpoint \"\"\"\n response = Tmdb.season(tmdb_show_id=69740, season_number=1)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['episodes'], list))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestTmdb(BaseTestCase):\n <mask token>\n\n def test_discover(self):\n \"\"\" Testing the TMDB API discover endpoint \"\"\"\n response = Tmdb.discover()\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n\n def test_search(self):\n \"\"\" Testing the TMDB API search endpoint \"\"\"\n response = Tmdb.search('ozark')\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n <mask token>\n\n def test_similar(self):\n \"\"\" Testing the TMDB API similar endpoint \"\"\"\n response = Tmdb.similar(69740)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n\n def test_seasons(self):\n \"\"\" Testing the TMDB API seasons endpoint \"\"\"\n response = Tmdb.season(tmdb_show_id=69740, season_number=1)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['episodes'], list))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestTmdb(BaseTestCase):\n <mask token>\n\n def test_discover(self):\n \"\"\" Testing the TMDB API discover endpoint \"\"\"\n response = Tmdb.discover()\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n\n def test_search(self):\n \"\"\" Testing the TMDB API search endpoint \"\"\"\n response = Tmdb.search('ozark')\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n\n def test_detail(self):\n \"\"\" Testing the TMDB API get show \"\"\"\n response = Tmdb.detail(69740)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(data['id'])\n self.assertTrue(data['name'])\n\n def test_similar(self):\n \"\"\" Testing the TMDB API similar endpoint \"\"\"\n response = Tmdb.similar(69740)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n\n def test_seasons(self):\n \"\"\" Testing the TMDB API seasons endpoint \"\"\"\n response = Tmdb.season(tmdb_show_id=69740, season_number=1)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['episodes'], list))\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass TestTmdb(BaseTestCase):\n \"\"\"\n Testing if we have the good responses from the api\n \"\"\"\n\n def test_discover(self):\n \"\"\" Testing the TMDB API discover endpoint \"\"\"\n response = Tmdb.discover()\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n\n def test_search(self):\n \"\"\" Testing the TMDB API search endpoint \"\"\"\n response = Tmdb.search('ozark')\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n\n def test_detail(self):\n \"\"\" Testing the TMDB API get show \"\"\"\n response = Tmdb.detail(69740)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(data['id'])\n self.assertTrue(data['name'])\n\n def test_similar(self):\n \"\"\" Testing the TMDB API similar endpoint \"\"\"\n response = Tmdb.similar(69740)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n\n def test_seasons(self):\n \"\"\" Testing the TMDB API seasons endpoint \"\"\"\n response = Tmdb.season(tmdb_show_id=69740, season_number=1)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['episodes'], list))\n\n\n<mask token>\n",
"step-5": "# project/tests/test_tmdb.py\n\n\nimport unittest\nimport json\n\nfrom project.server import db\nfrom project.server.models import Tmdb\nfrom project.tests.base import BaseTestCase\n\n\nclass TestTmdb(BaseTestCase):\n \"\"\"\n Testing if we have the good responses from the api\n \"\"\"\n def test_discover(self):\n \"\"\" Testing the TMDB API discover endpoint \"\"\"\n response = Tmdb.discover()\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n # TODO check if all the shows are in the good format (can be from_dict/to_dict)\n\n def test_search(self):\n \"\"\" Testing the TMDB API search endpoint \"\"\"\n response = Tmdb.search('ozark')\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n # TODO check if all the shows are in the good format (can be from_dict/to_dict)\n\n def test_detail(self):\n \"\"\" Testing the TMDB API get show \"\"\"\n response = Tmdb.detail(69740)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(data['id'])\n self.assertTrue(data['name'])\n # TODO check if all the shows are in the good format (can be from_dict/to_dict)\n\n def test_similar(self):\n \"\"\" Testing the TMDB API similar endpoint \"\"\"\n response = Tmdb.similar(69740)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['results'], list))\n # TODO check if all the shows are in the good format (can be from_dict/to_dict)\n \n def test_seasons(self):\n \"\"\" Testing the TMDB API seasons endpoint \"\"\"\n response = Tmdb.season(tmdb_show_id = 69740, season_number = 1)\n self.assertTrue(int(response.status_code) == 200)\n data = response.json()\n self.assertTrue(isinstance(data['episodes'], list))\n # TODO check if all the shows are in the good format (can be from_dict/to_dict)\n \n\nif __name__ == '__main__':\n 
unittest.main()\n",
"step-ids": [
4,
5,
6,
7,
10
]
}
|
[
4,
5,
6,
7,
10
] |
for row in range(7):
for col in range(5):
if (col == 0) or (row % 3 == 0):
print("*", end=" ")
else:
print(" ", end=" ")
print()
|
normal
|
{
"blob_id": "634c826d30b22c6061531c514914e9ca62b21605",
"index": 7158,
"step-1": "<mask token>\n",
"step-2": "for row in range(7):\n for col in range(5):\n if col == 0 or row % 3 == 0:\n print('*', end=' ')\n else:\n print(' ', end=' ')\n print()\n",
"step-3": "for row in range(7):\r\n for col in range(5):\r\n if (col == 0) or (row % 3 == 0):\r\n print(\"*\", end=\" \")\r\n else:\r\n print(\" \", end=\" \")\r\n print()\r\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
"""
Day 2
"""
with open('input.txt', 'r') as f:
lines = f.read()
lines = lines.split('\n')[:-1]
lines = [l.split(' ') for l in lines]
valid = 0
new_valid = 0
for cur_pw in lines:
letter = cur_pw[1].strip(':')
amount = cur_pw[2].count(letter)
rule = cur_pw[0].split('-')
rule = [int(r) for r in rule]
if amount >= rule[0] and amount <= rule[1]:
valid += 1
occurences = cur_pw[2][rule[0] - 1] + cur_pw[2][rule[1] - 1]
if occurences.count(letter) == 1:
new_valid += 1
print(valid)
print(new_valid)
|
normal
|
{
"blob_id": "46a3c3777d90976c7d39772d2e94430506d3acd7",
"index": 8025,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('input.txt', 'r') as f:\n lines = f.read()\n<mask token>\nfor cur_pw in lines:\n letter = cur_pw[1].strip(':')\n amount = cur_pw[2].count(letter)\n rule = cur_pw[0].split('-')\n rule = [int(r) for r in rule]\n if amount >= rule[0] and amount <= rule[1]:\n valid += 1\n occurences = cur_pw[2][rule[0] - 1] + cur_pw[2][rule[1] - 1]\n if occurences.count(letter) == 1:\n new_valid += 1\nprint(valid)\nprint(new_valid)\n",
"step-3": "<mask token>\nwith open('input.txt', 'r') as f:\n lines = f.read()\nlines = lines.split('\\n')[:-1]\nlines = [l.split(' ') for l in lines]\nvalid = 0\nnew_valid = 0\nfor cur_pw in lines:\n letter = cur_pw[1].strip(':')\n amount = cur_pw[2].count(letter)\n rule = cur_pw[0].split('-')\n rule = [int(r) for r in rule]\n if amount >= rule[0] and amount <= rule[1]:\n valid += 1\n occurences = cur_pw[2][rule[0] - 1] + cur_pw[2][rule[1] - 1]\n if occurences.count(letter) == 1:\n new_valid += 1\nprint(valid)\nprint(new_valid)\n",
"step-4": "\"\"\"\nDay 2\n\"\"\"\n\nwith open('input.txt', 'r') as f:\n lines = f.read()\n\nlines = lines.split('\\n')[:-1]\nlines = [l.split(' ') for l in lines]\n\nvalid = 0\nnew_valid = 0\nfor cur_pw in lines:\n\n letter = cur_pw[1].strip(':')\n amount = cur_pw[2].count(letter)\n rule = cur_pw[0].split('-')\n rule = [int(r) for r in rule]\n\n if amount >= rule[0] and amount <= rule[1]:\n valid += 1\n\n occurences = cur_pw[2][rule[0] - 1] + cur_pw[2][rule[1] - 1]\n if occurences.count(letter) == 1:\n new_valid += 1\n\nprint(valid)\nprint(new_valid)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""
Unpacks and preprocesses all of the data from the tarball of partial data,
which includes the flats and dark frames.
"""
import tools.unpack
import util.files
import util.dark
import util.flat
def main():
tools.unpack.main()
util.files.main()
util.dark.main()
util.flat.main()
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "3667651697ac1c093d48fe2c4baa4b4dbdf20f8a",
"index": 6832,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n tools.unpack.main()\n util.files.main()\n util.dark.main()\n util.flat.main()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n tools.unpack.main()\n util.files.main()\n util.dark.main()\n util.flat.main()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nimport tools.unpack\nimport util.files\nimport util.dark\nimport util.flat\n\n\ndef main():\n tools.unpack.main()\n util.files.main()\n util.dark.main()\n util.flat.main()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "\"\"\"\nUnpacks and preprocesses all of the data from the tarball of partial data,\nwhich includes the flats and dark frames.\n\"\"\"\n\nimport tools.unpack\nimport util.files\nimport util.dark\nimport util.flat\n\ndef main():\n tools.unpack.main()\n util.files.main()\n util.dark.main()\n util.flat.main()\n\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class MoveActorsAction(Action):
<|reserved_special_token_0|>
def execute(self, cast):
"""Executes the action using the given actors.
Args:
cast (dict): The game actors {key: tag, value: list}.
"""
for group in cast.values():
for actor in group:
if actor.change_x != 0 or actor.change_y != 0:
self._move_actor(actor)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MoveActorsAction(Action):
<|reserved_special_token_0|>
def execute(self, cast):
"""Executes the action using the given actors.
Args:
cast (dict): The game actors {key: tag, value: list}.
"""
for group in cast.values():
for actor in group:
if actor.change_x != 0 or actor.change_y != 0:
self._move_actor(actor)
def _move_actor(self, actor):
"""Moves the given actor to its next position according to its
velocity. Will wrap the position from one side of the screen to the
other when it reaches the edge in either direction.
Args:
actor (Actor): The actor to move.
"""
actor.center_x = actor.center_x + actor.change_x
actor.center_y = actor.center_y + actor.change_y
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MoveActorsAction(Action):
"""A code template for moving actors. The responsibility of this class of
objects is move any actor that has a velocity more than zero.
Stereotype:
Controller
Attributes:
_input_service (InputService): An instance of InputService.
"""
def execute(self, cast):
"""Executes the action using the given actors.
Args:
cast (dict): The game actors {key: tag, value: list}.
"""
for group in cast.values():
for actor in group:
if actor.change_x != 0 or actor.change_y != 0:
self._move_actor(actor)
def _move_actor(self, actor):
"""Moves the given actor to its next position according to its
velocity. Will wrap the position from one side of the screen to the
other when it reaches the edge in either direction.
Args:
actor (Actor): The actor to move.
"""
actor.center_x = actor.center_x + actor.change_x
actor.center_y = actor.center_y + actor.change_y
<|reserved_special_token_1|>
from data import constants
from data.action import Action
from data.point import Point
class MoveActorsAction(Action):
"""A code template for moving actors. The responsibility of this class of
objects is move any actor that has a velocity more than zero.
Stereotype:
Controller
Attributes:
_input_service (InputService): An instance of InputService.
"""
def execute(self, cast):
"""Executes the action using the given actors.
Args:
cast (dict): The game actors {key: tag, value: list}.
"""
for group in cast.values():
for actor in group:
if actor.change_x != 0 or actor.change_y != 0:
self._move_actor(actor)
def _move_actor(self, actor):
"""Moves the given actor to its next position according to its
velocity. Will wrap the position from one side of the screen to the
other when it reaches the edge in either direction.
Args:
actor (Actor): The actor to move.
"""
actor.center_x = actor.center_x + actor.change_x
actor.center_y = actor.center_y + actor.change_y
<|reserved_special_token_1|>
from data import constants
from data.action import Action
from data.point import Point
class MoveActorsAction(Action):
"""A code template for moving actors. The responsibility of this class of
objects is move any actor that has a velocity more than zero.
Stereotype:
Controller
Attributes:
_input_service (InputService): An instance of InputService.
"""
def execute(self, cast):
"""Executes the action using the given actors.
Args:
cast (dict): The game actors {key: tag, value: list}.
"""
for group in cast.values():
for actor in group:
# It would be nice to add something to a base Actor class
# to detect is_zero()...
# if not actor.get_velocity().is_zero():
if actor.change_x != 0 or actor.change_y != 0:
self._move_actor(actor)
def _move_actor(self, actor):
"""Moves the given actor to its next position according to its
velocity. Will wrap the position from one side of the screen to the
other when it reaches the edge in either direction.
Args:
actor (Actor): The actor to move.
"""
actor.center_x = actor.center_x + actor.change_x
actor.center_y = actor.center_y + actor.change_y
|
flexible
|
{
"blob_id": "3be7183b5c1d86ee0ebfdea89c6459efe89510f8",
"index": 6103,
"step-1": "<mask token>\n\n\nclass MoveActorsAction(Action):\n <mask token>\n\n def execute(self, cast):\n \"\"\"Executes the action using the given actors.\n\n Args:\n cast (dict): The game actors {key: tag, value: list}.\n \"\"\"\n for group in cast.values():\n for actor in group:\n if actor.change_x != 0 or actor.change_y != 0:\n self._move_actor(actor)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass MoveActorsAction(Action):\n <mask token>\n\n def execute(self, cast):\n \"\"\"Executes the action using the given actors.\n\n Args:\n cast (dict): The game actors {key: tag, value: list}.\n \"\"\"\n for group in cast.values():\n for actor in group:\n if actor.change_x != 0 or actor.change_y != 0:\n self._move_actor(actor)\n\n def _move_actor(self, actor):\n \"\"\"Moves the given actor to its next position according to its \n velocity. Will wrap the position from one side of the screen to the \n other when it reaches the edge in either direction.\n \n Args:\n actor (Actor): The actor to move.\n \"\"\"\n actor.center_x = actor.center_x + actor.change_x\n actor.center_y = actor.center_y + actor.change_y\n",
"step-3": "<mask token>\n\n\nclass MoveActorsAction(Action):\n \"\"\"A code template for moving actors. The responsibility of this class of\n objects is move any actor that has a velocity more than zero.\n \n Stereotype:\n Controller\n\n Attributes:\n _input_service (InputService): An instance of InputService.\n \"\"\"\n\n def execute(self, cast):\n \"\"\"Executes the action using the given actors.\n\n Args:\n cast (dict): The game actors {key: tag, value: list}.\n \"\"\"\n for group in cast.values():\n for actor in group:\n if actor.change_x != 0 or actor.change_y != 0:\n self._move_actor(actor)\n\n def _move_actor(self, actor):\n \"\"\"Moves the given actor to its next position according to its \n velocity. Will wrap the position from one side of the screen to the \n other when it reaches the edge in either direction.\n \n Args:\n actor (Actor): The actor to move.\n \"\"\"\n actor.center_x = actor.center_x + actor.change_x\n actor.center_y = actor.center_y + actor.change_y\n",
"step-4": "from data import constants\nfrom data.action import Action\nfrom data.point import Point\n\n\nclass MoveActorsAction(Action):\n \"\"\"A code template for moving actors. The responsibility of this class of\n objects is move any actor that has a velocity more than zero.\n \n Stereotype:\n Controller\n\n Attributes:\n _input_service (InputService): An instance of InputService.\n \"\"\"\n\n def execute(self, cast):\n \"\"\"Executes the action using the given actors.\n\n Args:\n cast (dict): The game actors {key: tag, value: list}.\n \"\"\"\n for group in cast.values():\n for actor in group:\n if actor.change_x != 0 or actor.change_y != 0:\n self._move_actor(actor)\n\n def _move_actor(self, actor):\n \"\"\"Moves the given actor to its next position according to its \n velocity. Will wrap the position from one side of the screen to the \n other when it reaches the edge in either direction.\n \n Args:\n actor (Actor): The actor to move.\n \"\"\"\n actor.center_x = actor.center_x + actor.change_x\n actor.center_y = actor.center_y + actor.change_y\n",
"step-5": "from data import constants\nfrom data.action import Action\nfrom data.point import Point\n\nclass MoveActorsAction(Action):\n \"\"\"A code template for moving actors. The responsibility of this class of\n objects is move any actor that has a velocity more than zero.\n \n Stereotype:\n Controller\n\n Attributes:\n _input_service (InputService): An instance of InputService.\n \"\"\"\n\n def execute(self, cast):\n \"\"\"Executes the action using the given actors.\n\n Args:\n cast (dict): The game actors {key: tag, value: list}.\n \"\"\"\n for group in cast.values():\n for actor in group:\n # It would be nice to add something to a base Actor class\n # to detect is_zero()...\n # if not actor.get_velocity().is_zero():\n\n if actor.change_x != 0 or actor.change_y != 0:\n self._move_actor(actor)\n\n def _move_actor(self, actor):\n \"\"\"Moves the given actor to its next position according to its \n velocity. Will wrap the position from one side of the screen to the \n other when it reaches the edge in either direction.\n \n Args:\n actor (Actor): The actor to move.\n \"\"\"\n\n actor.center_x = actor.center_x + actor.change_x\n actor.center_y = actor.center_y + actor.change_y\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# -*- coding: utf-8 -*-
file1 = raw_input("Enter the path of your first file: ")
file2 = raw_input("Enter the path of your second file: ")
Basex = open(file1).read().split()
Basey = open(file2).read().split()
if Basex != Basey:
print("The files are different!")
else:
print("The files are the same!")
|
normal
|
{
"blob_id": "661d82adc7d0746635fb57abf6d0e70ee615ada4",
"index": 5974,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif Basex != Basey:\n print('The files are different!')\nelse:\n print('The files are the same!')\n",
"step-3": "file1 = raw_input('Enter the path of your first file: ')\nfile2 = raw_input('Enter the path of your second file: ')\nBasex = open(file1).read().split()\nBasey = open(file2).read().split()\nif Basex != Basey:\n print('The files are different!')\nelse:\n print('The files are the same!')\n",
"step-4": "# -*- coding: utf-8 -*-\nfile1 = raw_input(\"Enter the path of your first file: \")\nfile2 = raw_input(\"Enter the path of your second file: \")\nBasex = open(file1).read().split()\nBasey = open(file2).read().split()\nif Basex != Basey:\n print(\"The files are different!\")\nelse:\n print(\"The files are the same!\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib.auth.views import login, logout
from django.contrib import admin
from magmag_core.app import application
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from magmag import settings
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'magmag.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
(r'', include(application.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^logout$', logout,name='logout' ),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
normal
|
{
"blob_id": "538e582df7bfcf281973a5296adc14ca067be0a5",
"index": 2581,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nadmin.autodiscover()\n<mask token>\nurlpatterns += staticfiles_urlpatterns()\nurlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n",
"step-3": "<mask token>\nadmin.autodiscover()\nurlpatterns = patterns('', ('', include(application.urls)), url('^admin/',\n include(admin.site.urls)), url('^logout$', logout, name='logout'))\nurlpatterns += staticfiles_urlpatterns()\nurlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n",
"step-4": "from django.conf.urls import patterns, include, url\nfrom django.contrib.auth.views import login, logout\nfrom django.contrib import admin\nfrom magmag_core.app import application\nfrom django.contrib.staticfiles.urls import staticfiles_urlpatterns\nfrom django.conf.urls.static import static\nfrom magmag import settings\nadmin.autodiscover()\nurlpatterns = patterns('', ('', include(application.urls)), url('^admin/',\n include(admin.site.urls)), url('^logout$', logout, name='logout'))\nurlpatterns += staticfiles_urlpatterns()\nurlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom django.conf.urls import patterns, include, url\nfrom django.contrib.auth.views import login, logout\nfrom django.contrib import admin\nfrom magmag_core.app import application\nfrom django.contrib.staticfiles.urls import staticfiles_urlpatterns\nfrom django.conf.urls.static import static\nfrom magmag import settings\nadmin.autodiscover()\n\nurlpatterns = patterns('',\n # Examples:\n # url(r'^$', 'magmag.views.home', name='home'),\n # url(r'^blog/', include('blog.urls')),\n (r'', include(application.urls)),\n url(r'^admin/', include(admin.site.urls)),\n url(r'^logout$', logout,name='logout' ),\n)\nurlpatterns += staticfiles_urlpatterns()\nurlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
driver.get('https://www.facebook.com')
<|reserved_special_token_0|>
print(soup.prettify())
driver.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
driver = webdriver.Chrome('/Users/UserName/Downloads/chromedriver')
driver.get('https://www.facebook.com')
soup = BeautifulSoup(driver.page_source, 'lxml')
print(soup.prettify())
driver.close()
<|reserved_special_token_1|>
from selenium import webdriver
from time import sleep
from bs4 import BeautifulSoup
<|reserved_special_token_0|>
driver = webdriver.Chrome('/Users/UserName/Downloads/chromedriver')
driver.get('https://www.facebook.com')
soup = BeautifulSoup(driver.page_source, 'lxml')
print(soup.prettify())
driver.close()
<|reserved_special_token_1|>
from selenium import webdriver
from time import sleep
from bs4 import BeautifulSoup
"""
With selenium we need web driver for our browser.
If you use google chrome, you can download chrome driver from here:
http://chromedriver.chromium.org/downloads
In linux (my OS) I extracted downloaded zip file and placed
exe file in "/home/UserName/bin"
I did this in order not to write chrome driver path everytime
"""
# IF you did not locate exe file in user/bin or user/local/bin
# then you have to specify the driver path while creating driver object
# driver object is browser which you can programatically control
driver = webdriver.Chrome('/Users/UserName/Downloads/chromedriver')
# open some page using get method
driver.get('https://www.facebook.com')
# driver.page_source
# Opens facebook's source html file
soup = BeautifulSoup(driver.page_source,'lxml')
print(soup.prettify())
# close webdriver object
driver.close()
|
flexible
|
{
"blob_id": "03b2b722832eb46f3f81618f70fd0475f1f08c94",
"index": 2997,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndriver.get('https://www.facebook.com')\n<mask token>\nprint(soup.prettify())\ndriver.close()\n",
"step-3": "<mask token>\ndriver = webdriver.Chrome('/Users/UserName/Downloads/chromedriver')\ndriver.get('https://www.facebook.com')\nsoup = BeautifulSoup(driver.page_source, 'lxml')\nprint(soup.prettify())\ndriver.close()\n",
"step-4": "from selenium import webdriver\nfrom time import sleep\nfrom bs4 import BeautifulSoup\n<mask token>\ndriver = webdriver.Chrome('/Users/UserName/Downloads/chromedriver')\ndriver.get('https://www.facebook.com')\nsoup = BeautifulSoup(driver.page_source, 'lxml')\nprint(soup.prettify())\ndriver.close()\n",
"step-5": "\n\n\nfrom selenium import webdriver\nfrom time import sleep\nfrom bs4 import BeautifulSoup\n\n\n\n\n\"\"\"\n\nWith selenium we need web driver for our browser.\nIf you use google chrome, you can download chrome driver from here:\n \nhttp://chromedriver.chromium.org/downloads\n\n\nIn linux (my OS) I extracted downloaded zip file and placed\nexe file in \"/home/UserName/bin\"\n\n\nI did this in order not to write chrome driver path everytime\n\n\n\"\"\"\n\n# IF you did not locate exe file in user/bin or user/local/bin\n# then you have to specify the driver path while creating driver object\n# driver object is browser which you can programatically control\ndriver = webdriver.Chrome('/Users/UserName/Downloads/chromedriver')\n\n\n\n# open some page using get method\ndriver.get('https://www.facebook.com')\n\n\n# driver.page_source\n\n# Opens facebook's source html file\nsoup = BeautifulSoup(driver.page_source,'lxml')\n\nprint(soup.prettify())\n\n\n\n# close webdriver object\ndriver.close()\n\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
@override_settings(RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB',
'django_rethinkci'))
class APITests(TestCase):
@classmethod
def setUpClass(cls):
super(APITests, cls).setUpClass()
cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.
RETHINK_DB_PORT)
try:
r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
except:
pass
r.db_create(settings.RETHINK_DB_DB).run(cls.conn)
cls.conn.db = settings.RETHINK_DB_DB
management.call_command('syncrethinkdb', verbosity=0)
@classmethod
def tearDownClass(cls):
r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
super(APITests, cls).tearDownClass()
def tearDown(self):
for t in ['django_rethink_test', 'history',
'django_rethink_test_reviewed',
'django_rethink_test_history_permissions',
'django_rethink_test_history_has_read_permission']:
r.table(t).delete().run(self.conn)
super(APITests, self).tearDown()
def create_user(self, username='tester', password='tester',
is_superuser=True, groups=[], **kwargs):
user = get_user_model().objects.create(username=username, password=
make_password(password), is_superuser=is_superuser, **kwargs)
for name in groups:
group, created = Group.objects.get_or_create(name=name)
user.groups.add(group)
auth = 'Basic %s' % base64.b64encode(('%s:%s' % (username, password
)).encode('ascii')).decode('ascii')
return user, auth
def test_history_no_type(self):
super_user, super_auth = self.create_user()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),
HTTP_AUTHORIZATION=super_auth)
self.assertEqual(response.status_code, 404)
def test_history_with_permissions(self):
super_user, super_auth = self.create_user()
luser, lauth = self.create_user(username='luser', is_superuser=
False, groups=['group1'])
serializer = TestHistoryPermissionsSerializer(None, data={'field1':
'test1', 'user': luser.username, 'permissions': {'write': [
'group1']}}, context={'username': luser.username})
serializer.is_valid(raise_exception=True)
test1 = serializer.save()
serializer = TestHistoryPermissionsSerializer(None, data={'field1':
'test2', 'user': super_user.username, 'permissions': {'write':
[]}}, context={'username': super_user.username})
serializer.is_valid(raise_exception=True)
test2 = serializer.save()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.
table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 1)
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.
table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 403)
def test_history_with_has_read_permission(self):
super_user, super_auth = self.create_user()
luser, lauth = self.create_user(username='luser', is_superuser=
False, groups=['group1'])
serializer = TestHistoryHasReadPermissionSerializer(None, data={
'field1': 'test1', 'user': luser.username}, context={'username':
luser.username})
serializer.is_valid(raise_exception=True)
test1 = serializer.save()
serializer = TestHistoryHasReadPermissionSerializer(test1, data={
'field1': 'test1.1'}, partial=True, context={'username': luser.
username})
serializer.is_valid(raise_exception=True)
test1 = serializer.save()
serializer = TestHistoryHasReadPermissionSerializer(None, data={
'field1': 'test2', 'user': super_user.username}, context={
'username': super_user.username})
serializer.is_valid(raise_exception=True)
test2 = serializer.save()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryHasReadPermissionSerializer.
Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 2)
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryHasReadPermissionSerializer.
Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 403)
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryHasReadPermissionSerializer.
Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=super_auth
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 1)
serializer = TestHistoryHasReadPermissionSerializer(test1, context=
{'username': luser.username})
serializer.delete()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryHasReadPermissionSerializer.
Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 3)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestHistoryPermissionsSerializer(HistorySerializerMixin):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Meta(RethinkSerializer.Meta):
table_name = 'django_rethink_test_history_permissions'
indices = [('permissions_read', r.row['permissions']['read']), (
'permissions_write', r.row['permissions']['write']), (
'permissions_create', r.row['permissions']['create'])]
class TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):
id = serializers.CharField(required=False, read_only=True)
field1 = serializers.CharField(required=True)
user = serializers.CharField(required=True)
class Meta(RethinkSerializer.Meta):
table_name = 'django_rethink_test_history_has_read_permission'
def has_read_permission(self, user):
return self.instance['user'] == user.username
@override_settings(RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB',
'django_rethinkci'))
class APITests(TestCase):
@classmethod
def setUpClass(cls):
super(APITests, cls).setUpClass()
cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.
RETHINK_DB_PORT)
try:
r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
except:
pass
r.db_create(settings.RETHINK_DB_DB).run(cls.conn)
cls.conn.db = settings.RETHINK_DB_DB
management.call_command('syncrethinkdb', verbosity=0)
@classmethod
def tearDownClass(cls):
r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
super(APITests, cls).tearDownClass()
def tearDown(self):
for t in ['django_rethink_test', 'history',
'django_rethink_test_reviewed',
'django_rethink_test_history_permissions',
'django_rethink_test_history_has_read_permission']:
r.table(t).delete().run(self.conn)
super(APITests, self).tearDown()
def create_user(self, username='tester', password='tester',
is_superuser=True, groups=[], **kwargs):
user = get_user_model().objects.create(username=username, password=
make_password(password), is_superuser=is_superuser, **kwargs)
for name in groups:
group, created = Group.objects.get_or_create(name=name)
user.groups.add(group)
auth = 'Basic %s' % base64.b64encode(('%s:%s' % (username, password
)).encode('ascii')).decode('ascii')
return user, auth
def test_history_no_type(self):
super_user, super_auth = self.create_user()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),
HTTP_AUTHORIZATION=super_auth)
self.assertEqual(response.status_code, 404)
def test_history_with_permissions(self):
super_user, super_auth = self.create_user()
luser, lauth = self.create_user(username='luser', is_superuser=
False, groups=['group1'])
serializer = TestHistoryPermissionsSerializer(None, data={'field1':
'test1', 'user': luser.username, 'permissions': {'write': [
'group1']}}, context={'username': luser.username})
serializer.is_valid(raise_exception=True)
test1 = serializer.save()
serializer = TestHistoryPermissionsSerializer(None, data={'field1':
'test2', 'user': super_user.username, 'permissions': {'write':
[]}}, context={'username': super_user.username})
serializer.is_valid(raise_exception=True)
test2 = serializer.save()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.
table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 1)
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.
table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 403)
def test_history_with_has_read_permission(self):
super_user, super_auth = self.create_user()
luser, lauth = self.create_user(username='luser', is_superuser=
False, groups=['group1'])
serializer = TestHistoryHasReadPermissionSerializer(None, data={
'field1': 'test1', 'user': luser.username}, context={'username':
luser.username})
serializer.is_valid(raise_exception=True)
test1 = serializer.save()
serializer = TestHistoryHasReadPermissionSerializer(test1, data={
'field1': 'test1.1'}, partial=True, context={'username': luser.
username})
serializer.is_valid(raise_exception=True)
test1 = serializer.save()
serializer = TestHistoryHasReadPermissionSerializer(None, data={
'field1': 'test2', 'user': super_user.username}, context={
'username': super_user.username})
serializer.is_valid(raise_exception=True)
test2 = serializer.save()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryHasReadPermissionSerializer.
Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 2)
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryHasReadPermissionSerializer.
Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 403)
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryHasReadPermissionSerializer.
Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=super_auth
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 1)
serializer = TestHistoryHasReadPermissionSerializer(test1, context=
{'username': luser.username})
serializer.delete()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryHasReadPermissionSerializer.
Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 3)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestReviewSerializer(NeedsReviewMixin, HistorySerializerMixin):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Meta(RethinkSerializer.Meta):
table_name = 'django_rethink_test_reviewed'
indices = [('permissions_read', r.row['permissions']['read']), (
'permissions_write', r.row['permissions']['write']), (
'permissions_create', r.row['permissions']['create'])]
class TestHistoryPermissionsSerializer(HistorySerializerMixin):
id = serializers.CharField(required=False, read_only=True)
field1 = serializers.CharField(required=True)
permissions = PermissionsSerializer()
class Meta(RethinkSerializer.Meta):
table_name = 'django_rethink_test_history_permissions'
indices = [('permissions_read', r.row['permissions']['read']), (
'permissions_write', r.row['permissions']['write']), (
'permissions_create', r.row['permissions']['create'])]
class TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):
id = serializers.CharField(required=False, read_only=True)
field1 = serializers.CharField(required=True)
user = serializers.CharField(required=True)
class Meta(RethinkSerializer.Meta):
table_name = 'django_rethink_test_history_has_read_permission'
def has_read_permission(self, user):
return self.instance['user'] == user.username
@override_settings(RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB',
'django_rethinkci'))
class APITests(TestCase):
@classmethod
def setUpClass(cls):
super(APITests, cls).setUpClass()
cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.
RETHINK_DB_PORT)
try:
r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
except:
pass
r.db_create(settings.RETHINK_DB_DB).run(cls.conn)
cls.conn.db = settings.RETHINK_DB_DB
management.call_command('syncrethinkdb', verbosity=0)
@classmethod
def tearDownClass(cls):
r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
super(APITests, cls).tearDownClass()
def tearDown(self):
for t in ['django_rethink_test', 'history',
'django_rethink_test_reviewed',
'django_rethink_test_history_permissions',
'django_rethink_test_history_has_read_permission']:
r.table(t).delete().run(self.conn)
super(APITests, self).tearDown()
def create_user(self, username='tester', password='tester',
is_superuser=True, groups=[], **kwargs):
user = get_user_model().objects.create(username=username, password=
make_password(password), is_superuser=is_superuser, **kwargs)
for name in groups:
group, created = Group.objects.get_or_create(name=name)
user.groups.add(group)
auth = 'Basic %s' % base64.b64encode(('%s:%s' % (username, password
)).encode('ascii')).decode('ascii')
return user, auth
def test_history_no_type(self):
super_user, super_auth = self.create_user()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),
HTTP_AUTHORIZATION=super_auth)
self.assertEqual(response.status_code, 404)
def test_history_with_permissions(self):
super_user, super_auth = self.create_user()
luser, lauth = self.create_user(username='luser', is_superuser=
False, groups=['group1'])
serializer = TestHistoryPermissionsSerializer(None, data={'field1':
'test1', 'user': luser.username, 'permissions': {'write': [
'group1']}}, context={'username': luser.username})
serializer.is_valid(raise_exception=True)
test1 = serializer.save()
serializer = TestHistoryPermissionsSerializer(None, data={'field1':
'test2', 'user': super_user.username, 'permissions': {'write':
[]}}, context={'username': super_user.username})
serializer.is_valid(raise_exception=True)
test2 = serializer.save()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.
table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 1)
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.
table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 403)
def test_history_with_has_read_permission(self):
super_user, super_auth = self.create_user()
luser, lauth = self.create_user(username='luser', is_superuser=
False, groups=['group1'])
serializer = TestHistoryHasReadPermissionSerializer(None, data={
'field1': 'test1', 'user': luser.username}, context={'username':
luser.username})
serializer.is_valid(raise_exception=True)
test1 = serializer.save()
serializer = TestHistoryHasReadPermissionSerializer(test1, data={
'field1': 'test1.1'}, partial=True, context={'username': luser.
username})
serializer.is_valid(raise_exception=True)
test1 = serializer.save()
serializer = TestHistoryHasReadPermissionSerializer(None, data={
'field1': 'test2', 'user': super_user.username}, context={
'username': super_user.username})
serializer.is_valid(raise_exception=True)
test2 = serializer.save()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryHasReadPermissionSerializer.
Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 2)
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryHasReadPermissionSerializer.
Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 403)
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryHasReadPermissionSerializer.
Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=super_auth
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 1)
serializer = TestHistoryHasReadPermissionSerializer(test1, context=
{'username': luser.username})
serializer.delete()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': TestHistoryHasReadPermissionSerializer.
Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 3)
<|reserved_special_token_1|>
from __future__ import absolute_import
import os
import base64
import json
from django.test import TestCase, override_settings
from django.conf import settings
from django.core import management
from django.urls import reverse
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.contrib.auth.hashers import make_password
from django_rethink.connection import r
from django_rethink.serializers import *
class TestSerializer(RethinkSerializer):
id = serializers.CharField(required=False, read_only=True)
permissions = PermissionsSerializer()
class Meta(RethinkSerializer.Meta):
table_name = 'django_rethink_test'
indices = [('permissions_read', r.row['permissions']['read']), (
'permissions_write', r.row['permissions']['write']), (
'permissions_create', r.row['permissions']['create'])]
class TestReviewSerializer(NeedsReviewMixin, HistorySerializerMixin):
id = serializers.CharField(required=False, read_only=True)
field1 = serializers.CharField(required=True)
permissions = PermissionsSerializer()
class Meta(RethinkSerializer.Meta):
table_name = 'django_rethink_test_reviewed'
indices = [('permissions_read', r.row['permissions']['read']), (
'permissions_write', r.row['permissions']['write']), (
'permissions_create', r.row['permissions']['create'])]
class TestHistoryPermissionsSerializer(HistorySerializerMixin):
id = serializers.CharField(required=False, read_only=True)
field1 = serializers.CharField(required=True)
permissions = PermissionsSerializer()
class Meta(RethinkSerializer.Meta):
table_name = 'django_rethink_test_history_permissions'
indices = [('permissions_read', r.row['permissions']['read']), (
'permissions_write', r.row['permissions']['write']), (
'permissions_create', r.row['permissions']['create'])]
class TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):
id = serializers.CharField(required=False, read_only=True)
field1 = serializers.CharField(required=True)
user = serializers.CharField(required=True)
class Meta(RethinkSerializer.Meta):
table_name = 'django_rethink_test_history_has_read_permission'
def has_read_permission(self, user):
return self.instance['user'] == user.username
@override_settings(RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB',
    'django_rethinkci'))
class APITests(TestCase):
    """End-to-end tests of the history list API against a live RethinkDB.

    setUpClass creates a scratch database (named by RETHINK_DB_DB) and runs
    the syncrethinkdb management command to create the serializer tables;
    tearDownClass drops the database again.  tearDown empties the tables so
    individual tests stay independent.
    """

    @classmethod
    def setUpClass(cls):
        super(APITests, cls).setUpClass()
        cls.conn = r.connect(host=settings.RETHINK_DB_HOST,
                             port=settings.RETHINK_DB_PORT)
        # Drop leftovers from a previously aborted run.  db_drop raises when
        # the database does not exist (the normal first-run case), so the
        # error is swallowed -- but never with a bare except, which would
        # also trap KeyboardInterrupt/SystemExit.
        try:
            r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
        except Exception:
            pass
        r.db_create(settings.RETHINK_DB_DB).run(cls.conn)
        cls.conn.db = settings.RETHINK_DB_DB
        # Create all tables/indexes declared by the RethinkSerializer classes.
        management.call_command('syncrethinkdb', verbosity=0)

    @classmethod
    def tearDownClass(cls):
        r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
        super(APITests, cls).tearDownClass()

    def tearDown(self):
        # Truncate every table the tests write to.
        for table in ('django_rethink_test',
                      'history',
                      'django_rethink_test_reviewed',
                      'django_rethink_test_history_permissions',
                      'django_rethink_test_history_has_read_permission'):
            r.table(table).delete().run(self.conn)
        super(APITests, self).tearDown()

    def create_user(self, username='tester', password='tester',
                    is_superuser=True, groups=(), **kwargs):
        """Create a user and return ``(user, HTTP Basic Authorization value)``.

        ``groups`` is an iterable of group names; missing groups are created
        on the fly.  The default is an immutable tuple (not a list) so the
        default value can never be shared or mutated between calls.
        """
        user = get_user_model().objects.create(
            username=username,
            password=make_password(password),
            is_superuser=is_superuser,
            **kwargs
        )
        for name in groups:
            group, _created = Group.objects.get_or_create(name=name)
            user.groups.add(group)
        credentials = ('%s:%s' % (username, password)).encode('ascii')
        auth = 'Basic %s' % base64.b64encode(credentials).decode('ascii')
        return user, auth

    def test_history_no_type(self):
        # An unknown object_type must yield 404, not an empty list.
        super_user, super_auth = self.create_user()
        response = self.client.get(
            reverse('django_rethink:history_list',
                    kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),
            HTTP_AUTHORIZATION=super_auth)
        self.assertEqual(response.status_code, 404)

    def test_history_with_permissions(self):
        # Access to an object's history follows the object's permissions
        # sub-document: a group in the write list can read, others get 403.
        super_user, super_auth = self.create_user()
        luser, lauth = self.create_user(username='luser', is_superuser=False,
                                        groups=['group1'])
        serializer = TestHistoryPermissionsSerializer(None,
            data={'field1': 'test1', 'user': luser.username,
                  'permissions': {'write': ['group1']}},
            context={'username': luser.username})
        serializer.is_valid(raise_exception=True)
        test1 = serializer.save()
        serializer = TestHistoryPermissionsSerializer(None,
            data={'field1': 'test2', 'user': super_user.username,
                  'permissions': {'write': []}},
            context={'username': super_user.username})
        serializer.is_valid(raise_exception=True)
        test2 = serializer.save()
        # luser's group is in test1's write list: one history entry visible.
        response = self.client.get(
            reverse('django_rethink:history_list', kwargs={
                'object_type': TestHistoryPermissionsSerializer.Meta.table_name,
                'pk': test1['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 1)
        # test2 grants luser nothing: access is denied outright.
        response = self.client.get(
            reverse('django_rethink:history_list', kwargs={
                'object_type': TestHistoryPermissionsSerializer.Meta.table_name,
                'pk': test2['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 403)

    def test_history_with_has_read_permission(self):
        # Serializers may implement has_read_permission() for per-object
        # access control; here only the stored 'user' may read the history.
        super_user, super_auth = self.create_user()
        luser, lauth = self.create_user(username='luser', is_superuser=False,
                                        groups=['group1'])
        serializer = TestHistoryHasReadPermissionSerializer(None,
            data={'field1': 'test1', 'user': luser.username},
            context={'username': luser.username})
        serializer.is_valid(raise_exception=True)
        test1 = serializer.save()
        serializer = TestHistoryHasReadPermissionSerializer(test1,
            data={'field1': 'test1.1'}, partial=True,
            context={'username': luser.username})
        serializer.is_valid(raise_exception=True)
        test1 = serializer.save()
        serializer = TestHistoryHasReadPermissionSerializer(None,
            data={'field1': 'test2', 'user': super_user.username},
            context={'username': super_user.username})
        serializer.is_valid(raise_exception=True)
        test2 = serializer.save()
        # Create + update of test1 produce two history entries for its owner.
        response = self.client.get(
            reverse('django_rethink:history_list', kwargs={
                'object_type':
                    TestHistoryHasReadPermissionSerializer.Meta.table_name,
                'pk': test1['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 2)
        # luser is not test2's owner: forbidden.
        response = self.client.get(
            reverse('django_rethink:history_list', kwargs={
                'object_type':
                    TestHistoryHasReadPermissionSerializer.Meta.table_name,
                'pk': test2['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 403)
        # test2's owner can read its single (create) history entry.
        response = self.client.get(
            reverse('django_rethink:history_list', kwargs={
                'object_type':
                    TestHistoryHasReadPermissionSerializer.Meta.table_name,
                'pk': test2['id'],
            }), HTTP_AUTHORIZATION=super_auth)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 1)
        # Deleting test1 adds a third history entry, still readable by owner.
        serializer = TestHistoryHasReadPermissionSerializer(test1,
            context={'username': luser.username})
        serializer.delete()
        response = self.client.get(
            reverse('django_rethink:history_list', kwargs={
                'object_type':
                    TestHistoryHasReadPermissionSerializer.Meta.table_name,
                'pk': test1['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 3)
<|reserved_special_token_1|>
# Copyright 2017 Klarna AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import base64
import json
from django.test import TestCase, override_settings
from django.conf import settings
from django.core import management
from django.urls import reverse
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.contrib.auth.hashers import make_password
from django_rethink.connection import r
from django_rethink.serializers import *
class TestSerializer(RethinkSerializer):
    """Minimal serializer fixture exercising the plain RethinkSerializer base."""
    # Primary key: optional on input and never writable by clients.
    id = serializers.CharField(required=False, read_only=True)
    # Nested permissions sub-document holding group lists (see indices below).
    permissions = PermissionsSerializer()
    class Meta(RethinkSerializer.Meta):
        table_name = 'django_rethink_test'
        # Secondary indexes over the read/write/create group lists inside the
        # permissions sub-document, so permission filters can use indexed lookups.
        indices = [
            ('permissions_read', r.row['permissions']['read']),
            ('permissions_write', r.row['permissions']['write']),
            ('permissions_create', r.row['permissions']['create']),
        ]
class TestReviewSerializer(NeedsReviewMixin, HistorySerializerMixin):
    """Serializer fixture combining NeedsReviewMixin with history tracking."""
    # Primary key: optional on input and never writable by clients.
    id = serializers.CharField(required=False, read_only=True)
    # Arbitrary payload field used by the tests to trigger changes.
    field1 = serializers.CharField(required=True)
    # Nested permissions sub-document holding group lists (see indices below).
    permissions = PermissionsSerializer()
    class Meta(RethinkSerializer.Meta):
        table_name = 'django_rethink_test_reviewed'
        # Secondary indexes over the read/write/create group lists of the
        # permissions sub-document.
        indices = [
            ('permissions_read', r.row['permissions']['read']),
            ('permissions_write', r.row['permissions']['write']),
            ('permissions_create', r.row['permissions']['create']),
        ]
class TestHistoryPermissionsSerializer(HistorySerializerMixin):
    """History-tracking fixture whose access control comes from the
    permissions sub-document (group lists indexed below)."""
    # Primary key: optional on input and never writable by clients.
    id = serializers.CharField(required=False, read_only=True)
    # Arbitrary payload field used by the tests to trigger changes.
    field1 = serializers.CharField(required=True)
    # Nested permissions sub-document holding group lists (see indices below).
    permissions = PermissionsSerializer()
    class Meta(RethinkSerializer.Meta):
        table_name = 'django_rethink_test_history_permissions'
        # Secondary indexes over the read/write/create group lists of the
        # permissions sub-document.
        indices = [
            ('permissions_read', r.row['permissions']['read']),
            ('permissions_write', r.row['permissions']['write']),
            ('permissions_create', r.row['permissions']['create']),
        ]
class TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):
    """History-tracking fixture with per-object read access implemented via
    a custom has_read_permission() hook instead of a permissions document."""
    # Primary key: optional on input and never writable by clients.
    id = serializers.CharField(required=False, read_only=True)
    # Arbitrary payload field used by the tests to trigger changes.
    field1 = serializers.CharField(required=True)
    # Username of the document's owner; drives has_read_permission below.
    user = serializers.CharField(required=True)
    class Meta(RethinkSerializer.Meta):
        table_name = 'django_rethink_test_history_has_read_permission'
    def has_read_permission(self, user):
        # Only the user named in the document's 'user' field may read it.
        return self.instance['user'] == user.username
@override_settings(
    RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB', 'django_rethinkci'),
)
class APITests(TestCase):
    """End-to-end tests of the history list API against a live RethinkDB.

    setUpClass creates a scratch database (named by RETHINK_DB_DB) and runs
    the syncrethinkdb management command to create the serializer tables;
    tearDownClass drops the database again.  tearDown empties the tables so
    individual tests stay independent.
    """

    @classmethod
    def setUpClass(cls):
        super(APITests, cls).setUpClass()
        cls.conn = r.connect(host=settings.RETHINK_DB_HOST,
                             port=settings.RETHINK_DB_PORT)
        # Drop leftovers from a previously aborted run.  db_drop raises when
        # the database does not exist (the normal first-run case), so the
        # error is swallowed -- but never with a bare except, which would
        # also trap KeyboardInterrupt/SystemExit.
        try:
            r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
        except Exception:
            pass
        r.db_create(settings.RETHINK_DB_DB).run(cls.conn)
        cls.conn.db = settings.RETHINK_DB_DB
        # Create all tables/indexes declared by the RethinkSerializer classes.
        management.call_command('syncrethinkdb', verbosity=0)

    @classmethod
    def tearDownClass(cls):
        r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
        super(APITests, cls).tearDownClass()

    def tearDown(self):
        # Truncate every table the tests write to.
        for table in ('django_rethink_test',
                      'history',
                      'django_rethink_test_reviewed',
                      'django_rethink_test_history_permissions',
                      'django_rethink_test_history_has_read_permission'):
            r.table(table).delete().run(self.conn)
        super(APITests, self).tearDown()

    def create_user(self, username='tester', password='tester',
                    is_superuser=True, groups=(), **kwargs):
        """Create a user and return ``(user, HTTP Basic Authorization value)``.

        ``groups`` is an iterable of group names; missing groups are created
        on the fly.  The default is an immutable tuple (not a list) so the
        default value can never be shared or mutated between calls.
        """
        user = get_user_model().objects.create(
            username=username,
            password=make_password(password),
            is_superuser=is_superuser,
            **kwargs
        )
        for name in groups:
            group, _created = Group.objects.get_or_create(name=name)
            user.groups.add(group)
        credentials = ('%s:%s' % (username, password)).encode('ascii')
        auth = 'Basic %s' % base64.b64encode(credentials).decode('ascii')
        return user, auth

    def test_history_no_type(self):
        # An unknown object_type must yield 404, not an empty list.
        super_user, super_auth = self.create_user()
        response = self.client.get(
            reverse('django_rethink:history_list',
                    kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),
            HTTP_AUTHORIZATION=super_auth)
        self.assertEqual(response.status_code, 404)

    def test_history_with_permissions(self):
        # Access to an object's history follows the object's permissions
        # sub-document: a group in the write list can read, others get 403.
        super_user, super_auth = self.create_user()
        luser, lauth = self.create_user(username='luser', is_superuser=False,
                                        groups=['group1'])
        serializer = TestHistoryPermissionsSerializer(None,
            data={'field1': 'test1', 'user': luser.username,
                  'permissions': {'write': ['group1']}},
            context={'username': luser.username})
        serializer.is_valid(raise_exception=True)
        test1 = serializer.save()
        serializer = TestHistoryPermissionsSerializer(None,
            data={'field1': 'test2', 'user': super_user.username,
                  'permissions': {'write': []}},
            context={'username': super_user.username})
        serializer.is_valid(raise_exception=True)
        test2 = serializer.save()
        # luser's group is in test1's write list: one history entry visible.
        response = self.client.get(
            reverse('django_rethink:history_list', kwargs={
                'object_type': TestHistoryPermissionsSerializer.Meta.table_name,
                'pk': test1['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 1)
        # test2 grants luser nothing: access is denied outright.
        response = self.client.get(
            reverse('django_rethink:history_list', kwargs={
                'object_type': TestHistoryPermissionsSerializer.Meta.table_name,
                'pk': test2['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 403)

    def test_history_with_has_read_permission(self):
        # Serializers may implement has_read_permission() for per-object
        # access control; here only the stored 'user' may read the history.
        super_user, super_auth = self.create_user()
        luser, lauth = self.create_user(username='luser', is_superuser=False,
                                        groups=['group1'])
        serializer = TestHistoryHasReadPermissionSerializer(None,
            data={'field1': 'test1', 'user': luser.username},
            context={'username': luser.username})
        serializer.is_valid(raise_exception=True)
        test1 = serializer.save()
        serializer = TestHistoryHasReadPermissionSerializer(test1,
            data={'field1': 'test1.1'}, partial=True,
            context={'username': luser.username})
        serializer.is_valid(raise_exception=True)
        test1 = serializer.save()
        serializer = TestHistoryHasReadPermissionSerializer(None,
            data={'field1': 'test2', 'user': super_user.username},
            context={'username': super_user.username})
        serializer.is_valid(raise_exception=True)
        test2 = serializer.save()
        # Create + update of test1 produce two history entries for its owner.
        response = self.client.get(
            reverse('django_rethink:history_list', kwargs={
                'object_type':
                    TestHistoryHasReadPermissionSerializer.Meta.table_name,
                'pk': test1['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 2)
        # luser is not test2's owner: forbidden.
        response = self.client.get(
            reverse('django_rethink:history_list', kwargs={
                'object_type':
                    TestHistoryHasReadPermissionSerializer.Meta.table_name,
                'pk': test2['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 403)
        # test2's owner can read its single (create) history entry.
        response = self.client.get(
            reverse('django_rethink:history_list', kwargs={
                'object_type':
                    TestHistoryHasReadPermissionSerializer.Meta.table_name,
                'pk': test2['id'],
            }), HTTP_AUTHORIZATION=super_auth)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 1)
        # Deleting test1 adds a third history entry, still readable by owner.
        serializer = TestHistoryHasReadPermissionSerializer(test1,
            context={'username': luser.username})
        serializer.delete()
        response = self.client.get(
            reverse('django_rethink:history_list', kwargs={
                'object_type':
                    TestHistoryHasReadPermissionSerializer.Meta.table_name,
                'pk': test1['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 3)
|
flexible
|
{
"blob_id": "d5d12e2269b343dde78534eddf2cce06759eb264",
"index": 9128,
"step-1": "<mask token>\n\n\n@override_settings(RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB',\n 'django_rethinkci'))\nclass APITests(TestCase):\n\n @classmethod\n def setUpClass(cls):\n super(APITests, cls).setUpClass()\n cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.\n RETHINK_DB_PORT)\n try:\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n except:\n pass\n r.db_create(settings.RETHINK_DB_DB).run(cls.conn)\n cls.conn.db = settings.RETHINK_DB_DB\n management.call_command('syncrethinkdb', verbosity=0)\n\n @classmethod\n def tearDownClass(cls):\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n super(APITests, cls).tearDownClass()\n\n def tearDown(self):\n for t in ['django_rethink_test', 'history',\n 'django_rethink_test_reviewed',\n 'django_rethink_test_history_permissions',\n 'django_rethink_test_history_has_read_permission']:\n r.table(t).delete().run(self.conn)\n super(APITests, self).tearDown()\n\n def create_user(self, username='tester', password='tester',\n is_superuser=True, groups=[], **kwargs):\n user = get_user_model().objects.create(username=username, password=\n make_password(password), is_superuser=is_superuser, **kwargs)\n for name in groups:\n group, created = Group.objects.get_or_create(name=name)\n user.groups.add(group)\n auth = 'Basic %s' % base64.b64encode(('%s:%s' % (username, password\n )).encode('ascii')).decode('ascii')\n return user, auth\n\n def test_history_no_type(self):\n super_user, super_auth = self.create_user()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),\n HTTP_AUTHORIZATION=super_auth)\n self.assertEqual(response.status_code, 404)\n\n def test_history_with_permissions(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test1', 'user': luser.username, 'permissions': 
{'write': [\n 'group1']}}, context={'username': luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test2', 'user': super_user.username, 'permissions': {'write':\n []}}, context={'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n\n def test_history_with_has_read_permission(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test1', 'user': luser.username}, context={'username':\n luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(test1, data={\n 'field1': 'test1.1'}, partial=True, context={'username': luser.\n username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test2', 'user': super_user.username}, context={\n 'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), 
HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 2)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=super_auth\n )\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n serializer = TestHistoryHasReadPermissionSerializer(test1, context=\n {'username': luser.username})\n serializer.delete()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 3)\n",
"step-2": "<mask token>\n\n\nclass TestHistoryPermissionsSerializer(HistorySerializerMixin):\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_permissions'\n indices = [('permissions_read', r.row['permissions']['read']), (\n 'permissions_write', r.row['permissions']['write']), (\n 'permissions_create', r.row['permissions']['create'])]\n\n\nclass TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n user = serializers.CharField(required=True)\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_has_read_permission'\n\n def has_read_permission(self, user):\n return self.instance['user'] == user.username\n\n\n@override_settings(RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB',\n 'django_rethinkci'))\nclass APITests(TestCase):\n\n @classmethod\n def setUpClass(cls):\n super(APITests, cls).setUpClass()\n cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.\n RETHINK_DB_PORT)\n try:\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n except:\n pass\n r.db_create(settings.RETHINK_DB_DB).run(cls.conn)\n cls.conn.db = settings.RETHINK_DB_DB\n management.call_command('syncrethinkdb', verbosity=0)\n\n @classmethod\n def tearDownClass(cls):\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n super(APITests, cls).tearDownClass()\n\n def tearDown(self):\n for t in ['django_rethink_test', 'history',\n 'django_rethink_test_reviewed',\n 'django_rethink_test_history_permissions',\n 'django_rethink_test_history_has_read_permission']:\n r.table(t).delete().run(self.conn)\n super(APITests, self).tearDown()\n\n def create_user(self, username='tester', password='tester',\n is_superuser=True, groups=[], **kwargs):\n user = get_user_model().objects.create(username=username, password=\n make_password(password), is_superuser=is_superuser, **kwargs)\n 
for name in groups:\n group, created = Group.objects.get_or_create(name=name)\n user.groups.add(group)\n auth = 'Basic %s' % base64.b64encode(('%s:%s' % (username, password\n )).encode('ascii')).decode('ascii')\n return user, auth\n\n def test_history_no_type(self):\n super_user, super_auth = self.create_user()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),\n HTTP_AUTHORIZATION=super_auth)\n self.assertEqual(response.status_code, 404)\n\n def test_history_with_permissions(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test1', 'user': luser.username, 'permissions': {'write': [\n 'group1']}}, context={'username': luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test2', 'user': super_user.username, 'permissions': {'write':\n []}}, context={'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n\n def test_history_with_has_read_permission(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = 
TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test1', 'user': luser.username}, context={'username':\n luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(test1, data={\n 'field1': 'test1.1'}, partial=True, context={'username': luser.\n username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test2', 'user': super_user.username}, context={\n 'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 2)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=super_auth\n )\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n serializer = TestHistoryHasReadPermissionSerializer(test1, context=\n {'username': luser.username})\n serializer.delete()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 3)\n",
"step-3": "<mask token>\n\n\nclass TestReviewSerializer(NeedsReviewMixin, HistorySerializerMixin):\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_reviewed'\n indices = [('permissions_read', r.row['permissions']['read']), (\n 'permissions_write', r.row['permissions']['write']), (\n 'permissions_create', r.row['permissions']['create'])]\n\n\nclass TestHistoryPermissionsSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n permissions = PermissionsSerializer()\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_permissions'\n indices = [('permissions_read', r.row['permissions']['read']), (\n 'permissions_write', r.row['permissions']['write']), (\n 'permissions_create', r.row['permissions']['create'])]\n\n\nclass TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n user = serializers.CharField(required=True)\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_has_read_permission'\n\n def has_read_permission(self, user):\n return self.instance['user'] == user.username\n\n\n@override_settings(RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB',\n 'django_rethinkci'))\nclass APITests(TestCase):\n\n @classmethod\n def setUpClass(cls):\n super(APITests, cls).setUpClass()\n cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.\n RETHINK_DB_PORT)\n try:\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n except:\n pass\n r.db_create(settings.RETHINK_DB_DB).run(cls.conn)\n cls.conn.db = settings.RETHINK_DB_DB\n management.call_command('syncrethinkdb', verbosity=0)\n\n @classmethod\n def tearDownClass(cls):\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n super(APITests, cls).tearDownClass()\n\n def tearDown(self):\n 
for t in ['django_rethink_test', 'history',\n 'django_rethink_test_reviewed',\n 'django_rethink_test_history_permissions',\n 'django_rethink_test_history_has_read_permission']:\n r.table(t).delete().run(self.conn)\n super(APITests, self).tearDown()\n\n def create_user(self, username='tester', password='tester',\n is_superuser=True, groups=[], **kwargs):\n user = get_user_model().objects.create(username=username, password=\n make_password(password), is_superuser=is_superuser, **kwargs)\n for name in groups:\n group, created = Group.objects.get_or_create(name=name)\n user.groups.add(group)\n auth = 'Basic %s' % base64.b64encode(('%s:%s' % (username, password\n )).encode('ascii')).decode('ascii')\n return user, auth\n\n def test_history_no_type(self):\n super_user, super_auth = self.create_user()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),\n HTTP_AUTHORIZATION=super_auth)\n self.assertEqual(response.status_code, 404)\n\n def test_history_with_permissions(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test1', 'user': luser.username, 'permissions': {'write': [\n 'group1']}}, context={'username': luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test2', 'user': super_user.username, 'permissions': {'write':\n []}}, context={'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n 
self.assertEqual(len(data), 1)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n\n def test_history_with_has_read_permission(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test1', 'user': luser.username}, context={'username':\n luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(test1, data={\n 'field1': 'test1.1'}, partial=True, context={'username': luser.\n username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test2', 'user': super_user.username}, context={\n 'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 2)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=super_auth\n )\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n 
self.assertEqual(len(data), 1)\n serializer = TestHistoryHasReadPermissionSerializer(test1, context=\n {'username': luser.username})\n serializer.delete()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 3)\n",
"step-4": "from __future__ import absolute_import\nimport os\nimport base64\nimport json\nfrom django.test import TestCase, override_settings\nfrom django.conf import settings\nfrom django.core import management\nfrom django.urls import reverse\nfrom django.contrib.auth import get_user_model\nfrom django.contrib.auth.models import Group\nfrom django.contrib.auth.hashers import make_password\nfrom django_rethink.connection import r\nfrom django_rethink.serializers import *\n\n\nclass TestSerializer(RethinkSerializer):\n id = serializers.CharField(required=False, read_only=True)\n permissions = PermissionsSerializer()\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test'\n indices = [('permissions_read', r.row['permissions']['read']), (\n 'permissions_write', r.row['permissions']['write']), (\n 'permissions_create', r.row['permissions']['create'])]\n\n\nclass TestReviewSerializer(NeedsReviewMixin, HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n permissions = PermissionsSerializer()\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_reviewed'\n indices = [('permissions_read', r.row['permissions']['read']), (\n 'permissions_write', r.row['permissions']['write']), (\n 'permissions_create', r.row['permissions']['create'])]\n\n\nclass TestHistoryPermissionsSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n permissions = PermissionsSerializer()\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_permissions'\n indices = [('permissions_read', r.row['permissions']['read']), (\n 'permissions_write', r.row['permissions']['write']), (\n 'permissions_create', r.row['permissions']['create'])]\n\n\nclass TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, 
read_only=True)\n field1 = serializers.CharField(required=True)\n user = serializers.CharField(required=True)\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_has_read_permission'\n\n def has_read_permission(self, user):\n return self.instance['user'] == user.username\n\n\n@override_settings(RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB',\n 'django_rethinkci'))\nclass APITests(TestCase):\n\n @classmethod\n def setUpClass(cls):\n super(APITests, cls).setUpClass()\n cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.\n RETHINK_DB_PORT)\n try:\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n except:\n pass\n r.db_create(settings.RETHINK_DB_DB).run(cls.conn)\n cls.conn.db = settings.RETHINK_DB_DB\n management.call_command('syncrethinkdb', verbosity=0)\n\n @classmethod\n def tearDownClass(cls):\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n super(APITests, cls).tearDownClass()\n\n def tearDown(self):\n for t in ['django_rethink_test', 'history',\n 'django_rethink_test_reviewed',\n 'django_rethink_test_history_permissions',\n 'django_rethink_test_history_has_read_permission']:\n r.table(t).delete().run(self.conn)\n super(APITests, self).tearDown()\n\n def create_user(self, username='tester', password='tester',\n is_superuser=True, groups=[], **kwargs):\n user = get_user_model().objects.create(username=username, password=\n make_password(password), is_superuser=is_superuser, **kwargs)\n for name in groups:\n group, created = Group.objects.get_or_create(name=name)\n user.groups.add(group)\n auth = 'Basic %s' % base64.b64encode(('%s:%s' % (username, password\n )).encode('ascii')).decode('ascii')\n return user, auth\n\n def test_history_no_type(self):\n super_user, super_auth = self.create_user()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),\n HTTP_AUTHORIZATION=super_auth)\n self.assertEqual(response.status_code, 404)\n\n def 
test_history_with_permissions(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test1', 'user': luser.username, 'permissions': {'write': [\n 'group1']}}, context={'username': luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test2', 'user': super_user.username, 'permissions': {'write':\n []}}, context={'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n\n def test_history_with_has_read_permission(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test1', 'user': luser.username}, context={'username':\n luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(test1, data={\n 'field1': 'test1.1'}, partial=True, context={'username': luser.\n username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test2', 'user': super_user.username}, 
context={\n 'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 2)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=super_auth\n )\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n serializer = TestHistoryHasReadPermissionSerializer(test1, context=\n {'username': luser.username})\n serializer.delete()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 3)\n",
"step-5": "# Copyright 2017 Klarna AB\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\nimport os\nimport base64\nimport json\nfrom django.test import TestCase, override_settings\nfrom django.conf import settings\nfrom django.core import management\nfrom django.urls import reverse\nfrom django.contrib.auth import get_user_model\nfrom django.contrib.auth.models import Group\nfrom django.contrib.auth.hashers import make_password\n\nfrom django_rethink.connection import r\nfrom django_rethink.serializers import *\n\nclass TestSerializer(RethinkSerializer):\n id = serializers.CharField(required=False, read_only=True)\n permissions = PermissionsSerializer()\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test'\n indices = [\n ('permissions_read', r.row['permissions']['read']),\n ('permissions_write', r.row['permissions']['write']),\n ('permissions_create', r.row['permissions']['create']),\n ]\n\nclass TestReviewSerializer(NeedsReviewMixin, HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n permissions = PermissionsSerializer()\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_reviewed'\n indices = [\n ('permissions_read', r.row['permissions']['read']),\n ('permissions_write', r.row['permissions']['write']),\n ('permissions_create', r.row['permissions']['create']),\n ]\n\nclass 
TestHistoryPermissionsSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n permissions = PermissionsSerializer()\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_permissions'\n indices = [\n ('permissions_read', r.row['permissions']['read']),\n ('permissions_write', r.row['permissions']['write']),\n ('permissions_create', r.row['permissions']['create']),\n ]\n\nclass TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n user = serializers.CharField(required=True)\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_has_read_permission'\n def has_read_permission(self, user):\n return self.instance['user'] == user.username\n\n@override_settings(\n RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB', 'django_rethinkci'),\n)\nclass APITests(TestCase):\n @classmethod\n def setUpClass(cls):\n super(APITests, cls).setUpClass()\n cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.RETHINK_DB_PORT)\n try:\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n except:\n pass\n r.db_create(settings.RETHINK_DB_DB).run(cls.conn)\n cls.conn.db = settings.RETHINK_DB_DB\n management.call_command('syncrethinkdb', verbosity=0)\n\n @classmethod\n def tearDownClass(cls):\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n super(APITests, cls).tearDownClass()\n\n def tearDown(self):\n for t in [\n \"django_rethink_test\",\n \"history\",\n \"django_rethink_test_reviewed\",\n \"django_rethink_test_history_permissions\",\n \"django_rethink_test_history_has_read_permission\",\n ]:\n r.table(t).delete().run(self.conn)\n super(APITests, self).tearDown()\n\n def create_user(self, username='tester', password='tester', is_superuser=True, groups=[], **kwargs):\n user = get_user_model().objects.create(\n 
username=username,\n password=make_password(password),\n is_superuser=is_superuser,\n **kwargs\n )\n for name in groups:\n group, created = Group.objects.get_or_create(name=name)\n user.groups.add(group)\n auth = \"Basic %s\" % (base64.b64encode((\"%s:%s\" % (username, password)).encode(\"ascii\")).decode(\"ascii\"))\n return user, auth\n\n def test_history_no_type(self):\n super_user, super_auth = self.create_user()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),\n HTTP_AUTHORIZATION=super_auth\n )\n self.assertEqual(response.status_code, 404)\n\n def test_history_with_permissions(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=False, groups=['group1'])\n\n serializer = TestHistoryPermissionsSerializer(None,\n data={'field1': 'test1', 'user': luser.username,\n 'permissions': {'write': ['group1']}},\n context={'username': luser.username}\n )\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n\n serializer = TestHistoryPermissionsSerializer(None,\n data={'field1': 'test2', 'user': super_user.username,\n 'permissions': {'write': []}},\n context={'username': super_user.username}\n )\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={\n 'object_type': TestHistoryPermissionsSerializer.Meta.table_name,\n 'pk': test1['id'],\n }), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={\n 'object_type': TestHistoryPermissionsSerializer.Meta.table_name,\n 'pk': test2['id'],\n }), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n\n def test_history_with_has_read_permission(self):\n super_user, super_auth = 
self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=False, groups=['group1'])\n\n serializer = TestHistoryHasReadPermissionSerializer(None,\n data={'field1': 'test1', 'user': luser.username},\n context={'username': luser.username}\n )\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(test1,\n data={'field1': 'test1.1'}, partial=True,\n context={'username': luser.username},\n )\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n\n serializer = TestHistoryHasReadPermissionSerializer(None,\n data={'field1': 'test2', 'user': super_user.username},\n context={'username': super_user.username}\n )\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={\n 'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,\n 'pk': test1['id'],\n }), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 2)\n\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={\n 'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,\n 'pk': test2['id'],\n }), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={\n 'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,\n 'pk': test2['id'],\n }), HTTP_AUTHORIZATION=super_auth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n\n serializer = TestHistoryHasReadPermissionSerializer(test1,\n context={'username': luser.username},\n )\n serializer.delete()\n\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={\n 'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,\n 'pk': 
test1['id'],\n }), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 3)\n",
"step-ids": [
8,
12,
14,
18,
19
]
}
|
[
8,
12,
14,
18,
19
] |
#!/usr/bin/python
import glob
import pandas as pd
import numpy as np
manifest = pd.read_csv('./manifest.csv', sep=',', names=['projectId','records'], skiprows=[0])
mailTypes = pd.read_csv('./mail_types.csv', sep=',', names=['typeId','typeName'], skiprows=[0])
#----- mailTypes['typeId'] = pd.to_numeric(mailTypes['typeId'], errors='coerce')
#mailTypes['typeId'] = mailTypes['typeId'].astype(str).astype(int)
#print mailTypes.dtypes
mailAll = pd.DataFrame(columns=['projectId', 'correspondenceId', 'sentDate', 'fromOrganizationId',
'fromUserId', 'correspondenceTypeId', 'correspondenceTypeName', 'responseRequiredByDate'])
path = './correspondence/' # use your path
allFiles = glob.glob(path + "*.csv")
counter = 0
for file_ in allFiles :
counter+=1
print 'files remaining: ' + str(len(allFiles) - counter)
correspond = pd.read_csv(file_, sep=',', header='infer')
mail = pd.merge(correspond, mailTypes, how='left', left_on=['correspondenceTypeId'], right_on=['typeId'])
mail.drop('typeId', axis=1, inplace=True)
mail.columns = ['projectId', 'correspondenceId', 'sentDate', 'fromOrganizationId', 'fromUserId', 'correspondenceTypeId', 'correspondenceTypeName', 'responseRequiredByDate']
mailAll = mailAll.append(mail)
mailAll_df = pd.DataFrame.from_dict(mailAll)
mailAll_df = mailAll_df[['projectId', 'correspondenceId', 'sentDate', 'fromOrganizationId', 'fromUserId', 'correspondenceTypeId', 'correspondenceTypeName', 'responseRequiredByDate']]
mailAll_df.to_csv('mailAll.csv', sep=',')
|
normal
|
{
"blob_id": "2ea33fd06be888db5cda86b345f535532d2a05b5",
"index": 4268,
"step-1": "#!/usr/bin/python \n\nimport glob\nimport pandas as pd\nimport numpy as np\n\nmanifest = pd.read_csv('./manifest.csv', sep=',', names=['projectId','records'], skiprows=[0])\nmailTypes = pd.read_csv('./mail_types.csv', sep=',', names=['typeId','typeName'], skiprows=[0])\n\n#----- mailTypes['typeId'] = pd.to_numeric(mailTypes['typeId'], errors='coerce')\n#mailTypes['typeId'] = mailTypes['typeId'].astype(str).astype(int)\n#print mailTypes.dtypes\n\nmailAll = pd.DataFrame(columns=['projectId', 'correspondenceId', 'sentDate', 'fromOrganizationId', \n 'fromUserId', 'correspondenceTypeId', 'correspondenceTypeName', 'responseRequiredByDate'])\n\npath = './correspondence/' # use your path\nallFiles = glob.glob(path + \"*.csv\")\n\ncounter = 0\nfor file_ in allFiles :\n counter+=1\n print 'files remaining: ' + str(len(allFiles) - counter)\n\n correspond = pd.read_csv(file_, sep=',', header='infer')\n mail = pd.merge(correspond, mailTypes, how='left', left_on=['correspondenceTypeId'], right_on=['typeId'])\n mail.drop('typeId', axis=1, inplace=True)\n mail.columns = ['projectId', 'correspondenceId', 'sentDate', 'fromOrganizationId', 'fromUserId', 'correspondenceTypeId', 'correspondenceTypeName', 'responseRequiredByDate']\n mailAll = mailAll.append(mail)\n \nmailAll_df = pd.DataFrame.from_dict(mailAll)\nmailAll_df = mailAll_df[['projectId', 'correspondenceId', 'sentDate', 'fromOrganizationId', 'fromUserId', 'correspondenceTypeId', 'correspondenceTypeName', 'responseRequiredByDate']]\nmailAll_df.to_csv('mailAll.csv', sep=',')\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from .base import Base
class Files(Base):
endpoint = "/files"
def upload_file(self, channel_id, files):
return self.client.post(self.endpoint, data={"channel_id": channel_id}, files=files)
def get_file(self, file_id):
return self.client.get(
self.endpoint + "/" + file_id,
)
def get_file_thumbnail(self, file_id):
return self.client.get(
self.endpoint + "/" + file_id + "/thumbnail",
)
def get_file_preview(self, file_id):
return self.client.get(
self.endpoint + "/" + file_id + "/preview",
)
def get_public_file_link(self, file_id):
return self.client.get(
self.endpoint + "/" + file_id + "/link",
)
def get_file_metadata(self, file_id):
return self.client.get(
self.endpoint + "/" + file_id + "/info",
)
|
normal
|
{
"blob_id": "0686dec7f3dc23f01ffff41f611a1bb597bb5352",
"index": 829,
"step-1": "<mask token>\n\n\nclass Files(Base):\n <mask token>\n\n def upload_file(self, channel_id, files):\n return self.client.post(self.endpoint, data={'channel_id':\n channel_id}, files=files)\n\n def get_file(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id)\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/thumbnail')\n <mask token>\n <mask token>\n\n def get_file_metadata(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/info')\n",
"step-2": "<mask token>\n\n\nclass Files(Base):\n <mask token>\n\n def upload_file(self, channel_id, files):\n return self.client.post(self.endpoint, data={'channel_id':\n channel_id}, files=files)\n\n def get_file(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id)\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/thumbnail')\n\n def get_file_preview(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/preview')\n <mask token>\n\n def get_file_metadata(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/info')\n",
"step-3": "<mask token>\n\n\nclass Files(Base):\n endpoint = '/files'\n\n def upload_file(self, channel_id, files):\n return self.client.post(self.endpoint, data={'channel_id':\n channel_id}, files=files)\n\n def get_file(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id)\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/thumbnail')\n\n def get_file_preview(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/preview')\n\n def get_public_file_link(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/link')\n\n def get_file_metadata(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/info')\n",
"step-4": "from .base import Base\n\n\nclass Files(Base):\n endpoint = '/files'\n\n def upload_file(self, channel_id, files):\n return self.client.post(self.endpoint, data={'channel_id':\n channel_id}, files=files)\n\n def get_file(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id)\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/thumbnail')\n\n def get_file_preview(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/preview')\n\n def get_public_file_link(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/link')\n\n def get_file_metadata(self, file_id):\n return self.client.get(self.endpoint + '/' + file_id + '/info')\n",
"step-5": "from .base import Base\n\n\nclass Files(Base):\n endpoint = \"/files\"\n\n def upload_file(self, channel_id, files):\n return self.client.post(self.endpoint, data={\"channel_id\": channel_id}, files=files)\n\n def get_file(self, file_id):\n return self.client.get(\n self.endpoint + \"/\" + file_id,\n )\n\n def get_file_thumbnail(self, file_id):\n return self.client.get(\n self.endpoint + \"/\" + file_id + \"/thumbnail\",\n )\n\n def get_file_preview(self, file_id):\n return self.client.get(\n self.endpoint + \"/\" + file_id + \"/preview\",\n )\n\n def get_public_file_link(self, file_id):\n return self.client.get(\n self.endpoint + \"/\" + file_id + \"/link\",\n )\n\n def get_file_metadata(self, file_id):\n return self.client.get(\n self.endpoint + \"/\" + file_id + \"/info\",\n )\n",
"step-ids": [
5,
6,
8,
9,
10
]
}
|
[
5,
6,
8,
9,
10
] |
from numpy import array, sum
def comp_point_ref(self, is_set=False):
"""Compute the point ref of the Surface
Parameters
----------
self : SurfLine
A SurfLine object
is_set: bool
True to update the point_ref property
Returns
-------
point_ref : complex
the reference point of the surface
"""
point_list = list()
for line in self.get_lines():
point_list.append(line.get_middle())
point_ref = sum(array(point_list)) / len(point_list)
if is_set:
self.point_ref = point_ref
return point_ref
|
normal
|
{
"blob_id": "b7721e95cfb509a7c0c6ccdffa3a8ca2c6bd6033",
"index": 6713,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef comp_point_ref(self, is_set=False):\n \"\"\"Compute the point ref of the Surface\n\n Parameters\n ----------\n self : SurfLine\n A SurfLine object\n is_set: bool\n True to update the point_ref property\n\n Returns\n -------\n point_ref : complex\n the reference point of the surface\n \"\"\"\n point_list = list()\n for line in self.get_lines():\n point_list.append(line.get_middle())\n point_ref = sum(array(point_list)) / len(point_list)\n if is_set:\n self.point_ref = point_ref\n return point_ref\n",
"step-3": "from numpy import array, sum\n\n\ndef comp_point_ref(self, is_set=False):\n \"\"\"Compute the point ref of the Surface\n\n Parameters\n ----------\n self : SurfLine\n A SurfLine object\n is_set: bool\n True to update the point_ref property\n\n Returns\n -------\n point_ref : complex\n the reference point of the surface\n \"\"\"\n point_list = list()\n for line in self.get_lines():\n point_list.append(line.get_middle())\n point_ref = sum(array(point_list)) / len(point_list)\n if is_set:\n self.point_ref = point_ref\n return point_ref\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(rak * int(n))
<|reserved_special_token_1|>
rak = 'hello\n'
n = input()
print(rak * int(n))
<|reserved_special_token_1|>
rak="hello\n"
n=input()
print(rak * int(n))
|
flexible
|
{
"blob_id": "b0e4042ac4ed54cafedb9e53244c164527559e39",
"index": 5406,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(rak * int(n))\n",
"step-3": "rak = 'hello\\n'\nn = input()\nprint(rak * int(n))\n",
"step-4": "rak=\"hello\\n\"\nn=input()\nprint(rak * int(n)) \n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Trophy(AnimatedSprite):
def __init__(self, position, image_dict, hold_for_n_frames=3):
super().__init__(position, image_dict, hold_for_n_frames)
self.initial_position = position
self.frames_alive = 0
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Trophy(AnimatedSprite):
def __init__(self, position, image_dict, hold_for_n_frames=3):
super().__init__(position, image_dict, hold_for_n_frames)
self.initial_position = position
self.frames_alive = 0
def update(self):
super().next_frame()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
images_path = sorted(glob.glob('./resources/trophy_sparkle_*.png'))
trophy_im_dict = {'sparkle': [pygame.transform.scale(pygame.image.load(
img_path), (400, 400)) for img_path in images_path]}
class Trophy(AnimatedSprite):
def __init__(self, position, image_dict, hold_for_n_frames=3):
super().__init__(position, image_dict, hold_for_n_frames)
self.initial_position = position
self.frames_alive = 0
def update(self):
super().next_frame()
<|reserved_special_token_1|>
import pygame
import numpy as np
import glob
from entities.base import AnimatedSprite
images_path = sorted(glob.glob('./resources/trophy_sparkle_*.png'))
trophy_im_dict = {'sparkle': [pygame.transform.scale(pygame.image.load(
img_path), (400, 400)) for img_path in images_path]}
class Trophy(AnimatedSprite):
def __init__(self, position, image_dict, hold_for_n_frames=3):
super().__init__(position, image_dict, hold_for_n_frames)
self.initial_position = position
self.frames_alive = 0
def update(self):
super().next_frame()
<|reserved_special_token_1|>
import pygame
import numpy as np
import glob
from entities.base import AnimatedSprite
images_path = sorted(glob.glob('./resources/trophy_sparkle_*.png'))
trophy_im_dict = {'sparkle':[pygame.transform.scale(pygame.image.load(img_path),(400,400)) for img_path in images_path]}
class Trophy(AnimatedSprite):
def __init__(self, position, image_dict, hold_for_n_frames=3,):
super().__init__(position, image_dict, hold_for_n_frames)
self.initial_position = position
self.frames_alive = 0
def update(self):
super().next_frame()
|
flexible
|
{
"blob_id": "883cb1e3ea227bb5ac5aa3b4348336ab1a7fba70",
"index": 3476,
"step-1": "<mask token>\n\n\nclass Trophy(AnimatedSprite):\n\n def __init__(self, position, image_dict, hold_for_n_frames=3):\n super().__init__(position, image_dict, hold_for_n_frames)\n self.initial_position = position\n self.frames_alive = 0\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Trophy(AnimatedSprite):\n\n def __init__(self, position, image_dict, hold_for_n_frames=3):\n super().__init__(position, image_dict, hold_for_n_frames)\n self.initial_position = position\n self.frames_alive = 0\n\n def update(self):\n super().next_frame()\n",
"step-3": "<mask token>\nimages_path = sorted(glob.glob('./resources/trophy_sparkle_*.png'))\ntrophy_im_dict = {'sparkle': [pygame.transform.scale(pygame.image.load(\n img_path), (400, 400)) for img_path in images_path]}\n\n\nclass Trophy(AnimatedSprite):\n\n def __init__(self, position, image_dict, hold_for_n_frames=3):\n super().__init__(position, image_dict, hold_for_n_frames)\n self.initial_position = position\n self.frames_alive = 0\n\n def update(self):\n super().next_frame()\n",
"step-4": "import pygame\nimport numpy as np\nimport glob\nfrom entities.base import AnimatedSprite\nimages_path = sorted(glob.glob('./resources/trophy_sparkle_*.png'))\ntrophy_im_dict = {'sparkle': [pygame.transform.scale(pygame.image.load(\n img_path), (400, 400)) for img_path in images_path]}\n\n\nclass Trophy(AnimatedSprite):\n\n def __init__(self, position, image_dict, hold_for_n_frames=3):\n super().__init__(position, image_dict, hold_for_n_frames)\n self.initial_position = position\n self.frames_alive = 0\n\n def update(self):\n super().next_frame()\n",
"step-5": "import pygame\nimport numpy as np\nimport glob\nfrom entities.base import AnimatedSprite\n\n\nimages_path = sorted(glob.glob('./resources/trophy_sparkle_*.png'))\ntrophy_im_dict = {'sparkle':[pygame.transform.scale(pygame.image.load(img_path),(400,400)) for img_path in images_path]}\n\nclass Trophy(AnimatedSprite):\n def __init__(self, position, image_dict, hold_for_n_frames=3,):\n super().__init__(position, image_dict, hold_for_n_frames)\n self.initial_position = position\n self.frames_alive = 0\n\n def update(self):\n super().next_frame()\n\n\n\n\n\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import re
from prometheus_client.core import GaugeMetricFamily
class ArrayHardwareMetrics:
def __init__(self, fa):
self.fa = fa
self.chassis_health = None
self.controller_health = None
self.component_health = None
self.temperature = None
self.temperature = None
def _array_hardware_status(self):
"""Collect information about all system sensors."""
data = self.fa.get_hardware_status()
self.chassis_health = GaugeMetricFamily(
'purefa_hardware_chassis_health',
'FlashArray hardware chassis health status')
self.controller_health = GaugeMetricFamily(
'purefa_hardware_controller_health',
'FlashArray hardware controller health status',
labels=['controller'])
self.component_health = GaugeMetricFamily(
'purefa_hardware_component_health',
'FlashArray hardware component health status',
labels=['chassis', 'controller', 'component',
'index'])
self.temperature = GaugeMetricFamily(
'purefa_hardware_temperature_celsius',
'FlashArray hardware temperature sensors',
labels=['chassis', 'controller',
'sensor'])
self.power = GaugeMetricFamily(
'purefa_hardware_power_volts',
'FlashArray hardware power supply voltage',
labels=['chassis', 'power_supply'])
re_chassis = re.compile(r"^CH(\d+)$")
re_controller = re.compile(r"^CT(\d+)$")
re_component = re.compile(r"^(CH|CT)(\d+)\.([A-Z]+)([0-9]+)$")
for comp in data:
if (comp['status'] == 'not_installed'):
continue
component_name = comp['name']
component_state = 1 if (comp['status'] == 'ok') else 0
# Chassis
if re.match(r"^CH\d+$", component_name):
detail = re_chassis.match(component_name)
c_index = detail.group(1)
self.chassis_health.add_metric([c_index], component_state)
continue
# Controller
elif re.match(r"^CT\d+$", component_name):
detail = re_controller.match(component_name)
c_index = detail.group(1)
self.controller_health.add_metric([c_index], component_state)
continue
# Components
elif re.match(r"^C(H|T)\d+\.[A-Z]+[0-9]+$", component_name):
detail = re_component.match(component_name)
c_base = detail.group(1)
c_base_index = detail.group(2)
c_type = detail.group(3)
c_index = detail.group(4)
if c_base == 'CH':
# Chassis-based
labelset = [c_base_index, '', c_type, c_index]
else:
# Controller-based
labelset = ['', c_base_index, c_type, c_index]
# Component health status
self.component_health.add_metric(
labels=labelset, value=component_state)
if c_type.lower() == 'tmp':
# Additional metric for temperature
if c_base == 'CH':
self.temperature.add_metric(
[c_base_index, '', c_index], float(comp['temperature']))
else:
self.temperature.add_metric(
['', c_base_index, c_index], float(comp['temperature']))
elif c_type.lower() == 'pwr':
# Additional metric for voltage level
if comp['voltage'] is not None:
self.power.add_metric([c_base_index, c_index],
float(comp['voltage']))
def get_metrics(self):
self._array_hardware_status()
yield self.chassis_health
yield self.controller_health
yield self.component_health
yield self.temperature
yield self.power
|
normal
|
{
"blob_id": "527d514cbad0916fecfe0da68de04d3b130d94c7",
"index": 5156,
"step-1": "<mask token>\n\n\nclass ArrayHardwareMetrics:\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ArrayHardwareMetrics:\n\n def __init__(self, fa):\n self.fa = fa\n self.chassis_health = None\n self.controller_health = None\n self.component_health = None\n self.temperature = None\n self.temperature = None\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass ArrayHardwareMetrics:\n\n def __init__(self, fa):\n self.fa = fa\n self.chassis_health = None\n self.controller_health = None\n self.component_health = None\n self.temperature = None\n self.temperature = None\n <mask token>\n\n def get_metrics(self):\n self._array_hardware_status()\n yield self.chassis_health\n yield self.controller_health\n yield self.component_health\n yield self.temperature\n yield self.power\n",
"step-4": "import re\nfrom prometheus_client.core import GaugeMetricFamily\n\n\nclass ArrayHardwareMetrics:\n\n def __init__(self, fa):\n self.fa = fa\n self.chassis_health = None\n self.controller_health = None\n self.component_health = None\n self.temperature = None\n self.temperature = None\n\n def _array_hardware_status(self):\n \"\"\"Collect information about all system sensors.\"\"\"\n data = self.fa.get_hardware_status()\n self.chassis_health = GaugeMetricFamily(\n 'purefa_hardware_chassis_health',\n 'FlashArray hardware chassis health status')\n self.controller_health = GaugeMetricFamily(\n 'purefa_hardware_controller_health',\n 'FlashArray hardware controller health status', labels=[\n 'controller'])\n self.component_health = GaugeMetricFamily(\n 'purefa_hardware_component_health',\n 'FlashArray hardware component health status', labels=[\n 'chassis', 'controller', 'component', 'index'])\n self.temperature = GaugeMetricFamily(\n 'purefa_hardware_temperature_celsius',\n 'FlashArray hardware temperature sensors', labels=['chassis',\n 'controller', 'sensor'])\n self.power = GaugeMetricFamily('purefa_hardware_power_volts',\n 'FlashArray hardware power supply voltage', labels=['chassis',\n 'power_supply'])\n re_chassis = re.compile('^CH(\\\\d+)$')\n re_controller = re.compile('^CT(\\\\d+)$')\n re_component = re.compile('^(CH|CT)(\\\\d+)\\\\.([A-Z]+)([0-9]+)$')\n for comp in data:\n if comp['status'] == 'not_installed':\n continue\n component_name = comp['name']\n component_state = 1 if comp['status'] == 'ok' else 0\n if re.match('^CH\\\\d+$', component_name):\n detail = re_chassis.match(component_name)\n c_index = detail.group(1)\n self.chassis_health.add_metric([c_index], component_state)\n continue\n elif re.match('^CT\\\\d+$', component_name):\n detail = re_controller.match(component_name)\n c_index = detail.group(1)\n self.controller_health.add_metric([c_index], component_state)\n continue\n elif re.match('^C(H|T)\\\\d+\\\\.[A-Z]+[0-9]+$', 
component_name):\n detail = re_component.match(component_name)\n c_base = detail.group(1)\n c_base_index = detail.group(2)\n c_type = detail.group(3)\n c_index = detail.group(4)\n if c_base == 'CH':\n labelset = [c_base_index, '', c_type, c_index]\n else:\n labelset = ['', c_base_index, c_type, c_index]\n self.component_health.add_metric(labels=labelset, value=\n component_state)\n if c_type.lower() == 'tmp':\n if c_base == 'CH':\n self.temperature.add_metric([c_base_index, '',\n c_index], float(comp['temperature']))\n else:\n self.temperature.add_metric(['', c_base_index,\n c_index], float(comp['temperature']))\n elif c_type.lower() == 'pwr':\n if comp['voltage'] is not None:\n self.power.add_metric([c_base_index, c_index],\n float(comp['voltage']))\n\n def get_metrics(self):\n self._array_hardware_status()\n yield self.chassis_health\n yield self.controller_health\n yield self.component_health\n yield self.temperature\n yield self.power\n",
"step-5": "import re\nfrom prometheus_client.core import GaugeMetricFamily\n\n\nclass ArrayHardwareMetrics:\n\n def __init__(self, fa):\n self.fa = fa\n self.chassis_health = None\n self.controller_health = None\n self.component_health = None\n self.temperature = None\n self.temperature = None\n\n def _array_hardware_status(self):\n \"\"\"Collect information about all system sensors.\"\"\"\n data = self.fa.get_hardware_status()\n\n self.chassis_health = GaugeMetricFamily(\n 'purefa_hardware_chassis_health',\n 'FlashArray hardware chassis health status')\n self.controller_health = GaugeMetricFamily(\n 'purefa_hardware_controller_health',\n 'FlashArray hardware controller health status',\n labels=['controller'])\n self.component_health = GaugeMetricFamily(\n 'purefa_hardware_component_health',\n 'FlashArray hardware component health status',\n labels=['chassis', 'controller', 'component',\n 'index'])\n self.temperature = GaugeMetricFamily(\n 'purefa_hardware_temperature_celsius',\n 'FlashArray hardware temperature sensors',\n labels=['chassis', 'controller',\n 'sensor'])\n self.power = GaugeMetricFamily(\n 'purefa_hardware_power_volts',\n 'FlashArray hardware power supply voltage',\n labels=['chassis', 'power_supply'])\n\n re_chassis = re.compile(r\"^CH(\\d+)$\")\n re_controller = re.compile(r\"^CT(\\d+)$\")\n re_component = re.compile(r\"^(CH|CT)(\\d+)\\.([A-Z]+)([0-9]+)$\")\n\n for comp in data:\n if (comp['status'] == 'not_installed'):\n continue\n component_name = comp['name']\n component_state = 1 if (comp['status'] == 'ok') else 0\n\n # Chassis\n if re.match(r\"^CH\\d+$\", component_name):\n detail = re_chassis.match(component_name)\n c_index = detail.group(1)\n self.chassis_health.add_metric([c_index], component_state)\n continue\n # Controller\n elif re.match(r\"^CT\\d+$\", component_name):\n detail = re_controller.match(component_name)\n c_index = detail.group(1)\n self.controller_health.add_metric([c_index], component_state)\n continue\n # Components\n elif 
re.match(r\"^C(H|T)\\d+\\.[A-Z]+[0-9]+$\", component_name):\n detail = re_component.match(component_name)\n c_base = detail.group(1)\n c_base_index = detail.group(2)\n c_type = detail.group(3)\n c_index = detail.group(4)\n\n if c_base == 'CH':\n # Chassis-based\n labelset = [c_base_index, '', c_type, c_index]\n else:\n # Controller-based\n labelset = ['', c_base_index, c_type, c_index]\n\n # Component health status\n self.component_health.add_metric(\n labels=labelset, value=component_state)\n\n if c_type.lower() == 'tmp':\n # Additional metric for temperature\n if c_base == 'CH':\n self.temperature.add_metric(\n [c_base_index, '', c_index], float(comp['temperature']))\n else:\n self.temperature.add_metric(\n ['', c_base_index, c_index], float(comp['temperature']))\n elif c_type.lower() == 'pwr':\n # Additional metric for voltage level\n if comp['voltage'] is not None:\n self.power.add_metric([c_base_index, c_index],\n float(comp['voltage']))\n\n def get_metrics(self):\n self._array_hardware_status()\n yield self.chassis_health\n yield self.controller_health\n yield self.component_health\n yield self.temperature\n yield self.power\n",
"step-ids": [
1,
2,
3,
5,
6
]
}
|
[
1,
2,
3,
5,
6
] |
<|reserved_special_token_0|>
class RemoteSubroutineRunner(ISubroutineRunner):
<|reserved_special_token_0|>
def execute_charge_subroutine(self, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_charge_subroutine,
callback)
def execute_go_home_subroutine(self, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_go_home_subroutine,
callback)
def execute_read_qr_subroutine(self, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_read_qr_subroutine,
callback)
def execute_grab_subroutine(self, target: str, callback: CommandCallback
) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_grab_subroutine,
callback, target=target)
def execute_drop_subroutine(self, target: str, callback: CommandCallback
) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_drop_subroutine,
callback, target=target)
<|reserved_special_token_0|>
def execute_directional_movement(self, direction: str, speed: str,
distance: float, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.
execute_directional_movement, callback, direction=direction,
speed=speed, distance=distance)
def execute_rotational_movement(self, angle: float, callback:
CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.
execute_rotational_movement, callback, angle=angle)
def execute_activate_magnet(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_activate_magnet,
callback)
def execute_deactivate_magnet(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_deactivate_magnet,
callback)
<|reserved_special_token_0|>
def execute_update_directions_subroutine(self, callback: CommandCallback
) ->None:
self._start_command(self._remote_service.execute_update_directions,
callback)
<|reserved_special_token_0|>
def execute_look_down(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_look_down, callback)
<|reserved_special_token_0|>
def _command_done(self, status: CommandStatus) ->None:
with self._busy_lock:
self._busy = False
self._callback(status)
def _start_command(self, function: Callable[[Any], None], callback:
CommandCallback, **kwargs) ->None:
"""
:raises BlockingIOError: command already running
"""
with self._busy_lock:
if self._busy:
raise BlockingIOError()
self._busy = True
self._callback = callback
kwargs['callback'] = self._command_done
function(**kwargs)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RemoteSubroutineRunner(ISubroutineRunner):
<|reserved_special_token_0|>
def execute_charge_subroutine(self, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_charge_subroutine,
callback)
def execute_go_home_subroutine(self, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_go_home_subroutine,
callback)
def execute_read_qr_subroutine(self, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_read_qr_subroutine,
callback)
def execute_grab_subroutine(self, target: str, callback: CommandCallback
) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_grab_subroutine,
callback, target=target)
def execute_drop_subroutine(self, target: str, callback: CommandCallback
) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_drop_subroutine,
callback, target=target)
<|reserved_special_token_0|>
def execute_directional_movement(self, direction: str, speed: str,
distance: float, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.
execute_directional_movement, callback, direction=direction,
speed=speed, distance=distance)
def execute_rotational_movement(self, angle: float, callback:
CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.
execute_rotational_movement, callback, angle=angle)
def execute_activate_magnet(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_activate_magnet,
callback)
def execute_deactivate_magnet(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_deactivate_magnet,
callback)
<|reserved_special_token_0|>
def execute_update_directions_subroutine(self, callback: CommandCallback
) ->None:
self._start_command(self._remote_service.execute_update_directions,
callback)
def execute_championship_subroutine(self, callback: CommandCallback):
self._start_command(self._remote_service.execute_championship, callback
)
def execute_look_down(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_look_down, callback)
<|reserved_special_token_0|>
def _command_done(self, status: CommandStatus) ->None:
with self._busy_lock:
self._busy = False
self._callback(status)
def _start_command(self, function: Callable[[Any], None], callback:
CommandCallback, **kwargs) ->None:
"""
:raises BlockingIOError: command already running
"""
with self._busy_lock:
if self._busy:
raise BlockingIOError()
self._busy = True
self._callback = callback
kwargs['callback'] = self._command_done
function(**kwargs)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RemoteSubroutineRunner(ISubroutineRunner):
<|reserved_special_token_0|>
def execute_charge_subroutine(self, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_charge_subroutine,
callback)
def execute_go_home_subroutine(self, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_go_home_subroutine,
callback)
def execute_read_qr_subroutine(self, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_read_qr_subroutine,
callback)
def execute_grab_subroutine(self, target: str, callback: CommandCallback
) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_grab_subroutine,
callback, target=target)
def execute_drop_subroutine(self, target: str, callback: CommandCallback
) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_drop_subroutine,
callback, target=target)
<|reserved_special_token_0|>
def execute_directional_movement(self, direction: str, speed: str,
distance: float, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.
execute_directional_movement, callback, direction=direction,
speed=speed, distance=distance)
def execute_rotational_movement(self, angle: float, callback:
CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.
execute_rotational_movement, callback, angle=angle)
def execute_activate_magnet(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_activate_magnet,
callback)
def execute_deactivate_magnet(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_deactivate_magnet,
callback)
def execute_discharge_magnet(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_discharge_magnet,
callback)
def execute_update_directions_subroutine(self, callback: CommandCallback
) ->None:
self._start_command(self._remote_service.execute_update_directions,
callback)
def execute_championship_subroutine(self, callback: CommandCallback):
self._start_command(self._remote_service.execute_championship, callback
)
def execute_look_down(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_look_down, callback)
def execute_look_ahead(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_look_ahead, callback)
def _command_done(self, status: CommandStatus) ->None:
with self._busy_lock:
self._busy = False
self._callback(status)
def _start_command(self, function: Callable[[Any], None], callback:
CommandCallback, **kwargs) ->None:
"""
:raises BlockingIOError: command already running
"""
with self._busy_lock:
if self._busy:
raise BlockingIOError()
self._busy = True
self._callback = callback
kwargs['callback'] = self._command_done
function(**kwargs)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RemoteSubroutineRunner(ISubroutineRunner):
def __init__(self, remote_service: RemoteService) ->None:
self._remote_service = remote_service
self._callback: CommandCallback = None
self._busy = False
self._busy_lock = Lock()
def execute_charge_subroutine(self, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_charge_subroutine,
callback)
def execute_go_home_subroutine(self, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_go_home_subroutine,
callback)
def execute_read_qr_subroutine(self, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_read_qr_subroutine,
callback)
def execute_grab_subroutine(self, target: str, callback: CommandCallback
) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_grab_subroutine,
callback, target=target)
def execute_drop_subroutine(self, target: str, callback: CommandCallback
) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_drop_subroutine,
callback, target=target)
def execute_switch_light_subroutine(self, callback: CommandCallback
) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.
execute_switch_light_subroutine, callback)
def execute_directional_movement(self, direction: str, speed: str,
distance: float, callback: CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.
execute_directional_movement, callback, direction=direction,
speed=speed, distance=distance)
def execute_rotational_movement(self, angle: float, callback:
CommandCallback) ->None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.
execute_rotational_movement, callback, angle=angle)
def execute_activate_magnet(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_activate_magnet,
callback)
def execute_deactivate_magnet(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_deactivate_magnet,
callback)
def execute_discharge_magnet(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_discharge_magnet,
callback)
def execute_update_directions_subroutine(self, callback: CommandCallback
) ->None:
self._start_command(self._remote_service.execute_update_directions,
callback)
def execute_championship_subroutine(self, callback: CommandCallback):
self._start_command(self._remote_service.execute_championship, callback
)
def execute_look_down(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_look_down, callback)
def execute_look_ahead(self, callback: CommandCallback) ->None:
self._start_command(self._remote_service.execute_look_ahead, callback)
def _command_done(self, status: CommandStatus) ->None:
with self._busy_lock:
self._busy = False
self._callback(status)
def _start_command(self, function: Callable[[Any], None], callback:
CommandCallback, **kwargs) ->None:
"""
:raises BlockingIOError: command already running
"""
with self._busy_lock:
if self._busy:
raise BlockingIOError()
self._busy = True
self._callback = callback
kwargs['callback'] = self._command_done
function(**kwargs)
<|reserved_special_token_1|>
from threading import Lock
from typing import Callable, Any
from remote.domain.commandCallback import CommandCallback
from remote.domain.commandStatus import CommandStatus
from remote.service.remoteService import RemoteService
from ui.domain.subroutine.iSubroutineRunner import ISubroutineRunner
class RemoteSubroutineRunner(ISubroutineRunner):
def __init__(self, remote_service: RemoteService) -> None:
self._remote_service = remote_service
self._callback: CommandCallback = None
self._busy = False
self._busy_lock = Lock()
def execute_charge_subroutine(self, callback: CommandCallback) -> None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_charge_subroutine, callback)
def execute_go_home_subroutine(self, callback: CommandCallback) -> None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_go_home_subroutine, callback)
def execute_read_qr_subroutine(self, callback: CommandCallback) -> None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_read_qr_subroutine, callback)
def execute_grab_subroutine(self, target: str, callback: CommandCallback) -> None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_grab_subroutine, callback, target=target)
def execute_drop_subroutine(self, target: str, callback: CommandCallback) -> None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_drop_subroutine, callback, target=target)
def execute_switch_light_subroutine(self, callback: CommandCallback) -> None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_switch_light_subroutine, callback)
def execute_directional_movement(self, direction: str, speed: str, distance: float,
callback: CommandCallback) -> None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_directional_movement, callback,
direction=direction, speed=speed, distance=distance)
def execute_rotational_movement(self, angle: float, callback: CommandCallback) -> None:
"""
:raises BlockingIOError: command already running
"""
self._start_command(self._remote_service.execute_rotational_movement, callback, angle=angle)
def execute_activate_magnet(self, callback: CommandCallback) -> None:
self._start_command(self._remote_service.execute_activate_magnet, callback)
def execute_deactivate_magnet(self, callback: CommandCallback) -> None:
self._start_command(self._remote_service.execute_deactivate_magnet, callback)
def execute_discharge_magnet(self, callback: CommandCallback) -> None:
self._start_command(self._remote_service.execute_discharge_magnet, callback)
def execute_update_directions_subroutine(self, callback: CommandCallback) -> None:
self._start_command(self._remote_service.execute_update_directions, callback)
def execute_championship_subroutine(self, callback: CommandCallback):
self._start_command(self._remote_service.execute_championship, callback)
def execute_look_down(self, callback: CommandCallback) -> None:
self._start_command(self._remote_service.execute_look_down, callback)
def execute_look_ahead(self, callback: CommandCallback) -> None:
self._start_command(self._remote_service.execute_look_ahead, callback)
def _command_done(self, status: CommandStatus) -> None:
with self._busy_lock:
self._busy = False
self._callback(status)
def _start_command(self, function: Callable[[Any], None], callback: CommandCallback, **kwargs) -> None:
"""
:raises BlockingIOError: command already running
"""
with self._busy_lock:
if self._busy:
raise BlockingIOError()
self._busy = True
self._callback = callback
kwargs["callback"] = self._command_done
function(**kwargs)
|
flexible
|
{
"blob_id": "75270fb4ed059f134b47b8937717cb7fe05d9499",
"index": 8833,
"step-1": "<mask token>\n\n\nclass RemoteSubroutineRunner(ISubroutineRunner):\n <mask token>\n\n def execute_charge_subroutine(self, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_charge_subroutine,\n callback)\n\n def execute_go_home_subroutine(self, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_go_home_subroutine,\n callback)\n\n def execute_read_qr_subroutine(self, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_read_qr_subroutine,\n callback)\n\n def execute_grab_subroutine(self, target: str, callback: CommandCallback\n ) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_grab_subroutine,\n callback, target=target)\n\n def execute_drop_subroutine(self, target: str, callback: CommandCallback\n ) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_drop_subroutine,\n callback, target=target)\n <mask token>\n\n def execute_directional_movement(self, direction: str, speed: str,\n distance: float, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.\n execute_directional_movement, callback, direction=direction,\n speed=speed, distance=distance)\n\n def execute_rotational_movement(self, angle: float, callback:\n CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.\n execute_rotational_movement, callback, angle=angle)\n\n def execute_activate_magnet(self, callback: CommandCallback) ->None:\n 
self._start_command(self._remote_service.execute_activate_magnet,\n callback)\n\n def execute_deactivate_magnet(self, callback: CommandCallback) ->None:\n self._start_command(self._remote_service.execute_deactivate_magnet,\n callback)\n <mask token>\n\n def execute_update_directions_subroutine(self, callback: CommandCallback\n ) ->None:\n self._start_command(self._remote_service.execute_update_directions,\n callback)\n <mask token>\n\n def execute_look_down(self, callback: CommandCallback) ->None:\n self._start_command(self._remote_service.execute_look_down, callback)\n <mask token>\n\n def _command_done(self, status: CommandStatus) ->None:\n with self._busy_lock:\n self._busy = False\n self._callback(status)\n\n def _start_command(self, function: Callable[[Any], None], callback:\n CommandCallback, **kwargs) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n with self._busy_lock:\n if self._busy:\n raise BlockingIOError()\n self._busy = True\n self._callback = callback\n kwargs['callback'] = self._command_done\n function(**kwargs)\n",
"step-2": "<mask token>\n\n\nclass RemoteSubroutineRunner(ISubroutineRunner):\n <mask token>\n\n def execute_charge_subroutine(self, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_charge_subroutine,\n callback)\n\n def execute_go_home_subroutine(self, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_go_home_subroutine,\n callback)\n\n def execute_read_qr_subroutine(self, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_read_qr_subroutine,\n callback)\n\n def execute_grab_subroutine(self, target: str, callback: CommandCallback\n ) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_grab_subroutine,\n callback, target=target)\n\n def execute_drop_subroutine(self, target: str, callback: CommandCallback\n ) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_drop_subroutine,\n callback, target=target)\n <mask token>\n\n def execute_directional_movement(self, direction: str, speed: str,\n distance: float, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.\n execute_directional_movement, callback, direction=direction,\n speed=speed, distance=distance)\n\n def execute_rotational_movement(self, angle: float, callback:\n CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.\n execute_rotational_movement, callback, angle=angle)\n\n def execute_activate_magnet(self, callback: CommandCallback) ->None:\n 
self._start_command(self._remote_service.execute_activate_magnet,\n callback)\n\n def execute_deactivate_magnet(self, callback: CommandCallback) ->None:\n self._start_command(self._remote_service.execute_deactivate_magnet,\n callback)\n <mask token>\n\n def execute_update_directions_subroutine(self, callback: CommandCallback\n ) ->None:\n self._start_command(self._remote_service.execute_update_directions,\n callback)\n\n def execute_championship_subroutine(self, callback: CommandCallback):\n self._start_command(self._remote_service.execute_championship, callback\n )\n\n def execute_look_down(self, callback: CommandCallback) ->None:\n self._start_command(self._remote_service.execute_look_down, callback)\n <mask token>\n\n def _command_done(self, status: CommandStatus) ->None:\n with self._busy_lock:\n self._busy = False\n self._callback(status)\n\n def _start_command(self, function: Callable[[Any], None], callback:\n CommandCallback, **kwargs) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n with self._busy_lock:\n if self._busy:\n raise BlockingIOError()\n self._busy = True\n self._callback = callback\n kwargs['callback'] = self._command_done\n function(**kwargs)\n",
"step-3": "<mask token>\n\n\nclass RemoteSubroutineRunner(ISubroutineRunner):\n <mask token>\n\n def execute_charge_subroutine(self, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_charge_subroutine,\n callback)\n\n def execute_go_home_subroutine(self, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_go_home_subroutine,\n callback)\n\n def execute_read_qr_subroutine(self, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_read_qr_subroutine,\n callback)\n\n def execute_grab_subroutine(self, target: str, callback: CommandCallback\n ) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_grab_subroutine,\n callback, target=target)\n\n def execute_drop_subroutine(self, target: str, callback: CommandCallback\n ) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_drop_subroutine,\n callback, target=target)\n <mask token>\n\n def execute_directional_movement(self, direction: str, speed: str,\n distance: float, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.\n execute_directional_movement, callback, direction=direction,\n speed=speed, distance=distance)\n\n def execute_rotational_movement(self, angle: float, callback:\n CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.\n execute_rotational_movement, callback, angle=angle)\n\n def execute_activate_magnet(self, callback: CommandCallback) ->None:\n 
self._start_command(self._remote_service.execute_activate_magnet,\n callback)\n\n def execute_deactivate_magnet(self, callback: CommandCallback) ->None:\n self._start_command(self._remote_service.execute_deactivate_magnet,\n callback)\n\n def execute_discharge_magnet(self, callback: CommandCallback) ->None:\n self._start_command(self._remote_service.execute_discharge_magnet,\n callback)\n\n def execute_update_directions_subroutine(self, callback: CommandCallback\n ) ->None:\n self._start_command(self._remote_service.execute_update_directions,\n callback)\n\n def execute_championship_subroutine(self, callback: CommandCallback):\n self._start_command(self._remote_service.execute_championship, callback\n )\n\n def execute_look_down(self, callback: CommandCallback) ->None:\n self._start_command(self._remote_service.execute_look_down, callback)\n\n def execute_look_ahead(self, callback: CommandCallback) ->None:\n self._start_command(self._remote_service.execute_look_ahead, callback)\n\n def _command_done(self, status: CommandStatus) ->None:\n with self._busy_lock:\n self._busy = False\n self._callback(status)\n\n def _start_command(self, function: Callable[[Any], None], callback:\n CommandCallback, **kwargs) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n with self._busy_lock:\n if self._busy:\n raise BlockingIOError()\n self._busy = True\n self._callback = callback\n kwargs['callback'] = self._command_done\n function(**kwargs)\n",
"step-4": "<mask token>\n\n\nclass RemoteSubroutineRunner(ISubroutineRunner):\n\n def __init__(self, remote_service: RemoteService) ->None:\n self._remote_service = remote_service\n self._callback: CommandCallback = None\n self._busy = False\n self._busy_lock = Lock()\n\n def execute_charge_subroutine(self, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_charge_subroutine,\n callback)\n\n def execute_go_home_subroutine(self, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_go_home_subroutine,\n callback)\n\n def execute_read_qr_subroutine(self, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_read_qr_subroutine,\n callback)\n\n def execute_grab_subroutine(self, target: str, callback: CommandCallback\n ) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_grab_subroutine,\n callback, target=target)\n\n def execute_drop_subroutine(self, target: str, callback: CommandCallback\n ) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_drop_subroutine,\n callback, target=target)\n\n def execute_switch_light_subroutine(self, callback: CommandCallback\n ) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.\n execute_switch_light_subroutine, callback)\n\n def execute_directional_movement(self, direction: str, speed: str,\n distance: float, callback: CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.\n execute_directional_movement, callback, 
direction=direction,\n speed=speed, distance=distance)\n\n def execute_rotational_movement(self, angle: float, callback:\n CommandCallback) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.\n execute_rotational_movement, callback, angle=angle)\n\n def execute_activate_magnet(self, callback: CommandCallback) ->None:\n self._start_command(self._remote_service.execute_activate_magnet,\n callback)\n\n def execute_deactivate_magnet(self, callback: CommandCallback) ->None:\n self._start_command(self._remote_service.execute_deactivate_magnet,\n callback)\n\n def execute_discharge_magnet(self, callback: CommandCallback) ->None:\n self._start_command(self._remote_service.execute_discharge_magnet,\n callback)\n\n def execute_update_directions_subroutine(self, callback: CommandCallback\n ) ->None:\n self._start_command(self._remote_service.execute_update_directions,\n callback)\n\n def execute_championship_subroutine(self, callback: CommandCallback):\n self._start_command(self._remote_service.execute_championship, callback\n )\n\n def execute_look_down(self, callback: CommandCallback) ->None:\n self._start_command(self._remote_service.execute_look_down, callback)\n\n def execute_look_ahead(self, callback: CommandCallback) ->None:\n self._start_command(self._remote_service.execute_look_ahead, callback)\n\n def _command_done(self, status: CommandStatus) ->None:\n with self._busy_lock:\n self._busy = False\n self._callback(status)\n\n def _start_command(self, function: Callable[[Any], None], callback:\n CommandCallback, **kwargs) ->None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n with self._busy_lock:\n if self._busy:\n raise BlockingIOError()\n self._busy = True\n self._callback = callback\n kwargs['callback'] = self._command_done\n function(**kwargs)\n",
"step-5": "from threading import Lock\nfrom typing import Callable, Any\n\nfrom remote.domain.commandCallback import CommandCallback\nfrom remote.domain.commandStatus import CommandStatus\nfrom remote.service.remoteService import RemoteService\nfrom ui.domain.subroutine.iSubroutineRunner import ISubroutineRunner\n\n\nclass RemoteSubroutineRunner(ISubroutineRunner):\n def __init__(self, remote_service: RemoteService) -> None:\n self._remote_service = remote_service\n self._callback: CommandCallback = None\n self._busy = False\n self._busy_lock = Lock()\n\n def execute_charge_subroutine(self, callback: CommandCallback) -> None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_charge_subroutine, callback)\n\n def execute_go_home_subroutine(self, callback: CommandCallback) -> None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_go_home_subroutine, callback)\n\n def execute_read_qr_subroutine(self, callback: CommandCallback) -> None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_read_qr_subroutine, callback)\n\n def execute_grab_subroutine(self, target: str, callback: CommandCallback) -> None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_grab_subroutine, callback, target=target)\n\n def execute_drop_subroutine(self, target: str, callback: CommandCallback) -> None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_drop_subroutine, callback, target=target)\n\n def execute_switch_light_subroutine(self, callback: CommandCallback) -> None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_switch_light_subroutine, callback)\n\n def 
execute_directional_movement(self, direction: str, speed: str, distance: float,\n callback: CommandCallback) -> None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_directional_movement, callback,\n direction=direction, speed=speed, distance=distance)\n\n def execute_rotational_movement(self, angle: float, callback: CommandCallback) -> None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n self._start_command(self._remote_service.execute_rotational_movement, callback, angle=angle)\n\n def execute_activate_magnet(self, callback: CommandCallback) -> None:\n self._start_command(self._remote_service.execute_activate_magnet, callback)\n\n def execute_deactivate_magnet(self, callback: CommandCallback) -> None:\n self._start_command(self._remote_service.execute_deactivate_magnet, callback)\n\n def execute_discharge_magnet(self, callback: CommandCallback) -> None:\n self._start_command(self._remote_service.execute_discharge_magnet, callback)\n\n def execute_update_directions_subroutine(self, callback: CommandCallback) -> None:\n self._start_command(self._remote_service.execute_update_directions, callback)\n\n def execute_championship_subroutine(self, callback: CommandCallback):\n self._start_command(self._remote_service.execute_championship, callback)\n\n def execute_look_down(self, callback: CommandCallback) -> None:\n self._start_command(self._remote_service.execute_look_down, callback)\n\n def execute_look_ahead(self, callback: CommandCallback) -> None:\n self._start_command(self._remote_service.execute_look_ahead, callback)\n\n def _command_done(self, status: CommandStatus) -> None:\n with self._busy_lock:\n self._busy = False\n self._callback(status)\n\n def _start_command(self, function: Callable[[Any], None], callback: CommandCallback, **kwargs) -> None:\n \"\"\"\n\n :raises BlockingIOError: command already running\n \"\"\"\n with self._busy_lock:\n if self._busy:\n 
raise BlockingIOError()\n self._busy = True\n self._callback = callback\n kwargs[\"callback\"] = self._command_done\n function(**kwargs)\n",
"step-ids": [
14,
15,
17,
19,
21
]
}
|
[
14,
15,
17,
19,
21
] |
import logging
from exceptions.invalid_api_usage import InvalidAPIUsage
from wgadget.endpoints.ep import EP
class EPInfoLight(EP):
NAME = 'info_light'
URL = '/info'
URL_ROUTE_PAR_PAYLOAD = '/'
URL_ROUTE_PAR_URL = '/actuatorId/<actuatorId>'
METHOD = 'GET'
ATTR_ACTUATOR_ID = 'actuatorId'
def __init__(self, web_gadget):
self.web_gadget = web_gadget
def getRequestDescriptionWithPayloadParameters(self):
ret = {}
ret['name'] = EPInfoLight.NAME
ret['url'] = EPInfoLight.URL_ROUTE_PAR_PAYLOAD
ret['method'] = EPInfoLight.METHOD
ret['payload-desc'] = [{},{}]
ret['payload-desc'][0]['attribute'] = EPInfoLight.ATTR_ACTUATOR_ID
ret['payload-desc'][0]['type'] = 'integer'
ret['payload-desc'][0]['value'] = 1
return ret
def executeByParameters(self, actuatorId) -> dict:
payload = {}
payload[EPInfoLight.ATTR_ACTUATOR_ID] = int(actuatorId)
return self.executeByPayload(payload)
def executeByPayload(self, payload) -> dict:
actuatorId = int(payload[EPInfoLight.ATTR_ACTUATOR_ID])
if actuatorId == self.web_gadget.getLightId():
actualValue = self.web_gadget.fetchSavedLightValue()
logging.debug( "WEB request: {0} {1} ('{2}': {3})".format(
EPInfoLight.METHOD, EPInfoLight.URL,
EPInfoLight.ATTR_ACTUATOR_ID, actuatorId)
)
return {"value": actualValue, "thread": self.web_gadget.getThreadControllerStatus()}
# return {"value": actualValue, "thread": {"inProgress": False, "id":1}}
else:
raise InvalidAPIUsage("No such actuator: {0} or value: {1}".format(actuatorId, value), error_code=404)
|
normal
|
{
"blob_id": "e5abab3f718bbbd25dcfc49290383203d53248c3",
"index": 9464,
"step-1": "<mask token>\n\n\nclass EPInfoLight(EP):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass EPInfoLight(EP):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def getRequestDescriptionWithPayloadParameters(self):\n ret = {}\n ret['name'] = EPInfoLight.NAME\n ret['url'] = EPInfoLight.URL_ROUTE_PAR_PAYLOAD\n ret['method'] = EPInfoLight.METHOD\n ret['payload-desc'] = [{}, {}]\n ret['payload-desc'][0]['attribute'] = EPInfoLight.ATTR_ACTUATOR_ID\n ret['payload-desc'][0]['type'] = 'integer'\n ret['payload-desc'][0]['value'] = 1\n return ret\n\n def executeByParameters(self, actuatorId) ->dict:\n payload = {}\n payload[EPInfoLight.ATTR_ACTUATOR_ID] = int(actuatorId)\n return self.executeByPayload(payload)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass EPInfoLight(EP):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def getRequestDescriptionWithPayloadParameters(self):\n ret = {}\n ret['name'] = EPInfoLight.NAME\n ret['url'] = EPInfoLight.URL_ROUTE_PAR_PAYLOAD\n ret['method'] = EPInfoLight.METHOD\n ret['payload-desc'] = [{}, {}]\n ret['payload-desc'][0]['attribute'] = EPInfoLight.ATTR_ACTUATOR_ID\n ret['payload-desc'][0]['type'] = 'integer'\n ret['payload-desc'][0]['value'] = 1\n return ret\n\n def executeByParameters(self, actuatorId) ->dict:\n payload = {}\n payload[EPInfoLight.ATTR_ACTUATOR_ID] = int(actuatorId)\n return self.executeByPayload(payload)\n\n def executeByPayload(self, payload) ->dict:\n actuatorId = int(payload[EPInfoLight.ATTR_ACTUATOR_ID])\n if actuatorId == self.web_gadget.getLightId():\n actualValue = self.web_gadget.fetchSavedLightValue()\n logging.debug(\"WEB request: {0} {1} ('{2}': {3})\".format(\n EPInfoLight.METHOD, EPInfoLight.URL, EPInfoLight.\n ATTR_ACTUATOR_ID, actuatorId))\n return {'value': actualValue, 'thread': self.web_gadget.\n getThreadControllerStatus()}\n else:\n raise InvalidAPIUsage('No such actuator: {0} or value: {1}'.\n format(actuatorId, value), error_code=404)\n",
"step-4": "<mask token>\n\n\nclass EPInfoLight(EP):\n NAME = 'info_light'\n URL = '/info'\n URL_ROUTE_PAR_PAYLOAD = '/'\n URL_ROUTE_PAR_URL = '/actuatorId/<actuatorId>'\n METHOD = 'GET'\n ATTR_ACTUATOR_ID = 'actuatorId'\n\n def __init__(self, web_gadget):\n self.web_gadget = web_gadget\n\n def getRequestDescriptionWithPayloadParameters(self):\n ret = {}\n ret['name'] = EPInfoLight.NAME\n ret['url'] = EPInfoLight.URL_ROUTE_PAR_PAYLOAD\n ret['method'] = EPInfoLight.METHOD\n ret['payload-desc'] = [{}, {}]\n ret['payload-desc'][0]['attribute'] = EPInfoLight.ATTR_ACTUATOR_ID\n ret['payload-desc'][0]['type'] = 'integer'\n ret['payload-desc'][0]['value'] = 1\n return ret\n\n def executeByParameters(self, actuatorId) ->dict:\n payload = {}\n payload[EPInfoLight.ATTR_ACTUATOR_ID] = int(actuatorId)\n return self.executeByPayload(payload)\n\n def executeByPayload(self, payload) ->dict:\n actuatorId = int(payload[EPInfoLight.ATTR_ACTUATOR_ID])\n if actuatorId == self.web_gadget.getLightId():\n actualValue = self.web_gadget.fetchSavedLightValue()\n logging.debug(\"WEB request: {0} {1} ('{2}': {3})\".format(\n EPInfoLight.METHOD, EPInfoLight.URL, EPInfoLight.\n ATTR_ACTUATOR_ID, actuatorId))\n return {'value': actualValue, 'thread': self.web_gadget.\n getThreadControllerStatus()}\n else:\n raise InvalidAPIUsage('No such actuator: {0} or value: {1}'.\n format(actuatorId, value), error_code=404)\n",
"step-5": "\nimport logging\nfrom exceptions.invalid_api_usage import InvalidAPIUsage\nfrom wgadget.endpoints.ep import EP\n\nclass EPInfoLight(EP):\n\n NAME = 'info_light'\n URL = '/info'\n\n URL_ROUTE_PAR_PAYLOAD = '/'\n URL_ROUTE_PAR_URL = '/actuatorId/<actuatorId>'\n\n METHOD = 'GET'\n\n ATTR_ACTUATOR_ID = 'actuatorId'\n\n def __init__(self, web_gadget):\n self.web_gadget = web_gadget\n\n def getRequestDescriptionWithPayloadParameters(self):\n\n ret = {}\n ret['name'] = EPInfoLight.NAME\n ret['url'] = EPInfoLight.URL_ROUTE_PAR_PAYLOAD\n ret['method'] = EPInfoLight.METHOD\n\n ret['payload-desc'] = [{},{}]\n\n ret['payload-desc'][0]['attribute'] = EPInfoLight.ATTR_ACTUATOR_ID\n ret['payload-desc'][0]['type'] = 'integer'\n ret['payload-desc'][0]['value'] = 1\n\n return ret\n\n def executeByParameters(self, actuatorId) -> dict:\n payload = {}\n payload[EPInfoLight.ATTR_ACTUATOR_ID] = int(actuatorId)\n return self.executeByPayload(payload)\n\n def executeByPayload(self, payload) -> dict:\n\n actuatorId = int(payload[EPInfoLight.ATTR_ACTUATOR_ID])\n\n if actuatorId == self.web_gadget.getLightId():\n\n actualValue = self.web_gadget.fetchSavedLightValue()\n\n logging.debug( \"WEB request: {0} {1} ('{2}': {3})\".format(\n EPInfoLight.METHOD, EPInfoLight.URL,\n EPInfoLight.ATTR_ACTUATOR_ID, actuatorId)\n )\n\n return {\"value\": actualValue, \"thread\": self.web_gadget.getThreadControllerStatus()}\n# return {\"value\": actualValue, \"thread\": {\"inProgress\": False, \"id\":1}}\n\n else:\n raise InvalidAPIUsage(\"No such actuator: {0} or value: {1}\".format(actuatorId, value), error_code=404)\n",
"step-ids": [
1,
3,
4,
6,
8
]
}
|
[
1,
3,
4,
6,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for i in range(0, len(a)):
print('main')
sorted = True
for j in range(1, len(a) - i):
if a[j] < a[j - 1]:
a[j], a[j - 1] = a[j - 1], a[j]
print('inner')
print(a)
time.sleep(1)
sorted = False
if sorted:
break
print(a)
<|reserved_special_token_1|>
a = [5, 4, 3, 2, 1]
a = [1, 2, 3, 7, 5, 6, 4, 8, 9]
<|reserved_special_token_0|>
sorted = True
for i in range(0, len(a)):
print('main')
sorted = True
for j in range(1, len(a) - i):
if a[j] < a[j - 1]:
a[j], a[j - 1] = a[j - 1], a[j]
print('inner')
print(a)
time.sleep(1)
sorted = False
if sorted:
break
print(a)
<|reserved_special_token_1|>
a = [5, 4, 3, 2, 1]
a = [1, 2, 3, 7, 5, 6, 4, 8, 9]
import time
sorted = True
for i in range(0, len(a)):
print('main')
sorted = True
for j in range(1, len(a) - i):
if a[j] < a[j - 1]:
a[j], a[j - 1] = a[j - 1], a[j]
print('inner')
print(a)
time.sleep(1)
sorted = False
if sorted:
break
print(a)
|
flexible
|
{
"blob_id": "30fbe52a5e3fb184a998fce43d716cffdaf0d2dc",
"index": 1790,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(0, len(a)):\n print('main')\n sorted = True\n for j in range(1, len(a) - i):\n if a[j] < a[j - 1]:\n a[j], a[j - 1] = a[j - 1], a[j]\n print('inner')\n print(a)\n time.sleep(1)\n sorted = False\n if sorted:\n break\nprint(a)\n",
"step-3": "a = [5, 4, 3, 2, 1]\na = [1, 2, 3, 7, 5, 6, 4, 8, 9]\n<mask token>\nsorted = True\nfor i in range(0, len(a)):\n print('main')\n sorted = True\n for j in range(1, len(a) - i):\n if a[j] < a[j - 1]:\n a[j], a[j - 1] = a[j - 1], a[j]\n print('inner')\n print(a)\n time.sleep(1)\n sorted = False\n if sorted:\n break\nprint(a)\n",
"step-4": "a = [5, 4, 3, 2, 1]\na = [1, 2, 3, 7, 5, 6, 4, 8, 9]\nimport time\nsorted = True\nfor i in range(0, len(a)):\n print('main')\n sorted = True\n for j in range(1, len(a) - i):\n if a[j] < a[j - 1]:\n a[j], a[j - 1] = a[j - 1], a[j]\n print('inner')\n print(a)\n time.sleep(1)\n sorted = False\n if sorted:\n break\nprint(a)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.shortcuts import render
from django.http import HttpResponse
def view1(request):
return HttpResponse(" Hey..,This is the first view using HttpResponce!")
def view2(request):
context={"tag_var":"tag_var"}
return render(request,"new.html",context)
# Create your views here.
|
normal
|
{
"blob_id": "c9b62328a463fd38f3dbd1e7b5e1990f7eec1dba",
"index": 9793,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef view2(request):\n context = {'tag_var': 'tag_var'}\n return render(request, 'new.html', context)\n",
"step-3": "<mask token>\n\n\ndef view1(request):\n return HttpResponse(' Hey..,This is the first view using HttpResponce!')\n\n\ndef view2(request):\n context = {'tag_var': 'tag_var'}\n return render(request, 'new.html', context)\n",
"step-4": "from django.shortcuts import render\nfrom django.http import HttpResponse\n\n\ndef view1(request):\n return HttpResponse(' Hey..,This is the first view using HttpResponce!')\n\n\ndef view2(request):\n context = {'tag_var': 'tag_var'}\n return render(request, 'new.html', context)\n",
"step-5": "from django.shortcuts import render\nfrom django.http import HttpResponse\ndef view1(request):\n return HttpResponse(\" Hey..,This is the first view using HttpResponce!\")\ndef view2(request):\n context={\"tag_var\":\"tag_var\"}\n return render(request,\"new.html\",context)\n# Create your views here.\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import pytest
import app
import urllib.parse
@pytest.fixture
def client():
app.app.config['TESTING'] = True
with app.app.test_client() as client:
yield client
def test_query_missing_args(client):
response = client.get('/data/query')
assert 'errors' in response.json and '400' in response.status
def test_query_get_json(client):
response = client.get(f'/data/query?sql={urllib.parse.quote("select * from test")}')
assert len(response.json) == 2
def test_query_post_json(client):
response = client.post('/data/query', json={'sql': 'select * from test'})
assert len(response.json) == 2
def test_query_get_csv(client):
response = client.get(f'/data/query?sql={urllib.parse.quote("select * from test")}&format=csv')
text = response.data.decode()
assert len(text) > 0 and 'col0' in text
def test_query_post_csv(client):
response = client.post('/data/query', json={'sql': 'select * from test', 'format': 'csv'})
text = response.data.decode()
assert len(text) > 0 and 'col0' in text
def test_query_bad_sql_insert(client):
response = client.get(f'/data/query?sql={urllib.parse.quote("insert into test (col0) values (1)")}')
assert 'Illegal SQL' in response.json['message'] and 400 == response.status_code
def test_query_bad_sql_delete(client):
response = client.get(f'/data/query?sql={urllib.parse.quote("delete from test where col0 = 1")}')
assert 'Illegal SQL' in response.json['message'] and 400 == response.status_code
def test_query_bad_sql_update(client):
response = client.get(f'/data/query?sql={urllib.parse.quote("update test set col0 = 1")}')
assert 'Illegal SQL' in response.json['message'] and 400 == response.status_code
|
normal
|
{
"blob_id": "a598da0a749fcc5a6719cec31ede0eb13fab228e",
"index": 3171,
"step-1": "<mask token>\n\n\[email protected]\ndef client():\n app.app.config['TESTING'] = True\n with app.app.test_client() as client:\n yield client\n\n\ndef test_query_missing_args(client):\n response = client.get('/data/query')\n assert 'errors' in response.json and '400' in response.status\n\n\ndef test_query_get_json(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('select * from test')}\")\n assert len(response.json) == 2\n\n\ndef test_query_post_json(client):\n response = client.post('/data/query', json={'sql': 'select * from test'})\n assert len(response.json) == 2\n\n\n<mask token>\n\n\ndef test_query_bad_sql_insert(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('insert into test (col0) values (1)')}\"\n )\n assert 'Illegal SQL' in response.json['message'\n ] and 400 == response.status_code\n\n\ndef test_query_bad_sql_delete(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('delete from test where col0 = 1')}\"\n )\n assert 'Illegal SQL' in response.json['message'\n ] and 400 == response.status_code\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]\ndef client():\n app.app.config['TESTING'] = True\n with app.app.test_client() as client:\n yield client\n\n\ndef test_query_missing_args(client):\n response = client.get('/data/query')\n assert 'errors' in response.json and '400' in response.status\n\n\ndef test_query_get_json(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('select * from test')}\")\n assert len(response.json) == 2\n\n\ndef test_query_post_json(client):\n response = client.post('/data/query', json={'sql': 'select * from test'})\n assert len(response.json) == 2\n\n\n<mask token>\n\n\ndef test_query_bad_sql_insert(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('insert into test (col0) values (1)')}\"\n )\n assert 'Illegal SQL' in response.json['message'\n ] and 400 == response.status_code\n\n\ndef test_query_bad_sql_delete(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('delete from test where col0 = 1')}\"\n )\n assert 'Illegal SQL' in response.json['message'\n ] and 400 == response.status_code\n\n\ndef test_query_bad_sql_update(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('update test set col0 = 1')}\")\n assert 'Illegal SQL' in response.json['message'\n ] and 400 == response.status_code\n",
"step-3": "<mask token>\n\n\[email protected]\ndef client():\n app.app.config['TESTING'] = True\n with app.app.test_client() as client:\n yield client\n\n\ndef test_query_missing_args(client):\n response = client.get('/data/query')\n assert 'errors' in response.json and '400' in response.status\n\n\ndef test_query_get_json(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('select * from test')}\")\n assert len(response.json) == 2\n\n\ndef test_query_post_json(client):\n response = client.post('/data/query', json={'sql': 'select * from test'})\n assert len(response.json) == 2\n\n\ndef test_query_get_csv(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('select * from test')}&format=csv\"\n )\n text = response.data.decode()\n assert len(text) > 0 and 'col0' in text\n\n\ndef test_query_post_csv(client):\n response = client.post('/data/query', json={'sql': 'select * from test',\n 'format': 'csv'})\n text = response.data.decode()\n assert len(text) > 0 and 'col0' in text\n\n\ndef test_query_bad_sql_insert(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('insert into test (col0) values (1)')}\"\n )\n assert 'Illegal SQL' in response.json['message'\n ] and 400 == response.status_code\n\n\ndef test_query_bad_sql_delete(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('delete from test where col0 = 1')}\"\n )\n assert 'Illegal SQL' in response.json['message'\n ] and 400 == response.status_code\n\n\ndef test_query_bad_sql_update(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('update test set col0 = 1')}\")\n assert 'Illegal SQL' in response.json['message'\n ] and 400 == response.status_code\n",
"step-4": "import pytest\nimport app\nimport urllib.parse\n\n\[email protected]\ndef client():\n app.app.config['TESTING'] = True\n with app.app.test_client() as client:\n yield client\n\n\ndef test_query_missing_args(client):\n response = client.get('/data/query')\n assert 'errors' in response.json and '400' in response.status\n\n\ndef test_query_get_json(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('select * from test')}\")\n assert len(response.json) == 2\n\n\ndef test_query_post_json(client):\n response = client.post('/data/query', json={'sql': 'select * from test'})\n assert len(response.json) == 2\n\n\ndef test_query_get_csv(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('select * from test')}&format=csv\"\n )\n text = response.data.decode()\n assert len(text) > 0 and 'col0' in text\n\n\ndef test_query_post_csv(client):\n response = client.post('/data/query', json={'sql': 'select * from test',\n 'format': 'csv'})\n text = response.data.decode()\n assert len(text) > 0 and 'col0' in text\n\n\ndef test_query_bad_sql_insert(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('insert into test (col0) values (1)')}\"\n )\n assert 'Illegal SQL' in response.json['message'\n ] and 400 == response.status_code\n\n\ndef test_query_bad_sql_delete(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('delete from test where col0 = 1')}\"\n )\n assert 'Illegal SQL' in response.json['message'\n ] and 400 == response.status_code\n\n\ndef test_query_bad_sql_update(client):\n response = client.get(\n f\"/data/query?sql={urllib.parse.quote('update test set col0 = 1')}\")\n assert 'Illegal SQL' in response.json['message'\n ] and 400 == response.status_code\n",
"step-5": "import pytest\nimport app\nimport urllib.parse\n\n\[email protected]\ndef client():\n app.app.config['TESTING'] = True\n\n with app.app.test_client() as client:\n yield client\n\n\ndef test_query_missing_args(client):\n response = client.get('/data/query')\n assert 'errors' in response.json and '400' in response.status\n\n\ndef test_query_get_json(client):\n response = client.get(f'/data/query?sql={urllib.parse.quote(\"select * from test\")}')\n assert len(response.json) == 2\n\n\ndef test_query_post_json(client):\n response = client.post('/data/query', json={'sql': 'select * from test'})\n assert len(response.json) == 2\n\n\ndef test_query_get_csv(client):\n response = client.get(f'/data/query?sql={urllib.parse.quote(\"select * from test\")}&format=csv')\n text = response.data.decode()\n assert len(text) > 0 and 'col0' in text\n\n\ndef test_query_post_csv(client):\n response = client.post('/data/query', json={'sql': 'select * from test', 'format': 'csv'})\n text = response.data.decode()\n assert len(text) > 0 and 'col0' in text\n\n\ndef test_query_bad_sql_insert(client):\n response = client.get(f'/data/query?sql={urllib.parse.quote(\"insert into test (col0) values (1)\")}')\n assert 'Illegal SQL' in response.json['message'] and 400 == response.status_code\n\n\ndef test_query_bad_sql_delete(client):\n response = client.get(f'/data/query?sql={urllib.parse.quote(\"delete from test where col0 = 1\")}')\n assert 'Illegal SQL' in response.json['message'] and 400 == response.status_code\n\n\ndef test_query_bad_sql_update(client):\n response = client.get(f'/data/query?sql={urllib.parse.quote(\"update test set col0 = 1\")}')\n assert 'Illegal SQL' in response.json['message'] and 400 == response.status_code\n",
"step-ids": [
6,
7,
9,
10,
11
]
}
|
[
6,
7,
9,
10,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
while True:
answer = zfile.read(nothing).decode('utf-8')
comments += zfile.getinfo(nothing).comment.decode('utf-8')
print(answer)
findRet = re.findall(target, answer)
if len(findRet) == 0:
break
nothing = findRet[0] + '.txt'
print('answer is ...')
print(comments)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
zfile = zipfile.ZipFile('./channel.zip')
<|reserved_special_token_0|>
comments = ''
nothing = '90052.txt'
target = '[0-9]+'
while True:
answer = zfile.read(nothing).decode('utf-8')
comments += zfile.getinfo(nothing).comment.decode('utf-8')
print(answer)
findRet = re.findall(target, answer)
if len(findRet) == 0:
break
nothing = findRet[0] + '.txt'
print('answer is ...')
print(comments)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import zipfile
import re
zfile = zipfile.ZipFile('./channel.zip')
<|reserved_special_token_0|>
comments = ''
nothing = '90052.txt'
target = '[0-9]+'
while True:
answer = zfile.read(nothing).decode('utf-8')
comments += zfile.getinfo(nothing).comment.decode('utf-8')
print(answer)
findRet = re.findall(target, answer)
if len(findRet) == 0:
break
nothing = findRet[0] + '.txt'
print('answer is ...')
print(comments)
<|reserved_special_token_1|>
'''''''''''''''''''''''''''''
> Filename: lv6.py
> Author: Kadrick, BoGwon Kang
> Created at: 2021/10/11 16:07
> Description: zip
'''''''''''''''''''''''''''''
import zipfile
import re
# open zipfile
zfile = zipfile.ZipFile('./channel.zip')
# check list
'''
print(zfile.namelist())
print(zfile.read("readme.txt"))
print(zfile.read("90052.txt"))
'''
# follow nothing & collect the comments
comments = ""
nothing = "90052.txt"
target = r"[0-9]+"
while True:
answer = zfile.read(nothing).decode('utf-8')
# collect comment
comments += zfile.getinfo(nothing).comment.decode('utf-8')
print(answer)
findRet = re.findall(target, answer)
if len(findRet) == 0:
break
nothing = findRet[0] + ".txt"
print("answer is ...")
print(comments)
|
flexible
|
{
"blob_id": "b1fe7e318c361930c8ad00758bcb86597fd8f3bd",
"index": 2567,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n answer = zfile.read(nothing).decode('utf-8')\n comments += zfile.getinfo(nothing).comment.decode('utf-8')\n print(answer)\n findRet = re.findall(target, answer)\n if len(findRet) == 0:\n break\n nothing = findRet[0] + '.txt'\nprint('answer is ...')\nprint(comments)\n",
"step-3": "<mask token>\nzfile = zipfile.ZipFile('./channel.zip')\n<mask token>\ncomments = ''\nnothing = '90052.txt'\ntarget = '[0-9]+'\nwhile True:\n answer = zfile.read(nothing).decode('utf-8')\n comments += zfile.getinfo(nothing).comment.decode('utf-8')\n print(answer)\n findRet = re.findall(target, answer)\n if len(findRet) == 0:\n break\n nothing = findRet[0] + '.txt'\nprint('answer is ...')\nprint(comments)\n",
"step-4": "<mask token>\nimport zipfile\nimport re\nzfile = zipfile.ZipFile('./channel.zip')\n<mask token>\ncomments = ''\nnothing = '90052.txt'\ntarget = '[0-9]+'\nwhile True:\n answer = zfile.read(nothing).decode('utf-8')\n comments += zfile.getinfo(nothing).comment.decode('utf-8')\n print(answer)\n findRet = re.findall(target, answer)\n if len(findRet) == 0:\n break\n nothing = findRet[0] + '.txt'\nprint('answer is ...')\nprint(comments)\n",
"step-5": "'''''''''''''''''''''''''''''\n> Filename: lv6.py\n> Author: Kadrick, BoGwon Kang\n> Created at: 2021/10/11 16:07\n> Description: zip\n'''''''''''''''''''''''''''''\nimport zipfile\nimport re\n\n# open zipfile\nzfile = zipfile.ZipFile('./channel.zip')\n\n# check list\n'''\nprint(zfile.namelist())\nprint(zfile.read(\"readme.txt\"))\nprint(zfile.read(\"90052.txt\"))\n'''\n\n# follow nothing & collect the comments\ncomments = \"\"\n\nnothing = \"90052.txt\"\ntarget = r\"[0-9]+\"\n\nwhile True:\n answer = zfile.read(nothing).decode('utf-8')\n # collect comment\n comments += zfile.getinfo(nothing).comment.decode('utf-8')\n print(answer)\n\n findRet = re.findall(target, answer)\n\n if len(findRet) == 0:\n break\n \n nothing = findRet[0] + \".txt\"\n\nprint(\"answer is ...\")\nprint(comments)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
"""
Creating flask server that response with a json
"""
from flask import Flask
from flask import jsonify
micro_service = Flask(__name__)
@micro_service.route('/') # http://mysite.com/
def home():
return jsonify({'message': 'Hello, world!'})
if __name__ == '__main__':
micro_service.run()
|
normal
|
{
"blob_id": "4b14dee3625d5d0c703176ed2f0a28b2583fd84d",
"index": 6519,
"step-1": "<mask token>\n\n\n@micro_service.route('/')\ndef home():\n return jsonify({'message': 'Hello, world!'})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@micro_service.route('/')\ndef home():\n return jsonify({'message': 'Hello, world!'})\n\n\nif __name__ == '__main__':\n micro_service.run()\n",
"step-3": "<mask token>\nmicro_service = Flask(__name__)\n\n\n@micro_service.route('/')\ndef home():\n return jsonify({'message': 'Hello, world!'})\n\n\nif __name__ == '__main__':\n micro_service.run()\n",
"step-4": "<mask token>\nfrom flask import Flask\nfrom flask import jsonify\nmicro_service = Flask(__name__)\n\n\n@micro_service.route('/')\ndef home():\n return jsonify({'message': 'Hello, world!'})\n\n\nif __name__ == '__main__':\n micro_service.run()\n",
"step-5": "\"\"\"\nCreating flask server that response with a json\n\"\"\"\n\nfrom flask import Flask\nfrom flask import jsonify\n\nmicro_service = Flask(__name__)\n\n\n@micro_service.route('/') # http://mysite.com/\ndef home():\n return jsonify({'message': 'Hello, world!'})\n\n\nif __name__ == '__main__':\n micro_service.run()\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
"""MicroPython rotary encoder library."""
from machine import Pin
ENC_STATES = (0, -1, 1, 0, 1, 0, 0, -1, -1, 0, 0, 1, 0, 1, -1, 0)
class Encoder(object):
def __init__(self, pin_x='P4', pin_y='P5', pin_mode=Pin.PULL_UP,
scale=1, min=0, max=100, reverse=False):
self.pin_x = (pin_x if isinstance(pin_x, Pin) else
Pin(pin_x, mode=Pin.IN, pull=pin_mode))
self.pin_y = (pin_y if isinstance(pin_y, Pin) else
Pin(pin_y, mode=Pin.IN, pull=pin_mode))
self.pin_mode = pin_mode
self.scale = scale
self.min = min
self.max = max
self.reverse = 1 if reverse else -1
# The following variables are assigned to in the interrupt callback,
# so we have to allocate them here.
self._pos = -1
self._readings = 0
self._state = 0
self.set_callbacks(self._callback)
def _callback(self, line):
self._readings = (self._readings << 2 | self.pin_x.value() << 1 |
self.pin_y.value()) & 0x0f
self._state = ENC_STATES[self._readings] * self.reverse
if self._state:
self._pos = min(max(self.min, self._pos + self._state), self.max)
def set_callbacks(self, callback=None):
self.irq_x = self.pin_x.callback(
trigger=Pin.IRQ_FALLING | Pin.IRQ_RISING, handler=callback)
self.irq_y = self.pin_y.callback(
trigger=Pin.IRQ_FALLING | Pin.IRQ_RISING, handler=callback)
def position(self):
return self._pos * self.scale
def reset(self):
self._pos = 0
def setMax(self, Max):
self.max = Max
def setMin(self, Min):
self.min = Min
def setScale(self, Scale):
self.scale = Scale
|
normal
|
{
"blob_id": "1406b2ab78b52823a8f455c8e2719f6bd84bd168",
"index": 822,
"step-1": "<mask token>\n\n\nclass Encoder(object):\n\n def __init__(self, pin_x='P4', pin_y='P5', pin_mode=Pin.PULL_UP, scale=\n 1, min=0, max=100, reverse=False):\n self.pin_x = pin_x if isinstance(pin_x, Pin) else Pin(pin_x, mode=\n Pin.IN, pull=pin_mode)\n self.pin_y = pin_y if isinstance(pin_y, Pin) else Pin(pin_y, mode=\n Pin.IN, pull=pin_mode)\n self.pin_mode = pin_mode\n self.scale = scale\n self.min = min\n self.max = max\n self.reverse = 1 if reverse else -1\n self._pos = -1\n self._readings = 0\n self._state = 0\n self.set_callbacks(self._callback)\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 15\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n <mask token>\n\n def position(self):\n return self._pos * self.scale\n <mask token>\n\n def setMax(self, Max):\n self.max = Max\n\n def setMin(self, Min):\n self.min = Min\n\n def setScale(self, Scale):\n self.scale = Scale\n",
"step-2": "<mask token>\n\n\nclass Encoder(object):\n\n def __init__(self, pin_x='P4', pin_y='P5', pin_mode=Pin.PULL_UP, scale=\n 1, min=0, max=100, reverse=False):\n self.pin_x = pin_x if isinstance(pin_x, Pin) else Pin(pin_x, mode=\n Pin.IN, pull=pin_mode)\n self.pin_y = pin_y if isinstance(pin_y, Pin) else Pin(pin_y, mode=\n Pin.IN, pull=pin_mode)\n self.pin_mode = pin_mode\n self.scale = scale\n self.min = min\n self.max = max\n self.reverse = 1 if reverse else -1\n self._pos = -1\n self._readings = 0\n self._state = 0\n self.set_callbacks(self._callback)\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 15\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n\n def set_callbacks(self, callback=None):\n self.irq_x = self.pin_x.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n self.irq_y = self.pin_y.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n\n def setMax(self, Max):\n self.max = Max\n\n def setMin(self, Min):\n self.min = Min\n\n def setScale(self, Scale):\n self.scale = Scale\n",
"step-3": "<mask token>\nENC_STATES = 0, -1, 1, 0, 1, 0, 0, -1, -1, 0, 0, 1, 0, 1, -1, 0\n\n\nclass Encoder(object):\n\n def __init__(self, pin_x='P4', pin_y='P5', pin_mode=Pin.PULL_UP, scale=\n 1, min=0, max=100, reverse=False):\n self.pin_x = pin_x if isinstance(pin_x, Pin) else Pin(pin_x, mode=\n Pin.IN, pull=pin_mode)\n self.pin_y = pin_y if isinstance(pin_y, Pin) else Pin(pin_y, mode=\n Pin.IN, pull=pin_mode)\n self.pin_mode = pin_mode\n self.scale = scale\n self.min = min\n self.max = max\n self.reverse = 1 if reverse else -1\n self._pos = -1\n self._readings = 0\n self._state = 0\n self.set_callbacks(self._callback)\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 15\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n\n def set_callbacks(self, callback=None):\n self.irq_x = self.pin_x.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n self.irq_y = self.pin_y.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n\n def setMax(self, Max):\n self.max = Max\n\n def setMin(self, Min):\n self.min = Min\n\n def setScale(self, Scale):\n self.scale = Scale\n",
"step-4": "<mask token>\nfrom machine import Pin\nENC_STATES = 0, -1, 1, 0, 1, 0, 0, -1, -1, 0, 0, 1, 0, 1, -1, 0\n\n\nclass Encoder(object):\n\n def __init__(self, pin_x='P4', pin_y='P5', pin_mode=Pin.PULL_UP, scale=\n 1, min=0, max=100, reverse=False):\n self.pin_x = pin_x if isinstance(pin_x, Pin) else Pin(pin_x, mode=\n Pin.IN, pull=pin_mode)\n self.pin_y = pin_y if isinstance(pin_y, Pin) else Pin(pin_y, mode=\n Pin.IN, pull=pin_mode)\n self.pin_mode = pin_mode\n self.scale = scale\n self.min = min\n self.max = max\n self.reverse = 1 if reverse else -1\n self._pos = -1\n self._readings = 0\n self._state = 0\n self.set_callbacks(self._callback)\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 15\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n\n def set_callbacks(self, callback=None):\n self.irq_x = self.pin_x.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n self.irq_y = self.pin_y.callback(trigger=Pin.IRQ_FALLING | Pin.\n IRQ_RISING, handler=callback)\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n\n def setMax(self, Max):\n self.max = Max\n\n def setMin(self, Min):\n self.min = Min\n\n def setScale(self, Scale):\n self.scale = Scale\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"MicroPython rotary encoder library.\"\"\"\n\nfrom machine import Pin\n\n\nENC_STATES = (0, -1, 1, 0, 1, 0, 0, -1, -1, 0, 0, 1, 0, 1, -1, 0)\n\n\nclass Encoder(object):\n def __init__(self, pin_x='P4', pin_y='P5', pin_mode=Pin.PULL_UP,\n scale=1, min=0, max=100, reverse=False):\n self.pin_x = (pin_x if isinstance(pin_x, Pin) else\n Pin(pin_x, mode=Pin.IN, pull=pin_mode))\n self.pin_y = (pin_y if isinstance(pin_y, Pin) else\n Pin(pin_y, mode=Pin.IN, pull=pin_mode))\n\n self.pin_mode = pin_mode\n self.scale = scale\n self.min = min\n self.max = max\n self.reverse = 1 if reverse else -1\n\n # The following variables are assigned to in the interrupt callback,\n # so we have to allocate them here.\n self._pos = -1\n self._readings = 0\n self._state = 0\n\n self.set_callbacks(self._callback)\n\n def _callback(self, line):\n self._readings = (self._readings << 2 | self.pin_x.value() << 1 |\n self.pin_y.value()) & 0x0f\n\n self._state = ENC_STATES[self._readings] * self.reverse\n if self._state:\n self._pos = min(max(self.min, self._pos + self._state), self.max)\n\n def set_callbacks(self, callback=None):\n self.irq_x = self.pin_x.callback(\n trigger=Pin.IRQ_FALLING | Pin.IRQ_RISING, handler=callback)\n self.irq_y = self.pin_y.callback(\n trigger=Pin.IRQ_FALLING | Pin.IRQ_RISING, handler=callback)\n\n def position(self):\n return self._pos * self.scale\n\n def reset(self):\n self._pos = 0\n\n def setMax(self, Max):\n self.max = Max\n\n def setMin(self, Min):\n self.min = Min\n\n def setScale(self, Scale):\n self.scale = Scale\n",
"step-ids": [
7,
9,
10,
11,
12
]
}
|
[
7,
9,
10,
11,
12
] |
# -*- coding=utf-8 -*-
from mako.template import Template
from xblock.fragment import Fragment
from .lookup import TemplateLookup # xblock_ifmo.lookup
from .utils import deep_update
class FragmentMakoChain(Fragment):
"""
Класс, позволяющий последовательно оборачивать экземпляры Fragment друг в
друга.
Для того, чтобы цепочка отработала, шаблон должен наследоваться от шаблона
ifmo_xblock_base и определять блок block_body.
Порядок оборачивания не определён.
"""
base = None
context = {}
_content = None
lookup_dirs = None
def __init__(self, content=None, base=None, lookup_dirs=None):
"""
Класс, позволяющий последовательно оборачивать экземпляры Fragment друг
в друга.
:param content: Содержимое фрагмента
:param base: Базовый фрагмент, тот, в который будет обёрнут этот фрагмент;
должен быть экземпляром FragmentMakoChain или None
:param lookup_dirs: Директории поиска шаблонов
:return:
"""
assert isinstance(base, FragmentMakoChain) or base is None
super(FragmentMakoChain, self).__init__(content=content)
self.base = base
self.lookup_dirs = lookup_dirs
def body_html(self):
template = self.build_chain()
return template.render(**self.context.get('render_context', {}))
def add_context(self, new_context):
deep_update(self.context, new_context)
def build_chain(self):
"""
Строит цепочку шаблонов.
В цепочке каждый шаблон наследуется от одного и того же ifmo_xblock_base,
поэтому порядок оборачивания не определён (точнее, его вычисляет
метод super()). Поскольку при рендере шаблона используется исключительно
lookup от шаблона, над которым он вызван, а не собственный Lookup для
каждого из шаблона в коллекции, необходимо добавить в коллекцию все
пути и шаблоны, использующиеся в шаблоне выше по цепочке. Более того,
необходимо изменить имена шаблонов (ifmo_xblock_base) на уникальные.
:param lookup: экземпляр TemplateLookup, в который будут записываться
новые пути и шаблоны, использующиеся как родительские
:return: tuple(template, lookup, base_template_id)
- template -- шаблон, который должен будет стать родителем
- lookup -- изменённый lookup
"""
def _build_chain(self, lookup=None):
old_base_name = "ifmo_xblock_base"
new_base_name = None
if self.base is not None:
import uuid
new_base_name = "{name}_{rnd}".format(name=old_base_name, rnd=str(uuid.uuid4()))
if hasattr(self.base, 'build_chain'):
base_template, base_lookup = _build_chain(self.base, lookup)
lookup.put_template(new_base_name, base_template)
else:
lookup.put_string(new_base_name, self.base.body_html())
lookup.append_dirs(self.base.lookup_dirs)
return Template(
text=self._content.replace(old_base_name, new_base_name) if new_base_name else self._content,
lookup=lookup
), lookup
lookup = TemplateLookup(directories=self.lookup_dirs)
template, _ = _build_chain(self, lookup)
return template
@property
def resources(self):
seen = set()
parent_res = self.base.resources if self.base else []
return [x for x in parent_res + self._resources if x not in seen and not seen.add(x)]
@property
def content(self):
return self.body_html()
@content.setter
def content(self, value):
self._content = value
|
normal
|
{
"blob_id": "9d904225afd4f4d0cf338ae16f031f8ab41639ad",
"index": 234,
"step-1": "<mask token>\n\n\nclass FragmentMakoChain(Fragment):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, content=None, base=None, lookup_dirs=None):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг\n в друга.\n\n :param content: Содержимое фрагмента\n :param base: Базовый фрагмент, тот, в который будет обёрнут этот фрагмент;\n должен быть экземпляром FragmentMakoChain или None\n :param lookup_dirs: Директории поиска шаблонов\n :return:\n \"\"\"\n assert isinstance(base, FragmentMakoChain) or base is None\n super(FragmentMakoChain, self).__init__(content=content)\n self.base = base\n self.lookup_dirs = lookup_dirs\n\n def body_html(self):\n template = self.build_chain()\n return template.render(**self.context.get('render_context', {}))\n\n def add_context(self, new_context):\n deep_update(self.context, new_context)\n\n def build_chain(self):\n \"\"\"\n Строит цепочку шаблонов.\n\n В цепочке каждый шаблон наследуется от одного и того же ifmo_xblock_base,\n поэтому порядок оборачивания не определён (точнее, его вычисляет\n метод super()). Поскольку при рендере шаблона используется исключительно\n lookup от шаблона, над которым он вызван, а не собственный Lookup для\n каждого из шаблона в коллекции, необходимо добавить в коллекцию все\n пути и шаблоны, использующиеся в шаблоне выше по цепочке. 
Более того,\n необходимо изменить имена шаблонов (ifmo_xblock_base) на уникальные.\n\n :param lookup: экземпляр TemplateLookup, в который будут записываться\n новые пути и шаблоны, использующиеся как родительские\n\n :return: tuple(template, lookup, base_template_id)\n - template -- шаблон, который должен будет стать родителем\n - lookup -- изменённый lookup\n \"\"\"\n\n def _build_chain(self, lookup=None):\n old_base_name = 'ifmo_xblock_base'\n new_base_name = None\n if self.base is not None:\n import uuid\n new_base_name = '{name}_{rnd}'.format(name=old_base_name,\n rnd=str(uuid.uuid4()))\n if hasattr(self.base, 'build_chain'):\n base_template, base_lookup = _build_chain(self.base, lookup\n )\n lookup.put_template(new_base_name, base_template)\n else:\n lookup.put_string(new_base_name, self.base.body_html())\n lookup.append_dirs(self.base.lookup_dirs)\n return Template(text=self._content.replace(old_base_name,\n new_base_name) if new_base_name else self._content, lookup=\n lookup), lookup\n lookup = TemplateLookup(directories=self.lookup_dirs)\n template, _ = _build_chain(self, lookup)\n return template\n <mask token>\n\n @property\n def content(self):\n return self.body_html()\n\n @content.setter\n def content(self, value):\n self._content = value\n",
"step-2": "<mask token>\n\n\nclass FragmentMakoChain(Fragment):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, content=None, base=None, lookup_dirs=None):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг\n в друга.\n\n :param content: Содержимое фрагмента\n :param base: Базовый фрагмент, тот, в который будет обёрнут этот фрагмент;\n должен быть экземпляром FragmentMakoChain или None\n :param lookup_dirs: Директории поиска шаблонов\n :return:\n \"\"\"\n assert isinstance(base, FragmentMakoChain) or base is None\n super(FragmentMakoChain, self).__init__(content=content)\n self.base = base\n self.lookup_dirs = lookup_dirs\n\n def body_html(self):\n template = self.build_chain()\n return template.render(**self.context.get('render_context', {}))\n\n def add_context(self, new_context):\n deep_update(self.context, new_context)\n\n def build_chain(self):\n \"\"\"\n Строит цепочку шаблонов.\n\n В цепочке каждый шаблон наследуется от одного и того же ifmo_xblock_base,\n поэтому порядок оборачивания не определён (точнее, его вычисляет\n метод super()). Поскольку при рендере шаблона используется исключительно\n lookup от шаблона, над которым он вызван, а не собственный Lookup для\n каждого из шаблона в коллекции, необходимо добавить в коллекцию все\n пути и шаблоны, использующиеся в шаблоне выше по цепочке. 
Более того,\n необходимо изменить имена шаблонов (ifmo_xblock_base) на уникальные.\n\n :param lookup: экземпляр TemplateLookup, в который будут записываться\n новые пути и шаблоны, использующиеся как родительские\n\n :return: tuple(template, lookup, base_template_id)\n - template -- шаблон, который должен будет стать родителем\n - lookup -- изменённый lookup\n \"\"\"\n\n def _build_chain(self, lookup=None):\n old_base_name = 'ifmo_xblock_base'\n new_base_name = None\n if self.base is not None:\n import uuid\n new_base_name = '{name}_{rnd}'.format(name=old_base_name,\n rnd=str(uuid.uuid4()))\n if hasattr(self.base, 'build_chain'):\n base_template, base_lookup = _build_chain(self.base, lookup\n )\n lookup.put_template(new_base_name, base_template)\n else:\n lookup.put_string(new_base_name, self.base.body_html())\n lookup.append_dirs(self.base.lookup_dirs)\n return Template(text=self._content.replace(old_base_name,\n new_base_name) if new_base_name else self._content, lookup=\n lookup), lookup\n lookup = TemplateLookup(directories=self.lookup_dirs)\n template, _ = _build_chain(self, lookup)\n return template\n\n @property\n def resources(self):\n seen = set()\n parent_res = self.base.resources if self.base else []\n return [x for x in parent_res + self._resources if x not in seen and\n not seen.add(x)]\n\n @property\n def content(self):\n return self.body_html()\n\n @content.setter\n def content(self, value):\n self._content = value\n",
"step-3": "<mask token>\n\n\nclass FragmentMakoChain(Fragment):\n <mask token>\n base = None\n context = {}\n _content = None\n lookup_dirs = None\n\n def __init__(self, content=None, base=None, lookup_dirs=None):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг\n в друга.\n\n :param content: Содержимое фрагмента\n :param base: Базовый фрагмент, тот, в который будет обёрнут этот фрагмент;\n должен быть экземпляром FragmentMakoChain или None\n :param lookup_dirs: Директории поиска шаблонов\n :return:\n \"\"\"\n assert isinstance(base, FragmentMakoChain) or base is None\n super(FragmentMakoChain, self).__init__(content=content)\n self.base = base\n self.lookup_dirs = lookup_dirs\n\n def body_html(self):\n template = self.build_chain()\n return template.render(**self.context.get('render_context', {}))\n\n def add_context(self, new_context):\n deep_update(self.context, new_context)\n\n def build_chain(self):\n \"\"\"\n Строит цепочку шаблонов.\n\n В цепочке каждый шаблон наследуется от одного и того же ifmo_xblock_base,\n поэтому порядок оборачивания не определён (точнее, его вычисляет\n метод super()). Поскольку при рендере шаблона используется исключительно\n lookup от шаблона, над которым он вызван, а не собственный Lookup для\n каждого из шаблона в коллекции, необходимо добавить в коллекцию все\n пути и шаблоны, использующиеся в шаблоне выше по цепочке. 
Более того,\n необходимо изменить имена шаблонов (ifmo_xblock_base) на уникальные.\n\n :param lookup: экземпляр TemplateLookup, в который будут записываться\n новые пути и шаблоны, использующиеся как родительские\n\n :return: tuple(template, lookup, base_template_id)\n - template -- шаблон, который должен будет стать родителем\n - lookup -- изменённый lookup\n \"\"\"\n\n def _build_chain(self, lookup=None):\n old_base_name = 'ifmo_xblock_base'\n new_base_name = None\n if self.base is not None:\n import uuid\n new_base_name = '{name}_{rnd}'.format(name=old_base_name,\n rnd=str(uuid.uuid4()))\n if hasattr(self.base, 'build_chain'):\n base_template, base_lookup = _build_chain(self.base, lookup\n )\n lookup.put_template(new_base_name, base_template)\n else:\n lookup.put_string(new_base_name, self.base.body_html())\n lookup.append_dirs(self.base.lookup_dirs)\n return Template(text=self._content.replace(old_base_name,\n new_base_name) if new_base_name else self._content, lookup=\n lookup), lookup\n lookup = TemplateLookup(directories=self.lookup_dirs)\n template, _ = _build_chain(self, lookup)\n return template\n\n @property\n def resources(self):\n seen = set()\n parent_res = self.base.resources if self.base else []\n return [x for x in parent_res + self._resources if x not in seen and\n not seen.add(x)]\n\n @property\n def content(self):\n return self.body_html()\n\n @content.setter\n def content(self, value):\n self._content = value\n",
"step-4": "from mako.template import Template\nfrom xblock.fragment import Fragment\nfrom .lookup import TemplateLookup\nfrom .utils import deep_update\n\n\nclass FragmentMakoChain(Fragment):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг в\n друга.\n\n Для того, чтобы цепочка отработала, шаблон должен наследоваться от шаблона\n ifmo_xblock_base и определять блок block_body.\n\n Порядок оборачивания не определён.\n \"\"\"\n base = None\n context = {}\n _content = None\n lookup_dirs = None\n\n def __init__(self, content=None, base=None, lookup_dirs=None):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг\n в друга.\n\n :param content: Содержимое фрагмента\n :param base: Базовый фрагмент, тот, в который будет обёрнут этот фрагмент;\n должен быть экземпляром FragmentMakoChain или None\n :param lookup_dirs: Директории поиска шаблонов\n :return:\n \"\"\"\n assert isinstance(base, FragmentMakoChain) or base is None\n super(FragmentMakoChain, self).__init__(content=content)\n self.base = base\n self.lookup_dirs = lookup_dirs\n\n def body_html(self):\n template = self.build_chain()\n return template.render(**self.context.get('render_context', {}))\n\n def add_context(self, new_context):\n deep_update(self.context, new_context)\n\n def build_chain(self):\n \"\"\"\n Строит цепочку шаблонов.\n\n В цепочке каждый шаблон наследуется от одного и того же ifmo_xblock_base,\n поэтому порядок оборачивания не определён (точнее, его вычисляет\n метод super()). Поскольку при рендере шаблона используется исключительно\n lookup от шаблона, над которым он вызван, а не собственный Lookup для\n каждого из шаблона в коллекции, необходимо добавить в коллекцию все\n пути и шаблоны, использующиеся в шаблоне выше по цепочке. 
Более того,\n необходимо изменить имена шаблонов (ifmo_xblock_base) на уникальные.\n\n :param lookup: экземпляр TemplateLookup, в который будут записываться\n новые пути и шаблоны, использующиеся как родительские\n\n :return: tuple(template, lookup, base_template_id)\n - template -- шаблон, который должен будет стать родителем\n - lookup -- изменённый lookup\n \"\"\"\n\n def _build_chain(self, lookup=None):\n old_base_name = 'ifmo_xblock_base'\n new_base_name = None\n if self.base is not None:\n import uuid\n new_base_name = '{name}_{rnd}'.format(name=old_base_name,\n rnd=str(uuid.uuid4()))\n if hasattr(self.base, 'build_chain'):\n base_template, base_lookup = _build_chain(self.base, lookup\n )\n lookup.put_template(new_base_name, base_template)\n else:\n lookup.put_string(new_base_name, self.base.body_html())\n lookup.append_dirs(self.base.lookup_dirs)\n return Template(text=self._content.replace(old_base_name,\n new_base_name) if new_base_name else self._content, lookup=\n lookup), lookup\n lookup = TemplateLookup(directories=self.lookup_dirs)\n template, _ = _build_chain(self, lookup)\n return template\n\n @property\n def resources(self):\n seen = set()\n parent_res = self.base.resources if self.base else []\n return [x for x in parent_res + self._resources if x not in seen and\n not seen.add(x)]\n\n @property\n def content(self):\n return self.body_html()\n\n @content.setter\n def content(self, value):\n self._content = value\n",
"step-5": "# -*- coding=utf-8 -*-\n\nfrom mako.template import Template\nfrom xblock.fragment import Fragment\n\nfrom .lookup import TemplateLookup # xblock_ifmo.lookup\nfrom .utils import deep_update\n\n\nclass FragmentMakoChain(Fragment):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг в\n друга.\n\n Для того, чтобы цепочка отработала, шаблон должен наследоваться от шаблона\n ifmo_xblock_base и определять блок block_body.\n\n Порядок оборачивания не определён.\n \"\"\"\n\n base = None\n context = {}\n _content = None\n lookup_dirs = None\n\n def __init__(self, content=None, base=None, lookup_dirs=None):\n \"\"\"\n Класс, позволяющий последовательно оборачивать экземпляры Fragment друг\n в друга.\n\n :param content: Содержимое фрагмента\n :param base: Базовый фрагмент, тот, в который будет обёрнут этот фрагмент;\n должен быть экземпляром FragmentMakoChain или None\n :param lookup_dirs: Директории поиска шаблонов\n :return:\n \"\"\"\n assert isinstance(base, FragmentMakoChain) or base is None\n super(FragmentMakoChain, self).__init__(content=content)\n self.base = base\n self.lookup_dirs = lookup_dirs\n\n def body_html(self):\n template = self.build_chain()\n return template.render(**self.context.get('render_context', {}))\n\n def add_context(self, new_context):\n deep_update(self.context, new_context)\n\n def build_chain(self):\n \"\"\"\n Строит цепочку шаблонов.\n\n В цепочке каждый шаблон наследуется от одного и того же ifmo_xblock_base,\n поэтому порядок оборачивания не определён (точнее, его вычисляет\n метод super()). Поскольку при рендере шаблона используется исключительно\n lookup от шаблона, над которым он вызван, а не собственный Lookup для\n каждого из шаблона в коллекции, необходимо добавить в коллекцию все\n пути и шаблоны, использующиеся в шаблоне выше по цепочке. 
Более того,\n необходимо изменить имена шаблонов (ifmo_xblock_base) на уникальные.\n\n :param lookup: экземпляр TemplateLookup, в который будут записываться\n новые пути и шаблоны, использующиеся как родительские\n\n :return: tuple(template, lookup, base_template_id)\n - template -- шаблон, который должен будет стать родителем\n - lookup -- изменённый lookup\n \"\"\"\n\n def _build_chain(self, lookup=None):\n\n old_base_name = \"ifmo_xblock_base\"\n new_base_name = None\n\n if self.base is not None:\n\n import uuid\n new_base_name = \"{name}_{rnd}\".format(name=old_base_name, rnd=str(uuid.uuid4()))\n\n if hasattr(self.base, 'build_chain'):\n base_template, base_lookup = _build_chain(self.base, lookup)\n lookup.put_template(new_base_name, base_template)\n else:\n lookup.put_string(new_base_name, self.base.body_html())\n\n lookup.append_dirs(self.base.lookup_dirs)\n\n return Template(\n text=self._content.replace(old_base_name, new_base_name) if new_base_name else self._content,\n lookup=lookup\n ), lookup\n\n lookup = TemplateLookup(directories=self.lookup_dirs)\n template, _ = _build_chain(self, lookup)\n return template\n\n @property\n def resources(self):\n seen = set()\n parent_res = self.base.resources if self.base else []\n return [x for x in parent_res + self._resources if x not in seen and not seen.add(x)]\n\n @property\n def content(self):\n return self.body_html()\n\n @content.setter\n def content(self, value):\n self._content = value\n",
"step-ids": [
7,
8,
9,
11,
12
]
}
|
[
7,
8,
9,
11,
12
] |
class Circle:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Circle:
def __init__(self, radius, color='white'):
self.radius = radius
self.color = color
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Circle:
def __init__(self, radius, color='white'):
self.radius = radius
self.color = color
<|reserved_special_token_0|>
print('半径:{}, 色: {}'.format(c1.radius, c1.color))
<|reserved_special_token_1|>
class Circle:
def __init__(self, radius, color='white'):
self.radius = radius
self.color = color
c1 = Circle(10, 'black')
print('半径:{}, 色: {}'.format(c1.radius, c1.color))
<|reserved_special_token_1|>
class Circle():
def __init__(self, radius, color="white"):
self.radius = radius
self.color = color
c1 = Circle(10, "black")
print("半径:{}, 色: {}".format(c1.radius, c1.color))
|
flexible
|
{
"blob_id": "6ce50552571594c7be77ac0bf3b5274f2f39e545",
"index": 5086,
"step-1": "class Circle:\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Circle:\n\n def __init__(self, radius, color='white'):\n self.radius = radius\n self.color = color\n\n\n<mask token>\n",
"step-3": "class Circle:\n\n def __init__(self, radius, color='white'):\n self.radius = radius\n self.color = color\n\n\n<mask token>\nprint('半径:{}, 色: {}'.format(c1.radius, c1.color))\n",
"step-4": "class Circle:\n\n def __init__(self, radius, color='white'):\n self.radius = radius\n self.color = color\n\n\nc1 = Circle(10, 'black')\nprint('半径:{}, 色: {}'.format(c1.radius, c1.color))\n",
"step-5": "class Circle():\n def __init__(self, radius, color=\"white\"):\n self.radius = radius\n self.color = color\n \nc1 = Circle(10, \"black\")\nprint(\"半径:{}, 色: {}\".format(c1.radius, c1.color))",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
scheme = 'http'
hostname = 'localhost'
port = 9000
routes = [
'/available/2',
'/available/4'
]
|
normal
|
{
"blob_id": "d1402469232b5e3c3b09339849f6899e009fd74b",
"index": 3323,
"step-1": "<mask token>\n",
"step-2": "scheme = 'http'\nhostname = 'localhost'\nport = 9000\nroutes = ['/available/2', '/available/4']\n",
"step-3": "# -*- coding: utf-8 -*-\n\n\nscheme = 'http'\n\nhostname = 'localhost'\n\nport = 9000\n\nroutes = [\n '/available/2',\n '/available/4'\n]\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import math
import numpy as np
# import tkinter
import tensorflow as tf
from matplotlib import axis
import os
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.cluster import KMeans
from sklearn.metrics import confusion_matrix
class MD(BaseEstimator, TransformerMixin):
def __init__(self, data, input_size, epoch,
batch_size, iteration, alpha=1.0, n_neg_samples=10,
random_seed=2020):
# bind params to class
# network parameters.
self.iteration = iteration
self.epoch = epoch
self.batch_size = batch_size
self.learning_rate = 0.01
self.random_seed = random_seed
self.phase = True
self.first_layer_size = 256
self.second_layer_size = 128
self.third_layer_size = 128
self.input_size = input_size
# data.
self.X_train_ben = data[0]
self.X_train_mal = data[1]
self.X_test_ben = data[2]
self.X_test_mal = data[3]
# evaluation.
self.accuracy_list = [] # accuracy during training
self.fmeasure_list = [] # fmeasure during training
self.clusters_dist = [] # distance between clusters centroid
self.evaluation_metrics_list = {'accuracy': [], 'precision': [], 'recall': [],
'fmeasure': []} # evaluation metrics of test data for all epochs
self.FinalCenters = {'benignCenter': 0, 'malwareCenter': 0}
# init all variables in a tensorflow graph
self._init_graph()
def _init_graph(self):
'''
Init a tensorflow Graph containing: input data, variables, model, loss, optimizer
'''
self.graph = tf.Graph()
with self.graph.as_default(): # , tf.device('/cpu:0'):
# Set graph level random seed.
tf.set_random_seed(self.random_seed)
# Input data.
self.train_data = tf.placeholder(tf.float32,
shape=[None, self.input_size]) # batch_size * input_size
self.train_labels = tf.placeholder(tf.float32, shape=[None, 1]) # batch_size * 1
self.train_labels_center = tf.placeholder(tf.float32, shape=[None,
self.third_layer_size]) # batch_size * third_layer_size
self.train_labels_center_disagree = tf.placeholder(tf.float32, shape=[None,
self.third_layer_size]) # batch_size * third_layer_size
# Variables.
self.weights = self._initialize_weights()
# the embedding layer.
self.embedding_layer = tf.keras.layers.Embedding(256, 32, input_length=324)
self.embedding_result = self.embedding_layer(self.train_data)
self.embedding_result = tf.layers.Flatten()(self.embedding_result)
# the first hidden layer.
self.net1 = tf.matmul(self.embedding_result, self.weights['layer1']) # batch_size * first_layer_size
self.layer1 = tf.layers.batch_normalization(self.net1, training=self.phase)
self.layer1 = tf.nn.tanh(self.layer1)
# the second hidden layer.
self.net2 = tf.matmul(self.layer1, self.weights['layer2'])
self.net2 = tf.layers.batch_normalization(self.net2, training=self.phase)
self.net2 = tf.nn.relu(self.net2)
self.layer2 = tf.layers.dropout(self.net2, rate=0.3, training=self.phase)
# the third hidden layer.
self.net3 = tf.matmul(self.layer2, self.weights['layer3'])
self.layer3 = tf.nn.tanh(self.net3)
# loss function.
self.cross_entropy = tf.reduce_mean(tf.losses.mean_squared_error(self.train_labels_center, self.layer3))
# optimizer.
self.train_step = tf.train.AdamOptimizer(self.learning_rate).minimize(self.cross_entropy)
# init.
self.init = tf.initialize_all_variables()
self.sess = tf.Session()
self.sess.run(self.init)
def _initialize_weights(self):
self.all_weights = dict()
self.all_weights['layer1'] = tf.Variable(
tf.random.normal([10368, self.first_layer_size], mean=0.0, stddev=1)) # input_size * attr_dim
self.all_weights['layer2'] = tf.Variable(
tf.random.normal([self.first_layer_size, self.second_layer_size], mean=0.0,
stddev=1)) # input_size * attr_dim
self.all_weights['layer3'] = tf.Variable(
tf.random.normal([self.second_layer_size, self.third_layer_size], mean=0.0,
stddev=1)) # input_size * attr_dim
self.all_weights['layer1'] = tf.Variable(
tf.random.uniform([10368, self.first_layer_size], minval=-1,
maxval=1)) # input_size * attr_dim
self.all_weights['layer2'] = tf.Variable(
tf.random.uniform([self.first_layer_size, self.second_layer_size], minval=-1,
maxval=1)) # input_size * attr_dim
self.all_weights['layer3'] = tf.Variable(
tf.random.uniform([self.second_layer_size, self.third_layer_size], minval=-1,
maxval=1)) # input_size * attr_dim
# --------------------------------------------------------------------------
self.all_weights['layer1'] = tf.get_variable("w", [32 * self.input_size, self.first_layer_size],
initializer=tf.initializers.random_normal(mean=0, stddev=0.8),
regularizer=tf.keras.regularizers.l2(
0.01)) # input_size * attr_dim
self.all_weights['layer2'] = tf.get_variable("w2", [self.first_layer_size, self.second_layer_size],
initializer=tf.initializers.random_normal(mean=0,
stddev=0.8),
regularizer=tf.keras.regularizers.l2(
0.01)) # input_size * attr_dim
self.all_weights['layer3'] = tf.get_variable("w3", [self.second_layer_size, self.third_layer_size],
initializer=tf.initializers.random_normal(mean=0, stddev=0.8),
regularizer=tf.keras.regularizers.l2(
0.01)) # input_size * attr_dim
return self.all_weights
def kmeans_clustering(self, point, size, true_labels):
self.kmeans = KMeans(n_clusters=2, random_state=10, init='k-means++', n_init=20).fit(point)
self.kmeans_labels = self.kmeans.labels_
# find index of samples that are in the first cluster
self.label_list_0 = np.where(self.kmeans_labels == 0)[0]
# get labels of samples that are in the first cluster
temp = [true_labels[i][0] for i in self.label_list_0]
temp.append(2)
# determine label(cluster center) of benign and malware group based on the majority samples in each cluster
counts = np.bincount(temp)
if counts[0] > counts[1]: # counts[0] : number of benign in the first cluster
benign_center = self.kmeans.cluster_centers_[0]
malware_center = self.kmeans.cluster_centers_[1]
else:
benign_center = self.kmeans.cluster_centers_[1]
malware_center = self.kmeans.cluster_centers_[0]
# set label for each sample
new_labels = np.zeros((size, self.third_layer_size))
for i in range(size):
if true_labels[i][0] == 0.0:
new_labels[i] = benign_center
else:
new_labels[i] = malware_center
self.FinalCenters = {'benignCenter': benign_center, 'malwareCenter': malware_center}
return new_labels
def partial_fit(self, X): # fit a batch
# get network output.
feed_dict = {self.train_data: X['batch_data_train']}
self.points = self.sess.run((self.layer3), feed_dict=feed_dict)
# apply clustering to find expected output.
new_labels = self.kmeans_clustering(self.points, len(X['batch_data_label']), X['batch_data_label'])
self.clusters_dist.append(np.linalg.norm(self.kmeans.cluster_centers_[0] - self.kmeans.cluster_centers_[1]))
feed_dicts = {self.train_data: X['batch_data_train'],
self.train_labels_center: new_labels}
loss, opt = self.sess.run((self.cross_entropy, self.train_step), feed_dict=feed_dicts)
# print(loss)
# print('------------')
metrics = self.evaluate(X['batch_data_label'], self.kmeans_labels, len((X['batch_data_label'])))
self.accuracy_list.append(metrics[0])
self.fmeasure_list.append(metrics[3])
return loss
def evaluate(self, true_labels, kmeans_labels, size):
"""
:param true_labels: label of malware and benign samples as a 2D array(number of samples * 1) of 0 and 1
:param kmeans_labels: contains a list of 0 and 1 that each cell shows the sample cluster number
:param size: number of samples
:return: accuracy, precision, recall, f_measure
"""
# find index of samples that are in the first cluster
self.label_list_0 = np.where(kmeans_labels == 0)[0]
self.label_list_1 = np.where(kmeans_labels == 1)[0]
# get labels of samples that are in the first cluster
temp = [true_labels[i][0] for i in self.label_list_0]
temp1 = [true_labels[i][0] for i in self.label_list_1]
temp1.append(2)
temp.append(2)
# determine label(cluster center) of benign and malware group based on the majority samples in each cluster
counts = np.bincount(temp)
counts2 = np.bincount(temp1)
if counts[0] > counts[1]:
accuracy = (counts[0] + counts2[1]) / size
precision = counts2[1] / (counts2[1] + counts2[0])
recall = counts2[1] / (counts2[1] + counts[1])
f_measure = 2 * ((precision * recall) / (precision + recall))
else:
accuracy = (counts[1] + counts2[0]) / size
precision = counts[1] / (counts[1] + counts[0])
recall = counts[1] / (counts[1] + counts2[1])
f_measure = 2 * ((precision * recall) / (precision + recall))
return accuracy, precision, recall, f_measure
def final_fit(self, X, true_labels):
self.phase = False
# get network output for test data.
feed_dict = {self.train_data: X['data_test']}
self.points = self.sess.run(self.layer3, feed_dict=feed_dict)
# determine label of each test sample based on the euclidean distance
self.predicted_Labels = []
for i in range(len(true_labels)):
if np.linalg.norm(self.FinalCenters['benignCenter'] - self.points[i]) < np.linalg.norm(
self.FinalCenters['malwareCenter'] - self.points[i]):
self.predicted_Labels.append([0])
else:
self.predicted_Labels.append([1])
tn, fp, fn, tp = confusion_matrix(true_labels, self.predicted_Labels).ravel()
accuracy = (tp + tn) / (tp + tn + fn + fp)
precision = tp / (tp + fp)
recall = tp / (tp + fn)
f_measure = 2 * (precision * recall) / (precision + recall)
self.evaluation_metrics_list['accuracy'].append(np.float("{0:.4f}".format(accuracy)))
self.evaluation_metrics_list['precision'].append(np.float("{0:.4f}".format(precision)))
self.evaluation_metrics_list['recall'].append(np.float("{0:.4f}".format(recall)))
self.evaluation_metrics_list['fmeasure'].append(np.float("{0:.4f}".format(f_measure)))
print("accuracy", "precision", "recall", "f_measure", sep="\t\t\t\t\t")
print(accuracy, precision, recall, f_measure, sep="\t\t\t")
return 0
def train(self): # fit a dataset
for iter in range(self.iteration):
self.log("iteration {} ".format(iter))
for epoch in range(self.epoch):
self.accuracy_list = []
self.fmeasure_list = []
self.clusters_dist = []
self.log("epoch %s" % (epoch))
total_batches = int(len(self.X_train_ben['data']) / self.batch_size)
self.log('total_batches in epoch %s : %s ' % (epoch, total_batches))
start_index = 0
end_index = start_index + self.batch_size
self.counter = 0
# Loop over all batches.
for i in range(total_batches + 1):
self.counter += 1
# generate a batch data
batch_xs = {}
batch_xs['batch_data_train'] = np.concatenate(
[self.X_train_ben['data'][start_index:end_index], \
self.X_train_mal['data'][start_index:end_index]])
batch_xs['batch_data_label'] = np.concatenate(
[self.X_train_ben['label'][start_index:end_index], \
self.X_train_mal['label'][start_index:end_index]])
# Fit training using batch data
end_index = end_index + self.batch_size
cost = self.partial_fit(batch_xs)
# test
batch_test = {}
batch_test["data"] = np.concatenate([self.X_test_ben['data'], self.X_test_mal['data']])
batch_test["label"] = np.concatenate([self.X_test_ben['label'], self.X_test_mal['label']])
self.final_fit(batch_test, batch_test["label"])
# init all variables in a tensorflow graph for the next fold
self.sess.run(self.init)
return self.accuracy_list, self.fmeasure_list, self.clusters_dist, self.evaluation_metrics_list
def log(self, message):
print(message)
def write_result_to_file(self, variable, message):
    """Append aggregated evaluation results to results/<batch_size>/results.txt.

    Args:
        variable: dict with 'accuracy', 'precision', 'recall' and
            'fmeasure' keys, each a list of per-run metric values.
        message: header line written before the aggregated statistics.

    Writes *message*, then one tab-separated line containing
    mean+variance of accuracy, mean precision, mean recall and
    mean+variance of f-measure.
    """
    # NOTE(review): assumes results/<batch_size>/ already exists;
    # open() will not create missing directories.
    path = 'results/' + str(self.batch_size) + '/results.txt'
    summary = (
        str(np.mean(variable['accuracy'])) + '+' + str(np.var(variable['accuracy'])) + '\t'
        + str(np.mean(variable['precision'])) + '\t'
        + str(np.mean(variable['recall'])) + '\t'
        + str(np.mean(variable['fmeasure'])) + '+' + str(np.var(variable['fmeasure'])) + '\n'
    )
    # Context manager closes the handle even on error (the original
    # opened the file and never closed it).
    with open(path, 'a+') as out_file:
        out_file.write(message + "\n")
        out_file.write(summary)
|
normal
|
{
"blob_id": "a9947884e805cc8fcb6bff010a5f6e0ff0bb01fe",
"index": 8393,
"step-1": "<mask token>\n\n\nclass MD(BaseEstimator, TransformerMixin):\n <mask token>\n\n def _init_graph(self):\n \"\"\"\n Init a tensorflow Graph containing: input data, variables, model, loss, optimizer\n \"\"\"\n self.graph = tf.Graph()\n with self.graph.as_default():\n tf.set_random_seed(self.random_seed)\n self.train_data = tf.placeholder(tf.float32, shape=[None, self.\n input_size])\n self.train_labels = tf.placeholder(tf.float32, shape=[None, 1])\n self.train_labels_center = tf.placeholder(tf.float32, shape=[\n None, self.third_layer_size])\n self.train_labels_center_disagree = tf.placeholder(tf.float32,\n shape=[None, self.third_layer_size])\n self.weights = self._initialize_weights()\n self.embedding_layer = tf.keras.layers.Embedding(256, 32,\n input_length=324)\n self.embedding_result = self.embedding_layer(self.train_data)\n self.embedding_result = tf.layers.Flatten()(self.embedding_result)\n self.net1 = tf.matmul(self.embedding_result, self.weights['layer1']\n )\n self.layer1 = tf.layers.batch_normalization(self.net1, training\n =self.phase)\n self.layer1 = tf.nn.tanh(self.layer1)\n self.net2 = tf.matmul(self.layer1, self.weights['layer2'])\n self.net2 = tf.layers.batch_normalization(self.net2, training=\n self.phase)\n self.net2 = tf.nn.relu(self.net2)\n self.layer2 = tf.layers.dropout(self.net2, rate=0.3, training=\n self.phase)\n self.net3 = tf.matmul(self.layer2, self.weights['layer3'])\n self.layer3 = tf.nn.tanh(self.net3)\n self.cross_entropy = tf.reduce_mean(tf.losses.\n mean_squared_error(self.train_labels_center, self.layer3))\n self.train_step = tf.train.AdamOptimizer(self.learning_rate\n ).minimize(self.cross_entropy)\n self.init = tf.initialize_all_variables()\n self.sess = tf.Session()\n self.sess.run(self.init)\n <mask token>\n <mask token>\n <mask token>\n\n def evaluate(self, true_labels, kmeans_labels, size):\n \"\"\"\n :param true_labels: label of malware and benign samples as a 2D array(number of samples * 1) of 0 and 1\n :param 
kmeans_labels: contains a list of 0 and 1 that each cell shows the sample cluster number\n :param size: number of samples\n\n :return: accuracy, precision, recall, f_measure\n\n \"\"\"\n self.label_list_0 = np.where(kmeans_labels == 0)[0]\n self.label_list_1 = np.where(kmeans_labels == 1)[0]\n temp = [true_labels[i][0] for i in self.label_list_0]\n temp1 = [true_labels[i][0] for i in self.label_list_1]\n temp1.append(2)\n temp.append(2)\n counts = np.bincount(temp)\n counts2 = np.bincount(temp1)\n if counts[0] > counts[1]:\n accuracy = (counts[0] + counts2[1]) / size\n precision = counts2[1] / (counts2[1] + counts2[0])\n recall = counts2[1] / (counts2[1] + counts[1])\n f_measure = 2 * (precision * recall / (precision + recall))\n else:\n accuracy = (counts[1] + counts2[0]) / size\n precision = counts[1] / (counts[1] + counts[0])\n recall = counts[1] / (counts[1] + counts2[1])\n f_measure = 2 * (precision * recall / (precision + recall))\n return accuracy, precision, recall, f_measure\n <mask token>\n <mask token>\n\n def log(self, message):\n print(message)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass MD(BaseEstimator, TransformerMixin):\n\n def __init__(self, data, input_size, epoch, batch_size, iteration,\n alpha=1.0, n_neg_samples=10, random_seed=2020):\n self.iteration = iteration\n self.epoch = epoch\n self.batch_size = batch_size\n self.learning_rate = 0.01\n self.random_seed = random_seed\n self.phase = True\n self.first_layer_size = 256\n self.second_layer_size = 128\n self.third_layer_size = 128\n self.input_size = input_size\n self.X_train_ben = data[0]\n self.X_train_mal = data[1]\n self.X_test_ben = data[2]\n self.X_test_mal = data[3]\n self.accuracy_list = []\n self.fmeasure_list = []\n self.clusters_dist = []\n self.evaluation_metrics_list = {'accuracy': [], 'precision': [],\n 'recall': [], 'fmeasure': []}\n self.FinalCenters = {'benignCenter': 0, 'malwareCenter': 0}\n self._init_graph()\n\n def _init_graph(self):\n \"\"\"\n Init a tensorflow Graph containing: input data, variables, model, loss, optimizer\n \"\"\"\n self.graph = tf.Graph()\n with self.graph.as_default():\n tf.set_random_seed(self.random_seed)\n self.train_data = tf.placeholder(tf.float32, shape=[None, self.\n input_size])\n self.train_labels = tf.placeholder(tf.float32, shape=[None, 1])\n self.train_labels_center = tf.placeholder(tf.float32, shape=[\n None, self.third_layer_size])\n self.train_labels_center_disagree = tf.placeholder(tf.float32,\n shape=[None, self.third_layer_size])\n self.weights = self._initialize_weights()\n self.embedding_layer = tf.keras.layers.Embedding(256, 32,\n input_length=324)\n self.embedding_result = self.embedding_layer(self.train_data)\n self.embedding_result = tf.layers.Flatten()(self.embedding_result)\n self.net1 = tf.matmul(self.embedding_result, self.weights['layer1']\n )\n self.layer1 = tf.layers.batch_normalization(self.net1, training\n =self.phase)\n self.layer1 = tf.nn.tanh(self.layer1)\n self.net2 = tf.matmul(self.layer1, self.weights['layer2'])\n self.net2 = tf.layers.batch_normalization(self.net2, 
training=\n self.phase)\n self.net2 = tf.nn.relu(self.net2)\n self.layer2 = tf.layers.dropout(self.net2, rate=0.3, training=\n self.phase)\n self.net3 = tf.matmul(self.layer2, self.weights['layer3'])\n self.layer3 = tf.nn.tanh(self.net3)\n self.cross_entropy = tf.reduce_mean(tf.losses.\n mean_squared_error(self.train_labels_center, self.layer3))\n self.train_step = tf.train.AdamOptimizer(self.learning_rate\n ).minimize(self.cross_entropy)\n self.init = tf.initialize_all_variables()\n self.sess = tf.Session()\n self.sess.run(self.init)\n\n def _initialize_weights(self):\n self.all_weights = dict()\n self.all_weights['layer1'] = tf.Variable(tf.random.normal([10368,\n self.first_layer_size], mean=0.0, stddev=1))\n self.all_weights['layer2'] = tf.Variable(tf.random.normal([self.\n first_layer_size, self.second_layer_size], mean=0.0, stddev=1))\n self.all_weights['layer3'] = tf.Variable(tf.random.normal([self.\n second_layer_size, self.third_layer_size], mean=0.0, stddev=1))\n self.all_weights['layer1'] = tf.Variable(tf.random.uniform([10368,\n self.first_layer_size], minval=-1, maxval=1))\n self.all_weights['layer2'] = tf.Variable(tf.random.uniform([self.\n first_layer_size, self.second_layer_size], minval=-1, maxval=1))\n self.all_weights['layer3'] = tf.Variable(tf.random.uniform([self.\n second_layer_size, self.third_layer_size], minval=-1, maxval=1))\n self.all_weights['layer1'] = tf.get_variable('w', [32 * self.\n input_size, self.first_layer_size], initializer=tf.initializers\n .random_normal(mean=0, stddev=0.8), regularizer=tf.keras.\n regularizers.l2(0.01))\n self.all_weights['layer2'] = tf.get_variable('w2', [self.\n first_layer_size, self.second_layer_size], initializer=tf.\n initializers.random_normal(mean=0, stddev=0.8), regularizer=tf.\n keras.regularizers.l2(0.01))\n self.all_weights['layer3'] = tf.get_variable('w3', [self.\n second_layer_size, self.third_layer_size], initializer=tf.\n initializers.random_normal(mean=0, stddev=0.8), regularizer=tf.\n 
keras.regularizers.l2(0.01))\n return self.all_weights\n <mask token>\n\n def partial_fit(self, X):\n feed_dict = {self.train_data: X['batch_data_train']}\n self.points = self.sess.run(self.layer3, feed_dict=feed_dict)\n new_labels = self.kmeans_clustering(self.points, len(X[\n 'batch_data_label']), X['batch_data_label'])\n self.clusters_dist.append(np.linalg.norm(self.kmeans.\n cluster_centers_[0] - self.kmeans.cluster_centers_[1]))\n feed_dicts = {self.train_data: X['batch_data_train'], self.\n train_labels_center: new_labels}\n loss, opt = self.sess.run((self.cross_entropy, self.train_step),\n feed_dict=feed_dicts)\n metrics = self.evaluate(X['batch_data_label'], self.kmeans_labels,\n len(X['batch_data_label']))\n self.accuracy_list.append(metrics[0])\n self.fmeasure_list.append(metrics[3])\n return loss\n\n def evaluate(self, true_labels, kmeans_labels, size):\n \"\"\"\n :param true_labels: label of malware and benign samples as a 2D array(number of samples * 1) of 0 and 1\n :param kmeans_labels: contains a list of 0 and 1 that each cell shows the sample cluster number\n :param size: number of samples\n\n :return: accuracy, precision, recall, f_measure\n\n \"\"\"\n self.label_list_0 = np.where(kmeans_labels == 0)[0]\n self.label_list_1 = np.where(kmeans_labels == 1)[0]\n temp = [true_labels[i][0] for i in self.label_list_0]\n temp1 = [true_labels[i][0] for i in self.label_list_1]\n temp1.append(2)\n temp.append(2)\n counts = np.bincount(temp)\n counts2 = np.bincount(temp1)\n if counts[0] > counts[1]:\n accuracy = (counts[0] + counts2[1]) / size\n precision = counts2[1] / (counts2[1] + counts2[0])\n recall = counts2[1] / (counts2[1] + counts[1])\n f_measure = 2 * (precision * recall / (precision + recall))\n else:\n accuracy = (counts[1] + counts2[0]) / size\n precision = counts[1] / (counts[1] + counts[0])\n recall = counts[1] / (counts[1] + counts2[1])\n f_measure = 2 * (precision * recall / (precision + recall))\n return accuracy, precision, recall, 
f_measure\n\n def final_fit(self, X, true_labels):\n self.phase = False\n feed_dict = {self.train_data: X['data_test']}\n self.points = self.sess.run(self.layer3, feed_dict=feed_dict)\n self.predicted_Labels = []\n for i in range(len(true_labels)):\n if np.linalg.norm(self.FinalCenters['benignCenter'] - self.\n points[i]) < np.linalg.norm(self.FinalCenters[\n 'malwareCenter'] - self.points[i]):\n self.predicted_Labels.append([0])\n else:\n self.predicted_Labels.append([1])\n tn, fp, fn, tp = confusion_matrix(true_labels, self.predicted_Labels\n ).ravel()\n accuracy = (tp + tn) / (tp + tn + fn + fp)\n precision = tp / (tp + fp)\n recall = tp / (tp + fn)\n f_measure = 2 * (precision * recall) / (precision + recall)\n self.evaluation_metrics_list['accuracy'].append(np.float('{0:.4f}'.\n format(accuracy)))\n self.evaluation_metrics_list['precision'].append(np.float('{0:.4f}'\n .format(precision)))\n self.evaluation_metrics_list['recall'].append(np.float('{0:.4f}'.\n format(recall)))\n self.evaluation_metrics_list['fmeasure'].append(np.float('{0:.4f}'.\n format(f_measure)))\n print('accuracy', 'precision', 'recall', 'f_measure', sep='\\t\\t\\t\\t\\t')\n print(accuracy, precision, recall, f_measure, sep='\\t\\t\\t')\n return 0\n <mask token>\n\n def log(self, message):\n print(message)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass MD(BaseEstimator, TransformerMixin):\n\n def __init__(self, data, input_size, epoch, batch_size, iteration,\n alpha=1.0, n_neg_samples=10, random_seed=2020):\n self.iteration = iteration\n self.epoch = epoch\n self.batch_size = batch_size\n self.learning_rate = 0.01\n self.random_seed = random_seed\n self.phase = True\n self.first_layer_size = 256\n self.second_layer_size = 128\n self.third_layer_size = 128\n self.input_size = input_size\n self.X_train_ben = data[0]\n self.X_train_mal = data[1]\n self.X_test_ben = data[2]\n self.X_test_mal = data[3]\n self.accuracy_list = []\n self.fmeasure_list = []\n self.clusters_dist = []\n self.evaluation_metrics_list = {'accuracy': [], 'precision': [],\n 'recall': [], 'fmeasure': []}\n self.FinalCenters = {'benignCenter': 0, 'malwareCenter': 0}\n self._init_graph()\n\n def _init_graph(self):\n \"\"\"\n Init a tensorflow Graph containing: input data, variables, model, loss, optimizer\n \"\"\"\n self.graph = tf.Graph()\n with self.graph.as_default():\n tf.set_random_seed(self.random_seed)\n self.train_data = tf.placeholder(tf.float32, shape=[None, self.\n input_size])\n self.train_labels = tf.placeholder(tf.float32, shape=[None, 1])\n self.train_labels_center = tf.placeholder(tf.float32, shape=[\n None, self.third_layer_size])\n self.train_labels_center_disagree = tf.placeholder(tf.float32,\n shape=[None, self.third_layer_size])\n self.weights = self._initialize_weights()\n self.embedding_layer = tf.keras.layers.Embedding(256, 32,\n input_length=324)\n self.embedding_result = self.embedding_layer(self.train_data)\n self.embedding_result = tf.layers.Flatten()(self.embedding_result)\n self.net1 = tf.matmul(self.embedding_result, self.weights['layer1']\n )\n self.layer1 = tf.layers.batch_normalization(self.net1, training\n =self.phase)\n self.layer1 = tf.nn.tanh(self.layer1)\n self.net2 = tf.matmul(self.layer1, self.weights['layer2'])\n self.net2 = tf.layers.batch_normalization(self.net2, 
training=\n self.phase)\n self.net2 = tf.nn.relu(self.net2)\n self.layer2 = tf.layers.dropout(self.net2, rate=0.3, training=\n self.phase)\n self.net3 = tf.matmul(self.layer2, self.weights['layer3'])\n self.layer3 = tf.nn.tanh(self.net3)\n self.cross_entropy = tf.reduce_mean(tf.losses.\n mean_squared_error(self.train_labels_center, self.layer3))\n self.train_step = tf.train.AdamOptimizer(self.learning_rate\n ).minimize(self.cross_entropy)\n self.init = tf.initialize_all_variables()\n self.sess = tf.Session()\n self.sess.run(self.init)\n\n def _initialize_weights(self):\n self.all_weights = dict()\n self.all_weights['layer1'] = tf.Variable(tf.random.normal([10368,\n self.first_layer_size], mean=0.0, stddev=1))\n self.all_weights['layer2'] = tf.Variable(tf.random.normal([self.\n first_layer_size, self.second_layer_size], mean=0.0, stddev=1))\n self.all_weights['layer3'] = tf.Variable(tf.random.normal([self.\n second_layer_size, self.third_layer_size], mean=0.0, stddev=1))\n self.all_weights['layer1'] = tf.Variable(tf.random.uniform([10368,\n self.first_layer_size], minval=-1, maxval=1))\n self.all_weights['layer2'] = tf.Variable(tf.random.uniform([self.\n first_layer_size, self.second_layer_size], minval=-1, maxval=1))\n self.all_weights['layer3'] = tf.Variable(tf.random.uniform([self.\n second_layer_size, self.third_layer_size], minval=-1, maxval=1))\n self.all_weights['layer1'] = tf.get_variable('w', [32 * self.\n input_size, self.first_layer_size], initializer=tf.initializers\n .random_normal(mean=0, stddev=0.8), regularizer=tf.keras.\n regularizers.l2(0.01))\n self.all_weights['layer2'] = tf.get_variable('w2', [self.\n first_layer_size, self.second_layer_size], initializer=tf.\n initializers.random_normal(mean=0, stddev=0.8), regularizer=tf.\n keras.regularizers.l2(0.01))\n self.all_weights['layer3'] = tf.get_variable('w3', [self.\n second_layer_size, self.third_layer_size], initializer=tf.\n initializers.random_normal(mean=0, stddev=0.8), regularizer=tf.\n 
keras.regularizers.l2(0.01))\n return self.all_weights\n <mask token>\n\n def partial_fit(self, X):\n feed_dict = {self.train_data: X['batch_data_train']}\n self.points = self.sess.run(self.layer3, feed_dict=feed_dict)\n new_labels = self.kmeans_clustering(self.points, len(X[\n 'batch_data_label']), X['batch_data_label'])\n self.clusters_dist.append(np.linalg.norm(self.kmeans.\n cluster_centers_[0] - self.kmeans.cluster_centers_[1]))\n feed_dicts = {self.train_data: X['batch_data_train'], self.\n train_labels_center: new_labels}\n loss, opt = self.sess.run((self.cross_entropy, self.train_step),\n feed_dict=feed_dicts)\n metrics = self.evaluate(X['batch_data_label'], self.kmeans_labels,\n len(X['batch_data_label']))\n self.accuracy_list.append(metrics[0])\n self.fmeasure_list.append(metrics[3])\n return loss\n\n def evaluate(self, true_labels, kmeans_labels, size):\n \"\"\"\n :param true_labels: label of malware and benign samples as a 2D array(number of samples * 1) of 0 and 1\n :param kmeans_labels: contains a list of 0 and 1 that each cell shows the sample cluster number\n :param size: number of samples\n\n :return: accuracy, precision, recall, f_measure\n\n \"\"\"\n self.label_list_0 = np.where(kmeans_labels == 0)[0]\n self.label_list_1 = np.where(kmeans_labels == 1)[0]\n temp = [true_labels[i][0] for i in self.label_list_0]\n temp1 = [true_labels[i][0] for i in self.label_list_1]\n temp1.append(2)\n temp.append(2)\n counts = np.bincount(temp)\n counts2 = np.bincount(temp1)\n if counts[0] > counts[1]:\n accuracy = (counts[0] + counts2[1]) / size\n precision = counts2[1] / (counts2[1] + counts2[0])\n recall = counts2[1] / (counts2[1] + counts[1])\n f_measure = 2 * (precision * recall / (precision + recall))\n else:\n accuracy = (counts[1] + counts2[0]) / size\n precision = counts[1] / (counts[1] + counts[0])\n recall = counts[1] / (counts[1] + counts2[1])\n f_measure = 2 * (precision * recall / (precision + recall))\n return accuracy, precision, recall, 
f_measure\n\n def final_fit(self, X, true_labels):\n self.phase = False\n feed_dict = {self.train_data: X['data_test']}\n self.points = self.sess.run(self.layer3, feed_dict=feed_dict)\n self.predicted_Labels = []\n for i in range(len(true_labels)):\n if np.linalg.norm(self.FinalCenters['benignCenter'] - self.\n points[i]) < np.linalg.norm(self.FinalCenters[\n 'malwareCenter'] - self.points[i]):\n self.predicted_Labels.append([0])\n else:\n self.predicted_Labels.append([1])\n tn, fp, fn, tp = confusion_matrix(true_labels, self.predicted_Labels\n ).ravel()\n accuracy = (tp + tn) / (tp + tn + fn + fp)\n precision = tp / (tp + fp)\n recall = tp / (tp + fn)\n f_measure = 2 * (precision * recall) / (precision + recall)\n self.evaluation_metrics_list['accuracy'].append(np.float('{0:.4f}'.\n format(accuracy)))\n self.evaluation_metrics_list['precision'].append(np.float('{0:.4f}'\n .format(precision)))\n self.evaluation_metrics_list['recall'].append(np.float('{0:.4f}'.\n format(recall)))\n self.evaluation_metrics_list['fmeasure'].append(np.float('{0:.4f}'.\n format(f_measure)))\n print('accuracy', 'precision', 'recall', 'f_measure', sep='\\t\\t\\t\\t\\t')\n print(accuracy, precision, recall, f_measure, sep='\\t\\t\\t')\n return 0\n\n def train(self):\n for iter in range(self.iteration):\n self.log('iteration {} '.format(iter))\n for epoch in range(self.epoch):\n self.accuracy_list = []\n self.fmeasure_list = []\n self.clusters_dist = []\n self.log('epoch %s' % epoch)\n total_batches = int(len(self.X_train_ben['data']) / self.\n batch_size)\n self.log('total_batches in epoch %s : %s ' % (epoch,\n total_batches))\n start_index = 0\n end_index = start_index + self.batch_size\n self.counter = 0\n for i in range(total_batches + 1):\n self.counter += 1\n batch_xs = {}\n batch_xs['batch_data_train'] = np.concatenate([self.\n X_train_ben['data'][start_index:end_index], self.\n X_train_mal['data'][start_index:end_index]])\n batch_xs['batch_data_label'] = np.concatenate([self.\n 
X_train_ben['label'][start_index:end_index], self.\n X_train_mal['label'][start_index:end_index]])\n end_index = end_index + self.batch_size\n cost = self.partial_fit(batch_xs)\n batch_test = {}\n batch_test['data'] = np.concatenate([self.X_test_ben['data'],\n self.X_test_mal['data']])\n batch_test['label'] = np.concatenate([self.X_test_ben['label'],\n self.X_test_mal['label']])\n self.final_fit(batch_test, batch_test['label'])\n self.sess.run(self.init)\n return (self.accuracy_list, self.fmeasure_list, self.clusters_dist,\n self.evaluation_metrics_list)\n\n def log(self, message):\n print(message)\n <mask token>\n",
"step-4": "<mask token>\n\n\nclass MD(BaseEstimator, TransformerMixin):\n\n def __init__(self, data, input_size, epoch, batch_size, iteration,\n alpha=1.0, n_neg_samples=10, random_seed=2020):\n self.iteration = iteration\n self.epoch = epoch\n self.batch_size = batch_size\n self.learning_rate = 0.01\n self.random_seed = random_seed\n self.phase = True\n self.first_layer_size = 256\n self.second_layer_size = 128\n self.third_layer_size = 128\n self.input_size = input_size\n self.X_train_ben = data[0]\n self.X_train_mal = data[1]\n self.X_test_ben = data[2]\n self.X_test_mal = data[3]\n self.accuracy_list = []\n self.fmeasure_list = []\n self.clusters_dist = []\n self.evaluation_metrics_list = {'accuracy': [], 'precision': [],\n 'recall': [], 'fmeasure': []}\n self.FinalCenters = {'benignCenter': 0, 'malwareCenter': 0}\n self._init_graph()\n\n def _init_graph(self):\n \"\"\"\n Init a tensorflow Graph containing: input data, variables, model, loss, optimizer\n \"\"\"\n self.graph = tf.Graph()\n with self.graph.as_default():\n tf.set_random_seed(self.random_seed)\n self.train_data = tf.placeholder(tf.float32, shape=[None, self.\n input_size])\n self.train_labels = tf.placeholder(tf.float32, shape=[None, 1])\n self.train_labels_center = tf.placeholder(tf.float32, shape=[\n None, self.third_layer_size])\n self.train_labels_center_disagree = tf.placeholder(tf.float32,\n shape=[None, self.third_layer_size])\n self.weights = self._initialize_weights()\n self.embedding_layer = tf.keras.layers.Embedding(256, 32,\n input_length=324)\n self.embedding_result = self.embedding_layer(self.train_data)\n self.embedding_result = tf.layers.Flatten()(self.embedding_result)\n self.net1 = tf.matmul(self.embedding_result, self.weights['layer1']\n )\n self.layer1 = tf.layers.batch_normalization(self.net1, training\n =self.phase)\n self.layer1 = tf.nn.tanh(self.layer1)\n self.net2 = tf.matmul(self.layer1, self.weights['layer2'])\n self.net2 = tf.layers.batch_normalization(self.net2, 
training=\n self.phase)\n self.net2 = tf.nn.relu(self.net2)\n self.layer2 = tf.layers.dropout(self.net2, rate=0.3, training=\n self.phase)\n self.net3 = tf.matmul(self.layer2, self.weights['layer3'])\n self.layer3 = tf.nn.tanh(self.net3)\n self.cross_entropy = tf.reduce_mean(tf.losses.\n mean_squared_error(self.train_labels_center, self.layer3))\n self.train_step = tf.train.AdamOptimizer(self.learning_rate\n ).minimize(self.cross_entropy)\n self.init = tf.initialize_all_variables()\n self.sess = tf.Session()\n self.sess.run(self.init)\n\n def _initialize_weights(self):\n self.all_weights = dict()\n self.all_weights['layer1'] = tf.Variable(tf.random.normal([10368,\n self.first_layer_size], mean=0.0, stddev=1))\n self.all_weights['layer2'] = tf.Variable(tf.random.normal([self.\n first_layer_size, self.second_layer_size], mean=0.0, stddev=1))\n self.all_weights['layer3'] = tf.Variable(tf.random.normal([self.\n second_layer_size, self.third_layer_size], mean=0.0, stddev=1))\n self.all_weights['layer1'] = tf.Variable(tf.random.uniform([10368,\n self.first_layer_size], minval=-1, maxval=1))\n self.all_weights['layer2'] = tf.Variable(tf.random.uniform([self.\n first_layer_size, self.second_layer_size], minval=-1, maxval=1))\n self.all_weights['layer3'] = tf.Variable(tf.random.uniform([self.\n second_layer_size, self.third_layer_size], minval=-1, maxval=1))\n self.all_weights['layer1'] = tf.get_variable('w', [32 * self.\n input_size, self.first_layer_size], initializer=tf.initializers\n .random_normal(mean=0, stddev=0.8), regularizer=tf.keras.\n regularizers.l2(0.01))\n self.all_weights['layer2'] = tf.get_variable('w2', [self.\n first_layer_size, self.second_layer_size], initializer=tf.\n initializers.random_normal(mean=0, stddev=0.8), regularizer=tf.\n keras.regularizers.l2(0.01))\n self.all_weights['layer3'] = tf.get_variable('w3', [self.\n second_layer_size, self.third_layer_size], initializer=tf.\n initializers.random_normal(mean=0, stddev=0.8), regularizer=tf.\n 
keras.regularizers.l2(0.01))\n return self.all_weights\n\n def kmeans_clustering(self, point, size, true_labels):\n self.kmeans = KMeans(n_clusters=2, random_state=10, init=\n 'k-means++', n_init=20).fit(point)\n self.kmeans_labels = self.kmeans.labels_\n self.label_list_0 = np.where(self.kmeans_labels == 0)[0]\n temp = [true_labels[i][0] for i in self.label_list_0]\n temp.append(2)\n counts = np.bincount(temp)\n if counts[0] > counts[1]:\n benign_center = self.kmeans.cluster_centers_[0]\n malware_center = self.kmeans.cluster_centers_[1]\n else:\n benign_center = self.kmeans.cluster_centers_[1]\n malware_center = self.kmeans.cluster_centers_[0]\n new_labels = np.zeros((size, self.third_layer_size))\n for i in range(size):\n if true_labels[i][0] == 0.0:\n new_labels[i] = benign_center\n else:\n new_labels[i] = malware_center\n self.FinalCenters = {'benignCenter': benign_center, 'malwareCenter':\n malware_center}\n return new_labels\n\n def partial_fit(self, X):\n feed_dict = {self.train_data: X['batch_data_train']}\n self.points = self.sess.run(self.layer3, feed_dict=feed_dict)\n new_labels = self.kmeans_clustering(self.points, len(X[\n 'batch_data_label']), X['batch_data_label'])\n self.clusters_dist.append(np.linalg.norm(self.kmeans.\n cluster_centers_[0] - self.kmeans.cluster_centers_[1]))\n feed_dicts = {self.train_data: X['batch_data_train'], self.\n train_labels_center: new_labels}\n loss, opt = self.sess.run((self.cross_entropy, self.train_step),\n feed_dict=feed_dicts)\n metrics = self.evaluate(X['batch_data_label'], self.kmeans_labels,\n len(X['batch_data_label']))\n self.accuracy_list.append(metrics[0])\n self.fmeasure_list.append(metrics[3])\n return loss\n\n def evaluate(self, true_labels, kmeans_labels, size):\n \"\"\"\n :param true_labels: label of malware and benign samples as a 2D array(number of samples * 1) of 0 and 1\n :param kmeans_labels: contains a list of 0 and 1 that each cell shows the sample cluster number\n :param size: number of 
samples\n\n :return: accuracy, precision, recall, f_measure\n\n \"\"\"\n self.label_list_0 = np.where(kmeans_labels == 0)[0]\n self.label_list_1 = np.where(kmeans_labels == 1)[0]\n temp = [true_labels[i][0] for i in self.label_list_0]\n temp1 = [true_labels[i][0] for i in self.label_list_1]\n temp1.append(2)\n temp.append(2)\n counts = np.bincount(temp)\n counts2 = np.bincount(temp1)\n if counts[0] > counts[1]:\n accuracy = (counts[0] + counts2[1]) / size\n precision = counts2[1] / (counts2[1] + counts2[0])\n recall = counts2[1] / (counts2[1] + counts[1])\n f_measure = 2 * (precision * recall / (precision + recall))\n else:\n accuracy = (counts[1] + counts2[0]) / size\n precision = counts[1] / (counts[1] + counts[0])\n recall = counts[1] / (counts[1] + counts2[1])\n f_measure = 2 * (precision * recall / (precision + recall))\n return accuracy, precision, recall, f_measure\n\n def final_fit(self, X, true_labels):\n self.phase = False\n feed_dict = {self.train_data: X['data_test']}\n self.points = self.sess.run(self.layer3, feed_dict=feed_dict)\n self.predicted_Labels = []\n for i in range(len(true_labels)):\n if np.linalg.norm(self.FinalCenters['benignCenter'] - self.\n points[i]) < np.linalg.norm(self.FinalCenters[\n 'malwareCenter'] - self.points[i]):\n self.predicted_Labels.append([0])\n else:\n self.predicted_Labels.append([1])\n tn, fp, fn, tp = confusion_matrix(true_labels, self.predicted_Labels\n ).ravel()\n accuracy = (tp + tn) / (tp + tn + fn + fp)\n precision = tp / (tp + fp)\n recall = tp / (tp + fn)\n f_measure = 2 * (precision * recall) / (precision + recall)\n self.evaluation_metrics_list['accuracy'].append(np.float('{0:.4f}'.\n format(accuracy)))\n self.evaluation_metrics_list['precision'].append(np.float('{0:.4f}'\n .format(precision)))\n self.evaluation_metrics_list['recall'].append(np.float('{0:.4f}'.\n format(recall)))\n self.evaluation_metrics_list['fmeasure'].append(np.float('{0:.4f}'.\n format(f_measure)))\n print('accuracy', 'precision', 
'recall', 'f_measure', sep='\\t\\t\\t\\t\\t')\n print(accuracy, precision, recall, f_measure, sep='\\t\\t\\t')\n return 0\n\n def train(self):\n for iter in range(self.iteration):\n self.log('iteration {} '.format(iter))\n for epoch in range(self.epoch):\n self.accuracy_list = []\n self.fmeasure_list = []\n self.clusters_dist = []\n self.log('epoch %s' % epoch)\n total_batches = int(len(self.X_train_ben['data']) / self.\n batch_size)\n self.log('total_batches in epoch %s : %s ' % (epoch,\n total_batches))\n start_index = 0\n end_index = start_index + self.batch_size\n self.counter = 0\n for i in range(total_batches + 1):\n self.counter += 1\n batch_xs = {}\n batch_xs['batch_data_train'] = np.concatenate([self.\n X_train_ben['data'][start_index:end_index], self.\n X_train_mal['data'][start_index:end_index]])\n batch_xs['batch_data_label'] = np.concatenate([self.\n X_train_ben['label'][start_index:end_index], self.\n X_train_mal['label'][start_index:end_index]])\n end_index = end_index + self.batch_size\n cost = self.partial_fit(batch_xs)\n batch_test = {}\n batch_test['data'] = np.concatenate([self.X_test_ben['data'],\n self.X_test_mal['data']])\n batch_test['label'] = np.concatenate([self.X_test_ben['label'],\n self.X_test_mal['label']])\n self.final_fit(batch_test, batch_test['label'])\n self.sess.run(self.init)\n return (self.accuracy_list, self.fmeasure_list, self.clusters_dist,\n self.evaluation_metrics_list)\n\n def log(self, message):\n print(message)\n\n def write_result_to_file(self, variable, message):\n file = open('results/' + str(self.batch_size) + '/results.txt', 'a+')\n file.write(message + '\\n')\n file.write(str(np.mean(variable['accuracy'])) + '+' + str(np.var(\n variable['accuracy'])) + '\\t' + str(np.mean(variable[\n 'precision'])) + '\\t' + str(np.mean(variable['recall'])) + '\\t' +\n str(np.mean(variable['fmeasure'])) + '+' + str(np.var(variable[\n 'fmeasure'])) + '\\n')\n",
"step-5": "import math\nimport numpy as np\n# import tkinter\nimport tensorflow as tf\nfrom matplotlib import axis\nimport os\nfrom sklearn.base import BaseEstimator, TransformerMixin\nfrom sklearn.cluster import KMeans\nfrom sklearn.metrics import confusion_matrix\n\n\nclass MD(BaseEstimator, TransformerMixin):\n def __init__(self, data, input_size, epoch,\n batch_size, iteration, alpha=1.0, n_neg_samples=10,\n random_seed=2020):\n # bind params to class\n\n # network parameters.\n self.iteration = iteration\n self.epoch = epoch\n self.batch_size = batch_size\n self.learning_rate = 0.01\n self.random_seed = random_seed\n self.phase = True\n self.first_layer_size = 256\n self.second_layer_size = 128\n self.third_layer_size = 128\n self.input_size = input_size\n\n # data.\n self.X_train_ben = data[0]\n self.X_train_mal = data[1]\n self.X_test_ben = data[2]\n self.X_test_mal = data[3]\n\n # evaluation.\n self.accuracy_list = [] # accuracy during training\n self.fmeasure_list = [] # fmeasure during training\n self.clusters_dist = [] # distance between clusters centroid\n self.evaluation_metrics_list = {'accuracy': [], 'precision': [], 'recall': [],\n 'fmeasure': []} # evaluation metrics of test data for all epochs\n\n self.FinalCenters = {'benignCenter': 0, 'malwareCenter': 0}\n\n # init all variables in a tensorflow graph\n self._init_graph()\n\n def _init_graph(self):\n '''\n Init a tensorflow Graph containing: input data, variables, model, loss, optimizer\n '''\n self.graph = tf.Graph()\n with self.graph.as_default(): # , tf.device('/cpu:0'):\n\n # Set graph level random seed.\n tf.set_random_seed(self.random_seed)\n\n # Input data.\n\n self.train_data = tf.placeholder(tf.float32,\n shape=[None, self.input_size]) # batch_size * input_size\n self.train_labels = tf.placeholder(tf.float32, shape=[None, 1]) # batch_size * 1\n self.train_labels_center = tf.placeholder(tf.float32, shape=[None,\n self.third_layer_size]) # batch_size * third_layer_size\n 
self.train_labels_center_disagree = tf.placeholder(tf.float32, shape=[None,\n self.third_layer_size]) # batch_size * third_layer_size\n\n # Variables.\n self.weights = self._initialize_weights()\n\n # the embedding layer.\n self.embedding_layer = tf.keras.layers.Embedding(256, 32, input_length=324)\n self.embedding_result = self.embedding_layer(self.train_data)\n self.embedding_result = tf.layers.Flatten()(self.embedding_result)\n\n # the first hidden layer.\n self.net1 = tf.matmul(self.embedding_result, self.weights['layer1']) # batch_size * first_layer_size\n self.layer1 = tf.layers.batch_normalization(self.net1, training=self.phase)\n self.layer1 = tf.nn.tanh(self.layer1)\n\n # the second hidden layer.\n self.net2 = tf.matmul(self.layer1, self.weights['layer2'])\n self.net2 = tf.layers.batch_normalization(self.net2, training=self.phase)\n self.net2 = tf.nn.relu(self.net2)\n self.layer2 = tf.layers.dropout(self.net2, rate=0.3, training=self.phase)\n\n # the third hidden layer.\n self.net3 = tf.matmul(self.layer2, self.weights['layer3'])\n self.layer3 = tf.nn.tanh(self.net3)\n\n # loss function.\n self.cross_entropy = tf.reduce_mean(tf.losses.mean_squared_error(self.train_labels_center, self.layer3))\n\n # optimizer.\n self.train_step = tf.train.AdamOptimizer(self.learning_rate).minimize(self.cross_entropy)\n\n # init.\n self.init = tf.initialize_all_variables()\n self.sess = tf.Session()\n self.sess.run(self.init)\n\n def _initialize_weights(self):\n\n self.all_weights = dict()\n\n self.all_weights['layer1'] = tf.Variable(\n tf.random.normal([10368, self.first_layer_size], mean=0.0, stddev=1)) # input_size * attr_dim\n self.all_weights['layer2'] = tf.Variable(\n tf.random.normal([self.first_layer_size, self.second_layer_size], mean=0.0,\n stddev=1)) # input_size * attr_dim\n\n self.all_weights['layer3'] = tf.Variable(\n tf.random.normal([self.second_layer_size, self.third_layer_size], mean=0.0,\n stddev=1)) # input_size * attr_dim\n\n self.all_weights['layer1'] = 
tf.Variable(\n tf.random.uniform([10368, self.first_layer_size], minval=-1,\n maxval=1)) # input_size * attr_dim\n self.all_weights['layer2'] = tf.Variable(\n tf.random.uniform([self.first_layer_size, self.second_layer_size], minval=-1,\n maxval=1)) # input_size * attr_dim\n\n self.all_weights['layer3'] = tf.Variable(\n tf.random.uniform([self.second_layer_size, self.third_layer_size], minval=-1,\n maxval=1)) # input_size * attr_dim\n # --------------------------------------------------------------------------\n self.all_weights['layer1'] = tf.get_variable(\"w\", [32 * self.input_size, self.first_layer_size],\n initializer=tf.initializers.random_normal(mean=0, stddev=0.8),\n regularizer=tf.keras.regularizers.l2(\n 0.01)) # input_size * attr_dim\n self.all_weights['layer2'] = tf.get_variable(\"w2\", [self.first_layer_size, self.second_layer_size],\n initializer=tf.initializers.random_normal(mean=0,\n stddev=0.8),\n regularizer=tf.keras.regularizers.l2(\n 0.01)) # input_size * attr_dim\n\n self.all_weights['layer3'] = tf.get_variable(\"w3\", [self.second_layer_size, self.third_layer_size],\n initializer=tf.initializers.random_normal(mean=0, stddev=0.8),\n regularizer=tf.keras.regularizers.l2(\n 0.01)) # input_size * attr_dim\n\n return self.all_weights\n\n def kmeans_clustering(self, point, size, true_labels):\n self.kmeans = KMeans(n_clusters=2, random_state=10, init='k-means++', n_init=20).fit(point)\n\n self.kmeans_labels = self.kmeans.labels_\n\n # find index of samples that are in the first cluster\n self.label_list_0 = np.where(self.kmeans_labels == 0)[0]\n\n # get labels of samples that are in the first cluster\n temp = [true_labels[i][0] for i in self.label_list_0]\n temp.append(2)\n\n # determine label(cluster center) of benign and malware group based on the majority samples in each cluster\n counts = np.bincount(temp)\n\n if counts[0] > counts[1]: # counts[0] : number of benign in the first cluster\n benign_center = self.kmeans.cluster_centers_[0]\n 
malware_center = self.kmeans.cluster_centers_[1]\n else:\n benign_center = self.kmeans.cluster_centers_[1]\n malware_center = self.kmeans.cluster_centers_[0]\n\n # set label for each sample\n new_labels = np.zeros((size, self.third_layer_size))\n\n for i in range(size):\n if true_labels[i][0] == 0.0:\n new_labels[i] = benign_center\n else:\n new_labels[i] = malware_center\n\n self.FinalCenters = {'benignCenter': benign_center, 'malwareCenter': malware_center}\n\n return new_labels\n\n def partial_fit(self, X): # fit a batch\n\n # get network output.\n feed_dict = {self.train_data: X['batch_data_train']}\n self.points = self.sess.run((self.layer3), feed_dict=feed_dict)\n\n # apply clustering to find expected output.\n new_labels = self.kmeans_clustering(self.points, len(X['batch_data_label']), X['batch_data_label'])\n self.clusters_dist.append(np.linalg.norm(self.kmeans.cluster_centers_[0] - self.kmeans.cluster_centers_[1]))\n\n feed_dicts = {self.train_data: X['batch_data_train'],\n self.train_labels_center: new_labels}\n loss, opt = self.sess.run((self.cross_entropy, self.train_step), feed_dict=feed_dicts)\n\n # print(loss)\n # print('------------')\n\n metrics = self.evaluate(X['batch_data_label'], self.kmeans_labels, len((X['batch_data_label'])))\n self.accuracy_list.append(metrics[0])\n self.fmeasure_list.append(metrics[3])\n\n return loss\n\n def evaluate(self, true_labels, kmeans_labels, size):\n \"\"\"\n :param true_labels: label of malware and benign samples as a 2D array(number of samples * 1) of 0 and 1\n :param kmeans_labels: contains a list of 0 and 1 that each cell shows the sample cluster number\n :param size: number of samples\n\n :return: accuracy, precision, recall, f_measure\n\n \"\"\"\n\n # find index of samples that are in the first cluster\n self.label_list_0 = np.where(kmeans_labels == 0)[0]\n self.label_list_1 = np.where(kmeans_labels == 1)[0]\n\n # get labels of samples that are in the first cluster\n temp = [true_labels[i][0] for i in 
self.label_list_0]\n temp1 = [true_labels[i][0] for i in self.label_list_1]\n temp1.append(2)\n temp.append(2)\n\n # determine label(cluster center) of benign and malware group based on the majority samples in each cluster\n counts = np.bincount(temp)\n counts2 = np.bincount(temp1)\n\n if counts[0] > counts[1]:\n accuracy = (counts[0] + counts2[1]) / size\n precision = counts2[1] / (counts2[1] + counts2[0])\n recall = counts2[1] / (counts2[1] + counts[1])\n f_measure = 2 * ((precision * recall) / (precision + recall))\n else:\n accuracy = (counts[1] + counts2[0]) / size\n precision = counts[1] / (counts[1] + counts[0])\n recall = counts[1] / (counts[1] + counts2[1])\n f_measure = 2 * ((precision * recall) / (precision + recall))\n\n return accuracy, precision, recall, f_measure\n\n def final_fit(self, X, true_labels):\n\n self.phase = False\n\n # get network output for test data.\n feed_dict = {self.train_data: X['data_test']}\n self.points = self.sess.run(self.layer3, feed_dict=feed_dict)\n\n # determine label of each test sample based on the euclidean distance\n self.predicted_Labels = []\n for i in range(len(true_labels)):\n if np.linalg.norm(self.FinalCenters['benignCenter'] - self.points[i]) < np.linalg.norm(\n self.FinalCenters['malwareCenter'] - self.points[i]):\n self.predicted_Labels.append([0])\n else:\n self.predicted_Labels.append([1])\n\n tn, fp, fn, tp = confusion_matrix(true_labels, self.predicted_Labels).ravel()\n\n accuracy = (tp + tn) / (tp + tn + fn + fp)\n precision = tp / (tp + fp)\n recall = tp / (tp + fn)\n f_measure = 2 * (precision * recall) / (precision + recall)\n\n self.evaluation_metrics_list['accuracy'].append(np.float(\"{0:.4f}\".format(accuracy)))\n self.evaluation_metrics_list['precision'].append(np.float(\"{0:.4f}\".format(precision)))\n self.evaluation_metrics_list['recall'].append(np.float(\"{0:.4f}\".format(recall)))\n self.evaluation_metrics_list['fmeasure'].append(np.float(\"{0:.4f}\".format(f_measure)))\n\n 
print(\"accuracy\", \"precision\", \"recall\", \"f_measure\", sep=\"\\t\\t\\t\\t\\t\")\n print(accuracy, precision, recall, f_measure, sep=\"\\t\\t\\t\")\n\n return 0\n\n def train(self): # fit a dataset\n\n for iter in range(self.iteration):\n self.log(\"iteration {} \".format(iter))\n\n for epoch in range(self.epoch):\n\n self.accuracy_list = []\n self.fmeasure_list = []\n self.clusters_dist = []\n\n self.log(\"epoch %s\" % (epoch))\n\n total_batches = int(len(self.X_train_ben['data']) / self.batch_size)\n self.log('total_batches in epoch %s : %s ' % (epoch, total_batches))\n\n start_index = 0\n end_index = start_index + self.batch_size\n self.counter = 0\n\n # Loop over all batches.\n for i in range(total_batches + 1):\n self.counter += 1\n\n # generate a batch data\n batch_xs = {}\n\n batch_xs['batch_data_train'] = np.concatenate(\n [self.X_train_ben['data'][start_index:end_index], \\\n self.X_train_mal['data'][start_index:end_index]])\n\n batch_xs['batch_data_label'] = np.concatenate(\n [self.X_train_ben['label'][start_index:end_index], \\\n self.X_train_mal['label'][start_index:end_index]])\n\n # Fit training using batch data\n end_index = end_index + self.batch_size\n cost = self.partial_fit(batch_xs)\n\n\n # test\n batch_test = {}\n batch_test[\"data\"] = np.concatenate([self.X_test_ben['data'], self.X_test_mal['data']])\n batch_test[\"label\"] = np.concatenate([self.X_test_ben['label'], self.X_test_mal['label']])\n\n self.final_fit(batch_test, batch_test[\"label\"])\n\n # init all variables in a tensorflow graph for the next fold\n self.sess.run(self.init)\n\n return self.accuracy_list, self.fmeasure_list, self.clusters_dist, self.evaluation_metrics_list\n\n def log(self, message):\n print(message)\n\n def write_result_to_file(self, variable, message):\n # file = open('result.txt', 'a+')\n file = open('results/' + str(self.batch_size) + '/results.txt', 'a+')\n file.write(message + \"\\n\")\n file.write(str(np.mean(variable['accuracy'])) + '+' + 
str(np.var(variable['accuracy'])) + '\\t' + str(\n np.mean(variable['precision'])) + '\\t' + str(\n np.mean(variable['recall'])) + '\\t' + str(\n np.mean(variable['fmeasure'])) + '+' + str(np.var(variable['fmeasure'])) + '\\n')\n\n",
"step-ids": [
4,
8,
9,
11,
13
]
}
|
[
4,
8,
9,
11,
13
] |
import sys
sys.path.append('.')
import torch
from torch.nn import functional as F
import os
import yaml
from src.new_grad_cam import gc
def test(conf):
device = conf['device']
dataset = conf['test_dataset']
classes = conf['data']['classes']
weights_path = conf['weights_path']
results_dir = conf['results_dir']
model = conf['model']
model.load_state_dict(torch.load(weights_path))
model = model.to(device)
model.eval()
gc(model=model, dataset=dataset, results_dir=results_dir, classes=
classes, device=device)
if __name__ == '__main__':
from config import get_config
conf = get_config('./conf/testing.yaml')
test(conf)
|
normal
|
{
"blob_id": "b57b6df1b7e551f64033a0c47e5a22eab9fd5fd4",
"index": 7616,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test(conf):\n device = conf['device']\n dataset = conf['test_dataset']\n classes = conf['data']['classes']\n weights_path = conf['weights_path']\n results_dir = conf['results_dir']\n model = conf['model']\n model.load_state_dict(torch.load(weights_path))\n model = model.to(device)\n model.eval()\n gc(model=model, dataset=dataset, results_dir=results_dir, classes=\n classes, device=device)\n\n\n<mask token>\n",
"step-3": "<mask token>\nsys.path.append('.')\n<mask token>\n\n\ndef test(conf):\n device = conf['device']\n dataset = conf['test_dataset']\n classes = conf['data']['classes']\n weights_path = conf['weights_path']\n results_dir = conf['results_dir']\n model = conf['model']\n model.load_state_dict(torch.load(weights_path))\n model = model.to(device)\n model.eval()\n gc(model=model, dataset=dataset, results_dir=results_dir, classes=\n classes, device=device)\n\n\nif __name__ == '__main__':\n from config import get_config\n conf = get_config('./conf/testing.yaml')\n test(conf)\n",
"step-4": "import sys\nsys.path.append('.')\nimport torch\nfrom torch.nn import functional as F\nimport os\nimport yaml\nfrom src.new_grad_cam import gc\n\n\ndef test(conf):\n device = conf['device']\n dataset = conf['test_dataset']\n classes = conf['data']['classes']\n weights_path = conf['weights_path']\n results_dir = conf['results_dir']\n model = conf['model']\n model.load_state_dict(torch.load(weights_path))\n model = model.to(device)\n model.eval()\n gc(model=model, dataset=dataset, results_dir=results_dir, classes=\n classes, device=device)\n\n\nif __name__ == '__main__':\n from config import get_config\n conf = get_config('./conf/testing.yaml')\n test(conf)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Generated by Django 3.0.4 on 2021-03-27 19:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('electra', '0009_remove_response_img'),
]
operations = [
migrations.AddField(
model_name='response',
name='date_time',
field=models.DateTimeField(auto_now_add=True, null=True),
),
]
|
normal
|
{
"blob_id": "049d83bc1a31ef170654fda47d1f58e024befb44",
"index": 8220,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('electra', '0009_remove_response_img')]\n operations = [migrations.AddField(model_name='response', name=\n 'date_time', field=models.DateTimeField(auto_now_add=True, null=True))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('electra', '0009_remove_response_img')]\n operations = [migrations.AddField(model_name='response', name=\n 'date_time', field=models.DateTimeField(auto_now_add=True, null=True))]\n",
"step-5": "# Generated by Django 3.0.4 on 2021-03-27 19:18\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('electra', '0009_remove_response_img'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='response',\n name='date_time',\n field=models.DateTimeField(auto_now_add=True, null=True),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from ScientificColorschemez import Colorschemez
import matplotlib.pyplot as plt
cs = Colorschemez.latest()
for name, hexcode in zip(cs.colornames, cs.colors):
print('%s: %s' % (hexcode, name))
fig, ax = plt.subplots()
cs.example_plot(ax)
fig.savefig('latest.png', dpi=200, bbox_inches='tight')
|
normal
|
{
"blob_id": "7106a8ddbec60ce4b7d9e8e5ce8d7df02e5f7222",
"index": 6854,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor name, hexcode in zip(cs.colornames, cs.colors):\n print('%s: %s' % (hexcode, name))\n<mask token>\ncs.example_plot(ax)\nfig.savefig('latest.png', dpi=200, bbox_inches='tight')\n",
"step-3": "<mask token>\ncs = Colorschemez.latest()\nfor name, hexcode in zip(cs.colornames, cs.colors):\n print('%s: %s' % (hexcode, name))\nfig, ax = plt.subplots()\ncs.example_plot(ax)\nfig.savefig('latest.png', dpi=200, bbox_inches='tight')\n",
"step-4": "from ScientificColorschemez import Colorschemez\nimport matplotlib.pyplot as plt\ncs = Colorschemez.latest()\nfor name, hexcode in zip(cs.colornames, cs.colors):\n print('%s: %s' % (hexcode, name))\nfig, ax = plt.subplots()\ncs.example_plot(ax)\nfig.savefig('latest.png', dpi=200, bbox_inches='tight')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UserInfo(APIView):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UserInfo(APIView):
@swagger_auto_schema(operation_description=
'Get information about the logged in user', responses={(200):
serializers.UserInfoSerializer})
def get(self, request):
"""
Get information about the logged in user
:param request:
:return: HttpResponse response containing **user_info**
"""
if not request.user.is_authenticated:
return Response({'title': 'None', 'organization': 'None',
'state': 'None', 'country': 'None', 'user_type': 'None'})
user_info = {'username': request.user.username}
if request.user.email:
user_info['email'] = request.user.email
if request.user.first_name:
user_info['first_name'] = request.user.first_name
if request.user.id:
user_info['id'] = request.user.id
if request.user.last_name:
user_info['last_name'] = request.user.last_name
user_profile = UserProfile.objects.filter(user=request.user).first()
if user_profile.title:
user_info['title'] = user_profile.title
if user_profile.organization:
user_info['organization'] = user_profile.organization
if user_profile.state and user_profile.state.strip(
) and user_profile.state != 'Unspecified':
user_info['state'] = user_profile.state.strip()
if user_profile.country and user_profile.country != 'Unspecified':
user_info['country'] = user_profile.country
if user_profile.user_type and user_profile.user_type.strip(
) and user_profile.user_type != 'Unspecified':
user_info['user_type'] = user_profile.user_type.strip()
return Response(user_info)
<|reserved_special_token_1|>
from rest_framework.views import APIView
from rest_framework.response import Response
from drf_yasg.utils import swagger_auto_schema
from theme.models import UserProfile
from hs_core.views import serializers
class UserInfo(APIView):
@swagger_auto_schema(operation_description=
'Get information about the logged in user', responses={(200):
serializers.UserInfoSerializer})
def get(self, request):
"""
Get information about the logged in user
:param request:
:return: HttpResponse response containing **user_info**
"""
if not request.user.is_authenticated:
return Response({'title': 'None', 'organization': 'None',
'state': 'None', 'country': 'None', 'user_type': 'None'})
user_info = {'username': request.user.username}
if request.user.email:
user_info['email'] = request.user.email
if request.user.first_name:
user_info['first_name'] = request.user.first_name
if request.user.id:
user_info['id'] = request.user.id
if request.user.last_name:
user_info['last_name'] = request.user.last_name
user_profile = UserProfile.objects.filter(user=request.user).first()
if user_profile.title:
user_info['title'] = user_profile.title
if user_profile.organization:
user_info['organization'] = user_profile.organization
if user_profile.state and user_profile.state.strip(
) and user_profile.state != 'Unspecified':
user_info['state'] = user_profile.state.strip()
if user_profile.country and user_profile.country != 'Unspecified':
user_info['country'] = user_profile.country
if user_profile.user_type and user_profile.user_type.strip(
) and user_profile.user_type != 'Unspecified':
user_info['user_type'] = user_profile.user_type.strip()
return Response(user_info)
<|reserved_special_token_1|>
from rest_framework.views import APIView
from rest_framework.response import Response
from drf_yasg.utils import swagger_auto_schema
from theme.models import UserProfile
from hs_core.views import serializers
class UserInfo(APIView):
@swagger_auto_schema(operation_description="Get information about the logged in user",
responses={200: serializers.UserInfoSerializer})
def get(self, request):
'''
Get information about the logged in user
:param request:
:return: HttpResponse response containing **user_info**
'''
if not request.user.is_authenticated:
return Response({"title": "None", "organization": "None", "state": "None", "country": "None",
"user_type": "None"})
user_info = {"username": request.user.username}
if request.user.email:
user_info['email'] = request.user.email
if request.user.first_name:
user_info['first_name'] = request.user.first_name
if request.user.id:
user_info['id'] = request.user.id
if request.user.last_name:
user_info['last_name'] = request.user.last_name
user_profile = UserProfile.objects.filter(user=request.user).first()
if user_profile.title:
user_info['title'] = user_profile.title
if user_profile.organization:
user_info['organization'] = user_profile.organization
if user_profile.state and user_profile.state.strip() and user_profile.state != 'Unspecified':
user_info['state'] = user_profile.state.strip()
if user_profile.country and user_profile.country != 'Unspecified':
user_info['country'] = user_profile.country
if user_profile.user_type and user_profile.user_type.strip() and user_profile.user_type != 'Unspecified':
user_info['user_type'] = user_profile.user_type.strip()
return Response(user_info)
|
flexible
|
{
"blob_id": "c45ffe8cba8d152e346182252dbc43e22eaf83e2",
"index": 3498,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass UserInfo(APIView):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass UserInfo(APIView):\n\n @swagger_auto_schema(operation_description=\n 'Get information about the logged in user', responses={(200):\n serializers.UserInfoSerializer})\n def get(self, request):\n \"\"\"\n Get information about the logged in user\n\n :param request:\n :return: HttpResponse response containing **user_info**\n \"\"\"\n if not request.user.is_authenticated:\n return Response({'title': 'None', 'organization': 'None',\n 'state': 'None', 'country': 'None', 'user_type': 'None'})\n user_info = {'username': request.user.username}\n if request.user.email:\n user_info['email'] = request.user.email\n if request.user.first_name:\n user_info['first_name'] = request.user.first_name\n if request.user.id:\n user_info['id'] = request.user.id\n if request.user.last_name:\n user_info['last_name'] = request.user.last_name\n user_profile = UserProfile.objects.filter(user=request.user).first()\n if user_profile.title:\n user_info['title'] = user_profile.title\n if user_profile.organization:\n user_info['organization'] = user_profile.organization\n if user_profile.state and user_profile.state.strip(\n ) and user_profile.state != 'Unspecified':\n user_info['state'] = user_profile.state.strip()\n if user_profile.country and user_profile.country != 'Unspecified':\n user_info['country'] = user_profile.country\n if user_profile.user_type and user_profile.user_type.strip(\n ) and user_profile.user_type != 'Unspecified':\n user_info['user_type'] = user_profile.user_type.strip()\n return Response(user_info)\n",
"step-4": "from rest_framework.views import APIView\nfrom rest_framework.response import Response\nfrom drf_yasg.utils import swagger_auto_schema\nfrom theme.models import UserProfile\nfrom hs_core.views import serializers\n\n\nclass UserInfo(APIView):\n\n @swagger_auto_schema(operation_description=\n 'Get information about the logged in user', responses={(200):\n serializers.UserInfoSerializer})\n def get(self, request):\n \"\"\"\n Get information about the logged in user\n\n :param request:\n :return: HttpResponse response containing **user_info**\n \"\"\"\n if not request.user.is_authenticated:\n return Response({'title': 'None', 'organization': 'None',\n 'state': 'None', 'country': 'None', 'user_type': 'None'})\n user_info = {'username': request.user.username}\n if request.user.email:\n user_info['email'] = request.user.email\n if request.user.first_name:\n user_info['first_name'] = request.user.first_name\n if request.user.id:\n user_info['id'] = request.user.id\n if request.user.last_name:\n user_info['last_name'] = request.user.last_name\n user_profile = UserProfile.objects.filter(user=request.user).first()\n if user_profile.title:\n user_info['title'] = user_profile.title\n if user_profile.organization:\n user_info['organization'] = user_profile.organization\n if user_profile.state and user_profile.state.strip(\n ) and user_profile.state != 'Unspecified':\n user_info['state'] = user_profile.state.strip()\n if user_profile.country and user_profile.country != 'Unspecified':\n user_info['country'] = user_profile.country\n if user_profile.user_type and user_profile.user_type.strip(\n ) and user_profile.user_type != 'Unspecified':\n user_info['user_type'] = user_profile.user_type.strip()\n return Response(user_info)\n",
"step-5": "from rest_framework.views import APIView\nfrom rest_framework.response import Response\nfrom drf_yasg.utils import swagger_auto_schema\n\nfrom theme.models import UserProfile\nfrom hs_core.views import serializers\n\n\nclass UserInfo(APIView):\n @swagger_auto_schema(operation_description=\"Get information about the logged in user\",\n responses={200: serializers.UserInfoSerializer})\n def get(self, request):\n '''\n Get information about the logged in user\n\n :param request:\n :return: HttpResponse response containing **user_info**\n '''\n if not request.user.is_authenticated:\n return Response({\"title\": \"None\", \"organization\": \"None\", \"state\": \"None\", \"country\": \"None\",\n \"user_type\": \"None\"})\n\n user_info = {\"username\": request.user.username}\n\n if request.user.email:\n user_info['email'] = request.user.email\n if request.user.first_name:\n user_info['first_name'] = request.user.first_name\n if request.user.id:\n user_info['id'] = request.user.id\n if request.user.last_name:\n user_info['last_name'] = request.user.last_name\n\n user_profile = UserProfile.objects.filter(user=request.user).first()\n if user_profile.title:\n user_info['title'] = user_profile.title\n if user_profile.organization:\n user_info['organization'] = user_profile.organization\n if user_profile.state and user_profile.state.strip() and user_profile.state != 'Unspecified':\n user_info['state'] = user_profile.state.strip()\n if user_profile.country and user_profile.country != 'Unspecified':\n user_info['country'] = user_profile.country\n if user_profile.user_type and user_profile.user_type.strip() and user_profile.user_type != 'Unspecified':\n user_info['user_type'] = user_profile.user_type.strip()\n return Response(user_info)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PersianConfig(AppConfig):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PersianConfig(AppConfig):
name = 'persian'
<|reserved_special_token_1|>
from django.apps import AppConfig
class PersianConfig(AppConfig):
name = 'persian'
|
flexible
|
{
"blob_id": "6b0d1de4c77841f20670331db3332cf87be7ad84",
"index": 3931,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass PersianConfig(AppConfig):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass PersianConfig(AppConfig):\n name = 'persian'\n",
"step-4": "from django.apps import AppConfig\n\n\nclass PersianConfig(AppConfig):\n name = 'persian'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from redstork import PageObject
class AnnotController:
def get_annotations(self, project, page_index):
page = project.doc[page_index]
yield from page.flat_iter()
|
normal
|
{
"blob_id": "6ca2a9040897e49c6407b9b0760240fec93b4df0",
"index": 3067,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass AnnotController:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass AnnotController:\n\n def get_annotations(self, project, page_index):\n page = project.doc[page_index]\n yield from page.flat_iter()\n",
"step-4": "from redstork import PageObject\n\n\nclass AnnotController:\n\n def get_annotations(self, project, page_index):\n page = project.doc[page_index]\n yield from page.flat_iter()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def func():
global data
print('%s is acquire lock..\n' % threading.current_thread().getName())
if lock.acquire():
print('%s get lock ' % threading.current_thread().getName())
data += 1
time.sleep(2)
print('%s release lock ' % threading.current_thread().getName())
print(data)
lock.release()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def func():
global data
print('%s is acquire lock..\n' % threading.current_thread().getName())
if lock.acquire():
print('%s get lock ' % threading.current_thread().getName())
data += 1
time.sleep(2)
print('%s release lock ' % threading.current_thread().getName())
print(data)
lock.release()
<|reserved_special_token_0|>
t1.start()
t2.start()
t3.start()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
data = 0
lock = threading.Lock()
def func():
global data
print('%s is acquire lock..\n' % threading.current_thread().getName())
if lock.acquire():
print('%s get lock ' % threading.current_thread().getName())
data += 1
time.sleep(2)
print('%s release lock ' % threading.current_thread().getName())
print(data)
lock.release()
t1 = threading.Thread(target=func)
t2 = threading.Thread(target=func)
t3 = threading.Thread(target=func)
t1.start()
t2.start()
t3.start()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import threading
import time
data = 0
lock = threading.Lock()
def func():
global data
print('%s is acquire lock..\n' % threading.current_thread().getName())
if lock.acquire():
print('%s get lock ' % threading.current_thread().getName())
data += 1
time.sleep(2)
print('%s release lock ' % threading.current_thread().getName())
print(data)
lock.release()
t1 = threading.Thread(target=func)
t2 = threading.Thread(target=func)
t3 = threading.Thread(target=func)
t1.start()
t2.start()
t3.start()
<|reserved_special_token_1|>
'''
这部分理解参考:
https://www.bilibili.com/video/BV1QA411H7tK?from=search&seid=17305042509580602672
图文代码地址: https://blog.csdn.net/qq_30758629/article/details/112527763
'''
import threading
import time
data=0
lock=threading.Lock() #创建一个锁对象
def func():
global data
print("%s is acquire lock..\n" %threading.current_thread().getName())
if lock.acquire():
print("%s get lock "%threading.current_thread().getName())
data+=1
time.sleep(2)
print("%s release lock "%threading.current_thread().getName())
print(data)
lock.release()
t1=threading.Thread(target=func)
t2=threading.Thread(target=func)
t3=threading.Thread(target=func)
t1.start()
t2.start()
t3.start()
|
flexible
|
{
"blob_id": "7aa426723f5311b5abec4a7ace9d3ec1e5e31d9a",
"index": 5966,
"step-1": "<mask token>\n\n\ndef func():\n global data\n print('%s is acquire lock..\\n' % threading.current_thread().getName())\n if lock.acquire():\n print('%s get lock ' % threading.current_thread().getName())\n data += 1\n time.sleep(2)\n print('%s release lock ' % threading.current_thread().getName())\n print(data)\n lock.release()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef func():\n global data\n print('%s is acquire lock..\\n' % threading.current_thread().getName())\n if lock.acquire():\n print('%s get lock ' % threading.current_thread().getName())\n data += 1\n time.sleep(2)\n print('%s release lock ' % threading.current_thread().getName())\n print(data)\n lock.release()\n\n\n<mask token>\nt1.start()\nt2.start()\nt3.start()\n",
"step-3": "<mask token>\ndata = 0\nlock = threading.Lock()\n\n\ndef func():\n global data\n print('%s is acquire lock..\\n' % threading.current_thread().getName())\n if lock.acquire():\n print('%s get lock ' % threading.current_thread().getName())\n data += 1\n time.sleep(2)\n print('%s release lock ' % threading.current_thread().getName())\n print(data)\n lock.release()\n\n\nt1 = threading.Thread(target=func)\nt2 = threading.Thread(target=func)\nt3 = threading.Thread(target=func)\nt1.start()\nt2.start()\nt3.start()\n",
"step-4": "<mask token>\nimport threading\nimport time\ndata = 0\nlock = threading.Lock()\n\n\ndef func():\n global data\n print('%s is acquire lock..\\n' % threading.current_thread().getName())\n if lock.acquire():\n print('%s get lock ' % threading.current_thread().getName())\n data += 1\n time.sleep(2)\n print('%s release lock ' % threading.current_thread().getName())\n print(data)\n lock.release()\n\n\nt1 = threading.Thread(target=func)\nt2 = threading.Thread(target=func)\nt3 = threading.Thread(target=func)\nt1.start()\nt2.start()\nt3.start()\n",
"step-5": "'''\n\n这部分理解参考:\n\nhttps://www.bilibili.com/video/BV1QA411H7tK?from=search&seid=17305042509580602672\n\n图文代码地址: https://blog.csdn.net/qq_30758629/article/details/112527763\n\n'''\n\nimport threading\nimport time\n\ndata=0\nlock=threading.Lock() #创建一个锁对象\n\ndef func():\n global data\n print(\"%s is acquire lock..\\n\" %threading.current_thread().getName())\n\n if lock.acquire():\n print(\"%s get lock \"%threading.current_thread().getName())\n data+=1\n time.sleep(2)\n print(\"%s release lock \"%threading.current_thread().getName())\n print(data)\n lock.release()\n\nt1=threading.Thread(target=func)\nt2=threading.Thread(target=func)\nt3=threading.Thread(target=func)\nt1.start()\nt2.start()\nt3.start()",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def areAlmostEqual(self, s1, s2):
if not len(s1) == len(s2):
return False
differences = []
for i in range(len(s1)):
if not s1[i] == s2[i]:
differences.append(i)
if len(differences) == 0:
return True
elif len(differences) == 2 and s1[differences[0]] == s2[differences[1]
]:
return True
return False
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def areAlmostEqual(self, s1, s2):
if not len(s1) == len(s2):
return False
differences = []
for i in range(len(s1)):
if not s1[i] == s2[i]:
differences.append(i)
if len(differences) == 0:
return True
elif len(differences) == 2 and s1[differences[0]] == s2[differences[1]
]:
return True
return False
if __name__ == '__main__':
s = Solution()
assert s.areAlmostEqual('kelb', 'kelb')
assert s.areAlmostEqual('', '')
assert not s.areAlmostEqual('abcd', 'dcba')
assert s.areAlmostEqual('abc', 'cba')
assert s.areAlmostEqual('abcdefghijklmnopqrstuvwxyz',
'zbcdefghijklmnopqrstuvwxya')
assert not s.areAlmostEqual('abcdefghijklmnopqrstuvwxyz',
'abcdefghijklmnopqrstuvwxya')
<|reserved_special_token_1|>
"""
You are given two strings s1 and s2 of equal length. A string swap is an operation where you choose two indices in a string (not necessarily different) and swap the characters at these indices.
Return true if it is possible to make both strings equal by performing at most one string swap on exactly one of the strings. Otherwise, return false.
"""
class Solution:
def areAlmostEqual(self, s1, s2):
if not(len(s1) == len(s2)):
return False
differences = []
for i in range(len(s1)):
if not(s1[i] == s2[i]):
differences.append(i)
if len(differences) == 0:
return True
elif len(differences) == 2 and s1[differences[0]] == s2[differences[1]]:
return True
return False
if __name__ == "__main__":
s = Solution()
assert(s.areAlmostEqual("kelb", "kelb"))
assert(s.areAlmostEqual("", ""))
assert(not s.areAlmostEqual("abcd", "dcba"))
assert(s.areAlmostEqual("abc", "cba"))
assert(s.areAlmostEqual("abcdefghijklmnopqrstuvwxyz", "zbcdefghijklmnopqrstuvwxya"))
assert(not s.areAlmostEqual("abcdefghijklmnopqrstuvwxyz", "abcdefghijklmnopqrstuvwxya"))
|
flexible
|
{
"blob_id": "5efb8151375d705f3591921654f847e45b6927c9",
"index": 3614,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def areAlmostEqual(self, s1, s2):\n if not len(s1) == len(s2):\n return False\n differences = []\n for i in range(len(s1)):\n if not s1[i] == s2[i]:\n differences.append(i)\n if len(differences) == 0:\n return True\n elif len(differences) == 2 and s1[differences[0]] == s2[differences[1]\n ]:\n return True\n return False\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Solution:\n\n def areAlmostEqual(self, s1, s2):\n if not len(s1) == len(s2):\n return False\n differences = []\n for i in range(len(s1)):\n if not s1[i] == s2[i]:\n differences.append(i)\n if len(differences) == 0:\n return True\n elif len(differences) == 2 and s1[differences[0]] == s2[differences[1]\n ]:\n return True\n return False\n\n\nif __name__ == '__main__':\n s = Solution()\n assert s.areAlmostEqual('kelb', 'kelb')\n assert s.areAlmostEqual('', '')\n assert not s.areAlmostEqual('abcd', 'dcba')\n assert s.areAlmostEqual('abc', 'cba')\n assert s.areAlmostEqual('abcdefghijklmnopqrstuvwxyz',\n 'zbcdefghijklmnopqrstuvwxya')\n assert not s.areAlmostEqual('abcdefghijklmnopqrstuvwxyz',\n 'abcdefghijklmnopqrstuvwxya')\n",
"step-5": "\"\"\"\nYou are given two strings s1 and s2 of equal length. A string swap is an operation where you choose two indices in a string (not necessarily different) and swap the characters at these indices.\n\nReturn true if it is possible to make both strings equal by performing at most one string swap on exactly one of the strings. Otherwise, return false.\n\"\"\"\n\nclass Solution:\n def areAlmostEqual(self, s1, s2):\n if not(len(s1) == len(s2)):\n return False\n\n differences = []\n for i in range(len(s1)):\n if not(s1[i] == s2[i]):\n differences.append(i)\n\n if len(differences) == 0:\n return True\n elif len(differences) == 2 and s1[differences[0]] == s2[differences[1]]:\n return True\n\n return False\n\n\nif __name__ == \"__main__\":\n s = Solution()\n\n assert(s.areAlmostEqual(\"kelb\", \"kelb\"))\n assert(s.areAlmostEqual(\"\", \"\"))\n assert(not s.areAlmostEqual(\"abcd\", \"dcba\"))\n assert(s.areAlmostEqual(\"abc\", \"cba\"))\n assert(s.areAlmostEqual(\"abcdefghijklmnopqrstuvwxyz\", \"zbcdefghijklmnopqrstuvwxya\"))\n assert(not s.areAlmostEqual(\"abcdefghijklmnopqrstuvwxyz\", \"abcdefghijklmnopqrstuvwxya\"))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Music(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@staticmethod
def get_global_volume():
return Music.our_music_volume
class Sound(object):
our_sound_volume = 0.8
def __init__(self, filename=None):
self.sound = None
self.channel = None
if filename is not None:
self.load(filename)
def load(self, filename):
self.sound = mixer.Sound(filename)
def play(self, loop=0):
"""for infiniteloop, set loop to -1"""
self.sound.set_volume(Sound.our_sound_volume)
self.channel = self.sound.play(loop)
def stop(self):
self.sound.stop()
def fadeout(self, millisec):
self.sound.fadeout(millisec)
def is_playing(self):
return self.channel is not None and self.channel.get_sound(
) is self.sound
@staticmethod
def set_global_volume(volume):
assert volume >= 0.0
assert volume <= 1.0
Sound.our_sound_volume = volume
@staticmethod
def get_global_volume():
return Sound.our_sound_volume
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Music(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __init__(self, filename=None):
self.sound = None
self.channel = None
if filename is not None:
self.load(filename)
def load(self, filename):
self.sound = mixer.Sound(filename)
def play(self, loop=-1):
self.sound.set_volume(Music.our_music_volume)
self.channel = self.sound.play(loop)
Music.our_current_music = self.sound
def stop(self):
self.sound.stop()
def fadeout(self, millisec):
self.sound.fadeout(millisec)
<|reserved_special_token_0|>
@staticmethod
def set_global_volume(volume):
assert volume >= 0.0
assert volume <= 1.0
Music.our_music_volume = volume
if Music.our_current_music is not None:
Music.our_current_music.set_volume(volume)
@staticmethod
def get_global_volume():
return Music.our_music_volume
class Sound(object):
our_sound_volume = 0.8
def __init__(self, filename=None):
self.sound = None
self.channel = None
if filename is not None:
self.load(filename)
def load(self, filename):
self.sound = mixer.Sound(filename)
def play(self, loop=0):
"""for infiniteloop, set loop to -1"""
self.sound.set_volume(Sound.our_sound_volume)
self.channel = self.sound.play(loop)
def stop(self):
self.sound.stop()
def fadeout(self, millisec):
self.sound.fadeout(millisec)
def is_playing(self):
return self.channel is not None and self.channel.get_sound(
) is self.sound
@staticmethod
def set_global_volume(volume):
assert volume >= 0.0
assert volume <= 1.0
Sound.our_sound_volume = volume
@staticmethod
def get_global_volume():
return Sound.our_sound_volume
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def deinit():
mixer.quit()
class Music(object):
our_music_volume = 0.8
our_current_music = None
def __init__(self, filename=None):
self.sound = None
self.channel = None
if filename is not None:
self.load(filename)
def load(self, filename):
self.sound = mixer.Sound(filename)
def play(self, loop=-1):
self.sound.set_volume(Music.our_music_volume)
self.channel = self.sound.play(loop)
Music.our_current_music = self.sound
def stop(self):
self.sound.stop()
def fadeout(self, millisec):
self.sound.fadeout(millisec)
def is_playing(self):
return self.channel is not None and self.channel.get_sound(
) is self.sound
@staticmethod
def set_global_volume(volume):
assert volume >= 0.0
assert volume <= 1.0
Music.our_music_volume = volume
if Music.our_current_music is not None:
Music.our_current_music.set_volume(volume)
@staticmethod
def get_global_volume():
return Music.our_music_volume
class Sound(object):
our_sound_volume = 0.8
def __init__(self, filename=None):
self.sound = None
self.channel = None
if filename is not None:
self.load(filename)
def load(self, filename):
self.sound = mixer.Sound(filename)
def play(self, loop=0):
"""for infiniteloop, set loop to -1"""
self.sound.set_volume(Sound.our_sound_volume)
self.channel = self.sound.play(loop)
def stop(self):
self.sound.stop()
def fadeout(self, millisec):
self.sound.fadeout(millisec)
def is_playing(self):
return self.channel is not None and self.channel.get_sound(
) is self.sound
@staticmethod
def set_global_volume(volume):
assert volume >= 0.0
assert volume <= 1.0
Sound.our_sound_volume = volume
@staticmethod
def get_global_volume():
return Sound.our_sound_volume
<|reserved_special_token_1|>
import pygame
import pygame.mixer as mixer
def pre_init():
mixer.pre_init(22050, -16, 2, 2048)
def init():
mixer.init()
pygame.mixer.set_num_channels(16)
def deinit():
mixer.quit()
class Music(object):
our_music_volume = 0.8
our_current_music = None
def __init__(self, filename=None):
self.sound = None
self.channel = None
if filename is not None:
self.load(filename)
def load(self, filename):
self.sound = mixer.Sound(filename)
def play(self, loop=-1):
self.sound.set_volume(Music.our_music_volume)
self.channel = self.sound.play(loop)
Music.our_current_music = self.sound
def stop(self):
self.sound.stop()
def fadeout(self, millisec):
self.sound.fadeout(millisec)
def is_playing(self):
return self.channel is not None and self.channel.get_sound(
) is self.sound
@staticmethod
def set_global_volume(volume):
assert volume >= 0.0
assert volume <= 1.0
Music.our_music_volume = volume
if Music.our_current_music is not None:
Music.our_current_music.set_volume(volume)
@staticmethod
def get_global_volume():
return Music.our_music_volume
class Sound(object):
our_sound_volume = 0.8
def __init__(self, filename=None):
self.sound = None
self.channel = None
if filename is not None:
self.load(filename)
def load(self, filename):
self.sound = mixer.Sound(filename)
def play(self, loop=0):
"""for infiniteloop, set loop to -1"""
self.sound.set_volume(Sound.our_sound_volume)
self.channel = self.sound.play(loop)
def stop(self):
self.sound.stop()
def fadeout(self, millisec):
self.sound.fadeout(millisec)
def is_playing(self):
return self.channel is not None and self.channel.get_sound(
) is self.sound
@staticmethod
def set_global_volume(volume):
assert volume >= 0.0
assert volume <= 1.0
Sound.our_sound_volume = volume
@staticmethod
def get_global_volume():
return Sound.our_sound_volume
<|reserved_special_token_1|>
#!/usr/bin/env python
import pygame
import pygame.mixer as mixer
def pre_init():
mixer.pre_init(22050, -16, 2, 2048)
def init():
mixer.init()
pygame.mixer.set_num_channels(16)
def deinit():
mixer.quit()
class Music (object):
our_music_volume = 0.8
our_current_music = None
def __init__( self, filename = None ):
self.sound = None
self.channel = None
if filename is not None:
self.load( filename )
def load( self, filename ):
self.sound = mixer.Sound( filename )
def play( self, loop = -1 ):
self.sound.set_volume( Music.our_music_volume )
self.channel = self.sound.play( loop )
Music.our_current_music = self.sound
def stop( self ):
self.sound.stop()
def fadeout( self, millisec ):
self.sound.fadeout( millisec )
def is_playing( self ):
return self.channel is not None and self.channel.get_sound() is self.sound
@staticmethod
def set_global_volume( volume ):
assert volume >= 0.0
assert volume <= 1.0
Music.our_music_volume = volume
if Music.our_current_music is not None:
Music.our_current_music.set_volume( volume )
@staticmethod
def get_global_volume():
return Music.our_music_volume
class Sound (object):
our_sound_volume = 0.8
def __init__( self, filename = None ):
self.sound = None
self.channel = None
if filename is not None:
self.load( filename )
def load( self, filename ):
self.sound = mixer.Sound( filename )
def play( self, loop = 0 ):
"""for infiniteloop, set loop to -1"""
self.sound.set_volume( Sound.our_sound_volume )
self.channel = self.sound.play( loop )
def stop( self ):
self.sound.stop()
def fadeout( self, millisec ):
self.sound.fadeout( millisec )
def is_playing( self ):
return self.channel is not None and self.channel.get_sound() is self.sound
@staticmethod
def set_global_volume( volume ):
assert volume >= 0.0
assert volume <= 1.0
Sound.our_sound_volume = volume
@staticmethod
def get_global_volume():
return Sound.our_sound_volume
|
flexible
|
{
"blob_id": "2caea9e7bbef99b19ba917995513413385c7abdf",
"index": 9808,
"step-1": "<mask token>\n\n\nclass Music(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @staticmethod\n def get_global_volume():\n return Music.our_music_volume\n\n\nclass Sound(object):\n our_sound_volume = 0.8\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=0):\n \"\"\"for infiniteloop, set loop to -1\"\"\"\n self.sound.set_volume(Sound.our_sound_volume)\n self.channel = self.sound.play(loop)\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n\n def is_playing(self):\n return self.channel is not None and self.channel.get_sound(\n ) is self.sound\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Sound.our_sound_volume = volume\n\n @staticmethod\n def get_global_volume():\n return Sound.our_sound_volume\n",
"step-2": "<mask token>\n\n\nclass Music(object):\n <mask token>\n <mask token>\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=-1):\n self.sound.set_volume(Music.our_music_volume)\n self.channel = self.sound.play(loop)\n Music.our_current_music = self.sound\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n <mask token>\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Music.our_music_volume = volume\n if Music.our_current_music is not None:\n Music.our_current_music.set_volume(volume)\n\n @staticmethod\n def get_global_volume():\n return Music.our_music_volume\n\n\nclass Sound(object):\n our_sound_volume = 0.8\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=0):\n \"\"\"for infiniteloop, set loop to -1\"\"\"\n self.sound.set_volume(Sound.our_sound_volume)\n self.channel = self.sound.play(loop)\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n\n def is_playing(self):\n return self.channel is not None and self.channel.get_sound(\n ) is self.sound\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Sound.our_sound_volume = volume\n\n @staticmethod\n def get_global_volume():\n return Sound.our_sound_volume\n",
"step-3": "<mask token>\n\n\ndef deinit():\n mixer.quit()\n\n\nclass Music(object):\n our_music_volume = 0.8\n our_current_music = None\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=-1):\n self.sound.set_volume(Music.our_music_volume)\n self.channel = self.sound.play(loop)\n Music.our_current_music = self.sound\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n\n def is_playing(self):\n return self.channel is not None and self.channel.get_sound(\n ) is self.sound\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Music.our_music_volume = volume\n if Music.our_current_music is not None:\n Music.our_current_music.set_volume(volume)\n\n @staticmethod\n def get_global_volume():\n return Music.our_music_volume\n\n\nclass Sound(object):\n our_sound_volume = 0.8\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=0):\n \"\"\"for infiniteloop, set loop to -1\"\"\"\n self.sound.set_volume(Sound.our_sound_volume)\n self.channel = self.sound.play(loop)\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n\n def is_playing(self):\n return self.channel is not None and self.channel.get_sound(\n ) is self.sound\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Sound.our_sound_volume = volume\n\n @staticmethod\n def get_global_volume():\n return Sound.our_sound_volume\n",
"step-4": "import pygame\nimport pygame.mixer as mixer\n\n\ndef pre_init():\n mixer.pre_init(22050, -16, 2, 2048)\n\n\ndef init():\n mixer.init()\n pygame.mixer.set_num_channels(16)\n\n\ndef deinit():\n mixer.quit()\n\n\nclass Music(object):\n our_music_volume = 0.8\n our_current_music = None\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=-1):\n self.sound.set_volume(Music.our_music_volume)\n self.channel = self.sound.play(loop)\n Music.our_current_music = self.sound\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n\n def is_playing(self):\n return self.channel is not None and self.channel.get_sound(\n ) is self.sound\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Music.our_music_volume = volume\n if Music.our_current_music is not None:\n Music.our_current_music.set_volume(volume)\n\n @staticmethod\n def get_global_volume():\n return Music.our_music_volume\n\n\nclass Sound(object):\n our_sound_volume = 0.8\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=0):\n \"\"\"for infiniteloop, set loop to -1\"\"\"\n self.sound.set_volume(Sound.our_sound_volume)\n self.channel = self.sound.play(loop)\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n\n def is_playing(self):\n return self.channel is not None and self.channel.get_sound(\n ) is self.sound\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Sound.our_sound_volume = volume\n\n @staticmethod\n def get_global_volume():\n return Sound.our_sound_volume\n",
"step-5": "#!/usr/bin/env python\n\nimport pygame\nimport pygame.mixer as mixer\n\ndef pre_init():\n mixer.pre_init(22050, -16, 2, 2048)\n\ndef init():\n mixer.init()\n pygame.mixer.set_num_channels(16)\n\ndef deinit():\n mixer.quit()\n\n\nclass Music (object):\n our_music_volume = 0.8\n our_current_music = None\n \n def __init__( self, filename = None ):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load( filename )\n\n def load( self, filename ):\n self.sound = mixer.Sound( filename )\n\n def play( self, loop = -1 ):\n self.sound.set_volume( Music.our_music_volume )\n self.channel = self.sound.play( loop )\n Music.our_current_music = self.sound\n \n def stop( self ):\n self.sound.stop()\n\n def fadeout( self, millisec ):\n self.sound.fadeout( millisec )\n\n def is_playing( self ):\n return self.channel is not None and self.channel.get_sound() is self.sound\n\n @staticmethod\n def set_global_volume( volume ):\n assert volume >= 0.0\n assert volume <= 1.0\n\n Music.our_music_volume = volume\n\n if Music.our_current_music is not None:\n Music.our_current_music.set_volume( volume )\n\n @staticmethod\n def get_global_volume():\n return Music.our_music_volume\n \n\nclass Sound (object):\n our_sound_volume = 0.8\n \n def __init__( self, filename = None ):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load( filename )\n\n def load( self, filename ):\n self.sound = mixer.Sound( filename )\n\n def play( self, loop = 0 ):\n \"\"\"for infiniteloop, set loop to -1\"\"\"\n self.sound.set_volume( Sound.our_sound_volume )\n self.channel = self.sound.play( loop )\n \n def stop( self ):\n self.sound.stop()\n\n def fadeout( self, millisec ):\n self.sound.fadeout( millisec )\n\n def is_playing( self ):\n return self.channel is not None and self.channel.get_sound() is self.sound\n\n @staticmethod\n def set_global_volume( volume ):\n assert volume >= 0.0\n assert volume <= 1.0\n\n Sound.our_sound_volume = volume\n\n 
@staticmethod\n def get_global_volume():\n return Sound.our_sound_volume\n \n",
"step-ids": [
12,
18,
21,
24,
25
]
}
|
[
12,
18,
21,
24,
25
] |
ba0563.pngMap = [
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000011111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000111111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000001100110111111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000001111111111111111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000111111111111111111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000111111111111111111111110000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000111111111111111111110000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000011111111111111111110000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000011111111111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000011111111111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000111111111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000011111111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111100000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111100000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000111111111111100000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111110000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111110000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111100000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111110000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111110000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111100000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111100000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000001111111111100000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000111101011100000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
]
|
normal
|
{
"blob_id": "dab1adcd185092fc425b5d87150f27e7b67bff6c",
"index": 151,
"step-1": "<mask token>\n",
"step-2": "ba0563.pngMap = [\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n 
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000011111111000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000111111111111000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000001100110111111111111000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000001111111111111111111111000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000111111111111111111111111000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000111111111111111111111110000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000111111111111111111110000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000011111111111111111110000000000000000000000000000000000000000'\n ,\n 
'00000000000000000000000000000000000000000000000000000000000000000000000011111111111111111000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000011111111111111111000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000111111111111111000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000011111111111111000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111100000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111100000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000111111111111100000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111110000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111110000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111100000000000000000000000000000000000'\n ,\n 
'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111110000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111110000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111100000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111100000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000001111111111100000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000111101011100000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n 
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ]\n",
"step-3": "ba0563.pngMap = [\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000011111111000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000111111111111000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000001100110111111111111000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000001111111111111111111111000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000111111111111111111111111000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000111111111111111111111110000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000111111111111111111110000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000011111111111111111110000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000011111111111111111000000000000000000000000000000000000000',\n'0000000000000000000000000000000000000000000000000000000000000000000000001111111111111111100000000000000000000000
0000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000111111111111111000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000011111111111111000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111100000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111100000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000111111111111100000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111110000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111110000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111100000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111110000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111110000000000000000000000000000000000',\n'000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111000000000000000000000
00000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111100000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000001111111111100000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000111101011100000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n]\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
class Node:
"""
This class represent a node (vertex).
"""
def __init__(self, k: int = None, loc: tuple = None, **kwargs):
"""
Each node contain dew fields:
key: node_id.
location: node's position represent as 3DPoint.
ni_out: a dictionary that holds all the "edges" that connected from this node,
each edge is represented using a pair (key, edge weight).
ni_in: a dictionary that holds all the "edges" that connected to this node,
each edge is represented using a pair (key, edge weight)
"""
self.__key = k
self.__location = loc
self.__ni_out = {}
self.__ni_in = {}
def add_neighbor_out(self, neighbor_id: int, weight: float) -> None:
"""
Add "edge" that connected from this node (node_id ---> neighbor_id).
:param neighbor_id: dest node key
:param weight: edge's weight
"""
self.__ni_out[neighbor_id] = weight
def add_neighbor_in(self, neighbor_id: int, weight: float) -> None:
"""
Add "edge" that connected to this node (neighbor_id ---> node_id).
:param neighbor_id: dest node key
:param weight: edge's weight
"""
self.__ni_in[neighbor_id] = weight
def get_connections_out(self) -> dict:
"""
Return a dictionary that holds all the "edges" that connected from this node,
each edge is represented using a pair (key, edge weight).
:return: dictionary (key, edge weight).
"""
return self.__ni_out
def get_connections_in(self) -> dict:
"""
Return a dictionary that holds all the "edges" that connected to this node,
each edge is represented using a pair (key, edge weight).
:return: dictionary (key, edge weight).
"""
return self.__ni_in
def get_key(self) -> int:
"""
Return this node key.
:return: key
"""
return self.__key
def get_location(self) -> tuple:
"""
Return this node location as a 3DPoint (x, y, z).
:return: this node location
"""
return self.__location
def set_location(self, location: tuple) -> None:
"""
Allows to add location to this node.
This method used for load and plot graphs that their nodes have no position.
:param location: the new position of this node
"""
self.__location = location
def as_dict_node(self):
"""
Return the node as dictionary {"pos": "x", "y", "z", "id": key}
:return: the node as dictionary
"""
loc_as_str = str(self.get_location())
m_dict = {"pos": loc_as_str[1:-1], "id": self.get_key()}
return m_dict
def as_dict_edge(self):
"""
Return the edge as dictionary {"src": src node_id, "w": edge weight, "dest": dest node_id}
:return: the edge as dictionary
"""
l_list = []
for k, v in self.get_connections_out().items():
m_dict = {"src": int(self.get_key()), "w": float(v), "dest": int(k)}
l_list.append(m_dict)
return l_list
def __repr__(self):
return str([self.get_key()])
def __str__(self) -> str:
return "Node: id: " + str(self.__key) + ' neighbors: ' + str(self.__ni_out)
def __eq__(self, o: object) -> bool:
if self is o:
return True
if o is None or self.__class__ is not o.__class__:
return False
other = o
return self.__key == other.__key and self.__location.__eq__(other.__location) and self.__ni_in.__eq__(
other.__ni_in) and self.__ni_out.__eq__(other.__ni_out)
|
normal
|
{
"blob_id": "9c3f6c368c764918da5cce44da574b7c041fa414",
"index": 1364,
"step-1": "class Node:\n <mask token>\n\n def __init__(self, k: int=None, loc: tuple=None, **kwargs):\n \"\"\"\n Each node contain dew fields:\n key: node_id.\n location: node's position represent as 3DPoint.\n ni_out: a dictionary that holds all the \"edges\" that connected from this node,\n each edge is represented using a pair (key, edge weight).\n ni_in: a dictionary that holds all the \"edges\" that connected to this node,\n each edge is represented using a pair (key, edge weight)\n \"\"\"\n self.__key = k\n self.__location = loc\n self.__ni_out = {}\n self.__ni_in = {}\n\n def add_neighbor_out(self, neighbor_id: int, weight: float) ->None:\n \"\"\"\n Add \"edge\" that connected from this node (node_id ---> neighbor_id).\n :param neighbor_id: dest node key\n :param weight: edge's weight\n \"\"\"\n self.__ni_out[neighbor_id] = weight\n\n def add_neighbor_in(self, neighbor_id: int, weight: float) ->None:\n \"\"\"\n Add \"edge\" that connected to this node (neighbor_id ---> node_id).\n :param neighbor_id: dest node key\n :param weight: edge's weight\n \"\"\"\n self.__ni_in[neighbor_id] = weight\n\n def get_connections_out(self) ->dict:\n \"\"\"\n Return a dictionary that holds all the \"edges\" that connected from this node,\n each edge is represented using a pair (key, edge weight).\n :return: dictionary (key, edge weight).\n \"\"\"\n return self.__ni_out\n\n def get_connections_in(self) ->dict:\n \"\"\"\n Return a dictionary that holds all the \"edges\" that connected to this node,\n each edge is represented using a pair (key, edge weight).\n :return: dictionary (key, edge weight).\n \"\"\"\n return self.__ni_in\n\n def get_key(self) ->int:\n \"\"\"\n Return this node key.\n :return: key\n \"\"\"\n return self.__key\n <mask token>\n\n def set_location(self, location: tuple) ->None:\n \"\"\"\n Allows to add location to this node.\n This method used for load and plot graphs that their nodes have no position.\n :param location: the new position of this node\n 
\"\"\"\n self.__location = location\n\n def as_dict_node(self):\n \"\"\"\n Return the node as dictionary {\"pos\": \"x\", \"y\", \"z\", \"id\": key}\n :return: the node as dictionary\n \"\"\"\n loc_as_str = str(self.get_location())\n m_dict = {'pos': loc_as_str[1:-1], 'id': self.get_key()}\n return m_dict\n\n def as_dict_edge(self):\n \"\"\"\n Return the edge as dictionary {\"src\": src node_id, \"w\": edge weight, \"dest\": dest node_id}\n :return: the edge as dictionary\n \"\"\"\n l_list = []\n for k, v in self.get_connections_out().items():\n m_dict = {'src': int(self.get_key()), 'w': float(v), 'dest': int(k)\n }\n l_list.append(m_dict)\n return l_list\n <mask token>\n\n def __str__(self) ->str:\n return 'Node: id: ' + str(self.__key) + ' neighbors: ' + str(self.\n __ni_out)\n\n def __eq__(self, o: object) ->bool:\n if self is o:\n return True\n if o is None or self.__class__ is not o.__class__:\n return False\n other = o\n return self.__key == other.__key and self.__location.__eq__(other.\n __location) and self.__ni_in.__eq__(other.__ni_in\n ) and self.__ni_out.__eq__(other.__ni_out)\n",
"step-2": "class Node:\n <mask token>\n\n def __init__(self, k: int=None, loc: tuple=None, **kwargs):\n \"\"\"\n Each node contain dew fields:\n key: node_id.\n location: node's position represent as 3DPoint.\n ni_out: a dictionary that holds all the \"edges\" that connected from this node,\n each edge is represented using a pair (key, edge weight).\n ni_in: a dictionary that holds all the \"edges\" that connected to this node,\n each edge is represented using a pair (key, edge weight)\n \"\"\"\n self.__key = k\n self.__location = loc\n self.__ni_out = {}\n self.__ni_in = {}\n\n def add_neighbor_out(self, neighbor_id: int, weight: float) ->None:\n \"\"\"\n Add \"edge\" that connected from this node (node_id ---> neighbor_id).\n :param neighbor_id: dest node key\n :param weight: edge's weight\n \"\"\"\n self.__ni_out[neighbor_id] = weight\n\n def add_neighbor_in(self, neighbor_id: int, weight: float) ->None:\n \"\"\"\n Add \"edge\" that connected to this node (neighbor_id ---> node_id).\n :param neighbor_id: dest node key\n :param weight: edge's weight\n \"\"\"\n self.__ni_in[neighbor_id] = weight\n\n def get_connections_out(self) ->dict:\n \"\"\"\n Return a dictionary that holds all the \"edges\" that connected from this node,\n each edge is represented using a pair (key, edge weight).\n :return: dictionary (key, edge weight).\n \"\"\"\n return self.__ni_out\n\n def get_connections_in(self) ->dict:\n \"\"\"\n Return a dictionary that holds all the \"edges\" that connected to this node,\n each edge is represented using a pair (key, edge weight).\n :return: dictionary (key, edge weight).\n \"\"\"\n return self.__ni_in\n\n def get_key(self) ->int:\n \"\"\"\n Return this node key.\n :return: key\n \"\"\"\n return self.__key\n <mask token>\n\n def set_location(self, location: tuple) ->None:\n \"\"\"\n Allows to add location to this node.\n This method used for load and plot graphs that their nodes have no position.\n :param location: the new position of this node\n 
\"\"\"\n self.__location = location\n\n def as_dict_node(self):\n \"\"\"\n Return the node as dictionary {\"pos\": \"x\", \"y\", \"z\", \"id\": key}\n :return: the node as dictionary\n \"\"\"\n loc_as_str = str(self.get_location())\n m_dict = {'pos': loc_as_str[1:-1], 'id': self.get_key()}\n return m_dict\n\n def as_dict_edge(self):\n \"\"\"\n Return the edge as dictionary {\"src\": src node_id, \"w\": edge weight, \"dest\": dest node_id}\n :return: the edge as dictionary\n \"\"\"\n l_list = []\n for k, v in self.get_connections_out().items():\n m_dict = {'src': int(self.get_key()), 'w': float(v), 'dest': int(k)\n }\n l_list.append(m_dict)\n return l_list\n\n def __repr__(self):\n return str([self.get_key()])\n\n def __str__(self) ->str:\n return 'Node: id: ' + str(self.__key) + ' neighbors: ' + str(self.\n __ni_out)\n\n def __eq__(self, o: object) ->bool:\n if self is o:\n return True\n if o is None or self.__class__ is not o.__class__:\n return False\n other = o\n return self.__key == other.__key and self.__location.__eq__(other.\n __location) and self.__ni_in.__eq__(other.__ni_in\n ) and self.__ni_out.__eq__(other.__ni_out)\n",
"step-3": "class Node:\n <mask token>\n\n def __init__(self, k: int=None, loc: tuple=None, **kwargs):\n \"\"\"\n Each node contain dew fields:\n key: node_id.\n location: node's position represent as 3DPoint.\n ni_out: a dictionary that holds all the \"edges\" that connected from this node,\n each edge is represented using a pair (key, edge weight).\n ni_in: a dictionary that holds all the \"edges\" that connected to this node,\n each edge is represented using a pair (key, edge weight)\n \"\"\"\n self.__key = k\n self.__location = loc\n self.__ni_out = {}\n self.__ni_in = {}\n\n def add_neighbor_out(self, neighbor_id: int, weight: float) ->None:\n \"\"\"\n Add \"edge\" that connected from this node (node_id ---> neighbor_id).\n :param neighbor_id: dest node key\n :param weight: edge's weight\n \"\"\"\n self.__ni_out[neighbor_id] = weight\n\n def add_neighbor_in(self, neighbor_id: int, weight: float) ->None:\n \"\"\"\n Add \"edge\" that connected to this node (neighbor_id ---> node_id).\n :param neighbor_id: dest node key\n :param weight: edge's weight\n \"\"\"\n self.__ni_in[neighbor_id] = weight\n\n def get_connections_out(self) ->dict:\n \"\"\"\n Return a dictionary that holds all the \"edges\" that connected from this node,\n each edge is represented using a pair (key, edge weight).\n :return: dictionary (key, edge weight).\n \"\"\"\n return self.__ni_out\n\n def get_connections_in(self) ->dict:\n \"\"\"\n Return a dictionary that holds all the \"edges\" that connected to this node,\n each edge is represented using a pair (key, edge weight).\n :return: dictionary (key, edge weight).\n \"\"\"\n return self.__ni_in\n\n def get_key(self) ->int:\n \"\"\"\n Return this node key.\n :return: key\n \"\"\"\n return self.__key\n\n def get_location(self) ->tuple:\n \"\"\"\n Return this node location as a 3DPoint (x, y, z).\n :return: this node location\n \"\"\"\n return self.__location\n\n def set_location(self, location: tuple) ->None:\n \"\"\"\n Allows to add location to 
this node.\n This method used for load and plot graphs that their nodes have no position.\n :param location: the new position of this node\n \"\"\"\n self.__location = location\n\n def as_dict_node(self):\n \"\"\"\n Return the node as dictionary {\"pos\": \"x\", \"y\", \"z\", \"id\": key}\n :return: the node as dictionary\n \"\"\"\n loc_as_str = str(self.get_location())\n m_dict = {'pos': loc_as_str[1:-1], 'id': self.get_key()}\n return m_dict\n\n def as_dict_edge(self):\n \"\"\"\n Return the edge as dictionary {\"src\": src node_id, \"w\": edge weight, \"dest\": dest node_id}\n :return: the edge as dictionary\n \"\"\"\n l_list = []\n for k, v in self.get_connections_out().items():\n m_dict = {'src': int(self.get_key()), 'w': float(v), 'dest': int(k)\n }\n l_list.append(m_dict)\n return l_list\n\n def __repr__(self):\n return str([self.get_key()])\n\n def __str__(self) ->str:\n return 'Node: id: ' + str(self.__key) + ' neighbors: ' + str(self.\n __ni_out)\n\n def __eq__(self, o: object) ->bool:\n if self is o:\n return True\n if o is None or self.__class__ is not o.__class__:\n return False\n other = o\n return self.__key == other.__key and self.__location.__eq__(other.\n __location) and self.__ni_in.__eq__(other.__ni_in\n ) and self.__ni_out.__eq__(other.__ni_out)\n",
"step-4": "class Node:\n \"\"\"\n This class represent a node (vertex).\n \"\"\"\n\n def __init__(self, k: int=None, loc: tuple=None, **kwargs):\n \"\"\"\n Each node contain dew fields:\n key: node_id.\n location: node's position represent as 3DPoint.\n ni_out: a dictionary that holds all the \"edges\" that connected from this node,\n each edge is represented using a pair (key, edge weight).\n ni_in: a dictionary that holds all the \"edges\" that connected to this node,\n each edge is represented using a pair (key, edge weight)\n \"\"\"\n self.__key = k\n self.__location = loc\n self.__ni_out = {}\n self.__ni_in = {}\n\n def add_neighbor_out(self, neighbor_id: int, weight: float) ->None:\n \"\"\"\n Add \"edge\" that connected from this node (node_id ---> neighbor_id).\n :param neighbor_id: dest node key\n :param weight: edge's weight\n \"\"\"\n self.__ni_out[neighbor_id] = weight\n\n def add_neighbor_in(self, neighbor_id: int, weight: float) ->None:\n \"\"\"\n Add \"edge\" that connected to this node (neighbor_id ---> node_id).\n :param neighbor_id: dest node key\n :param weight: edge's weight\n \"\"\"\n self.__ni_in[neighbor_id] = weight\n\n def get_connections_out(self) ->dict:\n \"\"\"\n Return a dictionary that holds all the \"edges\" that connected from this node,\n each edge is represented using a pair (key, edge weight).\n :return: dictionary (key, edge weight).\n \"\"\"\n return self.__ni_out\n\n def get_connections_in(self) ->dict:\n \"\"\"\n Return a dictionary that holds all the \"edges\" that connected to this node,\n each edge is represented using a pair (key, edge weight).\n :return: dictionary (key, edge weight).\n \"\"\"\n return self.__ni_in\n\n def get_key(self) ->int:\n \"\"\"\n Return this node key.\n :return: key\n \"\"\"\n return self.__key\n\n def get_location(self) ->tuple:\n \"\"\"\n Return this node location as a 3DPoint (x, y, z).\n :return: this node location\n \"\"\"\n return self.__location\n\n def set_location(self, location: tuple) 
->None:\n \"\"\"\n Allows to add location to this node.\n This method used for load and plot graphs that their nodes have no position.\n :param location: the new position of this node\n \"\"\"\n self.__location = location\n\n def as_dict_node(self):\n \"\"\"\n Return the node as dictionary {\"pos\": \"x\", \"y\", \"z\", \"id\": key}\n :return: the node as dictionary\n \"\"\"\n loc_as_str = str(self.get_location())\n m_dict = {'pos': loc_as_str[1:-1], 'id': self.get_key()}\n return m_dict\n\n def as_dict_edge(self):\n \"\"\"\n Return the edge as dictionary {\"src\": src node_id, \"w\": edge weight, \"dest\": dest node_id}\n :return: the edge as dictionary\n \"\"\"\n l_list = []\n for k, v in self.get_connections_out().items():\n m_dict = {'src': int(self.get_key()), 'w': float(v), 'dest': int(k)\n }\n l_list.append(m_dict)\n return l_list\n\n def __repr__(self):\n return str([self.get_key()])\n\n def __str__(self) ->str:\n return 'Node: id: ' + str(self.__key) + ' neighbors: ' + str(self.\n __ni_out)\n\n def __eq__(self, o: object) ->bool:\n if self is o:\n return True\n if o is None or self.__class__ is not o.__class__:\n return False\n other = o\n return self.__key == other.__key and self.__location.__eq__(other.\n __location) and self.__ni_in.__eq__(other.__ni_in\n ) and self.__ni_out.__eq__(other.__ni_out)\n",
"step-5": "class Node:\n \"\"\"\n This class represent a node (vertex).\n \"\"\"\n\n def __init__(self, k: int = None, loc: tuple = None, **kwargs):\n \"\"\"\n Each node contain dew fields:\n key: node_id.\n location: node's position represent as 3DPoint.\n ni_out: a dictionary that holds all the \"edges\" that connected from this node,\n each edge is represented using a pair (key, edge weight).\n ni_in: a dictionary that holds all the \"edges\" that connected to this node,\n each edge is represented using a pair (key, edge weight)\n \"\"\"\n self.__key = k\n self.__location = loc\n self.__ni_out = {}\n self.__ni_in = {}\n\n def add_neighbor_out(self, neighbor_id: int, weight: float) -> None:\n \"\"\"\n Add \"edge\" that connected from this node (node_id ---> neighbor_id).\n :param neighbor_id: dest node key\n :param weight: edge's weight\n \"\"\"\n self.__ni_out[neighbor_id] = weight\n\n def add_neighbor_in(self, neighbor_id: int, weight: float) -> None:\n \"\"\"\n Add \"edge\" that connected to this node (neighbor_id ---> node_id).\n :param neighbor_id: dest node key\n :param weight: edge's weight\n \"\"\"\n self.__ni_in[neighbor_id] = weight\n\n def get_connections_out(self) -> dict:\n \"\"\"\n Return a dictionary that holds all the \"edges\" that connected from this node,\n each edge is represented using a pair (key, edge weight).\n :return: dictionary (key, edge weight).\n \"\"\"\n return self.__ni_out\n\n def get_connections_in(self) -> dict:\n \"\"\"\n Return a dictionary that holds all the \"edges\" that connected to this node,\n each edge is represented using a pair (key, edge weight).\n :return: dictionary (key, edge weight).\n \"\"\"\n return self.__ni_in\n\n def get_key(self) -> int:\n \"\"\"\n Return this node key.\n :return: key\n \"\"\"\n return self.__key\n\n def get_location(self) -> tuple:\n \"\"\"\n Return this node location as a 3DPoint (x, y, z).\n :return: this node location\n \"\"\"\n return self.__location\n\n def set_location(self, 
location: tuple) -> None:\n \"\"\"\n Allows to add location to this node.\n This method used for load and plot graphs that their nodes have no position.\n :param location: the new position of this node\n \"\"\"\n self.__location = location\n\n def as_dict_node(self):\n \"\"\"\n Return the node as dictionary {\"pos\": \"x\", \"y\", \"z\", \"id\": key}\n :return: the node as dictionary\n \"\"\"\n loc_as_str = str(self.get_location())\n m_dict = {\"pos\": loc_as_str[1:-1], \"id\": self.get_key()}\n return m_dict\n\n def as_dict_edge(self):\n \"\"\"\n Return the edge as dictionary {\"src\": src node_id, \"w\": edge weight, \"dest\": dest node_id}\n :return: the edge as dictionary\n \"\"\"\n l_list = []\n for k, v in self.get_connections_out().items():\n m_dict = {\"src\": int(self.get_key()), \"w\": float(v), \"dest\": int(k)}\n l_list.append(m_dict)\n return l_list\n\n def __repr__(self):\n return str([self.get_key()])\n\n def __str__(self) -> str:\n return \"Node: id: \" + str(self.__key) + ' neighbors: ' + str(self.__ni_out)\n\n def __eq__(self, o: object) -> bool:\n if self is o:\n return True\n if o is None or self.__class__ is not o.__class__:\n return False\n other = o\n return self.__key == other.__key and self.__location.__eq__(other.__location) and self.__ni_in.__eq__(\n other.__ni_in) and self.__ni_out.__eq__(other.__ni_out)",
"step-ids": [
12,
13,
14,
15,
16
]
}
|
[
12,
13,
14,
15,
16
] |
# Generated by Django 2.2.16 on 2020-10-27 14:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('trades', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='orderinfo',
name='nonce_str',
field=models.CharField(blank=True, max_length=50, null=True, unique=True, verbose_name='随机加密串'),
),
]
|
normal
|
{
"blob_id": "4e04e748a97c59a26a394b049c15d96476b98517",
"index": 9382,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('trades', '0001_initial')]\n operations = [migrations.AddField(model_name='orderinfo', name=\n 'nonce_str', field=models.CharField(blank=True, max_length=50, null\n =True, unique=True, verbose_name='随机加密串'))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('trades', '0001_initial')]\n operations = [migrations.AddField(model_name='orderinfo', name=\n 'nonce_str', field=models.CharField(blank=True, max_length=50, null\n =True, unique=True, verbose_name='随机加密串'))]\n",
"step-5": "# Generated by Django 2.2.16 on 2020-10-27 14:55\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('trades', '0001_initial'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='orderinfo',\n name='nonce_str',\n field=models.CharField(blank=True, max_length=50, null=True, unique=True, verbose_name='随机加密串'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
from distutils.core import Extension
REPROJECT_ROOT = os.path.relpath(os.path.dirname(__file__))
def get_extensions():
libraries = []
sources = []
sources.append(os.path.join(REPROJECT_ROOT, "_overlap.c"))
sources.append(os.path.join(REPROJECT_ROOT, "overlapArea.c"))
sources.append(os.path.join(REPROJECT_ROOT, "reproject_slice_c.c"))
include_dirs = ['numpy']
include_dirs.append(REPROJECT_ROOT)
extension = Extension(
name="reproject.spherical_intersect._overlap",
sources=sources,
include_dirs=include_dirs,
libraries=libraries,
language="c",
extra_compile_args=['-O2'])
return [extension]
def get_package_data():
header_files = ['overlapArea.h', 'reproject_slice_c.h', 'mNaN.h']
return {'reproject.spherical_intersect': header_files}
|
normal
|
{
"blob_id": "ad079876476f6f291ad52aece8d0d5afdd5a8bcf",
"index": 9892,
"step-1": "<mask token>\n\n\ndef get_extensions():\n libraries = []\n sources = []\n sources.append(os.path.join(REPROJECT_ROOT, '_overlap.c'))\n sources.append(os.path.join(REPROJECT_ROOT, 'overlapArea.c'))\n sources.append(os.path.join(REPROJECT_ROOT, 'reproject_slice_c.c'))\n include_dirs = ['numpy']\n include_dirs.append(REPROJECT_ROOT)\n extension = Extension(name='reproject.spherical_intersect._overlap',\n sources=sources, include_dirs=include_dirs, libraries=libraries,\n language='c', extra_compile_args=['-O2'])\n return [extension]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_extensions():\n libraries = []\n sources = []\n sources.append(os.path.join(REPROJECT_ROOT, '_overlap.c'))\n sources.append(os.path.join(REPROJECT_ROOT, 'overlapArea.c'))\n sources.append(os.path.join(REPROJECT_ROOT, 'reproject_slice_c.c'))\n include_dirs = ['numpy']\n include_dirs.append(REPROJECT_ROOT)\n extension = Extension(name='reproject.spherical_intersect._overlap',\n sources=sources, include_dirs=include_dirs, libraries=libraries,\n language='c', extra_compile_args=['-O2'])\n return [extension]\n\n\ndef get_package_data():\n header_files = ['overlapArea.h', 'reproject_slice_c.h', 'mNaN.h']\n return {'reproject.spherical_intersect': header_files}\n",
"step-3": "<mask token>\nREPROJECT_ROOT = os.path.relpath(os.path.dirname(__file__))\n\n\ndef get_extensions():\n libraries = []\n sources = []\n sources.append(os.path.join(REPROJECT_ROOT, '_overlap.c'))\n sources.append(os.path.join(REPROJECT_ROOT, 'overlapArea.c'))\n sources.append(os.path.join(REPROJECT_ROOT, 'reproject_slice_c.c'))\n include_dirs = ['numpy']\n include_dirs.append(REPROJECT_ROOT)\n extension = Extension(name='reproject.spherical_intersect._overlap',\n sources=sources, include_dirs=include_dirs, libraries=libraries,\n language='c', extra_compile_args=['-O2'])\n return [extension]\n\n\ndef get_package_data():\n header_files = ['overlapArea.h', 'reproject_slice_c.h', 'mNaN.h']\n return {'reproject.spherical_intersect': header_files}\n",
"step-4": "import os\nfrom distutils.core import Extension\nREPROJECT_ROOT = os.path.relpath(os.path.dirname(__file__))\n\n\ndef get_extensions():\n libraries = []\n sources = []\n sources.append(os.path.join(REPROJECT_ROOT, '_overlap.c'))\n sources.append(os.path.join(REPROJECT_ROOT, 'overlapArea.c'))\n sources.append(os.path.join(REPROJECT_ROOT, 'reproject_slice_c.c'))\n include_dirs = ['numpy']\n include_dirs.append(REPROJECT_ROOT)\n extension = Extension(name='reproject.spherical_intersect._overlap',\n sources=sources, include_dirs=include_dirs, libraries=libraries,\n language='c', extra_compile_args=['-O2'])\n return [extension]\n\n\ndef get_package_data():\n header_files = ['overlapArea.h', 'reproject_slice_c.h', 'mNaN.h']\n return {'reproject.spherical_intersect': header_files}\n",
"step-5": "import os\nfrom distutils.core import Extension\n\nREPROJECT_ROOT = os.path.relpath(os.path.dirname(__file__))\n\n\ndef get_extensions():\n\n libraries = []\n\n sources = []\n sources.append(os.path.join(REPROJECT_ROOT, \"_overlap.c\"))\n sources.append(os.path.join(REPROJECT_ROOT, \"overlapArea.c\"))\n sources.append(os.path.join(REPROJECT_ROOT, \"reproject_slice_c.c\"))\n\n include_dirs = ['numpy']\n include_dirs.append(REPROJECT_ROOT)\n\n extension = Extension(\n name=\"reproject.spherical_intersect._overlap\",\n sources=sources,\n include_dirs=include_dirs,\n libraries=libraries,\n language=\"c\",\n extra_compile_args=['-O2'])\n\n return [extension]\n\n\ndef get_package_data():\n\n header_files = ['overlapArea.h', 'reproject_slice_c.h', 'mNaN.h']\n\n return {'reproject.spherical_intersect': header_files}\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# Generated by Django 2.1.7 on 2019-03-14 07:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('training_area', '0002_event'),
]
operations = [
migrations.AddField(
model_name='event',
name='athlete',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='athlete_calendar', to='training_area.Athlete'),
),
migrations.AddField(
model_name='event',
name='coach',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='coach_calendar', to='training_area.Coach'),
),
]
|
normal
|
{
"blob_id": "9555ed63b3906ec23c31839691a089aad9d96c63",
"index": 9917,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('training_area', '0002_event')]\n operations = [migrations.AddField(model_name='event', name='athlete',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.\n models.deletion.CASCADE, related_name='athlete_calendar', to=\n 'training_area.Athlete')), migrations.AddField(model_name='event',\n name='coach', field=models.ForeignKey(blank=True, null=True,\n on_delete=django.db.models.deletion.CASCADE, related_name=\n 'coach_calendar', to='training_area.Coach'))]\n",
"step-4": "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('training_area', '0002_event')]\n operations = [migrations.AddField(model_name='event', name='athlete',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.\n models.deletion.CASCADE, related_name='athlete_calendar', to=\n 'training_area.Athlete')), migrations.AddField(model_name='event',\n name='coach', field=models.ForeignKey(blank=True, null=True,\n on_delete=django.db.models.deletion.CASCADE, related_name=\n 'coach_calendar', to='training_area.Coach'))]\n",
"step-5": "# Generated by Django 2.1.7 on 2019-03-14 07:27\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('training_area', '0002_event'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='event',\n name='athlete',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='athlete_calendar', to='training_area.Athlete'),\n ),\n migrations.AddField(\n model_name='event',\n name='coach',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='coach_calendar', to='training_area.Coach'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from ddt import ddt, data, unpack
import sys
sys.path.append("..")
from pages.homepage import HomePage
from base.basetestcase import BaseTestCase
from helpers.filedatahelper import get_data
@ddt
class QuickSearchTest(BaseTestCase):
testingdata = get_data('testdata/QuickSearchTestData.xlsx')
@data(*testingdata)
@unpack
def test_QuickSearch(self, search_value, expected_result, notes):
homepage = HomePage(self.driver)
search_results = homepage.search.searchFor(search_value)
self.assertTrue(expected_result in search_results.get_results())
if __name__ == '__main__':
unittest.main(verbosity=2)
|
normal
|
{
"blob_id": "4ba0f7e947830018695c8c9e68a96426f49b4b5b",
"index": 3326,
"step-1": "<mask token>\n\n\n@ddt\nclass QuickSearchTest(BaseTestCase):\n <mask token>\n\n @data(*testingdata)\n @unpack\n def test_QuickSearch(self, search_value, expected_result, notes):\n homepage = HomePage(self.driver)\n search_results = homepage.search.searchFor(search_value)\n self.assertTrue(expected_result in search_results.get_results())\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@ddt\nclass QuickSearchTest(BaseTestCase):\n testingdata = get_data('testdata/QuickSearchTestData.xlsx')\n\n @data(*testingdata)\n @unpack\n def test_QuickSearch(self, search_value, expected_result, notes):\n homepage = HomePage(self.driver)\n search_results = homepage.search.searchFor(search_value)\n self.assertTrue(expected_result in search_results.get_results())\n\n\n<mask token>\n",
"step-3": "<mask token>\nsys.path.append('..')\n<mask token>\n\n\n@ddt\nclass QuickSearchTest(BaseTestCase):\n testingdata = get_data('testdata/QuickSearchTestData.xlsx')\n\n @data(*testingdata)\n @unpack\n def test_QuickSearch(self, search_value, expected_result, notes):\n homepage = HomePage(self.driver)\n search_results = homepage.search.searchFor(search_value)\n self.assertTrue(expected_result in search_results.get_results())\n\n\nif __name__ == '__main__':\n unittest.main(verbosity=2)\n",
"step-4": "from ddt import ddt, data, unpack\nimport sys\nsys.path.append('..')\nfrom pages.homepage import HomePage\nfrom base.basetestcase import BaseTestCase\nfrom helpers.filedatahelper import get_data\n\n\n@ddt\nclass QuickSearchTest(BaseTestCase):\n testingdata = get_data('testdata/QuickSearchTestData.xlsx')\n\n @data(*testingdata)\n @unpack\n def test_QuickSearch(self, search_value, expected_result, notes):\n homepage = HomePage(self.driver)\n search_results = homepage.search.searchFor(search_value)\n self.assertTrue(expected_result in search_results.get_results())\n\n\nif __name__ == '__main__':\n unittest.main(verbosity=2)\n",
"step-5": "from ddt import ddt, data, unpack\nimport sys\nsys.path.append(\"..\")\nfrom pages.homepage import HomePage\nfrom base.basetestcase import BaseTestCase\nfrom helpers.filedatahelper import get_data\n\n\n@ddt\nclass QuickSearchTest(BaseTestCase):\n testingdata = get_data('testdata/QuickSearchTestData.xlsx')\n @data(*testingdata)\n @unpack\n def test_QuickSearch(self, search_value, expected_result, notes):\n homepage = HomePage(self.driver)\n search_results = homepage.search.searchFor(search_value)\n self.assertTrue(expected_result in search_results.get_results())\n\nif __name__ == '__main__':\n unittest.main(verbosity=2)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from django.core.exceptions import ValidationError
from django.utils import timezone
def year_validator(value):
if value < 1 or value > timezone.now().year:
raise ValidationError(
('%s is not a correct year!' % value)
)
def raiting_validator(value):
if value < 1 or value > 10:
raise ValidationError(
('%s is not a caorrect raiting!' % value)
)
|
normal
|
{
"blob_id": "7a6d5309580b673413f57047e631a08e61e837cf",
"index": 4447,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef raiting_validator(value):\n if value < 1 or value > 10:\n raise ValidationError('%s is not a caorrect raiting!' % value)\n",
"step-3": "<mask token>\n\n\ndef year_validator(value):\n if value < 1 or value > timezone.now().year:\n raise ValidationError('%s is not a correct year!' % value)\n\n\ndef raiting_validator(value):\n if value < 1 or value > 10:\n raise ValidationError('%s is not a caorrect raiting!' % value)\n",
"step-4": "from django.core.exceptions import ValidationError\nfrom django.utils import timezone\n\n\ndef year_validator(value):\n if value < 1 or value > timezone.now().year:\n raise ValidationError('%s is not a correct year!' % value)\n\n\ndef raiting_validator(value):\n if value < 1 or value > 10:\n raise ValidationError('%s is not a caorrect raiting!' % value)\n",
"step-5": "from django.core.exceptions import ValidationError\nfrom django.utils import timezone\n\n\ndef year_validator(value):\n if value < 1 or value > timezone.now().year:\n raise ValidationError(\n ('%s is not a correct year!' % value)\n )\n\n\ndef raiting_validator(value):\n if value < 1 or value > 10:\n raise ValidationError(\n ('%s is not a caorrect raiting!' % value)\n )\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(weekdays[a])
<|reserved_special_token_1|>
weekdays = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',
'Saturday', 'Sunday']
i = input('Enter a day of the week and number of days: ').split()
e = int(i[-1])
starting_point = weekdays.index(i[0])
a = e + starting_point - len(weekdays)
print(weekdays[a])
|
flexible
|
{
"blob_id": "5f7d05c642339ce0ab02a65ca41f9ee89c2faf57",
"index": 4240,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(weekdays[a])\n",
"step-3": "weekdays = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',\n 'Saturday', 'Sunday']\ni = input('Enter a day of the week and number of days: ').split()\ne = int(i[-1])\nstarting_point = weekdays.index(i[0])\na = e + starting_point - len(weekdays)\nprint(weekdays[a])\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def Create(obj, targetColor, control, attr, offset):
shape = obj.getShape()
name = obj.name()
if type(shape) == pm.Mesh:
outVerts = []
verts = shape.vtx[:]
for i, vert in enumerate(verts):
if vert.getColor() == targetColor:
outVerts.append(vert)
uvShellsList = shape.getUvShellsIds()[0]
uvList = []
outUvShellList = []
for vert in outVerts:
uvs = vert.getUVIndices()
for uv in uvs:
uvList.append(uv)
outUvShellList.append(uvShellsList[uv])
outUvList = []
mostCommonShell = Counter(outUvShellList).most_common(1)[0][0]
for i, uvshell in enumerate(outUvShellList):
if uvshell == mostCommonShell:
outUvList.append(shape.map[uvList[i]])
if len(outVerts) > 0:
moveUV = pm.polyMoveUV(outUvList)[0]
moveUV.rename('%s_%s_moveUV' % (name, attr))
crv = pm.AnimCurveTU(name='%s_%s_animCurveTU' % (name, attr))
pm.setKeyframe(crv, t=0.0, v=0.0, itt='linear', ott='linear')
pm.setKeyframe(crv, t=20.0, v=-offset * 20, itt='linear', ott=
'linear')
control.attr(attr) >> crv.input
crv.output >> moveUV.translateV
return moveUV
else:
pm.warning('No verts found with color %s' % targetColor)
else:
pm.warning('The target must be a mesh')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def Create(obj, targetColor, control, attr, offset):
shape = obj.getShape()
name = obj.name()
if type(shape) == pm.Mesh:
outVerts = []
verts = shape.vtx[:]
for i, vert in enumerate(verts):
if vert.getColor() == targetColor:
outVerts.append(vert)
uvShellsList = shape.getUvShellsIds()[0]
uvList = []
outUvShellList = []
for vert in outVerts:
uvs = vert.getUVIndices()
for uv in uvs:
uvList.append(uv)
outUvShellList.append(uvShellsList[uv])
outUvList = []
mostCommonShell = Counter(outUvShellList).most_common(1)[0][0]
for i, uvshell in enumerate(outUvShellList):
if uvshell == mostCommonShell:
outUvList.append(shape.map[uvList[i]])
if len(outVerts) > 0:
moveUV = pm.polyMoveUV(outUvList)[0]
moveUV.rename('%s_%s_moveUV' % (name, attr))
crv = pm.AnimCurveTU(name='%s_%s_animCurveTU' % (name, attr))
pm.setKeyframe(crv, t=0.0, v=0.0, itt='linear', ott='linear')
pm.setKeyframe(crv, t=20.0, v=-offset * 20, itt='linear', ott=
'linear')
control.attr(attr) >> crv.input
crv.output >> moveUV.translateV
return moveUV
else:
pm.warning('No verts found with color %s' % targetColor)
else:
pm.warning('The target must be a mesh')
def ConnectToAttr(src, trgt, attr):
moveUVs = src.getShape().history(type='polyMoveUV')
try:
attr = pm.PyNode(trgt).attr(attr).getChildren()
except:
attr = [pm.PyNode(trgt).attr(attr)]
if len(moveUVs) > len(attr):
pm.warning(
'There are more polyMoveUV nodes that attrs to connect to %s:%s' %
(len(moveUVs), len(attr)))
else:
for i, moveUV in enumerate(moveUVs):
moveUV.translateV >> attr[i]
<|reserved_special_token_1|>
import pymel.all as pm
from collections import Counter
def Create(obj, targetColor, control, attr, offset):
shape = obj.getShape()
name = obj.name()
if type(shape) == pm.Mesh:
outVerts = []
verts = shape.vtx[:]
for i, vert in enumerate(verts):
if vert.getColor() == targetColor:
outVerts.append(vert)
uvShellsList = shape.getUvShellsIds()[0]
uvList = []
outUvShellList = []
for vert in outVerts:
uvs = vert.getUVIndices()
for uv in uvs:
uvList.append(uv)
outUvShellList.append(uvShellsList[uv])
outUvList = []
mostCommonShell = Counter(outUvShellList).most_common(1)[0][0]
for i, uvshell in enumerate(outUvShellList):
if uvshell == mostCommonShell:
outUvList.append(shape.map[uvList[i]])
if len(outVerts) > 0:
moveUV = pm.polyMoveUV(outUvList)[0]
moveUV.rename('%s_%s_moveUV' % (name, attr))
crv = pm.AnimCurveTU(name='%s_%s_animCurveTU' % (name, attr))
pm.setKeyframe(crv, t=0.0, v=0.0, itt='linear', ott='linear')
pm.setKeyframe(crv, t=20.0, v=-offset * 20, itt='linear', ott=
'linear')
control.attr(attr) >> crv.input
crv.output >> moveUV.translateV
return moveUV
else:
pm.warning('No verts found with color %s' % targetColor)
else:
pm.warning('The target must be a mesh')
def ConnectToAttr(src, trgt, attr):
moveUVs = src.getShape().history(type='polyMoveUV')
try:
attr = pm.PyNode(trgt).attr(attr).getChildren()
except:
attr = [pm.PyNode(trgt).attr(attr)]
if len(moveUVs) > len(attr):
pm.warning(
'There are more polyMoveUV nodes that attrs to connect to %s:%s' %
(len(moveUVs), len(attr)))
else:
for i, moveUV in enumerate(moveUVs):
moveUV.translateV >> attr[i]
<|reserved_special_token_1|>
import pymel.all as pm
from collections import Counter
# example
# v.Create( sel[0], pm.datatypes.Color.red, sel[1], 'leftEye', 0.2 )
# select mesh 1st then the control
def Create( obj, targetColor, control, attr, offset ) :
shape = obj.getShape()
name = obj.name()
if( type(shape) == pm.Mesh ) :
outVerts = []
verts = shape.vtx[:]
for i, vert in enumerate(verts) :
if( vert.getColor() == targetColor ) :
outVerts.append(vert)
# this needs rewriting
# what shells does this vert eblong to?
# out of teh verts we have, which shell contains the most?
uvShellsList = shape.getUvShellsIds()[0]
uvList = []
outUvShellList = []
for vert in outVerts :
uvs = vert.getUVIndices()
for uv in uvs :
uvList.append(uv)
outUvShellList.append(uvShellsList[uv])
outUvList = []
mostCommonShell = Counter(outUvShellList).most_common(1)[0][0]
for i, uvshell in enumerate(outUvShellList) :
if( uvshell == mostCommonShell ) :
outUvList.append(shape.map[uvList[i]])
# print outUvList
# return
if( len(outVerts) > 0 ) :
moveUV = pm.polyMoveUV( outUvList )[0]
moveUV.rename('%s_%s_moveUV' % ( name, attr ))
crv = pm.AnimCurveTU(name='%s_%s_animCurveTU' % ( name, attr ) )
pm.setKeyframe(crv, t=0.0, v=0.0, itt='linear', ott='linear')
pm.setKeyframe(crv, t=20.0, v=-offset * 20, itt='linear', ott='linear')
control.attr(attr) >> crv.input
crv.output >> moveUV.translateV
return moveUV
else :
pm.warning( 'No verts found with color %s' % ( targetColor ) )
else :
pm.warning('The target must be a mesh')
# use this to connect the PolyMoveUV to the joint attribute you want FF (shader) to read
# example : ConnectToAttr( sel[0], sel[1], 'translateX' ) - select mesh 1st then joint
def ConnectToAttr( src, trgt, attr ) :
moveUVs = src.getShape().history(type='polyMoveUV')
try :
attr = pm.PyNode(trgt).attr(attr).getChildren()
except :
attr = [ pm.PyNode(trgt).attr(attr) ]
if( len(moveUVs) > len(attr) ) :
pm.warning( 'There are more polyMoveUV nodes that attrs to connect to %s:%s' % ( len(moveUVs), len(attr) ) )
else :
for i, moveUV in enumerate(moveUVs) :
moveUV.translateV >> attr[i]
|
flexible
|
{
"blob_id": "9061db3bb3aa3178262af58e56126302b9effdff",
"index": 6509,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef Create(obj, targetColor, control, attr, offset):\n shape = obj.getShape()\n name = obj.name()\n if type(shape) == pm.Mesh:\n outVerts = []\n verts = shape.vtx[:]\n for i, vert in enumerate(verts):\n if vert.getColor() == targetColor:\n outVerts.append(vert)\n uvShellsList = shape.getUvShellsIds()[0]\n uvList = []\n outUvShellList = []\n for vert in outVerts:\n uvs = vert.getUVIndices()\n for uv in uvs:\n uvList.append(uv)\n outUvShellList.append(uvShellsList[uv])\n outUvList = []\n mostCommonShell = Counter(outUvShellList).most_common(1)[0][0]\n for i, uvshell in enumerate(outUvShellList):\n if uvshell == mostCommonShell:\n outUvList.append(shape.map[uvList[i]])\n if len(outVerts) > 0:\n moveUV = pm.polyMoveUV(outUvList)[0]\n moveUV.rename('%s_%s_moveUV' % (name, attr))\n crv = pm.AnimCurveTU(name='%s_%s_animCurveTU' % (name, attr))\n pm.setKeyframe(crv, t=0.0, v=0.0, itt='linear', ott='linear')\n pm.setKeyframe(crv, t=20.0, v=-offset * 20, itt='linear', ott=\n 'linear')\n control.attr(attr) >> crv.input\n crv.output >> moveUV.translateV\n return moveUV\n else:\n pm.warning('No verts found with color %s' % targetColor)\n else:\n pm.warning('The target must be a mesh')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef Create(obj, targetColor, control, attr, offset):\n shape = obj.getShape()\n name = obj.name()\n if type(shape) == pm.Mesh:\n outVerts = []\n verts = shape.vtx[:]\n for i, vert in enumerate(verts):\n if vert.getColor() == targetColor:\n outVerts.append(vert)\n uvShellsList = shape.getUvShellsIds()[0]\n uvList = []\n outUvShellList = []\n for vert in outVerts:\n uvs = vert.getUVIndices()\n for uv in uvs:\n uvList.append(uv)\n outUvShellList.append(uvShellsList[uv])\n outUvList = []\n mostCommonShell = Counter(outUvShellList).most_common(1)[0][0]\n for i, uvshell in enumerate(outUvShellList):\n if uvshell == mostCommonShell:\n outUvList.append(shape.map[uvList[i]])\n if len(outVerts) > 0:\n moveUV = pm.polyMoveUV(outUvList)[0]\n moveUV.rename('%s_%s_moveUV' % (name, attr))\n crv = pm.AnimCurveTU(name='%s_%s_animCurveTU' % (name, attr))\n pm.setKeyframe(crv, t=0.0, v=0.0, itt='linear', ott='linear')\n pm.setKeyframe(crv, t=20.0, v=-offset * 20, itt='linear', ott=\n 'linear')\n control.attr(attr) >> crv.input\n crv.output >> moveUV.translateV\n return moveUV\n else:\n pm.warning('No verts found with color %s' % targetColor)\n else:\n pm.warning('The target must be a mesh')\n\n\ndef ConnectToAttr(src, trgt, attr):\n moveUVs = src.getShape().history(type='polyMoveUV')\n try:\n attr = pm.PyNode(trgt).attr(attr).getChildren()\n except:\n attr = [pm.PyNode(trgt).attr(attr)]\n if len(moveUVs) > len(attr):\n pm.warning(\n 'There are more polyMoveUV nodes that attrs to connect to %s:%s' %\n (len(moveUVs), len(attr)))\n else:\n for i, moveUV in enumerate(moveUVs):\n moveUV.translateV >> attr[i]\n",
"step-4": "import pymel.all as pm\nfrom collections import Counter\n\n\ndef Create(obj, targetColor, control, attr, offset):\n shape = obj.getShape()\n name = obj.name()\n if type(shape) == pm.Mesh:\n outVerts = []\n verts = shape.vtx[:]\n for i, vert in enumerate(verts):\n if vert.getColor() == targetColor:\n outVerts.append(vert)\n uvShellsList = shape.getUvShellsIds()[0]\n uvList = []\n outUvShellList = []\n for vert in outVerts:\n uvs = vert.getUVIndices()\n for uv in uvs:\n uvList.append(uv)\n outUvShellList.append(uvShellsList[uv])\n outUvList = []\n mostCommonShell = Counter(outUvShellList).most_common(1)[0][0]\n for i, uvshell in enumerate(outUvShellList):\n if uvshell == mostCommonShell:\n outUvList.append(shape.map[uvList[i]])\n if len(outVerts) > 0:\n moveUV = pm.polyMoveUV(outUvList)[0]\n moveUV.rename('%s_%s_moveUV' % (name, attr))\n crv = pm.AnimCurveTU(name='%s_%s_animCurveTU' % (name, attr))\n pm.setKeyframe(crv, t=0.0, v=0.0, itt='linear', ott='linear')\n pm.setKeyframe(crv, t=20.0, v=-offset * 20, itt='linear', ott=\n 'linear')\n control.attr(attr) >> crv.input\n crv.output >> moveUV.translateV\n return moveUV\n else:\n pm.warning('No verts found with color %s' % targetColor)\n else:\n pm.warning('The target must be a mesh')\n\n\ndef ConnectToAttr(src, trgt, attr):\n moveUVs = src.getShape().history(type='polyMoveUV')\n try:\n attr = pm.PyNode(trgt).attr(attr).getChildren()\n except:\n attr = [pm.PyNode(trgt).attr(attr)]\n if len(moveUVs) > len(attr):\n pm.warning(\n 'There are more polyMoveUV nodes that attrs to connect to %s:%s' %\n (len(moveUVs), len(attr)))\n else:\n for i, moveUV in enumerate(moveUVs):\n moveUV.translateV >> attr[i]\n",
"step-5": "import pymel.all as pm\nfrom collections import Counter\n\n# example\n# v.Create( sel[0], pm.datatypes.Color.red, sel[1], 'leftEye', 0.2 )\n# select mesh 1st then the control\n\ndef Create( obj, targetColor, control, attr, offset ) :\n\tshape = obj.getShape()\n\tname = obj.name()\n\tif( type(shape) == pm.Mesh ) :\n\t\toutVerts = []\n\t\tverts = shape.vtx[:]\t\t\t\t\n\t\tfor i, vert in enumerate(verts) :\t\t\t\n\t\t\tif( vert.getColor() == targetColor ) :\n\t\t\t\toutVerts.append(vert)\n\n\n\t\t# this needs rewriting\n\t\t# what shells does this vert eblong to?\n\t\t# out of teh verts we have, which shell contains the most?\n\t\tuvShellsList = shape.getUvShellsIds()[0]\n\t\tuvList = []\n\t\toutUvShellList = []\n\t\tfor vert in outVerts :\n\t\t\tuvs = vert.getUVIndices()\n\t\t\tfor uv in uvs :\n\t\t\t\tuvList.append(uv)\n\t\t\t\toutUvShellList.append(uvShellsList[uv])\n\t\t\n\t\toutUvList = []\n\t\tmostCommonShell = Counter(outUvShellList).most_common(1)[0][0]\t\t\n\t\tfor i, uvshell in enumerate(outUvShellList) :\n\t\t\tif( uvshell == mostCommonShell ) :\n\t\t\t\toutUvList.append(shape.map[uvList[i]])\n\n\t\t# print outUvList\n\n\t\t# return\n\n\t\tif( len(outVerts) > 0 ) :\n\t\t\tmoveUV = pm.polyMoveUV( outUvList )[0]\n\t\t\tmoveUV.rename('%s_%s_moveUV' % ( name, attr ))\n\n\t\t\tcrv = pm.AnimCurveTU(name='%s_%s_animCurveTU' % ( name, attr ) )\n\t\t\tpm.setKeyframe(crv, t=0.0, v=0.0, itt='linear', ott='linear')\n\t\t\tpm.setKeyframe(crv, t=20.0, v=-offset * 20, itt='linear', ott='linear')\n\n\t\t\tcontrol.attr(attr) >> crv.input\n\t\t\tcrv.output >> moveUV.translateV\n\n\t\t\treturn moveUV\n\n\t\telse :\n\t\t\tpm.warning( 'No verts found with color %s' % ( targetColor ) )\n\n\telse :\n\t\tpm.warning('The target must be a mesh')\n\n\n\n# use this to connect the PolyMoveUV to the joint attribute you want FF (shader) to read\n# example : ConnectToAttr( sel[0], sel[1], 'translateX' ) - select mesh 1st then joint\n\ndef ConnectToAttr( src, trgt, attr ) 
:\n\tmoveUVs = src.getShape().history(type='polyMoveUV')\n\ttry :\n\t\tattr = pm.PyNode(trgt).attr(attr).getChildren()\n\texcept :\n\t\tattr = [ pm.PyNode(trgt).attr(attr) ]\n\n\tif( len(moveUVs) > len(attr) ) :\n\t\tpm.warning( 'There are more polyMoveUV nodes that attrs to connect to %s:%s' % ( len(moveUVs), len(attr) ) )\n\telse :\n\t\tfor i, moveUV in enumerate(moveUVs)\t:\n\t\t\tmoveUV.translateV >> attr[i]\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from find import Solution
array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
solution = Solution.Find(6, array)
|
normal
|
{
"blob_id": "d4361b169bf75d3af82eca3d26609961ccc2f27e",
"index": 2405,
"step-1": "<mask token>\n",
"step-2": "<mask token>\narray = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]\nsolution = Solution.Find(6, array)\n",
"step-3": "from find import Solution\narray = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]\nsolution = Solution.Find(6, array)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from .submit import *
from .fck import *
|
normal
|
{
"blob_id": "9a5ba88a61f5c27c0bc7b980fa9d865b52cbbb20",
"index": 7266,
"step-1": "<mask token>\n",
"step-2": "from .submit import *\nfrom .fck import *\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
class VehiclePhoto:
<|reserved_special_token_0|>
def __repr__(self):
return f'{self.VehiclePhotoID} {self.VIN} {self.UrlVdp}'
<|reserved_special_token_1|>
class Image:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class VehiclePhoto:
def __init__(self, **kwargs):
self.ClientID = kwargs['ClientID']
self.DealerID = kwargs['DealerID']
self.Domain = kwargs['Domain']
self.VehiclePhotoID = kwargs['VehiclePhotoID']
self.VIN = kwargs['VIN']
self.UrlVdp = kwargs['UrlVdp']
self.UrlImage = kwargs['UrlImage']
def __repr__(self):
return f'{self.VehiclePhotoID} {self.VIN} {self.UrlVdp}'
<|reserved_special_token_1|>
class Image:
def __init__(self, **kwargs):
self.ClientID = kwargs['ClientID']
self.DealerID = kwargs['DealerID']
self.VIN = kwargs['VIN']
self.UrlVdp = None
self.PhotoURL = kwargs['PhotoURL']
self.VdpActive = None
<|reserved_special_token_0|>
class VehiclePhoto:
def __init__(self, **kwargs):
self.ClientID = kwargs['ClientID']
self.DealerID = kwargs['DealerID']
self.Domain = kwargs['Domain']
self.VehiclePhotoID = kwargs['VehiclePhotoID']
self.VIN = kwargs['VIN']
self.UrlVdp = kwargs['UrlVdp']
self.UrlImage = kwargs['UrlImage']
def __repr__(self):
return f'{self.VehiclePhotoID} {self.VIN} {self.UrlVdp}'
<|reserved_special_token_1|>
class Image:
def __init__(self, **kwargs):
self.ClientID = kwargs['ClientID']
self.DealerID = kwargs['DealerID']
self.VIN = kwargs['VIN']
self.UrlVdp = None
self.PhotoURL = kwargs['PhotoURL']
self.VdpActive = None
def __repr__(self):
return f'{self.DealerID} {self.VIN} {self.UrlVdp}'
class VehiclePhoto:
def __init__(self, **kwargs):
self.ClientID = kwargs['ClientID']
self.DealerID = kwargs['DealerID']
self.Domain = kwargs['Domain']
self.VehiclePhotoID = kwargs['VehiclePhotoID']
self.VIN = kwargs['VIN']
self.UrlVdp = kwargs['UrlVdp']
self.UrlImage = kwargs['UrlImage']
def __repr__(self):
return f'{self.VehiclePhotoID} {self.VIN} {self.UrlVdp}'
<|reserved_special_token_1|>
class Image:
def __init__(self, **kwargs):
self.ClientID = kwargs['ClientID']
self.DealerID = kwargs['DealerID']
self.VIN = kwargs['VIN']
self.UrlVdp = None
self.PhotoURL = kwargs['PhotoURL']
self.VdpActive = None
def __repr__(self):
return f"{self.DealerID} {self.VIN} {self.UrlVdp}"
class VehiclePhoto:
def __init__(self, **kwargs):
self.ClientID = kwargs['ClientID']
self.DealerID = kwargs['DealerID']
self.Domain = kwargs['Domain']
self.VehiclePhotoID = kwargs['VehiclePhotoID']
self.VIN = kwargs['VIN']
self.UrlVdp = kwargs['UrlVdp']
self.UrlImage = kwargs['UrlImage']
def __repr__(self):
return f"{self.VehiclePhotoID} {self.VIN} {self.UrlVdp}"
|
flexible
|
{
"blob_id": "3dc4e10145ad42c0168fec3462da0f87c1e661a5",
"index": 8701,
"step-1": "<mask token>\n\n\nclass VehiclePhoto:\n <mask token>\n\n def __repr__(self):\n return f'{self.VehiclePhotoID} {self.VIN} {self.UrlVdp}'\n",
"step-2": "class Image:\n <mask token>\n <mask token>\n\n\nclass VehiclePhoto:\n\n def __init__(self, **kwargs):\n self.ClientID = kwargs['ClientID']\n self.DealerID = kwargs['DealerID']\n self.Domain = kwargs['Domain']\n self.VehiclePhotoID = kwargs['VehiclePhotoID']\n self.VIN = kwargs['VIN']\n self.UrlVdp = kwargs['UrlVdp']\n self.UrlImage = kwargs['UrlImage']\n\n def __repr__(self):\n return f'{self.VehiclePhotoID} {self.VIN} {self.UrlVdp}'\n",
"step-3": "class Image:\n\n def __init__(self, **kwargs):\n self.ClientID = kwargs['ClientID']\n self.DealerID = kwargs['DealerID']\n self.VIN = kwargs['VIN']\n self.UrlVdp = None\n self.PhotoURL = kwargs['PhotoURL']\n self.VdpActive = None\n <mask token>\n\n\nclass VehiclePhoto:\n\n def __init__(self, **kwargs):\n self.ClientID = kwargs['ClientID']\n self.DealerID = kwargs['DealerID']\n self.Domain = kwargs['Domain']\n self.VehiclePhotoID = kwargs['VehiclePhotoID']\n self.VIN = kwargs['VIN']\n self.UrlVdp = kwargs['UrlVdp']\n self.UrlImage = kwargs['UrlImage']\n\n def __repr__(self):\n return f'{self.VehiclePhotoID} {self.VIN} {self.UrlVdp}'\n",
"step-4": "class Image:\n\n def __init__(self, **kwargs):\n self.ClientID = kwargs['ClientID']\n self.DealerID = kwargs['DealerID']\n self.VIN = kwargs['VIN']\n self.UrlVdp = None\n self.PhotoURL = kwargs['PhotoURL']\n self.VdpActive = None\n\n def __repr__(self):\n return f'{self.DealerID} {self.VIN} {self.UrlVdp}'\n\n\nclass VehiclePhoto:\n\n def __init__(self, **kwargs):\n self.ClientID = kwargs['ClientID']\n self.DealerID = kwargs['DealerID']\n self.Domain = kwargs['Domain']\n self.VehiclePhotoID = kwargs['VehiclePhotoID']\n self.VIN = kwargs['VIN']\n self.UrlVdp = kwargs['UrlVdp']\n self.UrlImage = kwargs['UrlImage']\n\n def __repr__(self):\n return f'{self.VehiclePhotoID} {self.VIN} {self.UrlVdp}'\n",
"step-5": "class Image:\n\n def __init__(self, **kwargs):\n self.ClientID = kwargs['ClientID']\n self.DealerID = kwargs['DealerID']\n self.VIN = kwargs['VIN']\n self.UrlVdp = None\n self.PhotoURL = kwargs['PhotoURL']\n self.VdpActive = None\n \n def __repr__(self):\n return f\"{self.DealerID} {self.VIN} {self.UrlVdp}\"\n\nclass VehiclePhoto:\n def __init__(self, **kwargs):\n self.ClientID = kwargs['ClientID']\n self.DealerID = kwargs['DealerID']\n self.Domain = kwargs['Domain']\n self.VehiclePhotoID = kwargs['VehiclePhotoID']\n self.VIN = kwargs['VIN']\n self.UrlVdp = kwargs['UrlVdp']\n self.UrlImage = kwargs['UrlImage']\n \n def __repr__(self):\n return f\"{self.VehiclePhotoID} {self.VIN} {self.UrlVdp}\"",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
__author__ = 'Joe'
import sys
sys.path.insert(0,'../src/')
import grocery_functions
import unittest
class TestGroceryFuncs(unittest.TestCase):
def test_getRecipeNames(self):
recipe_names = grocery_functions.get_recipe_names("test-recipes")
self.assertTrue(recipe_names[0] == "Cajun Chicken & Rice")
self.assertTrue(recipe_names[1] == "Chicken Curry in a Hurry")
self.assertTrue(recipe_names[2] == 'Chicken_Zucchini_and_Prosciutto')
self.assertTrue(recipe_names[3] == 'Healthy Roasted Chicken and Veggies (one pan)')
self.assertTrue(recipe_names[4] == 'Kielbasa, Pepper, Onion and Potato Hash')
def test_getIngredientsFromFile(self):
list=grocery_functions.get_ingredients_from_recipe_file("test-recipes\Kielbasa, Pepper, Onion and Potato Hash.txt")
self.assertTrue(list[0].name == 'turkey kielbasa')
self.assertTrue(list[0].unit == 'ounce')
self.assertTrue(list[0].number == '14')
self.assertTrue(list[2].name == 'non-green bell pepper')
self.assertTrue(list[2].unit == '')
self.assertTrue(list[2].number == '1')
self.assertTrue(list[6].name == 'salt')
self.assertTrue(list[6].unit == '')
self.assertTrue(list[6].number == '1')
def test_getTagsFromFile(self):
list=grocery_functions.get_tags_from_recipe_file("test-recipes\Chicken Curry in a Hurry.txt")
self.assertTrue(list[0] == 'chicken')
self.assertTrue(list[1] == 'easy')
self.assertTrue(list[2] == 'stove')
def test_getRecipeFromFile(self):
list=grocery_functions.get_recipe_from_recipe_file("test-recipes\Healthy Roasted Chicken and Veggies (one pan).txt")
self.assertTrue(list[2]=="1 cup bell pepper, chopped (any colors you like)")
self.assertTrue(list[10]=="1 teaspoon italian seasoning")
self.assertTrue(list[15]=="Place the chicken and veggies in a medium roasting dish or sheet pan. Add the olive oil, ")
def test_condenseList(self):
recipe_names = grocery_functions.get_recipe_names("test-recipes")
grocery_list=[]
for recipe in recipe_names:
grocery_list += grocery_functions.get_ingredients_from_recipe_file("test-recipes\\"+recipe+".txt")
grocery_list=grocery_functions.condense_grocery_list(grocery_list)
# grocery_functions.print_grocery_list(grocery_list)
# grocery_functions.sort_and_print_grocery_List(grocery_list, "Smiths-Eu-JT-ItemDepartments.txt")
def test_makeAllIngredientsFile(self):
grocery_functions.make_all_ingredients_file()
def test_getItemDeptDicts(self):
grocery_functions.get_item_dept_dicts("Smiths-Eu-JT-ItemDepartments.txt")
def test_checkRecipeFormat(self):
errors=grocery_functions.check_recipe_format("test-recipes", False)
self.assertTrue(errors == [])
errors=grocery_functions.check_recipe_format("broken-test-recipes", False)
self.assertTrue('invalid format, "1 lb, chicken breasts" in: broken-test-recipes//broken_recipe.txt' in errors)
self.assertTrue('invalid heading, "wrong_header" in file: broken-test-recipes//broken_recipe.txt' in errors)
self.assertTrue('Blank recipe in: broken-test-recipes//broken_recipe.txt' in errors)
def test_update_default_ing_dept_file(self):
grocery_functions.update_default_ing_dept_file(grocery_functions.get_all_ingredients("test-recipes"))
def suite(self):
return unittest.TestLoader().loadTestsFromTestCase(TestGroceryFuncs)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestGroceryFuncs)
unittest.TextTestRunner(verbosity=2).run(suite)
|
normal
|
{
"blob_id": "c4fbf206482a04f3e2d2aa98a0dbf525a176c4e7",
"index": 1087,
"step-1": "<mask token>\n\n\nclass TestGroceryFuncs(unittest.TestCase):\n\n def test_getRecipeNames(self):\n recipe_names = grocery_functions.get_recipe_names('test-recipes')\n self.assertTrue(recipe_names[0] == 'Cajun Chicken & Rice')\n self.assertTrue(recipe_names[1] == 'Chicken Curry in a Hurry')\n self.assertTrue(recipe_names[2] == 'Chicken_Zucchini_and_Prosciutto')\n self.assertTrue(recipe_names[3] ==\n 'Healthy Roasted Chicken and Veggies (one pan)')\n self.assertTrue(recipe_names[4] ==\n 'Kielbasa, Pepper, Onion and Potato Hash')\n <mask token>\n <mask token>\n <mask token>\n\n def test_condenseList(self):\n recipe_names = grocery_functions.get_recipe_names('test-recipes')\n grocery_list = []\n for recipe in recipe_names:\n grocery_list += grocery_functions.get_ingredients_from_recipe_file(\n 'test-recipes\\\\' + recipe + '.txt')\n grocery_list = grocery_functions.condense_grocery_list(grocery_list)\n <mask token>\n <mask token>\n <mask token>\n\n def test_update_default_ing_dept_file(self):\n grocery_functions.update_default_ing_dept_file(grocery_functions.\n get_all_ingredients('test-recipes'))\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestGroceryFuncs(unittest.TestCase):\n\n def test_getRecipeNames(self):\n recipe_names = grocery_functions.get_recipe_names('test-recipes')\n self.assertTrue(recipe_names[0] == 'Cajun Chicken & Rice')\n self.assertTrue(recipe_names[1] == 'Chicken Curry in a Hurry')\n self.assertTrue(recipe_names[2] == 'Chicken_Zucchini_and_Prosciutto')\n self.assertTrue(recipe_names[3] ==\n 'Healthy Roasted Chicken and Veggies (one pan)')\n self.assertTrue(recipe_names[4] ==\n 'Kielbasa, Pepper, Onion and Potato Hash')\n\n def test_getIngredientsFromFile(self):\n list = grocery_functions.get_ingredients_from_recipe_file(\n 'test-recipes\\\\Kielbasa, Pepper, Onion and Potato Hash.txt')\n self.assertTrue(list[0].name == 'turkey kielbasa')\n self.assertTrue(list[0].unit == 'ounce')\n self.assertTrue(list[0].number == '14')\n self.assertTrue(list[2].name == 'non-green bell pepper')\n self.assertTrue(list[2].unit == '')\n self.assertTrue(list[2].number == '1')\n self.assertTrue(list[6].name == 'salt')\n self.assertTrue(list[6].unit == '')\n self.assertTrue(list[6].number == '1')\n\n def test_getTagsFromFile(self):\n list = grocery_functions.get_tags_from_recipe_file(\n 'test-recipes\\\\Chicken Curry in a Hurry.txt')\n self.assertTrue(list[0] == 'chicken')\n self.assertTrue(list[1] == 'easy')\n self.assertTrue(list[2] == 'stove')\n\n def test_getRecipeFromFile(self):\n list = grocery_functions.get_recipe_from_recipe_file(\n 'test-recipes\\\\Healthy Roasted Chicken and Veggies (one pan).txt')\n self.assertTrue(list[2] ==\n '1 cup bell pepper, chopped (any colors you like)')\n self.assertTrue(list[10] == '1 teaspoon italian seasoning')\n self.assertTrue(list[15] ==\n 'Place the chicken and veggies in a medium roasting dish or sheet pan. 
Add the olive oil, '\n )\n\n def test_condenseList(self):\n recipe_names = grocery_functions.get_recipe_names('test-recipes')\n grocery_list = []\n for recipe in recipe_names:\n grocery_list += grocery_functions.get_ingredients_from_recipe_file(\n 'test-recipes\\\\' + recipe + '.txt')\n grocery_list = grocery_functions.condense_grocery_list(grocery_list)\n <mask token>\n <mask token>\n <mask token>\n\n def test_update_default_ing_dept_file(self):\n grocery_functions.update_default_ing_dept_file(grocery_functions.\n get_all_ingredients('test-recipes'))\n\n def suite(self):\n return unittest.TestLoader().loadTestsFromTestCase(TestGroceryFuncs)\n\n\n<mask token>\n",
"step-3": "<mask token>\nsys.path.insert(0, '../src/')\n<mask token>\n\n\nclass TestGroceryFuncs(unittest.TestCase):\n\n def test_getRecipeNames(self):\n recipe_names = grocery_functions.get_recipe_names('test-recipes')\n self.assertTrue(recipe_names[0] == 'Cajun Chicken & Rice')\n self.assertTrue(recipe_names[1] == 'Chicken Curry in a Hurry')\n self.assertTrue(recipe_names[2] == 'Chicken_Zucchini_and_Prosciutto')\n self.assertTrue(recipe_names[3] ==\n 'Healthy Roasted Chicken and Veggies (one pan)')\n self.assertTrue(recipe_names[4] ==\n 'Kielbasa, Pepper, Onion and Potato Hash')\n\n def test_getIngredientsFromFile(self):\n list = grocery_functions.get_ingredients_from_recipe_file(\n 'test-recipes\\\\Kielbasa, Pepper, Onion and Potato Hash.txt')\n self.assertTrue(list[0].name == 'turkey kielbasa')\n self.assertTrue(list[0].unit == 'ounce')\n self.assertTrue(list[0].number == '14')\n self.assertTrue(list[2].name == 'non-green bell pepper')\n self.assertTrue(list[2].unit == '')\n self.assertTrue(list[2].number == '1')\n self.assertTrue(list[6].name == 'salt')\n self.assertTrue(list[6].unit == '')\n self.assertTrue(list[6].number == '1')\n\n def test_getTagsFromFile(self):\n list = grocery_functions.get_tags_from_recipe_file(\n 'test-recipes\\\\Chicken Curry in a Hurry.txt')\n self.assertTrue(list[0] == 'chicken')\n self.assertTrue(list[1] == 'easy')\n self.assertTrue(list[2] == 'stove')\n\n def test_getRecipeFromFile(self):\n list = grocery_functions.get_recipe_from_recipe_file(\n 'test-recipes\\\\Healthy Roasted Chicken and Veggies (one pan).txt')\n self.assertTrue(list[2] ==\n '1 cup bell pepper, chopped (any colors you like)')\n self.assertTrue(list[10] == '1 teaspoon italian seasoning')\n self.assertTrue(list[15] ==\n 'Place the chicken and veggies in a medium roasting dish or sheet pan. 
Add the olive oil, '\n )\n\n def test_condenseList(self):\n recipe_names = grocery_functions.get_recipe_names('test-recipes')\n grocery_list = []\n for recipe in recipe_names:\n grocery_list += grocery_functions.get_ingredients_from_recipe_file(\n 'test-recipes\\\\' + recipe + '.txt')\n grocery_list = grocery_functions.condense_grocery_list(grocery_list)\n\n def test_makeAllIngredientsFile(self):\n grocery_functions.make_all_ingredients_file()\n\n def test_getItemDeptDicts(self):\n grocery_functions.get_item_dept_dicts(\n 'Smiths-Eu-JT-ItemDepartments.txt')\n\n def test_checkRecipeFormat(self):\n errors = grocery_functions.check_recipe_format('test-recipes', False)\n self.assertTrue(errors == [])\n errors = grocery_functions.check_recipe_format('broken-test-recipes',\n False)\n self.assertTrue(\n 'invalid format, \"1 lb, chicken breasts\" in: broken-test-recipes//broken_recipe.txt'\n in errors)\n self.assertTrue(\n 'invalid heading, \"wrong_header\" in file: broken-test-recipes//broken_recipe.txt'\n in errors)\n self.assertTrue(\n 'Blank recipe in: broken-test-recipes//broken_recipe.txt' in errors\n )\n\n def test_update_default_ing_dept_file(self):\n grocery_functions.update_default_ing_dept_file(grocery_functions.\n get_all_ingredients('test-recipes'))\n\n def suite(self):\n return unittest.TestLoader().loadTestsFromTestCase(TestGroceryFuncs)\n\n\nif __name__ == '__main__':\n suite = unittest.TestLoader().loadTestsFromTestCase(TestGroceryFuncs)\n unittest.TextTestRunner(verbosity=2).run(suite)\n",
"step-4": "__author__ = 'Joe'\nimport sys\nsys.path.insert(0, '../src/')\nimport grocery_functions\nimport unittest\n\n\nclass TestGroceryFuncs(unittest.TestCase):\n\n def test_getRecipeNames(self):\n recipe_names = grocery_functions.get_recipe_names('test-recipes')\n self.assertTrue(recipe_names[0] == 'Cajun Chicken & Rice')\n self.assertTrue(recipe_names[1] == 'Chicken Curry in a Hurry')\n self.assertTrue(recipe_names[2] == 'Chicken_Zucchini_and_Prosciutto')\n self.assertTrue(recipe_names[3] ==\n 'Healthy Roasted Chicken and Veggies (one pan)')\n self.assertTrue(recipe_names[4] ==\n 'Kielbasa, Pepper, Onion and Potato Hash')\n\n def test_getIngredientsFromFile(self):\n list = grocery_functions.get_ingredients_from_recipe_file(\n 'test-recipes\\\\Kielbasa, Pepper, Onion and Potato Hash.txt')\n self.assertTrue(list[0].name == 'turkey kielbasa')\n self.assertTrue(list[0].unit == 'ounce')\n self.assertTrue(list[0].number == '14')\n self.assertTrue(list[2].name == 'non-green bell pepper')\n self.assertTrue(list[2].unit == '')\n self.assertTrue(list[2].number == '1')\n self.assertTrue(list[6].name == 'salt')\n self.assertTrue(list[6].unit == '')\n self.assertTrue(list[6].number == '1')\n\n def test_getTagsFromFile(self):\n list = grocery_functions.get_tags_from_recipe_file(\n 'test-recipes\\\\Chicken Curry in a Hurry.txt')\n self.assertTrue(list[0] == 'chicken')\n self.assertTrue(list[1] == 'easy')\n self.assertTrue(list[2] == 'stove')\n\n def test_getRecipeFromFile(self):\n list = grocery_functions.get_recipe_from_recipe_file(\n 'test-recipes\\\\Healthy Roasted Chicken and Veggies (one pan).txt')\n self.assertTrue(list[2] ==\n '1 cup bell pepper, chopped (any colors you like)')\n self.assertTrue(list[10] == '1 teaspoon italian seasoning')\n self.assertTrue(list[15] ==\n 'Place the chicken and veggies in a medium roasting dish or sheet pan. 
Add the olive oil, '\n )\n\n def test_condenseList(self):\n recipe_names = grocery_functions.get_recipe_names('test-recipes')\n grocery_list = []\n for recipe in recipe_names:\n grocery_list += grocery_functions.get_ingredients_from_recipe_file(\n 'test-recipes\\\\' + recipe + '.txt')\n grocery_list = grocery_functions.condense_grocery_list(grocery_list)\n\n def test_makeAllIngredientsFile(self):\n grocery_functions.make_all_ingredients_file()\n\n def test_getItemDeptDicts(self):\n grocery_functions.get_item_dept_dicts(\n 'Smiths-Eu-JT-ItemDepartments.txt')\n\n def test_checkRecipeFormat(self):\n errors = grocery_functions.check_recipe_format('test-recipes', False)\n self.assertTrue(errors == [])\n errors = grocery_functions.check_recipe_format('broken-test-recipes',\n False)\n self.assertTrue(\n 'invalid format, \"1 lb, chicken breasts\" in: broken-test-recipes//broken_recipe.txt'\n in errors)\n self.assertTrue(\n 'invalid heading, \"wrong_header\" in file: broken-test-recipes//broken_recipe.txt'\n in errors)\n self.assertTrue(\n 'Blank recipe in: broken-test-recipes//broken_recipe.txt' in errors\n )\n\n def test_update_default_ing_dept_file(self):\n grocery_functions.update_default_ing_dept_file(grocery_functions.\n get_all_ingredients('test-recipes'))\n\n def suite(self):\n return unittest.TestLoader().loadTestsFromTestCase(TestGroceryFuncs)\n\n\nif __name__ == '__main__':\n suite = unittest.TestLoader().loadTestsFromTestCase(TestGroceryFuncs)\n unittest.TextTestRunner(verbosity=2).run(suite)\n",
"step-5": "__author__ = 'Joe'\nimport sys\nsys.path.insert(0,'../src/')\n\nimport grocery_functions\nimport unittest\n\nclass TestGroceryFuncs(unittest.TestCase):\n\n def test_getRecipeNames(self):\n recipe_names = grocery_functions.get_recipe_names(\"test-recipes\")\n self.assertTrue(recipe_names[0] == \"Cajun Chicken & Rice\")\n self.assertTrue(recipe_names[1] == \"Chicken Curry in a Hurry\")\n self.assertTrue(recipe_names[2] == 'Chicken_Zucchini_and_Prosciutto')\n self.assertTrue(recipe_names[3] == 'Healthy Roasted Chicken and Veggies (one pan)')\n self.assertTrue(recipe_names[4] == 'Kielbasa, Pepper, Onion and Potato Hash')\n\n def test_getIngredientsFromFile(self):\n list=grocery_functions.get_ingredients_from_recipe_file(\"test-recipes\\Kielbasa, Pepper, Onion and Potato Hash.txt\")\n self.assertTrue(list[0].name == 'turkey kielbasa')\n self.assertTrue(list[0].unit == 'ounce')\n self.assertTrue(list[0].number == '14')\n self.assertTrue(list[2].name == 'non-green bell pepper')\n self.assertTrue(list[2].unit == '')\n self.assertTrue(list[2].number == '1')\n self.assertTrue(list[6].name == 'salt')\n self.assertTrue(list[6].unit == '')\n self.assertTrue(list[6].number == '1')\n\n def test_getTagsFromFile(self):\n list=grocery_functions.get_tags_from_recipe_file(\"test-recipes\\Chicken Curry in a Hurry.txt\")\n self.assertTrue(list[0] == 'chicken')\n self.assertTrue(list[1] == 'easy')\n self.assertTrue(list[2] == 'stove')\n\n def test_getRecipeFromFile(self):\n list=grocery_functions.get_recipe_from_recipe_file(\"test-recipes\\Healthy Roasted Chicken and Veggies (one pan).txt\")\n self.assertTrue(list[2]==\"1 cup bell pepper, chopped (any colors you like)\")\n self.assertTrue(list[10]==\"1 teaspoon italian seasoning\")\n self.assertTrue(list[15]==\"Place the chicken and veggies in a medium roasting dish or sheet pan. 
Add the olive oil, \")\n\n def test_condenseList(self):\n recipe_names = grocery_functions.get_recipe_names(\"test-recipes\")\n grocery_list=[]\n for recipe in recipe_names:\n grocery_list += grocery_functions.get_ingredients_from_recipe_file(\"test-recipes\\\\\"+recipe+\".txt\")\n grocery_list=grocery_functions.condense_grocery_list(grocery_list)\n # grocery_functions.print_grocery_list(grocery_list)\n # grocery_functions.sort_and_print_grocery_List(grocery_list, \"Smiths-Eu-JT-ItemDepartments.txt\")\n\n def test_makeAllIngredientsFile(self):\n grocery_functions.make_all_ingredients_file()\n\n def test_getItemDeptDicts(self):\n grocery_functions.get_item_dept_dicts(\"Smiths-Eu-JT-ItemDepartments.txt\")\n\n def test_checkRecipeFormat(self):\n errors=grocery_functions.check_recipe_format(\"test-recipes\", False)\n self.assertTrue(errors == [])\n errors=grocery_functions.check_recipe_format(\"broken-test-recipes\", False)\n self.assertTrue('invalid format, \"1 lb, chicken breasts\" in: broken-test-recipes//broken_recipe.txt' in errors)\n self.assertTrue('invalid heading, \"wrong_header\" in file: broken-test-recipes//broken_recipe.txt' in errors)\n self.assertTrue('Blank recipe in: broken-test-recipes//broken_recipe.txt' in errors)\n\n def test_update_default_ing_dept_file(self):\n grocery_functions.update_default_ing_dept_file(grocery_functions.get_all_ingredients(\"test-recipes\"))\n\n def suite(self):\n return unittest.TestLoader().loadTestsFromTestCase(TestGroceryFuncs)\n\nif __name__ == '__main__':\n suite = unittest.TestLoader().loadTestsFromTestCase(TestGroceryFuncs)\n unittest.TextTestRunner(verbosity=2).run(suite)",
"step-ids": [
4,
8,
12,
14,
15
]
}
|
[
4,
8,
12,
14,
15
] |
<|reserved_special_token_0|>
def obtain_confidence(sim=False):
if sim:
noise = np.random.normal(0, 0.6, size=1)[0]
return noise
filename = 'Confidence.txt'
lines = open(filename).read().splitlines()
try:
confidence = float(lines[-1])
except ValueError:
print('Failed to convert confidence value to float')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
confidence = float(lines[-1])
except IndexError:
print('The confidence file is empty')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
confidence = float(lines[-1])
return confidence
<|reserved_special_token_0|>
def plot_gp(optimizer, logpath, i, utility_function, bounds, x, y=None):
fig = plt.figure(figsize=(16, 10))
steps = len(optimizer.res)
fig.suptitle('Gaussian Process and Utility Function After {} Steps'.
format(steps), fontdict={'size': 30})
gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
axis = plt.subplot(gs[0])
acq = plt.subplot(gs[1])
x_obs = np.array([[res['params']['x']] for res in optimizer.res])
y_obs = np.array([res['target'] for res in optimizer.res])
y_obs, norm = normalize(y_obs.reshape(1, -1), return_norm=True)
y_obs = y_obs.flatten()
mu, sigma = posterior(optimizer, x_obs, y_obs, x)
utility = utility_function.utility(x, optimizer._gp, y_obs.max())
mu = mu * norm
sigma = sigma * norm
y_obs = y_obs * norm
if y is not None:
axis.plot(x, y, linewidth=3, label='Target')
axis.plot(x_obs.flatten(), y_obs, 'D', markersize=8, label=
u'Observations', color='r')
axis.plot(x, mu, '--', color='k', label='Prediction')
axis.fill(np.concatenate([x, x[::-1]]), np.concatenate([mu - 1.96 *
sigma, (mu + 1.96 * sigma)[::-1]]), alpha=0.6, fc='c', ec='None',
label='95% confidence interval')
axis.set_ylabel('f(x)', fontdict={'size': 20})
axis.set_xlabel('x', fontdict={'size': 20})
acq.plot(x, utility, label='Utility Function', color='purple')
acq.plot(x[np.argmax(utility)], np.max(utility), '*', markersize=15,
label=u'Next Best Guess', markerfacecolor='gold', markeredgecolor=
'k', markeredgewidth=1)
acq.set_ylabel('Utility', fontdict={'size': 20})
acq.set_xlabel('x', fontdict={'size': 20})
axis.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)
acq.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)
fig.savefig(logpath + '/fig_{}'.format(i))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def blackbox_function(x, y=None, sim=False):
if sim:
if y is None:
return -x ** 2 + 6
else:
return -(x + y) ** 2 + 6
filename = 'Output.txt'
lines = open(filename).read().splitlines()
try:
latency = float(lines[-1])
except ValueError:
print('Failed to convert value to float')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
latency = float(lines[-1])
except IndexError:
print('The latent file is empty')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
latency = float(lines[-1])
return latency
def obtain_confidence(sim=False):
if sim:
noise = np.random.normal(0, 0.6, size=1)[0]
return noise
filename = 'Confidence.txt'
lines = open(filename).read().splitlines()
try:
confidence = float(lines[-1])
except ValueError:
print('Failed to convert confidence value to float')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
confidence = float(lines[-1])
except IndexError:
print('The confidence file is empty')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
confidence = float(lines[-1])
return confidence
<|reserved_special_token_0|>
def plot_gp(optimizer, logpath, i, utility_function, bounds, x, y=None):
fig = plt.figure(figsize=(16, 10))
steps = len(optimizer.res)
fig.suptitle('Gaussian Process and Utility Function After {} Steps'.
format(steps), fontdict={'size': 30})
gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
axis = plt.subplot(gs[0])
acq = plt.subplot(gs[1])
x_obs = np.array([[res['params']['x']] for res in optimizer.res])
y_obs = np.array([res['target'] for res in optimizer.res])
y_obs, norm = normalize(y_obs.reshape(1, -1), return_norm=True)
y_obs = y_obs.flatten()
mu, sigma = posterior(optimizer, x_obs, y_obs, x)
utility = utility_function.utility(x, optimizer._gp, y_obs.max())
mu = mu * norm
sigma = sigma * norm
y_obs = y_obs * norm
if y is not None:
axis.plot(x, y, linewidth=3, label='Target')
axis.plot(x_obs.flatten(), y_obs, 'D', markersize=8, label=
u'Observations', color='r')
axis.plot(x, mu, '--', color='k', label='Prediction')
axis.fill(np.concatenate([x, x[::-1]]), np.concatenate([mu - 1.96 *
sigma, (mu + 1.96 * sigma)[::-1]]), alpha=0.6, fc='c', ec='None',
label='95% confidence interval')
axis.set_ylabel('f(x)', fontdict={'size': 20})
axis.set_xlabel('x', fontdict={'size': 20})
acq.plot(x, utility, label='Utility Function', color='purple')
acq.plot(x[np.argmax(utility)], np.max(utility), '*', markersize=15,
label=u'Next Best Guess', markerfacecolor='gold', markeredgecolor=
'k', markeredgewidth=1)
acq.set_ylabel('Utility', fontdict={'size': 20})
acq.set_xlabel('x', fontdict={'size': 20})
axis.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)
acq.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)
fig.savefig(logpath + '/fig_{}'.format(i))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def blackbox_function(x, y=None, sim=False):
if sim:
if y is None:
return -x ** 2 + 6
else:
return -(x + y) ** 2 + 6
filename = 'Output.txt'
lines = open(filename).read().splitlines()
try:
latency = float(lines[-1])
except ValueError:
print('Failed to convert value to float')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
latency = float(lines[-1])
except IndexError:
print('The latent file is empty')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
latency = float(lines[-1])
return latency
def obtain_confidence(sim=False):
if sim:
noise = np.random.normal(0, 0.6, size=1)[0]
return noise
filename = 'Confidence.txt'
lines = open(filename).read().splitlines()
try:
confidence = float(lines[-1])
except ValueError:
print('Failed to convert confidence value to float')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
confidence = float(lines[-1])
except IndexError:
print('The confidence file is empty')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
confidence = float(lines[-1])
return confidence
def posterior(optimizer, x_obs, y_obs, grid):
optimizer._gp.fit(x_obs, y_obs)
mu, sigma = optimizer._gp.predict(grid, return_std=True)
return mu, sigma
def plot_gp(optimizer, logpath, i, utility_function, bounds, x, y=None):
fig = plt.figure(figsize=(16, 10))
steps = len(optimizer.res)
fig.suptitle('Gaussian Process and Utility Function After {} Steps'.
format(steps), fontdict={'size': 30})
gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
axis = plt.subplot(gs[0])
acq = plt.subplot(gs[1])
x_obs = np.array([[res['params']['x']] for res in optimizer.res])
y_obs = np.array([res['target'] for res in optimizer.res])
y_obs, norm = normalize(y_obs.reshape(1, -1), return_norm=True)
y_obs = y_obs.flatten()
mu, sigma = posterior(optimizer, x_obs, y_obs, x)
utility = utility_function.utility(x, optimizer._gp, y_obs.max())
mu = mu * norm
sigma = sigma * norm
y_obs = y_obs * norm
if y is not None:
axis.plot(x, y, linewidth=3, label='Target')
axis.plot(x_obs.flatten(), y_obs, 'D', markersize=8, label=
u'Observations', color='r')
axis.plot(x, mu, '--', color='k', label='Prediction')
axis.fill(np.concatenate([x, x[::-1]]), np.concatenate([mu - 1.96 *
sigma, (mu + 1.96 * sigma)[::-1]]), alpha=0.6, fc='c', ec='None',
label='95% confidence interval')
axis.set_ylabel('f(x)', fontdict={'size': 20})
axis.set_xlabel('x', fontdict={'size': 20})
acq.plot(x, utility, label='Utility Function', color='purple')
acq.plot(x[np.argmax(utility)], np.max(utility), '*', markersize=15,
label=u'Next Best Guess', markerfacecolor='gold', markeredgecolor=
'k', markeredgewidth=1)
acq.set_ylabel('Utility', fontdict={'size': 20})
acq.set_xlabel('x', fontdict={'size': 20})
axis.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)
acq.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)
fig.savefig(logpath + '/fig_{}'.format(i))
<|reserved_special_token_1|>
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import gridspec
from sklearn.preprocessing import normalize
def blackbox_function(x, y=None, sim=False):
if sim:
if y is None:
return -x ** 2 + 6
else:
return -(x + y) ** 2 + 6
filename = 'Output.txt'
lines = open(filename).read().splitlines()
try:
latency = float(lines[-1])
except ValueError:
print('Failed to convert value to float')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
latency = float(lines[-1])
except IndexError:
print('The latent file is empty')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
latency = float(lines[-1])
return latency
def obtain_confidence(sim=False):
if sim:
noise = np.random.normal(0, 0.6, size=1)[0]
return noise
filename = 'Confidence.txt'
lines = open(filename).read().splitlines()
try:
confidence = float(lines[-1])
except ValueError:
print('Failed to convert confidence value to float')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
confidence = float(lines[-1])
except IndexError:
print('The confidence file is empty')
wait = input('PRESS ENTER TO CONTINUE.')
lines = open(filename).read().splitlines()
confidence = float(lines[-1])
return confidence
def posterior(optimizer, x_obs, y_obs, grid):
optimizer._gp.fit(x_obs, y_obs)
mu, sigma = optimizer._gp.predict(grid, return_std=True)
return mu, sigma
def plot_gp(optimizer, logpath, i, utility_function, bounds, x, y=None):
fig = plt.figure(figsize=(16, 10))
steps = len(optimizer.res)
fig.suptitle('Gaussian Process and Utility Function After {} Steps'.
format(steps), fontdict={'size': 30})
gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
axis = plt.subplot(gs[0])
acq = plt.subplot(gs[1])
x_obs = np.array([[res['params']['x']] for res in optimizer.res])
y_obs = np.array([res['target'] for res in optimizer.res])
y_obs, norm = normalize(y_obs.reshape(1, -1), return_norm=True)
y_obs = y_obs.flatten()
mu, sigma = posterior(optimizer, x_obs, y_obs, x)
utility = utility_function.utility(x, optimizer._gp, y_obs.max())
mu = mu * norm
sigma = sigma * norm
y_obs = y_obs * norm
if y is not None:
axis.plot(x, y, linewidth=3, label='Target')
axis.plot(x_obs.flatten(), y_obs, 'D', markersize=8, label=
u'Observations', color='r')
axis.plot(x, mu, '--', color='k', label='Prediction')
axis.fill(np.concatenate([x, x[::-1]]), np.concatenate([mu - 1.96 *
sigma, (mu + 1.96 * sigma)[::-1]]), alpha=0.6, fc='c', ec='None',
label='95% confidence interval')
axis.set_ylabel('f(x)', fontdict={'size': 20})
axis.set_xlabel('x', fontdict={'size': 20})
acq.plot(x, utility, label='Utility Function', color='purple')
acq.plot(x[np.argmax(utility)], np.max(utility), '*', markersize=15,
label=u'Next Best Guess', markerfacecolor='gold', markeredgecolor=
'k', markeredgewidth=1)
acq.set_ylabel('Utility', fontdict={'size': 20})
acq.set_xlabel('x', fontdict={'size': 20})
axis.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)
acq.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)
fig.savefig(logpath + '/fig_{}'.format(i))
<|reserved_special_token_1|>
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import gridspec
from sklearn.preprocessing import normalize
def blackbox_function(x, y=None, sim=False):
    """Return the objective value for the Bayesian optimizer.

    In simulation mode the analytic target ``-x**2 + 6`` (or
    ``-(x + y)**2 + 6`` when *y* is given) is returned directly.
    Otherwise the value is the last line of ``Output.txt`` parsed as a
    float; if the line cannot be parsed or the file is empty, the user
    is prompted once and the file is re-read (a second failure
    propagates the exception).

    Parameters:
        x: first input parameter.
        y: optional second input parameter (simulation mode only).
        sim: when True, evaluate the analytic test function instead of
            reading the measurement file.
    """
    if sim:
        if y is None:
            return -x ** 2 + 6
        return -(x + y) ** 2 + 6

    # Reading the magnitude of the N170 data.
    filename = 'Output.txt'

    def read_lines():
        # Context manager closes the handle promptly; the original
        # 'open(filename).read()' leaked it until garbage collection.
        with open(filename) as f:
            return f.read().splitlines()

    lines = read_lines()
    try:
        latency = float(lines[-1])
    except ValueError:
        print('Failed to convert value to float')
        input("PRESS ENTER TO CONTINUE.")
        latency = float(read_lines()[-1])
    except IndexError:
        print('The latent file is empty')
        input("PRESS ENTER TO CONTINUE.")
        latency = float(read_lines()[-1])
    return latency
def obtain_confidence(sim=False):
    """Return the confidence level associated with the target value.

    In simulation mode a Gaussian noise sample (mean 0, sigma 0.60) is
    returned as a stand-in.  Otherwise the value is the last line of
    ``Confidence.txt`` parsed as a float; if the line cannot be parsed
    or the file is empty, the user is prompted once and the file is
    re-read (a second failure propagates the exception).

    Parameters:
        sim: when True, return simulated noise instead of reading the file.
    """
    if sim:
        return np.random.normal(0, 0.60, size=1)[0]

    # Reading the Confidence levels of the target value.
    filename = 'Confidence.txt'

    def read_lines():
        # Context manager closes the handle promptly; the original
        # 'open(filename).read()' leaked it until garbage collection.
        with open(filename) as f:
            return f.read().splitlines()

    lines = read_lines()
    try:
        confidence = float(lines[-1])
    except ValueError:
        print('Failed to convert confidence value to float')
        input("PRESS ENTER TO CONTINUE.")
        confidence = float(read_lines()[-1])
    except IndexError:
        print('The confidence file is empty')
        input("PRESS ENTER TO CONTINUE.")
        confidence = float(read_lines()[-1])
    return confidence
def posterior(optimizer, x_obs, y_obs, grid):
    """Fit the optimizer's internal GP to the observations and predict
    the mean and standard deviation on the evaluation grid."""
    model = optimizer._gp
    model.fit(x_obs, y_obs)
    mean, std = model.predict(grid, return_std=True)
    return mean, std
def plot_gp(optimizer, logpath, i, utility_function, bounds, x, y=None):
    """Plot the GP posterior (top panel) and the utility function (bottom).

    The top panel shows the recorded observations, the GP predictive mean,
    and a 95% confidence band; when *y* is given the true target curve is
    drawn as well.  The bottom panel shows the acquisition (utility)
    function over *x* with its argmax marked as the next suggested sample.
    The figure is written to ``<logpath>/fig_<i>``.

    Parameters:
        optimizer: Bayesian optimizer holding ``res`` (observation history)
            and the internal GP model ``_gp``.
        logpath: directory the figure file is saved into.
        i: iteration index used in the output file name.
        utility_function: object whose ``utility(x, gp, y_max)`` evaluates
            the acquisition function.
        bounds: unused in this body; kept for caller compatibility.
        x: evaluation grid (column vector) for the GP and the utility.
        y: optional ground-truth values over *x* to plot as 'Target'.
    """
    fig = plt.figure(figsize=(16, 10))
    steps = len(optimizer.res)
    fig.suptitle(
        'Gaussian Process and Utility Function After {} Steps'.format(steps),
        fontdict={'size': 30}
    )

    # Two stacked axes: the GP panel gets 3x the height of the utility panel.
    gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
    axis = plt.subplot(gs[0])
    acq = plt.subplot(gs[1])

    # Observation history recorded by the optimizer.
    x_obs = np.array([[res["params"]["x"]] for res in optimizer.res])
    y_obs = np.array([res["target"] for res in optimizer.res])

    # Normalize targets before fitting; 'norm' lets us undo it for display.
    y_obs, norm = normalize(y_obs.reshape(1, -1), return_norm=True)
    y_obs = y_obs.flatten()

    mu, sigma = posterior(optimizer, x_obs, y_obs, x)
    utility = utility_function.utility(x, optimizer._gp, y_obs.max())

    # Un-normalize so the plot is in the original target scale.
    mu = mu * norm
    sigma = sigma * norm
    y_obs = y_obs * norm

    if y is not None:
        axis.plot(x, y, linewidth=3, label='Target')
    axis.plot(x_obs.flatten(), y_obs, 'D', markersize=8, label=u'Observations', color='r')
    axis.plot(x, mu, '--', color='k', label='Prediction')

    # 95% confidence band: mean +/- 1.96 standard deviations, drawn as a
    # closed polygon (grid forward along the lower edge, reversed back).
    axis.fill(np.concatenate([x, x[::-1]]),
              np.concatenate([mu - 1.9600 * sigma, (mu + 1.9600 * sigma)[::-1]]),
              alpha=.6, fc='c', ec='None', label='95% confidence interval')
    axis.set_ylabel('f(x)', fontdict={'size': 20})
    axis.set_xlabel('x', fontdict={'size': 20})

    acq.plot(x, utility, label='Utility Function', color='purple')
    # Highlight the maximizer of the utility: the next point to sample.
    acq.plot(x[np.argmax(utility)], np.max(utility), '*', markersize=15,
             label=u'Next Best Guess', markerfacecolor='gold', markeredgecolor='k', markeredgewidth=1)
    acq.set_ylabel('Utility', fontdict={'size': 20})
    acq.set_xlabel('x', fontdict={'size': 20})

    axis.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.)
    acq.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.)

    fig.savefig(logpath + '/fig_{}'.format(i))
    # Close the figure so repeated calls in an optimization loop do not
    # accumulate open matplotlib figures (the original never closed them).
    plt.close(fig)
|
flexible
|
{
"blob_id": "6defbe25fc17e53df2fc4d32886bba1cb141bdfd",
"index": 7018,
"step-1": "<mask token>\n\n\ndef obtain_confidence(sim=False):\n if sim:\n noise = np.random.normal(0, 0.6, size=1)[0]\n return noise\n filename = 'Confidence.txt'\n lines = open(filename).read().splitlines()\n try:\n confidence = float(lines[-1])\n except ValueError:\n print('Failed to convert confidence value to float')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n confidence = float(lines[-1])\n except IndexError:\n print('The confidence file is empty')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n confidence = float(lines[-1])\n return confidence\n\n\n<mask token>\n\n\ndef plot_gp(optimizer, logpath, i, utility_function, bounds, x, y=None):\n fig = plt.figure(figsize=(16, 10))\n steps = len(optimizer.res)\n fig.suptitle('Gaussian Process and Utility Function After {} Steps'.\n format(steps), fontdict={'size': 30})\n gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])\n axis = plt.subplot(gs[0])\n acq = plt.subplot(gs[1])\n x_obs = np.array([[res['params']['x']] for res in optimizer.res])\n y_obs = np.array([res['target'] for res in optimizer.res])\n y_obs, norm = normalize(y_obs.reshape(1, -1), return_norm=True)\n y_obs = y_obs.flatten()\n mu, sigma = posterior(optimizer, x_obs, y_obs, x)\n utility = utility_function.utility(x, optimizer._gp, y_obs.max())\n mu = mu * norm\n sigma = sigma * norm\n y_obs = y_obs * norm\n if y is not None:\n axis.plot(x, y, linewidth=3, label='Target')\n axis.plot(x_obs.flatten(), y_obs, 'D', markersize=8, label=\n u'Observations', color='r')\n axis.plot(x, mu, '--', color='k', label='Prediction')\n axis.fill(np.concatenate([x, x[::-1]]), np.concatenate([mu - 1.96 *\n sigma, (mu + 1.96 * sigma)[::-1]]), alpha=0.6, fc='c', ec='None',\n label='95% confidence interval')\n axis.set_ylabel('f(x)', fontdict={'size': 20})\n axis.set_xlabel('x', fontdict={'size': 20})\n acq.plot(x, utility, label='Utility Function', color='purple')\n 
acq.plot(x[np.argmax(utility)], np.max(utility), '*', markersize=15,\n label=u'Next Best Guess', markerfacecolor='gold', markeredgecolor=\n 'k', markeredgewidth=1)\n acq.set_ylabel('Utility', fontdict={'size': 20})\n acq.set_xlabel('x', fontdict={'size': 20})\n axis.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)\n acq.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)\n fig.savefig(logpath + '/fig_{}'.format(i))\n",
"step-2": "<mask token>\n\n\ndef blackbox_function(x, y=None, sim=False):\n if sim:\n if y is None:\n return -x ** 2 + 6\n else:\n return -(x + y) ** 2 + 6\n filename = 'Output.txt'\n lines = open(filename).read().splitlines()\n try:\n latency = float(lines[-1])\n except ValueError:\n print('Failed to convert value to float')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n latency = float(lines[-1])\n except IndexError:\n print('The latent file is empty')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n latency = float(lines[-1])\n return latency\n\n\ndef obtain_confidence(sim=False):\n if sim:\n noise = np.random.normal(0, 0.6, size=1)[0]\n return noise\n filename = 'Confidence.txt'\n lines = open(filename).read().splitlines()\n try:\n confidence = float(lines[-1])\n except ValueError:\n print('Failed to convert confidence value to float')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n confidence = float(lines[-1])\n except IndexError:\n print('The confidence file is empty')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n confidence = float(lines[-1])\n return confidence\n\n\n<mask token>\n\n\ndef plot_gp(optimizer, logpath, i, utility_function, bounds, x, y=None):\n fig = plt.figure(figsize=(16, 10))\n steps = len(optimizer.res)\n fig.suptitle('Gaussian Process and Utility Function After {} Steps'.\n format(steps), fontdict={'size': 30})\n gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])\n axis = plt.subplot(gs[0])\n acq = plt.subplot(gs[1])\n x_obs = np.array([[res['params']['x']] for res in optimizer.res])\n y_obs = np.array([res['target'] for res in optimizer.res])\n y_obs, norm = normalize(y_obs.reshape(1, -1), return_norm=True)\n y_obs = y_obs.flatten()\n mu, sigma = posterior(optimizer, x_obs, y_obs, x)\n utility = utility_function.utility(x, optimizer._gp, y_obs.max())\n mu = mu * norm\n 
sigma = sigma * norm\n y_obs = y_obs * norm\n if y is not None:\n axis.plot(x, y, linewidth=3, label='Target')\n axis.plot(x_obs.flatten(), y_obs, 'D', markersize=8, label=\n u'Observations', color='r')\n axis.plot(x, mu, '--', color='k', label='Prediction')\n axis.fill(np.concatenate([x, x[::-1]]), np.concatenate([mu - 1.96 *\n sigma, (mu + 1.96 * sigma)[::-1]]), alpha=0.6, fc='c', ec='None',\n label='95% confidence interval')\n axis.set_ylabel('f(x)', fontdict={'size': 20})\n axis.set_xlabel('x', fontdict={'size': 20})\n acq.plot(x, utility, label='Utility Function', color='purple')\n acq.plot(x[np.argmax(utility)], np.max(utility), '*', markersize=15,\n label=u'Next Best Guess', markerfacecolor='gold', markeredgecolor=\n 'k', markeredgewidth=1)\n acq.set_ylabel('Utility', fontdict={'size': 20})\n acq.set_xlabel('x', fontdict={'size': 20})\n axis.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)\n acq.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)\n fig.savefig(logpath + '/fig_{}'.format(i))\n",
"step-3": "<mask token>\n\n\ndef blackbox_function(x, y=None, sim=False):\n if sim:\n if y is None:\n return -x ** 2 + 6\n else:\n return -(x + y) ** 2 + 6\n filename = 'Output.txt'\n lines = open(filename).read().splitlines()\n try:\n latency = float(lines[-1])\n except ValueError:\n print('Failed to convert value to float')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n latency = float(lines[-1])\n except IndexError:\n print('The latent file is empty')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n latency = float(lines[-1])\n return latency\n\n\ndef obtain_confidence(sim=False):\n if sim:\n noise = np.random.normal(0, 0.6, size=1)[0]\n return noise\n filename = 'Confidence.txt'\n lines = open(filename).read().splitlines()\n try:\n confidence = float(lines[-1])\n except ValueError:\n print('Failed to convert confidence value to float')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n confidence = float(lines[-1])\n except IndexError:\n print('The confidence file is empty')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n confidence = float(lines[-1])\n return confidence\n\n\ndef posterior(optimizer, x_obs, y_obs, grid):\n optimizer._gp.fit(x_obs, y_obs)\n mu, sigma = optimizer._gp.predict(grid, return_std=True)\n return mu, sigma\n\n\ndef plot_gp(optimizer, logpath, i, utility_function, bounds, x, y=None):\n fig = plt.figure(figsize=(16, 10))\n steps = len(optimizer.res)\n fig.suptitle('Gaussian Process and Utility Function After {} Steps'.\n format(steps), fontdict={'size': 30})\n gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])\n axis = plt.subplot(gs[0])\n acq = plt.subplot(gs[1])\n x_obs = np.array([[res['params']['x']] for res in optimizer.res])\n y_obs = np.array([res['target'] for res in optimizer.res])\n y_obs, norm = normalize(y_obs.reshape(1, -1), return_norm=True)\n y_obs = 
y_obs.flatten()\n mu, sigma = posterior(optimizer, x_obs, y_obs, x)\n utility = utility_function.utility(x, optimizer._gp, y_obs.max())\n mu = mu * norm\n sigma = sigma * norm\n y_obs = y_obs * norm\n if y is not None:\n axis.plot(x, y, linewidth=3, label='Target')\n axis.plot(x_obs.flatten(), y_obs, 'D', markersize=8, label=\n u'Observations', color='r')\n axis.plot(x, mu, '--', color='k', label='Prediction')\n axis.fill(np.concatenate([x, x[::-1]]), np.concatenate([mu - 1.96 *\n sigma, (mu + 1.96 * sigma)[::-1]]), alpha=0.6, fc='c', ec='None',\n label='95% confidence interval')\n axis.set_ylabel('f(x)', fontdict={'size': 20})\n axis.set_xlabel('x', fontdict={'size': 20})\n acq.plot(x, utility, label='Utility Function', color='purple')\n acq.plot(x[np.argmax(utility)], np.max(utility), '*', markersize=15,\n label=u'Next Best Guess', markerfacecolor='gold', markeredgecolor=\n 'k', markeredgewidth=1)\n acq.set_ylabel('Utility', fontdict={'size': 20})\n acq.set_xlabel('x', fontdict={'size': 20})\n axis.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)\n acq.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)\n fig.savefig(logpath + '/fig_{}'.format(i))\n",
"step-4": "import numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom sklearn.preprocessing import normalize\n\n\ndef blackbox_function(x, y=None, sim=False):\n if sim:\n if y is None:\n return -x ** 2 + 6\n else:\n return -(x + y) ** 2 + 6\n filename = 'Output.txt'\n lines = open(filename).read().splitlines()\n try:\n latency = float(lines[-1])\n except ValueError:\n print('Failed to convert value to float')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n latency = float(lines[-1])\n except IndexError:\n print('The latent file is empty')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n latency = float(lines[-1])\n return latency\n\n\ndef obtain_confidence(sim=False):\n if sim:\n noise = np.random.normal(0, 0.6, size=1)[0]\n return noise\n filename = 'Confidence.txt'\n lines = open(filename).read().splitlines()\n try:\n confidence = float(lines[-1])\n except ValueError:\n print('Failed to convert confidence value to float')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n confidence = float(lines[-1])\n except IndexError:\n print('The confidence file is empty')\n wait = input('PRESS ENTER TO CONTINUE.')\n lines = open(filename).read().splitlines()\n confidence = float(lines[-1])\n return confidence\n\n\ndef posterior(optimizer, x_obs, y_obs, grid):\n optimizer._gp.fit(x_obs, y_obs)\n mu, sigma = optimizer._gp.predict(grid, return_std=True)\n return mu, sigma\n\n\ndef plot_gp(optimizer, logpath, i, utility_function, bounds, x, y=None):\n fig = plt.figure(figsize=(16, 10))\n steps = len(optimizer.res)\n fig.suptitle('Gaussian Process and Utility Function After {} Steps'.\n format(steps), fontdict={'size': 30})\n gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])\n axis = plt.subplot(gs[0])\n acq = plt.subplot(gs[1])\n x_obs = np.array([[res['params']['x']] for res in optimizer.res])\n y_obs = 
np.array([res['target'] for res in optimizer.res])\n y_obs, norm = normalize(y_obs.reshape(1, -1), return_norm=True)\n y_obs = y_obs.flatten()\n mu, sigma = posterior(optimizer, x_obs, y_obs, x)\n utility = utility_function.utility(x, optimizer._gp, y_obs.max())\n mu = mu * norm\n sigma = sigma * norm\n y_obs = y_obs * norm\n if y is not None:\n axis.plot(x, y, linewidth=3, label='Target')\n axis.plot(x_obs.flatten(), y_obs, 'D', markersize=8, label=\n u'Observations', color='r')\n axis.plot(x, mu, '--', color='k', label='Prediction')\n axis.fill(np.concatenate([x, x[::-1]]), np.concatenate([mu - 1.96 *\n sigma, (mu + 1.96 * sigma)[::-1]]), alpha=0.6, fc='c', ec='None',\n label='95% confidence interval')\n axis.set_ylabel('f(x)', fontdict={'size': 20})\n axis.set_xlabel('x', fontdict={'size': 20})\n acq.plot(x, utility, label='Utility Function', color='purple')\n acq.plot(x[np.argmax(utility)], np.max(utility), '*', markersize=15,\n label=u'Next Best Guess', markerfacecolor='gold', markeredgecolor=\n 'k', markeredgewidth=1)\n acq.set_ylabel('Utility', fontdict={'size': 20})\n acq.set_xlabel('x', fontdict={'size': 20})\n axis.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)\n acq.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.0)\n fig.savefig(logpath + '/fig_{}'.format(i))\n",
"step-5": "import numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom sklearn.preprocessing import normalize\n\ndef blackbox_function(x, y=None, sim=False):\n if sim:\n if y is None:\n return -x ** 2 + 6\n else:\n return -(x+y) ** 2 + 6\n\n # Reading the magnitude of the N170 data\n filename = 'Output.txt'\n lines = open(filename).read().splitlines()\n\n try:\n latency = float(lines[-1])\n except ValueError:\n print('Failed to convert value to float')\n wait = input(\"PRESS ENTER TO CONTINUE.\")\n lines = open(filename).read().splitlines()\n latency = float(lines[-1])\n except IndexError:\n print('The latent file is empty')\n wait = input(\"PRESS ENTER TO CONTINUE.\")\n lines = open(filename).read().splitlines()\n latency = float(lines[-1])\n return latency\n\n\ndef obtain_confidence(sim=False):\n if sim:\n noise = np.random.normal(0, 0.60, size=1)[0]\n return noise\n\n # Reading the Confidence levels of the target value\n filename = 'Confidence.txt'\n lines = open(filename).read().splitlines()\n\n try:\n confidence = float(lines[-1])\n except ValueError:\n print('Failed to convert confidence value to float')\n wait = input(\"PRESS ENTER TO CONTINUE.\")\n lines = open(filename).read().splitlines()\n confidence = float(lines[-1])\n except IndexError:\n print('The confidence file is empty')\n wait = input(\"PRESS ENTER TO CONTINUE.\")\n lines = open(filename).read().splitlines()\n confidence = float(lines[-1])\n return confidence\n\n\n\ndef posterior(optimizer, x_obs, y_obs, grid):\n\n optimizer._gp.fit(x_obs, y_obs)\n\n mu, sigma = optimizer._gp.predict(grid, return_std=True)\n return mu, sigma\n\n\ndef plot_gp(optimizer, logpath, i, utility_function, bounds, x, y=None):\n\n fig = plt.figure(figsize=(16, 10))\n steps = len(optimizer.res)\n fig.suptitle(\n 'Gaussian Process and Utility Function After {} Steps'.format(steps),\n fontdict={'size': 30}\n )\n\n gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])\n axis = plt.subplot(gs[0])\n 
acq = plt.subplot(gs[1])\n\n # x_obs = np.array([[res[\"params\"][\"x\"]] for res in optimizer.res])\n # y_obs = np.array([res[\"target\"] for res in optimizer.res])\n\n x_obs = np.array([[res[\"params\"][\"x\"]] for res in optimizer.res])\n y_obs = np.array([res[\"target\"] for res in optimizer.res])\n\n y_obs, norm = normalize(y_obs.reshape(1, -1), return_norm=True)\n y_obs = y_obs.flatten()\n\n mu, sigma = posterior(optimizer, x_obs, y_obs, x)\n utility = utility_function.utility(x, optimizer._gp, y_obs.max())\n\n # Unnormalize data\n mu = mu*norm\n sigma = sigma*norm\n y_obs = y_obs*norm\n\n if y is not None:\n axis.plot(x, y, linewidth=3, label='Target')\n axis.plot(x_obs.flatten(), y_obs, 'D', markersize=8, label=u'Observations', color='r')\n axis.plot(x, mu, '--', color='k', label='Prediction')\n\n axis.fill(np.concatenate([x, x[::-1]]),\n np.concatenate([mu - 1.9600 * sigma, (mu + 1.9600 * sigma)[::-1]]),\n alpha=.6, fc='c', ec='None', label='95% confidence interval')\n # if(bounds == \"large\"):\n # axis.set_xlim((-1, 1))\n # else:\n # axis.set_xlim((0, 1))\n # axis.set_ylim((None, None))\n axis.set_ylabel('f(x)', fontdict={'size': 20})\n axis.set_xlabel('x', fontdict={'size': 20})\n\n # utility = utility_function.utility(x, optimizer._gp, 0)\n acq.plot(x, utility, label='Utility Function', color='purple')\n acq.plot(x[np.argmax(utility)], np.max(utility), '*', markersize=15,\n label=u'Next Best Guess', markerfacecolor='gold', markeredgecolor='k', markeredgewidth=1)\n\n # if (bounds == \"large\"):\n # acq.set_xlim((-1, 1))\n # else:\n # acq.set_xlim((0, 1))\n # acq.set_ylim((0, np.max(utility) + 0.5))\n acq.set_ylabel('Utility', fontdict={'size': 20})\n acq.set_xlabel('x', fontdict={'size': 20})\n\n axis.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.)\n acq.legend(loc=2, bbox_to_anchor=(1.01, 1), borderaxespad=0.)\n\n fig.savefig(logpath+'/fig_{}'.format(i))\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.