column   type           values
index    int64          0 to 100k
blob_id  stringlengths  40 to 40
code     stringlengths  7 to 7.27M
steps    listlengths    1 to 1.25k
error    bool           2 classes
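Each record below pairs a raw Python source file (code, keyed by its 40-character blob_id) with a list of progressively abstracted rewrites of that file (steps) and an error flag marking files the rewrite pipeline could not process. A minimal sketch of reading records with this schema, assuming the dump comes from a Hugging Face-style dataset; the dataset id below is a hypothetical placeholder, not the real source of this dump:

from datasets import load_dataset

# "user/python-code-steps" is a stand-in id for illustration only.
ds = load_dataset("user/python-code-steps", split="train")

row = ds[0]
print(row["index"], row["blob_id"], row["error"])
print(row["code"][:120])   # the raw source text
print(len(row["steps"]))   # steps[0] is the file itself; later entries
print(row["steps"][-1])    # abstract it down to markers like "<import token>"

As the rows below show, the final element of steps is typically reduced entirely to placeholder markers such as <import token> and <class token>, while rows flagged error: true keep only the unprocessed source in a single-element list.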
99,400
58a90ea44db4e27ebfde1cfefffcff2147eba0b0
import pandas as pd
import tensorflow as tf
import numpy as np
[ "import pandas as pd\nimport tensorflow as tf\nimport numpy as np\n", "<import token>\n" ]
false
99,401
6ca70a7d59066a017bd328e5c641b27d9d816e4d
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-28 19:51
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('courses', '0003_tabla_tablita'),
    ]

    operations = [
        migrations.RenameField(
            model_name='tablita',
            old_name='ciudad',
            new_name='pais',
        ),
    ]
[ "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.5 on 2017-09-28 19:51\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('courses', '0003_tabla_tablita'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='tablita',\n old_name='ciudad',\n new_name='pais',\n ),\n ]\n", "from __future__ import unicode_literals\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('courses', '0003_tabla_tablita')]\n operations = [migrations.RenameField(model_name='tablita', old_name=\n 'ciudad', new_name='pais')]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('courses', '0003_tabla_tablita')]\n operations = [migrations.RenameField(model_name='tablita', old_name=\n 'ciudad', new_name='pais')]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n" ]
false
99,402
cae1f513da3b53107459c0ec43a8e4fa1258eb26
#zadanie 15
liczba = 10
print("Wartość liczby to {}, a {} to jej druga potęga".format(liczba, liczba**2))
[ "#zadanie 15\nliczba = 10\nprint(\"Wartość liczby to {}, a {} to jej druga potęga\".format(liczba, liczba**2))", "liczba = 10\nprint('Wartość liczby to {}, a {} to jej druga potęga'.format(liczba, \n liczba ** 2))\n", "<assignment token>\nprint('Wartość liczby to {}, a {} to jej druga potęga'.format(liczba, \n liczba ** 2))\n", "<assignment token>\n<code token>\n" ]
false
99,403
46f241700f11cba1ecd591bb633c729857ac770a
from Classes.Lexema import Lexema


def default_compare(stored_value: object, compared_value: object) -> bool:
    """
    Простейшее сравнение.
    :param stored_value: Хранимое значение в хеш-таблице.
    :param compared_value: Значение, с которым сравнивается stored_value.
    :return: True, если stored_value и compared_value совпадают. Иначе - False.
    """
    if stored_value is None:
        return False
    return stored_value == compared_value


def str_object_lexema_compare(stored_value: Lexema, compared_value: str) -> bool:
    """
    Сравнение строковой лексемы и объекта-лексемы.
    :param stored_value: Лексема-объект.
    :param compared_value: Строковая лексема.
    :return: True, если лексема-объект содержит строковую лексему в char.
    """
    if stored_value is None:
        return False
    return stored_value.char == compared_value
[ "from Classes.Lexema import Lexema\n\n\ndef default_compare(stored_value: object, compared_value: object) -> bool:\n \"\"\"\n Простейшее сравнение.\n :param stored_value: Хранимое значение в хеш-таблице.\n :param compared_value: Значение, с которым сравнивается stored_value.\n :return: True, если stored_value и compared_value совпадают. Иначе - False.\n \"\"\"\n if stored_value is None:\n return False\n return stored_value == compared_value\n\n\ndef str_object_lexema_compare(stored_value: Lexema, compared_value: str) -> bool:\n \"\"\"\n Сравнение строковой лексемы и объекта-лексемы.\n :param stored_value: Лексема-объект.\n :param compared_value: Строковая лексема.\n :return: True, если лексема-объект содержит строковую лексему в char.\n \"\"\"\n if stored_value is None:\n return False\n return stored_value.char == compared_value\n", "from Classes.Lexema import Lexema\n\n\ndef default_compare(stored_value: object, compared_value: object) ->bool:\n \"\"\"\n Простейшее сравнение.\n :param stored_value: Хранимое значение в хеш-таблице.\n :param compared_value: Значение, с которым сравнивается stored_value.\n :return: True, если stored_value и compared_value совпадают. Иначе - False.\n \"\"\"\n if stored_value is None:\n return False\n return stored_value == compared_value\n\n\ndef str_object_lexema_compare(stored_value: Lexema, compared_value: str\n ) ->bool:\n \"\"\"\n Сравнение строковой лексемы и объекта-лексемы.\n :param stored_value: Лексема-объект.\n :param compared_value: Строковая лексема.\n :return: True, если лексема-объект содержит строковую лексему в char.\n \"\"\"\n if stored_value is None:\n return False\n return stored_value.char == compared_value\n", "<import token>\n\n\ndef default_compare(stored_value: object, compared_value: object) ->bool:\n \"\"\"\n Простейшее сравнение.\n :param stored_value: Хранимое значение в хеш-таблице.\n :param compared_value: Значение, с которым сравнивается stored_value.\n :return: True, если stored_value и compared_value совпадают. Иначе - False.\n \"\"\"\n if stored_value is None:\n return False\n return stored_value == compared_value\n\n\ndef str_object_lexema_compare(stored_value: Lexema, compared_value: str\n ) ->bool:\n \"\"\"\n Сравнение строковой лексемы и объекта-лексемы.\n :param stored_value: Лексема-объект.\n :param compared_value: Строковая лексема.\n :return: True, если лексема-объект содержит строковую лексему в char.\n \"\"\"\n if stored_value is None:\n return False\n return stored_value.char == compared_value\n", "<import token>\n<function token>\n\n\ndef str_object_lexema_compare(stored_value: Lexema, compared_value: str\n ) ->bool:\n \"\"\"\n Сравнение строковой лексемы и объекта-лексемы.\n :param stored_value: Лексема-объект.\n :param compared_value: Строковая лексема.\n :return: True, если лексема-объект содержит строковую лексему в char.\n \"\"\"\n if stored_value is None:\n return False\n return stored_value.char == compared_value\n", "<import token>\n<function token>\n<function token>\n" ]
false
99,404
b3fc5ba1f91da7b58892311c39bb2be79fd15bf1
from code_challenge_6.challenge_6 import my_filter
from code_challenge_6.challenge_6 import number_is_less_than_5
from code_challenge_6.challenge_6 import number_is_even
from code_challenge_6.challenge_6 import number_is_odd
from code_challenge_6.challenge_6 import a_dodgy_function
from code_challenge_6.challenge_6 import an_even_more_dodgy_function


def test_filter_less_than_5():
    result = my_filter([3, 4, 5, 6, 7, 8], number_is_less_than_5)
    assert result == [3, 4]


def test_filter_number_is_odd():
    result = my_filter([3, 4, 5, 6, 7, 8], number_is_odd)
    assert result == [3, 5, 7]


def test_filter_number_is_even():
    result = my_filter([3, 4, 5, 6, 7, 8], number_is_even)
    assert result == [4, 6, 8]


def test_filter_with_an_invalid_fuction():
    result = my_filter([3, 4, 5, 6, 7, 8], a_dodgy_function)
    assert result == [3, 4, 5, 6, 7, 8]


def test_filter_with_another_invalid_fuction():
    result = my_filter([3, 4, 5, 6, 7, 8], an_even_more_dodgy_function)
    assert result == [3, 4, 5, 6, 7, 8]
[ "from code_challenge_6.challenge_6 import my_filter\nfrom code_challenge_6.challenge_6 import number_is_less_than_5\nfrom code_challenge_6.challenge_6 import number_is_even\nfrom code_challenge_6.challenge_6 import number_is_odd\nfrom code_challenge_6.challenge_6 import a_dodgy_function\nfrom code_challenge_6.challenge_6 import an_even_more_dodgy_function\n\n\ndef test_filter_less_than_5():\n result = my_filter([3, 4, 5, 6, 7, 8], number_is_less_than_5)\n assert result == [3, 4]\n\n\ndef test_filter_number_is_odd():\n result = my_filter([3, 4, 5, 6, 7, 8], number_is_odd)\n assert result == [3, 5, 7]\n\n\ndef test_filter_number_is_even():\n result = my_filter([3, 4, 5, 6, 7, 8], number_is_even)\n assert result == [4, 6, 8]\n\n\ndef test_filter_with_an_invalid_fuction():\n result = my_filter([3, 4, 5, 6, 7, 8], a_dodgy_function)\n assert result == [3, 4, 5, 6, 7, 8]\n\n\ndef test_filter_with_another_invalid_fuction():\n result = my_filter([3, 4, 5, 6, 7, 8], an_even_more_dodgy_function)\n assert result == [3, 4, 5, 6, 7, 8]\n", "<import token>\n\n\ndef test_filter_less_than_5():\n result = my_filter([3, 4, 5, 6, 7, 8], number_is_less_than_5)\n assert result == [3, 4]\n\n\ndef test_filter_number_is_odd():\n result = my_filter([3, 4, 5, 6, 7, 8], number_is_odd)\n assert result == [3, 5, 7]\n\n\ndef test_filter_number_is_even():\n result = my_filter([3, 4, 5, 6, 7, 8], number_is_even)\n assert result == [4, 6, 8]\n\n\ndef test_filter_with_an_invalid_fuction():\n result = my_filter([3, 4, 5, 6, 7, 8], a_dodgy_function)\n assert result == [3, 4, 5, 6, 7, 8]\n\n\ndef test_filter_with_another_invalid_fuction():\n result = my_filter([3, 4, 5, 6, 7, 8], an_even_more_dodgy_function)\n assert result == [3, 4, 5, 6, 7, 8]\n", "<import token>\n\n\ndef test_filter_less_than_5():\n result = my_filter([3, 4, 5, 6, 7, 8], number_is_less_than_5)\n assert result == [3, 4]\n\n\ndef test_filter_number_is_odd():\n result = my_filter([3, 4, 5, 6, 7, 8], number_is_odd)\n assert result == [3, 5, 7]\n\n\n<function token>\n\n\ndef test_filter_with_an_invalid_fuction():\n result = my_filter([3, 4, 5, 6, 7, 8], a_dodgy_function)\n assert result == [3, 4, 5, 6, 7, 8]\n\n\ndef test_filter_with_another_invalid_fuction():\n result = my_filter([3, 4, 5, 6, 7, 8], an_even_more_dodgy_function)\n assert result == [3, 4, 5, 6, 7, 8]\n", "<import token>\n\n\ndef test_filter_less_than_5():\n result = my_filter([3, 4, 5, 6, 7, 8], number_is_less_than_5)\n assert result == [3, 4]\n\n\ndef test_filter_number_is_odd():\n result = my_filter([3, 4, 5, 6, 7, 8], number_is_odd)\n assert result == [3, 5, 7]\n\n\n<function token>\n\n\ndef test_filter_with_an_invalid_fuction():\n result = my_filter([3, 4, 5, 6, 7, 8], a_dodgy_function)\n assert result == [3, 4, 5, 6, 7, 8]\n\n\n<function token>\n", "<import token>\n\n\ndef test_filter_less_than_5():\n result = my_filter([3, 4, 5, 6, 7, 8], number_is_less_than_5)\n assert result == [3, 4]\n\n\n<function token>\n<function token>\n\n\ndef test_filter_with_an_invalid_fuction():\n result = my_filter([3, 4, 5, 6, 7, 8], a_dodgy_function)\n assert result == [3, 4, 5, 6, 7, 8]\n\n\n<function token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n\n\ndef test_filter_with_an_invalid_fuction():\n result = my_filter([3, 4, 5, 6, 7, 8], a_dodgy_function)\n assert result == [3, 4, 5, 6, 7, 8]\n\n\n<function token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n" ]
false
99,405
1ca948fa1e2877b4732705ee6170d7f0b330a21c
from fractions import Fraction

def get_combos(elems, k):
    '''Given a list of elements (elems) and a specified combo size k (non-neg int),
    Return a set of tuples representing every unique k-length-combo of elements'''
    #Base cases
    if k == 1:
        return set((e,) for e in elems)
    #Recursive case
    n = len(elems)
    output = set()
    for i in range(n-k+1):
        for subcombo in get_combos(elems[i+1:],k-1):
            output.add( tuple( sorted( (elems[i],)+subcombo ) ) )
    return output

#t = lambda a, b, skills: Fraction(skills[a],skills[a]+skills[b])
def t(a,b,skills,mem):
    if (a,b) in mem:
        return mem[(a,b)], mem
    output = Fraction(skills[a],skills[a]+skills[b])
    mem[(a,b)] = output
    return output, mem

def tn(L,n,skills,mem):
    a, rest = L[0], L[1:]
    if n == 2: # Base case
        return t(a, rest[0], skills, mem)
    groupings = get_combos(rest, int((n/2)-1))
    output = 0
    for A_group in groupings:
        a_win, mem = tn([a]+list(A_group), n/2, skills, mem)
        B_group = [i for i in rest if i not in A_group]
        for b in B_group:
            B_losers = [j for j in B_group if j != b]
            b_win, mem = tn([b]+list(B_losers), n/2, skills, mem)
            ab, mem = t(a,b, skills, mem)
            output += ab * a_win * b_win
    return output/len(groupings), mem

def solution(skills):
    #print(f'skills input: {skills}')
    if 'Andy' not in skills:
        return '01'
    rest = [key for key in skills if key != 'Andy']
    out = str(tn(['Andy']+rest, len(skills), skills, {})[0]).split('/')
    return out[0]+out[1]

print('expect 2940764800, result:', solution({'Andy': 7, 'Novak': 5, 'Roger': 3, 'Rafael': 2}))
[ "from fractions import Fraction\r\n\r\ndef get_combos(elems, k):\r\n '''Given a list of elements (elems) and a specified combo size k (non-neg int),\r\n Return a set of tuples representing every unique k-length-combo of elements'''\r\n #Base cases\r\n if k == 1:\r\n return set((e,) for e in elems)\r\n #Recursive case\r\n n = len(elems)\r\n output = set()\r\n for i in range(n-k+1):\r\n for subcombo in get_combos(elems[i+1:],k-1):\r\n output.add( tuple( sorted( (elems[i],)+subcombo ) ) )\r\n return output\r\n\r\n#t = lambda a, b, skills: Fraction(skills[a],skills[a]+skills[b])\r\ndef t(a,b,skills,mem):\r\n if (a,b) in mem:\r\n return mem[(a,b)], mem\r\n output = Fraction(skills[a],skills[a]+skills[b])\r\n mem[(a,b)] = output\r\n return output, mem\r\n\r\ndef tn(L,n,skills,mem):\r\n a, rest = L[0], L[1:]\r\n if n == 2: # Base case\r\n return t(a, rest[0], skills, mem)\r\n groupings = get_combos(rest, int((n/2)-1))\r\n output = 0\r\n for A_group in groupings:\r\n a_win, mem = tn([a]+list(A_group), n/2, skills, mem)\r\n B_group = [i for i in rest if i not in A_group]\r\n for b in B_group:\r\n B_losers = [j for j in B_group if j != b]\r\n b_win, mem = tn([b]+list(B_losers), n/2, skills, mem)\r\n ab, mem = t(a,b, skills, mem)\r\n output += ab * a_win * b_win\r\n return output/len(groupings), mem\r\n\r\ndef solution(skills):\r\n #print(f'skills input: {skills}')\r\n if 'Andy' not in skills:\r\n return '01'\r\n rest = [key for key in skills if key != 'Andy']\r\n out = str(tn(['Andy']+rest, len(skills), skills, {})[0]).split('/')\r\n return out[0]+out[1]\r\n\r\nprint('expect 2940764800, result:', solution({'Andy': 7, 'Novak': 5, 'Roger': 3, 'Rafael': 2}))", "from fractions import Fraction\n\n\ndef get_combos(elems, k):\n \"\"\"Given a list of elements (elems) and a specified combo size k (non-neg int),\n Return a set of tuples representing every unique k-length-combo of elements\"\"\"\n if k == 1:\n return set((e,) for e in elems)\n n = len(elems)\n output = set()\n for i in range(n - k + 1):\n for subcombo in get_combos(elems[i + 1:], k - 1):\n output.add(tuple(sorted((elems[i],) + subcombo)))\n return output\n\n\ndef t(a, b, skills, mem):\n if (a, b) in mem:\n return mem[a, b], mem\n output = Fraction(skills[a], skills[a] + skills[b])\n mem[a, b] = output\n return output, mem\n\n\ndef tn(L, n, skills, mem):\n a, rest = L[0], L[1:]\n if n == 2:\n return t(a, rest[0], skills, mem)\n groupings = get_combos(rest, int(n / 2 - 1))\n output = 0\n for A_group in groupings:\n a_win, mem = tn([a] + list(A_group), n / 2, skills, mem)\n B_group = [i for i in rest if i not in A_group]\n for b in B_group:\n B_losers = [j for j in B_group if j != b]\n b_win, mem = tn([b] + list(B_losers), n / 2, skills, mem)\n ab, mem = t(a, b, skills, mem)\n output += ab * a_win * b_win\n return output / len(groupings), mem\n\n\ndef solution(skills):\n if 'Andy' not in skills:\n return '01'\n rest = [key for key in skills if key != 'Andy']\n out = str(tn(['Andy'] + rest, len(skills), skills, {})[0]).split('/')\n return out[0] + out[1]\n\n\nprint('expect 2940764800, result:', solution({'Andy': 7, 'Novak': 5,\n 'Roger': 3, 'Rafael': 2}))\n", "<import token>\n\n\ndef get_combos(elems, k):\n \"\"\"Given a list of elements (elems) and a specified combo size k (non-neg int),\n Return a set of tuples representing every unique k-length-combo of elements\"\"\"\n if k == 1:\n return set((e,) for e in elems)\n n = len(elems)\n output = set()\n for i in range(n - k + 1):\n for subcombo in get_combos(elems[i + 1:], k - 1):\n 
output.add(tuple(sorted((elems[i],) + subcombo)))\n return output\n\n\ndef t(a, b, skills, mem):\n if (a, b) in mem:\n return mem[a, b], mem\n output = Fraction(skills[a], skills[a] + skills[b])\n mem[a, b] = output\n return output, mem\n\n\ndef tn(L, n, skills, mem):\n a, rest = L[0], L[1:]\n if n == 2:\n return t(a, rest[0], skills, mem)\n groupings = get_combos(rest, int(n / 2 - 1))\n output = 0\n for A_group in groupings:\n a_win, mem = tn([a] + list(A_group), n / 2, skills, mem)\n B_group = [i for i in rest if i not in A_group]\n for b in B_group:\n B_losers = [j for j in B_group if j != b]\n b_win, mem = tn([b] + list(B_losers), n / 2, skills, mem)\n ab, mem = t(a, b, skills, mem)\n output += ab * a_win * b_win\n return output / len(groupings), mem\n\n\ndef solution(skills):\n if 'Andy' not in skills:\n return '01'\n rest = [key for key in skills if key != 'Andy']\n out = str(tn(['Andy'] + rest, len(skills), skills, {})[0]).split('/')\n return out[0] + out[1]\n\n\nprint('expect 2940764800, result:', solution({'Andy': 7, 'Novak': 5,\n 'Roger': 3, 'Rafael': 2}))\n", "<import token>\n\n\ndef get_combos(elems, k):\n \"\"\"Given a list of elements (elems) and a specified combo size k (non-neg int),\n Return a set of tuples representing every unique k-length-combo of elements\"\"\"\n if k == 1:\n return set((e,) for e in elems)\n n = len(elems)\n output = set()\n for i in range(n - k + 1):\n for subcombo in get_combos(elems[i + 1:], k - 1):\n output.add(tuple(sorted((elems[i],) + subcombo)))\n return output\n\n\ndef t(a, b, skills, mem):\n if (a, b) in mem:\n return mem[a, b], mem\n output = Fraction(skills[a], skills[a] + skills[b])\n mem[a, b] = output\n return output, mem\n\n\ndef tn(L, n, skills, mem):\n a, rest = L[0], L[1:]\n if n == 2:\n return t(a, rest[0], skills, mem)\n groupings = get_combos(rest, int(n / 2 - 1))\n output = 0\n for A_group in groupings:\n a_win, mem = tn([a] + list(A_group), n / 2, skills, mem)\n B_group = [i for i in rest if i not in A_group]\n for b in B_group:\n B_losers = [j for j in B_group if j != b]\n b_win, mem = tn([b] + list(B_losers), n / 2, skills, mem)\n ab, mem = t(a, b, skills, mem)\n output += ab * a_win * b_win\n return output / len(groupings), mem\n\n\ndef solution(skills):\n if 'Andy' not in skills:\n return '01'\n rest = [key for key in skills if key != 'Andy']\n out = str(tn(['Andy'] + rest, len(skills), skills, {})[0]).split('/')\n return out[0] + out[1]\n\n\n<code token>\n", "<import token>\n<function token>\n\n\ndef t(a, b, skills, mem):\n if (a, b) in mem:\n return mem[a, b], mem\n output = Fraction(skills[a], skills[a] + skills[b])\n mem[a, b] = output\n return output, mem\n\n\ndef tn(L, n, skills, mem):\n a, rest = L[0], L[1:]\n if n == 2:\n return t(a, rest[0], skills, mem)\n groupings = get_combos(rest, int(n / 2 - 1))\n output = 0\n for A_group in groupings:\n a_win, mem = tn([a] + list(A_group), n / 2, skills, mem)\n B_group = [i for i in rest if i not in A_group]\n for b in B_group:\n B_losers = [j for j in B_group if j != b]\n b_win, mem = tn([b] + list(B_losers), n / 2, skills, mem)\n ab, mem = t(a, b, skills, mem)\n output += ab * a_win * b_win\n return output / len(groupings), mem\n\n\ndef solution(skills):\n if 'Andy' not in skills:\n return '01'\n rest = [key for key in skills if key != 'Andy']\n out = str(tn(['Andy'] + rest, len(skills), skills, {})[0]).split('/')\n return out[0] + out[1]\n\n\n<code token>\n", "<import token>\n<function token>\n\n\ndef t(a, b, skills, mem):\n if (a, b) in mem:\n return mem[a, b], mem\n 
output = Fraction(skills[a], skills[a] + skills[b])\n mem[a, b] = output\n return output, mem\n\n\n<function token>\n\n\ndef solution(skills):\n if 'Andy' not in skills:\n return '01'\n rest = [key for key in skills if key != 'Andy']\n out = str(tn(['Andy'] + rest, len(skills), skills, {})[0]).split('/')\n return out[0] + out[1]\n\n\n<code token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n\n\ndef solution(skills):\n if 'Andy' not in skills:\n return '01'\n rest = [key for key in skills if key != 'Andy']\n out = str(tn(['Andy'] + rest, len(skills), skills, {})[0]).split('/')\n return out[0] + out[1]\n\n\n<code token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n" ]
false
99,406
09bc5daabee306c485952a93446db3f72ed8e2a3
#coding:gb2312
import pygame
from pygame.sprite import Sprite
class Star(Sprite):
	def __init__(self,al_setting,screen):
		"""设置星星图案"""
		super(Star,self).__init__()
		self.screen = screen
		self.al_setting = al_setting
		self.image = pygame.image.load("xx.bmp")
		self.rect = self.image.get_rect()
		self.rect.x = self.rect.width
		self.rect.y = self.rect.height
		#self.x = float(self.rect.x)
	def blitme():
		self.screen.blit(self.image,self.rect)
[ "#coding:gb2312\r\nimport pygame\r\nfrom pygame.sprite import Sprite\r\nclass Star(Sprite):\r\n\tdef __init__(self,al_setting,screen):\r\n\t\t\"\"\"ÉèÖÃÐÇÐÇͼ°¸\"\"\"\r\n\t\tsuper(Star,self).__init__()\r\n\t\tself.screen = screen\r\n\t\tself.al_setting = al_setting\r\n\t\tself.image = pygame.image.load(\"xx.bmp\")\r\n\t\tself.rect = self.image.get_rect()\r\n\t\tself.rect.x = self.rect.width\r\n\t\tself.rect.y = self.rect.height\r\n\t\t#self.x = float(self.rect.x)\r\n\tdef blitme():\r\n\t\tself.screen.blit(self.image,self.rect)\r\n", "import pygame\nfrom pygame.sprite import Sprite\n\n\nclass Star(Sprite):\n\n def __init__(self, al_setting, screen):\n \"\"\"ÉèÖÃÐÇÐÇͼ°¸\"\"\"\n super(Star, self).__init__()\n self.screen = screen\n self.al_setting = al_setting\n self.image = pygame.image.load('xx.bmp')\n self.rect = self.image.get_rect()\n self.rect.x = self.rect.width\n self.rect.y = self.rect.height\n\n def blitme():\n self.screen.blit(self.image, self.rect)\n", "<import token>\n\n\nclass Star(Sprite):\n\n def __init__(self, al_setting, screen):\n \"\"\"ÉèÖÃÐÇÐÇͼ°¸\"\"\"\n super(Star, self).__init__()\n self.screen = screen\n self.al_setting = al_setting\n self.image = pygame.image.load('xx.bmp')\n self.rect = self.image.get_rect()\n self.rect.x = self.rect.width\n self.rect.y = self.rect.height\n\n def blitme():\n self.screen.blit(self.image, self.rect)\n", "<import token>\n\n\nclass Star(Sprite):\n <function token>\n\n def blitme():\n self.screen.blit(self.image, self.rect)\n", "<import token>\n\n\nclass Star(Sprite):\n <function token>\n <function token>\n", "<import token>\n<class token>\n" ]
false
99,407
c9381e455b5c2a1b93ae689d2166d09da0328e3d
""" Dronesmith API Python Bindings Author: Geoff Gardner <[email protected]> Copyright 2016 Dronesmith Technologies """ __title__ = 'dronesmith' __version__ = '1.0.01' #__build__ = 0x000000 __author__ = 'Geoff Gardner' #__license__ = '' __copyright__ = 'Copyright 2016 Dronesmith Technologies' import requests import json import math import time class APIError(Exception): def __init__(self, sstr): self.val = sstr def __str__(self): print ':', self.val class Dronesmith(object): """docstring for Dronesmith.""" def __init__(self, email, key): super(Dronesmith, self).__init__() self._link = Link(email, key) if self.__auth() is False: raise APIError("Error authenticating") def drone(self, name=""): return DroneNode(self._link, name) def drones(self): pass def changeName(self, name): pass # self._link.Request() def deleteDrone(self, name): pass def __auth(self): code, data = self._link.Request() return bool(code == 204) class Link(object): """docstring for Link.""" def __init__(self, email, key): super(Link, self).__init__() self._userEmail = email self._userKey = key self._API = 'api.dronesmith.io/api/' def __createHeader(self): return { 'user-email': self._userEmail, 'user-key': self._userKey, 'Content-Type': 'application/json' } def __createUrl(self, sub=""): return 'http://' + self._API + sub def Request(self, method="GET", path="", body=None): url = self.__createUrl(path) print url if method == 'GET': response = requests.get(url, headers=self.__createHeader()) try: jsonText = json.loads(response.text) except: return response.status_code, None else: return response.status_code, jsonText elif method == 'POST': compiled = {} if body != None: try: compiled = json.dumps(body) except: compiled = {} response = requests.post(url, data=compiled, headers=self.__createHeader()) print response.text try: jsonText = json.loads(response.text) except: return response.status_code, None else: return response.status_code, jsonText else: return None class DroneNode(object): """docstring for DroneNode.""" def __init__(self, link, name=""): super(DroneNode, self).__init__() self._link = link self._name = name # self.missions = {} code, obj = self._link.Request('GET', self.__getDroneUrl()) if code == 200: self.__updateMeta(obj) else: code, obj = self._link.Request('POST', self.__getDroneUrl()) if code == 200: self.__updateMeta(obj) else: raise APIError("Could not create drone: " + str(code)) # # Drone Object # def __updateMeta(self, droneObj): self.name = droneObj["name"] self.created = droneObj["created"] self.online = droneObj["online"] self.type = droneObj["type"] self.hardwareId = droneObj["firmwareId"] # # Telemetry calls # def position(self): obj = self.__telem('position') if obj != None: self._position = Position(obj) return self._position def attitude(self): obj = self.__telem('attitude') if obj != None: self._attitude = Attitude(obj) return self._attitude def takeoff(self, altitude=10): code, obj = self._link.Request('POST', self.__getDroneUrl('takeoff'), { "altitude": altitude }) if code == 200 and obj["Command"] == 22 \ and obj["StatusCode"] == 0: return True else: return False def goto(self, latitude, longitude, altitude=None): obj = {} obj["lat"] = latitude obj["lon"] = longitude if altitude != None: obj["altitude"] = altitude code, obj = self._link.Request('POST', self.__getDroneUrl('goto'), obj) if code == 200 and obj["Command"] == 192 \ and obj["Status"] == "Command accepted.": return True else: return False def land(self): code, obj = self._link.Request('POST', self.__getDroneUrl('land'), {}) if 
code == 200 and obj["Command"] == 21 \ and obj["StatusCode"] == 0: return True else: return False def running(self): code, obj = self._link.Request('GET', self.__getDroneUrl('status')) if code != 200: return False else: if obj != None and \ "Online" in obj: return bool(obj["Online"]) == True else: return False def info(self): code, obj = self._link.Request('GET', self.__getDroneUrl()) if code == 200: self.__updateMeta(obj) return self def run(self): code, obj = self._link.Request('POST', self.__getDroneUrl('start')) if code == 200: attempts = 60 while not self.Running(): attempts -= 1 if attempts <= 0: return False time.sleep(1) return True else: return False def pause(self): code, obj = self._link.Request('POST', self.__getDroneUrl('stop')) if code == 200: return True else: return False def abort(self): code, obj = self._link.Request('POST', self.__getDroneUrl('mode'), { 'mode': 'RTL' }) def __telem(self, name): code, obj = self._link.Request('GET', self.__getDroneUrl(name)) if code == 200: return obj else: return None def __getDroneUrl(self, endpoint=""): return 'drone/' + self._name + '/' + endpoint class Position(object): """docstring for Position.""" def __init__(self, obj): super(Position, self).__init__() self.x = obj['X'] self.y = obj['Y'] self.z = obj['Z'] self.latitude = obj['Latitude'] self.longitude = obj['Longitude'] self.altitude = obj['Altitude'] class Attitude(object): """docstring for Attitude.""" def __init__(self, obj): super(Attitude, self).__init__() self.roll = obj['Roll'] self.pitch = obj['Pitch'] self.yaw = obj['Yaw']
[ "\"\"\"\n Dronesmith API\n Python Bindings\n\n Author: Geoff Gardner <[email protected]>\n Copyright 2016 Dronesmith Technologies\n\"\"\"\n\n__title__ = 'dronesmith'\n__version__ = '1.0.01'\n#__build__ = 0x000000\n__author__ = 'Geoff Gardner'\n#__license__ = ''\n__copyright__ = 'Copyright 2016 Dronesmith Technologies'\n\nimport requests\nimport json\nimport math\nimport time\n\nclass APIError(Exception):\n\tdef __init__(self, sstr):\n\t\tself.val = sstr\n\n\tdef __str__(self):\n\t\tprint ':', self.val\n\nclass Dronesmith(object):\n \"\"\"docstring for Dronesmith.\"\"\"\n def __init__(self, email, key):\n super(Dronesmith, self).__init__()\n self._link = Link(email, key)\n if self.__auth() is False:\n raise APIError(\"Error authenticating\")\n\n def drone(self, name=\"\"):\n return DroneNode(self._link, name)\n\n def drones(self):\n pass\n\n def changeName(self, name):\n pass\n # self._link.Request()\n\n def deleteDrone(self, name):\n pass\n\n def __auth(self):\n code, data = self._link.Request()\n return bool(code == 204)\n\nclass Link(object):\n \"\"\"docstring for Link.\"\"\"\n def __init__(self, email, key):\n super(Link, self).__init__()\n self._userEmail = email\n self._userKey = key\n self._API = 'api.dronesmith.io/api/'\n\n def __createHeader(self):\n return {\n 'user-email': self._userEmail,\n 'user-key': self._userKey,\n 'Content-Type': 'application/json'\n }\n\n def __createUrl(self, sub=\"\"):\n return 'http://' + self._API + sub\n\n def Request(self, method=\"GET\", path=\"\", body=None):\n url = self.__createUrl(path)\n print url\n if method == 'GET':\n response = requests.get(url, headers=self.__createHeader())\n try:\n jsonText = json.loads(response.text)\n except:\n return response.status_code, None\n else:\n return response.status_code, jsonText\n elif method == 'POST':\n compiled = {}\n\n if body != None:\n try:\n compiled = json.dumps(body)\n except:\n compiled = {}\n\n response = requests.post(url, data=compiled, headers=self.__createHeader())\n print response.text\n try:\n jsonText = json.loads(response.text)\n except:\n return response.status_code, None\n else:\n return response.status_code, jsonText\n else:\n return None\n\n\nclass DroneNode(object):\n \"\"\"docstring for DroneNode.\"\"\"\n def __init__(self, link, name=\"\"):\n super(DroneNode, self).__init__()\n self._link = link\n self._name = name\n # self.missions = {}\n\n code, obj = self._link.Request('GET', self.__getDroneUrl())\n if code == 200:\n self.__updateMeta(obj)\n else:\n code, obj = self._link.Request('POST', self.__getDroneUrl())\n if code == 200:\n self.__updateMeta(obj)\n else:\n raise APIError(\"Could not create drone: \" + str(code))\n\n #\n # Drone Object\n #\n def __updateMeta(self, droneObj):\n self.name = droneObj[\"name\"]\n self.created = droneObj[\"created\"]\n self.online = droneObj[\"online\"]\n self.type = droneObj[\"type\"]\n self.hardwareId = droneObj[\"firmwareId\"]\n\n #\n # Telemetry calls\n #\n def position(self):\n obj = self.__telem('position')\n if obj != None:\n self._position = Position(obj)\n return self._position\n\n def attitude(self):\n obj = self.__telem('attitude')\n if obj != None:\n self._attitude = Attitude(obj)\n return self._attitude\n\n def takeoff(self, altitude=10):\n code, obj = self._link.Request('POST', self.__getDroneUrl('takeoff'), {\n \"altitude\": altitude\n })\n\n if code == 200 and obj[\"Command\"] == 22 \\\n and obj[\"StatusCode\"] == 0:\n return True\n else:\n return False\n\n def goto(self, latitude, longitude, altitude=None):\n obj = {}\n 
obj[\"lat\"] = latitude\n obj[\"lon\"] = longitude\n if altitude != None:\n obj[\"altitude\"] = altitude\n\n code, obj = self._link.Request('POST', self.__getDroneUrl('goto'), obj)\n if code == 200 and obj[\"Command\"] == 192 \\\n and obj[\"Status\"] == \"Command accepted.\":\n return True\n else:\n return False\n\n def land(self):\n code, obj = self._link.Request('POST', self.__getDroneUrl('land'), {})\n if code == 200 and obj[\"Command\"] == 21 \\\n and obj[\"StatusCode\"] == 0:\n return True\n else:\n return False\n\n def running(self):\n code, obj = self._link.Request('GET', self.__getDroneUrl('status'))\n if code != 200:\n return False\n else:\n if obj != None and \\\n \"Online\" in obj:\n return bool(obj[\"Online\"]) == True\n else:\n return False\n\n def info(self):\n code, obj = self._link.Request('GET', self.__getDroneUrl())\n if code == 200:\n self.__updateMeta(obj)\n return self\n\n def run(self):\n code, obj = self._link.Request('POST', self.__getDroneUrl('start'))\n if code == 200:\n attempts = 60\n while not self.Running():\n attempts -= 1\n if attempts <= 0:\n return False\n time.sleep(1)\n return True\n else:\n return False\n\n def pause(self):\n code, obj = self._link.Request('POST', self.__getDroneUrl('stop'))\n if code == 200:\n return True\n else:\n return False\n\n def abort(self):\n code, obj = self._link.Request('POST', self.__getDroneUrl('mode'), {\n 'mode': 'RTL'\n })\n\n def __telem(self, name):\n code, obj = self._link.Request('GET', self.__getDroneUrl(name))\n\n if code == 200:\n return obj\n else:\n return None\n\n def __getDroneUrl(self, endpoint=\"\"):\n return 'drone/' + self._name + '/' + endpoint\n\n\nclass Position(object):\n \"\"\"docstring for Position.\"\"\"\n def __init__(self, obj):\n super(Position, self).__init__()\n self.x = obj['X']\n self.y = obj['Y']\n self.z = obj['Z']\n self.latitude = obj['Latitude']\n self.longitude = obj['Longitude']\n self.altitude = obj['Altitude']\n\nclass Attitude(object):\n \"\"\"docstring for Attitude.\"\"\"\n def __init__(self, obj):\n super(Attitude, self).__init__()\n self.roll = obj['Roll']\n self.pitch = obj['Pitch']\n self.yaw = obj['Yaw']\n" ]
true
99,408
70db5d4b2844a9c22ed161b722d01a61ec0e5b9d
import cv2 as cv
from aug.algorithms.algorithm import Algorithm


class ORB(Algorithm):

    def __init__(self):
        super().__init__(cv.ORB_create(), cv.DescriptorMatcher_create(
            cv.DescriptorMatcher_BRUTEFORCE_HAMMING), 30)

    def __str__(self):
        return f"Detector: ORB\nMatcher: Bruteforce Hamming"
[ "import cv2 as cv\nfrom aug.algorithms.algorithm import Algorithm\n\n\nclass ORB(Algorithm):\n\n def __init__(self):\n super().__init__(cv.ORB_create(), cv.DescriptorMatcher_create(\n cv.DescriptorMatcher_BRUTEFORCE_HAMMING), 30)\n\n def __str__(self):\n return f\"Detector: ORB\\nMatcher: Bruteforce Hamming\"\n", "import cv2 as cv\nfrom aug.algorithms.algorithm import Algorithm\n\n\nclass ORB(Algorithm):\n\n def __init__(self):\n super().__init__(cv.ORB_create(), cv.DescriptorMatcher_create(cv.\n DescriptorMatcher_BRUTEFORCE_HAMMING), 30)\n\n def __str__(self):\n return f'Detector: ORB\\nMatcher: Bruteforce Hamming'\n", "<import token>\n\n\nclass ORB(Algorithm):\n\n def __init__(self):\n super().__init__(cv.ORB_create(), cv.DescriptorMatcher_create(cv.\n DescriptorMatcher_BRUTEFORCE_HAMMING), 30)\n\n def __str__(self):\n return f'Detector: ORB\\nMatcher: Bruteforce Hamming'\n", "<import token>\n\n\nclass ORB(Algorithm):\n\n def __init__(self):\n super().__init__(cv.ORB_create(), cv.DescriptorMatcher_create(cv.\n DescriptorMatcher_BRUTEFORCE_HAMMING), 30)\n <function token>\n", "<import token>\n\n\nclass ORB(Algorithm):\n <function token>\n <function token>\n", "<import token>\n<class token>\n" ]
false
99,409
0a08b1dd3fb380ad61b9614630038787e79628bc
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 05 23:55:12 2018
@author: Kazushige Okayasu, Hirokatsu Kataoka
"""

import sys
import numpy as np

import torch
import torch.nn as nn

# Training
def train(args, model, device, train_loader, optimizer, epoch, iteration):
	model.train()
	criterion = nn.CrossEntropyLoss(size_average=True) # previous PyTorch ver.
	#criterion = nn.CrossEntropyLoss(reduction='sum')
	for i_batch, sample_batched in enumerate(train_loader):
		data, target = sample_batched["image"].to(device), sample_batched["label"].to(device)
		optimizer.zero_grad()
		output = model(data)
		pred = output.max(1, keepdim=True)[1]
		correct = pred.eq(target.view_as(pred)).sum().item()
		loss = criterion(output, target)
		loss.backward()
		optimizer.step()
		if i_batch % args.log_interval == 0:
			sys.stdout.write("\repoch:{0:>3} iteration:{1:>6} train_loss: {2:.6f} train_accracy: {3:5.2f}%".format(
							epoch, iteration, loss.item(), 100.*correct/float(len(sample_batched["label"]))))
			sys.stdout.flush()
		iteration += 1

# Validation
def val(args, model, device, test_loader, iteration):
	model.eval()
	criterion = nn.CrossEntropyLoss(size_average=False) # previous PyTorch ver.
	#criterion = nn.CrossEntropyLoss(reduction='sum')
	test_loss = 0
	correct = 0
	with torch.no_grad():
		for i_batch, sample_batched in enumerate(test_loader):
			data, target = sample_batched["image"].to(device), sample_batched["label"].to(device)
			output = model(data)
			test_loss += criterion(output, target).item()
			pred = output.max(1, keepdim=True)[1]
			correct += pred.eq(target.view_as(pred)).sum().item()
	test_loss /= float(len(test_loader.dataset))
	correct /= float(len(test_loader.dataset))
	print("\nValidation: Accuracy: {0:.2f}% test_loss: {1:.6f}".format(100. * correct, test_loss))
	return test_loss, 100. * correct
[ "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sat Aug 05 23:55:12 2018\n@author: Kazushige Okayasu, Hirokatsu Kataoka\n\"\"\"\n\nimport sys\nimport numpy as np\n\nimport torch\nimport torch.nn as nn\n\n# Training\ndef train(args, model, device, train_loader, optimizer, epoch, iteration):\n\tmodel.train()\n\tcriterion = nn.CrossEntropyLoss(size_average=True) # previous PyTorch ver.\n\t#criterion = nn.CrossEntropyLoss(reduction='sum')\n\tfor i_batch, sample_batched in enumerate(train_loader):\n\t\tdata, target = sample_batched[\"image\"].to(device), sample_batched[\"label\"].to(device)\n\t\toptimizer.zero_grad()\n\t\toutput = model(data)\n\t\tpred = output.max(1, keepdim=True)[1]\n\t\tcorrect = pred.eq(target.view_as(pred)).sum().item()\n\t\tloss = criterion(output, target)\n\t\tloss.backward()\n\t\toptimizer.step()\n\t\tif i_batch % args.log_interval == 0:\n\t\t\tsys.stdout.write(\"\\repoch:{0:>3} iteration:{1:>6} train_loss: {2:.6f} train_accracy: {3:5.2f}%\".format(\n\t\t\t\t\t\t\tepoch, iteration, loss.item(), 100.*correct/float(len(sample_batched[\"label\"]))))\n\t\t\tsys.stdout.flush()\n\t\titeration += 1\n\n# Validation\ndef val(args, model, device, test_loader, iteration):\n\tmodel.eval()\n\tcriterion = nn.CrossEntropyLoss(size_average=False) # previous PyTorch ver.\n\t#criterion = nn.CrossEntropyLoss(reduction='sum')\n\ttest_loss = 0\n\tcorrect = 0\n\twith torch.no_grad():\n\t\tfor i_batch, sample_batched in enumerate(test_loader):\n\t\t\tdata, target = sample_batched[\"image\"].to(device), sample_batched[\"label\"].to(device)\n\t\t\toutput = model(data)\n\t\t\ttest_loss += criterion(output, target).item()\n\t\t\tpred = output.max(1, keepdim=True)[1]\n\t\t\tcorrect += pred.eq(target.view_as(pred)).sum().item()\n\ttest_loss /= float(len(test_loader.dataset))\n\tcorrect /= float(len(test_loader.dataset))\n\tprint(\"\\nValidation: Accuracy: {0:.2f}% test_loss: {1:.6f}\".format(100. * correct, test_loss))\n\treturn test_loss, 100. 
* correct\n", "<docstring token>\nimport sys\nimport numpy as np\nimport torch\nimport torch.nn as nn\n\n\ndef train(args, model, device, train_loader, optimizer, epoch, iteration):\n model.train()\n criterion = nn.CrossEntropyLoss(size_average=True)\n for i_batch, sample_batched in enumerate(train_loader):\n data, target = sample_batched['image'].to(device), sample_batched[\n 'label'].to(device)\n optimizer.zero_grad()\n output = model(data)\n pred = output.max(1, keepdim=True)[1]\n correct = pred.eq(target.view_as(pred)).sum().item()\n loss = criterion(output, target)\n loss.backward()\n optimizer.step()\n if i_batch % args.log_interval == 0:\n sys.stdout.write(\n '\\repoch:{0:>3} iteration:{1:>6} train_loss: {2:.6f} train_accracy: {3:5.2f}%'\n .format(epoch, iteration, loss.item(), 100.0 * correct /\n float(len(sample_batched['label']))))\n sys.stdout.flush()\n iteration += 1\n\n\ndef val(args, model, device, test_loader, iteration):\n model.eval()\n criterion = nn.CrossEntropyLoss(size_average=False)\n test_loss = 0\n correct = 0\n with torch.no_grad():\n for i_batch, sample_batched in enumerate(test_loader):\n data, target = sample_batched['image'].to(device), sample_batched[\n 'label'].to(device)\n output = model(data)\n test_loss += criterion(output, target).item()\n pred = output.max(1, keepdim=True)[1]\n correct += pred.eq(target.view_as(pred)).sum().item()\n test_loss /= float(len(test_loader.dataset))\n correct /= float(len(test_loader.dataset))\n print('\\nValidation: Accuracy: {0:.2f}% test_loss: {1:.6f}'.format(\n 100.0 * correct, test_loss))\n return test_loss, 100.0 * correct\n", "<docstring token>\n<import token>\n\n\ndef train(args, model, device, train_loader, optimizer, epoch, iteration):\n model.train()\n criterion = nn.CrossEntropyLoss(size_average=True)\n for i_batch, sample_batched in enumerate(train_loader):\n data, target = sample_batched['image'].to(device), sample_batched[\n 'label'].to(device)\n optimizer.zero_grad()\n output = model(data)\n pred = output.max(1, keepdim=True)[1]\n correct = pred.eq(target.view_as(pred)).sum().item()\n loss = criterion(output, target)\n loss.backward()\n optimizer.step()\n if i_batch % args.log_interval == 0:\n sys.stdout.write(\n '\\repoch:{0:>3} iteration:{1:>6} train_loss: {2:.6f} train_accracy: {3:5.2f}%'\n .format(epoch, iteration, loss.item(), 100.0 * correct /\n float(len(sample_batched['label']))))\n sys.stdout.flush()\n iteration += 1\n\n\ndef val(args, model, device, test_loader, iteration):\n model.eval()\n criterion = nn.CrossEntropyLoss(size_average=False)\n test_loss = 0\n correct = 0\n with torch.no_grad():\n for i_batch, sample_batched in enumerate(test_loader):\n data, target = sample_batched['image'].to(device), sample_batched[\n 'label'].to(device)\n output = model(data)\n test_loss += criterion(output, target).item()\n pred = output.max(1, keepdim=True)[1]\n correct += pred.eq(target.view_as(pred)).sum().item()\n test_loss /= float(len(test_loader.dataset))\n correct /= float(len(test_loader.dataset))\n print('\\nValidation: Accuracy: {0:.2f}% test_loss: {1:.6f}'.format(\n 100.0 * correct, test_loss))\n return test_loss, 100.0 * correct\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef val(args, model, device, test_loader, iteration):\n model.eval()\n criterion = nn.CrossEntropyLoss(size_average=False)\n test_loss = 0\n correct = 0\n with torch.no_grad():\n for i_batch, sample_batched in enumerate(test_loader):\n data, target = sample_batched['image'].to(device), sample_batched[\n 
'label'].to(device)\n output = model(data)\n test_loss += criterion(output, target).item()\n pred = output.max(1, keepdim=True)[1]\n correct += pred.eq(target.view_as(pred)).sum().item()\n test_loss /= float(len(test_loader.dataset))\n correct /= float(len(test_loader.dataset))\n print('\\nValidation: Accuracy: {0:.2f}% test_loss: {1:.6f}'.format(\n 100.0 * correct, test_loss))\n return test_loss, 100.0 * correct\n", "<docstring token>\n<import token>\n<function token>\n<function token>\n" ]
false
99,410
41de670ee0d6b761824c658aea3d176f19bfcd66
#!/usr/bin/env python

import sys
import re

# Calcula a media de tempo entre todas as simulacoes para entre:
# real, sys, user

soma_real = 0.0
soma_user = 0.0
soma_sys = 0.0

# Soma os tempos de execucao para cada execucao
with open(sys.argv[1]) as f:
    for line in f:
        if 'real' in line:
            actual = line.split('m')[1]
            soma_real = soma_real + float(actual.split('s')[0])
        if 'user' in line:
            actual = line.split('m')[1]
            soma_user = soma_user + float(actual.split('s')[0])
        if 'sys' in line:
            actual = line.split('m')[1]
            soma_sys = soma_sys + float(actual.split('s')[0])

# Calcula e imprime a media de tempos
print('real	'+ "{0:.3f}".format(soma_real/int(sys.argv[2]))+'s')
print('user	'+ "{0:.3f}".format(soma_user/int(sys.argv[2]))+'s')
print('sys		'+ "{0:.3f}".format(soma_sys/int(sys.argv[2]))+'s')
[ "#!/usr/bin/env python\n\nimport sys\nimport re\n\n# Calcula a media de tempo entre todas as simulacoes para entre:\n# real, sys, user\n\nsoma_real = 0.0\nsoma_user = 0.0\nsoma_sys = 0.0\n\n# Soma os tempos de execucao para cada execucao\nwith open(sys.argv[1]) as f:\n for line in f:\n if 'real' in line:\n actual = line.split('m')[1]\n soma_real = soma_real + float(actual.split('s')[0])\n if 'user' in line:\n actual = line.split('m')[1]\n soma_user = soma_user + float(actual.split('s')[0])\n if 'sys' in line:\n actual = line.split('m')[1]\n soma_sys = soma_sys + float(actual.split('s')[0])\n\n# Calcula e imprime a media de tempos\nprint('real\t'+ \"{0:.3f}\".format(soma_real/int(sys.argv[2]))+'s')\nprint('user\t'+ \"{0:.3f}\".format(soma_user/int(sys.argv[2]))+'s')\nprint('sys\t\t'+ \"{0:.3f}\".format(soma_sys/int(sys.argv[2]))+'s')\n\n", "import sys\nimport re\nsoma_real = 0.0\nsoma_user = 0.0\nsoma_sys = 0.0\nwith open(sys.argv[1]) as f:\n for line in f:\n if 'real' in line:\n actual = line.split('m')[1]\n soma_real = soma_real + float(actual.split('s')[0])\n if 'user' in line:\n actual = line.split('m')[1]\n soma_user = soma_user + float(actual.split('s')[0])\n if 'sys' in line:\n actual = line.split('m')[1]\n soma_sys = soma_sys + float(actual.split('s')[0])\nprint('real\\t' + '{0:.3f}'.format(soma_real / int(sys.argv[2])) + 's')\nprint('user\\t' + '{0:.3f}'.format(soma_user / int(sys.argv[2])) + 's')\nprint('sys\\t\\t' + '{0:.3f}'.format(soma_sys / int(sys.argv[2])) + 's')\n", "<import token>\nsoma_real = 0.0\nsoma_user = 0.0\nsoma_sys = 0.0\nwith open(sys.argv[1]) as f:\n for line in f:\n if 'real' in line:\n actual = line.split('m')[1]\n soma_real = soma_real + float(actual.split('s')[0])\n if 'user' in line:\n actual = line.split('m')[1]\n soma_user = soma_user + float(actual.split('s')[0])\n if 'sys' in line:\n actual = line.split('m')[1]\n soma_sys = soma_sys + float(actual.split('s')[0])\nprint('real\\t' + '{0:.3f}'.format(soma_real / int(sys.argv[2])) + 's')\nprint('user\\t' + '{0:.3f}'.format(soma_user / int(sys.argv[2])) + 's')\nprint('sys\\t\\t' + '{0:.3f}'.format(soma_sys / int(sys.argv[2])) + 's')\n", "<import token>\n<assignment token>\nwith open(sys.argv[1]) as f:\n for line in f:\n if 'real' in line:\n actual = line.split('m')[1]\n soma_real = soma_real + float(actual.split('s')[0])\n if 'user' in line:\n actual = line.split('m')[1]\n soma_user = soma_user + float(actual.split('s')[0])\n if 'sys' in line:\n actual = line.split('m')[1]\n soma_sys = soma_sys + float(actual.split('s')[0])\nprint('real\\t' + '{0:.3f}'.format(soma_real / int(sys.argv[2])) + 's')\nprint('user\\t' + '{0:.3f}'.format(soma_user / int(sys.argv[2])) + 's')\nprint('sys\\t\\t' + '{0:.3f}'.format(soma_sys / int(sys.argv[2])) + 's')\n", "<import token>\n<assignment token>\n<code token>\n" ]
false
99,411
9c93bd6dff47fbf21933b4e3648752f20e36258a
from django.conf.urls import url
from .views import *

app_name = 'dbms'

urlpatterns = [
    url(r'^user-register$', user_register, name='user-register'),
    url(r'^user-login$', user_login, name='user-login'),
    url(r'^admin-register$', admin_register, name='admin-register'),
    url(r'^admin-login$', admin_login, name='admin-login'),
    url(r'^admin/websites', AdminWebsiteView.as_view(), name='admin-website'),
    url(r'^keywords', KeywordsView.as_view(), name='keywords'),
    url(r'^log', get_log, name='get-log'),
    url(r'^get-result', get_result, name='get-result'),
    url(r'^hello', hello, name='hello')
]
[ "from django.conf.urls import url\nfrom .views import *\n\napp_name = 'dbms'\n\nurlpatterns = [\n url(r'^user-register$', user_register, name='user-register'),\n url(r'^user-login$', user_login, name='user-login'),\n url(r'^admin-register$', admin_register, name='admin-register'),\n url(r'^admin-login$', admin_login, name='admin-login'),\n url(r'^admin/websites', AdminWebsiteView.as_view(), name='admin-website'),\n url(r'^keywords', KeywordsView.as_view(), name='keywords'),\n url(r'^log', get_log, name='get-log'),\n url(r'^get-result', get_result, name='get-result'),\n url(r'^hello', hello, name='hello')\n]\n", "from django.conf.urls import url\nfrom .views import *\napp_name = 'dbms'\nurlpatterns = [url('^user-register$', user_register, name='user-register'),\n url('^user-login$', user_login, name='user-login'), url(\n '^admin-register$', admin_register, name='admin-register'), url(\n '^admin-login$', admin_login, name='admin-login'), url(\n '^admin/websites', AdminWebsiteView.as_view(), name='admin-website'),\n url('^keywords', KeywordsView.as_view(), name='keywords'), url('^log',\n get_log, name='get-log'), url('^get-result', get_result, name=\n 'get-result'), url('^hello', hello, name='hello')]\n", "<import token>\napp_name = 'dbms'\nurlpatterns = [url('^user-register$', user_register, name='user-register'),\n url('^user-login$', user_login, name='user-login'), url(\n '^admin-register$', admin_register, name='admin-register'), url(\n '^admin-login$', admin_login, name='admin-login'), url(\n '^admin/websites', AdminWebsiteView.as_view(), name='admin-website'),\n url('^keywords', KeywordsView.as_view(), name='keywords'), url('^log',\n get_log, name='get-log'), url('^get-result', get_result, name=\n 'get-result'), url('^hello', hello, name='hello')]\n", "<import token>\n<assignment token>\n" ]
false
99,412
4f97db2c81729b3d4dbc610a6c9b30f003b98999
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 23 21:53:37 2021

@author: Cnoized
"""
#This is for problem 62
from time import time
from itertools import permutations
import numpy as np
P_List = []

start1 = time()


def CubicPermutations(Times):
    Number = 0
    N = 2
    # Cubes = GenerateCubes(5)

    while Number < Times:
        N+=1

        Number = 0

        Cube = sorted([int(a) for a in str(N**3)])
        P_List.append([Cube])
        Check = P_List.count([Cube])
        # if Check > 1:
        #     print(Check)
        if Check > Number:
            Number = Check
            for Base in range(N):
                Cube2 = sorted([int(a) for a in str(Base**3)])
                if Cube2 == Cube:
                    print(Base,'is the smallest base for which its cube has 5 permutations which are also cubes.',Base**3,Cube2)
                    # print(P_List)
                    # print(Cubes)
                    # input()

    # for a in P_List:
    #     if a in Cubes and a not in P_Int:
    #         Number += 1
    #         print(N,Number)
    #         P_Int.append(a)
    # print(P_List)
    # print(N)
    return N


def GenerateCubes(Number):
    Alpha=1
    Cubes = []
    Value = (10**len(str(Number)))
    while Alpha < Value:
        Cubes.append([a for a in str(Alpha**3)])
        Alpha+=1
    # print(len(Cubes))
    # print(len(max(Cubes)))
    return Cubes

LookingFor = 5
Answer = CubicPermutations(LookingFor)
print(Answer, 'at', LookingFor)

end1 = time()

print(end1-start1)
[ "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Jul 23 21:53:37 2021\r\n\r\n@author: Cnoized\r\n\"\"\"\r\n#This is for problem 62\r\nfrom time import time\r\nfrom itertools import permutations\r\nimport numpy as np\r\nP_List = []\r\n\r\nstart1 = time()\r\n\r\n\r\n\r\ndef CubicPermutations(Times):\r\n Number = 0\r\n N = 2\r\n # Cubes = GenerateCubes(5)\r\n \r\n while Number < Times:\r\n N+=1\r\n\r\n Number = 0\r\n\r\n Cube = sorted([int(a) for a in str(N**3)])\r\n P_List.append([Cube])\r\n Check = P_List.count([Cube])\r\n # if Check > 1:\r\n # print(Check)\r\n if Check > Number:\r\n Number = Check\r\n for Base in range(N):\r\n Cube2 = sorted([int(a) for a in str(Base**3)])\r\n if Cube2 == Cube:\r\n print(Base,'is the smallest base for which its cube has 5 permutations which are also cubes.',Base**3,Cube2)\r\n # print(P_List)\r\n # print(Cubes)\r\n # input()\r\n \r\n\r\n # for a in P_List:\r\n\r\n # if a in Cubes and a not in P_Int:\r\n # Number += 1\r\n # print(N,Number)\r\n # P_Int.append(a)\r\n # print(P_List)\r\n # print(N)\r\n return N\r\n\r\n\r\ndef GenerateCubes(Number):\r\n Alpha=1\r\n Cubes = []\r\n Value = (10**len(str(Number)))\r\n while Alpha < Value:\r\n Cubes.append([a for a in str(Alpha**3)])\r\n Alpha+=1\r\n # print(len(Cubes))\r\n # print(len(max(Cubes)))\r\n return Cubes\r\n\r\nLookingFor = 5\r\nAnswer = CubicPermutations(LookingFor)\r\nprint(Answer, 'at', LookingFor)\r\n\r\nend1 = time()\r\n\r\nprint(end1-start1)", "<docstring token>\nfrom time import time\nfrom itertools import permutations\nimport numpy as np\nP_List = []\nstart1 = time()\n\n\ndef CubicPermutations(Times):\n Number = 0\n N = 2\n while Number < Times:\n N += 1\n Number = 0\n Cube = sorted([int(a) for a in str(N ** 3)])\n P_List.append([Cube])\n Check = P_List.count([Cube])\n if Check > Number:\n Number = Check\n for Base in range(N):\n Cube2 = sorted([int(a) for a in str(Base ** 3)])\n if Cube2 == Cube:\n print(Base,\n 'is the smallest base for which its cube has 5 permutations which are also cubes.'\n , Base ** 3, Cube2)\n return N\n\n\ndef GenerateCubes(Number):\n Alpha = 1\n Cubes = []\n Value = 10 ** len(str(Number))\n while Alpha < Value:\n Cubes.append([a for a in str(Alpha ** 3)])\n Alpha += 1\n return Cubes\n\n\nLookingFor = 5\nAnswer = CubicPermutations(LookingFor)\nprint(Answer, 'at', LookingFor)\nend1 = time()\nprint(end1 - start1)\n", "<docstring token>\n<import token>\nP_List = []\nstart1 = time()\n\n\ndef CubicPermutations(Times):\n Number = 0\n N = 2\n while Number < Times:\n N += 1\n Number = 0\n Cube = sorted([int(a) for a in str(N ** 3)])\n P_List.append([Cube])\n Check = P_List.count([Cube])\n if Check > Number:\n Number = Check\n for Base in range(N):\n Cube2 = sorted([int(a) for a in str(Base ** 3)])\n if Cube2 == Cube:\n print(Base,\n 'is the smallest base for which its cube has 5 permutations which are also cubes.'\n , Base ** 3, Cube2)\n return N\n\n\ndef GenerateCubes(Number):\n Alpha = 1\n Cubes = []\n Value = 10 ** len(str(Number))\n while Alpha < Value:\n Cubes.append([a for a in str(Alpha ** 3)])\n Alpha += 1\n return Cubes\n\n\nLookingFor = 5\nAnswer = CubicPermutations(LookingFor)\nprint(Answer, 'at', LookingFor)\nend1 = time()\nprint(end1 - start1)\n", "<docstring token>\n<import token>\n<assignment token>\n\n\ndef CubicPermutations(Times):\n Number = 0\n N = 2\n while Number < Times:\n N += 1\n Number = 0\n Cube = sorted([int(a) for a in str(N ** 3)])\n P_List.append([Cube])\n Check = P_List.count([Cube])\n if Check > Number:\n Number = Check\n for Base in 
range(N):\n Cube2 = sorted([int(a) for a in str(Base ** 3)])\n if Cube2 == Cube:\n print(Base,\n 'is the smallest base for which its cube has 5 permutations which are also cubes.'\n , Base ** 3, Cube2)\n return N\n\n\ndef GenerateCubes(Number):\n Alpha = 1\n Cubes = []\n Value = 10 ** len(str(Number))\n while Alpha < Value:\n Cubes.append([a for a in str(Alpha ** 3)])\n Alpha += 1\n return Cubes\n\n\n<assignment token>\nprint(Answer, 'at', LookingFor)\n<assignment token>\nprint(end1 - start1)\n", "<docstring token>\n<import token>\n<assignment token>\n\n\ndef CubicPermutations(Times):\n Number = 0\n N = 2\n while Number < Times:\n N += 1\n Number = 0\n Cube = sorted([int(a) for a in str(N ** 3)])\n P_List.append([Cube])\n Check = P_List.count([Cube])\n if Check > Number:\n Number = Check\n for Base in range(N):\n Cube2 = sorted([int(a) for a in str(Base ** 3)])\n if Cube2 == Cube:\n print(Base,\n 'is the smallest base for which its cube has 5 permutations which are also cubes.'\n , Base ** 3, Cube2)\n return N\n\n\ndef GenerateCubes(Number):\n Alpha = 1\n Cubes = []\n Value = 10 ** len(str(Number))\n while Alpha < Value:\n Cubes.append([a for a in str(Alpha ** 3)])\n Alpha += 1\n return Cubes\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<docstring token>\n<import token>\n<assignment token>\n<function token>\n\n\ndef GenerateCubes(Number):\n Alpha = 1\n Cubes = []\n Value = 10 ** len(str(Number))\n while Alpha < Value:\n Cubes.append([a for a in str(Alpha ** 3)])\n Alpha += 1\n return Cubes\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<docstring token>\n<import token>\n<assignment token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
99,413
9b6e4e00ed7af78f8eab4f1bbad44644f1654cdd
import pymorphy2
from django import template

register = template.Library()


@register.filter
def word_form(word, number):
    morph = pymorphy2.MorphAnalyzer()
    default_word = morph.parse(word)[0]
    changed_word = default_word.make_agree_with_number(number).word
    return changed_word
[ "import pymorphy2\nfrom django import template\n\nregister = template.Library()\n\n\[email protected]\ndef word_form(word, number):\n morph = pymorphy2.MorphAnalyzer()\n default_word = morph.parse(word)[0]\n changed_word = default_word.make_agree_with_number(number).word\n return changed_word\n", "import pymorphy2\nfrom django import template\nregister = template.Library()\n\n\[email protected]\ndef word_form(word, number):\n morph = pymorphy2.MorphAnalyzer()\n default_word = morph.parse(word)[0]\n changed_word = default_word.make_agree_with_number(number).word\n return changed_word\n", "<import token>\nregister = template.Library()\n\n\[email protected]\ndef word_form(word, number):\n morph = pymorphy2.MorphAnalyzer()\n default_word = morph.parse(word)[0]\n changed_word = default_word.make_agree_with_number(number).word\n return changed_word\n", "<import token>\n<assignment token>\n\n\[email protected]\ndef word_form(word, number):\n morph = pymorphy2.MorphAnalyzer()\n default_word = morph.parse(word)[0]\n changed_word = default_word.make_agree_with_number(number).word\n return changed_word\n", "<import token>\n<assignment token>\n<function token>\n" ]
false
99,414
82d1f3125d271fdae99a93726c7cf886599f25a1
# ---------------------------------------------------------------------------------------- # Authors: Jonathan Jolivette | Matt Freeland | Enrique Morales # Template Name: TA | Teacher's Assistant # File: TA | APP.PY (MAIN FILE) # App Version: 1.0 # ---------------------------------------------------------------------------------------- # from flask import render_template from flask import Flask, g, request from flask import render_template, flash, redirect, url_for from flask_login import LoginManager, login_user, logout_user, login_required, current_user from flask_bcrypt import check_password_hash from flask_bootstrap import Bootstrap from flask_fontawesome import FontAwesome from config import Config import secrets import os from PIL import Image import moment import models from models import User from models import Event import forms # set debug and port defaults DEBUG = True PORT = 8000 # can bootstrap wrap the app and in turn cover the entire app where all # templates are under the influence of bootstrap with the need for # any cdn or linking to downloaded files/folders???? # Bootstrap(app) app = Flask(__name__) app.config.from_object(Config) # initializing the login manager module login_manager = LoginManager() login_manager.init_app(app) login_manager.login_view = 'login' @login_manager.user_loader def load_user(userid): try: return models.User.get(models.User.id == userid) except models.DoesNotExist: return None # Connect to database before request @app.before_request def before_request(): """Connect to database before each request """ g.db = models.DATABASE g.db.connect() g.user = current_user @app.after_request def after_request(response): """Close the database connection after each request.""" g.db.close() return response # ============ REGISTRATION PAGE ROUTE ============ @app.route('/register', methods=('GET', 'POST')) def register(): form = forms.RegisterForm() if form.validate_on_submit(): if "generalassemb.ly" in form.email.data: flash("Registered as an instructor", 'success') models.User.create_user( username=form.username.data, email=form.email.data, role="Instructor", password=form.password.data, course=form.course.data ) else: flash("Registered as a student", 'success') models.User.create_user( username=form.username.data, email=form.email.data, role="Student", password=form.password.data, course=form.course.data ) return redirect(url_for('index')) return render_template('register.html', form=form) # ============ LOGIN PAGE ROUTE ============ @app.route('/login', methods=('GET', 'POST')) def login(): form = forms.LoginForm() if form.validate_on_submit(): try: user = models.User.get(models.User.email == form.email.data) except models.DoesNotExist: flash("your email or password doesn't match", "error") else: if check_password_hash(user.password, form.password.data): login_user(user) flash("You've been logged in", "success") return redirect(url_for('dashboard')) else: flash("your email or password doesn't match", "error") return render_template('login.html', form=form) # ============ LOGOUT PAGE ROUTE ============ @app.route('/logout') @login_required def logout(): logout_user() flash("You've been logged out", "success") return redirect(url_for('index')) # ============ EVENT PAGE ROUTE ============ @app.route('/event/', methods=('GET', 'POST')) @app.route('/event', methods=('GET', 'POST')) @login_required def event(): events = Event.select().order_by(Event.date, Event.time) return render_template('event.html', events=events) # ============ EVENT CRUD ROUTES ============ 
# CREATE
@app.route('/event/create', methods=('GET', 'POST'))
@login_required
def create_event():
    form = forms.CreateEventForm()
    if g.user.role != "Instructor":
        flash("You must be an instructor to create events")
        return redirect(url_for('index'))

    if form.validate_on_submit():
        locator = Event.select().where(
            (Event.instructor == current_user.id) &
            (Event.date == form.date.data) &
            (Event.time == form.time.data))
        if locator.count() == 0:
            flash("Created New Event","success")
            models.Event.create_event(
                instructor=g.user.id,
                date=form.date.data,
                time=form.time.data,
            )
            return redirect(url_for("dashboard"))
        else:
            flash("Event already exists","error")
            return redirect(url_for("dashboard"))

    return render_template('create_event.html', form=form)

# DELETE
@app.route('/event/delete/<id>', methods=['DELETE', 'GET'])
@login_required
def event_delete(id):
    found_event = models.Event.get(models.Event.id == id)
    if g.user.id == found_event.instructor_id:
        if found_event.student != None:
            unlock_student = User.update(event_assigned = False).where(User.id == found_event.student)
            unlock_student.execute()
        event_to_delete = Event.delete().where(Event.id == found_event.id)
        event_to_delete.execute()
        flash("Deleted event successfully","success")
    else:
        flash("You don't have permission to delete this event.","error")
    return redirect(url_for('dashboard'))

# UPDATE
@app.route('/event/update/<id>', methods=('POST', 'GET'))
def event_update(id):
    form = forms.EditEventForm()
    found_event = Event.get(Event.id == id)
    if g.user.id == found_event.instructor_id:
        if form.validate_on_submit():
            if found_event.date != form.date.data and found_event.time != form.time.data:
                locator = Event.select().where(
                    (Event.instructor == current_user.id) &
                    (Event.date == form.date.data) &
                    (Event.time == form.time.data))
                if locator.count() == 0:
                    update = Event.update(date=form.date.data, time=form.time.data).where(Event.id == id)
                    update.execute()
                    flash("Updated Event Successfully","success")
                    return redirect(url_for('event'))
                else:
                    flash("Could not update, duplicate event exists","error")
                    return redirect(url_for('event'))

    else:
        flash("You do not have permission to edit this event", "error")
        return redirect(url_for('dashboard'))
    return render_template('edit_event.html', form=form, found_event=found_event)

# ADD STUDENT TO EVENT
@app.route('/event/add_student/<id>', methods=('POST', 'GET'))
def add_student_to_event(id):
    found_event = Event.get(Event.id == id)
    if found_event.student == None:
        if current_user.event_assigned == False:
            add_student = Event.update(student=current_user.id).where(Event.id == id)
            add_student.execute()
            lock_events = User.update(event_assigned=True).where(User.id == current_user.id)
            lock_events.execute()
            flash("Checked in for event", "success")
            return redirect(url_for('dashboard'))
        else:
            flash("You can only be assigned one event at a time")
            return redirect(url_for('dashboard'))
    else:
        flash("Event already has a student assigned", "error")
        return redirect(url_for('dashboard'))

# REMOVE STUDENT FROM EVENT
@app.route('/event/remove_student/<id>', methods=('POST', 'GET'))
def remove_student_from_event(id):
    found_event = Event.get(Event.id == id)
    if found_event.student == current_user:
        remove_student = Event.update(student_id=None).where(Event.id == id)
        remove_student.execute()
        unlock_events = User.update(event_assigned=False).where(User.id == current_user.id)
        unlock_events.execute()
        flash("Unscheduled successfully", "success")
    else:
        flash("Cannot unschedule other user events", "error")
    return redirect(url_for('dashboard'))

# ============ 
HOME PAGE ROUTE ============
@app.route('/')
def index():
    return render_template('hero.html')

# ============ STUDENT DASHBOARD ROUTE ============
@app.route('/student')
def student_dash():
    return render_template('student-dashboard.html')


# ============ TEACHER DASHBOARD ROUTE ============
@app.route('/teacher')
def teacher_dash():
    return render_template('teacher-dashboard.html')

# ============ Account update ROUTES ============

def save_picture(form_picture):
    random_hex = secrets.token_hex(8)
    # function returns filename without ext and ext itself; underscores are a python way to throw away variables or "ignore"
    _, f_ext = os.path.splitext(form_picture.filename)
    # ignore photo name and concat hex with extension
    picture_fn = random_hex + f_ext
    # full path where image will be saved. full path of project directory
    picture_path = os.path.join(app.root_path, 'static/profile_pics', picture_fn)

    # sets image resize with pillow
    output_size = (500, 500)
    # open image we passed into the function
    i = Image.open(form_picture)
    i.thumbnail(output_size)
    # saves at picture_path on file system
    i.save(picture_path)
    # return value to user
    return picture_fn

@app.route("/account", methods=['GET','POST'])
@login_required
def account():
    form = forms.UpdateAccountForm()
    if form.validate_on_submit():
        if form.picture.data:
            # allows us to set users current image to profile picture
            picture_file = save_picture(form.picture.data)
            update_image = User.update(image_file=picture_file).where(User.id == current_user.id)
            update_image.execute()
        # current_user.username = form.username.data
        # current_user.email = form.email.data
        # g.db.commit()
        flash('Your account has been updated!', 'success')
        return redirect(url_for('account'))
    elif request.method == 'GET':
        form.username.data = current_user.username
        form.email.data = current_user.email
    
    image_location = User.get(User.id == current_user.id)
    decoded_location = image_location.image_file.decode()
    image_file = url_for('static', filename='profile_pics/' + decoded_location)
    return render_template('account.html', title='Account', image_file=image_file, form=form)

@app.route("/dashboard", methods=['GET','POST'])
@login_required
def dashboard():
    events = Event.select().order_by(Event.date, Event.time)
    form = forms.UpdateAccountForm()
    if form.validate_on_submit():
        if form.picture.data:
            picture_file = save_picture(form.picture.data)
            update_image = User.update(image_file=picture_file).where(User.id == current_user.id)
            update_image.execute()
        # update_profile = User.update(username=form.username.data)
        # update_profile.execute()
        flash('Your account has been updated!', 'success')
        return redirect(url_for('dashboard'))
    elif request.method == 'GET':
        form.username.data = current_user.username
    
    image_location = User.get(User.id == current_user.id)
    if image_location.image_file != "default.png":
        decoded_location = image_location.image_file.decode()
        image_file = url_for('static', filename='profile_pics/' + decoded_location)
    else:
        image_file = url_for('static', filename='profile_pics/default.png')
    

    return render_template('dashboard.html', events=events, title='Account', image_file=image_file, form=form)

if __name__ == '__main__':
    models.initialize()
    try:
        models.User.create_user(
            username='jimbo',
            email="[email protected]",
            password='password',
            course="General",
            role="Instructor"
        )
        models.User.create_user(
            username='joe student',
            email="[email protected]",
            password='password',
            course="General",
            role="Student"
        )
        models.User.create_user(
            username='walrus',
            email="[email protected]",
            password='password',
course="General", role="Instructor" ) models.User.create_user( username='rando calrissian', email="[email protected]", password='password', course="General", role="Student" ) except ValueError: pass app.run(debug=DEBUG, port=PORT)
[ "# ----------------------------------------------------------------------------------------\n# Authors: Jonathan Jolivette | Matt Freeland | Enrique Morales\n# Template Name: TA | Teacher's Assistant\n# File: TA | APP.PY (MAIN FILE)\n# App Version: 1.0\n# ----------------------------------------------------------------------------------------\n\n# from flask import render_template\nfrom flask import Flask, g, request\nfrom flask import render_template, flash, redirect, url_for\nfrom flask_login import LoginManager, login_user, logout_user, login_required, current_user\nfrom flask_bcrypt import check_password_hash\nfrom flask_bootstrap import Bootstrap\nfrom flask_fontawesome import FontAwesome\nfrom config import Config\nimport secrets\nimport os\nfrom PIL import Image\n\nimport moment\n\nimport models\nfrom models import User\nfrom models import Event\nimport forms\n\n# set debug and port defaults\nDEBUG = True\nPORT = 8000\n\n# can bootstrap wrap the app and in turn cover the entire app where all\n# templates are under the influence of bootstrap with the need for\n# any cdn or linking to downloaded files/folders????\n# Bootstrap(app)\n\napp = Flask(__name__)\napp.config.from_object(Config)\n\n# initializing the login manager module\nlogin_manager = LoginManager()\nlogin_manager.init_app(app)\nlogin_manager.login_view = 'login'\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n# Connect to database before request\[email protected]_request\ndef before_request():\n \"\"\"Connect to database before each request \"\"\"\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\[email protected]_request\ndef after_request(response):\n \"\"\"Close the database connection after each request.\"\"\"\n g.db.close()\n return response\n\n# ============ REGISTRATION PAGE ROUTE ============\[email protected]('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if \"generalassemb.ly\" in form.email.data:\n flash(\"Registered as an instructor\", 'success')\n models.User.create_user(\n username=form.username.data,\n email=form.email.data,\n role=\"Instructor\",\n password=form.password.data,\n course=form.course.data\n )\n else:\n flash(\"Registered as a student\", 'success')\n models.User.create_user(\n username=form.username.data,\n email=form.email.data,\n role=\"Student\",\n password=form.password.data,\n course=form.course.data\n )\n\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\n# ============ LOGIN PAGE ROUTE ============\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", \"error\")\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", \"success\")\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", \"error\")\n return render_template('login.html', form=form)\n\n\n# ============ LOGOUT PAGE ROUTE ============\n\[email protected]('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You've been logged out\", \"success\")\n return redirect(url_for('index'))\n\n# ============ EVENT PAGE ROUTE ============\n\n\[email 
protected]('/event/', methods=('GET', 'POST'))\[email protected]('/event', methods=('GET', 'POST'))\n@login_required\ndef event():\n events = Event.select().order_by(Event.date, Event.time)\n return render_template('event.html', events=events)\n\n\n# ============ EVENT CRUD ROUTES ============\n\n# CREATE\[email protected]('/event/create', methods=('GET', 'POST'))\n@login_required\ndef create_event():\n form = forms.CreateEventForm()\n if g.user.role != \"Instructor\":\n flash(\"You must be an instructor to create events\")\n return redirect(url_for('index'))\n\n if form.validate_on_submit():\n locator = Event.select().where(\n (Event.instructor == current_user.id) &\n (Event.date == form.date.data) &\n (Event.time == form.time.data))\n if locator.count() == 0:\n flash(\"Created New Event\",\"success\")\n models.Event.create_event(\n instructor=g.user.id,\n date=form.date.data,\n time=form.time.data,\n )\n return redirect(url_for(\"dashboard\"))\n else:\n flash(\"Event already exists\",\"error\")\n return redirect(url_for(\"dashboard\"))\n\n return render_template('create_event.html', form=form)\n\n# DELETE\[email protected]('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned = False).where(User.id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash(\"Deleted event successfully\",\"error\")\n else:\n flash(\"You don't have permission to delete this event.\",\"error\")\n return redirect(url_for('dashboard'))\n\n# UPDATE\[email protected]('/event/update/<id>', methods=('POST', 'GET'))\ndef event_update(id):\n form = forms.EditEventForm()\n found_event = Event.get(Event.id == id)\n if g.user.id == found_event.instructor_id:\n if form.validate_on_submit():\n if found_event.date != form.date.data and found_event.time != form.time.data:\n locator = Event.select().where(\n (Event.instructor == current_user.id) &\n (Event.date == form.date.data) &\n (Event.time == form.time.data))\n if locator.count() == 0:\n update = Event.update(date=form.date.data, time=form.time.data).where(Event.id == id)\n update.execute()\n flash(\"Updated Event Successfully\",\"success\")\n return redirect(url_for('event'))\n else:\n flash(\"Could not update, duplicate event exists\",\"error\")\n return redirect(url_for('event'))\n\n else:\n flash(\"You do not have permission to edit this event\", \"error\")\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=found_event)\n\n# ADD STUDENT TO EVENT\[email protected]('/event/add_student/<id>', methods=('POST', 'GET'))\ndef add_student_to_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == None:\n if current_user.event_assigned == False:\n add_student = Event.update(student=current_user.id).where(Event.id == id)\n add_student.execute()\n lock_events = User.update(event_assigned=True).where(User.id == current_user.id)\n lock_events.execute()\n flash(\"Checked in for event\", \"success\")\n return redirect(url_for('dashboard'))\n else:\n flash(\"You can only be assigned one event at a time\")\n return redirect(url_for('dashboard'))\n else:\n flash(\"Even already has a student assigned\", \"error\")\n return redirect(url_for('dashboard'))\n\n# REMOVE STUDENT FROM EVENT\[email 
protected]('/event/remove_student/<id>', methods=('POST', 'GET'))\ndef remove_student_from_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == current_user:\n remove_student = Event.update(student_id=None).where(Event.id == id)\n remove_student.execute()\n unlock_events = User.update(event_assigned=False).where(User.id == current_user.id)\n unlock_events.execute()\n flash(\"Unscheduled successfully\", \"success\")\n else:\n flash(\"Cannot unschedule other user events\", \"error\")\n return redirect(url_for('dashboard'))\n\n# ============ HOME PAGE ROUTE ============\[email protected]('/')\ndef index():\n return render_template('hero.html')\n\n# ============ STUDENT DASHBOARD ROUTE ============\[email protected]('/student')\ndef student_dash():\n return render_template('student-dashboard.html')\n\n\n# ============ TEACHER DASHBOARD ROUTE ============\[email protected]('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n# ============ Account update ROUTES ============\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n # function returns filename without ext and ext itself,,,, underscores are a python way to throw away variables or \"ignore\"\n _, f_ext = os.path.splitext(form_picture.filename)\n # ignore photo name and concat hex with extension\n picture_fn = random_hex + f_ext\n # full path where image will be saved. full path of project directory\n picture_path = os.path.join(app.root_path, 'static/profile_pics', picture_fn)\n\n # sets image resize with pillow\n output_size = (500, 500)\n # open image we passed into the function\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n #saves at picture_path on file system\n i.save(picture_path)\n # return value to user\n return picture_fn\n\[email protected](\"/account\", methods=['GET','POST'])\n@login_required\ndef account():\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n # allows us to set users current image to profile picture\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.id == current_user.id)\n update_image.execute()\n # current_user.username = form.username.data\n # current_user.email = form.email.data\n # g.db.commit()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('account'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n form.email.data = current_user.email\n \n image_location = User.get(User.id == current_user.id)\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' + decoded_location)\n return render_template('account.html', title='Account', image_file=image_file, form=form)\n\[email protected](\"/dashboard\", methods=['GET','POST'])\n@login_required\ndef dashboard():\n events = Event.select().order_by(Event.date, Event.time)\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.id == current_user.id)\n update_image.execute()\n # update_profile = User.update(username=form.username.data)\n # update_profile.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('dashboard'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n \n image_location = User.get(User.id == current_user.id)\n if 
image_location.image_file != \"default.png\":\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' + decoded_location)\n else:\n image_file = url_for('static', filename='profile_pics/default.png')\n \n\n return render_template('dashboard.html', events=events, title='Account', image_file=image_file, form=form)\n\nif __name__ == '__main__':\n models.initialize()\n try:\n models.User.create_user(\n username='jimbo',\n email=\"[email protected]\",\n password='password',\n course=\"General\",\n role=\"Instructor\"\n )\n models.User.create_user(\n username='joe student',\n email=\"[email protected]\",\n password='password',\n course=\"General\",\n role=\"Student\"\n )\n models.User.create_user(\n username='walrus',\n email=\"[email protected]\",\n password='password',\n course=\"General\",\n role=\"Instructor\"\n )\n models.User.create_user(\n username='rando calrissian',\n email=\"[email protected]\",\n password='password',\n course=\"General\",\n role=\"Student\"\n )\n except ValueError:\n pass\n\napp.run(debug=DEBUG, port=PORT)\n", "from flask import Flask, g, request\nfrom flask import render_template, flash, redirect, url_for\nfrom flask_login import LoginManager, login_user, logout_user, login_required, current_user\nfrom flask_bcrypt import check_password_hash\nfrom flask_bootstrap import Bootstrap\nfrom flask_fontawesome import FontAwesome\nfrom config import Config\nimport secrets\nimport os\nfrom PIL import Image\nimport moment\nimport models\nfrom models import User\nfrom models import Event\nimport forms\nDEBUG = True\nPORT = 8000\napp = Flask(__name__)\napp.config.from_object(Config)\nlogin_manager = LoginManager()\nlogin_manager.init_app(app)\nlogin_manager.login_view = 'login'\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\[email protected]_request\ndef before_request():\n \"\"\"Connect to database before each request \"\"\"\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\n\[email protected]_request\ndef after_request(response):\n \"\"\"Close the database connection after each request.\"\"\"\n g.db.close()\n return response\n\n\[email protected]('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\[email 
protected]('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You've been logged out\", 'success')\n return redirect(url_for('index'))\n\n\[email protected]('/event/', methods=('GET', 'POST'))\[email protected]('/event', methods=('GET', 'POST'))\n@login_required\ndef event():\n events = Event.select().order_by(Event.date, Event.time)\n return render_template('event.html', events=events)\n\n\[email protected]('/event/create', methods=('GET', 'POST'))\n@login_required\ndef create_event():\n form = forms.CreateEventForm()\n if g.user.role != 'Instructor':\n flash('You must be an instructor to create events')\n return redirect(url_for('index'))\n if form.validate_on_submit():\n locator = Event.select().where((Event.instructor == current_user.id\n ) & (Event.date == form.date.data) & (Event.time == form.time.data)\n )\n if locator.count() == 0:\n flash('Created New Event', 'success')\n models.Event.create_event(instructor=g.user.id, date=form.date.\n data, time=form.time.data)\n return redirect(url_for('dashboard'))\n else:\n flash('Event already exists', 'error')\n return redirect(url_for('dashboard'))\n return render_template('create_event.html', form=form)\n\n\[email protected]('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'error')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\[email protected]('/event/update/<id>', methods=('POST', 'GET'))\ndef event_update(id):\n form = forms.EditEventForm()\n found_event = Event.get(Event.id == id)\n if g.user.id == found_event.instructor_id:\n if form.validate_on_submit():\n if (found_event.date != form.date.data and found_event.time !=\n form.time.data):\n locator = Event.select().where((Event.instructor ==\n current_user.id) & (Event.date == form.date.data) & (\n Event.time == form.time.data))\n if locator.count() == 0:\n update = Event.update(date=form.date.data, time=form.\n time.data).where(Event.id == id)\n update.execute()\n flash('Updated Event Successfully', 'success')\n return redirect(url_for('event'))\n else:\n flash('Could not update, duplicate event exists', 'error')\n return redirect(url_for('event'))\n else:\n flash('You do not have permission to edit this event', 'error')\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=\n found_event)\n\n\[email protected]('/event/add_student/<id>', methods=('POST', 'GET'))\ndef add_student_to_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == None:\n if current_user.event_assigned == False:\n add_student = Event.update(student=current_user.id).where(Event\n .id == id)\n add_student.execute()\n lock_events = User.update(event_assigned=True).where(User.id ==\n current_user.id)\n lock_events.execute()\n flash('Checked in for event', 'success')\n return redirect(url_for('dashboard'))\n else:\n flash('You can only be assigned one event at a time')\n return redirect(url_for('dashboard'))\n else:\n flash('Even already has a student assigned', 'error')\n return redirect(url_for('dashboard'))\n\n\[email 
protected]('/event/remove_student/<id>', methods=('POST', 'GET'))\ndef remove_student_from_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == current_user:\n remove_student = Event.update(student_id=None).where(Event.id == id)\n remove_student.execute()\n unlock_events = User.update(event_assigned=False).where(User.id ==\n current_user.id)\n unlock_events.execute()\n flash('Unscheduled successfully', 'success')\n else:\n flash('Cannot unschedule other user events', 'error')\n return redirect(url_for('dashboard'))\n\n\[email protected]('/')\ndef index():\n return render_template('hero.html')\n\n\[email protected]('/student')\ndef student_dash():\n return render_template('student-dashboard.html')\n\n\[email protected]('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\[email protected]('/account', methods=['GET', 'POST'])\n@login_required\ndef account():\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('account'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n form.email.data = current_user.email\n image_location = User.get(User.id == current_user.id)\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' + decoded_location)\n return render_template('account.html', title='Account', image_file=\n image_file, form=form)\n\n\[email protected]('/dashboard', methods=['GET', 'POST'])\n@login_required\ndef dashboard():\n events = Event.select().order_by(Event.date, Event.time)\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('dashboard'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n image_location = User.get(User.id == current_user.id)\n if image_location.image_file != 'default.png':\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' +\n decoded_location)\n else:\n image_file = url_for('static', filename='profile_pics/default.png')\n return render_template('dashboard.html', events=events, title='Account',\n image_file=image_file, form=form)\n\n\nif __name__ == '__main__':\n models.initialize()\n try:\n models.User.create_user(username='jimbo', email='[email protected]',\n password='password', course='General', role='Instructor')\n models.User.create_user(username='joe student', email=\n '[email protected]', password='password', course='General', role=\n 'Student')\n models.User.create_user(username='walrus', email=\n '[email protected]', password='password', course='General',\n role='Instructor')\n 
models.User.create_user(username='rando calrissian', email=\n '[email protected]', password='password', course='General',\n role='Student')\n except ValueError:\n pass\napp.run(debug=DEBUG, port=PORT)\n", "<import token>\nDEBUG = True\nPORT = 8000\napp = Flask(__name__)\napp.config.from_object(Config)\nlogin_manager = LoginManager()\nlogin_manager.init_app(app)\nlogin_manager.login_view = 'login'\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\[email protected]_request\ndef before_request():\n \"\"\"Connect to database before each request \"\"\"\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\n\[email protected]_request\ndef after_request(response):\n \"\"\"Close the database connection after each request.\"\"\"\n g.db.close()\n return response\n\n\[email protected]('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\[email protected]('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You've been logged out\", 'success')\n return redirect(url_for('index'))\n\n\[email protected]('/event/', methods=('GET', 'POST'))\[email protected]('/event', methods=('GET', 'POST'))\n@login_required\ndef event():\n events = Event.select().order_by(Event.date, Event.time)\n return render_template('event.html', events=events)\n\n\[email protected]('/event/create', methods=('GET', 'POST'))\n@login_required\ndef create_event():\n form = forms.CreateEventForm()\n if g.user.role != 'Instructor':\n flash('You must be an instructor to create events')\n return redirect(url_for('index'))\n if form.validate_on_submit():\n locator = Event.select().where((Event.instructor == current_user.id\n ) & (Event.date == form.date.data) & (Event.time == form.time.data)\n )\n if locator.count() == 0:\n flash('Created New Event', 'success')\n models.Event.create_event(instructor=g.user.id, date=form.date.\n data, time=form.time.data)\n return redirect(url_for('dashboard'))\n else:\n flash('Event already exists', 'error')\n return redirect(url_for('dashboard'))\n return render_template('create_event.html', form=form)\n\n\[email protected]('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == 
found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'error')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\[email protected]('/event/update/<id>', methods=('POST', 'GET'))\ndef event_update(id):\n form = forms.EditEventForm()\n found_event = Event.get(Event.id == id)\n if g.user.id == found_event.instructor_id:\n if form.validate_on_submit():\n if (found_event.date != form.date.data and found_event.time !=\n form.time.data):\n locator = Event.select().where((Event.instructor ==\n current_user.id) & (Event.date == form.date.data) & (\n Event.time == form.time.data))\n if locator.count() == 0:\n update = Event.update(date=form.date.data, time=form.\n time.data).where(Event.id == id)\n update.execute()\n flash('Updated Event Successfully', 'success')\n return redirect(url_for('event'))\n else:\n flash('Could not update, duplicate event exists', 'error')\n return redirect(url_for('event'))\n else:\n flash('You do not have permission to edit this event', 'error')\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=\n found_event)\n\n\[email protected]('/event/add_student/<id>', methods=('POST', 'GET'))\ndef add_student_to_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == None:\n if current_user.event_assigned == False:\n add_student = Event.update(student=current_user.id).where(Event\n .id == id)\n add_student.execute()\n lock_events = User.update(event_assigned=True).where(User.id ==\n current_user.id)\n lock_events.execute()\n flash('Checked in for event', 'success')\n return redirect(url_for('dashboard'))\n else:\n flash('You can only be assigned one event at a time')\n return redirect(url_for('dashboard'))\n else:\n flash('Even already has a student assigned', 'error')\n return redirect(url_for('dashboard'))\n\n\[email protected]('/event/remove_student/<id>', methods=('POST', 'GET'))\ndef remove_student_from_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == current_user:\n remove_student = Event.update(student_id=None).where(Event.id == id)\n remove_student.execute()\n unlock_events = User.update(event_assigned=False).where(User.id ==\n current_user.id)\n unlock_events.execute()\n flash('Unscheduled successfully', 'success')\n else:\n flash('Cannot unschedule other user events', 'error')\n return redirect(url_for('dashboard'))\n\n\[email protected]('/')\ndef index():\n return render_template('hero.html')\n\n\[email protected]('/student')\ndef student_dash():\n return render_template('student-dashboard.html')\n\n\[email protected]('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\[email protected]('/account', methods=['GET', 'POST'])\n@login_required\ndef account():\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if 
form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('account'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n form.email.data = current_user.email\n image_location = User.get(User.id == current_user.id)\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' + decoded_location)\n return render_template('account.html', title='Account', image_file=\n image_file, form=form)\n\n\[email protected]('/dashboard', methods=['GET', 'POST'])\n@login_required\ndef dashboard():\n events = Event.select().order_by(Event.date, Event.time)\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('dashboard'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n image_location = User.get(User.id == current_user.id)\n if image_location.image_file != 'default.png':\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' +\n decoded_location)\n else:\n image_file = url_for('static', filename='profile_pics/default.png')\n return render_template('dashboard.html', events=events, title='Account',\n image_file=image_file, form=form)\n\n\nif __name__ == '__main__':\n models.initialize()\n try:\n models.User.create_user(username='jimbo', email='[email protected]',\n password='password', course='General', role='Instructor')\n models.User.create_user(username='joe student', email=\n '[email protected]', password='password', course='General', role=\n 'Student')\n models.User.create_user(username='walrus', email=\n '[email protected]', password='password', course='General',\n role='Instructor')\n models.User.create_user(username='rando calrissian', email=\n '[email protected]', password='password', course='General',\n role='Student')\n except ValueError:\n pass\napp.run(debug=DEBUG, port=PORT)\n", "<import token>\n<assignment token>\napp.config.from_object(Config)\n<assignment token>\nlogin_manager.init_app(app)\n<assignment token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\[email protected]_request\ndef before_request():\n \"\"\"Connect to database before each request \"\"\"\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\n\[email protected]_request\ndef after_request(response):\n \"\"\"Close the database connection after each request.\"\"\"\n g.db.close()\n return response\n\n\[email protected]('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', 
password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\[email protected]('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You've been logged out\", 'success')\n return redirect(url_for('index'))\n\n\[email protected]('/event/', methods=('GET', 'POST'))\[email protected]('/event', methods=('GET', 'POST'))\n@login_required\ndef event():\n events = Event.select().order_by(Event.date, Event.time)\n return render_template('event.html', events=events)\n\n\[email protected]('/event/create', methods=('GET', 'POST'))\n@login_required\ndef create_event():\n form = forms.CreateEventForm()\n if g.user.role != 'Instructor':\n flash('You must be an instructor to create events')\n return redirect(url_for('index'))\n if form.validate_on_submit():\n locator = Event.select().where((Event.instructor == current_user.id\n ) & (Event.date == form.date.data) & (Event.time == form.time.data)\n )\n if locator.count() == 0:\n flash('Created New Event', 'success')\n models.Event.create_event(instructor=g.user.id, date=form.date.\n data, time=form.time.data)\n return redirect(url_for('dashboard'))\n else:\n flash('Event already exists', 'error')\n return redirect(url_for('dashboard'))\n return render_template('create_event.html', form=form)\n\n\[email protected]('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'error')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\[email protected]('/event/update/<id>', methods=('POST', 'GET'))\ndef event_update(id):\n form = forms.EditEventForm()\n found_event = Event.get(Event.id == id)\n if g.user.id == found_event.instructor_id:\n if form.validate_on_submit():\n if (found_event.date != form.date.data and found_event.time !=\n form.time.data):\n locator = Event.select().where((Event.instructor ==\n current_user.id) & (Event.date == form.date.data) & (\n Event.time == form.time.data))\n if locator.count() == 0:\n update = Event.update(date=form.date.data, time=form.\n time.data).where(Event.id == id)\n update.execute()\n flash('Updated Event Successfully', 'success')\n return redirect(url_for('event'))\n else:\n flash('Could not update, duplicate event exists', 'error')\n return redirect(url_for('event'))\n else:\n flash('You do not have permission to edit this event', 'error')\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=\n 
found_event)\n\n\[email protected]('/event/add_student/<id>', methods=('POST', 'GET'))\ndef add_student_to_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == None:\n if current_user.event_assigned == False:\n add_student = Event.update(student=current_user.id).where(Event\n .id == id)\n add_student.execute()\n lock_events = User.update(event_assigned=True).where(User.id ==\n current_user.id)\n lock_events.execute()\n flash('Checked in for event', 'success')\n return redirect(url_for('dashboard'))\n else:\n flash('You can only be assigned one event at a time')\n return redirect(url_for('dashboard'))\n else:\n flash('Even already has a student assigned', 'error')\n return redirect(url_for('dashboard'))\n\n\[email protected]('/event/remove_student/<id>', methods=('POST', 'GET'))\ndef remove_student_from_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == current_user:\n remove_student = Event.update(student_id=None).where(Event.id == id)\n remove_student.execute()\n unlock_events = User.update(event_assigned=False).where(User.id ==\n current_user.id)\n unlock_events.execute()\n flash('Unscheduled successfully', 'success')\n else:\n flash('Cannot unschedule other user events', 'error')\n return redirect(url_for('dashboard'))\n\n\[email protected]('/')\ndef index():\n return render_template('hero.html')\n\n\[email protected]('/student')\ndef student_dash():\n return render_template('student-dashboard.html')\n\n\[email protected]('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\[email protected]('/account', methods=['GET', 'POST'])\n@login_required\ndef account():\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('account'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n form.email.data = current_user.email\n image_location = User.get(User.id == current_user.id)\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' + decoded_location)\n return render_template('account.html', title='Account', image_file=\n image_file, form=form)\n\n\[email protected]('/dashboard', methods=['GET', 'POST'])\n@login_required\ndef dashboard():\n events = Event.select().order_by(Event.date, Event.time)\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('dashboard'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n image_location = User.get(User.id == current_user.id)\n if image_location.image_file != 'default.png':\n decoded_location = image_location.image_file.decode()\n image_file 
= url_for('static', filename='profile_pics/' +\n decoded_location)\n else:\n image_file = url_for('static', filename='profile_pics/default.png')\n return render_template('dashboard.html', events=events, title='Account',\n image_file=image_file, form=form)\n\n\nif __name__ == '__main__':\n models.initialize()\n try:\n models.User.create_user(username='jimbo', email='[email protected]',\n password='password', course='General', role='Instructor')\n models.User.create_user(username='joe student', email=\n '[email protected]', password='password', course='General', role=\n 'Student')\n models.User.create_user(username='walrus', email=\n '[email protected]', password='password', course='General',\n role='Instructor')\n models.User.create_user(username='rando calrissian', email=\n '[email protected]', password='password', course='General',\n role='Student')\n except ValueError:\n pass\napp.run(debug=DEBUG, port=PORT)\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\[email protected]_request\ndef before_request():\n \"\"\"Connect to database before each request \"\"\"\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\n\[email protected]_request\ndef after_request(response):\n \"\"\"Close the database connection after each request.\"\"\"\n g.db.close()\n return response\n\n\[email protected]('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\[email protected]('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You've been logged out\", 'success')\n return redirect(url_for('index'))\n\n\[email protected]('/event/', methods=('GET', 'POST'))\[email protected]('/event', methods=('GET', 'POST'))\n@login_required\ndef event():\n events = Event.select().order_by(Event.date, Event.time)\n return render_template('event.html', events=events)\n\n\[email protected]('/event/create', methods=('GET', 'POST'))\n@login_required\ndef create_event():\n form = forms.CreateEventForm()\n if g.user.role != 'Instructor':\n flash('You must be an instructor to create events')\n return redirect(url_for('index'))\n if form.validate_on_submit():\n locator = Event.select().where((Event.instructor == 
current_user.id\n ) & (Event.date == form.date.data) & (Event.time == form.time.data)\n )\n if locator.count() == 0:\n flash('Created New Event', 'success')\n models.Event.create_event(instructor=g.user.id, date=form.date.\n data, time=form.time.data)\n return redirect(url_for('dashboard'))\n else:\n flash('Event already exists', 'error')\n return redirect(url_for('dashboard'))\n return render_template('create_event.html', form=form)\n\n\[email protected]('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'error')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\[email protected]('/event/update/<id>', methods=('POST', 'GET'))\ndef event_update(id):\n form = forms.EditEventForm()\n found_event = Event.get(Event.id == id)\n if g.user.id == found_event.instructor_id:\n if form.validate_on_submit():\n if (found_event.date != form.date.data and found_event.time !=\n form.time.data):\n locator = Event.select().where((Event.instructor ==\n current_user.id) & (Event.date == form.date.data) & (\n Event.time == form.time.data))\n if locator.count() == 0:\n update = Event.update(date=form.date.data, time=form.\n time.data).where(Event.id == id)\n update.execute()\n flash('Updated Event Successfully', 'success')\n return redirect(url_for('event'))\n else:\n flash('Could not update, duplicate event exists', 'error')\n return redirect(url_for('event'))\n else:\n flash('You do not have permission to edit this event', 'error')\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=\n found_event)\n\n\[email protected]('/event/add_student/<id>', methods=('POST', 'GET'))\ndef add_student_to_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == None:\n if current_user.event_assigned == False:\n add_student = Event.update(student=current_user.id).where(Event\n .id == id)\n add_student.execute()\n lock_events = User.update(event_assigned=True).where(User.id ==\n current_user.id)\n lock_events.execute()\n flash('Checked in for event', 'success')\n return redirect(url_for('dashboard'))\n else:\n flash('You can only be assigned one event at a time')\n return redirect(url_for('dashboard'))\n else:\n flash('Even already has a student assigned', 'error')\n return redirect(url_for('dashboard'))\n\n\[email protected]('/event/remove_student/<id>', methods=('POST', 'GET'))\ndef remove_student_from_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == current_user:\n remove_student = Event.update(student_id=None).where(Event.id == id)\n remove_student.execute()\n unlock_events = User.update(event_assigned=False).where(User.id ==\n current_user.id)\n unlock_events.execute()\n flash('Unscheduled successfully', 'success')\n else:\n flash('Cannot unschedule other user events', 'error')\n return redirect(url_for('dashboard'))\n\n\[email protected]('/')\ndef index():\n return render_template('hero.html')\n\n\[email protected]('/student')\ndef student_dash():\n return render_template('student-dashboard.html')\n\n\[email 
protected]('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\[email protected]('/account', methods=['GET', 'POST'])\n@login_required\ndef account():\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('account'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n form.email.data = current_user.email\n image_location = User.get(User.id == current_user.id)\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' + decoded_location)\n return render_template('account.html', title='Account', image_file=\n image_file, form=form)\n\n\[email protected]('/dashboard', methods=['GET', 'POST'])\n@login_required\ndef dashboard():\n events = Event.select().order_by(Event.date, Event.time)\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('dashboard'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n image_location = User.get(User.id == current_user.id)\n if image_location.image_file != 'default.png':\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' +\n decoded_location)\n else:\n image_file = url_for('static', filename='profile_pics/default.png')\n return render_template('dashboard.html', events=events, title='Account',\n image_file=image_file, form=form)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\[email protected]_request\ndef before_request():\n \"\"\"Connect to database before each request \"\"\"\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\n\n<function token>\n\n\[email protected]('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n 
form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\n@app.route('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You've been logged out\", 'success')\n return redirect(url_for('index'))\n\n\n@app.route('/event/', methods=('GET', 'POST'))\n@app.route('/event', methods=('GET', 'POST'))\n@login_required\ndef event():\n events = Event.select().order_by(Event.date, Event.time)\n return render_template('event.html', events=events)\n\n\n@app.route('/event/create', methods=('GET', 'POST'))\n@login_required\ndef create_event():\n form = forms.CreateEventForm()\n if g.user.role != 'Instructor':\n flash('You must be an instructor to create events')\n return redirect(url_for('index'))\n if form.validate_on_submit():\n locator = Event.select().where((Event.instructor == current_user.id\n ) & (Event.date == form.date.data) & (Event.time == form.time.data)\n )\n if locator.count() == 0:\n flash('Created New Event', 'success')\n models.Event.create_event(instructor=g.user.id, date=form.date.\n data, time=form.time.data)\n return redirect(url_for('dashboard'))\n else:\n flash('Event already exists', 'error')\n return redirect(url_for('dashboard'))\n return render_template('create_event.html', form=form)\n\n\n@app.route('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'success')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/event/update/<id>', methods=('POST', 'GET'))\ndef event_update(id):\n form = forms.EditEventForm()\n found_event = Event.get(Event.id == id)\n if g.user.id == found_event.instructor_id:\n if form.validate_on_submit():\n if (found_event.date != form.date.data and found_event.time !=\n form.time.data):\n locator = Event.select().where((Event.instructor ==\n current_user.id) & (Event.date == form.date.data) & (\n Event.time == form.time.data))\n if locator.count() == 0:\n update = Event.update(date=form.date.data, time=form.\n time.data).where(Event.id == id)\n update.execute()\n flash('Updated Event Successfully', 'success')\n return redirect(url_for('event'))\n else:\n flash('Could not update, duplicate event exists', 'error')\n return redirect(url_for('event'))\n else:\n flash('You do not have permission to edit this event', 'error')\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=\n found_event)\n\n\n@app.route('/event/add_student/<id>', methods=('POST', 'GET'))\ndef add_student_to_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == None:\n if current_user.event_assigned == 
False:\n add_student = Event.update(student=current_user.id).where(Event\n .id == id)\n add_student.execute()\n lock_events = User.update(event_assigned=True).where(User.id ==\n current_user.id)\n lock_events.execute()\n flash('Checked in for event', 'success')\n return redirect(url_for('dashboard'))\n else:\n flash('You can only be assigned one event at a time')\n return redirect(url_for('dashboard'))\n else:\n flash('Event already has a student assigned', 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/event/remove_student/<id>', methods=('POST', 'GET'))\ndef remove_student_from_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == current_user:\n remove_student = Event.update(student_id=None).where(Event.id == id)\n remove_student.execute()\n unlock_events = User.update(event_assigned=False).where(User.id ==\n current_user.id)\n unlock_events.execute()\n flash('Unscheduled successfully', 'success')\n else:\n flash('Cannot unschedule other user events', 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/')\ndef index():\n return render_template('hero.html')\n\n\n@app.route('/student')\ndef student_dash():\n return render_template('student-dashboard.html')\n\n\n@app.route('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\n@app.route('/account', methods=['GET', 'POST'])\n@login_required\ndef account():\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('account'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n form.email.data = current_user.email\n image_location = User.get(User.id == current_user.id)\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' + decoded_location)\n return render_template('account.html', title='Account', image_file=\n image_file, form=form)\n\n\n@app.route('/dashboard', methods=['GET', 'POST'])\n@login_required\ndef dashboard():\n events = Event.select().order_by(Event.date, Event.time)\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('dashboard'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n image_location = User.get(User.id == current_user.id)\n if image_location.image_file != 'default.png':\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' +\n decoded_location)\n else:\n image_file = url_for('static', filename='profile_pics/default.png')\n return render_template('dashboard.html', events=events, title='Account',\n 
image_file=image_file, form=form)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\n@app.before_request\ndef before_request():\n """Connect to database before each request """\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\n\n<function token>\n\n\n@app.route('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\n@app.route('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\n@app.route('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You've been logged out\", 'success')\n return redirect(url_for('index'))\n\n\n@app.route('/event/', methods=('GET', 'POST'))\n@app.route('/event', methods=('GET', 'POST'))\n@login_required\ndef event():\n events = Event.select().order_by(Event.date, Event.time)\n return render_template('event.html', events=events)\n\n\n@app.route('/event/create', methods=('GET', 'POST'))\n@login_required\ndef create_event():\n form = forms.CreateEventForm()\n if g.user.role != 'Instructor':\n flash('You must be an instructor to create events')\n return redirect(url_for('index'))\n if form.validate_on_submit():\n locator = Event.select().where((Event.instructor == current_user.id\n ) & (Event.date == form.date.data) & (Event.time == form.time.data)\n )\n if locator.count() == 0:\n flash('Created New Event', 'success')\n models.Event.create_event(instructor=g.user.id, date=form.date.\n data, time=form.time.data)\n return redirect(url_for('dashboard'))\n else:\n flash('Event already exists', 'error')\n return redirect(url_for('dashboard'))\n return render_template('create_event.html', form=form)\n\n\n@app.route('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'success')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/event/update/<id>', methods=('POST', 'GET'))\ndef event_update(id):\n form = forms.EditEventForm()\n found_event = Event.get(Event.id == id)\n if 
g.user.id == found_event.instructor_id:\n if form.validate_on_submit():\n if (found_event.date != form.date.data and found_event.time !=\n form.time.data):\n locator = Event.select().where((Event.instructor ==\n current_user.id) & (Event.date == form.date.data) & (\n Event.time == form.time.data))\n if locator.count() == 0:\n update = Event.update(date=form.date.data, time=form.\n time.data).where(Event.id == id)\n update.execute()\n flash('Updated Event Successfully', 'success')\n return redirect(url_for('event'))\n else:\n flash('Could not update, duplicate event exists', 'error')\n return redirect(url_for('event'))\n else:\n flash('You do not have permission to edit this event', 'error')\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=\n found_event)\n\n\n@app.route('/event/add_student/<id>', methods=('POST', 'GET'))\ndef add_student_to_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == None:\n if current_user.event_assigned == False:\n add_student = Event.update(student=current_user.id).where(Event\n .id == id)\n add_student.execute()\n lock_events = User.update(event_assigned=True).where(User.id ==\n current_user.id)\n lock_events.execute()\n flash('Checked in for event', 'success')\n return redirect(url_for('dashboard'))\n else:\n flash('You can only be assigned one event at a time')\n return redirect(url_for('dashboard'))\n else:\n flash('Event already has a student assigned', 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/event/remove_student/<id>', methods=('POST', 'GET'))\ndef remove_student_from_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == current_user:\n remove_student = Event.update(student_id=None).where(Event.id == id)\n remove_student.execute()\n unlock_events = User.update(event_assigned=False).where(User.id ==\n current_user.id)\n unlock_events.execute()\n flash('Unscheduled successfully', 'success')\n else:\n flash('Cannot unschedule other user events', 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/')\ndef index():\n return render_template('hero.html')\n\n\n@app.route('/student')\ndef student_dash():\n return render_template('student-dashboard.html')\n\n\n@app.route('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\n@app.route('/account', methods=['GET', 'POST'])\n@login_required\ndef account():\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('account'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n form.email.data = current_user.email\n image_location = User.get(User.id == current_user.id)\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' + decoded_location)\n return render_template('account.html', 
title='Account', image_file=\n image_file, form=form)\n\n\n@app.route('/dashboard', methods=['GET', 'POST'])\n@login_required\ndef dashboard():\n events = Event.select().order_by(Event.date, Event.time)\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('dashboard'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n image_location = User.get(User.id == current_user.id)\n if image_location.image_file != 'default.png':\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' +\n decoded_location)\n else:\n image_file = url_for('static', filename='profile_pics/default.png')\n return render_template('dashboard.html', events=events, title='Account',\n image_file=image_file, form=form)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\n@app.before_request\ndef before_request():\n """Connect to database before each request """\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\n\n<function token>\n\n\n@app.route('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\n@app.route('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\n@app.route('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You've been logged out\", 'success')\n return redirect(url_for('index'))\n\n\n<function token>\n\n\n@app.route('/event/create', methods=('GET', 'POST'))\n@login_required\ndef create_event():\n form = forms.CreateEventForm()\n if g.user.role != 'Instructor':\n flash('You must be an instructor to create events')\n return redirect(url_for('index'))\n if form.validate_on_submit():\n locator = Event.select().where((Event.instructor == current_user.id\n ) & (Event.date == form.date.data) & (Event.time == form.time.data)\n )\n if locator.count() == 0:\n flash('Created New Event', 'success')\n models.Event.create_event(instructor=g.user.id, 
date=form.date.\n data, time=form.time.data)\n return redirect(url_for('dashboard'))\n else:\n flash('Event already exists', 'error')\n return redirect(url_for('dashboard'))\n return render_template('create_event.html', form=form)\n\n\n@app.route('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'success')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/event/update/<id>', methods=('POST', 'GET'))\ndef event_update(id):\n form = forms.EditEventForm()\n found_event = Event.get(Event.id == id)\n if g.user.id == found_event.instructor_id:\n if form.validate_on_submit():\n if (found_event.date != form.date.data and found_event.time !=\n form.time.data):\n locator = Event.select().where((Event.instructor ==\n current_user.id) & (Event.date == form.date.data) & (\n Event.time == form.time.data))\n if locator.count() == 0:\n update = Event.update(date=form.date.data, time=form.\n time.data).where(Event.id == id)\n update.execute()\n flash('Updated Event Successfully', 'success')\n return redirect(url_for('event'))\n else:\n flash('Could not update, duplicate event exists', 'error')\n return redirect(url_for('event'))\n else:\n flash('You do not have permission to edit this event', 'error')\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=\n found_event)\n\n\n@app.route('/event/add_student/<id>', methods=('POST', 'GET'))\ndef add_student_to_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == None:\n if current_user.event_assigned == False:\n add_student = Event.update(student=current_user.id).where(Event\n .id == id)\n add_student.execute()\n lock_events = User.update(event_assigned=True).where(User.id ==\n current_user.id)\n lock_events.execute()\n flash('Checked in for event', 'success')\n return redirect(url_for('dashboard'))\n else:\n flash('You can only be assigned one event at a time')\n return redirect(url_for('dashboard'))\n else:\n flash('Event already has a student assigned', 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/event/remove_student/<id>', methods=('POST', 'GET'))\ndef remove_student_from_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == current_user:\n remove_student = Event.update(student_id=None).where(Event.id == id)\n remove_student.execute()\n unlock_events = User.update(event_assigned=False).where(User.id ==\n current_user.id)\n unlock_events.execute()\n flash('Unscheduled successfully', 'success')\n else:\n flash('Cannot unschedule other user events', 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/')\ndef index():\n return render_template('hero.html')\n\n\n@app.route('/student')\ndef student_dash():\n return render_template('student-dashboard.html')\n\n\n@app.route('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = 
os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\n<function token>\n\n\n@app.route('/dashboard', methods=['GET', 'POST'])\n@login_required\ndef dashboard():\n events = Event.select().order_by(Event.date, Event.time)\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('dashboard'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n image_location = User.get(User.id == current_user.id)\n if image_location.image_file != 'default.png':\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' +\n decoded_location)\n else:\n image_file = url_for('static', filename='profile_pics/default.png')\n return render_template('dashboard.html', events=events, title='Account',\n image_file=image_file, form=form)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\n@app.before_request\ndef before_request():\n """Connect to database before each request """\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\n\n<function token>\n\n\n@app.route('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\n@app.route('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\n@app.route('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You've been logged out\", 'success')\n return redirect(url_for('index'))\n\n\n@app.route('/event/', methods=('GET', 'POST'))\n@app.route('/event', methods=('GET', 'POST'))\n@login_required\ndef event():\n events = Event.select().order_by(Event.date, Event.time)\n return render_template('event.html', events=events)\n\n\n@app.route('/event/create', methods=('GET', 'POST'))\n@login_required\ndef create_event():\n form = forms.CreateEventForm()\n if g.user.role != 'Instructor':\n flash('You must be an instructor to create events')\n return redirect(url_for('index'))\n if form.validate_on_submit():\n locator = 
Event.select().where((Event.instructor == current_user.id\n ) & (Event.date == form.date.data) & (Event.time == form.time.data)\n )\n if locator.count() == 0:\n flash('Created New Event', 'success')\n models.Event.create_event(instructor=g.user.id, date=form.date.\n data, time=form.time.data)\n return redirect(url_for('dashboard'))\n else:\n flash('Event already exists', 'error')\n return redirect(url_for('dashboard'))\n return render_template('create_event.html', form=form)\n\n\n@app.route('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'success')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/event/update/<id>', methods=('POST', 'GET'))\ndef event_update(id):\n form = forms.EditEventForm()\n found_event = Event.get(Event.id == id)\n if g.user.id == found_event.instructor_id:\n if form.validate_on_submit():\n if (found_event.date != form.date.data and found_event.time !=\n form.time.data):\n locator = Event.select().where((Event.instructor ==\n current_user.id) & (Event.date == form.date.data) & (\n Event.time == form.time.data))\n if locator.count() == 0:\n update = Event.update(date=form.date.data, time=form.\n time.data).where(Event.id == id)\n update.execute()\n flash('Updated Event Successfully', 'success')\n return redirect(url_for('event'))\n else:\n flash('Could not update, duplicate event exists', 'error')\n return redirect(url_for('event'))\n else:\n flash('You do not have permission to edit this event', 'error')\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=\n found_event)\n\n\n@app.route('/event/add_student/<id>', methods=('POST', 'GET'))\ndef add_student_to_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == None:\n if current_user.event_assigned == False:\n add_student = Event.update(student=current_user.id).where(Event\n .id == id)\n add_student.execute()\n lock_events = User.update(event_assigned=True).where(User.id ==\n current_user.id)\n lock_events.execute()\n flash('Checked in for event', 'success')\n return redirect(url_for('dashboard'))\n else:\n flash('You can only be assigned one event at a time')\n return redirect(url_for('dashboard'))\n else:\n flash('Event already has a student assigned', 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/event/remove_student/<id>', methods=('POST', 'GET'))\ndef remove_student_from_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == current_user:\n remove_student = Event.update(student_id=None).where(Event.id == id)\n remove_student.execute()\n unlock_events = User.update(event_assigned=False).where(User.id ==\n current_user.id)\n unlock_events.execute()\n flash('Unscheduled successfully', 'success')\n else:\n flash('Cannot unschedule other user events', 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/')\ndef index():\n return render_template('hero.html')\n\n\n@app.route('/student')\ndef student_dash():\n return 
render_template('student-dashboard.html')\n\n\n@app.route('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\n<function token>\n<function token>\n<code token>\n
event_to_delete.execute()\n flash('Deleted event successfully', 'success')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/event/update/<id>', methods=('POST', 'GET'))\ndef event_update(id):\n form = forms.EditEventForm()\n found_event = Event.get(Event.id == id)\n if g.user.id == found_event.instructor_id:\n if form.validate_on_submit():\n if (found_event.date != form.date.data and found_event.time !=\n form.time.data):\n locator = Event.select().where((Event.instructor ==\n current_user.id) & (Event.date == form.date.data) & (\n Event.time == form.time.data))\n if locator.count() == 0:\n update = Event.update(date=form.date.data, time=form.\n time.data).where(Event.id == id)\n update.execute()\n flash('Updated Event Successfully', 'success')\n return redirect(url_for('event'))\n else:\n flash('Could not update, duplicate event exists', 'error')\n return redirect(url_for('event'))\n else:\n flash('You do not have permission to edit this event', 'error')\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=\n found_event)\n\n\n@app.route('/event/add_student/<id>', methods=('POST', 'GET'))\ndef add_student_to_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == None:\n if current_user.event_assigned == False:\n add_student = Event.update(student=current_user.id).where(Event\n .id == id)\n add_student.execute()\n lock_events = User.update(event_assigned=True).where(User.id ==\n current_user.id)\n lock_events.execute()\n flash('Checked in for event', 'success')\n return redirect(url_for('dashboard'))\n else:\n flash('You can only be assigned one event at a time')\n return redirect(url_for('dashboard'))\n else:\n flash('Event already has a student assigned', 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/event/remove_student/<id>', methods=('POST', 'GET'))\ndef remove_student_from_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == current_user:\n remove_student = Event.update(student_id=None).where(Event.id == id)\n remove_student.execute()\n unlock_events = User.update(event_assigned=False).where(User.id ==\n current_user.id)\n unlock_events.execute()\n flash('Unscheduled successfully', 'success')\n else:\n flash('Cannot unschedule other user events', 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/')\ndef index():\n return render_template('hero.html')\n\n\n@app.route('/student')\ndef student_dash():\n return render_template('student-dashboard.html')\n\n\n@app.route('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\n<function token>\n\n\n@app.route('/account', methods=['GET', 'POST'])\n@login_required\ndef account():\n form = forms.UpdateAccountForm()\n if form.validate_on_submit():\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n update_image = User.update(image_file=picture_file).where(User.\n id == current_user.id)\n update_image.execute()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('account'))\n elif request.method == 'GET':\n form.username.data = current_user.username\n form.email.data = current_user.email\n image_location = User.get(User.id == current_user.id)\n decoded_location = image_location.image_file.decode()\n image_file = url_for('static', filename='profile_pics/' + decoded_location)\n return render_template('account.html', title='Account', image_file=\n image_file, form=form)\n\n\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\n@app.before_request\ndef before_request():\n """Connect to database before each request """\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\n\n<function token>\n\n\n@app.route('/register', methods=('GET', 'POST'))\ndef 
register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\n@app.route('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\n@app.route('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You've been logged out\", 'success')\n return redirect(url_for('index'))\n\n\n<function token>\n\n\n@app.route('/event/create', methods=('GET', 'POST'))\n@login_required\ndef create_event():\n form = forms.CreateEventForm()\n if g.user.role != 'Instructor':\n flash('You must be an instructor to create events')\n return redirect(url_for('index'))\n if form.validate_on_submit():\n locator = Event.select().where((Event.instructor == current_user.id\n ) & (Event.date == form.date.data) & (Event.time == form.time.data)\n )\n if locator.count() == 0:\n flash('Created New Event', 'success')\n models.Event.create_event(instructor=g.user.id, date=form.date.\n data, time=form.time.data)\n return redirect(url_for('dashboard'))\n else:\n flash('Event already exists', 'error')\n return redirect(url_for('dashboard'))\n return render_template('create_event.html', form=form)\n\n\n@app.route('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'success')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/event/update/<id>', methods=('POST', 'GET'))\ndef event_update(id):\n form = forms.EditEventForm()\n found_event = Event.get(Event.id == id)\n if g.user.id == found_event.instructor_id:\n if form.validate_on_submit():\n if (found_event.date != form.date.data and found_event.time !=\n form.time.data):\n locator = Event.select().where((Event.instructor ==\n current_user.id) & (Event.date == form.date.data) & (\n Event.time == form.time.data))\n if locator.count() == 0:\n update = Event.update(date=form.date.data, time=form.\n time.data).where(Event.id == id)\n update.execute()\n flash('Updated Event Successfully', 'success')\n return redirect(url_for('event'))\n else:\n flash('Could not update, duplicate event exists', 
'error')\n return redirect(url_for('event'))\n else:\n flash('You do not have permission to edit this event', 'error')\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=\n found_event)\n\n\n<function token>\n\n\n@app.route('/event/remove_student/<id>', methods=('POST', 'GET'))\ndef remove_student_from_event(id):\n found_event = Event.get(Event.id == id)\n if found_event.student == current_user:\n remove_student = Event.update(student_id=None).where(Event.id == id)\n remove_student.execute()\n unlock_events = User.update(event_assigned=False).where(User.id ==\n current_user.id)\n unlock_events.execute()\n flash('Unscheduled successfully', 'success')\n else:\n flash('Cannot unschedule other user events', 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/')\ndef index():\n return render_template('hero.html')\n\n\n@app.route('/student')\ndef student_dash():\n return render_template('student-dashboard.html')\n\n\n@app.route('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\n<function token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\n<function token>\n<function token>\n\n\n@app.route('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\n@app.route('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\n@app.route('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You've been logged out\", 'success')\n return redirect(url_for('index'))\n\n\n<function token>\n\n\n@app.route('/event/create', methods=('GET', 'POST'))\n@login_required\ndef create_event():\n form = forms.CreateEventForm()\n if g.user.role != 'Instructor':\n flash('You must be an instructor to create events')\n return redirect(url_for('index'))\n 
if form.validate_on_submit():\n locator = Event.select().where((Event.instructor == current_user.id\n ) & (Event.date == form.date.data) & (Event.time == form.time.data)\n )\n if locator.count() == 0:\n flash('Created New Event', 'success')\n models.Event.create_event(instructor=g.user.id, date=form.date.\n data, time=form.time.data)\n return redirect(url_for('dashboard'))\n else:\n flash('Event already exists', 'error')\n return redirect(url_for('dashboard'))\n return render_template('create_event.html', form=form)\n\n\n@app.route('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'success')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/event/update/<id>', methods=('POST', 'GET'))\ndef event_update(id):\n form = forms.EditEventForm()\n found_event = Event.get(Event.id == id)\n if g.user.id == found_event.instructor_id:\n if form.validate_on_submit():\n if (found_event.date != form.date.data and found_event.time !=\n form.time.data):\n locator = Event.select().where((Event.instructor ==\n current_user.id) & (Event.date == form.date.data) & (\n Event.time == form.time.data))\n if locator.count() == 0:\n update = Event.update(date=form.date.data, time=form.\n time.data).where(Event.id == id)\n update.execute()\n flash('Updated Event Successfully', 'success')\n return redirect(url_for('event'))\n else:\n flash('Could not update, duplicate event exists', 'error')\n return redirect(url_for('event'))\n else:\n flash('You do not have permission to edit this event', 'error')\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=\n found_event)\n\n\n<function token>\n<function token>\n\n\n@app.route('/')\ndef index():\n return render_template('hero.html')\n\n\n@app.route('/student')\ndef student_dash():\n return render_template('student-dashboard.html')\n\n\n@app.route('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\n<function token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\n<function token>\n<function token>\n\n\n@app.route('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', 
password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\n@app.route('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\n@app.route('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You've been logged out\", 'success')\n return redirect(url_for('index'))\n\n\n<function token>\n\n\n@app.route('/event/create', methods=('GET', 'POST'))\n@login_required\ndef create_event():\n form = forms.CreateEventForm()\n if g.user.role != 'Instructor':\n flash('You must be an instructor to create events')\n return redirect(url_for('index'))\n if form.validate_on_submit():\n locator = Event.select().where((Event.instructor == current_user.id\n ) & (Event.date == form.date.data) & (Event.time == form.time.data)\n )\n if locator.count() == 0:\n flash('Created New Event', 'success')\n models.Event.create_event(instructor=g.user.id, date=form.date.\n data, time=form.time.data)\n return redirect(url_for('dashboard'))\n else:\n flash('Event already exists', 'error')\n return redirect(url_for('dashboard'))\n return render_template('create_event.html', form=form)\n\n\n@app.route('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'success')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n@app.route('/event/update/<id>', methods=('POST', 'GET'))\ndef event_update(id):\n form = forms.EditEventForm()\n found_event = Event.get(Event.id == id)\n if g.user.id == found_event.instructor_id:\n if form.validate_on_submit():\n if (found_event.date != form.date.data and found_event.time !=\n form.time.data):\n locator = Event.select().where((Event.instructor ==\n current_user.id) & (Event.date == form.date.data) & (\n Event.time == form.time.data))\n if locator.count() == 0:\n update = Event.update(date=form.date.data, time=form.\n time.data).where(Event.id == id)\n update.execute()\n flash('Updated Event Successfully', 'success')\n return redirect(url_for('event'))\n else:\n flash('Could not update, duplicate event exists', 'error')\n return redirect(url_for('event'))\n else:\n flash('You do not have permission to edit this event', 'error')\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=\n found_event)\n\n\n<function 
token>\n<function token>\n\n\n@app.route('/')\ndef index():\n return render_template('hero.html')\n\n\n<function token>\n\n\n@app.route('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\n<function token>\n<function token>\n<code token>\n
0:\n update = Event.update(date=form.date.data, time=form.\n time.data).where(Event.id == id)\n update.execute()\n flash('Updated Event Successfully', 'success')\n return redirect(url_for('event'))\n else:\n flash('Could not update, duplicate event exists', 'error')\n return redirect(url_for('event'))\n else:\n flash('You do not have permission to edit this event', 'error')\n return redirect(url_for('dashboard'))\n return render_template('edit_event.html', form=form, found_event=\n found_event)\n\n\n<function token>\n<function token>\n\n\n@app.route('/')\ndef index():\n return render_template('hero.html')\n\n\n<function token>\n\n\n@app.route('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\n<function token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\n<function token>\n<function token>\n\n\n@app.route('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\n@app.route('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\n@app.route('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You've been logged out\", 'success')\n return redirect(url_for('index'))\n\n\n<function token>\n<function token>\n\n\n@app.route('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'success')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return 
redirect(url_for('dashboard'))\n\n\n<function token>\n<function token>\n<function token>\n\n\n@app.route('/')\ndef index():\n return render_template('hero.html')\n\n\n<function token>\n\n\n@app.route('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\ndef save_picture(form_picture):\n random_hex = secrets.token_hex(8)\n _, f_ext = os.path.splitext(form_picture.filename)\n picture_fn = random_hex + f_ext\n picture_path = os.path.join(app.root_path, 'static/profile_pics',\n picture_fn)\n output_size = 500, 500\n i = Image.open(form_picture)\n 
i.thumbnail(output_size)\n i.save(picture_path)\n return picture_fn\n\n\n<function token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.User.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\n<function token>\n<function token>\n\n\[email protected]('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'error')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/')\ndef index():\n return render_template('hero.html')\n\n\n<function token>\n\n\[email protected]('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegisterForm()\n if form.validate_on_submit():\n if 'generalassemb.ly' in form.email.data:\n flash('Registered as an instructor', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Instructor', password=form.password.data,\n course=form.course.data)\n else:\n flash('Registered as a student', 'success')\n models.User.create_user(username=form.username.data, email=form\n .email.data, role='Student', password=form.password.data,\n course=form.course.data)\n return redirect(url_for('index'))\n return render_template('register.html', 
form=form)\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'error')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/')\ndef index():\n return render_template('hero.html')\n\n\n<function token>\n\n\[email protected]('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'error')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/')\ndef index():\n return render_template('hero.html')\n\n\n<function token>\n\n\[email protected]('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n<function 
token>\n<function token>\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'error')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/teacher')\ndef teacher_dash():\n return render_template('teacher-dashboard.html')\n\n\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.email == form.email.data)\n except models.DoesNotExist:\n flash(\"your email or password doesn't match\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in\", 'success')\n return redirect(url_for('dashboard'))\n else:\n flash(\"your email or password doesn't match\", 'error')\n return render_template('login.html', form=form)\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'error')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/event/delete/<id>', methods=['DELETE', 'GET'])\n@login_required\ndef 
event_delete(id):\n found_event = models.Event.get(models.Event.id == id)\n if g.user.id == found_event.instructor_id:\n if found_event.student != None:\n unlock_student = User.update(event_assigned=False).where(User.\n id == found_event.student)\n unlock_student.execute()\n event_to_delete = Event.delete().where(Event.id == found_event.id)\n event_to_delete.execute()\n flash('Deleted event successfully', 'error')\n else:\n flash(\"You don't have permission to delete this event.\", 'error')\n return redirect(url_for('dashboard'))\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n" ]
false
99,415
5b8cc8ed84f98da8b7ec9ec1a0de9cced5f0f87d
#A class calling its own method
#Static methods
class Room:
    tag=1
# @classmethod#can only access attributes of the class
# def tell_info(cls,x):
#     # print(cls)
#     print("____>",cls.tag,x)#called directly on the class
# Room.tell_info(10)
    @staticmethod#separated from both the class and its instances: only nominally managed by the class, it cannot use class or instance variables; it is the class's toolkit
    def have_a_bath(a,b,c):
        print('%s %s %s is taking a bath'%(a,b,c))
Room.have_a_bath('pl','pll','plll')
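The commented-out @classmethod block and the note on @staticmethod above contrast the two decorators. A minimal runnable sketch of that contrast (the class name Demo and its members are illustrative, not taken from the sample):

class Demo:
    tag = 1

    @classmethod
    def tell_info(cls, x):
        # Receives the class itself, so class attributes are reachable.
        print("____>", cls.tag, x)

    @staticmethod
    def add(a, b):
        # Receives neither the class nor an instance; it is just a plain
        # function namespaced under the class, so cls.tag is unavailable.
        return a + b

Demo.tell_info(10)      # ____> 1 10
print(Demo.add(2, 3))   # 5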
[ "#类调用自己的方法\n#静态方法\nclass Room:\n tag=1\n# @classmethod#只能 访问类的属性\n# def tell_info(cls,x):\n# # print(cls)\n# print(\"____>\",cls.tag,x)#直接调用类\n# Room.tell_info(10)\n @staticmethod#和类和实例分别分隔开 只是名义上的归属类管理,不能使用类变量和实例变量,是类的工具包\n def have_a_bath(a,b,c):\n print('%s %s %s 正在洗澡'%(a,b,c))\nRoom.have_a_bath('pl','pll','plll')\n", "class Room:\n tag = 1\n\n @staticmethod\n def have_a_bath(a, b, c):\n print('%s %s %s 正在洗澡' % (a, b, c))\n\n\nRoom.have_a_bath('pl', 'pll', 'plll')\n", "class Room:\n tag = 1\n\n @staticmethod\n def have_a_bath(a, b, c):\n print('%s %s %s 正在洗澡' % (a, b, c))\n\n\n<code token>\n", "class Room:\n <assignment token>\n\n @staticmethod\n def have_a_bath(a, b, c):\n print('%s %s %s 正在洗澡' % (a, b, c))\n\n\n<code token>\n", "class Room:\n <assignment token>\n <function token>\n\n\n<code token>\n", "<class token>\n<code token>\n" ]
false
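The steps list just above runs from the original source, through a normalized copy, down to a skeleton of placeholder tokens (<assignment token>, <function token>, <class token>, <code token>). This dump does not show how those steps were produced, so the following is only a guessed re-implementation of the final masking stage, using the standard ast module:

import ast

# Hypothetical mapping; the placeholder names are copied from the steps
# data, but the actual generation pipeline is not shown in this dump.
PLACEHOLDERS = {
    ast.Import: '<import token>',
    ast.ImportFrom: '<import token>',
    ast.FunctionDef: '<function token>',
    ast.ClassDef: '<class token>',
    ast.Assign: '<assignment token>',
}

def mask_top_level(source):
    """Collapse every top-level statement into a coarse placeholder."""
    return '\n'.join(
        PLACEHOLDERS.get(type(node), '<code token>')
        for node in ast.parse(source).body
    )

print(mask_top_level("class Room:\n    tag = 1\n\nRoom()\n"))
# -> <class token>
#    <code token>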
99,416
f24bb634c9b94148cae1f2fef97d2ec7a1c51b0c
EXPECTED = {'PKIX1Explicit88': {'extensibility-implied': False, 'imports': {}, 'object-classes': {}, 'object-sets': {}, 'tags': 'EXPLICIT', 'types': {'AdministrationDomainName': {'members': [{'name': 'numeric', 'size': [(0, 'ub-domain-name-length')], 'type': 'NumericString'}, {'name': 'printable', 'size': [(0, 'ub-domain-name-length')], 'type': 'PrintableString'}], 'tag': {'class': 'APPLICATION', 'number': 2}, 'type': 'CHOICE'}, 'AlgorithmIdentifier': {'members': [{'name': 'algorithm', 'type': 'OBJECT ' 'IDENTIFIER'}, {'choices': {}, 'name': 'parameters', 'optional': True, 'type': 'ANY ' 'DEFINED ' 'BY', 'value': 'algorithm'}], 'type': 'SEQUENCE'}, 'Attribute': {'members': [{'name': 'type', 'type': 'AttributeType'}, {'element': {'type': 'AttributeValue'}, 'name': 'values', 'type': 'SET OF'}], 'type': 'SEQUENCE'}, 'AttributeType': {'type': 'OBJECT IDENTIFIER'}, 'AttributeTypeAndValue': {'members': [{'name': 'type', 'type': 'AttributeType'}, {'name': 'value', 'type': 'AttributeValue'}], 'type': 'SEQUENCE'}, 'AttributeValue': {'choices': {}, 'type': 'ANY DEFINED BY', 'value': 'type'}, 'BuiltInDomainDefinedAttribute': {'members': [{'name': 'type', 'size': [(1, 'ub-domain-defined-attribute-type-length')], 'type': 'PrintableString'}, {'name': 'value', 'size': [(1, 'ub-domain-defined-attribute-value-length')], 'type': 'PrintableString'}], 'type': 'SEQUENCE'}, 'BuiltInDomainDefinedAttributes': {'element': {'type': 'BuiltInDomainDefinedAttribute'}, 'size': [(1, 'ub-domain-defined-attributes')], 'type': 'SEQUENCE ' 'OF'}, 'BuiltInStandardAttributes': {'members': [{'name': 'country-name', 'optional': True, 'type': 'CountryName'}, {'name': 'administration-domain-name', 'optional': True, 'type': 'AdministrationDomainName'}, {'name': 'network-address', 'optional': True, 'tag': {'kind': 'IMPLICIT', 'number': 0}, 'type': 'NetworkAddress'}, {'name': 'terminal-identifier', 'optional': True, 'tag': {'kind': 'IMPLICIT', 'number': 1}, 'type': 'TerminalIdentifier'}, {'name': 'private-domain-name', 'optional': True, 'tag': {'number': 2}, 'type': 'PrivateDomainName'}, {'name': 'organization-name', 'optional': True, 'tag': {'kind': 'IMPLICIT', 'number': 3}, 'type': 'OrganizationName'}, {'name': 'numeric-user-identifier', 'optional': True, 'tag': {'kind': 'IMPLICIT', 'number': 4}, 'type': 'NumericUserIdentifier'}, {'name': 'personal-name', 'optional': True, 'tag': {'kind': 'IMPLICIT', 'number': 5}, 'type': 'PersonalName'}, {'name': 'organizational-unit-names', 'optional': True, 'tag': {'kind': 'IMPLICIT', 'number': 6}, 'type': 'OrganizationalUnitNames'}], 'type': 'SEQUENCE'}, 'Certificate': {'members': [{'name': 'tbsCertificate', 'type': 'TBSCertificate'}, {'name': 'signatureAlgorithm', 'type': 'AlgorithmIdentifier'}, {'name': 'signature', 'type': 'BIT ' 'STRING'}], 'type': 'SEQUENCE'}, 'CertificateList': {'members': [{'name': 'tbsCertList', 'type': 'TBSCertList'}, {'name': 'signatureAlgorithm', 'type': 'AlgorithmIdentifier'}, {'name': 'signature', 'type': 'BIT ' 'STRING'}], 'type': 'SEQUENCE'}, 'CertificateSerialNumber': {'type': 'INTEGER'}, 'CommonName': {'size': [(1, 'ub-common-name-length')], 'type': 'PrintableString'}, 'CountryName': {'members': [{'name': 'x121-dcc-code', 'size': ['ub-country-name-numeric-length'], 'type': 'NumericString'}, {'name': 'iso-3166-alpha2-code', 'size': ['ub-country-name-alpha-length'], 'type': 'PrintableString'}], 'tag': {'class': 'APPLICATION', 'number': 1}, 'type': 'CHOICE'}, 'DirectoryString': {'members': [{'name': 'teletexString', 'size': [(1, 'MAX')], 'type': 'TeletexString'}, 
{'name': 'printableString', 'size': [(1, 'MAX')], 'type': 'PrintableString'}, {'name': 'universalString', 'size': [(1, 'MAX')], 'type': 'UniversalString'}, {'name': 'utf8String', 'size': [(1, 'MAX')], 'type': 'UTF8String'}, {'name': 'bmpString', 'size': [(1, 'MAX')], 'type': 'BMPString'}], 'type': 'CHOICE'}, 'DistinguishedName': {'type': 'RDNSequence'}, 'DomainComponent': {'type': 'IA5String'}, 'EmailAddress': {'size': [(1, 'ub-emailaddress-length')], 'type': 'IA5String'}, 'ExtendedNetworkAddress': {'members': [{'members': [{'name': 'number', 'size': [(1, 'ub-e163-4-number-length')], 'tag': {'kind': 'IMPLICIT', 'number': 0}, 'type': 'NumericString'}, {'name': 'sub-address', 'optional': True, 'size': [(1, 'ub-e163-4-sub-address-length')], 'tag': {'kind': 'IMPLICIT', 'number': 1}, 'type': 'NumericString'}], 'name': 'e163-4-address', 'type': 'SEQUENCE'}, {'name': 'psap-address', 'tag': {'kind': 'IMPLICIT', 'number': 0}, 'type': 'PresentationAddress'}], 'type': 'CHOICE'}, 'Extension': {'members': [{'name': 'extnID', 'type': 'OBJECT ' 'IDENTIFIER'}, {'default': False, 'name': 'critical', 'type': 'BOOLEAN'}, {'name': 'extnValue', 'type': 'OCTET ' 'STRING'}], 'type': 'SEQUENCE'}, 'ExtensionAttribute': {'members': [{'name': 'extension-attribute-type', 'restricted-to': [(0, 'ub-extension-attributes')], 'tag': {'kind': 'IMPLICIT', 'number': 0}, 'type': 'INTEGER'}, {'choices': {}, 'name': 'extension-attribute-value', 'tag': {'number': 1}, 'type': 'ANY ' 'DEFINED ' 'BY', 'value': 'extension-attribute-type'}], 'type': 'SEQUENCE'}, 'ExtensionAttributes': {'element': {'type': 'ExtensionAttribute'}, 'size': [(1, 'ub-extension-attributes')], 'type': 'SET OF'}, 'ExtensionORAddressComponents': {'type': 'PDSParameter'}, 'ExtensionPhysicalDeliveryAddressComponents': {'type': 'PDSParameter'}, 'Extensions': {'element': {'type': 'Extension'}, 'size': [(1, 'MAX')], 'type': 'SEQUENCE OF'}, 'LocalPostalAttributes': {'type': 'PDSParameter'}, 'Name': {'members': [{'name': 'rdnSequence', 'type': 'RDNSequence'}], 'type': 'CHOICE'}, 'NetworkAddress': {'type': 'X121Address'}, 'NumericUserIdentifier': {'size': [(1, 'ub-numeric-user-id-length')], 'type': 'NumericString'}, 'ORAddress': {'members': [{'name': 'built-in-standard-attributes', 'type': 'BuiltInStandardAttributes'}, {'name': 'built-in-domain-defined-attributes', 'optional': True, 'type': 'BuiltInDomainDefinedAttributes'}, {'name': 'extension-attributes', 'optional': True, 'type': 'ExtensionAttributes'}], 'type': 'SEQUENCE'}, 'OrganizationName': {'size': [(1, 'ub-organization-name-length')], 'type': 'PrintableString'}, 'OrganizationalUnitName': {'size': [(1, 'ub-organizational-unit-name-length')], 'type': 'PrintableString'}, 'OrganizationalUnitNames': {'element': {'type': 'OrganizationalUnitName'}, 'size': [(1, 'ub-organizational-units')], 'type': 'SEQUENCE ' 'OF'}, 'PDSName': {'size': [(1, 'ub-pds-name-length')], 'type': 'PrintableString'}, 'PDSParameter': {'members': [{'name': 'printable-string', 'optional': True, 'size': [(1, 'ub-pds-parameter-length')], 'type': 'PrintableString'}, {'name': 'teletex-string', 'optional': True, 'size': [(1, 'ub-pds-parameter-length')], 'type': 'TeletexString'}], 'type': 'SET'}, 'PersonalName': {'members': [{'name': 'surname', 'size': [(1, 'ub-surname-length')], 'tag': {'kind': 'IMPLICIT', 'number': 0}, 'type': 'PrintableString'}, {'name': 'given-name', 'optional': True, 'size': [(1, 'ub-given-name-length')], 'tag': {'kind': 'IMPLICIT', 'number': 1}, 'type': 'PrintableString'}, {'name': 'initials', 'optional': True, 'size': [(1, 
'ub-initials-length')], 'tag': {'kind': 'IMPLICIT', 'number': 2}, 'type': 'PrintableString'}, {'name': 'generation-qualifier', 'optional': True, 'size': [(1, 'ub-generation-qualifier-length')], 'tag': {'kind': 'IMPLICIT', 'number': 3}, 'type': 'PrintableString'}], 'type': 'SET'}, 'PhysicalDeliveryCountryName': {'members': [{'name': 'x121-dcc-code', 'size': ['ub-country-name-numeric-length'], 'type': 'NumericString'}, {'name': 'iso-3166-alpha2-code', 'size': ['ub-country-name-alpha-length'], 'type': 'PrintableString'}], 'type': 'CHOICE'}, 'PhysicalDeliveryOfficeName': {'type': 'PDSParameter'}, 'PhysicalDeliveryOfficeNumber': {'type': 'PDSParameter'}, 'PhysicalDeliveryOrganizationName': {'type': 'PDSParameter'}, 'PhysicalDeliveryPersonalName': {'type': 'PDSParameter'}, 'PostOfficeBoxAddress': {'type': 'PDSParameter'}, 'PostalCode': {'members': [{'name': 'numeric-code', 'size': [(1, 'ub-postal-code-length')], 'type': 'NumericString'}, {'name': 'printable-code', 'size': [(1, 'ub-postal-code-length')], 'type': 'PrintableString'}], 'type': 'CHOICE'}, 'PosteRestanteAddress': {'type': 'PDSParameter'}, 'PresentationAddress': {'members': [{'name': 'pSelector', 'optional': True, 'tag': {'kind': 'EXPLICIT', 'number': 0}, 'type': 'OCTET ' 'STRING'}, {'name': 'sSelector', 'optional': True, 'tag': {'kind': 'EXPLICIT', 'number': 1}, 'type': 'OCTET ' 'STRING'}, {'name': 'tSelector', 'optional': True, 'tag': {'kind': 'EXPLICIT', 'number': 2}, 'type': 'OCTET ' 'STRING'}, {'element': {'type': 'OCTET ' 'STRING'}, 'name': 'nAddresses', 'size': [(1, 'MAX')], 'tag': {'kind': 'EXPLICIT', 'number': 3}, 'type': 'SET ' 'OF'}], 'type': 'SEQUENCE'}, 'PrivateDomainName': {'members': [{'name': 'numeric', 'size': [(1, 'ub-domain-name-length')], 'type': 'NumericString'}, {'name': 'printable', 'size': [(1, 'ub-domain-name-length')], 'type': 'PrintableString'}], 'type': 'CHOICE'}, 'RDNSequence': {'element': {'type': 'RelativeDistinguishedName'}, 'type': 'SEQUENCE OF'}, 'RelativeDistinguishedName': {'element': {'type': 'AttributeTypeAndValue'}, 'size': [(1, 'MAX')], 'type': 'SET OF'}, 'StreetAddress': {'type': 'PDSParameter'}, 'SubjectPublicKeyInfo': {'members': [{'name': 'algorithm', 'type': 'AlgorithmIdentifier'}, {'name': 'subjectPublicKey', 'type': 'BIT ' 'STRING'}], 'type': 'SEQUENCE'}, 'TBSCertList': {'members': [{'name': 'version', 'optional': True, 'type': 'Version'}, {'name': 'signature', 'type': 'AlgorithmIdentifier'}, {'name': 'issuer', 'type': 'Name'}, {'name': 'thisUpdate', 'type': 'Time'}, {'name': 'nextUpdate', 'optional': True, 'type': 'Time'}, {'element': {'members': [{'name': 'userCertificate', 'type': 'CertificateSerialNumber'}, {'name': 'revocationDate', 'type': 'Time'}, {'name': 'crlEntryExtensions', 'optional': True, 'type': 'Extensions'}], 'type': 'SEQUENCE'}, 'name': 'revokedCertificates', 'optional': True, 'type': 'SEQUENCE ' 'OF'}, {'name': 'crlExtensions', 'optional': True, 'tag': {'number': 0}, 'type': 'Extensions'}], 'type': 'SEQUENCE'}, 'TBSCertificate': {'members': [{'default': 'v1', 'name': 'version', 'tag': {'number': 0}, 'type': 'Version'}, {'name': 'serialNumber', 'type': 'CertificateSerialNumber'}, {'name': 'signature', 'type': 'AlgorithmIdentifier'}, {'name': 'issuer', 'type': 'Name'}, {'name': 'validity', 'type': 'Validity'}, {'name': 'subject', 'type': 'Name'}, {'name': 'subjectPublicKeyInfo', 'type': 'SubjectPublicKeyInfo'}, {'name': 'issuerUniqueID', 'optional': True, 'tag': {'kind': 'IMPLICIT', 'number': 1}, 'type': 'UniqueIdentifier'}, {'name': 'subjectUniqueID', 'optional': True, 
'tag': {'kind': 'IMPLICIT', 'number': 2}, 'type': 'UniqueIdentifier'}, {'name': 'extensions', 'optional': True, 'tag': {'number': 3}, 'type': 'Extensions'}], 'type': 'SEQUENCE'}, 'TeletexCommonName': {'size': [(1, 'ub-common-name-length')], 'type': 'TeletexString'}, 'TeletexDomainDefinedAttribute': {'members': [{'name': 'type', 'size': [(1, 'ub-domain-defined-attribute-type-length')], 'type': 'TeletexString'}, {'name': 'value', 'size': [(1, 'ub-domain-defined-attribute-value-length')], 'type': 'TeletexString'}], 'type': 'SEQUENCE'}, 'TeletexDomainDefinedAttributes': {'element': {'type': 'TeletexDomainDefinedAttribute'}, 'size': [(1, 'ub-domain-defined-attributes')], 'type': 'SEQUENCE ' 'OF'}, 'TeletexOrganizationName': {'size': [(1, 'ub-organization-name-length')], 'type': 'TeletexString'}, 'TeletexOrganizationalUnitName': {'size': [(1, 'ub-organizational-unit-name-length')], 'type': 'TeletexString'}, 'TeletexOrganizationalUnitNames': {'element': {'type': 'TeletexOrganizationalUnitName'}, 'size': [(1, 'ub-organizational-units')], 'type': 'SEQUENCE ' 'OF'}, 'TeletexPersonalName': {'members': [{'name': 'surname', 'size': [(1, 'ub-surname-length')], 'tag': {'kind': 'IMPLICIT', 'number': 0}, 'type': 'TeletexString'}, {'name': 'given-name', 'optional': True, 'size': [(1, 'ub-given-name-length')], 'tag': {'kind': 'IMPLICIT', 'number': 1}, 'type': 'TeletexString'}, {'name': 'initials', 'optional': True, 'size': [(1, 'ub-initials-length')], 'tag': {'kind': 'IMPLICIT', 'number': 2}, 'type': 'TeletexString'}, {'name': 'generation-qualifier', 'optional': True, 'size': [(1, 'ub-generation-qualifier-length')], 'tag': {'kind': 'IMPLICIT', 'number': 3}, 'type': 'TeletexString'}], 'type': 'SET'}, 'TerminalIdentifier': {'size': [(1, 'ub-terminal-id-length')], 'type': 'PrintableString'}, 'TerminalType': {'named-numbers': {'g3-facsimile': 5, 'g4-facsimile': 6, 'ia5-terminal': 7, 'teletex': 4, 'telex': 3, 'videotex': 8}, 'restricted-to': [(0, 'ub-integer-options')], 'type': 'INTEGER'}, 'Time': {'members': [{'name': 'utcTime', 'type': 'UTCTime'}, {'name': 'generalTime', 'type': 'GeneralizedTime'}], 'type': 'CHOICE'}, 'UnformattedPostalAddress': {'members': [{'element': {'size': [(1, 'ub-pds-parameter-length')], 'type': 'PrintableString'}, 'name': 'printable-address', 'optional': True, 'size': [(1, 'ub-pds-physical-address-lines')], 'type': 'SEQUENCE ' 'OF'}, {'name': 'teletex-string', 'optional': True, 'size': [(1, 'ub-unformatted-address-length')], 'type': 'TeletexString'}], 'type': 'SET'}, 'UniqueIdentifier': {'type': 'BIT STRING'}, 'UniquePostalName': {'type': 'PDSParameter'}, 'Validity': {'members': [{'name': 'notBefore', 'type': 'Time'}, {'name': 'notAfter', 'type': 'Time'}], 'type': 'SEQUENCE'}, 'Version': {'named-numbers': {'v1': 0, 'v2': 1, 'v3': 2}, 'type': 'INTEGER'}, 'X121Address': {'size': [(1, 'ub-x121-address-length')], 'type': 'NumericString'}, 'X520CommonName': {'members': [{'name': 'teletexString', 'size': [(1, 'ub-common-name')], 'type': 'TeletexString'}, {'name': 'printableString', 'size': [(1, 'ub-common-name')], 'type': 'PrintableString'}, {'name': 'universalString', 'size': [(1, 'ub-common-name')], 'type': 'UniversalString'}, {'name': 'utf8String', 'size': [(1, 'ub-common-name')], 'type': 'UTF8String'}, {'name': 'bmpString', 'size': [(1, 'ub-common-name')], 'type': 'BMPString'}], 'type': 'CHOICE'}, 'X520LocalityName': {'members': [{'name': 'teletexString', 'size': [(1, 'ub-locality-name')], 'type': 'TeletexString'}, {'name': 'printableString', 'size': [(1, 'ub-locality-name')], 'type': 
'PrintableString'}, {'name': 'universalString', 'size': [(1, 'ub-locality-name')], 'type': 'UniversalString'}, {'name': 'utf8String', 'size': [(1, 'ub-locality-name')], 'type': 'UTF8String'}, {'name': 'bmpString', 'size': [(1, 'ub-locality-name')], 'type': 'BMPString'}], 'type': 'CHOICE'}, 'X520OrganizationName': {'members': [{'name': 'teletexString', 'size': [(1, 'ub-organization-name')], 'type': 'TeletexString'}, {'name': 'printableString', 'size': [(1, 'ub-organization-name')], 'type': 'PrintableString'}, {'name': 'universalString', 'size': [(1, 'ub-organization-name')], 'type': 'UniversalString'}, {'name': 'utf8String', 'size': [(1, 'ub-organization-name')], 'type': 'UTF8String'}, {'name': 'bmpString', 'size': [(1, 'ub-organization-name')], 'type': 'BMPString'}], 'type': 'CHOICE'}, 'X520OrganizationalUnitName': {'members': [{'name': 'teletexString', 'size': [(1, 'ub-organizational-unit-name')], 'type': 'TeletexString'}, {'name': 'printableString', 'size': [(1, 'ub-organizational-unit-name')], 'type': 'PrintableString'}, {'name': 'universalString', 'size': [(1, 'ub-organizational-unit-name')], 'type': 'UniversalString'}, {'name': 'utf8String', 'size': [(1, 'ub-organizational-unit-name')], 'type': 'UTF8String'}, {'name': 'bmpString', 'size': [(1, 'ub-organizational-unit-name')], 'type': 'BMPString'}], 'type': 'CHOICE'}, 'X520Pseudonym': {'members': [{'name': 'teletexString', 'size': [(1, 'ub-pseudonym')], 'type': 'TeletexString'}, {'name': 'printableString', 'size': [(1, 'ub-pseudonym')], 'type': 'PrintableString'}, {'name': 'universalString', 'size': [(1, 'ub-pseudonym')], 'type': 'UniversalString'}, {'name': 'utf8String', 'size': [(1, 'ub-pseudonym')], 'type': 'UTF8String'}, {'name': 'bmpString', 'size': [(1, 'ub-pseudonym')], 'type': 'BMPString'}], 'type': 'CHOICE'}, 'X520SerialNumber': {'size': [(1, 'ub-serial-number')], 'type': 'PrintableString'}, 'X520StateOrProvinceName': {'members': [{'name': 'teletexString', 'size': [(1, 'ub-state-name')], 'type': 'TeletexString'}, {'name': 'printableString', 'size': [(1, 'ub-state-name')], 'type': 'PrintableString'}, {'name': 'universalString', 'size': [(1, 'ub-state-name')], 'type': 'UniversalString'}, {'name': 'utf8String', 'size': [(1, 'ub-state-name')], 'type': 'UTF8String'}, {'name': 'bmpString', 'size': [(1, 'ub-state-name')], 'type': 'BMPString'}], 'type': 'CHOICE'}, 'X520Title': {'members': [{'name': 'teletexString', 'size': [(1, 'ub-title')], 'type': 'TeletexString'}, {'name': 'printableString', 'size': [(1, 'ub-title')], 'type': 'PrintableString'}, {'name': 'universalString', 'size': [(1, 'ub-title')], 'type': 'UniversalString'}, {'name': 'utf8String', 'size': [(1, 'ub-title')], 'type': 'UTF8String'}, {'name': 'bmpString', 'size': [(1, 'ub-title')], 'type': 'BMPString'}], 'type': 'CHOICE'}, 'X520countryName': {'size': [2], 'type': 'PrintableString'}, 'X520dnQualifier': {'type': 'PrintableString'}, 'X520name': {'members': [{'name': 'teletexString', 'size': [(1, 'ub-name')], 'type': 'TeletexString'}, {'name': 'printableString', 'size': [(1, 'ub-name')], 'type': 'PrintableString'}, {'name': 'universalString', 'size': [(1, 'ub-name')], 'type': 'UniversalString'}, {'name': 'utf8String', 'size': [(1, 'ub-name')], 'type': 'UTF8String'}, {'name': 'bmpString', 'size': [(1, 'ub-name')], 'type': 'BMPString'}], 'type': 'CHOICE'}}, 'values': {'common-name': {'type': 'INTEGER', 'value': 1}, 'extended-network-address': {'type': 'INTEGER', 'value': 22}, 'extension-OR-address-components': {'type': 'INTEGER', 'value': 12}, 
'extension-physical-delivery-address-components': {'type': 'INTEGER', 'value': 15}, 'id-ad': {'type': 'OBJECT IDENTIFIER', 'value': ['id-pkix', 48]}, 'id-ad-caIssuers': {'type': 'OBJECT IDENTIFIER', 'value': ['id-ad', 2]}, 'id-ad-caRepository': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ad', 5]}, 'id-ad-ocsp': {'type': 'OBJECT IDENTIFIER', 'value': ['id-ad', 1]}, 'id-ad-timeStamping': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ad', 3]}, 'id-at': {'type': 'OBJECT IDENTIFIER', 'value': [('joint-iso-ccitt', 2), ('ds', 5), 4]}, 'id-at-commonName': {'type': 'AttributeType', 'value': None}, 'id-at-countryName': {'type': 'AttributeType', 'value': None}, 'id-at-dnQualifier': {'type': 'AttributeType', 'value': None}, 'id-at-generationQualifier': {'type': 'AttributeType', 'value': None}, 'id-at-givenName': {'type': 'AttributeType', 'value': None}, 'id-at-initials': {'type': 'AttributeType', 'value': None}, 'id-at-localityName': {'type': 'AttributeType', 'value': None}, 'id-at-name': {'type': 'AttributeType', 'value': None}, 'id-at-organizationName': {'type': 'AttributeType', 'value': None}, 'id-at-organizationalUnitName': {'type': 'AttributeType', 'value': None}, 'id-at-pseudonym': {'type': 'AttributeType', 'value': None}, 'id-at-serialNumber': {'type': 'AttributeType', 'value': None}, 'id-at-stateOrProvinceName': {'type': 'AttributeType', 'value': None}, 'id-at-surname': {'type': 'AttributeType', 'value': None}, 'id-at-title': {'type': 'AttributeType', 'value': None}, 'id-domainComponent': {'type': 'AttributeType', 'value': None}, 'id-emailAddress': {'type': 'AttributeType', 'value': None}, 'id-kp': {'type': 'OBJECT IDENTIFIER', 'value': ['id-pkix', 3]}, 'id-pe': {'type': 'OBJECT IDENTIFIER', 'value': ['id-pkix', 1]}, 'id-pkix': {'type': 'OBJECT IDENTIFIER', 'value': [('iso', 1), ('identified-organization', 3), ('dod', 6), ('internet', 1), ('security', 5), ('mechanisms', 5), ('pkix', 7)]}, 'id-qt': {'type': 'OBJECT IDENTIFIER', 'value': ['id-pkix', 2]}, 'id-qt-cps': {'type': 'OBJECT IDENTIFIER', 'value': ['id-qt', 1]}, 'id-qt-unotice': {'type': 'OBJECT IDENTIFIER', 'value': ['id-qt', 2]}, 'local-postal-attributes': {'type': 'INTEGER', 'value': 21}, 'pds-name': {'type': 'INTEGER', 'value': 7}, 'physical-delivery-country-name': {'type': 'INTEGER', 'value': 8}, 'physical-delivery-office-name': {'type': 'INTEGER', 'value': 10}, 'physical-delivery-office-number': {'type': 'INTEGER', 'value': 11}, 'physical-delivery-organization-name': {'type': 'INTEGER', 'value': 14}, 'physical-delivery-personal-name': {'type': 'INTEGER', 'value': 13}, 'pkcs-9': {'type': 'OBJECT IDENTIFIER', 'value': [('iso', 1), ('member-body', 2), ('us', 840), ('rsadsi', 113549), ('pkcs', 1), 9]}, 'post-office-box-address': {'type': 'INTEGER', 'value': 18}, 'postal-code': {'type': 'INTEGER', 'value': 9}, 'poste-restante-address': {'type': 'INTEGER', 'value': 19}, 'street-address': {'type': 'INTEGER', 'value': 17}, 'teletex-common-name': {'type': 'INTEGER', 'value': 2}, 'teletex-domain-defined-attributes': {'type': 'INTEGER', 'value': 6}, 'teletex-organization-name': {'type': 'INTEGER', 'value': 3}, 'teletex-organizational-unit-names': {'type': 'INTEGER', 'value': 5}, 'teletex-personal-name': {'type': 'INTEGER', 'value': 4}, 'terminal-type': {'type': 'INTEGER', 'value': 23}, 'ub-common-name': {'type': 'INTEGER', 'value': 64}, 'ub-common-name-length': {'type': 'INTEGER', 'value': 64}, 'ub-country-name-alpha-length': {'type': 'INTEGER', 'value': 2}, 'ub-country-name-numeric-length': {'type': 'INTEGER', 'value': 3}, 
'ub-domain-defined-attribute-type-length': {'type': 'INTEGER', 'value': 8}, 'ub-domain-defined-attribute-value-length': {'type': 'INTEGER', 'value': 128}, 'ub-domain-defined-attributes': {'type': 'INTEGER', 'value': 4}, 'ub-domain-name-length': {'type': 'INTEGER', 'value': 16}, 'ub-e163-4-number-length': {'type': 'INTEGER', 'value': 15}, 'ub-e163-4-sub-address-length': {'type': 'INTEGER', 'value': 40}, 'ub-emailaddress-length': {'type': 'INTEGER', 'value': 255}, 'ub-extension-attributes': {'type': 'INTEGER', 'value': 256}, 'ub-generation-qualifier-length': {'type': 'INTEGER', 'value': 3}, 'ub-given-name-length': {'type': 'INTEGER', 'value': 16}, 'ub-initials-length': {'type': 'INTEGER', 'value': 5}, 'ub-integer-options': {'type': 'INTEGER', 'value': 256}, 'ub-locality-name': {'type': 'INTEGER', 'value': 128}, 'ub-match': {'type': 'INTEGER', 'value': 128}, 'ub-name': {'type': 'INTEGER', 'value': 32768}, 'ub-numeric-user-id-length': {'type': 'INTEGER', 'value': 32}, 'ub-organization-name': {'type': 'INTEGER', 'value': 64}, 'ub-organization-name-length': {'type': 'INTEGER', 'value': 64}, 'ub-organizational-unit-name': {'type': 'INTEGER', 'value': 64}, 'ub-organizational-unit-name-length': {'type': 'INTEGER', 'value': 32}, 'ub-organizational-units': {'type': 'INTEGER', 'value': 4}, 'ub-pds-name-length': {'type': 'INTEGER', 'value': 16}, 'ub-pds-parameter-length': {'type': 'INTEGER', 'value': 30}, 'ub-pds-physical-address-lines': {'type': 'INTEGER', 'value': 6}, 'ub-postal-code-length': {'type': 'INTEGER', 'value': 16}, 'ub-pseudonym': {'type': 'INTEGER', 'value': 128}, 'ub-serial-number': {'type': 'INTEGER', 'value': 64}, 'ub-state-name': {'type': 'INTEGER', 'value': 128}, 'ub-surname-length': {'type': 'INTEGER', 'value': 40}, 'ub-terminal-id-length': {'type': 'INTEGER', 'value': 24}, 'ub-title': {'type': 'INTEGER', 'value': 64}, 'ub-unformatted-address-length': {'type': 'INTEGER', 'value': 180}, 'ub-x121-address-length': {'type': 'INTEGER', 'value': 16}, 'unformatted-postal-address': {'type': 'INTEGER', 'value': 16}, 'unique-postal-name': {'type': 'INTEGER', 'value': 20}}}, 'PKIX1Implicit88': {'extensibility-implied': False, 'imports': {'PKIX1Explicit88': ['Attribute', 'BMPString', 'CertificateSerialNumber', 'DirectoryString', 'Name', 'ORAddress', 'RelativeDistinguishedName', 'UTF8String', 'id-kp', 'id-pe', 'id-qt-cps', 'id-qt-unotice']}, 'object-classes': {}, 'object-sets': {}, 'tags': 'IMPLICIT', 'types': {'AccessDescription': {'members': [{'name': 'accessMethod', 'type': 'OBJECT ' 'IDENTIFIER'}, {'name': 'accessLocation', 'type': 'GeneralName'}], 'type': 'SEQUENCE'}, 'AnotherName': {'members': [{'name': 'type-id', 'type': 'OBJECT ' 'IDENTIFIER'}, {'choices': {}, 'name': 'value', 'tag': {'kind': 'EXPLICIT', 'number': 0}, 'type': 'ANY ' 'DEFINED ' 'BY', 'value': 'type-id'}], 'type': 'SEQUENCE'}, 'AuthorityInfoAccessSyntax': {'element': {'type': 'AccessDescription'}, 'size': [(1, 'MAX')], 'type': 'SEQUENCE ' 'OF'}, 'AuthorityKeyIdentifier': {'members': [{'name': 'keyIdentifier', 'optional': True, 'tag': {'number': 0}, 'type': 'KeyIdentifier'}, {'name': 'authorityCertIssuer', 'optional': True, 'tag': {'number': 1}, 'type': 'GeneralNames'}, {'name': 'authorityCertSerialNumber', 'optional': True, 'tag': {'number': 2}, 'type': 'CertificateSerialNumber'}], 'type': 'SEQUENCE'}, 'BaseCRLNumber': {'type': 'CRLNumber'}, 'BaseDistance': {'restricted-to': [(0, 'MAX')], 'type': 'INTEGER'}, 'BasicConstraints': {'members': [{'default': False, 'name': 'cA', 'type': 'BOOLEAN'}, {'name': 'pathLenConstraint', 
'optional': True, 'restricted-to': [(0, 'MAX')], 'type': 'INTEGER'}], 'type': 'SEQUENCE'}, 'CPSuri': {'type': 'IA5String'}, 'CRLDistributionPoints': {'element': {'type': 'DistributionPoint'}, 'size': [(1, 'MAX')], 'type': 'SEQUENCE OF'}, 'CRLNumber': {'restricted-to': [(0, 'MAX')], 'type': 'INTEGER'}, 'CRLReason': {'type': 'ENUMERATED', 'values': [('unspecified', 0), ('keyCompromise', 1), ('cACompromise', 2), ('affiliationChanged', 3), ('superseded', 4), ('cessationOfOperation', 5), ('certificateHold', 6), ('removeFromCRL', 8), ('privilegeWithdrawn', 9), ('aACompromise', 10)]}, 'CertPolicyId': {'type': 'OBJECT IDENTIFIER'}, 'CertificateIssuer': {'type': 'GeneralNames'}, 'CertificatePolicies': {'element': {'type': 'PolicyInformation'}, 'size': [(1, 'MAX')], 'type': 'SEQUENCE OF'}, 'DisplayText': {'members': [{'name': 'ia5String', 'size': [(1, 200)], 'type': 'IA5String'}, {'name': 'visibleString', 'size': [(1, 200)], 'type': 'VisibleString'}, {'name': 'bmpString', 'size': [(1, 200)], 'type': 'BMPString'}, {'name': 'utf8String', 'size': [(1, 200)], 'type': 'UTF8String'}], 'type': 'CHOICE'}, 'DistributionPoint': {'members': [{'name': 'distributionPoint', 'optional': True, 'tag': {'number': 0}, 'type': 'DistributionPointName'}, {'name': 'reasons', 'optional': True, 'tag': {'number': 1}, 'type': 'ReasonFlags'}, {'name': 'cRLIssuer', 'optional': True, 'tag': {'number': 2}, 'type': 'GeneralNames'}], 'type': 'SEQUENCE'}, 'DistributionPointName': {'members': [{'name': 'fullName', 'tag': {'number': 0}, 'type': 'GeneralNames'}, {'name': 'nameRelativeToCRLIssuer', 'tag': {'number': 1}, 'type': 'RelativeDistinguishedName'}], 'type': 'CHOICE'}, 'EDIPartyName': {'members': [{'name': 'nameAssigner', 'optional': True, 'tag': {'number': 0}, 'type': 'DirectoryString'}, {'name': 'partyName', 'tag': {'number': 1}, 'type': 'DirectoryString'}], 'type': 'SEQUENCE'}, 'ExtKeyUsageSyntax': {'element': {'type': 'KeyPurposeId'}, 'size': [(1, 'MAX')], 'type': 'SEQUENCE OF'}, 'FreshestCRL': {'type': 'CRLDistributionPoints'}, 'GeneralName': {'members': [{'name': 'otherName', 'tag': {'number': 0}, 'type': 'AnotherName'}, {'name': 'rfc822Name', 'tag': {'number': 1}, 'type': 'IA5String'}, {'name': 'dNSName', 'tag': {'number': 2}, 'type': 'IA5String'}, {'name': 'x400Address', 'tag': {'number': 3}, 'type': 'ORAddress'}, {'name': 'directoryName', 'tag': {'number': 4}, 'type': 'Name'}, {'name': 'ediPartyName', 'tag': {'number': 5}, 'type': 'EDIPartyName'}, {'name': 'uniformResourceIdentifier', 'tag': {'number': 6}, 'type': 'IA5String'}, {'name': 'iPAddress', 'tag': {'number': 7}, 'type': 'OCTET ' 'STRING'}, {'name': 'registeredID', 'tag': {'number': 8}, 'type': 'OBJECT ' 'IDENTIFIER'}], 'type': 'CHOICE'}, 'GeneralNames': {'element': {'type': 'GeneralName'}, 'size': [(1, 'MAX')], 'type': 'SEQUENCE OF'}, 'GeneralSubtree': {'members': [{'name': 'base', 'type': 'GeneralName'}, {'default': 0, 'name': 'minimum', 'tag': {'number': 0}, 'type': 'BaseDistance'}, {'name': 'maximum', 'optional': True, 'tag': {'number': 1}, 'type': 'BaseDistance'}], 'type': 'SEQUENCE'}, 'GeneralSubtrees': {'element': {'type': 'GeneralSubtree'}, 'size': [(1, 'MAX')], 'type': 'SEQUENCE OF'}, 'HoldInstructionCode': {'type': 'OBJECT ' 'IDENTIFIER'}, 'InhibitAnyPolicy': {'type': 'SkipCerts'}, 'InvalidityDate': {'type': 'GeneralizedTime'}, 'IssuerAltName': {'type': 'GeneralNames'}, 'IssuingDistributionPoint': {'members': [{'name': 'distributionPoint', 'optional': True, 'tag': {'number': 0}, 'type': 'DistributionPointName'}, {'default': False, 'name': 
'onlyContainsUserCerts', 'tag': {'number': 1}, 'type': 'BOOLEAN'}, {'default': False, 'name': 'onlyContainsCACerts', 'tag': {'number': 2}, 'type': 'BOOLEAN'}, {'name': 'onlySomeReasons', 'optional': True, 'tag': {'number': 3}, 'type': 'ReasonFlags'}, {'default': False, 'name': 'indirectCRL', 'tag': {'number': 4}, 'type': 'BOOLEAN'}, {'default': False, 'name': 'onlyContainsAttributeCerts', 'tag': {'number': 5}, 'type': 'BOOLEAN'}], 'type': 'SEQUENCE'}, 'KeyIdentifier': {'type': 'OCTET STRING'}, 'KeyPurposeId': {'type': 'OBJECT IDENTIFIER'}, 'KeyUsage': {'named-bits': [('digitalSignature', '0'), ('nonRepudiation', '1'), ('keyEncipherment', '2'), ('dataEncipherment', '3'), ('keyAgreement', '4'), ('keyCertSign', '5'), ('cRLSign', '6'), ('encipherOnly', '7'), ('decipherOnly', '8')], 'type': 'BIT STRING'}, 'NameConstraints': {'members': [{'name': 'permittedSubtrees', 'optional': True, 'tag': {'number': 0}, 'type': 'GeneralSubtrees'}, {'name': 'excludedSubtrees', 'optional': True, 'tag': {'number': 1}, 'type': 'GeneralSubtrees'}], 'type': 'SEQUENCE'}, 'NoticeReference': {'members': [{'name': 'organization', 'type': 'DisplayText'}, {'element': {'type': 'INTEGER'}, 'name': 'noticeNumbers', 'type': 'SEQUENCE ' 'OF'}], 'type': 'SEQUENCE'}, 'PolicyConstraints': {'members': [{'name': 'requireExplicitPolicy', 'optional': True, 'tag': {'number': 0}, 'type': 'SkipCerts'}, {'name': 'inhibitPolicyMapping', 'optional': True, 'tag': {'number': 1}, 'type': 'SkipCerts'}], 'type': 'SEQUENCE'}, 'PolicyInformation': {'members': [{'name': 'policyIdentifier', 'type': 'CertPolicyId'}, {'element': {'type': 'PolicyQualifierInfo'}, 'name': 'policyQualifiers', 'optional': True, 'size': [(1, 'MAX')], 'type': 'SEQUENCE ' 'OF'}], 'type': 'SEQUENCE'}, 'PolicyMappings': {'element': {'members': [{'name': 'issuerDomainPolicy', 'type': 'CertPolicyId'}, {'name': 'subjectDomainPolicy', 'type': 'CertPolicyId'}], 'type': 'SEQUENCE'}, 'size': [(1, 'MAX')], 'type': 'SEQUENCE OF'}, 'PolicyQualifierId': {'type': 'OBJECT ' 'IDENTIFIER'}, 'PolicyQualifierInfo': {'members': [{'name': 'policyQualifierId', 'type': 'PolicyQualifierId'}, {'choices': {}, 'name': 'qualifier', 'type': 'ANY ' 'DEFINED ' 'BY', 'value': 'policyQualifierId'}], 'type': 'SEQUENCE'}, 'PrivateKeyUsagePeriod': {'members': [{'name': 'notBefore', 'optional': True, 'tag': {'number': 0}, 'type': 'GeneralizedTime'}, {'name': 'notAfter', 'optional': True, 'tag': {'number': 1}, 'type': 'GeneralizedTime'}], 'type': 'SEQUENCE'}, 'ReasonFlags': {'named-bits': [('unused', '0'), ('keyCompromise', '1'), ('cACompromise', '2'), ('affiliationChanged', '3'), ('superseded', '4'), ('cessationOfOperation', '5'), ('certificateHold', '6'), ('privilegeWithdrawn', '7'), ('aACompromise', '8')], 'type': 'BIT STRING'}, 'SkipCerts': {'restricted-to': [(0, 'MAX')], 'type': 'INTEGER'}, 'SubjectAltName': {'type': 'GeneralNames'}, 'SubjectDirectoryAttributes': {'element': {'type': 'Attribute'}, 'size': [(1, 'MAX')], 'type': 'SEQUENCE ' 'OF'}, 'SubjectInfoAccessSyntax': {'element': {'type': 'AccessDescription'}, 'size': [(1, 'MAX')], 'type': 'SEQUENCE ' 'OF'}, 'SubjectKeyIdentifier': {'type': 'KeyIdentifier'}, 'UserNotice': {'members': [{'name': 'noticeRef', 'optional': True, 'type': 'NoticeReference'}, {'name': 'explicitText', 'optional': True, 'type': 'DisplayText'}], 'type': 'SEQUENCE'}}, 'values': {'anyExtendedKeyUsage': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce-extKeyUsage', 0]}, 'anyPolicy': {'type': 'OBJECT IDENTIFIER', 'value': ['id-ce-certificatePolicies', 0]}, 'holdInstruction': 
{'type': 'OBJECT IDENTIFIER', 'value': [('joint-iso-itu-t', 2), ('member-body', 2), ('us', 840), ('x9cm', 10040), 2]}, 'id-ce': {'type': 'OBJECT IDENTIFIER', 'value': [('joint-iso-ccitt', 2), ('ds', 5), 29]}, 'id-ce-authorityKeyIdentifier': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 35]}, 'id-ce-basicConstraints': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 19]}, 'id-ce-cRLDistributionPoints': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 31]}, 'id-ce-cRLNumber': {'type': 'OBJECT IDENTIFIER', 'value': ['id-ce', 20]}, 'id-ce-cRLReasons': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 21]}, 'id-ce-certificateIssuer': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 29]}, 'id-ce-certificatePolicies': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 32]}, 'id-ce-deltaCRLIndicator': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 27]}, 'id-ce-extKeyUsage': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 37]}, 'id-ce-freshestCRL': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 46]}, 'id-ce-holdInstructionCode': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 23]}, 'id-ce-inhibitAnyPolicy': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 54]}, 'id-ce-invalidityDate': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 24]}, 'id-ce-issuerAltName': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 18]}, 'id-ce-issuingDistributionPoint': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 28]}, 'id-ce-keyUsage': {'type': 'OBJECT IDENTIFIER', 'value': ['id-ce', 15]}, 'id-ce-nameConstraints': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 30]}, 'id-ce-policyConstraints': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 36]}, 'id-ce-policyMappings': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 33]}, 'id-ce-privateKeyUsagePeriod': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 16]}, 'id-ce-subjectAltName': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 17]}, 'id-ce-subjectDirectoryAttributes': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 9]}, 'id-ce-subjectKeyIdentifier': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-ce', 14]}, 'id-holdinstruction-callissuer': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['holdInstruction', 2]}, 'id-holdinstruction-none': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['holdInstruction', 1]}, 'id-holdinstruction-reject': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['holdInstruction', 3]}, 'id-kp-OCSPSigning': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-kp', 9]}, 'id-kp-clientAuth': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-kp', 2]}, 'id-kp-codeSigning': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-kp', 3]}, 'id-kp-emailProtection': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-kp', 4]}, 'id-kp-serverAuth': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-kp', 1]}, 'id-kp-timeStamping': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-kp', 8]}, 'id-pe-authorityInfoAccess': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-pe', 1]}, 'id-pe-subjectInfoAccess': {'type': 'OBJECT ' 'IDENTIFIER', 'value': ['id-pe', 11]}}}}
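In the module dictionaries above, size constraints such as (1, 'ub-common-name-length') refer by name to the INTEGER upper bounds collected under each module's 'values' key; the overall layout resembles the parse output of ASN.1 tooling such as asn1tools, though the dump does not say which tool produced it. A small sketch in plain Python (no ASN.1 library assumed) that resolves such a bound against the EXPECTED structure:

def resolve_bound(module, bound):
    """Resolve one constraint endpoint: ints pass through, 'MAX' stays
    symbolic, and any other name is looked up among the module's values."""
    if isinstance(bound, int) or bound == 'MAX':
        return bound
    return module['values'][bound]['value']

pkix = EXPECTED['PKIX1Explicit88']
low, high = pkix['types']['CommonName']['size'][0]
print(resolve_bound(pkix, low), resolve_bound(pkix, high))  # 1 64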
[ "EXPECTED = {'PKIX1Explicit88': {'extensibility-implied': False,\n 'imports': {},\n 'object-classes': {},\n 'object-sets': {},\n 'tags': 'EXPLICIT',\n 'types': {'AdministrationDomainName': {'members': [{'name': 'numeric',\n 'size': [(0,\n 'ub-domain-name-length')],\n 'type': 'NumericString'},\n {'name': 'printable',\n 'size': [(0,\n 'ub-domain-name-length')],\n 'type': 'PrintableString'}],\n 'tag': {'class': 'APPLICATION',\n 'number': 2},\n 'type': 'CHOICE'},\n 'AlgorithmIdentifier': {'members': [{'name': 'algorithm',\n 'type': 'OBJECT '\n 'IDENTIFIER'},\n {'choices': {},\n 'name': 'parameters',\n 'optional': True,\n 'type': 'ANY '\n 'DEFINED '\n 'BY',\n 'value': 'algorithm'}],\n 'type': 'SEQUENCE'},\n 'Attribute': {'members': [{'name': 'type',\n 'type': 'AttributeType'},\n {'element': {'type': 'AttributeValue'},\n 'name': 'values',\n 'type': 'SET OF'}],\n 'type': 'SEQUENCE'},\n 'AttributeType': {'type': 'OBJECT IDENTIFIER'},\n 'AttributeTypeAndValue': {'members': [{'name': 'type',\n 'type': 'AttributeType'},\n {'name': 'value',\n 'type': 'AttributeValue'}],\n 'type': 'SEQUENCE'},\n 'AttributeValue': {'choices': {},\n 'type': 'ANY DEFINED BY',\n 'value': 'type'},\n 'BuiltInDomainDefinedAttribute': {'members': [{'name': 'type',\n 'size': [(1,\n 'ub-domain-defined-attribute-type-length')],\n 'type': 'PrintableString'},\n {'name': 'value',\n 'size': [(1,\n 'ub-domain-defined-attribute-value-length')],\n 'type': 'PrintableString'}],\n 'type': 'SEQUENCE'},\n 'BuiltInDomainDefinedAttributes': {'element': {'type': 'BuiltInDomainDefinedAttribute'},\n 'size': [(1,\n 'ub-domain-defined-attributes')],\n 'type': 'SEQUENCE '\n 'OF'},\n 'BuiltInStandardAttributes': {'members': [{'name': 'country-name',\n 'optional': True,\n 'type': 'CountryName'},\n {'name': 'administration-domain-name',\n 'optional': True,\n 'type': 'AdministrationDomainName'},\n {'name': 'network-address',\n 'optional': True,\n 'tag': {'kind': 'IMPLICIT',\n 'number': 0},\n 'type': 'NetworkAddress'},\n {'name': 'terminal-identifier',\n 'optional': True,\n 'tag': {'kind': 'IMPLICIT',\n 'number': 1},\n 'type': 'TerminalIdentifier'},\n {'name': 'private-domain-name',\n 'optional': True,\n 'tag': {'number': 2},\n 'type': 'PrivateDomainName'},\n {'name': 'organization-name',\n 'optional': True,\n 'tag': {'kind': 'IMPLICIT',\n 'number': 3},\n 'type': 'OrganizationName'},\n {'name': 'numeric-user-identifier',\n 'optional': True,\n 'tag': {'kind': 'IMPLICIT',\n 'number': 4},\n 'type': 'NumericUserIdentifier'},\n {'name': 'personal-name',\n 'optional': True,\n 'tag': {'kind': 'IMPLICIT',\n 'number': 5},\n 'type': 'PersonalName'},\n {'name': 'organizational-unit-names',\n 'optional': True,\n 'tag': {'kind': 'IMPLICIT',\n 'number': 6},\n 'type': 'OrganizationalUnitNames'}],\n 'type': 'SEQUENCE'},\n 'Certificate': {'members': [{'name': 'tbsCertificate',\n 'type': 'TBSCertificate'},\n {'name': 'signatureAlgorithm',\n 'type': 'AlgorithmIdentifier'},\n {'name': 'signature',\n 'type': 'BIT '\n 'STRING'}],\n 'type': 'SEQUENCE'},\n 'CertificateList': {'members': [{'name': 'tbsCertList',\n 'type': 'TBSCertList'},\n {'name': 'signatureAlgorithm',\n 'type': 'AlgorithmIdentifier'},\n {'name': 'signature',\n 'type': 'BIT '\n 'STRING'}],\n 'type': 'SEQUENCE'},\n 'CertificateSerialNumber': {'type': 'INTEGER'},\n 'CommonName': {'size': [(1,\n 'ub-common-name-length')],\n 'type': 'PrintableString'},\n 'CountryName': {'members': [{'name': 'x121-dcc-code',\n 'size': ['ub-country-name-numeric-length'],\n 'type': 'NumericString'},\n {'name': 
'iso-3166-alpha2-code',\n 'size': ['ub-country-name-alpha-length'],\n 'type': 'PrintableString'}],\n 'tag': {'class': 'APPLICATION',\n 'number': 1},\n 'type': 'CHOICE'},\n 'DirectoryString': {'members': [{'name': 'teletexString',\n 'size': [(1,\n 'MAX')],\n 'type': 'TeletexString'},\n {'name': 'printableString',\n 'size': [(1,\n 'MAX')],\n 'type': 'PrintableString'},\n {'name': 'universalString',\n 'size': [(1,\n 'MAX')],\n 'type': 'UniversalString'},\n {'name': 'utf8String',\n 'size': [(1,\n 'MAX')],\n 'type': 'UTF8String'},\n {'name': 'bmpString',\n 'size': [(1,\n 'MAX')],\n 'type': 'BMPString'}],\n 'type': 'CHOICE'},\n 'DistinguishedName': {'type': 'RDNSequence'},\n 'DomainComponent': {'type': 'IA5String'},\n 'EmailAddress': {'size': [(1,\n 'ub-emailaddress-length')],\n 'type': 'IA5String'},\n 'ExtendedNetworkAddress': {'members': [{'members': [{'name': 'number',\n 'size': [(1,\n 'ub-e163-4-number-length')],\n 'tag': {'kind': 'IMPLICIT',\n 'number': 0},\n 'type': 'NumericString'},\n {'name': 'sub-address',\n 'optional': True,\n 'size': [(1,\n 'ub-e163-4-sub-address-length')],\n 'tag': {'kind': 'IMPLICIT',\n 'number': 1},\n 'type': 'NumericString'}],\n 'name': 'e163-4-address',\n 'type': 'SEQUENCE'},\n {'name': 'psap-address',\n 'tag': {'kind': 'IMPLICIT',\n 'number': 0},\n 'type': 'PresentationAddress'}],\n 'type': 'CHOICE'},\n 'Extension': {'members': [{'name': 'extnID',\n 'type': 'OBJECT '\n 'IDENTIFIER'},\n {'default': False,\n 'name': 'critical',\n 'type': 'BOOLEAN'},\n {'name': 'extnValue',\n 'type': 'OCTET '\n 'STRING'}],\n 'type': 'SEQUENCE'},\n 'ExtensionAttribute': {'members': [{'name': 'extension-attribute-type',\n 'restricted-to': [(0,\n 'ub-extension-attributes')],\n 'tag': {'kind': 'IMPLICIT',\n 'number': 0},\n 'type': 'INTEGER'},\n {'choices': {},\n 'name': 'extension-attribute-value',\n 'tag': {'number': 1},\n 'type': 'ANY '\n 'DEFINED '\n 'BY',\n 'value': 'extension-attribute-type'}],\n 'type': 'SEQUENCE'},\n 'ExtensionAttributes': {'element': {'type': 'ExtensionAttribute'},\n 'size': [(1,\n 'ub-extension-attributes')],\n 'type': 'SET OF'},\n 'ExtensionORAddressComponents': {'type': 'PDSParameter'},\n 'ExtensionPhysicalDeliveryAddressComponents': {'type': 'PDSParameter'},\n 'Extensions': {'element': {'type': 'Extension'},\n 'size': [(1, 'MAX')],\n 'type': 'SEQUENCE OF'},\n 'LocalPostalAttributes': {'type': 'PDSParameter'},\n 'Name': {'members': [{'name': 'rdnSequence',\n 'type': 'RDNSequence'}],\n 'type': 'CHOICE'},\n 'NetworkAddress': {'type': 'X121Address'},\n 'NumericUserIdentifier': {'size': [(1,\n 'ub-numeric-user-id-length')],\n 'type': 'NumericString'},\n 'ORAddress': {'members': [{'name': 'built-in-standard-attributes',\n 'type': 'BuiltInStandardAttributes'},\n {'name': 'built-in-domain-defined-attributes',\n 'optional': True,\n 'type': 'BuiltInDomainDefinedAttributes'},\n {'name': 'extension-attributes',\n 'optional': True,\n 'type': 'ExtensionAttributes'}],\n 'type': 'SEQUENCE'},\n 'OrganizationName': {'size': [(1,\n 'ub-organization-name-length')],\n 'type': 'PrintableString'},\n 'OrganizationalUnitName': {'size': [(1,\n 'ub-organizational-unit-name-length')],\n 'type': 'PrintableString'},\n 'OrganizationalUnitNames': {'element': {'type': 'OrganizationalUnitName'},\n 'size': [(1,\n 'ub-organizational-units')],\n 'type': 'SEQUENCE '\n 'OF'},\n 'PDSName': {'size': [(1, 'ub-pds-name-length')],\n 'type': 'PrintableString'},\n 'PDSParameter': {'members': [{'name': 'printable-string',\n 'optional': True,\n 'size': [(1,\n 'ub-pds-parameter-length')],\n 'type': 
'PrintableString'},\n {'name': 'teletex-string',\n 'optional': True,\n 'size': [(1,\n 'ub-pds-parameter-length')],\n 'type': 'TeletexString'}],\n 'type': 'SET'},\n 'PersonalName': {'members': [{'name': 'surname',\n 'size': [(1,\n 'ub-surname-length')],\n 'tag': {'kind': 'IMPLICIT',\n 'number': 0},\n 'type': 'PrintableString'},\n {'name': 'given-name',\n 'optional': True,\n 'size': [(1,\n 'ub-given-name-length')],\n 'tag': {'kind': 'IMPLICIT',\n 'number': 1},\n 'type': 'PrintableString'},\n {'name': 'initials',\n 'optional': True,\n 'size': [(1,\n 'ub-initials-length')],\n 'tag': {'kind': 'IMPLICIT',\n 'number': 2},\n 'type': 'PrintableString'},\n {'name': 'generation-qualifier',\n 'optional': True,\n 'size': [(1,\n 'ub-generation-qualifier-length')],\n 'tag': {'kind': 'IMPLICIT',\n 'number': 3},\n 'type': 'PrintableString'}],\n 'type': 'SET'},\n 'PhysicalDeliveryCountryName': {'members': [{'name': 'x121-dcc-code',\n 'size': ['ub-country-name-numeric-length'],\n 'type': 'NumericString'},\n {'name': 'iso-3166-alpha2-code',\n 'size': ['ub-country-name-alpha-length'],\n 'type': 'PrintableString'}],\n 'type': 'CHOICE'},\n 'PhysicalDeliveryOfficeName': {'type': 'PDSParameter'},\n 'PhysicalDeliveryOfficeNumber': {'type': 'PDSParameter'},\n 'PhysicalDeliveryOrganizationName': {'type': 'PDSParameter'},\n 'PhysicalDeliveryPersonalName': {'type': 'PDSParameter'},\n 'PostOfficeBoxAddress': {'type': 'PDSParameter'},\n 'PostalCode': {'members': [{'name': 'numeric-code',\n 'size': [(1,\n 'ub-postal-code-length')],\n 'type': 'NumericString'},\n {'name': 'printable-code',\n 'size': [(1,\n 'ub-postal-code-length')],\n 'type': 'PrintableString'}],\n 'type': 'CHOICE'},\n 'PosteRestanteAddress': {'type': 'PDSParameter'},\n 'PresentationAddress': {'members': [{'name': 'pSelector',\n 'optional': True,\n 'tag': {'kind': 'EXPLICIT',\n 'number': 0},\n 'type': 'OCTET '\n 'STRING'},\n {'name': 'sSelector',\n 'optional': True,\n 'tag': {'kind': 'EXPLICIT',\n 'number': 1},\n 'type': 'OCTET '\n 'STRING'},\n {'name': 'tSelector',\n 'optional': True,\n 'tag': {'kind': 'EXPLICIT',\n 'number': 2},\n 'type': 'OCTET '\n 'STRING'},\n {'element': {'type': 'OCTET '\n 'STRING'},\n 'name': 'nAddresses',\n 'size': [(1,\n 'MAX')],\n 'tag': {'kind': 'EXPLICIT',\n 'number': 3},\n 'type': 'SET '\n 'OF'}],\n 'type': 'SEQUENCE'},\n 'PrivateDomainName': {'members': [{'name': 'numeric',\n 'size': [(1,\n 'ub-domain-name-length')],\n 'type': 'NumericString'},\n {'name': 'printable',\n 'size': [(1,\n 'ub-domain-name-length')],\n 'type': 'PrintableString'}],\n 'type': 'CHOICE'},\n 'RDNSequence': {'element': {'type': 'RelativeDistinguishedName'},\n 'type': 'SEQUENCE OF'},\n 'RelativeDistinguishedName': {'element': {'type': 'AttributeTypeAndValue'},\n 'size': [(1,\n 'MAX')],\n 'type': 'SET OF'},\n 'StreetAddress': {'type': 'PDSParameter'},\n 'SubjectPublicKeyInfo': {'members': [{'name': 'algorithm',\n 'type': 'AlgorithmIdentifier'},\n {'name': 'subjectPublicKey',\n 'type': 'BIT '\n 'STRING'}],\n 'type': 'SEQUENCE'},\n 'TBSCertList': {'members': [{'name': 'version',\n 'optional': True,\n 'type': 'Version'},\n {'name': 'signature',\n 'type': 'AlgorithmIdentifier'},\n {'name': 'issuer',\n 'type': 'Name'},\n {'name': 'thisUpdate',\n 'type': 'Time'},\n {'name': 'nextUpdate',\n 'optional': True,\n 'type': 'Time'},\n {'element': {'members': [{'name': 'userCertificate',\n 'type': 'CertificateSerialNumber'},\n {'name': 'revocationDate',\n 'type': 'Time'},\n {'name': 'crlEntryExtensions',\n 'optional': True,\n 'type': 'Extensions'}],\n 'type': 
'SEQUENCE'},\n 'name': 'revokedCertificates',\n 'optional': True,\n 'type': 'SEQUENCE '\n 'OF'},\n {'name': 'crlExtensions',\n 'optional': True,\n 'tag': {'number': 0},\n 'type': 'Extensions'}],\n 'type': 'SEQUENCE'},\n 'TBSCertificate': {'members': [{'default': 'v1',\n 'name': 'version',\n 'tag': {'number': 0},\n 'type': 'Version'},\n {'name': 'serialNumber',\n 'type': 'CertificateSerialNumber'},\n {'name': 'signature',\n 'type': 'AlgorithmIdentifier'},\n {'name': 'issuer',\n 'type': 'Name'},\n {'name': 'validity',\n 'type': 'Validity'},\n {'name': 'subject',\n 'type': 'Name'},\n {'name': 'subjectPublicKeyInfo',\n 'type': 'SubjectPublicKeyInfo'},\n {'name': 'issuerUniqueID',\n 'optional': True,\n 'tag': {'kind': 'IMPLICIT',\n 'number': 1},\n 'type': 'UniqueIdentifier'},\n {'name': 'subjectUniqueID',\n 'optional': True,\n 'tag': {'kind': 'IMPLICIT',\n 'number': 2},\n 'type': 'UniqueIdentifier'},\n {'name': 'extensions',\n 'optional': True,\n 'tag': {'number': 3},\n 'type': 'Extensions'}],\n 'type': 'SEQUENCE'},\n 'TeletexCommonName': {'size': [(1,\n 'ub-common-name-length')],\n 'type': 'TeletexString'},\n 'TeletexDomainDefinedAttribute': {'members': [{'name': 'type',\n 'size': [(1,\n 'ub-domain-defined-attribute-type-length')],\n 'type': 'TeletexString'},\n {'name': 'value',\n 'size': [(1,\n 'ub-domain-defined-attribute-value-length')],\n 'type': 'TeletexString'}],\n 'type': 'SEQUENCE'},\n 'TeletexDomainDefinedAttributes': {'element': {'type': 'TeletexDomainDefinedAttribute'},\n 'size': [(1,\n 'ub-domain-defined-attributes')],\n 'type': 'SEQUENCE '\n 'OF'},\n 'TeletexOrganizationName': {'size': [(1,\n 'ub-organization-name-length')],\n 'type': 'TeletexString'},\n 'TeletexOrganizationalUnitName': {'size': [(1,\n 'ub-organizational-unit-name-length')],\n 'type': 'TeletexString'},\n 'TeletexOrganizationalUnitNames': {'element': {'type': 'TeletexOrganizationalUnitName'},\n 'size': [(1,\n 'ub-organizational-units')],\n 'type': 'SEQUENCE '\n 'OF'},\n 'TeletexPersonalName': {'members': [{'name': 'surname',\n 'size': [(1,\n 'ub-surname-length')],\n 'tag': {'kind': 'IMPLICIT',\n 'number': 0},\n 'type': 'TeletexString'},\n {'name': 'given-name',\n 'optional': True,\n 'size': [(1,\n 'ub-given-name-length')],\n 'tag': {'kind': 'IMPLICIT',\n 'number': 1},\n 'type': 'TeletexString'},\n {'name': 'initials',\n 'optional': True,\n 'size': [(1,\n 'ub-initials-length')],\n 'tag': {'kind': 'IMPLICIT',\n 'number': 2},\n 'type': 'TeletexString'},\n {'name': 'generation-qualifier',\n 'optional': True,\n 'size': [(1,\n 'ub-generation-qualifier-length')],\n 'tag': {'kind': 'IMPLICIT',\n 'number': 3},\n 'type': 'TeletexString'}],\n 'type': 'SET'},\n 'TerminalIdentifier': {'size': [(1,\n 'ub-terminal-id-length')],\n 'type': 'PrintableString'},\n 'TerminalType': {'named-numbers': {'g3-facsimile': 5,\n 'g4-facsimile': 6,\n 'ia5-terminal': 7,\n 'teletex': 4,\n 'telex': 3,\n 'videotex': 8},\n 'restricted-to': [(0,\n 'ub-integer-options')],\n 'type': 'INTEGER'},\n 'Time': {'members': [{'name': 'utcTime',\n 'type': 'UTCTime'},\n {'name': 'generalTime',\n 'type': 'GeneralizedTime'}],\n 'type': 'CHOICE'},\n 'UnformattedPostalAddress': {'members': [{'element': {'size': [(1,\n 'ub-pds-parameter-length')],\n 'type': 'PrintableString'},\n 'name': 'printable-address',\n 'optional': True,\n 'size': [(1,\n 'ub-pds-physical-address-lines')],\n 'type': 'SEQUENCE '\n 'OF'},\n {'name': 'teletex-string',\n 'optional': True,\n 'size': [(1,\n 'ub-unformatted-address-length')],\n 'type': 'TeletexString'}],\n 'type': 'SET'},\n 
'UniqueIdentifier': {'type': 'BIT STRING'},\n 'UniquePostalName': {'type': 'PDSParameter'},\n 'Validity': {'members': [{'name': 'notBefore',\n 'type': 'Time'},\n {'name': 'notAfter',\n 'type': 'Time'}],\n 'type': 'SEQUENCE'},\n 'Version': {'named-numbers': {'v1': 0,\n 'v2': 1,\n 'v3': 2},\n 'type': 'INTEGER'},\n 'X121Address': {'size': [(1,\n 'ub-x121-address-length')],\n 'type': 'NumericString'},\n 'X520CommonName': {'members': [{'name': 'teletexString',\n 'size': [(1,\n 'ub-common-name')],\n 'type': 'TeletexString'},\n {'name': 'printableString',\n 'size': [(1,\n 'ub-common-name')],\n 'type': 'PrintableString'},\n {'name': 'universalString',\n 'size': [(1,\n 'ub-common-name')],\n 'type': 'UniversalString'},\n {'name': 'utf8String',\n 'size': [(1,\n 'ub-common-name')],\n 'type': 'UTF8String'},\n {'name': 'bmpString',\n 'size': [(1,\n 'ub-common-name')],\n 'type': 'BMPString'}],\n 'type': 'CHOICE'},\n 'X520LocalityName': {'members': [{'name': 'teletexString',\n 'size': [(1,\n 'ub-locality-name')],\n 'type': 'TeletexString'},\n {'name': 'printableString',\n 'size': [(1,\n 'ub-locality-name')],\n 'type': 'PrintableString'},\n {'name': 'universalString',\n 'size': [(1,\n 'ub-locality-name')],\n 'type': 'UniversalString'},\n {'name': 'utf8String',\n 'size': [(1,\n 'ub-locality-name')],\n 'type': 'UTF8String'},\n {'name': 'bmpString',\n 'size': [(1,\n 'ub-locality-name')],\n 'type': 'BMPString'}],\n 'type': 'CHOICE'},\n 'X520OrganizationName': {'members': [{'name': 'teletexString',\n 'size': [(1,\n 'ub-organization-name')],\n 'type': 'TeletexString'},\n {'name': 'printableString',\n 'size': [(1,\n 'ub-organization-name')],\n 'type': 'PrintableString'},\n {'name': 'universalString',\n 'size': [(1,\n 'ub-organization-name')],\n 'type': 'UniversalString'},\n {'name': 'utf8String',\n 'size': [(1,\n 'ub-organization-name')],\n 'type': 'UTF8String'},\n {'name': 'bmpString',\n 'size': [(1,\n 'ub-organization-name')],\n 'type': 'BMPString'}],\n 'type': 'CHOICE'},\n 'X520OrganizationalUnitName': {'members': [{'name': 'teletexString',\n 'size': [(1,\n 'ub-organizational-unit-name')],\n 'type': 'TeletexString'},\n {'name': 'printableString',\n 'size': [(1,\n 'ub-organizational-unit-name')],\n 'type': 'PrintableString'},\n {'name': 'universalString',\n 'size': [(1,\n 'ub-organizational-unit-name')],\n 'type': 'UniversalString'},\n {'name': 'utf8String',\n 'size': [(1,\n 'ub-organizational-unit-name')],\n 'type': 'UTF8String'},\n {'name': 'bmpString',\n 'size': [(1,\n 'ub-organizational-unit-name')],\n 'type': 'BMPString'}],\n 'type': 'CHOICE'},\n 'X520Pseudonym': {'members': [{'name': 'teletexString',\n 'size': [(1,\n 'ub-pseudonym')],\n 'type': 'TeletexString'},\n {'name': 'printableString',\n 'size': [(1,\n 'ub-pseudonym')],\n 'type': 'PrintableString'},\n {'name': 'universalString',\n 'size': [(1,\n 'ub-pseudonym')],\n 'type': 'UniversalString'},\n {'name': 'utf8String',\n 'size': [(1,\n 'ub-pseudonym')],\n 'type': 'UTF8String'},\n {'name': 'bmpString',\n 'size': [(1,\n 'ub-pseudonym')],\n 'type': 'BMPString'}],\n 'type': 'CHOICE'},\n 'X520SerialNumber': {'size': [(1,\n 'ub-serial-number')],\n 'type': 'PrintableString'},\n 'X520StateOrProvinceName': {'members': [{'name': 'teletexString',\n 'size': [(1,\n 'ub-state-name')],\n 'type': 'TeletexString'},\n {'name': 'printableString',\n 'size': [(1,\n 'ub-state-name')],\n 'type': 'PrintableString'},\n {'name': 'universalString',\n 'size': [(1,\n 'ub-state-name')],\n 'type': 'UniversalString'},\n {'name': 'utf8String',\n 'size': [(1,\n 'ub-state-name')],\n 
'type': 'UTF8String'},\n {'name': 'bmpString',\n 'size': [(1,\n 'ub-state-name')],\n 'type': 'BMPString'}],\n 'type': 'CHOICE'},\n 'X520Title': {'members': [{'name': 'teletexString',\n 'size': [(1,\n 'ub-title')],\n 'type': 'TeletexString'},\n {'name': 'printableString',\n 'size': [(1,\n 'ub-title')],\n 'type': 'PrintableString'},\n {'name': 'universalString',\n 'size': [(1,\n 'ub-title')],\n 'type': 'UniversalString'},\n {'name': 'utf8String',\n 'size': [(1,\n 'ub-title')],\n 'type': 'UTF8String'},\n {'name': 'bmpString',\n 'size': [(1,\n 'ub-title')],\n 'type': 'BMPString'}],\n 'type': 'CHOICE'},\n 'X520countryName': {'size': [2],\n 'type': 'PrintableString'},\n 'X520dnQualifier': {'type': 'PrintableString'},\n 'X520name': {'members': [{'name': 'teletexString',\n 'size': [(1,\n 'ub-name')],\n 'type': 'TeletexString'},\n {'name': 'printableString',\n 'size': [(1,\n 'ub-name')],\n 'type': 'PrintableString'},\n {'name': 'universalString',\n 'size': [(1,\n 'ub-name')],\n 'type': 'UniversalString'},\n {'name': 'utf8String',\n 'size': [(1,\n 'ub-name')],\n 'type': 'UTF8String'},\n {'name': 'bmpString',\n 'size': [(1,\n 'ub-name')],\n 'type': 'BMPString'}],\n 'type': 'CHOICE'}},\n 'values': {'common-name': {'type': 'INTEGER', 'value': 1},\n 'extended-network-address': {'type': 'INTEGER',\n 'value': 22},\n 'extension-OR-address-components': {'type': 'INTEGER',\n 'value': 12},\n 'extension-physical-delivery-address-components': {'type': 'INTEGER',\n 'value': 15},\n 'id-ad': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-pkix', 48]},\n 'id-ad-caIssuers': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-ad', 2]},\n 'id-ad-caRepository': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ad', 5]},\n 'id-ad-ocsp': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-ad', 1]},\n 'id-ad-timeStamping': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ad', 3]},\n 'id-at': {'type': 'OBJECT IDENTIFIER',\n 'value': [('joint-iso-ccitt', 2),\n ('ds', 5),\n 4]},\n 'id-at-commonName': {'type': 'AttributeType',\n 'value': None},\n 'id-at-countryName': {'type': 'AttributeType',\n 'value': None},\n 'id-at-dnQualifier': {'type': 'AttributeType',\n 'value': None},\n 'id-at-generationQualifier': {'type': 'AttributeType',\n 'value': None},\n 'id-at-givenName': {'type': 'AttributeType',\n 'value': None},\n 'id-at-initials': {'type': 'AttributeType',\n 'value': None},\n 'id-at-localityName': {'type': 'AttributeType',\n 'value': None},\n 'id-at-name': {'type': 'AttributeType',\n 'value': None},\n 'id-at-organizationName': {'type': 'AttributeType',\n 'value': None},\n 'id-at-organizationalUnitName': {'type': 'AttributeType',\n 'value': None},\n 'id-at-pseudonym': {'type': 'AttributeType',\n 'value': None},\n 'id-at-serialNumber': {'type': 'AttributeType',\n 'value': None},\n 'id-at-stateOrProvinceName': {'type': 'AttributeType',\n 'value': None},\n 'id-at-surname': {'type': 'AttributeType',\n 'value': None},\n 'id-at-title': {'type': 'AttributeType',\n 'value': None},\n 'id-domainComponent': {'type': 'AttributeType',\n 'value': None},\n 'id-emailAddress': {'type': 'AttributeType',\n 'value': None},\n 'id-kp': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-pkix', 3]},\n 'id-pe': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-pkix', 1]},\n 'id-pkix': {'type': 'OBJECT IDENTIFIER',\n 'value': [('iso', 1),\n ('identified-organization',\n 3),\n ('dod', 6),\n ('internet', 1),\n ('security', 5),\n ('mechanisms', 5),\n ('pkix', 7)]},\n 'id-qt': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-pkix', 2]},\n 'id-qt-cps': {'type': 'OBJECT 
IDENTIFIER',\n 'value': ['id-qt', 1]},\n 'id-qt-unotice': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-qt', 2]},\n 'local-postal-attributes': {'type': 'INTEGER',\n 'value': 21},\n 'pds-name': {'type': 'INTEGER', 'value': 7},\n 'physical-delivery-country-name': {'type': 'INTEGER',\n 'value': 8},\n 'physical-delivery-office-name': {'type': 'INTEGER',\n 'value': 10},\n 'physical-delivery-office-number': {'type': 'INTEGER',\n 'value': 11},\n 'physical-delivery-organization-name': {'type': 'INTEGER',\n 'value': 14},\n 'physical-delivery-personal-name': {'type': 'INTEGER',\n 'value': 13},\n 'pkcs-9': {'type': 'OBJECT IDENTIFIER',\n 'value': [('iso', 1),\n ('member-body', 2),\n ('us', 840),\n ('rsadsi', 113549),\n ('pkcs', 1),\n 9]},\n 'post-office-box-address': {'type': 'INTEGER',\n 'value': 18},\n 'postal-code': {'type': 'INTEGER', 'value': 9},\n 'poste-restante-address': {'type': 'INTEGER',\n 'value': 19},\n 'street-address': {'type': 'INTEGER',\n 'value': 17},\n 'teletex-common-name': {'type': 'INTEGER',\n 'value': 2},\n 'teletex-domain-defined-attributes': {'type': 'INTEGER',\n 'value': 6},\n 'teletex-organization-name': {'type': 'INTEGER',\n 'value': 3},\n 'teletex-organizational-unit-names': {'type': 'INTEGER',\n 'value': 5},\n 'teletex-personal-name': {'type': 'INTEGER',\n 'value': 4},\n 'terminal-type': {'type': 'INTEGER',\n 'value': 23},\n 'ub-common-name': {'type': 'INTEGER',\n 'value': 64},\n 'ub-common-name-length': {'type': 'INTEGER',\n 'value': 64},\n 'ub-country-name-alpha-length': {'type': 'INTEGER',\n 'value': 2},\n 'ub-country-name-numeric-length': {'type': 'INTEGER',\n 'value': 3},\n 'ub-domain-defined-attribute-type-length': {'type': 'INTEGER',\n 'value': 8},\n 'ub-domain-defined-attribute-value-length': {'type': 'INTEGER',\n 'value': 128},\n 'ub-domain-defined-attributes': {'type': 'INTEGER',\n 'value': 4},\n 'ub-domain-name-length': {'type': 'INTEGER',\n 'value': 16},\n 'ub-e163-4-number-length': {'type': 'INTEGER',\n 'value': 15},\n 'ub-e163-4-sub-address-length': {'type': 'INTEGER',\n 'value': 40},\n 'ub-emailaddress-length': {'type': 'INTEGER',\n 'value': 255},\n 'ub-extension-attributes': {'type': 'INTEGER',\n 'value': 256},\n 'ub-generation-qualifier-length': {'type': 'INTEGER',\n 'value': 3},\n 'ub-given-name-length': {'type': 'INTEGER',\n 'value': 16},\n 'ub-initials-length': {'type': 'INTEGER',\n 'value': 5},\n 'ub-integer-options': {'type': 'INTEGER',\n 'value': 256},\n 'ub-locality-name': {'type': 'INTEGER',\n 'value': 128},\n 'ub-match': {'type': 'INTEGER', 'value': 128},\n 'ub-name': {'type': 'INTEGER', 'value': 32768},\n 'ub-numeric-user-id-length': {'type': 'INTEGER',\n 'value': 32},\n 'ub-organization-name': {'type': 'INTEGER',\n 'value': 64},\n 'ub-organization-name-length': {'type': 'INTEGER',\n 'value': 64},\n 'ub-organizational-unit-name': {'type': 'INTEGER',\n 'value': 64},\n 'ub-organizational-unit-name-length': {'type': 'INTEGER',\n 'value': 32},\n 'ub-organizational-units': {'type': 'INTEGER',\n 'value': 4},\n 'ub-pds-name-length': {'type': 'INTEGER',\n 'value': 16},\n 'ub-pds-parameter-length': {'type': 'INTEGER',\n 'value': 30},\n 'ub-pds-physical-address-lines': {'type': 'INTEGER',\n 'value': 6},\n 'ub-postal-code-length': {'type': 'INTEGER',\n 'value': 16},\n 'ub-pseudonym': {'type': 'INTEGER',\n 'value': 128},\n 'ub-serial-number': {'type': 'INTEGER',\n 'value': 64},\n 'ub-state-name': {'type': 'INTEGER',\n 'value': 128},\n 'ub-surname-length': {'type': 'INTEGER',\n 'value': 40},\n 'ub-terminal-id-length': {'type': 'INTEGER',\n 'value': 24},\n 
'ub-title': {'type': 'INTEGER', 'value': 64},\n 'ub-unformatted-address-length': {'type': 'INTEGER',\n 'value': 180},\n 'ub-x121-address-length': {'type': 'INTEGER',\n 'value': 16},\n 'unformatted-postal-address': {'type': 'INTEGER',\n 'value': 16},\n 'unique-postal-name': {'type': 'INTEGER',\n 'value': 20}}},\n 'PKIX1Implicit88': {'extensibility-implied': False,\n 'imports': {'PKIX1Explicit88': ['Attribute',\n 'BMPString',\n 'CertificateSerialNumber',\n 'DirectoryString',\n 'Name',\n 'ORAddress',\n 'RelativeDistinguishedName',\n 'UTF8String',\n 'id-kp',\n 'id-pe',\n 'id-qt-cps',\n 'id-qt-unotice']},\n 'object-classes': {},\n 'object-sets': {},\n 'tags': 'IMPLICIT',\n 'types': {'AccessDescription': {'members': [{'name': 'accessMethod',\n 'type': 'OBJECT '\n 'IDENTIFIER'},\n {'name': 'accessLocation',\n 'type': 'GeneralName'}],\n 'type': 'SEQUENCE'},\n 'AnotherName': {'members': [{'name': 'type-id',\n 'type': 'OBJECT '\n 'IDENTIFIER'},\n {'choices': {},\n 'name': 'value',\n 'tag': {'kind': 'EXPLICIT',\n 'number': 0},\n 'type': 'ANY '\n 'DEFINED '\n 'BY',\n 'value': 'type-id'}],\n 'type': 'SEQUENCE'},\n 'AuthorityInfoAccessSyntax': {'element': {'type': 'AccessDescription'},\n 'size': [(1,\n 'MAX')],\n 'type': 'SEQUENCE '\n 'OF'},\n 'AuthorityKeyIdentifier': {'members': [{'name': 'keyIdentifier',\n 'optional': True,\n 'tag': {'number': 0},\n 'type': 'KeyIdentifier'},\n {'name': 'authorityCertIssuer',\n 'optional': True,\n 'tag': {'number': 1},\n 'type': 'GeneralNames'},\n {'name': 'authorityCertSerialNumber',\n 'optional': True,\n 'tag': {'number': 2},\n 'type': 'CertificateSerialNumber'}],\n 'type': 'SEQUENCE'},\n 'BaseCRLNumber': {'type': 'CRLNumber'},\n 'BaseDistance': {'restricted-to': [(0, 'MAX')],\n 'type': 'INTEGER'},\n 'BasicConstraints': {'members': [{'default': False,\n 'name': 'cA',\n 'type': 'BOOLEAN'},\n {'name': 'pathLenConstraint',\n 'optional': True,\n 'restricted-to': [(0,\n 'MAX')],\n 'type': 'INTEGER'}],\n 'type': 'SEQUENCE'},\n 'CPSuri': {'type': 'IA5String'},\n 'CRLDistributionPoints': {'element': {'type': 'DistributionPoint'},\n 'size': [(1, 'MAX')],\n 'type': 'SEQUENCE OF'},\n 'CRLNumber': {'restricted-to': [(0, 'MAX')],\n 'type': 'INTEGER'},\n 'CRLReason': {'type': 'ENUMERATED',\n 'values': [('unspecified', 0),\n ('keyCompromise', 1),\n ('cACompromise', 2),\n ('affiliationChanged',\n 3),\n ('superseded', 4),\n ('cessationOfOperation',\n 5),\n ('certificateHold', 6),\n ('removeFromCRL', 8),\n ('privilegeWithdrawn',\n 9),\n ('aACompromise', 10)]},\n 'CertPolicyId': {'type': 'OBJECT IDENTIFIER'},\n 'CertificateIssuer': {'type': 'GeneralNames'},\n 'CertificatePolicies': {'element': {'type': 'PolicyInformation'},\n 'size': [(1, 'MAX')],\n 'type': 'SEQUENCE OF'},\n 'DisplayText': {'members': [{'name': 'ia5String',\n 'size': [(1, 200)],\n 'type': 'IA5String'},\n {'name': 'visibleString',\n 'size': [(1, 200)],\n 'type': 'VisibleString'},\n {'name': 'bmpString',\n 'size': [(1, 200)],\n 'type': 'BMPString'},\n {'name': 'utf8String',\n 'size': [(1, 200)],\n 'type': 'UTF8String'}],\n 'type': 'CHOICE'},\n 'DistributionPoint': {'members': [{'name': 'distributionPoint',\n 'optional': True,\n 'tag': {'number': 0},\n 'type': 'DistributionPointName'},\n {'name': 'reasons',\n 'optional': True,\n 'tag': {'number': 1},\n 'type': 'ReasonFlags'},\n {'name': 'cRLIssuer',\n 'optional': True,\n 'tag': {'number': 2},\n 'type': 'GeneralNames'}],\n 'type': 'SEQUENCE'},\n 'DistributionPointName': {'members': [{'name': 'fullName',\n 'tag': {'number': 0},\n 'type': 'GeneralNames'},\n {'name': 
'nameRelativeToCRLIssuer',\n 'tag': {'number': 1},\n 'type': 'RelativeDistinguishedName'}],\n 'type': 'CHOICE'},\n 'EDIPartyName': {'members': [{'name': 'nameAssigner',\n 'optional': True,\n 'tag': {'number': 0},\n 'type': 'DirectoryString'},\n {'name': 'partyName',\n 'tag': {'number': 1},\n 'type': 'DirectoryString'}],\n 'type': 'SEQUENCE'},\n 'ExtKeyUsageSyntax': {'element': {'type': 'KeyPurposeId'},\n 'size': [(1, 'MAX')],\n 'type': 'SEQUENCE OF'},\n 'FreshestCRL': {'type': 'CRLDistributionPoints'},\n 'GeneralName': {'members': [{'name': 'otherName',\n 'tag': {'number': 0},\n 'type': 'AnotherName'},\n {'name': 'rfc822Name',\n 'tag': {'number': 1},\n 'type': 'IA5String'},\n {'name': 'dNSName',\n 'tag': {'number': 2},\n 'type': 'IA5String'},\n {'name': 'x400Address',\n 'tag': {'number': 3},\n 'type': 'ORAddress'},\n {'name': 'directoryName',\n 'tag': {'number': 4},\n 'type': 'Name'},\n {'name': 'ediPartyName',\n 'tag': {'number': 5},\n 'type': 'EDIPartyName'},\n {'name': 'uniformResourceIdentifier',\n 'tag': {'number': 6},\n 'type': 'IA5String'},\n {'name': 'iPAddress',\n 'tag': {'number': 7},\n 'type': 'OCTET '\n 'STRING'},\n {'name': 'registeredID',\n 'tag': {'number': 8},\n 'type': 'OBJECT '\n 'IDENTIFIER'}],\n 'type': 'CHOICE'},\n 'GeneralNames': {'element': {'type': 'GeneralName'},\n 'size': [(1, 'MAX')],\n 'type': 'SEQUENCE OF'},\n 'GeneralSubtree': {'members': [{'name': 'base',\n 'type': 'GeneralName'},\n {'default': 0,\n 'name': 'minimum',\n 'tag': {'number': 0},\n 'type': 'BaseDistance'},\n {'name': 'maximum',\n 'optional': True,\n 'tag': {'number': 1},\n 'type': 'BaseDistance'}],\n 'type': 'SEQUENCE'},\n 'GeneralSubtrees': {'element': {'type': 'GeneralSubtree'},\n 'size': [(1, 'MAX')],\n 'type': 'SEQUENCE OF'},\n 'HoldInstructionCode': {'type': 'OBJECT '\n 'IDENTIFIER'},\n 'InhibitAnyPolicy': {'type': 'SkipCerts'},\n 'InvalidityDate': {'type': 'GeneralizedTime'},\n 'IssuerAltName': {'type': 'GeneralNames'},\n 'IssuingDistributionPoint': {'members': [{'name': 'distributionPoint',\n 'optional': True,\n 'tag': {'number': 0},\n 'type': 'DistributionPointName'},\n {'default': False,\n 'name': 'onlyContainsUserCerts',\n 'tag': {'number': 1},\n 'type': 'BOOLEAN'},\n {'default': False,\n 'name': 'onlyContainsCACerts',\n 'tag': {'number': 2},\n 'type': 'BOOLEAN'},\n {'name': 'onlySomeReasons',\n 'optional': True,\n 'tag': {'number': 3},\n 'type': 'ReasonFlags'},\n {'default': False,\n 'name': 'indirectCRL',\n 'tag': {'number': 4},\n 'type': 'BOOLEAN'},\n {'default': False,\n 'name': 'onlyContainsAttributeCerts',\n 'tag': {'number': 5},\n 'type': 'BOOLEAN'}],\n 'type': 'SEQUENCE'},\n 'KeyIdentifier': {'type': 'OCTET STRING'},\n 'KeyPurposeId': {'type': 'OBJECT IDENTIFIER'},\n 'KeyUsage': {'named-bits': [('digitalSignature',\n '0'),\n ('nonRepudiation',\n '1'),\n ('keyEncipherment',\n '2'),\n ('dataEncipherment',\n '3'),\n ('keyAgreement',\n '4'),\n ('keyCertSign', '5'),\n ('cRLSign', '6'),\n ('encipherOnly',\n '7'),\n ('decipherOnly',\n '8')],\n 'type': 'BIT STRING'},\n 'NameConstraints': {'members': [{'name': 'permittedSubtrees',\n 'optional': True,\n 'tag': {'number': 0},\n 'type': 'GeneralSubtrees'},\n {'name': 'excludedSubtrees',\n 'optional': True,\n 'tag': {'number': 1},\n 'type': 'GeneralSubtrees'}],\n 'type': 'SEQUENCE'},\n 'NoticeReference': {'members': [{'name': 'organization',\n 'type': 'DisplayText'},\n {'element': {'type': 'INTEGER'},\n 'name': 'noticeNumbers',\n 'type': 'SEQUENCE '\n 'OF'}],\n 'type': 'SEQUENCE'},\n 'PolicyConstraints': {'members': [{'name': 
'requireExplicitPolicy',\n 'optional': True,\n 'tag': {'number': 0},\n 'type': 'SkipCerts'},\n {'name': 'inhibitPolicyMapping',\n 'optional': True,\n 'tag': {'number': 1},\n 'type': 'SkipCerts'}],\n 'type': 'SEQUENCE'},\n 'PolicyInformation': {'members': [{'name': 'policyIdentifier',\n 'type': 'CertPolicyId'},\n {'element': {'type': 'PolicyQualifierInfo'},\n 'name': 'policyQualifiers',\n 'optional': True,\n 'size': [(1,\n 'MAX')],\n 'type': 'SEQUENCE '\n 'OF'}],\n 'type': 'SEQUENCE'},\n 'PolicyMappings': {'element': {'members': [{'name': 'issuerDomainPolicy',\n 'type': 'CertPolicyId'},\n {'name': 'subjectDomainPolicy',\n 'type': 'CertPolicyId'}],\n 'type': 'SEQUENCE'},\n 'size': [(1, 'MAX')],\n 'type': 'SEQUENCE OF'},\n 'PolicyQualifierId': {'type': 'OBJECT '\n 'IDENTIFIER'},\n 'PolicyQualifierInfo': {'members': [{'name': 'policyQualifierId',\n 'type': 'PolicyQualifierId'},\n {'choices': {},\n 'name': 'qualifier',\n 'type': 'ANY '\n 'DEFINED '\n 'BY',\n 'value': 'policyQualifierId'}],\n 'type': 'SEQUENCE'},\n 'PrivateKeyUsagePeriod': {'members': [{'name': 'notBefore',\n 'optional': True,\n 'tag': {'number': 0},\n 'type': 'GeneralizedTime'},\n {'name': 'notAfter',\n 'optional': True,\n 'tag': {'number': 1},\n 'type': 'GeneralizedTime'}],\n 'type': 'SEQUENCE'},\n 'ReasonFlags': {'named-bits': [('unused', '0'),\n ('keyCompromise',\n '1'),\n ('cACompromise',\n '2'),\n ('affiliationChanged',\n '3'),\n ('superseded',\n '4'),\n ('cessationOfOperation',\n '5'),\n ('certificateHold',\n '6'),\n ('privilegeWithdrawn',\n '7'),\n ('aACompromise',\n '8')],\n 'type': 'BIT STRING'},\n 'SkipCerts': {'restricted-to': [(0, 'MAX')],\n 'type': 'INTEGER'},\n 'SubjectAltName': {'type': 'GeneralNames'},\n 'SubjectDirectoryAttributes': {'element': {'type': 'Attribute'},\n 'size': [(1,\n 'MAX')],\n 'type': 'SEQUENCE '\n 'OF'},\n 'SubjectInfoAccessSyntax': {'element': {'type': 'AccessDescription'},\n 'size': [(1, 'MAX')],\n 'type': 'SEQUENCE '\n 'OF'},\n 'SubjectKeyIdentifier': {'type': 'KeyIdentifier'},\n 'UserNotice': {'members': [{'name': 'noticeRef',\n 'optional': True,\n 'type': 'NoticeReference'},\n {'name': 'explicitText',\n 'optional': True,\n 'type': 'DisplayText'}],\n 'type': 'SEQUENCE'}},\n 'values': {'anyExtendedKeyUsage': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce-extKeyUsage',\n 0]},\n 'anyPolicy': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-ce-certificatePolicies',\n 0]},\n 'holdInstruction': {'type': 'OBJECT IDENTIFIER',\n 'value': [('joint-iso-itu-t',\n 2),\n ('member-body',\n 2),\n ('us', 840),\n ('x9cm', 10040),\n 2]},\n 'id-ce': {'type': 'OBJECT IDENTIFIER',\n 'value': [('joint-iso-ccitt', 2),\n ('ds', 5),\n 29]},\n 'id-ce-authorityKeyIdentifier': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 35]},\n 'id-ce-basicConstraints': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 19]},\n 'id-ce-cRLDistributionPoints': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 31]},\n 'id-ce-cRLNumber': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-ce', 20]},\n 'id-ce-cRLReasons': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce', 21]},\n 'id-ce-certificateIssuer': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 29]},\n 'id-ce-certificatePolicies': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 32]},\n 'id-ce-deltaCRLIndicator': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 27]},\n 'id-ce-extKeyUsage': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce', 37]},\n 'id-ce-freshestCRL': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': 
['id-ce', 46]},\n 'id-ce-holdInstructionCode': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 23]},\n 'id-ce-inhibitAnyPolicy': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 54]},\n 'id-ce-invalidityDate': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 24]},\n 'id-ce-issuerAltName': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce', 18]},\n 'id-ce-issuingDistributionPoint': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 28]},\n 'id-ce-keyUsage': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-ce', 15]},\n 'id-ce-nameConstraints': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 30]},\n 'id-ce-policyConstraints': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 36]},\n 'id-ce-policyMappings': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 33]},\n 'id-ce-privateKeyUsagePeriod': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 16]},\n 'id-ce-subjectAltName': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 17]},\n 'id-ce-subjectDirectoryAttributes': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 9]},\n 'id-ce-subjectKeyIdentifier': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-ce',\n 14]},\n 'id-holdinstruction-callissuer': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['holdInstruction',\n 2]},\n 'id-holdinstruction-none': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['holdInstruction',\n 1]},\n 'id-holdinstruction-reject': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['holdInstruction',\n 3]},\n 'id-kp-OCSPSigning': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-kp', 9]},\n 'id-kp-clientAuth': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-kp', 2]},\n 'id-kp-codeSigning': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-kp', 3]},\n 'id-kp-emailProtection': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-kp',\n 4]},\n 'id-kp-serverAuth': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-kp', 1]},\n 'id-kp-timeStamping': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-kp', 8]},\n 'id-pe-authorityInfoAccess': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-pe',\n 1]},\n 'id-pe-subjectInfoAccess': {'type': 'OBJECT '\n 'IDENTIFIER',\n 'value': ['id-pe',\n 11]}}}}", "EXPECTED = {'PKIX1Explicit88': {'extensibility-implied': False, 'imports':\n {}, 'object-classes': {}, 'object-sets': {}, 'tags': 'EXPLICIT',\n 'types': {'AdministrationDomainName': {'members': [{'name': 'numeric',\n 'size': [(0, 'ub-domain-name-length')], 'type': 'NumericString'}, {\n 'name': 'printable', 'size': [(0, 'ub-domain-name-length')], 'type':\n 'PrintableString'}], 'tag': {'class': 'APPLICATION', 'number': 2},\n 'type': 'CHOICE'}, 'AlgorithmIdentifier': {'members': [{'name':\n 'algorithm', 'type': 'OBJECT IDENTIFIER'}, {'choices': {}, 'name':\n 'parameters', 'optional': True, 'type': 'ANY DEFINED BY', 'value':\n 'algorithm'}], 'type': 'SEQUENCE'}, 'Attribute': {'members': [{'name':\n 'type', 'type': 'AttributeType'}, {'element': {'type': 'AttributeValue'\n }, 'name': 'values', 'type': 'SET OF'}], 'type': 'SEQUENCE'},\n 'AttributeType': {'type': 'OBJECT IDENTIFIER'}, 'AttributeTypeAndValue':\n {'members': [{'name': 'type', 'type': 'AttributeType'}, {'name':\n 'value', 'type': 'AttributeValue'}], 'type': 'SEQUENCE'},\n 'AttributeValue': {'choices': {}, 'type': 'ANY DEFINED BY', 'value':\n 'type'}, 'BuiltInDomainDefinedAttribute': {'members': [{'name': 'type',\n 'size': [(1, 'ub-domain-defined-attribute-type-length')], 'type':\n 'PrintableString'}, {'name': 'value', 'size': [(1,\n 
'ub-domain-defined-attribute-value-length')], 'type': 'PrintableString'\n }], 'type': 'SEQUENCE'}, 'BuiltInDomainDefinedAttributes': {'element':\n {'type': 'BuiltInDomainDefinedAttribute'}, 'size': [(1,\n 'ub-domain-defined-attributes')], 'type': 'SEQUENCE OF'},\n 'BuiltInStandardAttributes': {'members': [{'name': 'country-name',\n 'optional': True, 'type': 'CountryName'}, {'name':\n 'administration-domain-name', 'optional': True, 'type':\n 'AdministrationDomainName'}, {'name': 'network-address', 'optional': \n True, 'tag': {'kind': 'IMPLICIT', 'number': 0}, 'type':\n 'NetworkAddress'}, {'name': 'terminal-identifier', 'optional': True,\n 'tag': {'kind': 'IMPLICIT', 'number': 1}, 'type': 'TerminalIdentifier'},\n {'name': 'private-domain-name', 'optional': True, 'tag': {'number': 2},\n 'type': 'PrivateDomainName'}, {'name': 'organization-name', 'optional':\n True, 'tag': {'kind': 'IMPLICIT', 'number': 3}, 'type':\n 'OrganizationName'}, {'name': 'numeric-user-identifier', 'optional': \n True, 'tag': {'kind': 'IMPLICIT', 'number': 4}, 'type':\n 'NumericUserIdentifier'}, {'name': 'personal-name', 'optional': True,\n 'tag': {'kind': 'IMPLICIT', 'number': 5}, 'type': 'PersonalName'}, {\n 'name': 'organizational-unit-names', 'optional': True, 'tag': {'kind':\n 'IMPLICIT', 'number': 6}, 'type': 'OrganizationalUnitNames'}], 'type':\n 'SEQUENCE'}, 'Certificate': {'members': [{'name': 'tbsCertificate',\n 'type': 'TBSCertificate'}, {'name': 'signatureAlgorithm', 'type':\n 'AlgorithmIdentifier'}, {'name': 'signature', 'type': 'BIT STRING'}],\n 'type': 'SEQUENCE'}, 'CertificateList': {'members': [{'name':\n 'tbsCertList', 'type': 'TBSCertList'}, {'name': 'signatureAlgorithm',\n 'type': 'AlgorithmIdentifier'}, {'name': 'signature', 'type':\n 'BIT STRING'}], 'type': 'SEQUENCE'}, 'CertificateSerialNumber': {'type':\n 'INTEGER'}, 'CommonName': {'size': [(1, 'ub-common-name-length')],\n 'type': 'PrintableString'}, 'CountryName': {'members': [{'name':\n 'x121-dcc-code', 'size': ['ub-country-name-numeric-length'], 'type':\n 'NumericString'}, {'name': 'iso-3166-alpha2-code', 'size': [\n 'ub-country-name-alpha-length'], 'type': 'PrintableString'}], 'tag': {\n 'class': 'APPLICATION', 'number': 1}, 'type': 'CHOICE'},\n 'DirectoryString': {'members': [{'name': 'teletexString', 'size': [(1,\n 'MAX')], 'type': 'TeletexString'}, {'name': 'printableString', 'size':\n [(1, 'MAX')], 'type': 'PrintableString'}, {'name': 'universalString',\n 'size': [(1, 'MAX')], 'type': 'UniversalString'}, {'name': 'utf8String',\n 'size': [(1, 'MAX')], 'type': 'UTF8String'}, {'name': 'bmpString',\n 'size': [(1, 'MAX')], 'type': 'BMPString'}], 'type': 'CHOICE'},\n 'DistinguishedName': {'type': 'RDNSequence'}, 'DomainComponent': {\n 'type': 'IA5String'}, 'EmailAddress': {'size': [(1,\n 'ub-emailaddress-length')], 'type': 'IA5String'},\n 'ExtendedNetworkAddress': {'members': [{'members': [{'name': 'number',\n 'size': [(1, 'ub-e163-4-number-length')], 'tag': {'kind': 'IMPLICIT',\n 'number': 0}, 'type': 'NumericString'}, {'name': 'sub-address',\n 'optional': True, 'size': [(1, 'ub-e163-4-sub-address-length')], 'tag':\n {'kind': 'IMPLICIT', 'number': 1}, 'type': 'NumericString'}], 'name':\n 'e163-4-address', 'type': 'SEQUENCE'}, {'name': 'psap-address', 'tag':\n {'kind': 'IMPLICIT', 'number': 0}, 'type': 'PresentationAddress'}],\n 'type': 'CHOICE'}, 'Extension': {'members': [{'name': 'extnID', 'type':\n 'OBJECT IDENTIFIER'}, {'default': False, 'name': 'critical', 'type':\n 'BOOLEAN'}, {'name': 'extnValue', 'type': 'OCTET STRING'}], 'type':\n 
'SEQUENCE'}, 'ExtensionAttribute': {'members': [{'name':\n 'extension-attribute-type', 'restricted-to': [(0,\n 'ub-extension-attributes')], 'tag': {'kind': 'IMPLICIT', 'number': 0},\n 'type': 'INTEGER'}, {'choices': {}, 'name': 'extension-attribute-value',\n 'tag': {'number': 1}, 'type': 'ANY DEFINED BY', 'value':\n 'extension-attribute-type'}], 'type': 'SEQUENCE'},\n 'ExtensionAttributes': {'element': {'type': 'ExtensionAttribute'},\n 'size': [(1, 'ub-extension-attributes')], 'type': 'SET OF'},\n 'ExtensionORAddressComponents': {'type': 'PDSParameter'},\n 'ExtensionPhysicalDeliveryAddressComponents': {'type': 'PDSParameter'},\n 'Extensions': {'element': {'type': 'Extension'}, 'size': [(1, 'MAX')],\n 'type': 'SEQUENCE OF'}, 'LocalPostalAttributes': {'type':\n 'PDSParameter'}, 'Name': {'members': [{'name': 'rdnSequence', 'type':\n 'RDNSequence'}], 'type': 'CHOICE'}, 'NetworkAddress': {'type':\n 'X121Address'}, 'NumericUserIdentifier': {'size': [(1,\n 'ub-numeric-user-id-length')], 'type': 'NumericString'}, 'ORAddress': {\n 'members': [{'name': 'built-in-standard-attributes', 'type':\n 'BuiltInStandardAttributes'}, {'name':\n 'built-in-domain-defined-attributes', 'optional': True, 'type':\n 'BuiltInDomainDefinedAttributes'}, {'name': 'extension-attributes',\n 'optional': True, 'type': 'ExtensionAttributes'}], 'type': 'SEQUENCE'},\n 'OrganizationName': {'size': [(1, 'ub-organization-name-length')],\n 'type': 'PrintableString'}, 'OrganizationalUnitName': {'size': [(1,\n 'ub-organizational-unit-name-length')], 'type': 'PrintableString'},\n 'OrganizationalUnitNames': {'element': {'type':\n 'OrganizationalUnitName'}, 'size': [(1, 'ub-organizational-units')],\n 'type': 'SEQUENCE OF'}, 'PDSName': {'size': [(1, 'ub-pds-name-length')],\n 'type': 'PrintableString'}, 'PDSParameter': {'members': [{'name':\n 'printable-string', 'optional': True, 'size': [(1,\n 'ub-pds-parameter-length')], 'type': 'PrintableString'}, {'name':\n 'teletex-string', 'optional': True, 'size': [(1,\n 'ub-pds-parameter-length')], 'type': 'TeletexString'}], 'type': 'SET'},\n 'PersonalName': {'members': [{'name': 'surname', 'size': [(1,\n 'ub-surname-length')], 'tag': {'kind': 'IMPLICIT', 'number': 0}, 'type':\n 'PrintableString'}, {'name': 'given-name', 'optional': True, 'size': [(\n 1, 'ub-given-name-length')], 'tag': {'kind': 'IMPLICIT', 'number': 1},\n 'type': 'PrintableString'}, {'name': 'initials', 'optional': True,\n 'size': [(1, 'ub-initials-length')], 'tag': {'kind': 'IMPLICIT',\n 'number': 2}, 'type': 'PrintableString'}, {'name':\n 'generation-qualifier', 'optional': True, 'size': [(1,\n 'ub-generation-qualifier-length')], 'tag': {'kind': 'IMPLICIT',\n 'number': 3}, 'type': 'PrintableString'}], 'type': 'SET'},\n 'PhysicalDeliveryCountryName': {'members': [{'name': 'x121-dcc-code',\n 'size': ['ub-country-name-numeric-length'], 'type': 'NumericString'}, {\n 'name': 'iso-3166-alpha2-code', 'size': ['ub-country-name-alpha-length'\n ], 'type': 'PrintableString'}], 'type': 'CHOICE'},\n 'PhysicalDeliveryOfficeName': {'type': 'PDSParameter'},\n 'PhysicalDeliveryOfficeNumber': {'type': 'PDSParameter'},\n 'PhysicalDeliveryOrganizationName': {'type': 'PDSParameter'},\n 'PhysicalDeliveryPersonalName': {'type': 'PDSParameter'},\n 'PostOfficeBoxAddress': {'type': 'PDSParameter'}, 'PostalCode': {\n 'members': [{'name': 'numeric-code', 'size': [(1,\n 'ub-postal-code-length')], 'type': 'NumericString'}, {'name':\n 'printable-code', 'size': [(1, 'ub-postal-code-length')], 'type':\n 'PrintableString'}], 'type': 'CHOICE'}, 
'PosteRestanteAddress': {'type':\n 'PDSParameter'}, 'PresentationAddress': {'members': [{'name':\n 'pSelector', 'optional': True, 'tag': {'kind': 'EXPLICIT', 'number': 0},\n 'type': 'OCTET STRING'}, {'name': 'sSelector', 'optional': True, 'tag':\n {'kind': 'EXPLICIT', 'number': 1}, 'type': 'OCTET STRING'}, {'name':\n 'tSelector', 'optional': True, 'tag': {'kind': 'EXPLICIT', 'number': 2},\n 'type': 'OCTET STRING'}, {'element': {'type': 'OCTET STRING'}, 'name':\n 'nAddresses', 'size': [(1, 'MAX')], 'tag': {'kind': 'EXPLICIT',\n 'number': 3}, 'type': 'SET OF'}], 'type': 'SEQUENCE'},\n 'PrivateDomainName': {'members': [{'name': 'numeric', 'size': [(1,\n 'ub-domain-name-length')], 'type': 'NumericString'}, {'name':\n 'printable', 'size': [(1, 'ub-domain-name-length')], 'type':\n 'PrintableString'}], 'type': 'CHOICE'}, 'RDNSequence': {'element': {\n 'type': 'RelativeDistinguishedName'}, 'type': 'SEQUENCE OF'},\n 'RelativeDistinguishedName': {'element': {'type':\n 'AttributeTypeAndValue'}, 'size': [(1, 'MAX')], 'type': 'SET OF'},\n 'StreetAddress': {'type': 'PDSParameter'}, 'SubjectPublicKeyInfo': {\n 'members': [{'name': 'algorithm', 'type': 'AlgorithmIdentifier'}, {\n 'name': 'subjectPublicKey', 'type': 'BIT STRING'}], 'type': 'SEQUENCE'},\n 'TBSCertList': {'members': [{'name': 'version', 'optional': True,\n 'type': 'Version'}, {'name': 'signature', 'type': 'AlgorithmIdentifier'\n }, {'name': 'issuer', 'type': 'Name'}, {'name': 'thisUpdate', 'type':\n 'Time'}, {'name': 'nextUpdate', 'optional': True, 'type': 'Time'}, {\n 'element': {'members': [{'name': 'userCertificate', 'type':\n 'CertificateSerialNumber'}, {'name': 'revocationDate', 'type': 'Time'},\n {'name': 'crlEntryExtensions', 'optional': True, 'type': 'Extensions'}],\n 'type': 'SEQUENCE'}, 'name': 'revokedCertificates', 'optional': True,\n 'type': 'SEQUENCE OF'}, {'name': 'crlExtensions', 'optional': True,\n 'tag': {'number': 0}, 'type': 'Extensions'}], 'type': 'SEQUENCE'},\n 'TBSCertificate': {'members': [{'default': 'v1', 'name': 'version',\n 'tag': {'number': 0}, 'type': 'Version'}, {'name': 'serialNumber',\n 'type': 'CertificateSerialNumber'}, {'name': 'signature', 'type':\n 'AlgorithmIdentifier'}, {'name': 'issuer', 'type': 'Name'}, {'name':\n 'validity', 'type': 'Validity'}, {'name': 'subject', 'type': 'Name'}, {\n 'name': 'subjectPublicKeyInfo', 'type': 'SubjectPublicKeyInfo'}, {\n 'name': 'issuerUniqueID', 'optional': True, 'tag': {'kind': 'IMPLICIT',\n 'number': 1}, 'type': 'UniqueIdentifier'}, {'name': 'subjectUniqueID',\n 'optional': True, 'tag': {'kind': 'IMPLICIT', 'number': 2}, 'type':\n 'UniqueIdentifier'}, {'name': 'extensions', 'optional': True, 'tag': {\n 'number': 3}, 'type': 'Extensions'}], 'type': 'SEQUENCE'},\n 'TeletexCommonName': {'size': [(1, 'ub-common-name-length')], 'type':\n 'TeletexString'}, 'TeletexDomainDefinedAttribute': {'members': [{'name':\n 'type', 'size': [(1, 'ub-domain-defined-attribute-type-length')],\n 'type': 'TeletexString'}, {'name': 'value', 'size': [(1,\n 'ub-domain-defined-attribute-value-length')], 'type': 'TeletexString'}],\n 'type': 'SEQUENCE'}, 'TeletexDomainDefinedAttributes': {'element': {\n 'type': 'TeletexDomainDefinedAttribute'}, 'size': [(1,\n 'ub-domain-defined-attributes')], 'type': 'SEQUENCE OF'},\n 'TeletexOrganizationName': {'size': [(1, 'ub-organization-name-length')\n ], 'type': 'TeletexString'}, 'TeletexOrganizationalUnitName': {'size':\n [(1, 'ub-organizational-unit-name-length')], 'type': 'TeletexString'},\n 'TeletexOrganizationalUnitNames': {'element': {'type':\n 
'TeletexOrganizationalUnitName'}, 'size': [(1,\n 'ub-organizational-units')], 'type': 'SEQUENCE OF'},\n 'TeletexPersonalName': {'members': [{'name': 'surname', 'size': [(1,\n 'ub-surname-length')], 'tag': {'kind': 'IMPLICIT', 'number': 0}, 'type':\n 'TeletexString'}, {'name': 'given-name', 'optional': True, 'size': [(1,\n 'ub-given-name-length')], 'tag': {'kind': 'IMPLICIT', 'number': 1},\n 'type': 'TeletexString'}, {'name': 'initials', 'optional': True, 'size':\n [(1, 'ub-initials-length')], 'tag': {'kind': 'IMPLICIT', 'number': 2},\n 'type': 'TeletexString'}, {'name': 'generation-qualifier', 'optional': \n True, 'size': [(1, 'ub-generation-qualifier-length')], 'tag': {'kind':\n 'IMPLICIT', 'number': 3}, 'type': 'TeletexString'}], 'type': 'SET'},\n 'TerminalIdentifier': {'size': [(1, 'ub-terminal-id-length')], 'type':\n 'PrintableString'}, 'TerminalType': {'named-numbers': {'g3-facsimile': \n 5, 'g4-facsimile': 6, 'ia5-terminal': 7, 'teletex': 4, 'telex': 3,\n 'videotex': 8}, 'restricted-to': [(0, 'ub-integer-options')], 'type':\n 'INTEGER'}, 'Time': {'members': [{'name': 'utcTime', 'type': 'UTCTime'},\n {'name': 'generalTime', 'type': 'GeneralizedTime'}], 'type': 'CHOICE'},\n 'UnformattedPostalAddress': {'members': [{'element': {'size': [(1,\n 'ub-pds-parameter-length')], 'type': 'PrintableString'}, 'name':\n 'printable-address', 'optional': True, 'size': [(1,\n 'ub-pds-physical-address-lines')], 'type': 'SEQUENCE OF'}, {'name':\n 'teletex-string', 'optional': True, 'size': [(1,\n 'ub-unformatted-address-length')], 'type': 'TeletexString'}], 'type':\n 'SET'}, 'UniqueIdentifier': {'type': 'BIT STRING'}, 'UniquePostalName':\n {'type': 'PDSParameter'}, 'Validity': {'members': [{'name': 'notBefore',\n 'type': 'Time'}, {'name': 'notAfter', 'type': 'Time'}], 'type':\n 'SEQUENCE'}, 'Version': {'named-numbers': {'v1': 0, 'v2': 1, 'v3': 2},\n 'type': 'INTEGER'}, 'X121Address': {'size': [(1,\n 'ub-x121-address-length')], 'type': 'NumericString'}, 'X520CommonName':\n {'members': [{'name': 'teletexString', 'size': [(1, 'ub-common-name')],\n 'type': 'TeletexString'}, {'name': 'printableString', 'size': [(1,\n 'ub-common-name')], 'type': 'PrintableString'}, {'name':\n 'universalString', 'size': [(1, 'ub-common-name')], 'type':\n 'UniversalString'}, {'name': 'utf8String', 'size': [(1,\n 'ub-common-name')], 'type': 'UTF8String'}, {'name': 'bmpString', 'size':\n [(1, 'ub-common-name')], 'type': 'BMPString'}], 'type': 'CHOICE'},\n 'X520LocalityName': {'members': [{'name': 'teletexString', 'size': [(1,\n 'ub-locality-name')], 'type': 'TeletexString'}, {'name':\n 'printableString', 'size': [(1, 'ub-locality-name')], 'type':\n 'PrintableString'}, {'name': 'universalString', 'size': [(1,\n 'ub-locality-name')], 'type': 'UniversalString'}, {'name': 'utf8String',\n 'size': [(1, 'ub-locality-name')], 'type': 'UTF8String'}, {'name':\n 'bmpString', 'size': [(1, 'ub-locality-name')], 'type': 'BMPString'}],\n 'type': 'CHOICE'}, 'X520OrganizationName': {'members': [{'name':\n 'teletexString', 'size': [(1, 'ub-organization-name')], 'type':\n 'TeletexString'}, {'name': 'printableString', 'size': [(1,\n 'ub-organization-name')], 'type': 'PrintableString'}, {'name':\n 'universalString', 'size': [(1, 'ub-organization-name')], 'type':\n 'UniversalString'}, {'name': 'utf8String', 'size': [(1,\n 'ub-organization-name')], 'type': 'UTF8String'}, {'name': 'bmpString',\n 'size': [(1, 'ub-organization-name')], 'type': 'BMPString'}], 'type':\n 'CHOICE'}, 'X520OrganizationalUnitName': {'members': [{'name':\n 'teletexString', 
'size': [(1, 'ub-organizational-unit-name')], 'type':\n 'TeletexString'}, {'name': 'printableString', 'size': [(1,\n 'ub-organizational-unit-name')], 'type': 'PrintableString'}, {'name':\n 'universalString', 'size': [(1, 'ub-organizational-unit-name')], 'type':\n 'UniversalString'}, {'name': 'utf8String', 'size': [(1,\n 'ub-organizational-unit-name')], 'type': 'UTF8String'}, {'name':\n 'bmpString', 'size': [(1, 'ub-organizational-unit-name')], 'type':\n 'BMPString'}], 'type': 'CHOICE'}, 'X520Pseudonym': {'members': [{'name':\n 'teletexString', 'size': [(1, 'ub-pseudonym')], 'type': 'TeletexString'\n }, {'name': 'printableString', 'size': [(1, 'ub-pseudonym')], 'type':\n 'PrintableString'}, {'name': 'universalString', 'size': [(1,\n 'ub-pseudonym')], 'type': 'UniversalString'}, {'name': 'utf8String',\n 'size': [(1, 'ub-pseudonym')], 'type': 'UTF8String'}, {'name':\n 'bmpString', 'size': [(1, 'ub-pseudonym')], 'type': 'BMPString'}],\n 'type': 'CHOICE'}, 'X520SerialNumber': {'size': [(1, 'ub-serial-number'\n )], 'type': 'PrintableString'}, 'X520StateOrProvinceName': {'members':\n [{'name': 'teletexString', 'size': [(1, 'ub-state-name')], 'type':\n 'TeletexString'}, {'name': 'printableString', 'size': [(1,\n 'ub-state-name')], 'type': 'PrintableString'}, {'name':\n 'universalString', 'size': [(1, 'ub-state-name')], 'type':\n 'UniversalString'}, {'name': 'utf8String', 'size': [(1, 'ub-state-name'\n )], 'type': 'UTF8String'}, {'name': 'bmpString', 'size': [(1,\n 'ub-state-name')], 'type': 'BMPString'}], 'type': 'CHOICE'},\n 'X520Title': {'members': [{'name': 'teletexString', 'size': [(1,\n 'ub-title')], 'type': 'TeletexString'}, {'name': 'printableString',\n 'size': [(1, 'ub-title')], 'type': 'PrintableString'}, {'name':\n 'universalString', 'size': [(1, 'ub-title')], 'type': 'UniversalString'\n }, {'name': 'utf8String', 'size': [(1, 'ub-title')], 'type':\n 'UTF8String'}, {'name': 'bmpString', 'size': [(1, 'ub-title')], 'type':\n 'BMPString'}], 'type': 'CHOICE'}, 'X520countryName': {'size': [2],\n 'type': 'PrintableString'}, 'X520dnQualifier': {'type':\n 'PrintableString'}, 'X520name': {'members': [{'name': 'teletexString',\n 'size': [(1, 'ub-name')], 'type': 'TeletexString'}, {'name':\n 'printableString', 'size': [(1, 'ub-name')], 'type': 'PrintableString'},\n {'name': 'universalString', 'size': [(1, 'ub-name')], 'type':\n 'UniversalString'}, {'name': 'utf8String', 'size': [(1, 'ub-name')],\n 'type': 'UTF8String'}, {'name': 'bmpString', 'size': [(1, 'ub-name')],\n 'type': 'BMPString'}], 'type': 'CHOICE'}}, 'values': {'common-name': {\n 'type': 'INTEGER', 'value': 1}, 'extended-network-address': {'type':\n 'INTEGER', 'value': 22}, 'extension-OR-address-components': {'type':\n 'INTEGER', 'value': 12},\n 'extension-physical-delivery-address-components': {'type': 'INTEGER',\n 'value': 15}, 'id-ad': {'type': 'OBJECT IDENTIFIER', 'value': [\n 'id-pkix', 48]}, 'id-ad-caIssuers': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-ad', 2]}, 'id-ad-caRepository': {'type':\n 'OBJECT IDENTIFIER', 'value': ['id-ad', 5]}, 'id-ad-ocsp': {'type':\n 'OBJECT IDENTIFIER', 'value': ['id-ad', 1]}, 'id-ad-timeStamping': {\n 'type': 'OBJECT IDENTIFIER', 'value': ['id-ad', 3]}, 'id-at': {'type':\n 'OBJECT IDENTIFIER', 'value': [('joint-iso-ccitt', 2), ('ds', 5), 4]},\n 'id-at-commonName': {'type': 'AttributeType', 'value': None},\n 'id-at-countryName': {'type': 'AttributeType', 'value': None},\n 'id-at-dnQualifier': {'type': 'AttributeType', 'value': None},\n 'id-at-generationQualifier': {'type': 'AttributeType', 'value': 
None},\n 'id-at-givenName': {'type': 'AttributeType', 'value': None},\n 'id-at-initials': {'type': 'AttributeType', 'value': None},\n 'id-at-localityName': {'type': 'AttributeType', 'value': None},\n 'id-at-name': {'type': 'AttributeType', 'value': None},\n 'id-at-organizationName': {'type': 'AttributeType', 'value': None},\n 'id-at-organizationalUnitName': {'type': 'AttributeType', 'value': None\n }, 'id-at-pseudonym': {'type': 'AttributeType', 'value': None},\n 'id-at-serialNumber': {'type': 'AttributeType', 'value': None},\n 'id-at-stateOrProvinceName': {'type': 'AttributeType', 'value': None},\n 'id-at-surname': {'type': 'AttributeType', 'value': None},\n 'id-at-title': {'type': 'AttributeType', 'value': None},\n 'id-domainComponent': {'type': 'AttributeType', 'value': None},\n 'id-emailAddress': {'type': 'AttributeType', 'value': None}, 'id-kp': {\n 'type': 'OBJECT IDENTIFIER', 'value': ['id-pkix', 3]}, 'id-pe': {'type':\n 'OBJECT IDENTIFIER', 'value': ['id-pkix', 1]}, 'id-pkix': {'type':\n 'OBJECT IDENTIFIER', 'value': [('iso', 1), ('identified-organization', \n 3), ('dod', 6), ('internet', 1), ('security', 5), ('mechanisms', 5), (\n 'pkix', 7)]}, 'id-qt': {'type': 'OBJECT IDENTIFIER', 'value': [\n 'id-pkix', 2]}, 'id-qt-cps': {'type': 'OBJECT IDENTIFIER', 'value': [\n 'id-qt', 1]}, 'id-qt-unotice': {'type': 'OBJECT IDENTIFIER', 'value': [\n 'id-qt', 2]}, 'local-postal-attributes': {'type': 'INTEGER', 'value': \n 21}, 'pds-name': {'type': 'INTEGER', 'value': 7},\n 'physical-delivery-country-name': {'type': 'INTEGER', 'value': 8},\n 'physical-delivery-office-name': {'type': 'INTEGER', 'value': 10},\n 'physical-delivery-office-number': {'type': 'INTEGER', 'value': 11},\n 'physical-delivery-organization-name': {'type': 'INTEGER', 'value': 14},\n 'physical-delivery-personal-name': {'type': 'INTEGER', 'value': 13},\n 'pkcs-9': {'type': 'OBJECT IDENTIFIER', 'value': [('iso', 1), (\n 'member-body', 2), ('us', 840), ('rsadsi', 113549), ('pkcs', 1), 9]},\n 'post-office-box-address': {'type': 'INTEGER', 'value': 18},\n 'postal-code': {'type': 'INTEGER', 'value': 9},\n 'poste-restante-address': {'type': 'INTEGER', 'value': 19},\n 'street-address': {'type': 'INTEGER', 'value': 17},\n 'teletex-common-name': {'type': 'INTEGER', 'value': 2},\n 'teletex-domain-defined-attributes': {'type': 'INTEGER', 'value': 6},\n 'teletex-organization-name': {'type': 'INTEGER', 'value': 3},\n 'teletex-organizational-unit-names': {'type': 'INTEGER', 'value': 5},\n 'teletex-personal-name': {'type': 'INTEGER', 'value': 4},\n 'terminal-type': {'type': 'INTEGER', 'value': 23}, 'ub-common-name': {\n 'type': 'INTEGER', 'value': 64}, 'ub-common-name-length': {'type':\n 'INTEGER', 'value': 64}, 'ub-country-name-alpha-length': {'type':\n 'INTEGER', 'value': 2}, 'ub-country-name-numeric-length': {'type':\n 'INTEGER', 'value': 3}, 'ub-domain-defined-attribute-type-length': {\n 'type': 'INTEGER', 'value': 8},\n 'ub-domain-defined-attribute-value-length': {'type': 'INTEGER', 'value':\n 128}, 'ub-domain-defined-attributes': {'type': 'INTEGER', 'value': 4},\n 'ub-domain-name-length': {'type': 'INTEGER', 'value': 16},\n 'ub-e163-4-number-length': {'type': 'INTEGER', 'value': 15},\n 'ub-e163-4-sub-address-length': {'type': 'INTEGER', 'value': 40},\n 'ub-emailaddress-length': {'type': 'INTEGER', 'value': 255},\n 'ub-extension-attributes': {'type': 'INTEGER', 'value': 256},\n 'ub-generation-qualifier-length': {'type': 'INTEGER', 'value': 3},\n 'ub-given-name-length': {'type': 'INTEGER', 'value': 16},\n 'ub-initials-length': {'type': 
'INTEGER', 'value': 5},\n 'ub-integer-options': {'type': 'INTEGER', 'value': 256},\n 'ub-locality-name': {'type': 'INTEGER', 'value': 128}, 'ub-match': {\n 'type': 'INTEGER', 'value': 128}, 'ub-name': {'type': 'INTEGER',\n 'value': 32768}, 'ub-numeric-user-id-length': {'type': 'INTEGER',\n 'value': 32}, 'ub-organization-name': {'type': 'INTEGER', 'value': 64},\n 'ub-organization-name-length': {'type': 'INTEGER', 'value': 64},\n 'ub-organizational-unit-name': {'type': 'INTEGER', 'value': 64},\n 'ub-organizational-unit-name-length': {'type': 'INTEGER', 'value': 32},\n 'ub-organizational-units': {'type': 'INTEGER', 'value': 4},\n 'ub-pds-name-length': {'type': 'INTEGER', 'value': 16},\n 'ub-pds-parameter-length': {'type': 'INTEGER', 'value': 30},\n 'ub-pds-physical-address-lines': {'type': 'INTEGER', 'value': 6},\n 'ub-postal-code-length': {'type': 'INTEGER', 'value': 16},\n 'ub-pseudonym': {'type': 'INTEGER', 'value': 128}, 'ub-serial-number':\n {'type': 'INTEGER', 'value': 64}, 'ub-state-name': {'type': 'INTEGER',\n 'value': 128}, 'ub-surname-length': {'type': 'INTEGER', 'value': 40},\n 'ub-terminal-id-length': {'type': 'INTEGER', 'value': 24}, 'ub-title':\n {'type': 'INTEGER', 'value': 64}, 'ub-unformatted-address-length': {\n 'type': 'INTEGER', 'value': 180}, 'ub-x121-address-length': {'type':\n 'INTEGER', 'value': 16}, 'unformatted-postal-address': {'type':\n 'INTEGER', 'value': 16}, 'unique-postal-name': {'type': 'INTEGER',\n 'value': 20}}}, 'PKIX1Implicit88': {'extensibility-implied': False,\n 'imports': {'PKIX1Explicit88': ['Attribute', 'BMPString',\n 'CertificateSerialNumber', 'DirectoryString', 'Name', 'ORAddress',\n 'RelativeDistinguishedName', 'UTF8String', 'id-kp', 'id-pe',\n 'id-qt-cps', 'id-qt-unotice']}, 'object-classes': {}, 'object-sets': {},\n 'tags': 'IMPLICIT', 'types': {'AccessDescription': {'members': [{'name':\n 'accessMethod', 'type': 'OBJECT IDENTIFIER'}, {'name': 'accessLocation',\n 'type': 'GeneralName'}], 'type': 'SEQUENCE'}, 'AnotherName': {'members':\n [{'name': 'type-id', 'type': 'OBJECT IDENTIFIER'}, {'choices': {},\n 'name': 'value', 'tag': {'kind': 'EXPLICIT', 'number': 0}, 'type':\n 'ANY DEFINED BY', 'value': 'type-id'}], 'type': 'SEQUENCE'},\n 'AuthorityInfoAccessSyntax': {'element': {'type': 'AccessDescription'},\n 'size': [(1, 'MAX')], 'type': 'SEQUENCE OF'}, 'AuthorityKeyIdentifier':\n {'members': [{'name': 'keyIdentifier', 'optional': True, 'tag': {\n 'number': 0}, 'type': 'KeyIdentifier'}, {'name': 'authorityCertIssuer',\n 'optional': True, 'tag': {'number': 1}, 'type': 'GeneralNames'}, {\n 'name': 'authorityCertSerialNumber', 'optional': True, 'tag': {'number':\n 2}, 'type': 'CertificateSerialNumber'}], 'type': 'SEQUENCE'},\n 'BaseCRLNumber': {'type': 'CRLNumber'}, 'BaseDistance': {\n 'restricted-to': [(0, 'MAX')], 'type': 'INTEGER'}, 'BasicConstraints':\n {'members': [{'default': False, 'name': 'cA', 'type': 'BOOLEAN'}, {\n 'name': 'pathLenConstraint', 'optional': True, 'restricted-to': [(0,\n 'MAX')], 'type': 'INTEGER'}], 'type': 'SEQUENCE'}, 'CPSuri': {'type':\n 'IA5String'}, 'CRLDistributionPoints': {'element': {'type':\n 'DistributionPoint'}, 'size': [(1, 'MAX')], 'type': 'SEQUENCE OF'},\n 'CRLNumber': {'restricted-to': [(0, 'MAX')], 'type': 'INTEGER'},\n 'CRLReason': {'type': 'ENUMERATED', 'values': [('unspecified', 0), (\n 'keyCompromise', 1), ('cACompromise', 2), ('affiliationChanged', 3), (\n 'superseded', 4), ('cessationOfOperation', 5), ('certificateHold', 6),\n ('removeFromCRL', 8), ('privilegeWithdrawn', 9), ('aACompromise', 10)]},\n 
'CertPolicyId': {'type': 'OBJECT IDENTIFIER'}, 'CertificateIssuer': {\n 'type': 'GeneralNames'}, 'CertificatePolicies': {'element': {'type':\n 'PolicyInformation'}, 'size': [(1, 'MAX')], 'type': 'SEQUENCE OF'},\n 'DisplayText': {'members': [{'name': 'ia5String', 'size': [(1, 200)],\n 'type': 'IA5String'}, {'name': 'visibleString', 'size': [(1, 200)],\n 'type': 'VisibleString'}, {'name': 'bmpString', 'size': [(1, 200)],\n 'type': 'BMPString'}, {'name': 'utf8String', 'size': [(1, 200)], 'type':\n 'UTF8String'}], 'type': 'CHOICE'}, 'DistributionPoint': {'members': [{\n 'name': 'distributionPoint', 'optional': True, 'tag': {'number': 0},\n 'type': 'DistributionPointName'}, {'name': 'reasons', 'optional': True,\n 'tag': {'number': 1}, 'type': 'ReasonFlags'}, {'name': 'cRLIssuer',\n 'optional': True, 'tag': {'number': 2}, 'type': 'GeneralNames'}],\n 'type': 'SEQUENCE'}, 'DistributionPointName': {'members': [{'name':\n 'fullName', 'tag': {'number': 0}, 'type': 'GeneralNames'}, {'name':\n 'nameRelativeToCRLIssuer', 'tag': {'number': 1}, 'type':\n 'RelativeDistinguishedName'}], 'type': 'CHOICE'}, 'EDIPartyName': {\n 'members': [{'name': 'nameAssigner', 'optional': True, 'tag': {'number':\n 0}, 'type': 'DirectoryString'}, {'name': 'partyName', 'tag': {'number':\n 1}, 'type': 'DirectoryString'}], 'type': 'SEQUENCE'},\n 'ExtKeyUsageSyntax': {'element': {'type': 'KeyPurposeId'}, 'size': [(1,\n 'MAX')], 'type': 'SEQUENCE OF'}, 'FreshestCRL': {'type':\n 'CRLDistributionPoints'}, 'GeneralName': {'members': [{'name':\n 'otherName', 'tag': {'number': 0}, 'type': 'AnotherName'}, {'name':\n 'rfc822Name', 'tag': {'number': 1}, 'type': 'IA5String'}, {'name':\n 'dNSName', 'tag': {'number': 2}, 'type': 'IA5String'}, {'name':\n 'x400Address', 'tag': {'number': 3}, 'type': 'ORAddress'}, {'name':\n 'directoryName', 'tag': {'number': 4}, 'type': 'Name'}, {'name':\n 'ediPartyName', 'tag': {'number': 5}, 'type': 'EDIPartyName'}, {'name':\n 'uniformResourceIdentifier', 'tag': {'number': 6}, 'type': 'IA5String'},\n {'name': 'iPAddress', 'tag': {'number': 7}, 'type': 'OCTET STRING'}, {\n 'name': 'registeredID', 'tag': {'number': 8}, 'type':\n 'OBJECT IDENTIFIER'}], 'type': 'CHOICE'}, 'GeneralNames': {'element': {\n 'type': 'GeneralName'}, 'size': [(1, 'MAX')], 'type': 'SEQUENCE OF'},\n 'GeneralSubtree': {'members': [{'name': 'base', 'type': 'GeneralName'},\n {'default': 0, 'name': 'minimum', 'tag': {'number': 0}, 'type':\n 'BaseDistance'}, {'name': 'maximum', 'optional': True, 'tag': {'number':\n 1}, 'type': 'BaseDistance'}], 'type': 'SEQUENCE'}, 'GeneralSubtrees': {\n 'element': {'type': 'GeneralSubtree'}, 'size': [(1, 'MAX')], 'type':\n 'SEQUENCE OF'}, 'HoldInstructionCode': {'type': 'OBJECT IDENTIFIER'},\n 'InhibitAnyPolicy': {'type': 'SkipCerts'}, 'InvalidityDate': {'type':\n 'GeneralizedTime'}, 'IssuerAltName': {'type': 'GeneralNames'},\n 'IssuingDistributionPoint': {'members': [{'name': 'distributionPoint',\n 'optional': True, 'tag': {'number': 0}, 'type': 'DistributionPointName'\n }, {'default': False, 'name': 'onlyContainsUserCerts', 'tag': {'number':\n 1}, 'type': 'BOOLEAN'}, {'default': False, 'name':\n 'onlyContainsCACerts', 'tag': {'number': 2}, 'type': 'BOOLEAN'}, {\n 'name': 'onlySomeReasons', 'optional': True, 'tag': {'number': 3},\n 'type': 'ReasonFlags'}, {'default': False, 'name': 'indirectCRL', 'tag':\n {'number': 4}, 'type': 'BOOLEAN'}, {'default': False, 'name':\n 'onlyContainsAttributeCerts', 'tag': {'number': 5}, 'type': 'BOOLEAN'}],\n 'type': 'SEQUENCE'}, 'KeyIdentifier': {'type': 'OCTET 
STRING'},\n 'KeyPurposeId': {'type': 'OBJECT IDENTIFIER'}, 'KeyUsage': {\n 'named-bits': [('digitalSignature', '0'), ('nonRepudiation', '1'), (\n 'keyEncipherment', '2'), ('dataEncipherment', '3'), ('keyAgreement',\n '4'), ('keyCertSign', '5'), ('cRLSign', '6'), ('encipherOnly', '7'), (\n 'decipherOnly', '8')], 'type': 'BIT STRING'}, 'NameConstraints': {\n 'members': [{'name': 'permittedSubtrees', 'optional': True, 'tag': {\n 'number': 0}, 'type': 'GeneralSubtrees'}, {'name': 'excludedSubtrees',\n 'optional': True, 'tag': {'number': 1}, 'type': 'GeneralSubtrees'}],\n 'type': 'SEQUENCE'}, 'NoticeReference': {'members': [{'name':\n 'organization', 'type': 'DisplayText'}, {'element': {'type': 'INTEGER'},\n 'name': 'noticeNumbers', 'type': 'SEQUENCE OF'}], 'type': 'SEQUENCE'},\n 'PolicyConstraints': {'members': [{'name': 'requireExplicitPolicy',\n 'optional': True, 'tag': {'number': 0}, 'type': 'SkipCerts'}, {'name':\n 'inhibitPolicyMapping', 'optional': True, 'tag': {'number': 1}, 'type':\n 'SkipCerts'}], 'type': 'SEQUENCE'}, 'PolicyInformation': {'members': [{\n 'name': 'policyIdentifier', 'type': 'CertPolicyId'}, {'element': {\n 'type': 'PolicyQualifierInfo'}, 'name': 'policyQualifiers', 'optional':\n True, 'size': [(1, 'MAX')], 'type': 'SEQUENCE OF'}], 'type': 'SEQUENCE'\n }, 'PolicyMappings': {'element': {'members': [{'name':\n 'issuerDomainPolicy', 'type': 'CertPolicyId'}, {'name':\n 'subjectDomainPolicy', 'type': 'CertPolicyId'}], 'type': 'SEQUENCE'},\n 'size': [(1, 'MAX')], 'type': 'SEQUENCE OF'}, 'PolicyQualifierId': {\n 'type': 'OBJECT IDENTIFIER'}, 'PolicyQualifierInfo': {'members': [{\n 'name': 'policyQualifierId', 'type': 'PolicyQualifierId'}, {'choices':\n {}, 'name': 'qualifier', 'type': 'ANY DEFINED BY', 'value':\n 'policyQualifierId'}], 'type': 'SEQUENCE'}, 'PrivateKeyUsagePeriod': {\n 'members': [{'name': 'notBefore', 'optional': True, 'tag': {'number': 0\n }, 'type': 'GeneralizedTime'}, {'name': 'notAfter', 'optional': True,\n 'tag': {'number': 1}, 'type': 'GeneralizedTime'}], 'type': 'SEQUENCE'},\n 'ReasonFlags': {'named-bits': [('unused', '0'), ('keyCompromise', '1'),\n ('cACompromise', '2'), ('affiliationChanged', '3'), ('superseded', '4'),\n ('cessationOfOperation', '5'), ('certificateHold', '6'), (\n 'privilegeWithdrawn', '7'), ('aACompromise', '8')], 'type':\n 'BIT STRING'}, 'SkipCerts': {'restricted-to': [(0, 'MAX')], 'type':\n 'INTEGER'}, 'SubjectAltName': {'type': 'GeneralNames'},\n 'SubjectDirectoryAttributes': {'element': {'type': 'Attribute'}, 'size':\n [(1, 'MAX')], 'type': 'SEQUENCE OF'}, 'SubjectInfoAccessSyntax': {\n 'element': {'type': 'AccessDescription'}, 'size': [(1, 'MAX')], 'type':\n 'SEQUENCE OF'}, 'SubjectKeyIdentifier': {'type': 'KeyIdentifier'},\n 'UserNotice': {'members': [{'name': 'noticeRef', 'optional': True,\n 'type': 'NoticeReference'}, {'name': 'explicitText', 'optional': True,\n 'type': 'DisplayText'}], 'type': 'SEQUENCE'}}, 'values': {\n 'anyExtendedKeyUsage': {'type': 'OBJECT IDENTIFIER', 'value': [\n 'id-ce-extKeyUsage', 0]}, 'anyPolicy': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-ce-certificatePolicies', 0]}, 'holdInstruction': {'type':\n 'OBJECT IDENTIFIER', 'value': [('joint-iso-itu-t', 2), ('member-body', \n 2), ('us', 840), ('x9cm', 10040), 2]}, 'id-ce': {'type':\n 'OBJECT IDENTIFIER', 'value': [('joint-iso-ccitt', 2), ('ds', 5), 29]},\n 'id-ce-authorityKeyIdentifier': {'type': 'OBJECT IDENTIFIER', 'value':\n ['id-ce', 35]}, 'id-ce-basicConstraints': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-ce', 19]}, 
'id-ce-cRLDistributionPoints': {'type':\n 'OBJECT IDENTIFIER', 'value': ['id-ce', 31]}, 'id-ce-cRLNumber': {\n 'type': 'OBJECT IDENTIFIER', 'value': ['id-ce', 20]},\n 'id-ce-cRLReasons': {'type': 'OBJECT IDENTIFIER', 'value': ['id-ce', 21\n ]}, 'id-ce-certificateIssuer': {'type': 'OBJECT IDENTIFIER', 'value': [\n 'id-ce', 29]}, 'id-ce-certificatePolicies': {'type':\n 'OBJECT IDENTIFIER', 'value': ['id-ce', 32]}, 'id-ce-deltaCRLIndicator':\n {'type': 'OBJECT IDENTIFIER', 'value': ['id-ce', 27]},\n 'id-ce-extKeyUsage': {'type': 'OBJECT IDENTIFIER', 'value': ['id-ce', \n 37]}, 'id-ce-freshestCRL': {'type': 'OBJECT IDENTIFIER', 'value': [\n 'id-ce', 46]}, 'id-ce-holdInstructionCode': {'type':\n 'OBJECT IDENTIFIER', 'value': ['id-ce', 23]}, 'id-ce-inhibitAnyPolicy':\n {'type': 'OBJECT IDENTIFIER', 'value': ['id-ce', 54]},\n 'id-ce-invalidityDate': {'type': 'OBJECT IDENTIFIER', 'value': ['id-ce',\n 24]}, 'id-ce-issuerAltName': {'type': 'OBJECT IDENTIFIER', 'value': [\n 'id-ce', 18]}, 'id-ce-issuingDistributionPoint': {'type':\n 'OBJECT IDENTIFIER', 'value': ['id-ce', 28]}, 'id-ce-keyUsage': {'type':\n 'OBJECT IDENTIFIER', 'value': ['id-ce', 15]}, 'id-ce-nameConstraints':\n {'type': 'OBJECT IDENTIFIER', 'value': ['id-ce', 30]},\n 'id-ce-policyConstraints': {'type': 'OBJECT IDENTIFIER', 'value': [\n 'id-ce', 36]}, 'id-ce-policyMappings': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-ce', 33]}, 'id-ce-privateKeyUsagePeriod': {'type':\n 'OBJECT IDENTIFIER', 'value': ['id-ce', 16]}, 'id-ce-subjectAltName': {\n 'type': 'OBJECT IDENTIFIER', 'value': ['id-ce', 17]},\n 'id-ce-subjectDirectoryAttributes': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-ce', 9]}, 'id-ce-subjectKeyIdentifier': {'type':\n 'OBJECT IDENTIFIER', 'value': ['id-ce', 14]},\n 'id-holdinstruction-callissuer': {'type': 'OBJECT IDENTIFIER', 'value':\n ['holdInstruction', 2]}, 'id-holdinstruction-none': {'type':\n 'OBJECT IDENTIFIER', 'value': ['holdInstruction', 1]},\n 'id-holdinstruction-reject': {'type': 'OBJECT IDENTIFIER', 'value': [\n 'holdInstruction', 3]}, 'id-kp-OCSPSigning': {'type':\n 'OBJECT IDENTIFIER', 'value': ['id-kp', 9]}, 'id-kp-clientAuth': {\n 'type': 'OBJECT IDENTIFIER', 'value': ['id-kp', 2]},\n 'id-kp-codeSigning': {'type': 'OBJECT IDENTIFIER', 'value': ['id-kp', 3\n ]}, 'id-kp-emailProtection': {'type': 'OBJECT IDENTIFIER', 'value': [\n 'id-kp', 4]}, 'id-kp-serverAuth': {'type': 'OBJECT IDENTIFIER', 'value':\n ['id-kp', 1]}, 'id-kp-timeStamping': {'type': 'OBJECT IDENTIFIER',\n 'value': ['id-kp', 8]}, 'id-pe-authorityInfoAccess': {'type':\n 'OBJECT IDENTIFIER', 'value': ['id-pe', 1]}, 'id-pe-subjectInfoAccess':\n {'type': 'OBJECT IDENTIFIER', 'value': ['id-pe', 11]}}}}\n", "<assignment token>\n" ]
false
99,417
cd5991cdd0aedbf240459d66dc5243154a30f857
import random as rnd
import time

import inputs.inputs as inputs
import representation.encoding as encoding
import representation.decoding as decoding
import functions.best_solution_update as best_solution_update
import algorithms.classical_fo_maths as classical_fo_maths
import algorithms.fo_maths as fo_maths
import algorithms.pso_maths as pso_maths
import functions.mutation as mutation


def FO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim, length, t1, t2, s, e, NAB, alfa, gamma, beta0, minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):

    ## get the initial solution
    SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity, aux_mutation, aux_break = inputs.Initial_solution(nParticle, nVessel, minVel, maxVel, minCoordination, maxCoordination)
    ## encoding the initial solution
    encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)]
    ## decoding the initial solution
    for i in range(nParticle):
        SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)
    ## update the global best solution
    G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)
    change = 0

    ## start the algorithm
    start_time = time.time()

    for iteration in range(nIteration):

        for i in range(nParticle):

            ## choose type of solution update and update the solutions
            if gbest == 99999:
                Solution[i] = [rnd.uniform(minCoordination, maxCoordination) for _ in range(nVessel)]
                Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(nVessel)]
            elif Cost[i] < gbest:
                Solution[i] = classical_fo_maths.FO2(nVessel, Solution[i], alfa, minCoordination, maxCoordination)
            elif Cost[i] == gbest:
                aux_mutation += 1
                aux_break += 1
                Solution[i] = classical_fo_maths.FO2(nVessel, Solution[i], alfa, minCoordination, maxCoordination)
            else:
                aux_mutation += 1
                aux_break += 1
                Solution[i] = classical_fo_maths.FO(i, nParticle, nVessel, Solution, beta0, alfa, gamma, minCoordination, maxCoordination)

            ## encode solution
            encode[i] = encoding.Ordering(nVessel, Solution[i])

            ## decode solution
            SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)

            ## control of best solution changing
            if Cost[i] < gbest:
                change = 1
            else:
                change = 0
            if change == 1:
                aux_break = 0

        ## updateglobal best solution
        G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)

        ## decision of mutation
        if aux_mutation / nParticle >= round(nIteration * 0.10):
            aux_mutation = 0
            Solution, Velocity = mutation.Mutation(nParticle, nVessel, Solution, Velocity, minVel, maxVel, minCoordination, maxCoordination)

        ## stopping criteria 2
        if aux_break / nParticle >= round(nIteration * 0.33):
            break

    end_time = time.time()

    TIMES.append(end_time - start_time)
    if len(SOLVS) == 0 or gbest < min(SOLVS):
        ggbest = gbest
        gG = GloSOL
    SOLVS.append(gbest)
    print("Solution : ", gbest)
    print("Time : ", (end_time - start_time), " sec.")

    return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG


#### PARTICLE SWARM OPTIMIZATION ###
#### --------------------------- ###
def PSO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim, length, t1, t2, s, e, NAB, c1, c2, wmin, wmax, minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):

    ## get the initial solution
    SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity, aux_mutation, aux_break = inputs.Initial_solution(nParticle, nVessel, minVel, maxVel, minCoordination, maxCoordination)
    ## encoding the initial solution
    encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)]
    ## decoding the initial solution
    for i in range(nParticle):
        SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)
    ## update personal best solution
    P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest, Cost, P)
    ## update the global best solution
    G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)
    change = 0

    ## start the algorithm
    start_time = time.time()

    for iteration in range(nIteration):

        for i in range(nParticle):

            ## choose type of solution update and update the solutions
            if gbest == 99999:
                Solution[i] = [rnd.uniform(minCoordination, maxCoordination) for _ in range(nVessel)]
                Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(nVessel)]
            else:
                aux_mutation += 1
                aux_break += 1
                Solution[i], Velocity[i] = pso_maths.PSO(nVessel, nIteration, Solution[i], P[i], G, Velocity[i], c1, c2, wmin, wmax, iteration, minVel, maxVel, minCoordination, maxCoordination)

            ## encode solution
            encode[i] = encoding.Ordering(nVessel, Solution[i])

            ## decode solution
            SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)

            ## control of best solution changing
            if Cost[i] < gbest:
                change = 1
            else:
                change = 0
            if change == 1:
                aux_break = 0

        ## update personal best solution
        P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest, Cost, P)

        ## update global best solution
        G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)

        ## decision of mutation
        if aux_mutation / nParticle >= round(nIteration * 0.10):
            aux_mutation = 0
            Solution, Velocity = mutation.Mutation(nParticle, nVessel, Solution, Velocity, minVel, maxVel, minCoordination, maxCoordination)

        ## stopping criteria 2
        if aux_break / nParticle >= round(nIteration * 0.33):
            break

    end_time = time.time()

    TIMES.append(end_time - start_time)
    if len(SOLVS) == 0 or gbest < min(SOLVS):
        ggbest = gbest
        gG = GloSOL
    SOLVS.append(gbest)
    print("Solution : ", gbest)
    print("Time : ", (end_time - start_time), " sec.")

    return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG


#### PARTICLE SWARM OPTIMIZATION ###
#### --------------------------- ###
def HFPSO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim, length, t1, t2, s, e, NAB, alfa, gamma, beta0, c1, c2, wmin, wmax, minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):

    ## get the initial solution
    SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity, aux_mutation, aux_break = inputs.Initial_solution(nParticle, nVessel, minVel, maxVel, minCoordination, maxCoordination)
    ## encoding the initial solution
    encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)]
    ## decoding the initial solution
    for i in range(nParticle):
        SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)
    ## update personal best solution
    P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest, Cost, P)
    ## update the global best solution
    G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)
    change = 0
    change2 = 0
    change3 = 0

    ## start the algorithm
    start_time = time.time()

    for iteration in range(nIteration):

        if change2 == 1:
            change = 1
            change2 = 0
            change3 = 0
        else:
            change = 0

        for i in range(nParticle):

            ## choose type of solution update and update the solutions
            if gbest == 99999:
                Solution[i] = [rnd.uniform(minCoordination, maxCoordination) for _ in range(nVessel)]
                Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(nVessel)]
            else:
                if change == 0:
                    aux_mutation += 1
                    aux_break += 1
                    Solution[i], Velocity[i] = pso_maths.PSO(nVessel, nIteration, Solution[i], P[i], G, Velocity[i], c1, c2, wmin, wmax, iteration, minVel, maxVel, minCoordination, maxCoordination)
                else:
                    aux_break = 0
                    Solution[i], Velocity[i] = fo_maths.FO(nVessel,G,Solution[i],Velocity[i],beta0,alfa,gamma,minCoordination,maxCoordination,minVel,maxVel)

            ## encode solution
            encode[i] = encoding.Ordering(nVessel, Solution[i])

            ## decode solution
            SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)

            ## control of best solution changing
            if Cost[i] < gbest:
                change2 = 1
                change3 = 1
            else:
                change3 = 0
            if change3 == 1:
                aux_break = 0

        ## update personal best solution
        P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest, Cost, P)

        ## update global best solution
        G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)

        ## decision of mutation
        if aux_mutation / nParticle >= round(nIteration * 0.10):
            aux_mutation = 0
            Solution, Velocity = mutation.Mutation(nParticle, nVessel, Solution, Velocity, minVel, maxVel, minCoordination, maxCoordination)

        ## stopping criteria 2
        if aux_break / nParticle >= round(nIteration * 0.33):
            break

    end_time = time.time()

    TIMES.append(end_time - start_time)
    if len(SOLVS) == 0 or gbest < min(SOLVS):
        ggbest = gbest
        gG = GloSOL
    SOLVS.append(gbest)
    print("Solution : ", gbest)
    print("Time : ", (end_time - start_time), " sec.")

    return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG


#### PARTICLE SWARM OPTIMIZATION ###
#### --------------------------- ###
def HFPSO2(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim, length, t1, t2, s, e, NAB, alfa, gamma, beta0, c1, c2, wmin, wmax, minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):

    ## get the initial solution
    SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity, aux_mutation, aux_break = inputs.Initial_solution(nParticle, nVessel, minVel, maxVel, minCoordination, maxCoordination)
    ## encoding the initial solution
    encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)]
    ## decoding the initial solution
    for i in range(nParticle):
        SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)
    ## update personal best solution
    P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest, Cost, P)
    ## update the global best solution
    G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)
    change = 0
    change2 = 0
    change3 = 0

    ## start the algorithm
    start_time = time.time()

    for iteration in range(nIteration):

        if change2 == 1:
            change = 1
            change2 = 0
            change3 = 0
        else:
            change = 0

        for i in range(nParticle):

            ## choose type of solution update and update the solutions
            if gbest == 99999:
                Solution[i] = [rnd.uniform(minCoordination, maxCoordination) for _ in range(nVessel)]
                Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(nVessel)]
            else:
                if change == 1:
                    aux_break = 0
                    Solution[i], Velocity[i] = pso_maths.PSO(nVessel, nIteration, Solution[i], P[i], G, Velocity[i], c1, c2, wmin, wmax, iteration, minVel, maxVel, minCoordination, maxCoordination)
                else:
                    aux_mutation += 1
                    aux_break += 1
                    Solution[i], Velocity[i] = fo_maths.FO(nVessel,G,Solution[i],Velocity[i],beta0,alfa,gamma,minCoordination,maxCoordination,minVel,maxVel)

            ## encode solution
            encode[i] = encoding.Ordering(nVessel, Solution[i])

            ## decode solution
            SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)

            ## control of best solution changing
            if Cost[i] < gbest:
                change2 = 1
                change3 = 1
            else:
                change3 = 0
            if change3 == 1:
                aux_break = 0

        ## update personal best solution
        P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest, Cost, P)

        ## update global best solution
        G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)

        ## decision of mutation
        if aux_mutation / nParticle >= round(nIteration * 0.10):
            aux_mutation = 0
            Solution, Velocity = mutation.Mutation(nParticle, nVessel, Solution, Velocity, minVel, maxVel, minCoordination, maxCoordination)

        ## stopping criteria 2
        if aux_break / nParticle >= round(nIteration * 0.33):
            break

    end_time = time.time()

    TIMES.append(end_time - start_time)
    if len(SOLVS) == 0 or gbest < min(SOLVS):
        ggbest = gbest
        gG = GloSOL
    SOLVS.append(gbest)
    print("Solution : ", gbest)
    print("Time : ", (end_time - start_time), " sec.")

    return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG
[ "import random as rnd\r\nimport time\r\n\r\nimport inputs.inputs as inputs\r\nimport representation.encoding as encoding\r\nimport representation.decoding as decoding\r\nimport functions.best_solution_update as best_solution_update\r\nimport algorithms.classical_fo_maths as classical_fo_maths\r\nimport algorithms.fo_maths as fo_maths\r\nimport algorithms.pso_maths as pso_maths\r\nimport functions.mutation as mutation\r\n\r\ndef FO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim, length, t1, t2, s, e, NAB, alfa, gamma, beta0, minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):\r\n\r\n ## get the initial solution\r\n SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity, aux_mutation, aux_break = inputs.Initial_solution(nParticle, nVessel, minVel, maxVel, minCoordination, maxCoordination)\r\n ## encoding the initial solution\r\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)]\r\n ## decoding the initial solution\r\n for i in range(nParticle):\r\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\r\n ## update the global best solution\r\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)\r\n change = 0\r\n\r\n ## start the algorithm\r\n start_time = time.time()\r\n\r\n for iteration in range(nIteration):\r\n\r\n for i in range(nParticle):\r\n\r\n ## choose type of solution update and update the solutions\r\n if gbest == 99999:\r\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination) for _ in range(nVessel)]\r\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(nVessel)]\r\n elif Cost[i] < gbest:\r\n Solution[i] = classical_fo_maths.FO2(nVessel, Solution[i], alfa, minCoordination, maxCoordination)\r\n elif Cost[i] == gbest:\r\n aux_mutation += 1\r\n aux_break += 1\r\n Solution[i] = classical_fo_maths.FO2(nVessel, Solution[i], alfa, minCoordination, maxCoordination)\r\n else:\r\n aux_mutation += 1\r\n aux_break += 1\r\n Solution[i] = classical_fo_maths.FO(i, nParticle, nVessel, Solution, beta0, alfa, gamma,\r\n minCoordination, maxCoordination)\r\n\r\n ## encode solution\r\n encode[i] = encoding.Ordering(nVessel, Solution[i])\r\n\r\n ## decode solution\r\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p,\r\n structure, NAB)\r\n\r\n ## control of best solution changing\r\n if Cost[i] < gbest:\r\n change = 1\r\n else:\r\n change = 0\r\n if change == 1:\r\n aux_break = 0\r\n\r\n ## updateglobal best solution\r\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)\r\n\r\n ## decision of mutation\r\n if aux_mutation / nParticle >= round(nIteration * 0.10):\r\n aux_mutation = 0\r\n Solution, Velocity = mutation.Mutation(nParticle, nVessel, Solution, Velocity, minVel, maxVel,\r\n minCoordination, maxCoordination)\r\n\r\n ## stopping criteria 2\r\n if aux_break / nParticle >= round(nIteration * 0.33):\r\n break\r\n\r\n end_time = time.time()\r\n\r\n TIMES.append(end_time - start_time)\r\n if len(SOLVS) == 0 or gbest < min(SOLVS):\r\n ggbest = gbest\r\n gG = GloSOL\r\n SOLVS.append(gbest)\r\n print(\"Solution : \", gbest)\r\n print(\"Time : \", (end_time - start_time), \" sec.\")\r\n\r\n\r\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\r\n\r\n\r\n\r\n\r\n\r\n\r\n#### PARTICLE SWARM OPTIMIZATION ###\r\n#### --------------------------- ###\r\ndef PSO(structure, nIteration, 
nParticle, nVessel, nBerth, p, pro_tim, length, t1, t2, s, e, NAB, c1, c2, wmin, wmax, minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):\r\n\r\n ## get the initial solution\r\n SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity, aux_mutation, aux_break = inputs.Initial_solution(nParticle, nVessel, minVel, maxVel, minCoordination, maxCoordination)\r\n ## encoding the initial solution\r\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)]\r\n ## decoding the initial solution\r\n for i in range(nParticle):\r\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\r\n ## update personal best solution\r\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest, Cost, P)\r\n ## update the global best solution\r\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)\r\n change = 0\r\n\r\n ## start the algorithm\r\n start_time = time.time()\r\n\r\n for iteration in range(nIteration):\r\n\r\n for i in range(nParticle):\r\n\r\n ## choose type of solution update and update the solutions\r\n if gbest == 99999:\r\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination) for _ in range(nVessel)]\r\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(nVessel)]\r\n else:\r\n aux_mutation += 1\r\n aux_break += 1\r\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel, nIteration, Solution[i], P[i], G, Velocity[i], c1, c2, wmin, wmax, iteration, minVel, maxVel, minCoordination, maxCoordination)\r\n\r\n ## encode solution\r\n encode[i] = encoding.Ordering(nVessel, Solution[i])\r\n\r\n ## decode solution\r\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\r\n\r\n ## control of best solution changing\r\n if Cost[i] < gbest:\r\n change = 1\r\n else:\r\n change = 0\r\n if change == 1:\r\n aux_break = 0\r\n\r\n ## update personal best solution\r\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest, Cost, P)\r\n\r\n ## update global best solution\r\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)\r\n\r\n ## decision of mutation\r\n if aux_mutation / nParticle >= round(nIteration * 0.10):\r\n aux_mutation = 0\r\n Solution, Velocity = mutation.Mutation(nParticle, nVessel, Solution, Velocity, minVel, maxVel, minCoordination, maxCoordination)\r\n\r\n ## stopping criteria 2\r\n if aux_break / nParticle >= round(nIteration * 0.33):\r\n break\r\n\r\n end_time = time.time()\r\n\r\n TIMES.append(end_time - start_time)\r\n if len(SOLVS) == 0 or gbest < min(SOLVS):\r\n ggbest = gbest\r\n gG = GloSOL\r\n SOLVS.append(gbest)\r\n print(\"Solution : \", gbest)\r\n print(\"Time : \", (end_time - start_time), \" sec.\")\r\n\r\n\r\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\r\n\r\n\r\n\r\n\r\n\r\n\r\n#### PARTICLE SWARM OPTIMIZATION ###\r\n#### --------------------------- ###\r\ndef HFPSO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim, length, t1, t2, s, e, NAB, alfa, gamma, beta0, c1, c2, wmin, wmax, minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):\r\n\r\n ## get the initial solution\r\n SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity, aux_mutation, aux_break = inputs.Initial_solution(nParticle, nVessel, minVel, maxVel, minCoordination, maxCoordination)\r\n ## encoding the initial solution\r\n encode = 
[encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)]\r\n ## decoding the initial solution\r\n for i in range(nParticle):\r\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\r\n ## update personal best solution\r\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest, Cost, P)\r\n ## update the global best solution\r\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)\r\n change = 0\r\n change2 = 0\r\n change3 = 0\r\n\r\n ## start the algorithm\r\n start_time = time.time()\r\n\r\n for iteration in range(nIteration):\r\n\r\n if change2 == 1:\r\n change = 1\r\n change2 = 0\r\n change3 = 0\r\n else:\r\n change = 0\r\n\r\n for i in range(nParticle):\r\n\r\n ## choose type of solution update and update the solutions\r\n if gbest == 99999:\r\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination) for _ in range(nVessel)]\r\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(nVessel)]\r\n else:\r\n if change == 0:\r\n aux_mutation += 1\r\n aux_break += 1\r\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel, nIteration, Solution[i], P[i], G, Velocity[i], c1, c2, wmin, wmax, iteration, minVel, maxVel, minCoordination, maxCoordination)\r\n else:\r\n aux_break = 0\r\n Solution[i], Velocity[i] = fo_maths.FO(nVessel,G,Solution[i],Velocity[i],beta0,alfa,gamma,minCoordination,maxCoordination,minVel,maxVel)\r\n\r\n\r\n ## encode solution\r\n encode[i] = encoding.Ordering(nVessel, Solution[i])\r\n\r\n ## decode solution\r\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\r\n\r\n ## control of best solution changing\r\n if Cost[i] < gbest:\r\n change2 = 1\r\n change3 = 1\r\n else:\r\n change3 = 0\r\n if change3 == 1:\r\n aux_break = 0\r\n\r\n ## update personal best solution\r\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest, Cost, P)\r\n\r\n ## update global best solution\r\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)\r\n\r\n ## decision of mutation\r\n if aux_mutation / nParticle >= round(nIteration * 0.10):\r\n aux_mutation = 0\r\n Solution, Velocity = mutation.Mutation(nParticle, nVessel, Solution, Velocity, minVel, maxVel, minCoordination, maxCoordination)\r\n\r\n ## stopping criteria 2\r\n if aux_break / nParticle >= round(nIteration * 0.33):\r\n break\r\n\r\n end_time = time.time()\r\n\r\n TIMES.append(end_time - start_time)\r\n if len(SOLVS) == 0 or gbest < min(SOLVS):\r\n ggbest = gbest\r\n gG = GloSOL\r\n SOLVS.append(gbest)\r\n print(\"Solution : \", gbest)\r\n print(\"Time : \", (end_time - start_time), \" sec.\")\r\n\r\n\r\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\r\n\r\n\r\n\r\n\r\n\r\n\r\n#### PARTICLE SWARM OPTIMIZATION ###\r\n#### --------------------------- ###\r\ndef HFPSO2(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim, length, t1, t2, s, e, NAB, alfa, gamma, beta0, c1, c2, wmin, wmax, minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):\r\n\r\n ## get the initial solution\r\n SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity, aux_mutation, aux_break = inputs.Initial_solution(nParticle, nVessel, minVel, maxVel, minCoordination, maxCoordination)\r\n ## encoding the initial solution\r\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)]\r\n ## decoding the initial 
solution\r\n for i in range(nParticle):\r\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\r\n ## update personal best solution\r\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest, Cost, P)\r\n ## update the global best solution\r\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)\r\n change = 0\r\n change2 = 0\r\n change3 = 0\r\n\r\n ## start the algorithm\r\n start_time = time.time()\r\n\r\n for iteration in range(nIteration):\r\n\r\n if change2 == 1:\r\n change = 1\r\n change2 = 0\r\n change3 = 0\r\n else:\r\n change = 0\r\n\r\n for i in range(nParticle):\r\n\r\n ## choose type of solution update and update the solutions\r\n if gbest == 99999:\r\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination) for _ in range(nVessel)]\r\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(nVessel)]\r\n else:\r\n if change == 1:\r\n aux_break = 0\r\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel, nIteration, Solution[i], P[i], G, Velocity[i], c1, c2, wmin, wmax, iteration, minVel, maxVel, minCoordination, maxCoordination)\r\n else:\r\n aux_mutation += 1\r\n aux_break += 1\r\n Solution[i], Velocity[i] = fo_maths.FO(nVessel,G,Solution[i],Velocity[i],beta0,alfa,gamma,minCoordination,maxCoordination,minVel,maxVel)\r\n\r\n\r\n ## encode solution\r\n encode[i] = encoding.Ordering(nVessel, Solution[i])\r\n\r\n ## decode solution\r\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\r\n\r\n ## control of best solution changing\r\n if Cost[i] < gbest:\r\n change2 = 1\r\n change3 = 1\r\n else:\r\n change3 = 0\r\n if change3 == 1:\r\n aux_break = 0\r\n\r\n ## update personal best solution\r\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest, Cost, P)\r\n\r\n ## update global best solution\r\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle, Solution, gbest, Cost, G, SOLS, GloSOL)\r\n\r\n ## decision of mutation\r\n if aux_mutation / nParticle >= round(nIteration * 0.10):\r\n aux_mutation = 0\r\n Solution, Velocity = mutation.Mutation(nParticle, nVessel, Solution, Velocity, minVel, maxVel, minCoordination, maxCoordination)\r\n\r\n ## stopping criteria 2\r\n if aux_break / nParticle >= round(nIteration * 0.33):\r\n break\r\n\r\n end_time = time.time()\r\n\r\n TIMES.append(end_time - start_time)\r\n if len(SOLVS) == 0 or gbest < min(SOLVS):\r\n ggbest = gbest\r\n gG = GloSOL\r\n SOLVS.append(gbest)\r\n print(\"Solution : \", gbest)\r\n print(\"Time : \", (end_time - start_time), \" sec.\")\r\n\r\n\r\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG", "import random as rnd\nimport time\nimport inputs.inputs as inputs\nimport representation.encoding as encoding\nimport representation.decoding as decoding\nimport functions.best_solution_update as best_solution_update\nimport algorithms.classical_fo_maths as classical_fo_maths\nimport algorithms.fo_maths as fo_maths\nimport algorithms.pso_maths as pso_maths\nimport functions.mutation as mutation\n\n\ndef FO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, alfa, gamma, beta0, minCoordination,\n maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n 
encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n start_time = time.time()\n for iteration in range(nIteration):\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n elif Cost[i] < gbest:\n Solution[i] = classical_fo_maths.FO2(nVessel, Solution[i],\n alfa, minCoordination, maxCoordination)\n elif Cost[i] == gbest:\n aux_mutation += 1\n aux_break += 1\n Solution[i] = classical_fo_maths.FO2(nVessel, Solution[i],\n alfa, minCoordination, maxCoordination)\n else:\n aux_mutation += 1\n aux_break += 1\n Solution[i] = classical_fo_maths.FO(i, nParticle, nVessel,\n Solution, beta0, alfa, gamma, minCoordination,\n maxCoordination)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change = 1\n else:\n change = 0\n if change == 1:\n aux_break = 0\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration * 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n\n\ndef PSO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, c1, c2, wmin, wmax, minCoordination,\n maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest,\n Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n start_time = time.time()\n for iteration in range(nIteration):\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n else:\n aux_mutation += 1\n aux_break += 1\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel,\n nIteration, Solution[i], P[i], G, Velocity[i], c1, c2,\n wmin, wmax, iteration, minVel, maxVel, minCoordination,\n maxCoordination)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, 
s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change = 1\n else:\n change = 0\n if change == 1:\n aux_break = 0\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution,\n pbest, Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration * 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n\n\ndef HFPSO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, alfa, gamma, beta0, c1, c2, wmin, wmax,\n minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG\n ):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest,\n Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n change2 = 0\n change3 = 0\n start_time = time.time()\n for iteration in range(nIteration):\n if change2 == 1:\n change = 1\n change2 = 0\n change3 = 0\n else:\n change = 0\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n elif change == 0:\n aux_mutation += 1\n aux_break += 1\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel,\n nIteration, Solution[i], P[i], G, Velocity[i], c1, c2,\n wmin, wmax, iteration, minVel, maxVel, minCoordination,\n maxCoordination)\n else:\n aux_break = 0\n Solution[i], Velocity[i] = fo_maths.FO(nVessel, G, Solution\n [i], Velocity[i], beta0, alfa, gamma, minCoordination,\n maxCoordination, minVel, maxVel)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change2 = 1\n change3 = 1\n else:\n change3 = 0\n if change3 == 1:\n aux_break = 0\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution,\n pbest, Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration * 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n 
return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n\n\ndef HFPSO2(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, alfa, gamma, beta0, c1, c2, wmin, wmax,\n minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG\n ):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest,\n Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n change2 = 0\n change3 = 0\n start_time = time.time()\n for iteration in range(nIteration):\n if change2 == 1:\n change = 1\n change2 = 0\n change3 = 0\n else:\n change = 0\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n elif change == 1:\n aux_break = 0\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel,\n nIteration, Solution[i], P[i], G, Velocity[i], c1, c2,\n wmin, wmax, iteration, minVel, maxVel, minCoordination,\n maxCoordination)\n else:\n aux_mutation += 1\n aux_break += 1\n Solution[i], Velocity[i] = fo_maths.FO(nVessel, G, Solution\n [i], Velocity[i], beta0, alfa, gamma, minCoordination,\n maxCoordination, minVel, maxVel)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change2 = 1\n change3 = 1\n else:\n change3 = 0\n if change3 == 1:\n aux_break = 0\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution,\n pbest, Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration * 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n", "<import token>\n\n\ndef FO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, alfa, gamma, beta0, minCoordination,\n maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n 
start_time = time.time()\n for iteration in range(nIteration):\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n elif Cost[i] < gbest:\n Solution[i] = classical_fo_maths.FO2(nVessel, Solution[i],\n alfa, minCoordination, maxCoordination)\n elif Cost[i] == gbest:\n aux_mutation += 1\n aux_break += 1\n Solution[i] = classical_fo_maths.FO2(nVessel, Solution[i],\n alfa, minCoordination, maxCoordination)\n else:\n aux_mutation += 1\n aux_break += 1\n Solution[i] = classical_fo_maths.FO(i, nParticle, nVessel,\n Solution, beta0, alfa, gamma, minCoordination,\n maxCoordination)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change = 1\n else:\n change = 0\n if change == 1:\n aux_break = 0\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration * 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n\n\ndef PSO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, c1, c2, wmin, wmax, minCoordination,\n maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest,\n Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n start_time = time.time()\n for iteration in range(nIteration):\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n else:\n aux_mutation += 1\n aux_break += 1\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel,\n nIteration, Solution[i], P[i], G, Velocity[i], c1, c2,\n wmin, wmax, iteration, minVel, maxVel, minCoordination,\n maxCoordination)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change = 1\n else:\n change = 0\n if change == 1:\n aux_break = 0\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution,\n pbest, Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration 
* 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n\n\ndef HFPSO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, alfa, gamma, beta0, c1, c2, wmin, wmax,\n minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG\n ):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest,\n Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n change2 = 0\n change3 = 0\n start_time = time.time()\n for iteration in range(nIteration):\n if change2 == 1:\n change = 1\n change2 = 0\n change3 = 0\n else:\n change = 0\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n elif change == 0:\n aux_mutation += 1\n aux_break += 1\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel,\n nIteration, Solution[i], P[i], G, Velocity[i], c1, c2,\n wmin, wmax, iteration, minVel, maxVel, minCoordination,\n maxCoordination)\n else:\n aux_break = 0\n Solution[i], Velocity[i] = fo_maths.FO(nVessel, G, Solution\n [i], Velocity[i], beta0, alfa, gamma, minCoordination,\n maxCoordination, minVel, maxVel)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change2 = 1\n change3 = 1\n else:\n change3 = 0\n if change3 == 1:\n aux_break = 0\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution,\n pbest, Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration * 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n\n\ndef HFPSO2(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, alfa, gamma, beta0, c1, c2, wmin, wmax,\n minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG\n ):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n 
aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest,\n Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n change2 = 0\n change3 = 0\n start_time = time.time()\n for iteration in range(nIteration):\n if change2 == 1:\n change = 1\n change2 = 0\n change3 = 0\n else:\n change = 0\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n elif change == 1:\n aux_break = 0\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel,\n nIteration, Solution[i], P[i], G, Velocity[i], c1, c2,\n wmin, wmax, iteration, minVel, maxVel, minCoordination,\n maxCoordination)\n else:\n aux_mutation += 1\n aux_break += 1\n Solution[i], Velocity[i] = fo_maths.FO(nVessel, G, Solution\n [i], Velocity[i], beta0, alfa, gamma, minCoordination,\n maxCoordination, minVel, maxVel)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change2 = 1\n change3 = 1\n else:\n change3 = 0\n if change3 == 1:\n aux_break = 0\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution,\n pbest, Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration * 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n", "<import token>\n\n\ndef FO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, alfa, gamma, beta0, minCoordination,\n maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n start_time = time.time()\n for iteration in range(nIteration):\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n elif Cost[i] < gbest:\n Solution[i] = classical_fo_maths.FO2(nVessel, 
Solution[i],\n alfa, minCoordination, maxCoordination)\n elif Cost[i] == gbest:\n aux_mutation += 1\n aux_break += 1\n Solution[i] = classical_fo_maths.FO2(nVessel, Solution[i],\n alfa, minCoordination, maxCoordination)\n else:\n aux_mutation += 1\n aux_break += 1\n Solution[i] = classical_fo_maths.FO(i, nParticle, nVessel,\n Solution, beta0, alfa, gamma, minCoordination,\n maxCoordination)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change = 1\n else:\n change = 0\n if change == 1:\n aux_break = 0\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration * 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n\n\ndef PSO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, c1, c2, wmin, wmax, minCoordination,\n maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest,\n Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n start_time = time.time()\n for iteration in range(nIteration):\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n else:\n aux_mutation += 1\n aux_break += 1\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel,\n nIteration, Solution[i], P[i], G, Velocity[i], c1, c2,\n wmin, wmax, iteration, minVel, maxVel, minCoordination,\n maxCoordination)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change = 1\n else:\n change = 0\n if change == 1:\n aux_break = 0\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution,\n pbest, Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration * 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = 
gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n\n\ndef HFPSO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, alfa, gamma, beta0, c1, c2, wmin, wmax,\n minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG\n ):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest,\n Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n change2 = 0\n change3 = 0\n start_time = time.time()\n for iteration in range(nIteration):\n if change2 == 1:\n change = 1\n change2 = 0\n change3 = 0\n else:\n change = 0\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n elif change == 0:\n aux_mutation += 1\n aux_break += 1\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel,\n nIteration, Solution[i], P[i], G, Velocity[i], c1, c2,\n wmin, wmax, iteration, minVel, maxVel, minCoordination,\n maxCoordination)\n else:\n aux_break = 0\n Solution[i], Velocity[i] = fo_maths.FO(nVessel, G, Solution\n [i], Velocity[i], beta0, alfa, gamma, minCoordination,\n maxCoordination, minVel, maxVel)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change2 = 1\n change3 = 1\n else:\n change3 = 0\n if change3 == 1:\n aux_break = 0\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution,\n pbest, Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration * 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n\n\n<function token>\n", "<import token>\n<function token>\n\n\ndef PSO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, c1, c2, wmin, wmax, minCoordination,\n maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, 
length, t1, t2, s, e, p, structure, NAB)\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest,\n Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n start_time = time.time()\n for iteration in range(nIteration):\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n else:\n aux_mutation += 1\n aux_break += 1\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel,\n nIteration, Solution[i], P[i], G, Velocity[i], c1, c2,\n wmin, wmax, iteration, minVel, maxVel, minCoordination,\n maxCoordination)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change = 1\n else:\n change = 0\n if change == 1:\n aux_break = 0\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution,\n pbest, Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration * 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n\n\ndef HFPSO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, alfa, gamma, beta0, c1, c2, wmin, wmax,\n minCoordination, maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG\n ):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest,\n Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n change2 = 0\n change3 = 0\n start_time = time.time()\n for iteration in range(nIteration):\n if change2 == 1:\n change = 1\n change2 = 0\n change3 = 0\n else:\n change = 0\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n elif change == 0:\n aux_mutation += 1\n aux_break += 1\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel,\n nIteration, Solution[i], P[i], G, Velocity[i], c1, c2,\n wmin, wmax, iteration, minVel, maxVel, minCoordination,\n maxCoordination)\n else:\n aux_break = 0\n Solution[i], Velocity[i] = fo_maths.FO(nVessel, G, Solution\n [i], Velocity[i], beta0, alfa, gamma, minCoordination,\n maxCoordination, minVel, maxVel)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = 
decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change2 = 1\n change3 = 1\n else:\n change3 = 0\n if change3 == 1:\n aux_break = 0\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution,\n pbest, Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration * 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n\n\n<function token>\n", "<import token>\n<function token>\n\n\ndef PSO(structure, nIteration, nParticle, nVessel, nBerth, p, pro_tim,\n length, t1, t2, s, e, NAB, c1, c2, wmin, wmax, minCoordination,\n maxCoordination, minVel, maxVel, TIMES, SOLVS, ggbest, gG):\n (SOLS, nf, Cost, GloSOL, P, pbest, G, gbest, Solution, Velocity,\n aux_mutation, aux_break) = (inputs.Initial_solution(nParticle,\n nVessel, minVel, maxVel, minCoordination, maxCoordination))\n encode = [encoding.Ordering(nVessel, Solution[i]) for i in range(nParticle)\n ]\n for i in range(nParticle):\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth, encode[\n i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution, pbest,\n Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n change = 0\n start_time = time.time()\n for iteration in range(nIteration):\n for i in range(nParticle):\n if gbest == 99999:\n Solution[i] = [rnd.uniform(minCoordination, maxCoordination\n ) for _ in range(nVessel)]\n Velocity[i] = [rnd.uniform(minVel, maxVel) for _ in range(\n nVessel)]\n else:\n aux_mutation += 1\n aux_break += 1\n Solution[i], Velocity[i] = pso_maths.PSO(nVessel,\n nIteration, Solution[i], P[i], G, Velocity[i], c1, c2,\n wmin, wmax, iteration, minVel, maxVel, minCoordination,\n maxCoordination)\n encode[i] = encoding.Ordering(nVessel, Solution[i])\n SOLS[i], Cost[i] = decoding.Represantation(nVessel, nBerth,\n encode[i], pro_tim, length, t1, t2, s, e, p, structure, NAB)\n if Cost[i] < gbest:\n change = 1\n else:\n change = 0\n if change == 1:\n aux_break = 0\n P, pbest = best_solution_update.Update_pbest(nParticle, Solution,\n pbest, Cost, P)\n G, gbest, GloSOL = best_solution_update.Update_gbest(nParticle,\n Solution, gbest, Cost, G, SOLS, GloSOL)\n if aux_mutation / nParticle >= round(nIteration * 0.1):\n aux_mutation = 0\n Solution, Velocity = mutation.Mutation(nParticle, nVessel,\n Solution, Velocity, minVel, maxVel, minCoordination,\n maxCoordination)\n if aux_break / nParticle >= round(nIteration * 0.33):\n break\n end_time = time.time()\n TIMES.append(end_time - start_time)\n if len(SOLVS) == 0 or gbest < min(SOLVS):\n ggbest = gbest\n gG = GloSOL\n SOLVS.append(gbest)\n print('Solution : ', gbest)\n print('Time : ', end_time - start_time, ' sec.')\n return G, gbest, GloSOL, TIMES, SOLVS, ggbest, gG\n\n\n<function token>\n<function token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n" ]
false
99,418
52fe10656c908224d97a6dc394d50682c30e7bf4
import pytest

from streamsets.testframework.decorators import stub


@stub
@pytest.mark.parametrize('stage_attributes', [{'enable_udp_multithreading': True}])
def test_accept_threads(sdc_builder, sdc_executor, stage_attributes):
    pass


@stub
def test_broker_uri(sdc_builder, sdc_executor):
    pass


@stub
def test_charset(sdc_builder, sdc_executor):
    pass


@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'COLLECTD'},
                                              {'data_format': 'NETFLOW'},
                                              {'data_format': 'SYSLOG'}])
def test_data_format(sdc_builder, sdc_executor, stage_attributes):
    pass


@stub
@pytest.mark.parametrize('stage_attributes', [{'enable_udp_multithreading': False}, {'enable_udp_multithreading': True}])
def test_enable_udp_multithreading(sdc_builder, sdc_executor, stage_attributes):
    pass


@stub
def test_kafka_configuration(sdc_builder, sdc_executor):
    pass


@stub
@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'}, {'message_key_format': 'STRING'}])
def test_kafka_message_key(sdc_builder, sdc_executor, stage_attributes):
    pass


@stub
@pytest.mark.parametrize('stage_attributes', [{'key_serializer': 'CONFLUENT', 'message_key_format': 'AVRO'},
                                              {'key_serializer': 'STRING', 'message_key_format': 'AVRO'}])
def test_key_serializer(sdc_builder, sdc_executor, stage_attributes):
    pass


@stub
@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'}, {'message_key_format': 'STRING'}])
def test_message_key_format(sdc_builder, sdc_executor, stage_attributes):
    pass


@stub
@pytest.mark.parametrize('stage_attributes', [{'on_missing_field': 'ERROR'}, {'on_missing_field': 'IGNORE'}])
def test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):
    pass


@stub
@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'},
                                              {'on_record_error': 'STOP_PIPELINE'},
                                              {'on_record_error': 'TO_ERROR'}])
def test_on_record_error(sdc_builder, sdc_executor, stage_attributes):
    pass


@stub
def test_port(sdc_builder, sdc_executor):
    pass


@stub
@pytest.mark.parametrize('stage_attributes', [{'pretty_format': False}, {'pretty_format': True}])
def test_pretty_format(sdc_builder, sdc_executor, stage_attributes):
    pass


@stub
@pytest.mark.parametrize('stage_attributes', [{'quote_mode': 'ALL'}, {'quote_mode': 'MINIMAL'}, {'quote_mode': 'NONE'}])
def test_quote_mode(sdc_builder, sdc_executor, stage_attributes):
    pass


@stub
@pytest.mark.parametrize('stage_attributes', [{}, {}])
def test_topic(sdc_builder, sdc_executor, stage_attributes):
    pass


@stub
@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {'validate_schema': True}])
def test_validate_schema(sdc_builder, sdc_executor, stage_attributes):
    pass


@stub
@pytest.mark.parametrize('stage_attributes', [{'value_serializer': 'CONFLUENT'}, {'value_serializer': 'DEFAULT'}])
def test_value_serializer(sdc_builder, sdc_executor, stage_attributes):
    pass


@stub
def test_write_concurrency(sdc_builder, sdc_executor):
    pass


@stub
@pytest.mark.parametrize('stage_attributes', [{'validate_schema': True}])
def test_xml_schema(sdc_builder, sdc_executor, stage_attributes):
    pass
[ "import pytest\n\nfrom streamsets.testframework.decorators import stub\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'enable_udp_multithreading': True}])\ndef test_accept_threads(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_broker_uri(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\ndef test_charset(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'data_format': 'COLLECTD'},\n                                              {'data_format': 'NETFLOW'},\n                                              {'data_format': 'SYSLOG'}])\ndef test_data_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'enable_udp_multithreading': False}, {'enable_udp_multithreading': True}])\ndef test_enable_udp_multithreading(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_kafka_configuration(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'}, {'message_key_format': 'STRING'}])\ndef test_kafka_message_key(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'key_serializer': 'CONFLUENT', 'message_key_format': 'AVRO'},\n                                              {'key_serializer': 'STRING', 'message_key_format': 'AVRO'}])\ndef test_key_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'}, {'message_key_format': 'STRING'}])\ndef test_message_key_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_missing_field': 'ERROR'}, {'on_missing_field': 'IGNORE'}])\ndef test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'},\n                                              {'on_record_error': 'STOP_PIPELINE'},\n                                              {'on_record_error': 'TO_ERROR'}])\ndef test_on_record_error(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_port(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'pretty_format': False}, {'pretty_format': True}])\ndef test_pretty_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'quote_mode': 'ALL'}, {'quote_mode': 'MINIMAL'}, {'quote_mode': 'NONE'}])\ndef test_quote_mode(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{}, {}])\ndef test_topic(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {'validate_schema': True}])\ndef test_validate_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'value_serializer': 'CONFLUENT'}, {'value_serializer': 'DEFAULT'}])\ndef test_value_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_write_concurrency(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': True}])\ndef test_xml_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n",
"import pytest\nfrom streamsets.testframework.decorators import stub\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'enable_udp_multithreading':\n    True}])\ndef test_accept_threads(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_broker_uri(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\ndef test_charset(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'data_format': 'COLLECTD'},\n    {'data_format': 'NETFLOW'}, {'data_format': 'SYSLOG'}])\ndef test_data_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'enable_udp_multithreading':\n    False}, {'enable_udp_multithreading': True}])\ndef test_enable_udp_multithreading(sdc_builder, sdc_executor, stage_attributes\n    ):\n    pass\n\n\n@stub\ndef test_kafka_configuration(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_kafka_message_key(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'key_serializer':\n    'CONFLUENT', 'message_key_format': 'AVRO'}, {'key_serializer': 'STRING',\n    'message_key_format': 'AVRO'}])\ndef test_key_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_message_key_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_missing_field': 'ERROR'},\n    {'on_missing_field': 'IGNORE'}])\ndef test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'\n    }, {'on_record_error': 'STOP_PIPELINE'}, {'on_record_error': 'TO_ERROR'}])\ndef test_on_record_error(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_port(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'pretty_format': False}, {\n    'pretty_format': True}])\ndef test_pretty_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'quote_mode': 'ALL'}, {\n    'quote_mode': 'MINIMAL'}, {'quote_mode': 'NONE'}])\ndef test_quote_mode(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{}, {}])\ndef test_topic(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {\n    'validate_schema': True}])\ndef test_validate_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'value_serializer':\n    'CONFLUENT'}, {'value_serializer': 'DEFAULT'}])\ndef test_value_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_write_concurrency(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': True}])\ndef test_xml_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n",
"<import token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'enable_udp_multithreading':\n    True}])\ndef test_accept_threads(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_broker_uri(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\ndef test_charset(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'data_format': 'COLLECTD'},\n    {'data_format': 'NETFLOW'}, {'data_format': 'SYSLOG'}])\ndef test_data_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'enable_udp_multithreading':\n    False}, {'enable_udp_multithreading': True}])\ndef test_enable_udp_multithreading(sdc_builder, sdc_executor, stage_attributes\n    ):\n    pass\n\n\n@stub\ndef test_kafka_configuration(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_kafka_message_key(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'key_serializer':\n    'CONFLUENT', 'message_key_format': 'AVRO'}, {'key_serializer': 'STRING',\n    'message_key_format': 'AVRO'}])\ndef test_key_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_message_key_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_missing_field': 'ERROR'},\n    {'on_missing_field': 'IGNORE'}])\ndef test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'\n    }, {'on_record_error': 'STOP_PIPELINE'}, {'on_record_error': 'TO_ERROR'}])\ndef test_on_record_error(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_port(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'pretty_format': False}, {\n    'pretty_format': True}])\ndef test_pretty_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'quote_mode': 'ALL'}, {\n    'quote_mode': 'MINIMAL'}, {'quote_mode': 'NONE'}])\ndef test_quote_mode(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{}, {}])\ndef test_topic(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {\n    'validate_schema': True}])\ndef test_validate_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'value_serializer':\n    'CONFLUENT'}, {'value_serializer': 'DEFAULT'}])\ndef test_value_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_write_concurrency(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': True}])\ndef test_xml_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n",
"<import token>\n<function token>\n\n\n@stub\ndef test_broker_uri(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\ndef test_charset(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'data_format': 'COLLECTD'},\n    {'data_format': 'NETFLOW'}, {'data_format': 'SYSLOG'}])\ndef test_data_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'enable_udp_multithreading':\n    False}, {'enable_udp_multithreading': True}])\ndef test_enable_udp_multithreading(sdc_builder, sdc_executor, stage_attributes\n    ):\n    pass\n\n\n@stub\ndef test_kafka_configuration(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_kafka_message_key(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'key_serializer':\n    'CONFLUENT', 'message_key_format': 'AVRO'}, {'key_serializer': 'STRING',\n    'message_key_format': 'AVRO'}])\ndef test_key_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_message_key_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_missing_field': 'ERROR'},\n    {'on_missing_field': 'IGNORE'}])\ndef test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'\n    }, {'on_record_error': 'STOP_PIPELINE'}, {'on_record_error': 'TO_ERROR'}])\ndef test_on_record_error(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_port(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'pretty_format': False}, {\n    'pretty_format': True}])\ndef test_pretty_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'quote_mode': 'ALL'}, {\n    'quote_mode': 'MINIMAL'}, {'quote_mode': 'NONE'}])\ndef test_quote_mode(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{}, {}])\ndef test_topic(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {\n    'validate_schema': True}])\ndef test_validate_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'value_serializer':\n    'CONFLUENT'}, {'value_serializer': 'DEFAULT'}])\ndef test_value_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_write_concurrency(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': True}])\ndef test_xml_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n",
"<import token>\n<function token>\n\n\n@stub\ndef test_broker_uri(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\ndef test_charset(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'data_format': 'COLLECTD'},\n    {'data_format': 'NETFLOW'}, {'data_format': 'SYSLOG'}])\ndef test_data_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'enable_udp_multithreading':\n    False}, {'enable_udp_multithreading': True}])\ndef test_enable_udp_multithreading(sdc_builder, sdc_executor, stage_attributes\n    ):\n    pass\n\n\n@stub\ndef test_kafka_configuration(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_kafka_message_key(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'key_serializer':\n    'CONFLUENT', 'message_key_format': 'AVRO'}, {'key_serializer': 'STRING',\n    'message_key_format': 'AVRO'}])\ndef test_key_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_message_key_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_missing_field': 'ERROR'},\n    {'on_missing_field': 'IGNORE'}])\ndef test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'\n    }, {'on_record_error': 'STOP_PIPELINE'}, {'on_record_error': 'TO_ERROR'}])\ndef test_on_record_error(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_port(sdc_builder, sdc_executor):\n    pass\n\n\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'quote_mode': 'ALL'}, {\n    'quote_mode': 'MINIMAL'}, {'quote_mode': 'NONE'}])\ndef test_quote_mode(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{}, {}])\ndef test_topic(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {\n    'validate_schema': True}])\ndef test_validate_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'value_serializer':\n    'CONFLUENT'}, {'value_serializer': 'DEFAULT'}])\ndef test_value_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_write_concurrency(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': True}])\ndef test_xml_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n",
"<import token>\n<function token>\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'data_format': 'COLLECTD'},\n    {'data_format': 'NETFLOW'}, {'data_format': 'SYSLOG'}])\ndef test_data_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'enable_udp_multithreading':\n    False}, {'enable_udp_multithreading': True}])\ndef test_enable_udp_multithreading(sdc_builder, sdc_executor, stage_attributes\n    ):\n    pass\n\n\n@stub\ndef test_kafka_configuration(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_kafka_message_key(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'key_serializer':\n    'CONFLUENT', 'message_key_format': 'AVRO'}, {'key_serializer': 'STRING',\n    'message_key_format': 'AVRO'}])\ndef test_key_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_message_key_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_missing_field': 'ERROR'},\n    {'on_missing_field': 'IGNORE'}])\ndef test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'\n    }, {'on_record_error': 'STOP_PIPELINE'}, {'on_record_error': 'TO_ERROR'}])\ndef test_on_record_error(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_port(sdc_builder, sdc_executor):\n    pass\n\n\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{}, {}])\ndef test_topic(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {\n    'validate_schema': True}])\ndef test_validate_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'value_serializer':\n    'CONFLUENT'}, {'value_serializer': 'DEFAULT'}])\ndef test_value_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_write_concurrency(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': True}])\ndef test_xml_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n",
"<import token>\n<function token>\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'data_format': 'COLLECTD'},\n    {'data_format': 'NETFLOW'}, {'data_format': 'SYSLOG'}])\ndef test_data_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'enable_udp_multithreading':\n    False}, {'enable_udp_multithreading': True}])\ndef test_enable_udp_multithreading(sdc_builder, sdc_executor, stage_attributes\n    ):\n    pass\n\n\n@stub\ndef test_kafka_configuration(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_kafka_message_key(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'key_serializer':\n    'CONFLUENT', 'message_key_format': 'AVRO'}, {'key_serializer': 'STRING',\n    'message_key_format': 'AVRO'}])\ndef test_key_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_message_key_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_missing_field': 'ERROR'},\n    {'on_missing_field': 'IGNORE'}])\ndef test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'\n    }, {'on_record_error': 'STOP_PIPELINE'}, {'on_record_error': 'TO_ERROR'}])\ndef test_on_record_error(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_port(sdc_builder, sdc_executor):\n    pass\n\n\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{}, {}])\ndef test_topic(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {\n    'validate_schema': True}])\ndef test_validate_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'value_serializer':\n    'CONFLUENT'}, {'value_serializer': 'DEFAULT'}])\ndef test_value_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_write_concurrency(sdc_builder, sdc_executor):\n    pass\n\n\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'enable_udp_multithreading':\n    False}, {'enable_udp_multithreading': True}])\ndef test_enable_udp_multithreading(sdc_builder, sdc_executor, stage_attributes\n    ):\n    pass\n\n\n@stub\ndef test_kafka_configuration(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_kafka_message_key(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_message_key_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_missing_field': 'ERROR'},\n    {'on_missing_field': 'IGNORE'}])\ndef test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'\n    }, {'on_record_error': 'STOP_PIPELINE'}, {'on_record_error': 'TO_ERROR'}])\ndef test_on_record_error(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_port(sdc_builder, sdc_executor):\n    pass\n\n\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{}, {}])\ndef test_topic(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {\n    'validate_schema': True}])\ndef test_validate_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'value_serializer':\n    'CONFLUENT'}, {'value_serializer': 'DEFAULT'}])\ndef test_value_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_write_concurrency(sdc_builder, sdc_executor):\n    pass\n\n\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@stub\ndef test_kafka_configuration(sdc_builder, sdc_executor):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_kafka_message_key(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_message_key_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_missing_field': 'ERROR'},\n    {'on_missing_field': 'IGNORE'}])\ndef test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'\n    }, {'on_record_error': 'STOP_PIPELINE'}, {'on_record_error': 'TO_ERROR'}])\ndef test_on_record_error(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_port(sdc_builder, sdc_executor):\n    pass\n\n\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{}, {}])\ndef test_topic(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {\n    'validate_schema': True}])\ndef test_validate_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'value_serializer':\n    'CONFLUENT'}, {'value_serializer': 'DEFAULT'}])\ndef test_value_serializer(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\ndef test_write_concurrency(sdc_builder, sdc_executor):\n    pass\n\n\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_kafka_message_key(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_message_key_format(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_missing_field': 'ERROR'},\n    {'on_missing_field': 'IGNORE'}])\ndef test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {\n    'validate_schema': True}])\ndef test_validate_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'message_key_format': 'AVRO'\n    }, {'message_key_format': 'STRING'}])\ndef test_kafka_message_key(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_missing_field': 'ERROR'},\n    {'on_missing_field': 'IGNORE'}])\ndef test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {\n    'validate_schema': True}])\ndef test_validate_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n<function token>\n\n\n@stub\ndef test_write_concurrency(sdc_builder, sdc_executor):\n    pass\n\n\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'on_missing_field': 'ERROR'},\n    {'on_missing_field': 'IGNORE'}])\ndef test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {\n    'validate_schema': True}])\ndef test_validate_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@stub\n@pytest.mark.parametrize('stage_attributes', [{'validate_schema': False}, {\n    'validate_schema': True}])\ndef test_validate_schema(sdc_builder, sdc_executor, stage_attributes):\n    pass\n\n\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n" ]
false
99,419
0cc1e84428b9f056e3d8b2869869037e27a89eed
from PIL import Image

class FilterEditor:
    filter_dict = {
        'black_and_white': 'make_black_and_white',
        'sepia': 'make_sepia',
        'bright': 'make_bright'
    }

    @staticmethod
    def add_filter(image, user_filter):
        # TODO Handle the case where a dictionary of color settings is passed in, but that is for the future
        return getattr(FilterEditor, FilterEditor.filter_dict[user_filter])(image)

    @staticmethod
    def make_black_and_white(image):
        result = Image.new('RGB', image.size)
        for x in range(image.size[0]):
            for y in range(image.size[1]):
                r, g, b = image.getpixel((x, y))
                gray = int(r * 0.2126 + g * 0.7152 + b * 0.0722)
                result.putpixel((x, y), (gray, gray, gray))
        return result

    @staticmethod
    def make_sepia(image):
        result = Image.new('RGB', image.size)
        for x in range(image.size[0]):
            for y in range(image.size[1]):
                r, g, b = image.getpixel((x, y))
                red = int(r * 0.393 + g * 0.769 + b * 0.189)
                green = int(r * 0.349 + g * 0.686 + b * 0.168)
                blue = int(r * 0.272 + g * 0.534 + b * 0.131)
                result.putpixel((x, y), (red, green, blue))
        return result

    @staticmethod
    def make_bright(image):
        brightness = 2
        result = Image.new('RGB', image.size)
        for x in range(image.size[0]):
            for y in range(image.size[1]):
                r, g, b = image.getpixel((x, y))

                red = int(r * brightness)
                red = min(255, max(0, red))

                green = int(g * brightness)
                green = min(255, max(0, green))

                blue = int(b * brightness)
                blue = min(255, max(0, blue))

                result.putpixel((x, y), (red, green, blue))
        return result
[ "from PIL import Image\n\nclass FilterEditor:\n    filter_dict = {\n        'black_and_white': 'make_black_and_white',\n        'sepia': 'make_sepia',\n        'bright': 'make_bright'\n    }\n\n    @staticmethod\n    def add_filter(image, user_filter):\n        # TODO Handle the case where a dictionary of color settings is passed in, but that is for the future\n        return getattr(FilterEditor, FilterEditor.filter_dict[user_filter])(image)\n\n    @staticmethod\n    def make_black_and_white(image):\n        result = Image.new('RGB', image.size)\n        for x in range(image.size[0]):\n            for y in range(image.size[1]):\n                r, g, b = image.getpixel((x, y))\n                gray = int(r * 0.2126 + g * 0.7152 + b * 0.0722)\n                result.putpixel((x, y), (gray, gray, gray))\n        return result\n\n    @staticmethod\n    def make_sepia(image):\n        result = Image.new('RGB', image.size)\n        for x in range(image.size[0]):\n            for y in range(image.size[1]):\n                r, g, b = image.getpixel((x, y))\n                red = int(r * 0.393 + g * 0.769 + b * 0.189)\n                green = int(r * 0.349 + g * 0.686 + b * 0.168)\n                blue = int(r * 0.272 + g * 0.534 + b * 0.131)\n                result.putpixel((x, y), (red, green, blue))\n        return result\n\n    @staticmethod\n    def make_bright(image):\n        brightness = 2\n        result = Image.new('RGB', image.size)\n        for x in range(image.size[0]):\n            for y in range(image.size[1]):\n                r, g, b = image.getpixel((x, y))\n\n                red = int(r * brightness)\n                red = min(255, max(0, red))\n\n                green = int(g * brightness)\n                green = min(255, max(0, green))\n\n                blue = int(b * brightness)\n                blue = min(255, max(0, blue))\n\n                result.putpixel((x, y), (red, green, blue))\n        return result\n", "from PIL import Image\n\n\nclass FilterEditor:\n    filter_dict = {'black_and_white': 'make_black_and_white', 'sepia':\n        'make_sepia', 'bright': 'make_bright'}\n\n    @staticmethod\n    def add_filter(image, user_filter):\n        return getattr(FilterEditor, FilterEditor.filter_dict[user_filter])(\n            image)\n\n    @staticmethod\n    def make_black_and_white(image):\n        result = Image.new('RGB', image.size)\n        for x in range(image.size[0]):\n            for y in range(image.size[1]):\n                r, g, b = image.getpixel((x, y))\n                gray = int(r * 0.2126 + g * 0.7152 + b * 0.0722)\n                result.putpixel((x, y), (gray, gray, gray))\n        return result\n\n    @staticmethod\n    def make_sepia(image):\n        result = Image.new('RGB', image.size)\n        for x in range(image.size[0]):\n            for y in range(image.size[1]):\n                r, g, b = image.getpixel((x, y))\n                red = int(r * 0.393 + g * 0.769 + b * 0.189)\n                green = int(r * 0.349 + g * 0.686 + b * 0.168)\n                blue = int(r * 0.272 + g * 0.534 + b * 0.131)\n                result.putpixel((x, y), (red, green, blue))\n        return result\n\n    @staticmethod\n    def make_bright(image):\n        brightness = 2\n        result = Image.new('RGB', image.size)\n        for x in range(image.size[0]):\n            for y in range(image.size[1]):\n                r, g, b = image.getpixel((x, y))\n                red = int(r * brightness)\n                red = min(255, max(0, red))\n                green = int(g * brightness)\n                green = min(255, max(0, green))\n                blue = int(b * brightness)\n                blue = min(255, max(0, blue))\n                result.putpixel((x, y), (red, green, blue))\n        return result\n", "<import token>\n\n\nclass FilterEditor:\n    filter_dict = {'black_and_white': 'make_black_and_white', 'sepia':\n        'make_sepia', 'bright': 'make_bright'}\n\n    @staticmethod\n    def add_filter(image, user_filter):\n        return getattr(FilterEditor, FilterEditor.filter_dict[user_filter])(\n            image)\n\n    @staticmethod\n    def make_black_and_white(image):\n        result = Image.new('RGB', image.size)\n        for x in range(image.size[0]):\n            for y in range(image.size[1]):\n                r, g, b = image.getpixel((x, y))\n                gray = int(r * 0.2126 + g * 0.7152 + b * 0.0722)\n                result.putpixel((x, y), 
(gray, gray, gray))\n return result\n\n @staticmethod\n def make_sepia(image):\n result = Image.new('RGB', image.size)\n for x in range(image.size[0]):\n for y in range(image.size[1]):\n r, g, b = image.getpixel((x, y))\n red = int(r * 0.393 + g * 0.769 + b * 0.189)\n green = int(r * 0.349 + g * 0.686 + b * 0.168)\n blue = int(r * 0.272 + g * 0.534 + b * 0.131)\n result.putpixel((x, y), (red, green, blue))\n return result\n\n @staticmethod\n def make_bright(image):\n brightness = 2\n result = Image.new('RGB', image.size)\n for x in range(image.size[0]):\n for y in range(image.size[1]):\n r, g, b = image.getpixel((x, y))\n red = int(r * brightness)\n red = min(255, max(0, red))\n green = int(g * brightness)\n green = min(255, max(0, green))\n blue = int(b * brightness)\n blue = min(255, max(0, blue))\n result.putpixel((x, y), (red, green, blue))\n return result\n", "<import token>\n\n\nclass FilterEditor:\n <assignment token>\n\n @staticmethod\n def add_filter(image, user_filter):\n return getattr(FilterEditor, FilterEditor.filter_dict[user_filter])(\n image)\n\n @staticmethod\n def make_black_and_white(image):\n result = Image.new('RGB', image.size)\n for x in range(image.size[0]):\n for y in range(image.size[1]):\n r, g, b = image.getpixel((x, y))\n gray = int(r * 0.2126 + g * 0.7152 + b * 0.0722)\n result.putpixel((x, y), (gray, gray, gray))\n return result\n\n @staticmethod\n def make_sepia(image):\n result = Image.new('RGB', image.size)\n for x in range(image.size[0]):\n for y in range(image.size[1]):\n r, g, b = image.getpixel((x, y))\n red = int(r * 0.393 + g * 0.769 + b * 0.189)\n green = int(r * 0.349 + g * 0.686 + b * 0.168)\n blue = int(r * 0.272 + g * 0.534 + b * 0.131)\n result.putpixel((x, y), (red, green, blue))\n return result\n\n @staticmethod\n def make_bright(image):\n brightness = 2\n result = Image.new('RGB', image.size)\n for x in range(image.size[0]):\n for y in range(image.size[1]):\n r, g, b = image.getpixel((x, y))\n red = int(r * brightness)\n red = min(255, max(0, red))\n green = int(g * brightness)\n green = min(255, max(0, green))\n blue = int(b * brightness)\n blue = min(255, max(0, blue))\n result.putpixel((x, y), (red, green, blue))\n return result\n", "<import token>\n\n\nclass FilterEditor:\n <assignment token>\n\n @staticmethod\n def add_filter(image, user_filter):\n return getattr(FilterEditor, FilterEditor.filter_dict[user_filter])(\n image)\n\n @staticmethod\n def make_black_and_white(image):\n result = Image.new('RGB', image.size)\n for x in range(image.size[0]):\n for y in range(image.size[1]):\n r, g, b = image.getpixel((x, y))\n gray = int(r * 0.2126 + g * 0.7152 + b * 0.0722)\n result.putpixel((x, y), (gray, gray, gray))\n return result\n <function token>\n\n @staticmethod\n def make_bright(image):\n brightness = 2\n result = Image.new('RGB', image.size)\n for x in range(image.size[0]):\n for y in range(image.size[1]):\n r, g, b = image.getpixel((x, y))\n red = int(r * brightness)\n red = min(255, max(0, red))\n green = int(g * brightness)\n green = min(255, max(0, green))\n blue = int(b * brightness)\n blue = min(255, max(0, blue))\n result.putpixel((x, y), (red, green, blue))\n return result\n", "<import token>\n\n\nclass FilterEditor:\n <assignment token>\n\n @staticmethod\n def add_filter(image, user_filter):\n return getattr(FilterEditor, FilterEditor.filter_dict[user_filter])(\n image)\n\n @staticmethod\n def make_black_and_white(image):\n result = Image.new('RGB', image.size)\n for x in range(image.size[0]):\n for y in 
range(image.size[1]):\n r, g, b = image.getpixel((x, y))\n gray = int(r * 0.2126 + g * 0.7152 + b * 0.0722)\n result.putpixel((x, y), (gray, gray, gray))\n return result\n <function token>\n <function token>\n", "<import token>\n\n\nclass FilterEditor:\n <assignment token>\n <function token>\n\n @staticmethod\n def make_black_and_white(image):\n result = Image.new('RGB', image.size)\n for x in range(image.size[0]):\n for y in range(image.size[1]):\n r, g, b = image.getpixel((x, y))\n gray = int(r * 0.2126 + g * 0.7152 + b * 0.0722)\n result.putpixel((x, y), (gray, gray, gray))\n return result\n <function token>\n <function token>\n", "<import token>\n\n\nclass FilterEditor:\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<class token>\n" ]
false
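A note for readers of this dump: the FilterEditor sample in the steps above applies every filter through per-pixel Python loops (getpixel/putpixel), which is correct but slow on large images. A hedged sketch of the same three filters using Pillow's C-backed helpers follows; it is an illustration added here, not part of any record. The sepia matrix reuses the sample's own coefficients, while convert('L') uses Rec. 601 luma weights (0.299/0.587/0.114) rather than the Rec. 709 weights (0.2126/0.7152/0.0722) hard-coded in the sample.

from PIL import Image, ImageEnhance

def black_and_white(image: Image.Image) -> Image.Image:
    # convert('L') computes luma in C (Rec. 601 weights), then back to RGB.
    return image.convert('L').convert('RGB')

def sepia(image: Image.Image) -> Image.Image:
    # A 12-tuple matrix applies an RGB->RGB linear map per pixel in C;
    # the coefficients match the per-channel weights in the sample above.
    matrix = (0.393, 0.769, 0.189, 0,
              0.349, 0.686, 0.168, 0,
              0.272, 0.534, 0.131, 0)
    return image.convert('RGB', matrix)

def bright(image: Image.Image, factor: float = 2.0) -> Image.Image:
    # ImageEnhance.Brightness scales all channels and clamps to [0, 255].
    return ImageEnhance.Brightness(image).enhance(factor)

On a few-megapixel image the matrix and enhancer paths are typically orders of magnitude faster than the nested Python loops.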
99,420
ee2f616b2d9541fc3dc7401ab9121d94d17aedb4
"""Semantic shift benchmark.""" import dataclasses import os import pickle import numpy as np import requests import torchvision.transforms as tv_transforms import shifthappens.config import shifthappens.data.base as sh_data import shifthappens.data.torch as sh_data_torch from shifthappens import benchmark as sh_benchmark from shifthappens.models import base as sh_models from shifthappens.models.base import PredictionTargets from shifthappens.tasks.base import abstract_variable from shifthappens.tasks.base import Task from shifthappens.tasks.base import variable from shifthappens.tasks.metrics import Metric from shifthappens.tasks.mixins import OODScoreTaskMixin from shifthappens.tasks.ssb.imagenet_ssb import _get_imagenet_ssb_subset from shifthappens.tasks.ssb.imagenet_ssb import assert_data_downloaded from shifthappens.tasks.task_result import TaskResult from shifthappens.tasks.utils import auroc_ood from shifthappens.tasks.utils import fpr_at_tpr @dataclasses.dataclass class _SSB(Task, OODScoreTaskMixin): """ Prepares the ImageNet evaluation from the Semantic Shift Benchmark for open-set recognition (OSR) Downloads SSB OSR splits to Task.data_root Assumes ImageNet-21KP validation splits are downloaded to shifthappens.config.imagenet21k_preprocessed_validation_path To download the ImageNet21k-P data: Follow instructions at https://github.com/Alibaba-MIIL/ImageNet21K/blob/main/dataset_preprocessing/processing_instructions.md Ensure data is from the Winter21 ImageNet release! """ OSR_URL = "https://github.com/sgvaze/osr_closed_set_all_you_need/raw/main/data/open_set_splits/imagenet_osr_splits_winter21.pkl" subset_type: str = abstract_variable() max_batch_size: int = 256 def setup(self): """Asserts data is downloaded and sets up open-set dataset""" osr_split_path = os.path.join( self.data_root, "imagenet_osr_splits_winter21.pkl" ) if not os.path.exists(osr_split_path): os.makedirs(self.data_root, exist_ok=True) osr_split = requests.get(self.OSR_URL) open(osr_split_path, "wb").write(osr_split.content) else: with open(osr_split_path, "rb") as f: osr_split = pickle.load(f) # Ensure data is downloaded assert_data_downloaded( osr_split, shifthappens.config.imagenet21k_preprocessed_validation_path ) test_transform = tv_transforms.Compose( [ tv_transforms.ToTensor(), tv_transforms.Lambda(lambda x: x.permute(1, 2, 0)), ] ) dataset_out = _get_imagenet_ssb_subset( imagenet21k_root=shifthappens.config.imagenet21k_preprocessed_validation_path, osr_split=osr_split, test_transform=test_transform, subset_type=self.subset_type, ) self.dataset_out = sh_data_torch.IndexedTorchDataset( sh_data_torch.ImagesOnlyTorchDataset(dataset_out) ) def _prepare_dataloader(self): dataloader_out = sh_data.DataLoader( self.dataset_out, max_batch_size=self.max_batch_size ) return dataloader_out def _evaluate(self, model: sh_models.Model) -> TaskResult: dataloader = self._prepare_dataloader() ood_scores_out_list = [] for predictions_out in model.predict( dataloader, PredictionTargets(ood_scores=True) ): assert ( predictions_out.ood_scores is not None ), "OOD scores for SSB task is None" ood_scores_out_list.append(predictions_out.ood_scores) ood_scores_out = np.hstack(ood_scores_out_list) auroc = auroc_ood( np.array(model.imagenet_validation_result.ood_scores), ood_scores_out ) fpr_at_95 = fpr_at_tpr( np.array(model.imagenet_validation_result.ood_scores), ood_scores_out, 0.95 ) return TaskResult( auroc=auroc, fpr_at_95=fpr_at_95, summary_metrics={ Metric.OODDetection: ("auroc", "fpr_at_95"), }, ) @sh_benchmark.register_task( 
name="SSB_easy", relative_data_folder="ssb", standalone=True ) @dataclasses.dataclass class SSBEasy(_SSB): """SSB Easy subset""" subset_type: str = variable("easy") @sh_benchmark.register_task( name="SSB_hard", relative_data_folder="ssb", standalone=True ) @dataclasses.dataclass class SSBHard(_SSB): """SSB Hard subset""" subset_type: str = variable("hard")
[ "\"\"\"Semantic shift benchmark.\"\"\"\n\nimport dataclasses\nimport os\nimport pickle\n\nimport numpy as np\nimport requests\nimport torchvision.transforms as tv_transforms\n\nimport shifthappens.config\nimport shifthappens.data.base as sh_data\nimport shifthappens.data.torch as sh_data_torch\nfrom shifthappens import benchmark as sh_benchmark\nfrom shifthappens.models import base as sh_models\nfrom shifthappens.models.base import PredictionTargets\nfrom shifthappens.tasks.base import abstract_variable\nfrom shifthappens.tasks.base import Task\nfrom shifthappens.tasks.base import variable\nfrom shifthappens.tasks.metrics import Metric\nfrom shifthappens.tasks.mixins import OODScoreTaskMixin\nfrom shifthappens.tasks.ssb.imagenet_ssb import _get_imagenet_ssb_subset\nfrom shifthappens.tasks.ssb.imagenet_ssb import assert_data_downloaded\nfrom shifthappens.tasks.task_result import TaskResult\nfrom shifthappens.tasks.utils import auroc_ood\nfrom shifthappens.tasks.utils import fpr_at_tpr\n\n\[email protected]\nclass _SSB(Task, OODScoreTaskMixin):\n \"\"\"\n Prepares the ImageNet evaluation from the Semantic Shift Benchmark for open-set recognition (OSR)\n\n Downloads SSB OSR splits to Task.data_root\n Assumes ImageNet-21KP validation splits are downloaded to shifthappens.config.imagenet21k_preprocessed_validation_path\n To download the ImageNet21k-P data:\n Follow instructions at https://github.com/Alibaba-MIIL/ImageNet21K/blob/main/dataset_preprocessing/processing_instructions.md\n Ensure data is from the Winter21 ImageNet release!\n \"\"\"\n\n OSR_URL = \"https://github.com/sgvaze/osr_closed_set_all_you_need/raw/main/data/open_set_splits/imagenet_osr_splits_winter21.pkl\"\n\n subset_type: str = abstract_variable()\n\n max_batch_size: int = 256\n\n def setup(self):\n \"\"\"Asserts data is downloaded and sets up open-set dataset\"\"\"\n osr_split_path = os.path.join(\n self.data_root, \"imagenet_osr_splits_winter21.pkl\"\n )\n if not os.path.exists(osr_split_path):\n os.makedirs(self.data_root, exist_ok=True)\n osr_split = requests.get(self.OSR_URL)\n open(osr_split_path, \"wb\").write(osr_split.content)\n else:\n with open(osr_split_path, \"rb\") as f:\n osr_split = pickle.load(f)\n # Ensure data is downloaded\n assert_data_downloaded(\n osr_split, shifthappens.config.imagenet21k_preprocessed_validation_path\n )\n test_transform = tv_transforms.Compose(\n [\n tv_transforms.ToTensor(),\n tv_transforms.Lambda(lambda x: x.permute(1, 2, 0)),\n ]\n )\n\n dataset_out = _get_imagenet_ssb_subset(\n imagenet21k_root=shifthappens.config.imagenet21k_preprocessed_validation_path,\n osr_split=osr_split,\n test_transform=test_transform,\n subset_type=self.subset_type,\n )\n\n self.dataset_out = sh_data_torch.IndexedTorchDataset(\n sh_data_torch.ImagesOnlyTorchDataset(dataset_out)\n )\n\n def _prepare_dataloader(self):\n dataloader_out = sh_data.DataLoader(\n self.dataset_out, max_batch_size=self.max_batch_size\n )\n return dataloader_out\n\n def _evaluate(self, model: sh_models.Model) -> TaskResult:\n dataloader = self._prepare_dataloader()\n ood_scores_out_list = []\n for predictions_out in model.predict(\n dataloader, PredictionTargets(ood_scores=True)\n ):\n assert (\n predictions_out.ood_scores is not None\n ), \"OOD scores for SSB task is None\"\n ood_scores_out_list.append(predictions_out.ood_scores)\n ood_scores_out = np.hstack(ood_scores_out_list)\n\n auroc = auroc_ood(\n np.array(model.imagenet_validation_result.ood_scores), ood_scores_out\n )\n fpr_at_95 = fpr_at_tpr(\n 
np.array(model.imagenet_validation_result.ood_scores), ood_scores_out, 0.95\n )\n return TaskResult(\n auroc=auroc,\n fpr_at_95=fpr_at_95,\n summary_metrics={\n Metric.OODDetection: (\"auroc\", \"fpr_at_95\"),\n },\n )\n\n\n@sh_benchmark.register_task(\n name=\"SSB_easy\", relative_data_folder=\"ssb\", standalone=True\n)\[email protected]\nclass SSBEasy(_SSB):\n \"\"\"SSB Easy subset\"\"\"\n\n subset_type: str = variable(\"easy\")\n\n\n@sh_benchmark.register_task(\n name=\"SSB_hard\", relative_data_folder=\"ssb\", standalone=True\n)\[email protected]\nclass SSBHard(_SSB):\n \"\"\"SSB Hard subset\"\"\"\n\n subset_type: str = variable(\"hard\")\n", "<docstring token>\nimport dataclasses\nimport os\nimport pickle\nimport numpy as np\nimport requests\nimport torchvision.transforms as tv_transforms\nimport shifthappens.config\nimport shifthappens.data.base as sh_data\nimport shifthappens.data.torch as sh_data_torch\nfrom shifthappens import benchmark as sh_benchmark\nfrom shifthappens.models import base as sh_models\nfrom shifthappens.models.base import PredictionTargets\nfrom shifthappens.tasks.base import abstract_variable\nfrom shifthappens.tasks.base import Task\nfrom shifthappens.tasks.base import variable\nfrom shifthappens.tasks.metrics import Metric\nfrom shifthappens.tasks.mixins import OODScoreTaskMixin\nfrom shifthappens.tasks.ssb.imagenet_ssb import _get_imagenet_ssb_subset\nfrom shifthappens.tasks.ssb.imagenet_ssb import assert_data_downloaded\nfrom shifthappens.tasks.task_result import TaskResult\nfrom shifthappens.tasks.utils import auroc_ood\nfrom shifthappens.tasks.utils import fpr_at_tpr\n\n\[email protected]\nclass _SSB(Task, OODScoreTaskMixin):\n \"\"\"\n Prepares the ImageNet evaluation from the Semantic Shift Benchmark for open-set recognition (OSR)\n\n Downloads SSB OSR splits to Task.data_root\n Assumes ImageNet-21KP validation splits are downloaded to shifthappens.config.imagenet21k_preprocessed_validation_path\n To download the ImageNet21k-P data:\n Follow instructions at https://github.com/Alibaba-MIIL/ImageNet21K/blob/main/dataset_preprocessing/processing_instructions.md\n Ensure data is from the Winter21 ImageNet release!\n \"\"\"\n OSR_URL = (\n 'https://github.com/sgvaze/osr_closed_set_all_you_need/raw/main/data/open_set_splits/imagenet_osr_splits_winter21.pkl'\n )\n subset_type: str = abstract_variable()\n max_batch_size: int = 256\n\n def setup(self):\n \"\"\"Asserts data is downloaded and sets up open-set dataset\"\"\"\n osr_split_path = os.path.join(self.data_root,\n 'imagenet_osr_splits_winter21.pkl')\n if not os.path.exists(osr_split_path):\n os.makedirs(self.data_root, exist_ok=True)\n osr_split = requests.get(self.OSR_URL)\n open(osr_split_path, 'wb').write(osr_split.content)\n else:\n with open(osr_split_path, 'rb') as f:\n osr_split = pickle.load(f)\n assert_data_downloaded(osr_split, shifthappens.config.\n imagenet21k_preprocessed_validation_path)\n test_transform = tv_transforms.Compose([tv_transforms.ToTensor(),\n tv_transforms.Lambda(lambda x: x.permute(1, 2, 0))])\n dataset_out = _get_imagenet_ssb_subset(imagenet21k_root=\n shifthappens.config.imagenet21k_preprocessed_validation_path,\n osr_split=osr_split, test_transform=test_transform, subset_type\n =self.subset_type)\n self.dataset_out = sh_data_torch.IndexedTorchDataset(sh_data_torch.\n ImagesOnlyTorchDataset(dataset_out))\n\n def _prepare_dataloader(self):\n dataloader_out = sh_data.DataLoader(self.dataset_out,\n max_batch_size=self.max_batch_size)\n return dataloader_out\n\n def 
_evaluate(self, model: sh_models.Model) ->TaskResult:\n dataloader = self._prepare_dataloader()\n ood_scores_out_list = []\n for predictions_out in model.predict(dataloader, PredictionTargets(\n ood_scores=True)):\n assert predictions_out.ood_scores is not None, 'OOD scores for SSB task is None'\n ood_scores_out_list.append(predictions_out.ood_scores)\n ood_scores_out = np.hstack(ood_scores_out_list)\n auroc = auroc_ood(np.array(model.imagenet_validation_result.\n ood_scores), ood_scores_out)\n fpr_at_95 = fpr_at_tpr(np.array(model.imagenet_validation_result.\n ood_scores), ood_scores_out, 0.95)\n return TaskResult(auroc=auroc, fpr_at_95=fpr_at_95, summary_metrics\n ={Metric.OODDetection: ('auroc', 'fpr_at_95')})\n\n\n@sh_benchmark.register_task(name='SSB_easy', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBEasy(_SSB):\n \"\"\"SSB Easy subset\"\"\"\n subset_type: str = variable('easy')\n\n\n@sh_benchmark.register_task(name='SSB_hard', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBHard(_SSB):\n \"\"\"SSB Hard subset\"\"\"\n subset_type: str = variable('hard')\n", "<docstring token>\n<import token>\n\n\[email protected]\nclass _SSB(Task, OODScoreTaskMixin):\n \"\"\"\n Prepares the ImageNet evaluation from the Semantic Shift Benchmark for open-set recognition (OSR)\n\n Downloads SSB OSR splits to Task.data_root\n Assumes ImageNet-21KP validation splits are downloaded to shifthappens.config.imagenet21k_preprocessed_validation_path\n To download the ImageNet21k-P data:\n Follow instructions at https://github.com/Alibaba-MIIL/ImageNet21K/blob/main/dataset_preprocessing/processing_instructions.md\n Ensure data is from the Winter21 ImageNet release!\n \"\"\"\n OSR_URL = (\n 'https://github.com/sgvaze/osr_closed_set_all_you_need/raw/main/data/open_set_splits/imagenet_osr_splits_winter21.pkl'\n )\n subset_type: str = abstract_variable()\n max_batch_size: int = 256\n\n def setup(self):\n \"\"\"Asserts data is downloaded and sets up open-set dataset\"\"\"\n osr_split_path = os.path.join(self.data_root,\n 'imagenet_osr_splits_winter21.pkl')\n if not os.path.exists(osr_split_path):\n os.makedirs(self.data_root, exist_ok=True)\n osr_split = requests.get(self.OSR_URL)\n open(osr_split_path, 'wb').write(osr_split.content)\n else:\n with open(osr_split_path, 'rb') as f:\n osr_split = pickle.load(f)\n assert_data_downloaded(osr_split, shifthappens.config.\n imagenet21k_preprocessed_validation_path)\n test_transform = tv_transforms.Compose([tv_transforms.ToTensor(),\n tv_transforms.Lambda(lambda x: x.permute(1, 2, 0))])\n dataset_out = _get_imagenet_ssb_subset(imagenet21k_root=\n shifthappens.config.imagenet21k_preprocessed_validation_path,\n osr_split=osr_split, test_transform=test_transform, subset_type\n =self.subset_type)\n self.dataset_out = sh_data_torch.IndexedTorchDataset(sh_data_torch.\n ImagesOnlyTorchDataset(dataset_out))\n\n def _prepare_dataloader(self):\n dataloader_out = sh_data.DataLoader(self.dataset_out,\n max_batch_size=self.max_batch_size)\n return dataloader_out\n\n def _evaluate(self, model: sh_models.Model) ->TaskResult:\n dataloader = self._prepare_dataloader()\n ood_scores_out_list = []\n for predictions_out in model.predict(dataloader, PredictionTargets(\n ood_scores=True)):\n assert predictions_out.ood_scores is not None, 'OOD scores for SSB task is None'\n ood_scores_out_list.append(predictions_out.ood_scores)\n ood_scores_out = np.hstack(ood_scores_out_list)\n auroc = auroc_ood(np.array(model.imagenet_validation_result.\n 
ood_scores), ood_scores_out)\n fpr_at_95 = fpr_at_tpr(np.array(model.imagenet_validation_result.\n ood_scores), ood_scores_out, 0.95)\n return TaskResult(auroc=auroc, fpr_at_95=fpr_at_95, summary_metrics\n ={Metric.OODDetection: ('auroc', 'fpr_at_95')})\n\n\n@sh_benchmark.register_task(name='SSB_easy', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBEasy(_SSB):\n \"\"\"SSB Easy subset\"\"\"\n subset_type: str = variable('easy')\n\n\n@sh_benchmark.register_task(name='SSB_hard', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBHard(_SSB):\n \"\"\"SSB Hard subset\"\"\"\n subset_type: str = variable('hard')\n", "<docstring token>\n<import token>\n\n\[email protected]\nclass _SSB(Task, OODScoreTaskMixin):\n <docstring token>\n OSR_URL = (\n 'https://github.com/sgvaze/osr_closed_set_all_you_need/raw/main/data/open_set_splits/imagenet_osr_splits_winter21.pkl'\n )\n subset_type: str = abstract_variable()\n max_batch_size: int = 256\n\n def setup(self):\n \"\"\"Asserts data is downloaded and sets up open-set dataset\"\"\"\n osr_split_path = os.path.join(self.data_root,\n 'imagenet_osr_splits_winter21.pkl')\n if not os.path.exists(osr_split_path):\n os.makedirs(self.data_root, exist_ok=True)\n osr_split = requests.get(self.OSR_URL)\n open(osr_split_path, 'wb').write(osr_split.content)\n else:\n with open(osr_split_path, 'rb') as f:\n osr_split = pickle.load(f)\n assert_data_downloaded(osr_split, shifthappens.config.\n imagenet21k_preprocessed_validation_path)\n test_transform = tv_transforms.Compose([tv_transforms.ToTensor(),\n tv_transforms.Lambda(lambda x: x.permute(1, 2, 0))])\n dataset_out = _get_imagenet_ssb_subset(imagenet21k_root=\n shifthappens.config.imagenet21k_preprocessed_validation_path,\n osr_split=osr_split, test_transform=test_transform, subset_type\n =self.subset_type)\n self.dataset_out = sh_data_torch.IndexedTorchDataset(sh_data_torch.\n ImagesOnlyTorchDataset(dataset_out))\n\n def _prepare_dataloader(self):\n dataloader_out = sh_data.DataLoader(self.dataset_out,\n max_batch_size=self.max_batch_size)\n return dataloader_out\n\n def _evaluate(self, model: sh_models.Model) ->TaskResult:\n dataloader = self._prepare_dataloader()\n ood_scores_out_list = []\n for predictions_out in model.predict(dataloader, PredictionTargets(\n ood_scores=True)):\n assert predictions_out.ood_scores is not None, 'OOD scores for SSB task is None'\n ood_scores_out_list.append(predictions_out.ood_scores)\n ood_scores_out = np.hstack(ood_scores_out_list)\n auroc = auroc_ood(np.array(model.imagenet_validation_result.\n ood_scores), ood_scores_out)\n fpr_at_95 = fpr_at_tpr(np.array(model.imagenet_validation_result.\n ood_scores), ood_scores_out, 0.95)\n return TaskResult(auroc=auroc, fpr_at_95=fpr_at_95, summary_metrics\n ={Metric.OODDetection: ('auroc', 'fpr_at_95')})\n\n\n@sh_benchmark.register_task(name='SSB_easy', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBEasy(_SSB):\n \"\"\"SSB Easy subset\"\"\"\n subset_type: str = variable('easy')\n\n\n@sh_benchmark.register_task(name='SSB_hard', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBHard(_SSB):\n \"\"\"SSB Hard subset\"\"\"\n subset_type: str = variable('hard')\n", "<docstring token>\n<import token>\n\n\[email protected]\nclass _SSB(Task, OODScoreTaskMixin):\n <docstring token>\n <assignment token>\n subset_type: str = abstract_variable()\n max_batch_size: int = 256\n\n def setup(self):\n \"\"\"Asserts data is downloaded and sets up open-set 
dataset\"\"\"\n osr_split_path = os.path.join(self.data_root,\n 'imagenet_osr_splits_winter21.pkl')\n if not os.path.exists(osr_split_path):\n os.makedirs(self.data_root, exist_ok=True)\n osr_split = requests.get(self.OSR_URL)\n open(osr_split_path, 'wb').write(osr_split.content)\n else:\n with open(osr_split_path, 'rb') as f:\n osr_split = pickle.load(f)\n assert_data_downloaded(osr_split, shifthappens.config.\n imagenet21k_preprocessed_validation_path)\n test_transform = tv_transforms.Compose([tv_transforms.ToTensor(),\n tv_transforms.Lambda(lambda x: x.permute(1, 2, 0))])\n dataset_out = _get_imagenet_ssb_subset(imagenet21k_root=\n shifthappens.config.imagenet21k_preprocessed_validation_path,\n osr_split=osr_split, test_transform=test_transform, subset_type\n =self.subset_type)\n self.dataset_out = sh_data_torch.IndexedTorchDataset(sh_data_torch.\n ImagesOnlyTorchDataset(dataset_out))\n\n def _prepare_dataloader(self):\n dataloader_out = sh_data.DataLoader(self.dataset_out,\n max_batch_size=self.max_batch_size)\n return dataloader_out\n\n def _evaluate(self, model: sh_models.Model) ->TaskResult:\n dataloader = self._prepare_dataloader()\n ood_scores_out_list = []\n for predictions_out in model.predict(dataloader, PredictionTargets(\n ood_scores=True)):\n assert predictions_out.ood_scores is not None, 'OOD scores for SSB task is None'\n ood_scores_out_list.append(predictions_out.ood_scores)\n ood_scores_out = np.hstack(ood_scores_out_list)\n auroc = auroc_ood(np.array(model.imagenet_validation_result.\n ood_scores), ood_scores_out)\n fpr_at_95 = fpr_at_tpr(np.array(model.imagenet_validation_result.\n ood_scores), ood_scores_out, 0.95)\n return TaskResult(auroc=auroc, fpr_at_95=fpr_at_95, summary_metrics\n ={Metric.OODDetection: ('auroc', 'fpr_at_95')})\n\n\n@sh_benchmark.register_task(name='SSB_easy', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBEasy(_SSB):\n \"\"\"SSB Easy subset\"\"\"\n subset_type: str = variable('easy')\n\n\n@sh_benchmark.register_task(name='SSB_hard', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBHard(_SSB):\n \"\"\"SSB Hard subset\"\"\"\n subset_type: str = variable('hard')\n", "<docstring token>\n<import token>\n\n\[email protected]\nclass _SSB(Task, OODScoreTaskMixin):\n <docstring token>\n <assignment token>\n subset_type: str = abstract_variable()\n max_batch_size: int = 256\n\n def setup(self):\n \"\"\"Asserts data is downloaded and sets up open-set dataset\"\"\"\n osr_split_path = os.path.join(self.data_root,\n 'imagenet_osr_splits_winter21.pkl')\n if not os.path.exists(osr_split_path):\n os.makedirs(self.data_root, exist_ok=True)\n osr_split = requests.get(self.OSR_URL)\n open(osr_split_path, 'wb').write(osr_split.content)\n else:\n with open(osr_split_path, 'rb') as f:\n osr_split = pickle.load(f)\n assert_data_downloaded(osr_split, shifthappens.config.\n imagenet21k_preprocessed_validation_path)\n test_transform = tv_transforms.Compose([tv_transforms.ToTensor(),\n tv_transforms.Lambda(lambda x: x.permute(1, 2, 0))])\n dataset_out = _get_imagenet_ssb_subset(imagenet21k_root=\n shifthappens.config.imagenet21k_preprocessed_validation_path,\n osr_split=osr_split, test_transform=test_transform, subset_type\n =self.subset_type)\n self.dataset_out = sh_data_torch.IndexedTorchDataset(sh_data_torch.\n ImagesOnlyTorchDataset(dataset_out))\n\n def _prepare_dataloader(self):\n dataloader_out = sh_data.DataLoader(self.dataset_out,\n max_batch_size=self.max_batch_size)\n return dataloader_out\n <function 
token>\n\n\n@sh_benchmark.register_task(name='SSB_easy', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBEasy(_SSB):\n \"\"\"SSB Easy subset\"\"\"\n subset_type: str = variable('easy')\n\n\n@sh_benchmark.register_task(name='SSB_hard', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBHard(_SSB):\n \"\"\"SSB Hard subset\"\"\"\n subset_type: str = variable('hard')\n", "<docstring token>\n<import token>\n\n\[email protected]\nclass _SSB(Task, OODScoreTaskMixin):\n <docstring token>\n <assignment token>\n subset_type: str = abstract_variable()\n max_batch_size: int = 256\n\n def setup(self):\n \"\"\"Asserts data is downloaded and sets up open-set dataset\"\"\"\n osr_split_path = os.path.join(self.data_root,\n 'imagenet_osr_splits_winter21.pkl')\n if not os.path.exists(osr_split_path):\n os.makedirs(self.data_root, exist_ok=True)\n osr_split = requests.get(self.OSR_URL)\n open(osr_split_path, 'wb').write(osr_split.content)\n else:\n with open(osr_split_path, 'rb') as f:\n osr_split = pickle.load(f)\n assert_data_downloaded(osr_split, shifthappens.config.\n imagenet21k_preprocessed_validation_path)\n test_transform = tv_transforms.Compose([tv_transforms.ToTensor(),\n tv_transforms.Lambda(lambda x: x.permute(1, 2, 0))])\n dataset_out = _get_imagenet_ssb_subset(imagenet21k_root=\n shifthappens.config.imagenet21k_preprocessed_validation_path,\n osr_split=osr_split, test_transform=test_transform, subset_type\n =self.subset_type)\n self.dataset_out = sh_data_torch.IndexedTorchDataset(sh_data_torch.\n ImagesOnlyTorchDataset(dataset_out))\n <function token>\n <function token>\n\n\n@sh_benchmark.register_task(name='SSB_easy', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBEasy(_SSB):\n \"\"\"SSB Easy subset\"\"\"\n subset_type: str = variable('easy')\n\n\n@sh_benchmark.register_task(name='SSB_hard', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBHard(_SSB):\n \"\"\"SSB Hard subset\"\"\"\n subset_type: str = variable('hard')\n", "<docstring token>\n<import token>\n\n\[email protected]\nclass _SSB(Task, OODScoreTaskMixin):\n <docstring token>\n <assignment token>\n subset_type: str = abstract_variable()\n max_batch_size: int = 256\n <function token>\n <function token>\n <function token>\n\n\n@sh_benchmark.register_task(name='SSB_easy', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBEasy(_SSB):\n \"\"\"SSB Easy subset\"\"\"\n subset_type: str = variable('easy')\n\n\n@sh_benchmark.register_task(name='SSB_hard', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBHard(_SSB):\n \"\"\"SSB Hard subset\"\"\"\n subset_type: str = variable('hard')\n", "<docstring token>\n<import token>\n<class token>\n\n\n@sh_benchmark.register_task(name='SSB_easy', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBEasy(_SSB):\n \"\"\"SSB Easy subset\"\"\"\n subset_type: str = variable('easy')\n\n\n@sh_benchmark.register_task(name='SSB_hard', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBHard(_SSB):\n \"\"\"SSB Hard subset\"\"\"\n subset_type: str = variable('hard')\n", "<docstring token>\n<import token>\n<class token>\n\n\n@sh_benchmark.register_task(name='SSB_easy', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBEasy(_SSB):\n <docstring token>\n subset_type: str = variable('easy')\n\n\n@sh_benchmark.register_task(name='SSB_hard', relative_data_folder='ssb',\n standalone=True)\[email 
protected]\nclass SSBHard(_SSB):\n \"\"\"SSB Hard subset\"\"\"\n subset_type: str = variable('hard')\n", "<docstring token>\n<import token>\n<class token>\n<class token>\n\n\n@sh_benchmark.register_task(name='SSB_hard', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBHard(_SSB):\n \"\"\"SSB Hard subset\"\"\"\n subset_type: str = variable('hard')\n", "<docstring token>\n<import token>\n<class token>\n<class token>\n\n\n@sh_benchmark.register_task(name='SSB_hard', relative_data_folder='ssb',\n standalone=True)\[email protected]\nclass SSBHard(_SSB):\n <docstring token>\n subset_type: str = variable('hard')\n", "<docstring token>\n<import token>\n<class token>\n<class token>\n<class token>\n" ]
false
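The auroc_ood and fpr_at_tpr helpers called by the SSB record above come from shifthappens.tasks.utils and are not reproduced in this dump. A self-contained NumPy sketch of how such metrics can be computed, under the assumption that in-distribution scores are meant to rank above OOD scores and with no tie correction; this is an illustration, not the package's actual code:

import numpy as np

def auroc(scores_in, scores_out):
    # Mann-Whitney U estimate of P(score_in > score_out); in-distribution
    # samples are treated as the positive class. Ties are not corrected for.
    scores = np.concatenate([scores_in, scores_out])
    ranks = np.argsort(np.argsort(scores)) + 1  # 1-based ranks
    n_in, n_out = len(scores_in), len(scores_out)
    u = ranks[:n_in].sum() - n_in * (n_in + 1) / 2
    return u / (n_in * n_out)

def fpr_at_tpr(scores_in, scores_out, tpr=0.95):
    # Choose the threshold that keeps `tpr` of the in-distribution scores,
    # then count how many out-of-distribution scores land above it.
    threshold = np.quantile(scores_in, 1 - tpr)
    return float(np.mean(scores_out >= threshold))

An auroc near 1.0 would mean the model separates the ImageNet validation scores from the SSB subset scores almost perfectly, while fpr_at_tpr reports how many open-set images slip past a threshold that keeps 95% of the closed-set images.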
99,421
8fa57a2826c2d01ecb91a9357572210404691eda
import urllib.parse
import urllib.request
import urllib.error
import concurrent.futures
from logger import get_logger
from crawler.cfg import MAX_HTTP_WORKERS

logger = get_logger(__name__)


class UrllibHandler:
    """
    HTTP handler that uses urllib and allows proxies
    """

    def __init__(self, url, proxy=None, **kwargs):
        """
        Initialize the HTTP handler to perform requests on a URL.
        A proxy with the format "ip:port" can be provided.
        All the extra arguments (kwargs) will be parsed and sent as
        GET parameters in the request.
        :param url: URL to perform the request
        :type url: string
        :param proxy: string with format "ip:port"
        :type proxy: string
        """
        self.proxy = proxy
        self.query_params = urllib.parse.urlencode(kwargs)
        self.url = url if not self.query_params else f"{url}?{self.query_params}"
        logger.info("UrllibHandler initialized: url=%s, proxy=%s", self.url, self.proxy)

    def get(self):
        """
        Get a URL and return a readable object with the raw HTML retrieved
        """
        request = urllib.request.Request(self.url)
        if self.proxy:
            request.set_proxy(self.proxy, 'http')
        logger.info("Attempt to do GET request: url=%s, proxy=%s",
                    self.url, self.proxy)
        response = urllib.request.urlopen(request)
        logger.info("GET request was successful: url=%s, proxy=%s",
                    self.url, self.proxy)
        return response


def get_urls_async(urls, proxy=None, max_workers=MAX_HTTP_WORKERS):
    """
    Perform async requests on each url in urls and return the result.
    The max number of concurrent requests is controlled by `max_workers`.
    If a proxy is provided, it will be used to make all the requests.

    Return a dictionary with `url` as key and the resulting response as value
    (or None if an exception is raised during the request)
    """
    result = {}
    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = {}
        for url in urls:
            handler = UrllibHandler(url, proxy)
            future = executor.submit(handler.get)
            futures[future] = url

        for future in concurrent.futures.as_completed(futures):
            url = futures[future]
            try:
                response = future.result()
            except (urllib.error.URLError, urllib.error.HTTPError) as ex:
                logger.error("Unexpected error during request: url=%s, proxy=%s, " + \
                             "error=%s", url, proxy, ex)
                response = None

            result[url] = response
    return result
[ "import urllib.parse\nimport urllib.request\nimport urllib.error\nimport concurrent.futures\nfrom logger import get_logger\nfrom crawler.cfg import MAX_HTTP_WORKERS\n\nlogger = get_logger(__name__)\n\n\nclass UrllibHandler:\n \"\"\"\n Http handler that uses urllib and allow proxies\n \"\"\"\n\n def __init__(self, url, proxy=None, **kwargs):\n \"\"\"\n Initialize the http handler to perform requests in a URL.\n A proxy with the format \"ip:port\" can be provided.\n All the extra arguments (kwargs) will be parsed and sent as\n GET parameters in the request.\n :param url: URL to perform the request\n :type url: string\n :param proxy: string with format \"ip:port\"\n :type query: string\n \"\"\"\n self.proxy = proxy\n self.query_params = urllib.parse.urlencode(kwargs)\n self.url = url if not self.query_params else f\"{url}?{self.query_params}\"\n logger.info(\"UrllibHandler initialized: url=%s, proxy=%s\", self.url, self.proxy)\n\n def get(self):\n \"\"\"\n Get a url and return a redeable object with the raw html retrived\n \"\"\"\n request = urllib.request.Request(self.url)\n if self.proxy:\n request.set_proxy(self.proxy, 'http')\n logger.info(\"Attempt to do GET request: url=%s, proxy=%s\",\n self.url, self.proxy)\n response = urllib.request.urlopen(request)\n logger.info(\"GET request was successful: url=%s, proxy=%s\",\n self.url, self.proxy)\n return response\n\n\ndef get_urls_async(urls, proxy=None, max_workers=MAX_HTTP_WORKERS):\n \"\"\"\n Perform async requests on each url in urls and return the result.\n The max number of concurrent requests is controled by `max_workers`\n If a proxy is provided, it will be used to make all the requests.\n\n Return a dictionary with `url` as key and the resultant requests as value\n (or None if an exception is rised during request)\n \"\"\"\n result = {}\n with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:\n futures = {}\n for url in urls:\n handler = UrllibHandler(url, proxy)\n future = executor.submit(handler.get)\n futures[future] = url\n\n for future in concurrent.futures.as_completed(futures):\n url = futures[future]\n try:\n response = future.result()\n except (urllib.error.URLError, urllib.error.HTTPError) as ex:\n logger.error(\"Unexpected error during request: url=%s, proxy=%s, \" + \\\n \"error=%s\", url, proxy, ex)\n response = None\n\n result[url] = response\n return result\n", "import urllib.parse\nimport urllib.request\nimport urllib.error\nimport concurrent.futures\nfrom logger import get_logger\nfrom crawler.cfg import MAX_HTTP_WORKERS\nlogger = get_logger(__name__)\n\n\nclass UrllibHandler:\n \"\"\"\n Http handler that uses urllib and allow proxies\n \"\"\"\n\n def __init__(self, url, proxy=None, **kwargs):\n \"\"\"\n Initialize the http handler to perform requests in a URL.\n A proxy with the format \"ip:port\" can be provided.\n All the extra arguments (kwargs) will be parsed and sent as\n GET parameters in the request.\n :param url: URL to perform the request\n :type url: string\n :param proxy: string with format \"ip:port\"\n :type query: string\n \"\"\"\n self.proxy = proxy\n self.query_params = urllib.parse.urlencode(kwargs)\n self.url = (url if not self.query_params else\n f'{url}?{self.query_params}')\n logger.info('UrllibHandler initialized: url=%s, proxy=%s', self.url,\n self.proxy)\n\n def get(self):\n \"\"\"\n Get a url and return a redeable object with the raw html retrived\n \"\"\"\n request = urllib.request.Request(self.url)\n if self.proxy:\n request.set_proxy(self.proxy, 'http')\n 
logger.info('Attempt to do GET request: url=%s, proxy=%s', self.url,\n self.proxy)\n response = urllib.request.urlopen(request)\n logger.info('GET request was successful: url=%s, proxy=%s', self.\n url, self.proxy)\n return response\n\n\ndef get_urls_async(urls, proxy=None, max_workers=MAX_HTTP_WORKERS):\n \"\"\"\n Perform async requests on each url in urls and return the result.\n The max number of concurrent requests is controled by `max_workers`\n If a proxy is provided, it will be used to make all the requests.\n\n Return a dictionary with `url` as key and the resultant requests as value\n (or None if an exception is rised during request)\n \"\"\"\n result = {}\n with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers\n ) as executor:\n futures = {}\n for url in urls:\n handler = UrllibHandler(url, proxy)\n future = executor.submit(handler.get)\n futures[future] = url\n for future in concurrent.futures.as_completed(futures):\n url = futures[future]\n try:\n response = future.result()\n except (urllib.error.URLError, urllib.error.HTTPError) as ex:\n logger.error(\n 'Unexpected error during request: url=%s, proxy=%s, ' +\n 'error=%s', url, proxy, ex)\n response = None\n result[url] = response\n return result\n", "<import token>\nlogger = get_logger(__name__)\n\n\nclass UrllibHandler:\n \"\"\"\n Http handler that uses urllib and allow proxies\n \"\"\"\n\n def __init__(self, url, proxy=None, **kwargs):\n \"\"\"\n Initialize the http handler to perform requests in a URL.\n A proxy with the format \"ip:port\" can be provided.\n All the extra arguments (kwargs) will be parsed and sent as\n GET parameters in the request.\n :param url: URL to perform the request\n :type url: string\n :param proxy: string with format \"ip:port\"\n :type query: string\n \"\"\"\n self.proxy = proxy\n self.query_params = urllib.parse.urlencode(kwargs)\n self.url = (url if not self.query_params else\n f'{url}?{self.query_params}')\n logger.info('UrllibHandler initialized: url=%s, proxy=%s', self.url,\n self.proxy)\n\n def get(self):\n \"\"\"\n Get a url and return a redeable object with the raw html retrived\n \"\"\"\n request = urllib.request.Request(self.url)\n if self.proxy:\n request.set_proxy(self.proxy, 'http')\n logger.info('Attempt to do GET request: url=%s, proxy=%s', self.url,\n self.proxy)\n response = urllib.request.urlopen(request)\n logger.info('GET request was successful: url=%s, proxy=%s', self.\n url, self.proxy)\n return response\n\n\ndef get_urls_async(urls, proxy=None, max_workers=MAX_HTTP_WORKERS):\n \"\"\"\n Perform async requests on each url in urls and return the result.\n The max number of concurrent requests is controled by `max_workers`\n If a proxy is provided, it will be used to make all the requests.\n\n Return a dictionary with `url` as key and the resultant requests as value\n (or None if an exception is rised during request)\n \"\"\"\n result = {}\n with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers\n ) as executor:\n futures = {}\n for url in urls:\n handler = UrllibHandler(url, proxy)\n future = executor.submit(handler.get)\n futures[future] = url\n for future in concurrent.futures.as_completed(futures):\n url = futures[future]\n try:\n response = future.result()\n except (urllib.error.URLError, urllib.error.HTTPError) as ex:\n logger.error(\n 'Unexpected error during request: url=%s, proxy=%s, ' +\n 'error=%s', url, proxy, ex)\n response = None\n result[url] = response\n return result\n", "<import token>\n<assignment token>\n\n\nclass UrllibHandler:\n 
\"\"\"\n Http handler that uses urllib and allow proxies\n \"\"\"\n\n def __init__(self, url, proxy=None, **kwargs):\n \"\"\"\n Initialize the http handler to perform requests in a URL.\n A proxy with the format \"ip:port\" can be provided.\n All the extra arguments (kwargs) will be parsed and sent as\n GET parameters in the request.\n :param url: URL to perform the request\n :type url: string\n :param proxy: string with format \"ip:port\"\n :type query: string\n \"\"\"\n self.proxy = proxy\n self.query_params = urllib.parse.urlencode(kwargs)\n self.url = (url if not self.query_params else\n f'{url}?{self.query_params}')\n logger.info('UrllibHandler initialized: url=%s, proxy=%s', self.url,\n self.proxy)\n\n def get(self):\n \"\"\"\n Get a url and return a redeable object with the raw html retrived\n \"\"\"\n request = urllib.request.Request(self.url)\n if self.proxy:\n request.set_proxy(self.proxy, 'http')\n logger.info('Attempt to do GET request: url=%s, proxy=%s', self.url,\n self.proxy)\n response = urllib.request.urlopen(request)\n logger.info('GET request was successful: url=%s, proxy=%s', self.\n url, self.proxy)\n return response\n\n\ndef get_urls_async(urls, proxy=None, max_workers=MAX_HTTP_WORKERS):\n \"\"\"\n Perform async requests on each url in urls and return the result.\n The max number of concurrent requests is controled by `max_workers`\n If a proxy is provided, it will be used to make all the requests.\n\n Return a dictionary with `url` as key and the resultant requests as value\n (or None if an exception is rised during request)\n \"\"\"\n result = {}\n with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers\n ) as executor:\n futures = {}\n for url in urls:\n handler = UrllibHandler(url, proxy)\n future = executor.submit(handler.get)\n futures[future] = url\n for future in concurrent.futures.as_completed(futures):\n url = futures[future]\n try:\n response = future.result()\n except (urllib.error.URLError, urllib.error.HTTPError) as ex:\n logger.error(\n 'Unexpected error during request: url=%s, proxy=%s, ' +\n 'error=%s', url, proxy, ex)\n response = None\n result[url] = response\n return result\n", "<import token>\n<assignment token>\n\n\nclass UrllibHandler:\n \"\"\"\n Http handler that uses urllib and allow proxies\n \"\"\"\n\n def __init__(self, url, proxy=None, **kwargs):\n \"\"\"\n Initialize the http handler to perform requests in a URL.\n A proxy with the format \"ip:port\" can be provided.\n All the extra arguments (kwargs) will be parsed and sent as\n GET parameters in the request.\n :param url: URL to perform the request\n :type url: string\n :param proxy: string with format \"ip:port\"\n :type query: string\n \"\"\"\n self.proxy = proxy\n self.query_params = urllib.parse.urlencode(kwargs)\n self.url = (url if not self.query_params else\n f'{url}?{self.query_params}')\n logger.info('UrllibHandler initialized: url=%s, proxy=%s', self.url,\n self.proxy)\n\n def get(self):\n \"\"\"\n Get a url and return a redeable object with the raw html retrived\n \"\"\"\n request = urllib.request.Request(self.url)\n if self.proxy:\n request.set_proxy(self.proxy, 'http')\n logger.info('Attempt to do GET request: url=%s, proxy=%s', self.url,\n self.proxy)\n response = urllib.request.urlopen(request)\n logger.info('GET request was successful: url=%s, proxy=%s', self.\n url, self.proxy)\n return response\n\n\n<function token>\n", "<import token>\n<assignment token>\n\n\nclass UrllibHandler:\n <docstring token>\n\n def __init__(self, url, proxy=None, **kwargs):\n 
\"\"\"\n Initialize the http handler to perform requests in a URL.\n A proxy with the format \"ip:port\" can be provided.\n All the extra arguments (kwargs) will be parsed and sent as\n GET parameters in the request.\n :param url: URL to perform the request\n :type url: string\n :param proxy: string with format \"ip:port\"\n :type query: string\n \"\"\"\n self.proxy = proxy\n self.query_params = urllib.parse.urlencode(kwargs)\n self.url = (url if not self.query_params else\n f'{url}?{self.query_params}')\n logger.info('UrllibHandler initialized: url=%s, proxy=%s', self.url,\n self.proxy)\n\n def get(self):\n \"\"\"\n Get a url and return a redeable object with the raw html retrived\n \"\"\"\n request = urllib.request.Request(self.url)\n if self.proxy:\n request.set_proxy(self.proxy, 'http')\n logger.info('Attempt to do GET request: url=%s, proxy=%s', self.url,\n self.proxy)\n response = urllib.request.urlopen(request)\n logger.info('GET request was successful: url=%s, proxy=%s', self.\n url, self.proxy)\n return response\n\n\n<function token>\n", "<import token>\n<assignment token>\n\n\nclass UrllibHandler:\n <docstring token>\n\n def __init__(self, url, proxy=None, **kwargs):\n \"\"\"\n Initialize the http handler to perform requests in a URL.\n A proxy with the format \"ip:port\" can be provided.\n All the extra arguments (kwargs) will be parsed and sent as\n GET parameters in the request.\n :param url: URL to perform the request\n :type url: string\n :param proxy: string with format \"ip:port\"\n :type query: string\n \"\"\"\n self.proxy = proxy\n self.query_params = urllib.parse.urlencode(kwargs)\n self.url = (url if not self.query_params else\n f'{url}?{self.query_params}')\n logger.info('UrllibHandler initialized: url=%s, proxy=%s', self.url,\n self.proxy)\n <function token>\n\n\n<function token>\n", "<import token>\n<assignment token>\n\n\nclass UrllibHandler:\n <docstring token>\n <function token>\n <function token>\n\n\n<function token>\n", "<import token>\n<assignment token>\n<class token>\n<function token>\n" ]
false
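One thing the UrllibHandler record above leaves out is a timeout: urlopen with no timeout can block a worker thread indefinitely if a server stalls, which matters when the pool is capped at MAX_HTTP_WORKERS. A hedged variant of the GET path with a socket timeout and a small retry loop, using illustrative defaults rather than values from the record:

import urllib.request
import urllib.error

def get_with_timeout(url, proxy=None, timeout=10.0, retries=2):
    # Same request setup as the handler above, plus a per-attempt socket
    # timeout and a simple retry loop; `timeout` and `retries` are
    # hypothetical defaults, not taken from the record.
    request = urllib.request.Request(url)
    if proxy:
        request.set_proxy(proxy, 'http')
    last_error = None
    for _ in range(retries + 1):
        try:
            return urllib.request.urlopen(request, timeout=timeout)
        except (urllib.error.URLError, urllib.error.HTTPError) as ex:
            last_error = ex
    raise last_error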
99,422
0947162cdb7feed2868df723b999a411626ff826
"""Class and Instance Variables. @see: https://docs.python.org/3/tutorial/classes.html#class-and-instance-variables Generally speaking, instance variables are for data unique to each instance and class variables are for attributes and methods shared by all instances of the class. """ def test_class_and_instance_variables(): """Class and Instance Variables.""" # pylint: disable=too-few-public-methods class Dog: """Dog class example""" kind = "canine" # Class variable shared by all instances. def __init__(self, name): self.name = name # Instance variable unique to each instance. fido = Dog("Fido") buddy = Dog("Buddy") # Shared by all dogs. assert fido.kind == "canine" assert buddy.kind == "canine" # Unique to fido. assert fido.name == "Fido" # Unique to buddy. assert buddy.name == "Buddy" # Shared data can have possibly surprising effects with involving mutable objects such as lists # and dictionaries. For example, the tricks list in the following code should not be used as a # class variable because just a single list would be shared by all Dog instances. # pylint: disable=too-few-public-methods class DogWithSharedTricks: """Dog class example with wrong shared variable usage""" tricks = [] # Mistaken use of a class variable (see below) for mutable objects. def __init__(self, name): self.name = name # Instance variable unique to each instance. def add_trick(self, trick): """Add trick to the dog This function illustrate mistaken use of mutable class variable tricks (see below). """ self.tricks.append(trick) fido = DogWithSharedTricks("Fido") buddy = DogWithSharedTricks("Buddy") fido.add_trick("roll over") buddy.add_trick("play dead") assert fido.tricks == ["roll over", "play dead"] # unexpectedly shared by all dogs assert buddy.tricks == ["roll over", "play dead"] # unexpectedly shared by all dogs # Correct design of the class should use an instance variable instead: # pylint: disable=too-few-public-methods class DogWithTricks: """Dog class example""" def __init__(self, name): self.name = name # Instance variable unique to each instance. self.tricks = [] # creates a new empty list for each dog def add_trick(self, trick): """Add trick to the dog This function illustrate mistaken use of mutable class variable tricks (see below). """ self.tricks.append(trick) fido = DogWithTricks("Fido") buddy = DogWithTricks("Buddy") fido.add_trick("roll over") buddy.add_trick("play dead") assert fido.tricks == ["roll over"] assert buddy.tricks == ["play dead"]
[ "\"\"\"Class and Instance Variables.\n\n@see: https://docs.python.org/3/tutorial/classes.html#class-and-instance-variables\n\nGenerally speaking, instance variables are for data unique to each instance and class variables are\nfor attributes and methods shared by all instances of the class.\n\"\"\"\n\n\ndef test_class_and_instance_variables():\n \"\"\"Class and Instance Variables.\"\"\"\n\n # pylint: disable=too-few-public-methods\n class Dog:\n \"\"\"Dog class example\"\"\"\n\n kind = \"canine\" # Class variable shared by all instances.\n\n def __init__(self, name):\n self.name = name # Instance variable unique to each instance.\n\n fido = Dog(\"Fido\")\n buddy = Dog(\"Buddy\")\n\n # Shared by all dogs.\n assert fido.kind == \"canine\"\n assert buddy.kind == \"canine\"\n\n # Unique to fido.\n assert fido.name == \"Fido\"\n\n # Unique to buddy.\n assert buddy.name == \"Buddy\"\n\n # Shared data can have possibly surprising effects with involving mutable objects such as lists\n # and dictionaries. For example, the tricks list in the following code should not be used as a\n # class variable because just a single list would be shared by all Dog instances.\n\n # pylint: disable=too-few-public-methods\n class DogWithSharedTricks:\n \"\"\"Dog class example with wrong shared variable usage\"\"\"\n\n tricks = [] # Mistaken use of a class variable (see below) for mutable objects.\n\n def __init__(self, name):\n self.name = name # Instance variable unique to each instance.\n\n def add_trick(self, trick):\n \"\"\"Add trick to the dog\n\n This function illustrate mistaken use of mutable class variable tricks (see below).\n \"\"\"\n self.tricks.append(trick)\n\n fido = DogWithSharedTricks(\"Fido\")\n buddy = DogWithSharedTricks(\"Buddy\")\n\n fido.add_trick(\"roll over\")\n buddy.add_trick(\"play dead\")\n\n assert fido.tricks == [\"roll over\", \"play dead\"] # unexpectedly shared by all dogs\n assert buddy.tricks == [\"roll over\", \"play dead\"] # unexpectedly shared by all dogs\n\n # Correct design of the class should use an instance variable instead:\n\n # pylint: disable=too-few-public-methods\n class DogWithTricks:\n \"\"\"Dog class example\"\"\"\n\n def __init__(self, name):\n self.name = name # Instance variable unique to each instance.\n self.tricks = [] # creates a new empty list for each dog\n\n def add_trick(self, trick):\n \"\"\"Add trick to the dog\n\n This function illustrate mistaken use of mutable class variable tricks (see below).\n \"\"\"\n self.tricks.append(trick)\n\n fido = DogWithTricks(\"Fido\")\n buddy = DogWithTricks(\"Buddy\")\n\n fido.add_trick(\"roll over\")\n buddy.add_trick(\"play dead\")\n\n assert fido.tricks == [\"roll over\"]\n assert buddy.tricks == [\"play dead\"]\n", "<docstring token>\n\n\ndef test_class_and_instance_variables():\n \"\"\"Class and Instance Variables.\"\"\"\n\n\n class Dog:\n \"\"\"Dog class example\"\"\"\n kind = 'canine'\n\n def __init__(self, name):\n self.name = name\n fido = Dog('Fido')\n buddy = Dog('Buddy')\n assert fido.kind == 'canine'\n assert buddy.kind == 'canine'\n assert fido.name == 'Fido'\n assert buddy.name == 'Buddy'\n\n\n class DogWithSharedTricks:\n \"\"\"Dog class example with wrong shared variable usage\"\"\"\n tricks = []\n\n def __init__(self, name):\n self.name = name\n\n def add_trick(self, trick):\n \"\"\"Add trick to the dog\n\n This function illustrate mistaken use of mutable class variable tricks (see below).\n \"\"\"\n self.tricks.append(trick)\n fido = DogWithSharedTricks('Fido')\n buddy = 
DogWithSharedTricks('Buddy')\n fido.add_trick('roll over')\n buddy.add_trick('play dead')\n assert fido.tricks == ['roll over', 'play dead']\n assert buddy.tricks == ['roll over', 'play dead']\n\n\n class DogWithTricks:\n \"\"\"Dog class example\"\"\"\n\n def __init__(self, name):\n self.name = name\n self.tricks = []\n\n def add_trick(self, trick):\n \"\"\"Add trick to the dog\n\n This function illustrate mistaken use of mutable class variable tricks (see below).\n \"\"\"\n self.tricks.append(trick)\n fido = DogWithTricks('Fido')\n buddy = DogWithTricks('Buddy')\n fido.add_trick('roll over')\n buddy.add_trick('play dead')\n assert fido.tricks == ['roll over']\n assert buddy.tricks == ['play dead']\n", "<docstring token>\n<function token>\n" ]
false
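The mutable-class-variable trap that the record above demonstrates by hand is caught at class-definition time by the standard-library dataclasses module, which refuses bare mutable defaults. A short sketch, added here for illustration and not part of the record:

from dataclasses import dataclass, field

@dataclass
class Dog:
    name: str
    # Writing `tricks: list = []` here would raise ValueError when the
    # class is defined; default_factory builds a fresh list per instance.
    tricks: list = field(default_factory=list)

fido = Dog('Fido')
buddy = Dog('Buddy')
fido.tricks.append('roll over')
assert fido.tricks == ['roll over']
assert buddy.tricks == []

field(default_factory=list) plays the same role as assigning self.tricks = [] inside __init__.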
99,423
bc9e8ba60a8da444dc9fe43a595719fd477dd147
import numpy as np
import sqlite3 as sq
import sys
import lib_PB1SQLDB as libsq

class read_DB():
    def __init__(self):
        self.filename = 'tmp.db'
    
    def read_BeamParams(self):
        conn = sq.connect(self.filename)
        c = conn.cursor()
        c.execute('select * from BeamParams')
        boloid=[]; boloname=[]; xpos=[]; ypos=[]; polang=[]; poleff=[]
        sigma_x=[]; sigma_y=[]; amp=[]; beam_tilt=[]
        for ar in c:
            boloid.append(int(ar[0]))
            boloname.append(str(ar[1]))
            xpos.append(float(ar[2]))
            ypos.append(float(ar[3]))
            polang.append(float(ar[4]))
            poleff.append(float(ar[5]))
            sigma_x.append(float(ar[6]))
            sigma_y.append(float(ar[7]))
            amp.append(float(ar[8]))
            beam_tilt.append(float(ar[9]))
        c.close()
        self.BeamParams = {'boloid':boloid,'boloname':boloname,'xpos':xpos,'ypos':ypos,
                           'polang':polang,'poleff':poleff,'sigma_x':sigma_x,'sigma_y':sigma_y,
                           'amp':amp,'beam_tilt':beam_tilt}
        return self.BeamParams



read_db = libsq.read_DB()
read_db.filename = '/scratch/scratchdirs/tmatsumu/sim/PB1_NTP/DB/beamprm_20120530_031419_hwp112.5.db'
beam1 = read_db.read_BeamParams_selective([1,1,0,0,0,0,0,0,0,0])

read_db = libsq.read_DB()
read_db.filename = '/scratch/scratchdirs/tmatsumu/sim/PB1_NTP/DB/pb1_fpdb_ver0.db'
beam2 = read_db.read_BeamParams_selective([1,1,0,0,0,0,0,0,0,0])

num = len(beam2['boloid'])

for i in range(num):
    ind = np.where(beam2['boloid'][i] == np.array(beam1['boloid']))
    # np.where returns a tuple of index arrays; use the first matching index
    idx = int(ind[0][0])
    # print(idx)
    if beam1['boloname'][idx] != beam2['boloname'][i]:
        print(beam2['boloid'][i], beam1['boloname'][idx], beam2['boloname'][i])
[ "import numpy as np\nimport sqlite3 as sq\nimport sys\nimport lib_PB1SQLDB as libsq\n\nclass read_DB():\n def __init__(self):\n self.filename = 'tmp.db'\n \n def read_BeamParams(self):\n conn = sq.connect(self.filename)\n c = conn.cursor()\n c.execute('select * from BeamParams')\n boloid=[]; boloname=[]; xpos=[]; ypos=[]; polang=[]; poleff=[]\n sigma_x=[]; sigma_y=[]; amp=[]; beam_tilt=[]\n for ar in c:\n boloid.append(int(ar[0]))\n boloname.append(str(ar[1]))\n xpos.append(float(ar[2]))\n ypos.append(float(ar[3]))\n polang.append(float(ar[4]))\n poleff.append(float(ar[5]))\n sigma_x.append(float(ar[6]))\n sigma_y.append(float(ar[7]))\n amp.append(float(ar[8]))\n beam_tilt.append(float(ar[9]))\n c.close()\n self.BeamParams = {'boloid':boloid,'boloname':boloname,'xpos':xpos,'ypos':ypos,\n 'polang':polang,'poleff':poleff,'sigma_x':sigma_x,'sigma_y':sigma_y,\n 'amp':amp,'beam_tilt':beam_tilt}\n return self.BeamParams\n\n\n\nread_db = libsq.read_DB()\nread_db.filename = '/scratch/scratchdirs/tmatsumu/sim/PB1_NTP/DB/beamprm_20120530_031419_hwp112.5.db'\nbeam1 = read_db.read_BeamParams_selective([1,1,0,0,0,0,0,0,0,0])\n\nread_db = libsq.read_DB()\nread_db.filename = '/scratch/scratchdirs/tmatsumu/sim/PB1_NTP/DB/pb1_fpdb_ver0.db'\nbeam2 = read_db.read_BeamParams_selective([1,1,0,0,0,0,0,0,0,0])\n\nnum = len(beam2['boloid'])\n\nfor i in range(num):\n ind = np.where(beam2['boloid'][i] == np.array(beam1['boloid']))\n# print ind[0]\n if ( beam1['boloname'][ind[0]] != beam2['boloname'][i] ):\n print beam2['boloid'][i], beam1['boloname'][ind[0]], beam2['boloname'][i]\n" ]
true
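The read_DB sample above unpacks each BeamParams row by position, ar[0] through ar[9], which silently breaks if the column order ever changes. A hedged sketch of the same read using sqlite3's built-in Row factory, which keys columns by name; the table name and the dict-of-lists layout are assumed from the sample:

import sqlite3

def read_beam_params(filename):
    # Rows come back as sqlite3.Row objects, indexable by column name;
    # the result mirrors the sample's dict-of-lists structure.
    conn = sqlite3.connect(filename)
    conn.row_factory = sqlite3.Row
    try:
        rows = conn.execute('select * from BeamParams').fetchall()
        if not rows:
            return {}
        return {key: [row[key] for row in rows] for key in rows[0].keys()}
    finally:
        conn.close()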
99,424
f389a14f07407555d9dd995a6a5d9b7770989797
# -*- coding: utf-8 -*-

import numpy as np
from matplotlib import pyplot as plt
import proplot as pplt

from bayesian_inference import (
    get_posterior, get_stats, plot_linear_dependency, plot_parameters,
)
from inputs import Inputs_
from catalog import catalog, Figure

del catalog[-1]


class Dependencies_(Inputs_):
    def generate(self):
        super().generate()

        rms = self["Amplitude"]
        distance = self["Distance"]
        velocity = self["Vitesse"]
        poids = self["Poids"]

        STEPS = 24  # NOTE Reduce step size to make computations faster.

        distance_dep = np.linspace(0.0, -.25, STEPS)
        velocity_dep = np.linspace(0.0, 4.5, STEPS)
        poids_dep = np.linspace(.0, 6E-6, STEPS)
        rms_0 = np.linspace(-40, 120, STEPS)
        rms_noise = np.logspace(1.3, 1.8, STEPS)
        vars = [distance_dep, velocity_dep, poids_dep, rms_0, rms_noise]

        posterior = get_posterior(
            vars, [distance, velocity, poids, np.ones_like(rms)], rms,
        )
        print(posterior.sum())
        _, _, vars_max, probs_mar, _, prob_null = get_stats(
            posterior, vars, null_dims=[1, 2],
        )
        print("Against H0:", 1/prob_null)
        print("Most probable model:", vars_max)
        _, _, _, _, _, prob_velocity = get_stats(
            posterior, vars, null_dims=[1],
        )
        print("Against H0 for velocity:", 1/prob_velocity)
        _, _, _, _, _, prob_weight = get_stats(
            posterior, vars, null_dims=[2],
        )
        print("Against H0 for weight:", 1/prob_weight)

        self["vars"] = vars
        self["posterior"] = posterior
        self["vars_max"] = vars_max
        self["probs_mar"] = probs_mar


class Dependencies(Figure):
    Metadata = Dependencies_

    def plot(self, data):
        QTY_VARS = 5
        _, axs = pplt.subplots(
            [
                [1, *[2]*QTY_VARS],
                [3, *range(4, 4+QTY_VARS)],
            ],
            ref=1,
            wratios=(1, *[1/QTY_VARS]*QTY_VARS),
            wspace=(None, *[.05]*(QTY_VARS-1)),
            figsize=[7.66, 7.66],
            sharey=False,
            sharex=False,
        )

        rms = data["Amplitude"]
        distance = data["Distance"]
        velocity = data["Vitesse"]
        weight = data["Poids"]
        a1, a2, a3, b, std = data["vars_max"]

        xs = [data["Distance"], data["Vitesse"], data["Poids"]]
        ys = [
            rms-a2*velocity-a3*weight,
            rms-a1*distance-a3*weight,
            rms-a1*distance-a2*velocity,
        ]
        as_ = [a1, a2, a3]
        xlabels = [
            "Distance to railroad $d$ (m)",
            "Train velocity $v$ (km/h)",
            "Train weight $w$ (kg)",
        ]
        ylabels = [
            "Contribution of distance to RMS amplitude \n"
            r"$y-\beta_v v-\beta_w w$ ($\frac{\mathrm{mm}}{\mathrm{s}}$)",
            "Contribution of velocity to RMS amplitude \n"
            r"$y-\beta_d d-\beta_w w$ ($\frac{\mathrm{mm}}{\mathrm{s}}$)",
            "Contribution of weight to RMS amplitude \n"
            r"$y-\beta_d d-\beta_v v$ ($\frac{\mathrm{mm}}{\mathrm{s}}$)",
        ]

        for ax, x, y, a, xlabel, ylabel, in zip(
            axs[:3], xs, ys, as_, xlabels, ylabels,
        ):
            plt.sca(ax)
            plot_linear_dependency(
                x,
                y,
                a=a,
                b=b,
                std=std,
                xlabel=xlabel,
                ylabel=ylabel,
            )

        vars = data["vars"]
        var_names = [
            r"$\beta_d$", r"$\beta_v$", r"$\beta_w$", r"$y_0$",
            r"$\sigma_\epsilon$",
        ]
        probs_mar = data["probs_mar"]
        plot_parameters(
            vars,
            var_names,
            probs_mar,
            axes=axs[3:],
            units=[
                r"\frac{\mathrm{mm}}{\mathrm{s} \cdot \mathrm{m}}",
                r"\frac{\mathrm{mm} \cdot \mathrm{h}}"
                r"{\mathrm{s} \cdot \mathrm{km}}",
                r"\frac{\mathrm{mm}}{\mathrm{s} \cdot \mathrm{kg}}",
                r"\frac{\mathrm{mm}}{\mathrm{s}}",
                r"\frac{\mathrm{mm}}{\mathrm{s}}",
            ],
        )
        axs[3].format(
            ylabel=(
                "Normalized marginal probability "
                "$\\frac{p(\\theta)}{p_{max}(\\theta)}$"
            ),
        )
        axs[3:].format(ylim=[0, 1], xmargin=.1)
        for ax in axs[3:]:
            ax.xaxis.label.set_fontsize(8)

        axs[-5].format(xticks=[0, -.15])
        axs[-4].format(xticks=[0, 3])
        axs[-3].format(xticks=[0, 4E-6], xformatter='sci')
        axs[-2].format(xticks=[0, 80])
        axs[-1].format(xscale='log', xticks=[3E1, 5E1])

        ticks = axs[2].get_xticks()
        axs[2].set_xticks(ticks[1::2])
        axs[2].format(xformatter='sci')

        axs[:4].format(abc=True)


catalog.register(Dependencies)
[ "# -*- coding: utf-8 -*-\n\nimport numpy as np\nfrom matplotlib import pyplot as plt\nimport proplot as pplt\n\nfrom bayesian_inference import (\n get_posterior, get_stats, plot_linear_dependency, plot_parameters,\n)\nfrom inputs import Inputs_\nfrom catalog import catalog, Figure\n\ndel catalog[-1]\n\n\nclass Dependencies_(Inputs_):\n def generate(self):\n super().generate()\n\n rms = self[\"Amplitude\"]\n distance = self[\"Distance\"]\n velocity = self[\"Vitesse\"]\n poids = self[\"Poids\"]\n\n STEPS = 24 # NOTE Reduce step size to make computations faster.\n\n distance_dep = np.linspace(0.0, -.25, STEPS)\n velocity_dep = np.linspace(0.0, 4.5, STEPS)\n poids_dep = np.linspace(.0, 6E-6, STEPS)\n rms_0 = np.linspace(-40, 120, STEPS)\n rms_noise = np.logspace(1.3, 1.8, STEPS)\n vars = [distance_dep, velocity_dep, poids_dep, rms_0, rms_noise]\n\n posterior = get_posterior(\n vars, [distance, velocity, poids, np.ones_like(rms)], rms,\n )\n print(posterior.sum())\n _, _, vars_max, probs_mar, _, prob_null = get_stats(\n posterior, vars, null_dims=[1, 2],\n )\n print(\"Against H0:\", 1/prob_null)\n print(\"Most probable model:\", vars_max)\n _, _, _, _, _, prob_velocity = get_stats(\n posterior, vars, null_dims=[1],\n )\n print(\"Against H0 for velocity:\", 1/prob_velocity)\n _, _, _, _, _, prob_weight = get_stats(\n posterior, vars, null_dims=[2],\n )\n print(\"Against H0 for weight:\", 1/prob_weight)\n\n self[\"vars\"] = vars\n self[\"posterior\"] = posterior\n self[\"vars_max\"] = vars_max\n self[\"probs_mar\"] = probs_mar\n\n\nclass Dependencies(Figure):\n Metadata = Dependencies_\n\n def plot(self, data):\n QTY_VARS = 5\n _, axs = pplt.subplots(\n [\n [1, *[2]*QTY_VARS],\n [3, *range(4, 4+QTY_VARS)],\n ],\n ref=1,\n wratios=(1, *[1/QTY_VARS]*QTY_VARS),\n wspace=(None, *[.05]*(QTY_VARS-1)),\n figsize=[7.66, 7.66],\n sharey=False,\n sharex=False,\n )\n\n rms = data[\"Amplitude\"]\n distance = data[\"Distance\"]\n velocity = data[\"Vitesse\"]\n weight = data[\"Poids\"]\n a1, a2, a3, b, std = data[\"vars_max\"]\n\n xs = [data[\"Distance\"], data[\"Vitesse\"], data[\"Poids\"]]\n ys = [\n rms-a2*velocity-a3*weight,\n rms-a1*distance-a3*weight,\n rms-a1*distance-a2*velocity,\n ]\n as_ = [a1, a2, a3]\n xlabels = [\n \"Distance to railroad $d$ (m)\",\n \"Train velocity $v$ (km/h)\",\n \"Train weight $w$ (kg)\",\n ]\n ylabels = [\n \"Contribution of distance to RMS amplitude \\n\"\n r\"$y-\\beta_v v-\\beta_w w$ ($\\frac{\\mathrm{mm}}{\\mathrm{s}}$)\",\n \"Contribution of velocity to RMS amplitude \\n\"\n r\"$y-\\beta_d d-\\beta_w w$ ($\\frac{\\mathrm{mm}}{\\mathrm{s}}$)\",\n \"Contribution of weight to RMS amplitude \\n\"\n r\"$y-\\beta_d d-\\beta_v v$ ($\\frac{\\mathrm{mm}}{\\mathrm{s}}$)\",\n ]\n\n for ax, x, y, a, xlabel, ylabel, in zip(\n axs[:3], xs, ys, as_, xlabels, ylabels,\n ):\n plt.sca(ax)\n plot_linear_dependency(\n x,\n y,\n a=a,\n b=b,\n std=std,\n xlabel=xlabel,\n ylabel=ylabel,\n )\n\n vars = data[\"vars\"]\n var_names = [\n r\"$\\beta_d$\", r\"$\\beta_v$\", r\"$\\beta_w$\", r\"$y_0$\",\n r\"$\\sigma_\\epsilon$\",\n ]\n probs_mar = data[\"probs_mar\"]\n plot_parameters(\n vars,\n var_names,\n probs_mar,\n axes=axs[3:],\n units=[\n r\"\\frac{\\mathrm{mm}}{\\mathrm{s} \\cdot \\mathrm{m}}\",\n r\"\\frac{\\mathrm{mm} \\cdot \\mathrm{h}}\"\n r\"{\\mathrm{s} \\cdot \\mathrm{km}}\",\n r\"\\frac{\\mathrm{mm}}{\\mathrm{s} \\cdot \\mathrm{kg}}\",\n r\"\\frac{\\mathrm{mm}}{\\mathrm{s}}\",\n r\"\\frac{\\mathrm{mm}}{\\mathrm{s}}\",\n ],\n )\n axs[3].format(\n ylabel=(\n \"Normalized marginal 
probability \"\n \"$\\\\frac{p(\\\\theta)}{p_{max}(\\\\theta)}$\"\n ),\n )\n axs[3:].format(ylim=[0, 1], xmargin=.1)\n for ax in axs[3:]:\n ax.xaxis.label.set_fontsize(8)\n\n axs[-5].format(xticks=[0, -.15])\n axs[-4].format(xticks=[0, 3])\n axs[-3].format(xticks=[0, 4E-6], xformatter='sci')\n axs[-2].format(xticks=[0, 80])\n axs[-1].format(xscale='log', xticks=[3E1, 5E1])\n\n ticks = axs[2].get_xticks()\n axs[2].set_xticks(ticks[1::2])\n axs[2].format(xformatter='sci')\n\n axs[:4].format(abc=True)\n\n\ncatalog.register(Dependencies)\n", "import numpy as np\nfrom matplotlib import pyplot as plt\nimport proplot as pplt\nfrom bayesian_inference import get_posterior, get_stats, plot_linear_dependency, plot_parameters\nfrom inputs import Inputs_\nfrom catalog import catalog, Figure\ndel catalog[-1]\n\n\nclass Dependencies_(Inputs_):\n\n def generate(self):\n super().generate()\n rms = self['Amplitude']\n distance = self['Distance']\n velocity = self['Vitesse']\n poids = self['Poids']\n STEPS = 24\n distance_dep = np.linspace(0.0, -0.25, STEPS)\n velocity_dep = np.linspace(0.0, 4.5, STEPS)\n poids_dep = np.linspace(0.0, 6e-06, STEPS)\n rms_0 = np.linspace(-40, 120, STEPS)\n rms_noise = np.logspace(1.3, 1.8, STEPS)\n vars = [distance_dep, velocity_dep, poids_dep, rms_0, rms_noise]\n posterior = get_posterior(vars, [distance, velocity, poids, np.\n ones_like(rms)], rms)\n print(posterior.sum())\n _, _, vars_max, probs_mar, _, prob_null = get_stats(posterior, vars,\n null_dims=[1, 2])\n print('Against H0:', 1 / prob_null)\n print('Most probable model:', vars_max)\n _, _, _, _, _, prob_velocity = get_stats(posterior, vars, null_dims=[1]\n )\n print('Against H0 for velocity:', 1 / prob_velocity)\n _, _, _, _, _, prob_weight = get_stats(posterior, vars, null_dims=[2])\n print('Against H0 for weight:', 1 / prob_weight)\n self['vars'] = vars\n self['posterior'] = posterior\n self['vars_max'] = vars_max\n self['probs_mar'] = probs_mar\n\n\nclass Dependencies(Figure):\n Metadata = Dependencies_\n\n def plot(self, data):\n QTY_VARS = 5\n _, axs = pplt.subplots([[1, *([2] * QTY_VARS)], [3, *range(4, 4 +\n QTY_VARS)]], ref=1, wratios=(1, *([1 / QTY_VARS] * QTY_VARS)),\n wspace=(None, *([0.05] * (QTY_VARS - 1))), figsize=[7.66, 7.66],\n sharey=False, sharex=False)\n rms = data['Amplitude']\n distance = data['Distance']\n velocity = data['Vitesse']\n weight = data['Poids']\n a1, a2, a3, b, std = data['vars_max']\n xs = [data['Distance'], data['Vitesse'], data['Poids']]\n ys = [rms - a2 * velocity - a3 * weight, rms - a1 * distance - a3 *\n weight, rms - a1 * distance - a2 * velocity]\n as_ = [a1, a2, a3]\n xlabels = ['Distance to railroad $d$ (m)',\n 'Train velocity $v$ (km/h)', 'Train weight $w$ (kg)']\n ylabels = [\n \"\"\"Contribution of distance to RMS amplitude \n$y-\\\\beta_v v-\\\\beta_w w$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ,\n \"\"\"Contribution of velocity to RMS amplitude \n$y-\\\\beta_d d-\\\\beta_w w$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ,\n \"\"\"Contribution of weight to RMS amplitude \n$y-\\\\beta_d d-\\\\beta_v v$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ]\n for ax, x, y, a, xlabel, ylabel in zip(axs[:3], xs, ys, as_,\n xlabels, ylabels):\n plt.sca(ax)\n plot_linear_dependency(x, y, a=a, b=b, std=std, xlabel=xlabel,\n ylabel=ylabel)\n vars = data['vars']\n var_names = ['$\\\\beta_d$', '$\\\\beta_v$', '$\\\\beta_w$', '$y_0$',\n '$\\\\sigma_\\\\epsilon$']\n probs_mar = data['probs_mar']\n plot_parameters(vars, var_names, probs_mar, axes=axs[3:], 
units=[\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{m}}',\n '\\\\frac{\\\\mathrm{mm} \\\\cdot \\\\mathrm{h}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{km}}'\n , '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{kg}}',\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}',\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}'])\n axs[3].format(ylabel=\n 'Normalized marginal probability $\\\\frac{p(\\\\theta)}{p_{max}(\\\\theta)}$'\n )\n axs[3:].format(ylim=[0, 1], xmargin=0.1)\n for ax in axs[3:]:\n ax.xaxis.label.set_fontsize(8)\n axs[-5].format(xticks=[0, -0.15])\n axs[-4].format(xticks=[0, 3])\n axs[-3].format(xticks=[0, 4e-06], xformatter='sci')\n axs[-2].format(xticks=[0, 80])\n axs[-1].format(xscale='log', xticks=[30.0, 50.0])\n ticks = axs[2].get_xticks()\n axs[2].set_xticks(ticks[1::2])\n axs[2].format(xformatter='sci')\n axs[:4].format(abc=True)\n\n\ncatalog.register(Dependencies)\n", "<import token>\ndel catalog[-1]\n\n\nclass Dependencies_(Inputs_):\n\n def generate(self):\n super().generate()\n rms = self['Amplitude']\n distance = self['Distance']\n velocity = self['Vitesse']\n poids = self['Poids']\n STEPS = 24\n distance_dep = np.linspace(0.0, -0.25, STEPS)\n velocity_dep = np.linspace(0.0, 4.5, STEPS)\n poids_dep = np.linspace(0.0, 6e-06, STEPS)\n rms_0 = np.linspace(-40, 120, STEPS)\n rms_noise = np.logspace(1.3, 1.8, STEPS)\n vars = [distance_dep, velocity_dep, poids_dep, rms_0, rms_noise]\n posterior = get_posterior(vars, [distance, velocity, poids, np.\n ones_like(rms)], rms)\n print(posterior.sum())\n _, _, vars_max, probs_mar, _, prob_null = get_stats(posterior, vars,\n null_dims=[1, 2])\n print('Against H0:', 1 / prob_null)\n print('Most probable model:', vars_max)\n _, _, _, _, _, prob_velocity = get_stats(posterior, vars, null_dims=[1]\n )\n print('Against H0 for velocity:', 1 / prob_velocity)\n _, _, _, _, _, prob_weight = get_stats(posterior, vars, null_dims=[2])\n print('Against H0 for weight:', 1 / prob_weight)\n self['vars'] = vars\n self['posterior'] = posterior\n self['vars_max'] = vars_max\n self['probs_mar'] = probs_mar\n\n\nclass Dependencies(Figure):\n Metadata = Dependencies_\n\n def plot(self, data):\n QTY_VARS = 5\n _, axs = pplt.subplots([[1, *([2] * QTY_VARS)], [3, *range(4, 4 +\n QTY_VARS)]], ref=1, wratios=(1, *([1 / QTY_VARS] * QTY_VARS)),\n wspace=(None, *([0.05] * (QTY_VARS - 1))), figsize=[7.66, 7.66],\n sharey=False, sharex=False)\n rms = data['Amplitude']\n distance = data['Distance']\n velocity = data['Vitesse']\n weight = data['Poids']\n a1, a2, a3, b, std = data['vars_max']\n xs = [data['Distance'], data['Vitesse'], data['Poids']]\n ys = [rms - a2 * velocity - a3 * weight, rms - a1 * distance - a3 *\n weight, rms - a1 * distance - a2 * velocity]\n as_ = [a1, a2, a3]\n xlabels = ['Distance to railroad $d$ (m)',\n 'Train velocity $v$ (km/h)', 'Train weight $w$ (kg)']\n ylabels = [\n \"\"\"Contribution of distance to RMS amplitude \n$y-\\\\beta_v v-\\\\beta_w w$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ,\n \"\"\"Contribution of velocity to RMS amplitude \n$y-\\\\beta_d d-\\\\beta_w w$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ,\n \"\"\"Contribution of weight to RMS amplitude \n$y-\\\\beta_d d-\\\\beta_v v$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ]\n for ax, x, y, a, xlabel, ylabel in zip(axs[:3], xs, ys, as_,\n xlabels, ylabels):\n plt.sca(ax)\n plot_linear_dependency(x, y, a=a, b=b, std=std, xlabel=xlabel,\n ylabel=ylabel)\n vars = data['vars']\n var_names = ['$\\\\beta_d$', '$\\\\beta_v$', '$\\\\beta_w$', 
'$y_0$',\n '$\\\\sigma_\\\\epsilon$']\n probs_mar = data['probs_mar']\n plot_parameters(vars, var_names, probs_mar, axes=axs[3:], units=[\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{m}}',\n '\\\\frac{\\\\mathrm{mm} \\\\cdot \\\\mathrm{h}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{km}}'\n , '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{kg}}',\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}',\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}'])\n axs[3].format(ylabel=\n 'Normalized marginal probability $\\\\frac{p(\\\\theta)}{p_{max}(\\\\theta)}$'\n )\n axs[3:].format(ylim=[0, 1], xmargin=0.1)\n for ax in axs[3:]:\n ax.xaxis.label.set_fontsize(8)\n axs[-5].format(xticks=[0, -0.15])\n axs[-4].format(xticks=[0, 3])\n axs[-3].format(xticks=[0, 4e-06], xformatter='sci')\n axs[-2].format(xticks=[0, 80])\n axs[-1].format(xscale='log', xticks=[30.0, 50.0])\n ticks = axs[2].get_xticks()\n axs[2].set_xticks(ticks[1::2])\n axs[2].format(xformatter='sci')\n axs[:4].format(abc=True)\n\n\ncatalog.register(Dependencies)\n", "<import token>\n<code token>\n\n\nclass Dependencies_(Inputs_):\n\n def generate(self):\n super().generate()\n rms = self['Amplitude']\n distance = self['Distance']\n velocity = self['Vitesse']\n poids = self['Poids']\n STEPS = 24\n distance_dep = np.linspace(0.0, -0.25, STEPS)\n velocity_dep = np.linspace(0.0, 4.5, STEPS)\n poids_dep = np.linspace(0.0, 6e-06, STEPS)\n rms_0 = np.linspace(-40, 120, STEPS)\n rms_noise = np.logspace(1.3, 1.8, STEPS)\n vars = [distance_dep, velocity_dep, poids_dep, rms_0, rms_noise]\n posterior = get_posterior(vars, [distance, velocity, poids, np.\n ones_like(rms)], rms)\n print(posterior.sum())\n _, _, vars_max, probs_mar, _, prob_null = get_stats(posterior, vars,\n null_dims=[1, 2])\n print('Against H0:', 1 / prob_null)\n print('Most probable model:', vars_max)\n _, _, _, _, _, prob_velocity = get_stats(posterior, vars, null_dims=[1]\n )\n print('Against H0 for velocity:', 1 / prob_velocity)\n _, _, _, _, _, prob_weight = get_stats(posterior, vars, null_dims=[2])\n print('Against H0 for weight:', 1 / prob_weight)\n self['vars'] = vars\n self['posterior'] = posterior\n self['vars_max'] = vars_max\n self['probs_mar'] = probs_mar\n\n\nclass Dependencies(Figure):\n Metadata = Dependencies_\n\n def plot(self, data):\n QTY_VARS = 5\n _, axs = pplt.subplots([[1, *([2] * QTY_VARS)], [3, *range(4, 4 +\n QTY_VARS)]], ref=1, wratios=(1, *([1 / QTY_VARS] * QTY_VARS)),\n wspace=(None, *([0.05] * (QTY_VARS - 1))), figsize=[7.66, 7.66],\n sharey=False, sharex=False)\n rms = data['Amplitude']\n distance = data['Distance']\n velocity = data['Vitesse']\n weight = data['Poids']\n a1, a2, a3, b, std = data['vars_max']\n xs = [data['Distance'], data['Vitesse'], data['Poids']]\n ys = [rms - a2 * velocity - a3 * weight, rms - a1 * distance - a3 *\n weight, rms - a1 * distance - a2 * velocity]\n as_ = [a1, a2, a3]\n xlabels = ['Distance to railroad $d$ (m)',\n 'Train velocity $v$ (km/h)', 'Train weight $w$ (kg)']\n ylabels = [\n \"\"\"Contribution of distance to RMS amplitude \n$y-\\\\beta_v v-\\\\beta_w w$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ,\n \"\"\"Contribution of velocity to RMS amplitude \n$y-\\\\beta_d d-\\\\beta_w w$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ,\n \"\"\"Contribution of weight to RMS amplitude \n$y-\\\\beta_d d-\\\\beta_v v$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ]\n for ax, x, y, a, xlabel, ylabel in zip(axs[:3], xs, ys, as_,\n xlabels, ylabels):\n plt.sca(ax)\n plot_linear_dependency(x, y, a=a, b=b, 
std=std, xlabel=xlabel,\n ylabel=ylabel)\n vars = data['vars']\n var_names = ['$\\\\beta_d$', '$\\\\beta_v$', '$\\\\beta_w$', '$y_0$',\n '$\\\\sigma_\\\\epsilon$']\n probs_mar = data['probs_mar']\n plot_parameters(vars, var_names, probs_mar, axes=axs[3:], units=[\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{m}}',\n '\\\\frac{\\\\mathrm{mm} \\\\cdot \\\\mathrm{h}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{km}}'\n , '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{kg}}',\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}',\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}'])\n axs[3].format(ylabel=\n 'Normalized marginal probability $\\\\frac{p(\\\\theta)}{p_{max}(\\\\theta)}$'\n )\n axs[3:].format(ylim=[0, 1], xmargin=0.1)\n for ax in axs[3:]:\n ax.xaxis.label.set_fontsize(8)\n axs[-5].format(xticks=[0, -0.15])\n axs[-4].format(xticks=[0, 3])\n axs[-3].format(xticks=[0, 4e-06], xformatter='sci')\n axs[-2].format(xticks=[0, 80])\n axs[-1].format(xscale='log', xticks=[30.0, 50.0])\n ticks = axs[2].get_xticks()\n axs[2].set_xticks(ticks[1::2])\n axs[2].format(xformatter='sci')\n axs[:4].format(abc=True)\n\n\n<code token>\n", "<import token>\n<code token>\n\n\nclass Dependencies_(Inputs_):\n <function token>\n\n\nclass Dependencies(Figure):\n Metadata = Dependencies_\n\n def plot(self, data):\n QTY_VARS = 5\n _, axs = pplt.subplots([[1, *([2] * QTY_VARS)], [3, *range(4, 4 +\n QTY_VARS)]], ref=1, wratios=(1, *([1 / QTY_VARS] * QTY_VARS)),\n wspace=(None, *([0.05] * (QTY_VARS - 1))), figsize=[7.66, 7.66],\n sharey=False, sharex=False)\n rms = data['Amplitude']\n distance = data['Distance']\n velocity = data['Vitesse']\n weight = data['Poids']\n a1, a2, a3, b, std = data['vars_max']\n xs = [data['Distance'], data['Vitesse'], data['Poids']]\n ys = [rms - a2 * velocity - a3 * weight, rms - a1 * distance - a3 *\n weight, rms - a1 * distance - a2 * velocity]\n as_ = [a1, a2, a3]\n xlabels = ['Distance to railroad $d$ (m)',\n 'Train velocity $v$ (km/h)', 'Train weight $w$ (kg)']\n ylabels = [\n \"\"\"Contribution of distance to RMS amplitude \n$y-\\\\beta_v v-\\\\beta_w w$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ,\n \"\"\"Contribution of velocity to RMS amplitude \n$y-\\\\beta_d d-\\\\beta_w w$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ,\n \"\"\"Contribution of weight to RMS amplitude \n$y-\\\\beta_d d-\\\\beta_v v$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ]\n for ax, x, y, a, xlabel, ylabel in zip(axs[:3], xs, ys, as_,\n xlabels, ylabels):\n plt.sca(ax)\n plot_linear_dependency(x, y, a=a, b=b, std=std, xlabel=xlabel,\n ylabel=ylabel)\n vars = data['vars']\n var_names = ['$\\\\beta_d$', '$\\\\beta_v$', '$\\\\beta_w$', '$y_0$',\n '$\\\\sigma_\\\\epsilon$']\n probs_mar = data['probs_mar']\n plot_parameters(vars, var_names, probs_mar, axes=axs[3:], units=[\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{m}}',\n '\\\\frac{\\\\mathrm{mm} \\\\cdot \\\\mathrm{h}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{km}}'\n , '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{kg}}',\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}',\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}'])\n axs[3].format(ylabel=\n 'Normalized marginal probability $\\\\frac{p(\\\\theta)}{p_{max}(\\\\theta)}$'\n )\n axs[3:].format(ylim=[0, 1], xmargin=0.1)\n for ax in axs[3:]:\n ax.xaxis.label.set_fontsize(8)\n axs[-5].format(xticks=[0, -0.15])\n axs[-4].format(xticks=[0, 3])\n axs[-3].format(xticks=[0, 4e-06], xformatter='sci')\n axs[-2].format(xticks=[0, 80])\n axs[-1].format(xscale='log', 
xticks=[30.0, 50.0])\n ticks = axs[2].get_xticks()\n axs[2].set_xticks(ticks[1::2])\n axs[2].format(xformatter='sci')\n axs[:4].format(abc=True)\n\n\n<code token>\n", "<import token>\n<code token>\n<class token>\n\n\nclass Dependencies(Figure):\n Metadata = Dependencies_\n\n def plot(self, data):\n QTY_VARS = 5\n _, axs = pplt.subplots([[1, *([2] * QTY_VARS)], [3, *range(4, 4 +\n QTY_VARS)]], ref=1, wratios=(1, *([1 / QTY_VARS] * QTY_VARS)),\n wspace=(None, *([0.05] * (QTY_VARS - 1))), figsize=[7.66, 7.66],\n sharey=False, sharex=False)\n rms = data['Amplitude']\n distance = data['Distance']\n velocity = data['Vitesse']\n weight = data['Poids']\n a1, a2, a3, b, std = data['vars_max']\n xs = [data['Distance'], data['Vitesse'], data['Poids']]\n ys = [rms - a2 * velocity - a3 * weight, rms - a1 * distance - a3 *\n weight, rms - a1 * distance - a2 * velocity]\n as_ = [a1, a2, a3]\n xlabels = ['Distance to railroad $d$ (m)',\n 'Train velocity $v$ (km/h)', 'Train weight $w$ (kg)']\n ylabels = [\n \"\"\"Contribution of distance to RMS amplitude \n$y-\\\\beta_v v-\\\\beta_w w$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ,\n \"\"\"Contribution of velocity to RMS amplitude \n$y-\\\\beta_d d-\\\\beta_w w$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ,\n \"\"\"Contribution of weight to RMS amplitude \n$y-\\\\beta_d d-\\\\beta_v v$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ]\n for ax, x, y, a, xlabel, ylabel in zip(axs[:3], xs, ys, as_,\n xlabels, ylabels):\n plt.sca(ax)\n plot_linear_dependency(x, y, a=a, b=b, std=std, xlabel=xlabel,\n ylabel=ylabel)\n vars = data['vars']\n var_names = ['$\\\\beta_d$', '$\\\\beta_v$', '$\\\\beta_w$', '$y_0$',\n '$\\\\sigma_\\\\epsilon$']\n probs_mar = data['probs_mar']\n plot_parameters(vars, var_names, probs_mar, axes=axs[3:], units=[\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{m}}',\n '\\\\frac{\\\\mathrm{mm} \\\\cdot \\\\mathrm{h}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{km}}'\n , '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{kg}}',\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}',\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}'])\n axs[3].format(ylabel=\n 'Normalized marginal probability $\\\\frac{p(\\\\theta)}{p_{max}(\\\\theta)}$'\n )\n axs[3:].format(ylim=[0, 1], xmargin=0.1)\n for ax in axs[3:]:\n ax.xaxis.label.set_fontsize(8)\n axs[-5].format(xticks=[0, -0.15])\n axs[-4].format(xticks=[0, 3])\n axs[-3].format(xticks=[0, 4e-06], xformatter='sci')\n axs[-2].format(xticks=[0, 80])\n axs[-1].format(xscale='log', xticks=[30.0, 50.0])\n ticks = axs[2].get_xticks()\n axs[2].set_xticks(ticks[1::2])\n axs[2].format(xformatter='sci')\n axs[:4].format(abc=True)\n\n\n<code token>\n", "<import token>\n<code token>\n<class token>\n\n\nclass Dependencies(Figure):\n <assignment token>\n\n def plot(self, data):\n QTY_VARS = 5\n _, axs = pplt.subplots([[1, *([2] * QTY_VARS)], [3, *range(4, 4 +\n QTY_VARS)]], ref=1, wratios=(1, *([1 / QTY_VARS] * QTY_VARS)),\n wspace=(None, *([0.05] * (QTY_VARS - 1))), figsize=[7.66, 7.66],\n sharey=False, sharex=False)\n rms = data['Amplitude']\n distance = data['Distance']\n velocity = data['Vitesse']\n weight = data['Poids']\n a1, a2, a3, b, std = data['vars_max']\n xs = [data['Distance'], data['Vitesse'], data['Poids']]\n ys = [rms - a2 * velocity - a3 * weight, rms - a1 * distance - a3 *\n weight, rms - a1 * distance - a2 * velocity]\n as_ = [a1, a2, a3]\n xlabels = ['Distance to railroad $d$ (m)',\n 'Train velocity $v$ (km/h)', 'Train weight $w$ (kg)']\n ylabels = [\n \"\"\"Contribution of 
distance to RMS amplitude \n$y-\\\\beta_v v-\\\\beta_w w$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ,\n \"\"\"Contribution of velocity to RMS amplitude \n$y-\\\\beta_d d-\\\\beta_w w$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ,\n \"\"\"Contribution of weight to RMS amplitude \n$y-\\\\beta_d d-\\\\beta_v v$ ($\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}$)\"\"\"\n ]\n for ax, x, y, a, xlabel, ylabel in zip(axs[:3], xs, ys, as_,\n xlabels, ylabels):\n plt.sca(ax)\n plot_linear_dependency(x, y, a=a, b=b, std=std, xlabel=xlabel,\n ylabel=ylabel)\n vars = data['vars']\n var_names = ['$\\\\beta_d$', '$\\\\beta_v$', '$\\\\beta_w$', '$y_0$',\n '$\\\\sigma_\\\\epsilon$']\n probs_mar = data['probs_mar']\n plot_parameters(vars, var_names, probs_mar, axes=axs[3:], units=[\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{m}}',\n '\\\\frac{\\\\mathrm{mm} \\\\cdot \\\\mathrm{h}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{km}}'\n , '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s} \\\\cdot \\\\mathrm{kg}}',\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}',\n '\\\\frac{\\\\mathrm{mm}}{\\\\mathrm{s}}'])\n axs[3].format(ylabel=\n 'Normalized marginal probability $\\\\frac{p(\\\\theta)}{p_{max}(\\\\theta)}$'\n )\n axs[3:].format(ylim=[0, 1], xmargin=0.1)\n for ax in axs[3:]:\n ax.xaxis.label.set_fontsize(8)\n axs[-5].format(xticks=[0, -0.15])\n axs[-4].format(xticks=[0, 3])\n axs[-3].format(xticks=[0, 4e-06], xformatter='sci')\n axs[-2].format(xticks=[0, 80])\n axs[-1].format(xscale='log', xticks=[30.0, 50.0])\n ticks = axs[2].get_xticks()\n axs[2].set_xticks(ticks[1::2])\n axs[2].format(xformatter='sci')\n axs[:4].format(abc=True)\n\n\n<code token>\n", "<import token>\n<code token>\n<class token>\n\n\nclass Dependencies(Figure):\n <assignment token>\n <function token>\n\n\n<code token>\n", "<import token>\n<code token>\n<class token>\n<class token>\n<code token>\n" ]
false
99,425
69dffc2ba8b2b81b27bf130ca79e8022ef2d248d
#!/usr/bin/env python

from threading import Thread
import rospy
import math
from robotis_mini_control.robotis_mini import RobotisMiniControlInterface
from geometry_msgs.msg import Twist
import time
from std_msgs.msg import Empty, Float64, Float64MultiArray
import argparse
import std_srvs.srv
import sys


class SinusoidFunction:
    """
    SinusoidFunction for single joints CPG style
    Provides a parameterized sine wave function as y=amplitude_offset+amplitude*(phase_offset+angular_frequency*x)
    """
    def __init__(self):
        self.amplitude_offset=0
        self.amplitude=1
        self.phase_offset=0
        self.angular_frequency=1

    def get(self, x):
        """ x between 0 and 1"""
        f = math.sin(self.phase_offset + self.angular_frequency*x)
        return self.amplitude_offset + self.amplitude*f

    def clone(self):
        z=SinusoidFunction()
        z.amplitude_offset=self.amplitude_offset
        z.amplitude=self.amplitude
        z.phase_offset=self.phase_offset
        z.angular_frequency=self.angular_frequency
        return z

    def mirror(self):
        z=self.clone()
        z.amplitude_offset *= -1
        z.amplitude *= -1
        return z

    def mirror_keep_amplitude_offset(self):
        z=self.clone()
        z.amplitude *= -1
        return z

    def mirror_freq(self):
        z=self.clone()
        z.phase_offset *= -1
        z.angular_frequency *= -1
        return z

    def __str__(self):
        return "y=%.2f+%.2f*sin(%.2f+%.2f*x)"%(self.amplitude_offset, self.amplitude, self.phase_offset, self.angular_frequency)

class WholeBodyWalkerFunction:
    """
    Multi-joint walk function for RobotisMini
    Creates SinusoidFunction for each joint with different parameters
    """
    def __init__(self, walking_params):
        self.parameters={}

        print walking_params
        for pn, pp in walking_params.iteritems():
            self.parameters[pn + '_amplitude']=pp[0]
            self.parameters[pn + '_amplitude_offset']=pp[1]
            self.parameters[pn + '_phase_offset']=pp[2]

        self.parameters["step_frequency"]=math.pi

        self.parameters["vx_amplitude"]=0.5
        self.parameters["vy_amplitude"]=0.5
        self.parameters["vt_amplitude"]=0.4

        self.generate()

    def generate(self):
        """
        Build CPG functions for walk-on-spot (no translation or rotation, only legs up/down)
        """

        self.pfn={} # phase joint functions
        self.afn={} # anti phase joint functions

        ## Foot and hip -> Lateral motion
        foot_func=SinusoidFunction()
        foot_func.angular_frequency= self.parameters["step_frequency"]
        foot_func.amplitude= self.parameters["foot_amplitude"]
        foot_func.amplitude_offset= self.parameters["foot_amplitude_offset"]
        foot_func.phase_offset= self.parameters["foot_phase_offset"]
        self.pfn["l_foot_joint"]=foot_func
        foot_func_af=foot_func.mirror()
        self.afn["l_foot_joint"]=foot_func_af

        hip_func=SinusoidFunction()
        hip_func.angular_frequency= self.parameters["step_frequency"]
        hip_func.amplitude= self.parameters["hip_amplitude"]
        hip_func.amplitude_offset= self.parameters["hip_amplitude_offset"]
        hip_func.phase_offset= self.parameters["hip_phase_offset"]
        self.pfn["l_hip_joint"]=hip_func
        hip_func_af=hip_func.mirror()
        self.afn["l_hip_joint"]=hip_func_af

        ## Thigh, ankle and knee -> Frontal motion
        thigh_func=SinusoidFunction()
        thigh_func.angular_frequency= self.parameters["step_frequency"]
        thigh_func.amplitude= self.parameters["thigh_amplitude"]
        thigh_func.amplitude_offset= self.parameters["thigh_amplitude_offset"]
        thigh_func.phase_offset= self.parameters["thigh_phase_offset"]
        self.pfn["l_thigh_joint"]=thigh_func
        thigh_func_af=thigh_func.mirror_keep_amplitude_offset()
        self.afn["l_thigh_joint"]=thigh_func_af

        ankle_func=SinusoidFunction()
        ankle_func.angular_frequency= self.parameters["step_frequency"]
        ankle_func.amplitude= self.parameters["ankle_amplitude"]
        ankle_func.amplitude_offset= self.parameters["ankle_amplitude_offset"]
        ankle_func.phase_offset= self.parameters["ankle_phase_offset"]
        self.pfn["l_ankle_joint"]=ankle_func
        ankle_func_af=ankle_func.mirror_keep_amplitude_offset()
        self.afn["l_ankle_joint"]=ankle_func_af

        knee_func=SinusoidFunction()
        knee_func.angular_frequency= self.parameters["step_frequency"]
        knee_func.amplitude= self.parameters["knee_amplitude"]
        knee_func.amplitude_offset= self.parameters["knee_amplitude_offset"]
        knee_func.phase_offset= self.parameters["knee_phase_offset"]
        self.pfn["l_knee_joint"]=knee_func
        knee_func_af=knee_func.mirror_keep_amplitude_offset()
        self.afn["l_knee_joint"]=knee_func_af

        #f3=SinusoidFunction()
        #f3.angular_frequency=self.parameters["step_frequency"]
        #f3.amplitude=self.parameters["step_amplitude"]
        #f3.amplitude_offset=self.parameters["step_amplitude_offset"]
        #self.pfn["l_thigh_joint"]= f3
        #f33=f3.clone()
        #f33.amplitude_offset = self.parameters["ankle_amplitude_offset"]
        #f33.amplitude = self.parameters["ankle_amplitude"]
        #self.pfn["l_ankle_joint"]=f33
        #f4=f3.mirror()
        ##f4.amplitude_offset -= 0.4
        #self.pfn["l_knee_joint"]=f4

        #f5=f3.mirror_keep_amplitude_offset()
        #self.afn["l_thigh_joint"]=f5

        #f6=f33.mirror_keep_amplitude_offset()
        #self.afn["l_ankle_joint"]=f6
        #f7=f5.mirror()
        ##f7.amplitude_offset -= 0.4
        #self.afn["l_knee_joint"]=f7

        self.generate_right()

        self.show()

    def generate_right(self):
        """
        Mirror CPG functions from left to right and antiphase right
        """
        l=[ v[2:] for v in self.pfn.keys()]
        for j in l:
            self.pfn["r_"+j]=self.afn["l_"+j].mirror_keep_amplitude_offset()
            self.pfn["r_"+j].phase_offset += math.pi
            self.afn["r_"+j]=self.pfn["l_"+j].mirror_keep_amplitude_offset()
            self.afn["r_"+j].phase_offset += math.pi

    def get(self, phase, x, velocity):
        """ Obtain the joint angles for a given phase, position in cycle (x 0,1)) and velocity parameters """
        angles={}
        for j in self.pfn.keys():
            if phase:
                v=self.pfn[j].get(x)
                angles[j]=v
            else:
                angles[j]=self.afn[j].get(x)
        self.apply_velocity(angles,velocity,phase,x)
        return angles

    def show(self):
        """
        Display the CPG functions used
        """
        for j in self.pfn.keys():
            print j,"p",self.pfn[j],"a",self.afn[j]
        print self.pfn["l_knee_joint"].amplitude_offset

    def apply_velocity(self, angles, velocity, phase, x):
        """ Modify on the walk-on-spot joint angles to apply the velocity vector"""

        # VX
        v=velocity[0]*self.parameters["vx_amplitude"]
        d=(x*2-1)*v
        if phase:
            angles["l_thigh_joint"]+=d
            angles["l_ankle_joint"]+=d
            angles["r_thigh_joint"]+=d
            angles["r_ankle_joint"]+=d
        else:
            angles["l_thigh_joint"]-=d
            angles["l_ankle_joint"]-=d
            angles["r_thigh_joint"]-=d
            angles["r_ankle_joint"]-=d

        # VY
        v=velocity[1]*self.parameters["vy_amplitude"]
        d=(x)*v
        d2=(1-x)*v
        if v>=0:
            if phase:
                angles["l_hip_joint"]-=d
                angles["l_foot_joint"]-=d
                angles["r_hip_joint"]+=d
                angles["r_foot_joint"]+=d
            else:
                angles["l_hip_joint"]-=d2
                angles["l_foot_joint"]-=d2
                angles["r_hip_joint"]+=d2
                angles["r_foot_joint"]+=d2
        else:
            if phase:
                angles["l_hip_joint"]+=d2
                angles["l_foot_joint"]+=d2
                angles["r_hip_joint"]-=d2
                angles["r_foot_joint"]-=d2
            else:
                angles["l_hip_joint"]+=d
                angles["l_foot_joint"]+=d
                angles["r_hip_joint"]-=d
                angles["r_foot_joint"]-=d

        ## VT
        #v=velocity[2]*self.parameters["vt_amplitude"]
        #d=(x)*v
        #d2=(1-x)*v
        #if v>=0:
            #if phase:
                #angles["j_pelvis_l"]=-d
                #angles["j_pelvis_r"]=d
            #else:
                #angles["j_pelvis_l"]=-d2
                #angles["j_pelvis_r"]=d2
        #else:
            #if phase:
                #angles["j_pelvis_l"]=d2
                #angles["j_pelvis_r"]=-d2
            #else:
                #angles["j_pelvis_l"]=d
                #angles["j_pelvis_r"]=-d

class Walker:
    """
    Class for making RobotisMini walk
    """
    def __init__(self, robotis_mini_ci, real_robot):

        self.robotis_mini_ci=robotis_mini_ci

        self.real_robot = real_robot

        self.displacing=False #When the robot is walking AND displacing in the plane (velocity is non-zero)
        self.walking=False #When the robot is walking: moving up and down the legs
        self.velocity=[0,0,0]

        #Default walking params
        self.walking_params = {}
        self.walking_params['foot'] = [0.4,0,0]
        self.walking_params['ankle'] = [-0.01,-0.20,0]
        self.walking_params['knee'] = [0.4,0.7,0]
        self.walking_params['thigh'] = [-0.4,-0.7,0]
        self.walking_params['hip'] = [0.4,0,0]

        self.wb_walkerfunc=WholeBodyWalkerFunction(self.walking_params)

        self.initial_wq = self.wb_walkerfunc.get(True, 0, [0,0,0]) #First joint configuration to start the walking motion

        print "__init__:initial_wq"
        j_names=self.initial_wq.keys()
        for jn in j_names:
            print jn + str(":") + str(self.initial_wq[jn])

        self._th_walk=None #Walking thread

        self._cycle_period = 10 #seconds

        self._sub_cmd_vel=rospy.Subscriber(robotis_mini_ci.ns+"cmd_vel", Twist,self._cb_cmd_vel, queue_size=1)
        self._sub_cmd_stop=rospy.Subscriber(robotis_mini_ci.ns+"stop_srv", Empty,self._cb_cmd_stop, queue_size=1)
        self._sub_cmd_restart=rospy.Subscriber(robotis_mini_ci.ns+"restart_srv", Empty,self._cb_cmd_restart, queue_size=1)
        self._sub_cmd_restart=rospy.Subscriber(robotis_mini_ci.ns+"walking_params", Float64MultiArray,self._cb_new_walking_params, queue_size=1)

        if not self.real_robot:
            self.pubs = {}
            # Wait until the joints have been populated
            while self.robotis_mini_ci.q_names is None:
                time.sleep(1)
            for jn in self.robotis_mini_ci.q_names:
                self.pubs[jn] = rospy.Publisher('/robotis_mini/' + jn + '_position_controller/command', Float64, queue_size=1)
            rospy.loginfo("Waiting for gazebo services")
            rospy.wait_for_service('/gazebo/pause_physics')
            self.pause_simulation_srv = rospy.ServiceProxy('/gazebo/pause_physics', std_srvs.srv.Empty)
            rospy.wait_for_service('/gazebo/reset_world')
            self.reset_world_srv = rospy.ServiceProxy('/gazebo/reset_world', std_srvs.srv.Empty)
            rospy.wait_for_service('/gazebo/unpause_physics')
            self.unpause_simulation_srv = rospy.ServiceProxy('/gazebo/unpause_physics', std_srvs.srv.Empty)

    def _cb_new_walking_params(self,msg):
        """
        Processes a new set of parameters
        """
        print "Walker new set of parameters received"
        self._cycle_period = msg.data[0]
        self.walking_params['foot'] = [msg.data[1],msg.data[2],msg.data[3]]
        self.walking_params['ankle'] = [msg.data[4],msg.data[5],msg.data[6]]
        self.walking_params['knee'] = [msg.data[7],msg.data[8],msg.data[9]]
        self.walking_params['thigh'] = [msg.data[10],msg.data[11],msg.data[12]]
        self.walking_params['hip'] = [msg.data[13],msg.data[14],msg.data[15]]

        self.wb_walkerfunc=WholeBodyWalkerFunction(self.walking_params)

        self.initial_wq = self.wb_walkerfunc.get(True, 0, [0,0,0]) #First joint configuration to start the walking motion

        print "initial_wq"
        j_names=self.initial_wq.keys()
        for jn in j_names:
            print jn + str(":") + str(self.initial_wq[jn])

    def _cb_cmd_restart(self,msg):
        """
        Processes cmd_restart and to start a new trial
        """
        print "Walker restart command received"

        #Stop the running thread
        while self.displacing or self.walking or self._th_walk:
            rospy.loginfo('Stopping walking thread')
            self.stop()

        #If the robot is simuated -> send to initial configuration
        if not self.real_robot:
            rospy.loginfo("Sending robot to zero configuration")
            for jn in self.robotis_mini_ci.q_names:
                self.pubs[jn].publish(0.0)

            time.sleep(1)

            #If the robot is simulated -> reset simulation
            try:
                self.pause_simulation_srv()
                rospy.loginfo( "Paused gazebo")
                time.sleep(1)
                self.reset_world_srv()
                rospy.loginfo( "Reseting gazebo")
                time.sleep(1)
                self.unpause_simulation_srv()
                rospy.loginfo( "Unpaused gazebo")
                time.sleep(1)
            except rospy.ServiceException, e:
                print "Service call failed: %s"%e

    def _cb_cmd_vel(self,msg):
        """
        Processes cmd_vel and update walker speed
        """
        print "Walker velocity command received: ",msg
        vx=msg.linear.x
        vy=msg.linear.y
        vt=msg.angular.z
        self.start()
        self.set_desired_velocity(vx,vy,vt)

    def _cb_cmd_stop(self,msg):
        """
        Processes cmd_stop
        """
        print "Walker stop command received: "
        self.stop()

    def goto_initial_wq(self):
        """
        If not there yet, go to initial walking configuration
        """
        rospy.loginfo("Going to initial walking configuration")
        while self.get_qdist_to_initial_wq()>0.1:
            rospy.loginfo("Commanding to go to initial walking configuration")
            print "Initial configuration"
            print self.initial_wq
            self.robotis_mini_ci.set_qd_interpolated(self.initial_wq, 2)
            rospy.sleep(2)
        rospy.loginfo("Initial walking configuration reached")
        print "Distance",self.get_qdist_to_initial_wq()

    def start(self):
        if not self.displacing:
            self.displacing=True
            self.goto_initial_wq()
            self._th_walk=Thread(target=self._do_walk)
            self._th_walk.start()
            self.walking=True

    def stop(self):
        if self.displacing:
            self.walking=False
            rospy.loginfo("Waiting for stopped")
            while not rospy.is_shutdown() and self._th_walk is not None:
                rospy.sleep(0.1)
            rospy.loginfo("Stopped")
            self.displacing=False

    def set_desired_velocity(self,x,y,t):
        self.desired_velocity=[x,y,t]

    def _do_walk(self):
        """
        Main walking loop, smoothly update velocity vectors and apply corresponding joint configurations
        """
        rospy.loginfo("Started walking thread")
        wb_walkerfunc=self.wb_walkerfunc

        # Global walk loop
        n=50
        print "Thread rate", 1.0/(self._cycle_period/(2.0*n))
        r=rospy.Rate(1.0/(self._cycle_period/(2.0*n)))
        p=True
        i=0
        self.current_velocity=[0,0,0]
        while not rospy.is_shutdown() and (self.walking or i<n or self.is_displacing()):
            if not self.walking:
                self.desired_velocity=[0,0,0]
            #if not self.is_displacing() and i==0: # Do not move if nothing to do and already at 0
            #    self.update_current_velocity(self.desired_velocity, n)
            #    r.sleep()
            #    continue
            x=float(i)/n
            qd_curr=wb_walkerfunc.get(p, x, self.current_velocity)
            self.update_current_velocity(self.desired_velocity, n)
            self.robotis_mini_ci.set_qd(qd_curr)
            i+=1
            if i>n:
                i=0
                p=not p
            r.sleep()
        rospy.loginfo("Finished walking thread")

        self._th_walk=None

    def is_displacing(self):
        """
        Checks if the current velocity is not zero and returns True in that case
        """
        e=0.02
        for v in self.current_velocity:
            if abs(v)>e: return True
        return False

    def update_current_velocity(self, target_velocity, n):
        """
        A pseudo-interpolation to a target velocity
        """
        a=3/float(n)
        b=1-a
        self.current_velocity=[a*tv+b*cv for (tv,cv) in zip(target_velocity, self.current_velocity)]

    def get_qdist_to_initial_wq(self):
        """
        Computes the absolute distance between the current robot joint state and the initial walking configuration
        """
        current_q=self.robotis_mini_ci.get_q()
        return get_distance(self.initial_wq, current_q)

def get_distance(qa_dict, qb_dict):
    """
    Computes sum of absolute distances between two sets of joints represented as dictionaries of (jointName, jointConfiguration)
    """
    d=0
    j_names=qa_dict.keys()
    if len(j_names)==0:
        rospy.loginfo("Length is 0")
        return 0
    for jn in j_names:
        d+=abs(qb_dict[jn]-qa_dict[jn])
    d/=len(j_names)
    return d

if __name__=="__main__":
    rospy.init_node("walker")

    parser = argparse.ArgumentParser(description='Walker trajectory generator')
    parser.add_argument('--real',action='store_true', help='define when using the real robot')

    options, args = parser.parse_known_args()

    if options.real:
        rospy.loginfo("Real Robot!")
    else:
        rospy.loginfo("Simulated Robot!")

    rospy.loginfo("Instantiating RobotisMini RobotisMiniControlInterface")
    robotis_mini_ci=RobotisMiniControlInterface(real_robot=options.real)
    rospy.loginfo("Instantiating RobotisMini Walker")
    walker=Walker(robotis_mini_ci, options.real)

    rospy.loginfo("RobotisMini Walker Ready")
    while not rospy.is_shutdown():
        time.sleep(1)
[ "#!/usr/bin/env python\r\n\r\nfrom threading import Thread\r\nimport rospy\r\nimport math\r\nfrom robotis_mini_control.robotis_mini import RobotisMiniControlInterface\r\nfrom geometry_msgs.msg import Twist\r\nimport time\r\nfrom std_msgs.msg import Empty, Float64, Float64MultiArray\r\nimport argparse\r\nimport std_srvs.srv\r\nimport sys\r\n\r\n\r\nclass SinusoidFunction:\r\n \"\"\"\r\n SinusoidFunction for single joints CPG style\r\n Provides a parameterized sine wave function as y=amplitude_offset+amplitude*(phase_offset+angular_frequency*x)\r\n \"\"\"\r\n def __init__(self):\r\n self.amplitude_offset=0\r\n self.amplitude=1\r\n self.phase_offset=0\r\n self.angular_frequency=1\r\n \r\n def get(self, x):\r\n \"\"\" x between 0 and 1\"\"\"\r\n f = math.sin(self.phase_offset + self.angular_frequency*x)\r\n return self.amplitude_offset + self.amplitude*f \r\n \r\n def clone(self):\r\n z=SinusoidFunction()\r\n z.amplitude_offset=self.amplitude_offset\r\n z.amplitude=self.amplitude\r\n z.phase_offset=self.phase_offset\r\n z.angular_frequency=self.angular_frequency\r\n return z\r\n \r\n def mirror(self):\r\n z=self.clone()\r\n z.amplitude_offset *= -1\r\n z.amplitude *= -1\r\n return z\r\n \r\n def mirror_keep_amplitude_offset(self):\r\n z=self.clone()\r\n z.amplitude *= -1\r\n return z\r\n \r\n def mirror_freq(self):\r\n z=self.clone()\r\n z.phase_offset *= -1\r\n z.angular_frequency *= -1\r\n return z\r\n \r\n def __str__(self):\r\n return \"y=%.2f+%.2f*sin(%.2f+%.2f*x)\"%(self.amplitude_offset, self.amplitude, self.phase_offset, self.angular_frequency)\r\n \r\nclass WholeBodyWalkerFunction:\r\n \"\"\"\r\n Multi-joint walk function for RobotisMini \r\n Creates SinusoidFunction for each joint with different parameters\r\n \"\"\"\r\n def __init__(self, walking_params):\r\n self.parameters={}\r\n \r\n print walking_params\r\n for pn, pp in walking_params.iteritems():\r\n self.parameters[pn + '_amplitude']=pp[0]\r\n self.parameters[pn + '_amplitude_offset']=pp[1]\r\n self.parameters[pn + '_phase_offset']=pp[2]\r\n\r\n self.parameters[\"step_frequency\"]=math.pi\r\n \r\n self.parameters[\"vx_amplitude\"]=0.5\r\n self.parameters[\"vy_amplitude\"]=0.5\r\n self.parameters[\"vt_amplitude\"]=0.4\r\n \r\n self.generate()\r\n \r\n def generate(self):\r\n \"\"\"\r\n Build CPG functions for walk-on-spot (no translation or rotation, only legs up/down)\r\n \"\"\" \r\n \r\n self.pfn={} # phase joint functions \r\n self.afn={} # anti phase joint functions\r\n\r\n ## Foot and hip -> Lateral motion\r\n foot_func=SinusoidFunction()\r\n foot_func.angular_frequency= self.parameters[\"step_frequency\"]\r\n foot_func.amplitude= self.parameters[\"foot_amplitude\"]\r\n foot_func.amplitude_offset= self.parameters[\"foot_amplitude_offset\"]\r\n foot_func.phase_offset= self.parameters[\"foot_phase_offset\"]\r\n self.pfn[\"l_foot_joint\"]=foot_func \r\n foot_func_af=foot_func.mirror()\r\n self.afn[\"l_foot_joint\"]=foot_func_af\r\n \r\n hip_func=SinusoidFunction()\r\n hip_func.angular_frequency= self.parameters[\"step_frequency\"]\r\n hip_func.amplitude= self.parameters[\"hip_amplitude\"]\r\n hip_func.amplitude_offset= self.parameters[\"hip_amplitude_offset\"]\r\n hip_func.phase_offset= self.parameters[\"hip_phase_offset\"]\r\n self.pfn[\"l_hip_joint\"]=hip_func\r\n hip_func_af=hip_func.mirror()\r\n self.afn[\"l_hip_joint\"]=hip_func_af\r\n \r\n ## Thigh, ankle and knee -> Frontal motion\r\n thigh_func=SinusoidFunction()\r\n thigh_func.angular_frequency= self.parameters[\"step_frequency\"]\r\n thigh_func.amplitude= 
self.parameters[\"thigh_amplitude\"]\r\n thigh_func.amplitude_offset= self.parameters[\"thigh_amplitude_offset\"]\r\n thigh_func.phase_offset= self.parameters[\"thigh_phase_offset\"]\r\n self.pfn[\"l_thigh_joint\"]=thigh_func\r\n thigh_func_af=thigh_func.mirror_keep_amplitude_offset()\r\n self.afn[\"l_thigh_joint\"]=thigh_func_af\r\n \r\n ankle_func=SinusoidFunction()\r\n ankle_func.angular_frequency= self.parameters[\"step_frequency\"]\r\n ankle_func.amplitude= self.parameters[\"ankle_amplitude\"]\r\n ankle_func.amplitude_offset= self.parameters[\"ankle_amplitude_offset\"]\r\n ankle_func.phase_offset= self.parameters[\"ankle_phase_offset\"]\r\n self.pfn[\"l_ankle_joint\"]=ankle_func\r\n ankle_func_af=ankle_func.mirror_keep_amplitude_offset()\r\n self.afn[\"l_ankle_joint\"]=ankle_func_af\r\n \r\n knee_func=SinusoidFunction()\r\n knee_func.angular_frequency= self.parameters[\"step_frequency\"]\r\n knee_func.amplitude= self.parameters[\"knee_amplitude\"]\r\n knee_func.amplitude_offset= self.parameters[\"knee_amplitude_offset\"]\r\n knee_func.phase_offset= self.parameters[\"knee_phase_offset\"]\r\n self.pfn[\"l_knee_joint\"]=knee_func\r\n knee_func_af=knee_func.mirror_keep_amplitude_offset()\r\n self.afn[\"l_knee_joint\"]=knee_func_af\r\n \r\n #f3=SinusoidFunction()\r\n #f3.angular_frequency=self.parameters[\"step_frequency\"]\r\n #f3.amplitude=self.parameters[\"step_amplitude\"]\r\n #f3.amplitude_offset=self.parameters[\"step_amplitude_offset\"]\r\n #self.pfn[\"l_thigh_joint\"]= f3\r\n #f33=f3.clone()\r\n #f33.amplitude_offset = self.parameters[\"ankle_amplitude_offset\"]\r\n #f33.amplitude = self.parameters[\"ankle_amplitude\"]\r\n #self.pfn[\"l_ankle_joint\"]=f33\r\n #f4=f3.mirror()\r\n ##f4.amplitude_offset -= 0.4\r\n #self.pfn[\"l_knee_joint\"]=f4\r\n \r\n #f5=f3.mirror_keep_amplitude_offset()\r\n #self.afn[\"l_thigh_joint\"]=f5\r\n \r\n #f6=f33.mirror_keep_amplitude_offset()\r\n #self.afn[\"l_ankle_joint\"]=f6\r\n #f7=f5.mirror()\r\n ##f7.amplitude_offset -= 0.4\r\n #self.afn[\"l_knee_joint\"]=f7\r\n \r\n self.generate_right()\r\n \r\n self.show()\r\n \r\n def generate_right(self):\r\n \"\"\"\r\n Mirror CPG functions from left to right and antiphase right\r\n \"\"\"\r\n l=[ v[2:] for v in self.pfn.keys()]\r\n for j in l:\r\n self.pfn[\"r_\"+j]=self.afn[\"l_\"+j].mirror_keep_amplitude_offset()\r\n self.pfn[\"r_\"+j].phase_offset += math.pi\r\n self.afn[\"r_\"+j]=self.pfn[\"l_\"+j].mirror_keep_amplitude_offset()\r\n self.afn[\"r_\"+j].phase_offset += math.pi\r\n \r\n def get(self, phase, x, velocity):\r\n \"\"\" Obtain the joint angles for a given phase, position in cycle (x 0,1)) and velocity parameters \"\"\"\r\n angles={}\r\n for j in self.pfn.keys():\r\n if phase:\r\n v=self.pfn[j].get(x)\r\n angles[j]=v\r\n else:\r\n angles[j]=self.afn[j].get(x)\r\n self.apply_velocity(angles,velocity,phase,x)\r\n return angles\r\n \r\n def show(self):\r\n \"\"\"\r\n Display the CPG functions used\r\n \"\"\"\r\n for j in self.pfn.keys():\r\n print j,\"p\",self.pfn[j],\"a\",self.afn[j] \r\n print self.pfn[\"l_knee_joint\"].amplitude_offset\r\n\r\n def apply_velocity(self, angles, velocity, phase, x):\r\n \"\"\" Modify on the walk-on-spot joint angles to apply the velocity vector\"\"\"\r\n \r\n # VX\r\n v=velocity[0]*self.parameters[\"vx_amplitude\"]\r\n d=(x*2-1)*v\r\n if phase:\r\n angles[\"l_thigh_joint\"]+=d\r\n angles[\"l_ankle_joint\"]+=d\r\n angles[\"r_thigh_joint\"]+=d\r\n angles[\"r_ankle_joint\"]+=d\r\n else:\r\n angles[\"l_thigh_joint\"]-=d\r\n angles[\"l_ankle_joint\"]-=d\r\n 
angles[\"r_thigh_joint\"]-=d\r\n angles[\"r_ankle_joint\"]-=d\r\n\r\n # VY\r\n v=velocity[1]*self.parameters[\"vy_amplitude\"]\r\n d=(x)*v\r\n d2=(1-x)*v\r\n if v>=0:\r\n if phase:\r\n angles[\"l_hip_joint\"]-=d\r\n angles[\"l_foot_joint\"]-=d\r\n angles[\"r_hip_joint\"]+=d\r\n angles[\"r_foot_joint\"]+=d\r\n else:\r\n angles[\"l_hip_joint\"]-=d2\r\n angles[\"l_foot_joint\"]-=d2\r\n angles[\"r_hip_joint\"]+=d2\r\n angles[\"r_foot_joint\"]+=d2\r\n else:\r\n if phase:\r\n angles[\"l_hip_joint\"]+=d2\r\n angles[\"l_foot_joint\"]+=d2\r\n angles[\"r_hip_joint\"]-=d2\r\n angles[\"r_foot_joint\"]-=d2\r\n else:\r\n angles[\"l_hip_joint\"]+=d\r\n angles[\"l_foot_joint\"]+=d\r\n angles[\"r_hip_joint\"]-=d\r\n angles[\"r_foot_joint\"]-=d\r\n \r\n ## VT\r\n #v=velocity[2]*self.parameters[\"vt_amplitude\"]\r\n #d=(x)*v\r\n #d2=(1-x)*v\r\n #if v>=0:\r\n #if phase:\r\n #angles[\"j_pelvis_l\"]=-d\r\n #angles[\"j_pelvis_r\"]=d\r\n #else:\r\n #angles[\"j_pelvis_l\"]=-d2\r\n #angles[\"j_pelvis_r\"]=d2\r\n #else:\r\n #if phase:\r\n #angles[\"j_pelvis_l\"]=d2\r\n #angles[\"j_pelvis_r\"]=-d2\r\n #else:\r\n #angles[\"j_pelvis_l\"]=d\r\n #angles[\"j_pelvis_r\"]=-d\r\n\r\nclass Walker:\r\n \"\"\"\r\n Class for making RobotisMini walk\r\n \"\"\"\r\n def __init__(self, robotis_mini_ci, real_robot):\r\n \r\n self.robotis_mini_ci=robotis_mini_ci\r\n \r\n self.real_robot = real_robot\r\n \r\n self.displacing=False #When the robot is walking AND displacing in the plane (velocity is non-zero)\r\n self.walking=False #When the robot is walking: moving up and down the legs\r\n self.velocity=[0,0,0]\r\n \r\n #Default walking params\r\n self.walking_params = {}\r\n self.walking_params['foot'] = [0.4,0,0]\r\n self.walking_params['ankle'] = [-0.01,-0.20,0]\r\n self.walking_params['knee'] = [0.4,0.7,0]\r\n self.walking_params['thigh'] = [-0.4,-0.7,0]\r\n self.walking_params['hip'] = [0.4,0,0]\r\n \r\n self.wb_walkerfunc=WholeBodyWalkerFunction(self.walking_params)\r\n\r\n self.initial_wq = self.wb_walkerfunc.get(True, 0, [0,0,0]) #First joint configuration to start the walking motion\r\n\r\n print \"__init__:initial_wq\"\r\n j_names=self.initial_wq.keys()\r\n for jn in j_names:\r\n print jn + str(\":\") + str(self.initial_wq[jn])\r\n \r\n self._th_walk=None #Walking thread\r\n \r\n self._cycle_period = 10 #seconds\r\n\r\n self._sub_cmd_vel=rospy.Subscriber(robotis_mini_ci.ns+\"cmd_vel\", Twist,self._cb_cmd_vel, queue_size=1)\r\n self._sub_cmd_stop=rospy.Subscriber(robotis_mini_ci.ns+\"stop_srv\", Empty,self._cb_cmd_stop, queue_size=1)\r\n self._sub_cmd_restart=rospy.Subscriber(robotis_mini_ci.ns+\"restart_srv\", Empty,self._cb_cmd_restart, queue_size=1)\r\n self._sub_cmd_restart=rospy.Subscriber(robotis_mini_ci.ns+\"walking_params\", Float64MultiArray,self._cb_new_walking_params, queue_size=1)\r\n \r\n if not self.real_robot:\r\n self.pubs = {}\r\n # Wait until the joints have been populated\r\n while self.robotis_mini_ci.q_names is None:\r\n time.sleep(1)\r\n for jn in self.robotis_mini_ci.q_names:\r\n self.pubs[jn] = rospy.Publisher('/robotis_mini/' + jn + '_position_controller/command', Float64, queue_size=1)\r\n rospy.loginfo(\"Waiting for gazebo services\")\r\n rospy.wait_for_service('/gazebo/pause_physics')\r\n self.pause_simulation_srv = rospy.ServiceProxy('/gazebo/pause_physics', std_srvs.srv.Empty)\r\n rospy.wait_for_service('/gazebo/reset_world')\r\n self.reset_world_srv = rospy.ServiceProxy('/gazebo/reset_world', std_srvs.srv.Empty)\r\n rospy.wait_for_service('/gazebo/unpause_physics')\r\n self.unpause_simulation_srv 
= rospy.ServiceProxy('/gazebo/unpause_physics', std_srvs.srv.Empty)\r\n\r\n def _cb_new_walking_params(self,msg):\r\n \"\"\"\r\n Processes a new set of parameters\r\n \"\"\"\r\n print \"Walker new set of parameters received\"\r\n self._cycle_period = msg.data[0]\r\n self.walking_params['foot'] = [msg.data[1],msg.data[2],msg.data[3]]\r\n self.walking_params['ankle'] = [msg.data[4],msg.data[5],msg.data[6]]\r\n self.walking_params['knee'] = [msg.data[7],msg.data[8],msg.data[9]]\r\n self.walking_params['thigh'] = [msg.data[10],msg.data[11],msg.data[12]]\r\n self.walking_params['hip'] = [msg.data[13],msg.data[14],msg.data[15]]\r\n \r\n self.wb_walkerfunc=WholeBodyWalkerFunction(self.walking_params)\r\n\r\n self.initial_wq = self.wb_walkerfunc.get(True, 0, [0,0,0]) #First joint configuration to start the walking motion\r\n\r\n print \"initial_wq\"\r\n j_names=self.initial_wq.keys()\r\n for jn in j_names:\r\n print jn + str(\":\") + str(self.initial_wq[jn])\r\n \r\n def _cb_cmd_restart(self,msg):\r\n \"\"\"\r\n Processes cmd_restart and to start a new trial\r\n \"\"\"\r\n print \"Walker restart command received\"\r\n \r\n #Stop the running thread\r\n while self.displacing or self.walking or self._th_walk:\r\n rospy.loginfo('Stopping walking thread')\r\n self.stop()\r\n \r\n #If the robot is simuated -> send to initial configuration\r\n if not self.real_robot:\r\n rospy.loginfo(\"Sending robot to zero configuration\")\r\n for jn in self.robotis_mini_ci.q_names:\r\n self.pubs[jn].publish(0.0)\r\n \r\n time.sleep(1)\r\n \r\n #If the robot is simulated -> reset simulation\r\n try:\r\n self.pause_simulation_srv()\r\n rospy.loginfo( \"Paused gazebo\")\r\n time.sleep(1)\r\n self.reset_world_srv()\r\n rospy.loginfo( \"Reseting gazebo\")\r\n time.sleep(1)\r\n self.unpause_simulation_srv()\r\n rospy.loginfo( \"Unpaused gazebo\")\r\n time.sleep(1)\r\n except rospy.ServiceException, e:\r\n print \"Service call failed: %s\"%e\r\n \r\n def _cb_cmd_vel(self,msg):\r\n \"\"\"\r\n Processes cmd_vel and update walker speed\r\n \"\"\"\r\n print \"Walker velocity command received: \",msg\r\n vx=msg.linear.x\r\n vy=msg.linear.y\r\n vt=msg.angular.z\r\n self.start()\r\n self.set_desired_velocity(vx,vy,vt)\r\n \r\n def _cb_cmd_stop(self,msg):\r\n \"\"\"\r\n Processes cmd_stop\r\n \"\"\"\r\n print \"Walker stop command received: \"\r\n self.stop()\r\n \r\n def goto_initial_wq(self):\r\n \"\"\"\r\n If not there yet, go to initial walking configuration\r\n \"\"\"\r\n rospy.loginfo(\"Going to initial walking configuration\")\r\n while self.get_qdist_to_initial_wq()>0.1:\r\n rospy.loginfo(\"Commanding to go to initial walking configuration\")\r\n print \"Initial configuration\"\r\n print self.initial_wq\r\n self.robotis_mini_ci.set_qd_interpolated(self.initial_wq, 2)\r\n rospy.sleep(2) \r\n rospy.loginfo(\"Initial walking configuration reached\")\r\n print \"Distance\",self.get_qdist_to_initial_wq() \r\n \r\n def start(self):\r\n if not self.displacing:\r\n self.displacing=True \r\n self.goto_initial_wq()\r\n self._th_walk=Thread(target=self._do_walk)\r\n self._th_walk.start()\r\n self.walking=True\r\n \r\n def stop(self):\r\n if self.displacing:\r\n self.walking=False\r\n rospy.loginfo(\"Waiting for stopped\")\r\n while not rospy.is_shutdown() and self._th_walk is not None:\r\n rospy.sleep(0.1) \r\n rospy.loginfo(\"Stopped\")\r\n self.displacing=False\r\n \r\n def set_desired_velocity(self,x,y,t):\r\n self.desired_velocity=[x,y,t]\r\n\r\n def _do_walk(self):\r\n \"\"\"\r\n Main walking loop, smoothly update velocity vectors 
and apply corresponding joint configurations\r\n \"\"\"\r\n rospy.loginfo(\"Started walking thread\")\r\n wb_walkerfunc=self.wb_walkerfunc\r\n \r\n # Global walk loop\r\n n=50\r\n print \"Thread rate\", 1.0/(self._cycle_period/(2.0*n))\r\n r=rospy.Rate(1.0/(self._cycle_period/(2.0*n)))\r\n p=True\r\n i=0\r\n self.current_velocity=[0,0,0]\r\n while not rospy.is_shutdown() and (self.walking or i<n or self.is_displacing()):\r\n if not self.walking:\r\n self.desired_velocity=[0,0,0]\r\n #if not self.is_displacing() and i==0: # Do not move if nothing to do and already at 0\r\n # self.update_current_velocity(self.desired_velocity, n)\r\n # r.sleep()\r\n # continue\r\n x=float(i)/n \r\n qd_curr=wb_walkerfunc.get(p, x, self.current_velocity)\r\n self.update_current_velocity(self.desired_velocity, n)\r\n self.robotis_mini_ci.set_qd(qd_curr)\r\n i+=1\r\n if i>n:\r\n i=0\r\n p=not p\r\n r.sleep()\r\n rospy.loginfo(\"Finished walking thread\")\r\n \r\n self._th_walk=None\r\n\r\n def is_displacing(self):\r\n \"\"\"\r\n Checks if the current velocity is not zero and returns True in that case\r\n \"\"\"\r\n e=0.02\r\n for v in self.current_velocity:\r\n if abs(v)>e: return True\r\n return False\r\n \r\n def update_current_velocity(self, target_velocity, n):\r\n \"\"\"\r\n A pseudo-interpolation to a target velocity\r\n \"\"\"\r\n a=3/float(n)\r\n b=1-a\r\n self.current_velocity=[a*tv+b*cv for (tv,cv) in zip(target_velocity, self.current_velocity)]\r\n \r\n def get_qdist_to_initial_wq(self):\r\n \"\"\"\r\n Computes the absolute distance between the current robot joint state and the initial walking configuration\r\n \"\"\"\r\n current_q=self.robotis_mini_ci.get_q()\r\n return get_distance(self.initial_wq, current_q)\r\n\r\ndef get_distance(qa_dict, qb_dict):\r\n \"\"\"\r\n Computes sum of absolute distances between two sets of joints represented as dictionaries of (jointName, jointConfiguration)\r\n \"\"\"\r\n d=0\r\n j_names=qa_dict.keys()\r\n if len(j_names)==0:\r\n rospy.loginfo(\"Length is 0\")\r\n return 0\r\n for jn in j_names:\r\n d+=abs(qb_dict[jn]-qa_dict[jn])\r\n d/=len(j_names)\r\n return d\r\n\r\nif __name__==\"__main__\":\r\n rospy.init_node(\"walker\")\r\n \r\n parser = argparse.ArgumentParser(description='Walker trajectory generator')\r\n parser.add_argument('--real',action='store_true', help='define when using the real robot')\r\n \r\n options, args = parser.parse_known_args()\r\n \r\n if options.real:\r\n rospy.loginfo(\"Real Robot!\")\r\n else:\r\n rospy.loginfo(\"Simulated Robot!\")\r\n \r\n rospy.loginfo(\"Instantiating RobotisMini RobotisMiniControlInterface\")\r\n robotis_mini_ci=RobotisMiniControlInterface(real_robot=options.real)\r\n rospy.loginfo(\"Instantiating RobotisMini Walker\")\r\n walker=Walker(robotis_mini_ci, options.real)\r\n \r\n rospy.loginfo(\"RobotisMini Walker Ready\")\r\n while not rospy.is_shutdown():\r\n time.sleep(1)\r\n" ]
true
99,426
c4b02ea5abe6d31d23bce24384e080ca57a9d55e
from django.urls import path from . import views urlpatterns = [ path('',views.home,name='home'), path('home',views.home,name='home'), path('book',views.book,name='book'), path('comment', views.comment, name='comment'), path('sign',views.sign,name='sign'), path('sub',views.sub,name='sub'), path('login',views.login,name='login'), path('new',views.new,name='new'), path('old',views.old,name='old'), path('oldsubmit',views.oldsubmit,name='oldsubmit'), path('old_bookings',views.old_bookings,name='old_bookings'), path('submit',views.submit,name='submit'), path('doc',views.doc,name='doc'), path('doclogin',views.doclogin,name='doclogin'), path('logout',views.logout,name='logout'), path('profile',views.profile,name='profile'), path('appoinment',views.appoinment,name='appoinment'), path('edit',views.edit,name='edit'), path('save',views.save,name='save'), path('today',views.today_booking,name='today'), path('all',views.all,name='all'), path('date',views.date,name='date') # ]
from django.urls import path
from . import views
urlpatterns = [

    path('',views.home,name='home'),
    path('home',views.home,name='home'),
    path('book',views.book,name='book'),
    path('comment', views.comment, name='comment'),
    path('sign',views.sign,name='sign'),
    path('sub',views.sub,name='sub'),
    path('login',views.login,name='login'),
    path('new',views.new,name='new'),
    path('old',views.old,name='old'),
    path('oldsubmit',views.oldsubmit,name='oldsubmit'),
    path('old_bookings',views.old_bookings,name='old_bookings'),
    path('submit',views.submit,name='submit'),
    path('doc',views.doc,name='doc'),
    path('doclogin',views.doclogin,name='doclogin'),
    path('logout',views.logout,name='logout'),
    path('profile',views.profile,name='profile'),
    path('appoinment',views.appoinment,name='appoinment'),
    path('edit',views.edit,name='edit'),
    path('save',views.save,name='save'),
    path('today',views.today_booking,name='today'),
    path('all',views.all,name='all'),
    path('date',views.date,name='date')

    #
]
false
99,427
fac0b38882351719c929c32d0eb0f0d4cc8182fb
import os
import logging

from cloudpks import RestClient

logging.basicConfig(level=os.getenv('vke_log_level'),
                    format='%(asctime)s %(name)s %(levelname)s %(message)s'
                    )
logger = logging.getLogger(__name__)
# Silence the noisy third-party HTTP loggers.
logging.getLogger('requests').setLevel(logging.CRITICAL)
logging.getLogger('urllib3').setLevel(logging.CRITICAL)


class User(object):
    """
    This is inspired by work from Grant.
    The user and organisation management runs through the centralised
    Cloud Services Portal and as such, we use a different baseurl for
    this module when compared with the other modules.
    """

    def __init__(self, server, api_key, auth_token):
        self._server = server
        self._api_key = api_key
        self.header = {
            'Content-Type': "application/json",
            'csp-auth-token': auth_token
        }

    def remove(self, session, id, username):
        """Remove a user (by email address) from the organisation ``id``."""
        payload = {
            "emails": username
        }
        logger.debug(f'Payload: {payload}')
        response = session.do_patch(self._server, self.header, f'{id}/users/', payload, 'DISCOVERY')
        return response

    def invite(self, session, id, usernames, org_role='org_member', vke=False):
        """Invite users to the organisation, optionally granting the VKE service role."""
        payload = {
            'usernames': usernames,
            'orgRoleName': org_role,
            'serviceRolesDtos': []
        }
        if vke:
            payload['serviceRolesDtos'].append({
                'serviceDefinitionLink': ('/csp/gateway/slc/api/definitions'
                                          '/external'
                                          '/o3ecbsAvjpw6lmL3aliJX29zVhE_'
                                          ),
                'serviceRoleNames': ['vke:service-user']
            })

        logger.debug(f'Payload: {payload}')
        response = session.do_post(self._server, self.header, f'{id}/invitations', payload, 'DISCOVERY')
        return response

    def list(self, session, id):
        """Return the members of the organisation ``id``."""
        return session.do_get(self._server, self.header, f'{id}/users/', 'DISCOVERY')
[ "import os\nimport logging\nfrom cloudpks import RestClient\n\nlogging.basicConfig(level=os.getenv('vke_log_level'),\n format='%(asctime)s %(name)s %(levelname)s %(message)s'\n )\nlogger = logging.getLogger(__name__)\nlogging.getLogger('requests').setLevel(logging.CRITICAL)\nlogging.getLogger('urllib3').setLevel(logging.CRITICAL)\n\nclass User(object):\n \"\"\"\n This is inspired by work from Grant \n The user and organisation management runs through the centralised\n Cloud Services Portal and as such, we use a different baseurl for\n this module when compared with the other modules.\n \"\"\"\n\n def __init__(self, server, api_key, auth_token):\n self._server = server\n self._api_key = api_key\n self.header = {\n 'Content-Type': \"application/json\",\n 'csp-auth-token': auth_token\n }\n\n def remove(self, session, id, username):\n payload = {\n \"emails\": username\n }\n logger.debug(f'Payload: {payload}')\n response = session.do_patch(self._server, self.header, f'{id}/users/', payload, 'DISCOVERY')\n return response\n\n def invite(self, session,\n id,\n usernames,\n org_role='org_member',\n vke=False\n ):\n payload = {\n 'usernames': usernames,\n 'orgRoleName': org_role,\n 'serviceRolesDtos': []\n }\n if vke:\n payload['serviceRolesDtos'].append({\n 'serviceDefinitionLink': ('/csp/gateway/slc/api/definitions'\n '/external'\n '/o3ecbsAvjpw6lmL3aliJX29zVhE_'\n ),\n 'serviceRoleNames':\n [\n 'vke:service-user'\n ]\n })\n\n logger.debug(f'Payload: {payload}')\n response = session.do_post(self._server, self.header, f'{id}/invitations', payload, 'DISCOVERY')\n return response\n\n def list(self, session, id):\n return session.do_get(self._server, self.header, f'{id}/users/', 'DISCOVERY')\n", "import os\nimport logging\nfrom cloudpks import RestClient\nlogging.basicConfig(level=os.getenv('vke_log_level'), format=\n '%(asctime)s %(name)s %(levelname)s %(message)s')\nlogger = logging.getLogger(__name__)\nlogging.getLogger('requests').setLevel(logging.CRITICAL)\nlogging.getLogger('urllib3').setLevel(logging.CRITICAL)\n\n\nclass User(object):\n \"\"\"\n This is inspired by work from Grant \n The user and organisation management runs through the centralised\n Cloud Services Portal and as such, we use a different baseurl for\n this module when compared with the other modules.\n \"\"\"\n\n def __init__(self, server, api_key, auth_token):\n self._server = server\n self._api_key = api_key\n self.header = {'Content-Type': 'application/json', 'csp-auth-token':\n auth_token}\n\n def remove(self, session, id, username):\n payload = {'emails': username}\n logger.debug(f'Payload: {payload}')\n response = session.do_patch(self._server, self.header,\n f'{id}/users/', payload, 'DISCOVERY')\n return response\n\n def invite(self, session, id, usernames, org_role='org_member', vke=False):\n payload = {'usernames': usernames, 'orgRoleName': org_role,\n 'serviceRolesDtos': []}\n if vke:\n payload['serviceRolesDtos'].append({'serviceDefinitionLink':\n '/csp/gateway/slc/api/definitions/external/o3ecbsAvjpw6lmL3aliJX29zVhE_'\n , 'serviceRoleNames': ['vke:service-user']})\n logger.debug(f'Payload: {payload}')\n response = session.do_post(self._server, self.header,\n f'{id}/invitations', payload, 'DISCOVERY')\n return response\n\n def list(self, session, id):\n return session.do_get(self._server, self.header, f'{id}/users/',\n 'DISCOVERY')\n", "<import token>\nlogging.basicConfig(level=os.getenv('vke_log_level'), format=\n '%(asctime)s %(name)s %(levelname)s %(message)s')\nlogger = 
logging.getLogger(__name__)\nlogging.getLogger('requests').setLevel(logging.CRITICAL)\nlogging.getLogger('urllib3').setLevel(logging.CRITICAL)\n\n\nclass User(object):\n \"\"\"\n This is inspired by work from Grant \n The user and organisation management runs through the centralised\n Cloud Services Portal and as such, we use a different baseurl for\n this module when compared with the other modules.\n \"\"\"\n\n def __init__(self, server, api_key, auth_token):\n self._server = server\n self._api_key = api_key\n self.header = {'Content-Type': 'application/json', 'csp-auth-token':\n auth_token}\n\n def remove(self, session, id, username):\n payload = {'emails': username}\n logger.debug(f'Payload: {payload}')\n response = session.do_patch(self._server, self.header,\n f'{id}/users/', payload, 'DISCOVERY')\n return response\n\n def invite(self, session, id, usernames, org_role='org_member', vke=False):\n payload = {'usernames': usernames, 'orgRoleName': org_role,\n 'serviceRolesDtos': []}\n if vke:\n payload['serviceRolesDtos'].append({'serviceDefinitionLink':\n '/csp/gateway/slc/api/definitions/external/o3ecbsAvjpw6lmL3aliJX29zVhE_'\n , 'serviceRoleNames': ['vke:service-user']})\n logger.debug(f'Payload: {payload}')\n response = session.do_post(self._server, self.header,\n f'{id}/invitations', payload, 'DISCOVERY')\n return response\n\n def list(self, session, id):\n return session.do_get(self._server, self.header, f'{id}/users/',\n 'DISCOVERY')\n", "<import token>\nlogging.basicConfig(level=os.getenv('vke_log_level'), format=\n '%(asctime)s %(name)s %(levelname)s %(message)s')\n<assignment token>\nlogging.getLogger('requests').setLevel(logging.CRITICAL)\nlogging.getLogger('urllib3').setLevel(logging.CRITICAL)\n\n\nclass User(object):\n \"\"\"\n This is inspired by work from Grant \n The user and organisation management runs through the centralised\n Cloud Services Portal and as such, we use a different baseurl for\n this module when compared with the other modules.\n \"\"\"\n\n def __init__(self, server, api_key, auth_token):\n self._server = server\n self._api_key = api_key\n self.header = {'Content-Type': 'application/json', 'csp-auth-token':\n auth_token}\n\n def remove(self, session, id, username):\n payload = {'emails': username}\n logger.debug(f'Payload: {payload}')\n response = session.do_patch(self._server, self.header,\n f'{id}/users/', payload, 'DISCOVERY')\n return response\n\n def invite(self, session, id, usernames, org_role='org_member', vke=False):\n payload = {'usernames': usernames, 'orgRoleName': org_role,\n 'serviceRolesDtos': []}\n if vke:\n payload['serviceRolesDtos'].append({'serviceDefinitionLink':\n '/csp/gateway/slc/api/definitions/external/o3ecbsAvjpw6lmL3aliJX29zVhE_'\n , 'serviceRoleNames': ['vke:service-user']})\n logger.debug(f'Payload: {payload}')\n response = session.do_post(self._server, self.header,\n f'{id}/invitations', payload, 'DISCOVERY')\n return response\n\n def list(self, session, id):\n return session.do_get(self._server, self.header, f'{id}/users/',\n 'DISCOVERY')\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n\n\nclass User(object):\n \"\"\"\n This is inspired by work from Grant \n The user and organisation management runs through the centralised\n Cloud Services Portal and as such, we use a different baseurl for\n this module when compared with the other modules.\n \"\"\"\n\n def __init__(self, server, api_key, auth_token):\n self._server = server\n self._api_key = api_key\n self.header = {'Content-Type': 'application/json', 
'csp-auth-token':\n auth_token}\n\n def remove(self, session, id, username):\n payload = {'emails': username}\n logger.debug(f'Payload: {payload}')\n response = session.do_patch(self._server, self.header,\n f'{id}/users/', payload, 'DISCOVERY')\n return response\n\n def invite(self, session, id, usernames, org_role='org_member', vke=False):\n payload = {'usernames': usernames, 'orgRoleName': org_role,\n 'serviceRolesDtos': []}\n if vke:\n payload['serviceRolesDtos'].append({'serviceDefinitionLink':\n '/csp/gateway/slc/api/definitions/external/o3ecbsAvjpw6lmL3aliJX29zVhE_'\n , 'serviceRoleNames': ['vke:service-user']})\n logger.debug(f'Payload: {payload}')\n response = session.do_post(self._server, self.header,\n f'{id}/invitations', payload, 'DISCOVERY')\n return response\n\n def list(self, session, id):\n return session.do_get(self._server, self.header, f'{id}/users/',\n 'DISCOVERY')\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n\n\nclass User(object):\n <docstring token>\n\n def __init__(self, server, api_key, auth_token):\n self._server = server\n self._api_key = api_key\n self.header = {'Content-Type': 'application/json', 'csp-auth-token':\n auth_token}\n\n def remove(self, session, id, username):\n payload = {'emails': username}\n logger.debug(f'Payload: {payload}')\n response = session.do_patch(self._server, self.header,\n f'{id}/users/', payload, 'DISCOVERY')\n return response\n\n def invite(self, session, id, usernames, org_role='org_member', vke=False):\n payload = {'usernames': usernames, 'orgRoleName': org_role,\n 'serviceRolesDtos': []}\n if vke:\n payload['serviceRolesDtos'].append({'serviceDefinitionLink':\n '/csp/gateway/slc/api/definitions/external/o3ecbsAvjpw6lmL3aliJX29zVhE_'\n , 'serviceRoleNames': ['vke:service-user']})\n logger.debug(f'Payload: {payload}')\n response = session.do_post(self._server, self.header,\n f'{id}/invitations', payload, 'DISCOVERY')\n return response\n\n def list(self, session, id):\n return session.do_get(self._server, self.header, f'{id}/users/',\n 'DISCOVERY')\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n\n\nclass User(object):\n <docstring token>\n\n def __init__(self, server, api_key, auth_token):\n self._server = server\n self._api_key = api_key\n self.header = {'Content-Type': 'application/json', 'csp-auth-token':\n auth_token}\n\n def remove(self, session, id, username):\n payload = {'emails': username}\n logger.debug(f'Payload: {payload}')\n response = session.do_patch(self._server, self.header,\n f'{id}/users/', payload, 'DISCOVERY')\n return response\n <function token>\n\n def list(self, session, id):\n return session.do_get(self._server, self.header, f'{id}/users/',\n 'DISCOVERY')\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n\n\nclass User(object):\n <docstring token>\n\n def __init__(self, server, api_key, auth_token):\n self._server = server\n self._api_key = api_key\n self.header = {'Content-Type': 'application/json', 'csp-auth-token':\n auth_token}\n\n def remove(self, session, id, username):\n payload = {'emails': username}\n logger.debug(f'Payload: {payload}')\n response = session.do_patch(self._server, self.header,\n f'{id}/users/', payload, 'DISCOVERY')\n return response\n <function token>\n <function token>\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n\n\nclass User(object):\n <docstring token>\n\n def __init__(self, server, api_key, auth_token):\n self._server = server\n self._api_key = api_key\n self.header = {'Content-Type': 
'application/json', 'csp-auth-token':\n auth_token}\n <function token>\n <function token>\n <function token>\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n\n\nclass User(object):\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n<class token>\n" ]
false
99,428
3c3065547bbc9774a13103c4a7e8ed7b65874be4
"""mcpython - a minecraft clone written in python licenced under MIT-licence authors: uuk, xkcdjerry original game by forgleman licenced under MIT-licence minecraft by Mojang blocks based on 1.14.4.jar of minecraft, downloaded on 20th of July, 2019""" import globals as G import chat.command.Command from chat.command.Command import ParseBridge, ParseType, ParseMode, SubCommand import util.math @G.registry class CommandGenerate(chat.command.Command.Command): """ class for /generate command """ @staticmethod def insert_parse_bridge(parsebridge: ParseBridge): parsebridge.main_entry = "generate" parsebridge.add_subcommand(SubCommand(ParseType.INT, mode=ParseMode.OPTIONAL).add_subcommand( SubCommand(ParseType.INT).add_subcommand(SubCommand(ParseType.INT, mode=ParseMode.OPTIONAL).add_subcommand( SubCommand(ParseType.INT))))) @staticmethod def parse(values: list, modes: list, info): dim = G.world.get_active_dimension() if len(values) > 0: # have we definite an chunk? chunkf = tuple(values[:2]) chunkt = tuple(values[2:]) if len(values) > 2 else chunkf else: chunkf = chunkt = util.math.sectorize(G.window.position) fx, fz = chunkf tx, tz = chunkt if fx > tx: fx, tx = tx, fx if fz > tz: fz, tz = tz, fz for x in range(fx, tx): for z in range(fz, tz): G.worldgenerationhandler.generate_chunk(dim.get_chunk(x, z, generate=False)) G.world.process_entire_queue() @staticmethod def get_help() -> list: return ["/generate [<x> <z> [<tox> <toz>]]: generates the chunk you are in if no one is specified or the " "specified area, else the specified"]
[ "\"\"\"mcpython - a minecraft clone written in python licenced under MIT-licence\nauthors: uuk, xkcdjerry\n\noriginal game by forgleman licenced under MIT-licence\nminecraft by Mojang\n\nblocks based on 1.14.4.jar of minecraft, downloaded on 20th of July, 2019\"\"\"\nimport globals as G\nimport chat.command.Command\nfrom chat.command.Command import ParseBridge, ParseType, ParseMode, SubCommand\nimport util.math\n\n\[email protected]\nclass CommandGenerate(chat.command.Command.Command):\n \"\"\"\n class for /generate command\n \"\"\"\n @staticmethod\n def insert_parse_bridge(parsebridge: ParseBridge):\n parsebridge.main_entry = \"generate\"\n parsebridge.add_subcommand(SubCommand(ParseType.INT, mode=ParseMode.OPTIONAL).add_subcommand(\n SubCommand(ParseType.INT).add_subcommand(SubCommand(ParseType.INT, mode=ParseMode.OPTIONAL).add_subcommand(\n SubCommand(ParseType.INT)))))\n\n @staticmethod\n def parse(values: list, modes: list, info):\n dim = G.world.get_active_dimension()\n if len(values) > 0: # have we definite an chunk?\n chunkf = tuple(values[:2])\n chunkt = tuple(values[2:]) if len(values) > 2 else chunkf\n else:\n chunkf = chunkt = util.math.sectorize(G.window.position)\n fx, fz = chunkf\n tx, tz = chunkt\n if fx > tx: fx, tx = tx, fx\n if fz > tz: fz, tz = tz, fz\n for x in range(fx, tx):\n for z in range(fz, tz):\n G.worldgenerationhandler.generate_chunk(dim.get_chunk(x, z, generate=False))\n G.world.process_entire_queue()\n\n @staticmethod\n def get_help() -> list:\n return [\"/generate [<x> <z> [<tox> <toz>]]: generates the chunk you are in if no one is specified or the \"\n \"specified area, else the specified\"]\n\n", "<docstring token>\nimport globals as G\nimport chat.command.Command\nfrom chat.command.Command import ParseBridge, ParseType, ParseMode, SubCommand\nimport util.math\n\n\[email protected]\nclass CommandGenerate(chat.command.Command.Command):\n \"\"\"\n class for /generate command\n \"\"\"\n\n @staticmethod\n def insert_parse_bridge(parsebridge: ParseBridge):\n parsebridge.main_entry = 'generate'\n parsebridge.add_subcommand(SubCommand(ParseType.INT, mode=ParseMode\n .OPTIONAL).add_subcommand(SubCommand(ParseType.INT).\n add_subcommand(SubCommand(ParseType.INT, mode=ParseMode.\n OPTIONAL).add_subcommand(SubCommand(ParseType.INT)))))\n\n @staticmethod\n def parse(values: list, modes: list, info):\n dim = G.world.get_active_dimension()\n if len(values) > 0:\n chunkf = tuple(values[:2])\n chunkt = tuple(values[2:]) if len(values) > 2 else chunkf\n else:\n chunkf = chunkt = util.math.sectorize(G.window.position)\n fx, fz = chunkf\n tx, tz = chunkt\n if fx > tx:\n fx, tx = tx, fx\n if fz > tz:\n fz, tz = tz, fz\n for x in range(fx, tx):\n for z in range(fz, tz):\n G.worldgenerationhandler.generate_chunk(dim.get_chunk(x, z,\n generate=False))\n G.world.process_entire_queue()\n\n @staticmethod\n def get_help() ->list:\n return [\n '/generate [<x> <z> [<tox> <toz>]]: generates the chunk you are in if no one is specified or the specified area, else the specified'\n ]\n", "<docstring token>\n<import token>\n\n\[email protected]\nclass CommandGenerate(chat.command.Command.Command):\n \"\"\"\n class for /generate command\n \"\"\"\n\n @staticmethod\n def insert_parse_bridge(parsebridge: ParseBridge):\n parsebridge.main_entry = 'generate'\n parsebridge.add_subcommand(SubCommand(ParseType.INT, mode=ParseMode\n .OPTIONAL).add_subcommand(SubCommand(ParseType.INT).\n add_subcommand(SubCommand(ParseType.INT, mode=ParseMode.\n 
OPTIONAL).add_subcommand(SubCommand(ParseType.INT)))))\n\n @staticmethod\n def parse(values: list, modes: list, info):\n dim = G.world.get_active_dimension()\n if len(values) > 0:\n chunkf = tuple(values[:2])\n chunkt = tuple(values[2:]) if len(values) > 2 else chunkf\n else:\n chunkf = chunkt = util.math.sectorize(G.window.position)\n fx, fz = chunkf\n tx, tz = chunkt\n if fx > tx:\n fx, tx = tx, fx\n if fz > tz:\n fz, tz = tz, fz\n for x in range(fx, tx):\n for z in range(fz, tz):\n G.worldgenerationhandler.generate_chunk(dim.get_chunk(x, z,\n generate=False))\n G.world.process_entire_queue()\n\n @staticmethod\n def get_help() ->list:\n return [\n '/generate [<x> <z> [<tox> <toz>]]: generates the chunk you are in if no one is specified or the specified area, else the specified'\n ]\n", "<docstring token>\n<import token>\n\n\[email protected]\nclass CommandGenerate(chat.command.Command.Command):\n <docstring token>\n\n @staticmethod\n def insert_parse_bridge(parsebridge: ParseBridge):\n parsebridge.main_entry = 'generate'\n parsebridge.add_subcommand(SubCommand(ParseType.INT, mode=ParseMode\n .OPTIONAL).add_subcommand(SubCommand(ParseType.INT).\n add_subcommand(SubCommand(ParseType.INT, mode=ParseMode.\n OPTIONAL).add_subcommand(SubCommand(ParseType.INT)))))\n\n @staticmethod\n def parse(values: list, modes: list, info):\n dim = G.world.get_active_dimension()\n if len(values) > 0:\n chunkf = tuple(values[:2])\n chunkt = tuple(values[2:]) if len(values) > 2 else chunkf\n else:\n chunkf = chunkt = util.math.sectorize(G.window.position)\n fx, fz = chunkf\n tx, tz = chunkt\n if fx > tx:\n fx, tx = tx, fx\n if fz > tz:\n fz, tz = tz, fz\n for x in range(fx, tx):\n for z in range(fz, tz):\n G.worldgenerationhandler.generate_chunk(dim.get_chunk(x, z,\n generate=False))\n G.world.process_entire_queue()\n\n @staticmethod\n def get_help() ->list:\n return [\n '/generate [<x> <z> [<tox> <toz>]]: generates the chunk you are in if no one is specified or the specified area, else the specified'\n ]\n", "<docstring token>\n<import token>\n\n\[email protected]\nclass CommandGenerate(chat.command.Command.Command):\n <docstring token>\n\n @staticmethod\n def insert_parse_bridge(parsebridge: ParseBridge):\n parsebridge.main_entry = 'generate'\n parsebridge.add_subcommand(SubCommand(ParseType.INT, mode=ParseMode\n .OPTIONAL).add_subcommand(SubCommand(ParseType.INT).\n add_subcommand(SubCommand(ParseType.INT, mode=ParseMode.\n OPTIONAL).add_subcommand(SubCommand(ParseType.INT)))))\n <function token>\n\n @staticmethod\n def get_help() ->list:\n return [\n '/generate [<x> <z> [<tox> <toz>]]: generates the chunk you are in if no one is specified or the specified area, else the specified'\n ]\n", "<docstring token>\n<import token>\n\n\[email protected]\nclass CommandGenerate(chat.command.Command.Command):\n <docstring token>\n\n @staticmethod\n def insert_parse_bridge(parsebridge: ParseBridge):\n parsebridge.main_entry = 'generate'\n parsebridge.add_subcommand(SubCommand(ParseType.INT, mode=ParseMode\n .OPTIONAL).add_subcommand(SubCommand(ParseType.INT).\n add_subcommand(SubCommand(ParseType.INT, mode=ParseMode.\n OPTIONAL).add_subcommand(SubCommand(ParseType.INT)))))\n <function token>\n <function token>\n", "<docstring token>\n<import token>\n\n\[email protected]\nclass CommandGenerate(chat.command.Command.Command):\n <docstring token>\n <function token>\n <function token>\n <function token>\n", "<docstring token>\n<import token>\n<class token>\n" ]
false
99,429
9933ca702421da2f0d6c3b9775bcc494b3452edb
import sys
import urllib.request, urllib.error, urllib.parse
import http.cookiejar


class HTTPMyDebugProcessor(urllib.request.BaseHandler):
    """Track HTTP Requests and responses with this custom handler. Be sure to
    add it to your build_opener call, or use: handler_order = 900 """
    # Note: the Python 2 original subclassed urllib2.AbstractHTTPHandler, which
    # does not exist in Python 3; BaseHandler is enough for the processor
    # hooks (http_request/http_response) defined below.

    def __init__(self, httpout=sys.stdout):
        self.httpout = httpout

    def http_request(self, request):
        if __debug__:
            # Request.get_host() was removed in Python 3.4; use the host attribute.
            host, full_url = request.host, request.get_full_url()
            url_path = full_url[full_url.find(host) + len(host):]
            self.httpout.write("%s\n" % request.get_full_url())
            self.httpout.write("\n")
            self.httpout.write("%s %s\n" % (request.get_method(), url_path))

            for header in request.header_items():
                self.httpout.write("%s: %s\n" % header[:])

            self.httpout.write("\n")

        return request

    def http_response(self, request, response):
        if __debug__:
            code, msg, hdrs = response.code, response.msg, response.info()
            self.httpout.write("HTTP/1.x %s %s\n" % (code, msg))
            self.httpout.write(str(hdrs))

        return response

    https_request = http_request
    https_response = http_response


# Example
cjar = http.cookiejar.LWPCookieJar()
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cjar), HTTPMyDebugProcessor())
# opener = urllib.request.build_opener(HTTPMyDebugProcessor(),)
urllib.request.install_opener(opener)
# response = urllib.request.urlopen("http://www.google.com")
# response = urllib.request.urlopen("https://www.idcourts.us/repository/start.do")
# response = urllib.request.urlopen("https://www.idcourts.us/repository/searchParty.do")
req = urllib.request.Request('http://www.microsoft.com/windows/windows-7/default.aspx')
# req = urllib.request.Request('https://www.idcourts.us/repository/start.do')
res = opener.open(req)

print(cjar)
cookie_str = ''  # guard against an empty cookie jar
for c in cjar:
    cookie_str = "%s=%s" % (c.name, c.value)
print(cookie_str)

req = urllib.request.Request('http://www.microsoft.com/windows/windows-xp/default.aspx')
# req.add_header("Cookie", cookie_str)
opener.open(req)
print(cjar)
[ "import sys\nimport urllib.request, urllib.error, urllib.parse\nimport http.cookiejar\n\nclass HTTPMyDebugProcessor(urllib2.AbstractHTTPHandler):\n \"\"\"Track HTTP Requests and responses with this custom handlers. Be sure to\n add it your build_opener call, or use: handler_order = 900 \"\"\"\n def __init__(self, httpout = sys.stdout):\n self.httpout = httpout\n def http_request(self, request):\n if __debug__:\n host, full_url = request.get_host(), request.get_full_url()\n url_path = full_url[full_url.find(host) + len(host):]\n self.httpout.write(\"%s\\n\" % request.get_full_url())\n self.httpout.write(\"\\n\")\n self.httpout.write(\"%s %s\\n\" % (request.get_method(), url_path))\n\n for header in request.header_items():\n self.httpout.write(\"%s: %s\\n\" % header[:])\n\n self.httpout.write(\"\\n\")\n\n return request\n\n def http_response(self, request, response):\n if __debug__:\n code, msg, hdrs = response.code, response.msg, response.info()\n self.httpout.write(\"HTTP/1.x %s %s\\n\" % (code, msg))\n self.httpout.write(str(hdrs))\n\n return response\n\n https_request = http_request\n https_response = http_response\n\n# Example\ncjar = http.cookiejar.LWPCookieJar()\nopener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cjar),HTTPMyDebugProcessor(),)\n#opener = urllib2.build_opener(HTTPMyDebugProcessor(),)\nurllib.request.install_opener(opener)\n##response = urllib2.urlopen(\"http://www.google.com\")\n#response = urllib2.urlopen(\"https://www.idcourts.us/repository/start.do\")\n#response = urllib2.urlopen(\"https://www.idcourts.us/repository/searchParty.do\")\nreq = urllib.request.Request('http://www.microsoft.com/windows/windows-7/default.aspx')\n#req = urllib2.Request('https://www.idcourts.us/repository/start.do')\nres = opener.open(req)\n\nprint(cjar)\nfor c in cjar:\n cookie_str = \"%s=%s\" % (c.name, c.value)\nprint(cookie_str)\n\nreq = urllib.request.Request('http://www.microsoft.com/windows/windows-xp/default.aspx')\n#req.add_header(\"Cookie\",cookie_str)\nopener.open(req)\nprint(cjar)\n", "import sys\nimport urllib.request, urllib.error, urllib.parse\nimport http.cookiejar\n\n\nclass HTTPMyDebugProcessor(urllib2.AbstractHTTPHandler):\n \"\"\"Track HTTP Requests and responses with this custom handlers. 
Be sure to\n add it your build_opener call, or use: handler_order = 900 \"\"\"\n\n def __init__(self, httpout=sys.stdout):\n self.httpout = httpout\n\n def http_request(self, request):\n if __debug__:\n host, full_url = request.get_host(), request.get_full_url()\n url_path = full_url[full_url.find(host) + len(host):]\n self.httpout.write('%s\\n' % request.get_full_url())\n self.httpout.write('\\n')\n self.httpout.write('%s %s\\n' % (request.get_method(), url_path))\n for header in request.header_items():\n self.httpout.write('%s: %s\\n' % header[:])\n self.httpout.write('\\n')\n return request\n\n def http_response(self, request, response):\n if __debug__:\n code, msg, hdrs = response.code, response.msg, response.info()\n self.httpout.write('HTTP/1.x %s %s\\n' % (code, msg))\n self.httpout.write(str(hdrs))\n return response\n https_request = http_request\n https_response = http_response\n\n\ncjar = http.cookiejar.LWPCookieJar()\nopener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(\n cjar), HTTPMyDebugProcessor())\nurllib.request.install_opener(opener)\nreq = urllib.request.Request(\n 'http://www.microsoft.com/windows/windows-7/default.aspx')\nres = opener.open(req)\nprint(cjar)\nfor c in cjar:\n cookie_str = '%s=%s' % (c.name, c.value)\nprint(cookie_str)\nreq = urllib.request.Request(\n 'http://www.microsoft.com/windows/windows-xp/default.aspx')\nopener.open(req)\nprint(cjar)\n", "<import token>\n\n\nclass HTTPMyDebugProcessor(urllib2.AbstractHTTPHandler):\n \"\"\"Track HTTP Requests and responses with this custom handlers. Be sure to\n add it your build_opener call, or use: handler_order = 900 \"\"\"\n\n def __init__(self, httpout=sys.stdout):\n self.httpout = httpout\n\n def http_request(self, request):\n if __debug__:\n host, full_url = request.get_host(), request.get_full_url()\n url_path = full_url[full_url.find(host) + len(host):]\n self.httpout.write('%s\\n' % request.get_full_url())\n self.httpout.write('\\n')\n self.httpout.write('%s %s\\n' % (request.get_method(), url_path))\n for header in request.header_items():\n self.httpout.write('%s: %s\\n' % header[:])\n self.httpout.write('\\n')\n return request\n\n def http_response(self, request, response):\n if __debug__:\n code, msg, hdrs = response.code, response.msg, response.info()\n self.httpout.write('HTTP/1.x %s %s\\n' % (code, msg))\n self.httpout.write(str(hdrs))\n return response\n https_request = http_request\n https_response = http_response\n\n\ncjar = http.cookiejar.LWPCookieJar()\nopener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(\n cjar), HTTPMyDebugProcessor())\nurllib.request.install_opener(opener)\nreq = urllib.request.Request(\n 'http://www.microsoft.com/windows/windows-7/default.aspx')\nres = opener.open(req)\nprint(cjar)\nfor c in cjar:\n cookie_str = '%s=%s' % (c.name, c.value)\nprint(cookie_str)\nreq = urllib.request.Request(\n 'http://www.microsoft.com/windows/windows-xp/default.aspx')\nopener.open(req)\nprint(cjar)\n", "<import token>\n\n\nclass HTTPMyDebugProcessor(urllib2.AbstractHTTPHandler):\n \"\"\"Track HTTP Requests and responses with this custom handlers. 
Be sure to\n add it your build_opener call, or use: handler_order = 900 \"\"\"\n\n def __init__(self, httpout=sys.stdout):\n self.httpout = httpout\n\n def http_request(self, request):\n if __debug__:\n host, full_url = request.get_host(), request.get_full_url()\n url_path = full_url[full_url.find(host) + len(host):]\n self.httpout.write('%s\\n' % request.get_full_url())\n self.httpout.write('\\n')\n self.httpout.write('%s %s\\n' % (request.get_method(), url_path))\n for header in request.header_items():\n self.httpout.write('%s: %s\\n' % header[:])\n self.httpout.write('\\n')\n return request\n\n def http_response(self, request, response):\n if __debug__:\n code, msg, hdrs = response.code, response.msg, response.info()\n self.httpout.write('HTTP/1.x %s %s\\n' % (code, msg))\n self.httpout.write(str(hdrs))\n return response\n https_request = http_request\n https_response = http_response\n\n\n<assignment token>\nurllib.request.install_opener(opener)\n<assignment token>\nprint(cjar)\nfor c in cjar:\n cookie_str = '%s=%s' % (c.name, c.value)\nprint(cookie_str)\n<assignment token>\nopener.open(req)\nprint(cjar)\n", "<import token>\n\n\nclass HTTPMyDebugProcessor(urllib2.AbstractHTTPHandler):\n \"\"\"Track HTTP Requests and responses with this custom handlers. Be sure to\n add it your build_opener call, or use: handler_order = 900 \"\"\"\n\n def __init__(self, httpout=sys.stdout):\n self.httpout = httpout\n\n def http_request(self, request):\n if __debug__:\n host, full_url = request.get_host(), request.get_full_url()\n url_path = full_url[full_url.find(host) + len(host):]\n self.httpout.write('%s\\n' % request.get_full_url())\n self.httpout.write('\\n')\n self.httpout.write('%s %s\\n' % (request.get_method(), url_path))\n for header in request.header_items():\n self.httpout.write('%s: %s\\n' % header[:])\n self.httpout.write('\\n')\n return request\n\n def http_response(self, request, response):\n if __debug__:\n code, msg, hdrs = response.code, response.msg, response.info()\n self.httpout.write('HTTP/1.x %s %s\\n' % (code, msg))\n self.httpout.write(str(hdrs))\n return response\n https_request = http_request\n https_response = http_response\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass HTTPMyDebugProcessor(urllib2.AbstractHTTPHandler):\n <docstring token>\n\n def __init__(self, httpout=sys.stdout):\n self.httpout = httpout\n\n def http_request(self, request):\n if __debug__:\n host, full_url = request.get_host(), request.get_full_url()\n url_path = full_url[full_url.find(host) + len(host):]\n self.httpout.write('%s\\n' % request.get_full_url())\n self.httpout.write('\\n')\n self.httpout.write('%s %s\\n' % (request.get_method(), url_path))\n for header in request.header_items():\n self.httpout.write('%s: %s\\n' % header[:])\n self.httpout.write('\\n')\n return request\n\n def http_response(self, request, response):\n if __debug__:\n code, msg, hdrs = response.code, response.msg, response.info()\n self.httpout.write('HTTP/1.x %s %s\\n' % (code, msg))\n self.httpout.write(str(hdrs))\n return response\n https_request = http_request\n https_response = http_response\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass HTTPMyDebugProcessor(urllib2.AbstractHTTPHandler):\n <docstring token>\n\n def __init__(self, httpout=sys.stdout):\n self.httpout = httpout\n\n def http_request(self, request):\n if __debug__:\n host, full_url = 
request.get_host(), request.get_full_url()\n url_path = full_url[full_url.find(host) + len(host):]\n self.httpout.write('%s\\n' % request.get_full_url())\n self.httpout.write('\\n')\n self.httpout.write('%s %s\\n' % (request.get_method(), url_path))\n for header in request.header_items():\n self.httpout.write('%s: %s\\n' % header[:])\n self.httpout.write('\\n')\n return request\n\n def http_response(self, request, response):\n if __debug__:\n code, msg, hdrs = response.code, response.msg, response.info()\n self.httpout.write('HTTP/1.x %s %s\\n' % (code, msg))\n self.httpout.write(str(hdrs))\n return response\n <assignment token>\n <assignment token>\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass HTTPMyDebugProcessor(urllib2.AbstractHTTPHandler):\n <docstring token>\n\n def __init__(self, httpout=sys.stdout):\n self.httpout = httpout\n\n def http_request(self, request):\n if __debug__:\n host, full_url = request.get_host(), request.get_full_url()\n url_path = full_url[full_url.find(host) + len(host):]\n self.httpout.write('%s\\n' % request.get_full_url())\n self.httpout.write('\\n')\n self.httpout.write('%s %s\\n' % (request.get_method(), url_path))\n for header in request.header_items():\n self.httpout.write('%s: %s\\n' % header[:])\n self.httpout.write('\\n')\n return request\n <function token>\n <assignment token>\n <assignment token>\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass HTTPMyDebugProcessor(urllib2.AbstractHTTPHandler):\n <docstring token>\n\n def __init__(self, httpout=sys.stdout):\n self.httpout = httpout\n <function token>\n <function token>\n <assignment token>\n <assignment token>\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass HTTPMyDebugProcessor(urllib2.AbstractHTTPHandler):\n <docstring token>\n <function token>\n <function token>\n <function token>\n <assignment token>\n <assignment token>\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
99,430
89e187ab58e51020f3bbd157e808dba47151fa90
"""
This module provides utility functions to apply to formulas in
CNF (conjunctive normal form), i.e. conjunctions of disjunctions.

Disjunctive clause of the form:
"(x or y or not x or ...)"

Conjunctive clause of the form:
"(x and y and not x and ...)"

(with or without the parentheses)

'cnf_from_str' transforms a str in CNF form into a list of lists
of variables.

Variables must be integers.

A literal of a variable is positive or negative, representing a
true or a negated variable.

Example:
str_cnf = "(1 or 2) and ( -3 or 4)"
result = cnf_from_str(str_cnf)
print(result)  # [[1, 2], [-3, 4]]
"""


def is_cnf(func):

    def inner(*args, **kwargs):
        error_msg_list = "'args[0]' should be 'list' of 'list'"
        error_msg_elmt = "Elements of a 'clause' should only be 'integer'"

        if type(args[0]) is not list:
            raise TypeError(f'{error_msg_list}, args[0]:{args[0]}')

        for supposed_clause in args[0]:
            if type(supposed_clause) is not list:
                raise TypeError(f'{error_msg_list}, "{supposed_clause}"')
            for el in supposed_clause:
                if type(el) is not int:
                    raise TypeError(f'{error_msg_elmt}, "{el}"')

        return func(*args, **kwargs)

    return inner


def litteral(var, false=False):
    if false:
        return -var

    return var


def cnf_from_str(prop):
    """
    Utility to obtain the clauses of a formula in CNF
    (conjunctive normal form), i.e. of the form:

    "(1 or 2 or ...) and (3 or -2 or ...) ... "

    Returns a result of the form:
    0: [1, 2]
    1: [3, -2]
    .
    .
    .
    n: ...

    Return: [[var1, var2], [var3, -var4], ...]

    """
    splitted = prop.split('and')
    clauses = []

    for clause in splitted:
        clause = clause.replace('(', '').replace(')', '')
        str_clause = clause.split('or')

        int_litterals = [int(el) for el in str_clause]

        clauses.append(int_litterals)

    return clauses


@is_cnf
def cnf_variables(cnf):
    """
    'cnf' must be produced by cnf_from_str()

    return: 'set' of variables
    """
    variabs = set()

    for clause in cnf:
        for var in clause:
            var = abs(var)

            if var not in variabs:
                variabs.add(var)

    return variabs
[ "from copy import copy, deepcopy\n\nimport math\n\"\"\"\nCe module pourvoie des fonctions utilitaires a appliquer\nsur des formules en CNF(Conjonctive normal form)\n\nQui sont des conjonctions de disjonctions \n\nClause disjonctive de la forme:\n\"(x or y or not x or ...)\"\n\nClause Conjonctive de la forme:\n\"(x and y and not x and ...)\"\n\n\n\n(avec ou sans les parentheses)\n\n'cnf_from_string', elle tranforme une str\nsous forme CNF en list de list de variables.\n\nLes variables doivent etres des entiers.\n\nLes litteraux de ces variables sont positifs, \nou negatifs pour representer des litteraux faux \nou vrais.\n\nExemple:\nstr_cnf = \"(1 or 2) and ( -3 or 4)\"\nresult = cnf_from_str(str_cnf)\nprint(result) # [[1, 2], [-3, 4]]\n\n\"\"\"\n\n\ndef is_cnf(func):\n\n def inner(*args, **kwargs):\n error_msg_list = \"'args[0]' should be 'list' of 'list'\"\n error_msg_elmt = \"Elements of a 'clause' should only be 'integer'\"\n\n if type(args[0]) is not list:\n raise TypeError(f'{error_msg_list}, args[0]:{args[0]}')\n\n for supposed_clause in args[0]:\n if type(supposed_clause) is not list:\n raise TypeError(f'{error_msg_list}, \"{supposed_clause}\"')\n for el in supposed_clause:\n if type(el) is not int:\n raise TypeError(f'{error_msg_elmt}, \"{el}\"')\n\n return func(*args, **kwargs)\n\n return inner\n\n\ndef litteral(var, false=False):\n if false:\n return -var\n\n return var\n\n\ndef cnf_from_str(prop):\n \"\"\"\n Utilitaire pour obtenir les clauses d'une formule sous forme \n CNF ou Formule Normale Conjonctive donc de la forme:\n\n \"(1 or 2 or ...) and (3 or -2 or ...) ... \"\n\n Retourne un resultat sous la forme:\n 0: [1, 2]\n 1: [3, -2]\n .\n .\n .\n n: ...\n\n Return: [[var1, var2], [var3, -var4], ...]\n\n \"\"\"\n splitted = prop.split('and')\n clauses = []\n\n for clause in splitted:\n clause = clause.replace('(', '').replace(')', '')\n str_clause = clause.split('or')\n\n int_litterals = [int(el) for el in str_clause]\n\n clauses.append(int_litterals)\n \n return clauses\n\n\n@is_cnf\ndef cnf_variables(cnf):\n \"\"\"\n 'entry' doit etre issue de cnf_clauses()\n\n return: 'set' de variable\n \"\"\"\n variabs = set()\n\n for clause in cnf:\n for var in clause:\n var = abs(var)\n\n if var not in variabs:\n variabs.add(var)\n\n return variabs\n\n\n", "from copy import copy, deepcopy\nimport math\n<docstring token>\n\n\ndef is_cnf(func):\n\n def inner(*args, **kwargs):\n error_msg_list = \"'args[0]' should be 'list' of 'list'\"\n error_msg_elmt = \"Elements of a 'clause' should only be 'integer'\"\n if type(args[0]) is not list:\n raise TypeError(f'{error_msg_list}, args[0]:{args[0]}')\n for supposed_clause in args[0]:\n if type(supposed_clause) is not list:\n raise TypeError(f'{error_msg_list}, \"{supposed_clause}\"')\n for el in supposed_clause:\n if type(el) is not int:\n raise TypeError(f'{error_msg_elmt}, \"{el}\"')\n return func(*args, **kwargs)\n return inner\n\n\ndef litteral(var, false=False):\n if false:\n return -var\n return var\n\n\ndef cnf_from_str(prop):\n \"\"\"\n Utilitaire pour obtenir les clauses d'une formule sous forme \n CNF ou Formule Normale Conjonctive donc de la forme:\n\n \"(1 or 2 or ...) and (3 or -2 or ...) ... 
\"\n\n Retourne un resultat sous la forme:\n 0: [1, 2]\n 1: [3, -2]\n .\n .\n .\n n: ...\n\n Return: [[var1, var2], [var3, -var4], ...]\n\n \"\"\"\n splitted = prop.split('and')\n clauses = []\n for clause in splitted:\n clause = clause.replace('(', '').replace(')', '')\n str_clause = clause.split('or')\n int_litterals = [int(el) for el in str_clause]\n clauses.append(int_litterals)\n return clauses\n\n\n@is_cnf\ndef cnf_variables(cnf):\n \"\"\"\n 'entry' doit etre issue de cnf_clauses()\n\n return: 'set' de variable\n \"\"\"\n variabs = set()\n for clause in cnf:\n for var in clause:\n var = abs(var)\n if var not in variabs:\n variabs.add(var)\n return variabs\n", "<import token>\n<docstring token>\n\n\ndef is_cnf(func):\n\n def inner(*args, **kwargs):\n error_msg_list = \"'args[0]' should be 'list' of 'list'\"\n error_msg_elmt = \"Elements of a 'clause' should only be 'integer'\"\n if type(args[0]) is not list:\n raise TypeError(f'{error_msg_list}, args[0]:{args[0]}')\n for supposed_clause in args[0]:\n if type(supposed_clause) is not list:\n raise TypeError(f'{error_msg_list}, \"{supposed_clause}\"')\n for el in supposed_clause:\n if type(el) is not int:\n raise TypeError(f'{error_msg_elmt}, \"{el}\"')\n return func(*args, **kwargs)\n return inner\n\n\ndef litteral(var, false=False):\n if false:\n return -var\n return var\n\n\ndef cnf_from_str(prop):\n \"\"\"\n Utilitaire pour obtenir les clauses d'une formule sous forme \n CNF ou Formule Normale Conjonctive donc de la forme:\n\n \"(1 or 2 or ...) and (3 or -2 or ...) ... \"\n\n Retourne un resultat sous la forme:\n 0: [1, 2]\n 1: [3, -2]\n .\n .\n .\n n: ...\n\n Return: [[var1, var2], [var3, -var4], ...]\n\n \"\"\"\n splitted = prop.split('and')\n clauses = []\n for clause in splitted:\n clause = clause.replace('(', '').replace(')', '')\n str_clause = clause.split('or')\n int_litterals = [int(el) for el in str_clause]\n clauses.append(int_litterals)\n return clauses\n\n\n@is_cnf\ndef cnf_variables(cnf):\n \"\"\"\n 'entry' doit etre issue de cnf_clauses()\n\n return: 'set' de variable\n \"\"\"\n variabs = set()\n for clause in cnf:\n for var in clause:\n var = abs(var)\n if var not in variabs:\n variabs.add(var)\n return variabs\n", "<import token>\n<docstring token>\n\n\ndef is_cnf(func):\n\n def inner(*args, **kwargs):\n error_msg_list = \"'args[0]' should be 'list' of 'list'\"\n error_msg_elmt = \"Elements of a 'clause' should only be 'integer'\"\n if type(args[0]) is not list:\n raise TypeError(f'{error_msg_list}, args[0]:{args[0]}')\n for supposed_clause in args[0]:\n if type(supposed_clause) is not list:\n raise TypeError(f'{error_msg_list}, \"{supposed_clause}\"')\n for el in supposed_clause:\n if type(el) is not int:\n raise TypeError(f'{error_msg_elmt}, \"{el}\"')\n return func(*args, **kwargs)\n return inner\n\n\n<function token>\n\n\ndef cnf_from_str(prop):\n \"\"\"\n Utilitaire pour obtenir les clauses d'une formule sous forme \n CNF ou Formule Normale Conjonctive donc de la forme:\n\n \"(1 or 2 or ...) and (3 or -2 or ...) ... 
\"\n\n Retourne un resultat sous la forme:\n 0: [1, 2]\n 1: [3, -2]\n .\n .\n .\n n: ...\n\n Return: [[var1, var2], [var3, -var4], ...]\n\n \"\"\"\n splitted = prop.split('and')\n clauses = []\n for clause in splitted:\n clause = clause.replace('(', '').replace(')', '')\n str_clause = clause.split('or')\n int_litterals = [int(el) for el in str_clause]\n clauses.append(int_litterals)\n return clauses\n\n\n@is_cnf\ndef cnf_variables(cnf):\n \"\"\"\n 'entry' doit etre issue de cnf_clauses()\n\n return: 'set' de variable\n \"\"\"\n variabs = set()\n for clause in cnf:\n for var in clause:\n var = abs(var)\n if var not in variabs:\n variabs.add(var)\n return variabs\n", "<import token>\n<docstring token>\n\n\ndef is_cnf(func):\n\n def inner(*args, **kwargs):\n error_msg_list = \"'args[0]' should be 'list' of 'list'\"\n error_msg_elmt = \"Elements of a 'clause' should only be 'integer'\"\n if type(args[0]) is not list:\n raise TypeError(f'{error_msg_list}, args[0]:{args[0]}')\n for supposed_clause in args[0]:\n if type(supposed_clause) is not list:\n raise TypeError(f'{error_msg_list}, \"{supposed_clause}\"')\n for el in supposed_clause:\n if type(el) is not int:\n raise TypeError(f'{error_msg_elmt}, \"{el}\"')\n return func(*args, **kwargs)\n return inner\n\n\n<function token>\n\n\ndef cnf_from_str(prop):\n \"\"\"\n Utilitaire pour obtenir les clauses d'une formule sous forme \n CNF ou Formule Normale Conjonctive donc de la forme:\n\n \"(1 or 2 or ...) and (3 or -2 or ...) ... \"\n\n Retourne un resultat sous la forme:\n 0: [1, 2]\n 1: [3, -2]\n .\n .\n .\n n: ...\n\n Return: [[var1, var2], [var3, -var4], ...]\n\n \"\"\"\n splitted = prop.split('and')\n clauses = []\n for clause in splitted:\n clause = clause.replace('(', '').replace(')', '')\n str_clause = clause.split('or')\n int_litterals = [int(el) for el in str_clause]\n clauses.append(int_litterals)\n return clauses\n\n\n<function token>\n", "<import token>\n<docstring token>\n\n\ndef is_cnf(func):\n\n def inner(*args, **kwargs):\n error_msg_list = \"'args[0]' should be 'list' of 'list'\"\n error_msg_elmt = \"Elements of a 'clause' should only be 'integer'\"\n if type(args[0]) is not list:\n raise TypeError(f'{error_msg_list}, args[0]:{args[0]}')\n for supposed_clause in args[0]:\n if type(supposed_clause) is not list:\n raise TypeError(f'{error_msg_list}, \"{supposed_clause}\"')\n for el in supposed_clause:\n if type(el) is not int:\n raise TypeError(f'{error_msg_elmt}, \"{el}\"')\n return func(*args, **kwargs)\n return inner\n\n\n<function token>\n<function token>\n<function token>\n", "<import token>\n<docstring token>\n<function token>\n<function token>\n<function token>\n<function token>\n" ]
false
99,431
7b3f1432f7ea778e3d5c4a6ebd68a161c171d406
from collections import defaultdict


class Subject:
    """
    Represents a row in a matrix of training data.
    It contains several features (representing the columns of the data)
    and a class label.
    """
    def __init__(self, features, class_label=None):
        self.class_label = class_label
        self.class_features = features

    def print(self):
        print("Subject [ class: " + str(self.class_label) + " ]")


def group_has_same_label(subjects):
    """
    Returns true if all subjects contain the same class label.
    :param subjects:
    :return: boolean
    """
    first_label = subjects[0].class_label
    for subject in subjects:
        if subject.class_label != first_label:
            # if the subject's class label is not the same as the first label
            # then this group does not all have the same label
            return False

    # They all have the same class label
    return True


def most_common_class_label(subjects):
    """
    Picks the class label which is most common amongst the given set of subjects.
    :param subjects:
    :return: class label (any type)
    """
    result_set = defaultdict(int)
    for subject in subjects:
        # class_label is assumed to be a sequence here; its first element is counted
        result_set[subject.class_label[0]] += 1

    return max(result_set, key=result_set.get)
[ "from collections import defaultdict\n\nclass Subject:\n \"\"\"\n Represents a Row in a matrix of training data.\n It contains several features (represents the columns of the data)\n and a class label.\n \"\"\"\n def __init__(self, features, class_label=None):\n self.class_label = class_label\n self.class_features = features\n\n def print(self):\n print(\"Subject [ class: \" + str(self.class_label) + \" ]\")\n\n\ndef group_has_same_label(subjects):\n \"\"\"\n Returns true if all subjects contains the same class label.\n :param subjects:\n :return: boolean\n \"\"\"\n first_label = subjects[0].class_label\n for subject in subjects:\n if subject.class_label != first_label:\n # if the subjects class label is not the same as the first label\n # then this group does not all have the same label\n return False\n\n # They all have the same class label\n return True\n\n\ndef most_common_class_label(subjects):\n \"\"\"\n Picks the class label which is most common amongst the given set of subjects.\n :param subjects:\n :return: class label (Any type)\n \"\"\"\n result_set = defaultdict(int)\n for subject in subjects:\n result_set[subject.class_label[0]] += 1\n\n return max(result_set, key=result_set.get)\n", "from collections import defaultdict\n\n\nclass Subject:\n \"\"\"\n Represents a Row in a matrix of training data.\n It contains several features (represents the columns of the data)\n and a class label.\n \"\"\"\n\n def __init__(self, features, class_label=None):\n self.class_label = class_label\n self.class_features = features\n\n def print(self):\n print('Subject [ class: ' + str(self.class_label) + ' ]')\n\n\ndef group_has_same_label(subjects):\n \"\"\"\n Returns true if all subjects contains the same class label.\n :param subjects:\n :return: boolean\n \"\"\"\n first_label = subjects[0].class_label\n for subject in subjects:\n if subject.class_label != first_label:\n return False\n return True\n\n\ndef most_common_class_label(subjects):\n \"\"\"\n Picks the class label which is most common amongst the given set of subjects.\n :param subjects:\n :return: class label (Any type)\n \"\"\"\n result_set = defaultdict(int)\n for subject in subjects:\n result_set[subject.class_label[0]] += 1\n return max(result_set, key=result_set.get)\n", "<import token>\n\n\nclass Subject:\n \"\"\"\n Represents a Row in a matrix of training data.\n It contains several features (represents the columns of the data)\n and a class label.\n \"\"\"\n\n def __init__(self, features, class_label=None):\n self.class_label = class_label\n self.class_features = features\n\n def print(self):\n print('Subject [ class: ' + str(self.class_label) + ' ]')\n\n\ndef group_has_same_label(subjects):\n \"\"\"\n Returns true if all subjects contains the same class label.\n :param subjects:\n :return: boolean\n \"\"\"\n first_label = subjects[0].class_label\n for subject in subjects:\n if subject.class_label != first_label:\n return False\n return True\n\n\ndef most_common_class_label(subjects):\n \"\"\"\n Picks the class label which is most common amongst the given set of subjects.\n :param subjects:\n :return: class label (Any type)\n \"\"\"\n result_set = defaultdict(int)\n for subject in subjects:\n result_set[subject.class_label[0]] += 1\n return max(result_set, key=result_set.get)\n", "<import token>\n\n\nclass Subject:\n \"\"\"\n Represents a Row in a matrix of training data.\n It contains several features (represents the columns of the data)\n and a class label.\n \"\"\"\n\n def __init__(self, features, class_label=None):\n 
self.class_label = class_label\n self.class_features = features\n\n def print(self):\n print('Subject [ class: ' + str(self.class_label) + ' ]')\n\n\n<function token>\n\n\ndef most_common_class_label(subjects):\n \"\"\"\n Picks the class label which is most common amongst the given set of subjects.\n :param subjects:\n :return: class label (Any type)\n \"\"\"\n result_set = defaultdict(int)\n for subject in subjects:\n result_set[subject.class_label[0]] += 1\n return max(result_set, key=result_set.get)\n", "<import token>\n\n\nclass Subject:\n \"\"\"\n Represents a Row in a matrix of training data.\n It contains several features (represents the columns of the data)\n and a class label.\n \"\"\"\n\n def __init__(self, features, class_label=None):\n self.class_label = class_label\n self.class_features = features\n\n def print(self):\n print('Subject [ class: ' + str(self.class_label) + ' ]')\n\n\n<function token>\n<function token>\n", "<import token>\n\n\nclass Subject:\n <docstring token>\n\n def __init__(self, features, class_label=None):\n self.class_label = class_label\n self.class_features = features\n\n def print(self):\n print('Subject [ class: ' + str(self.class_label) + ' ]')\n\n\n<function token>\n<function token>\n", "<import token>\n\n\nclass Subject:\n <docstring token>\n\n def __init__(self, features, class_label=None):\n self.class_label = class_label\n self.class_features = features\n <function token>\n\n\n<function token>\n<function token>\n", "<import token>\n\n\nclass Subject:\n <docstring token>\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n", "<import token>\n<class token>\n<function token>\n<function token>\n" ]
false
99,432
23fa3fb8197959f16d03e14e20544103bf2cff50
import subprocess import time from random import random, randint, randrange import uuid from bertopic import BERTopic import numpy as np from BuisnessLayer.AnalysisManager.DataObjects import AnalyzedTweet, Claim import pandas as pd import nltk # nltk.download('vader_lexicon') from nltk.sentiment.vader import SentimentIntensityAnalyzer import text2emotion as te from BuisnessLayer.AnalysisManager.DataObjects import * def get_emotion_by_id(id): if id == 1: return 'Anger' elif id == 2: return 'Disgust' elif id == 3: return 'Sad' elif id == 4: return 'Happy' elif id == 5: return 'Surprise' else: return 'Fear' author_columns = ['name', 'domain', 'author_guid', 'author_screen_name', 'author_full_name', 'author_osn_id', 'description', 'created_at', 'statuses_count', 'followers_count', 'favourites_count', 'friends_count', 'listed_count', 'language', 'profile_background_color', 'profile_background_tile', 'profile_banner_url', 'profile_image_url', 'profile_link_color', 'profile_sidebar_fill_color', 'profile_text_color', 'default_profile', 'contributors_enabled', 'default_profile_image', 'geo_enabled', 'protected', 'location', 'notifications', 'time_zone', 'url', 'utc_offset', 'verified', 'is_suspended_or_not_exists', 'default_post_format', 'likes_count', 'allow_questions', 'allow_anonymous_questions', 'image_size', 'media_path', 'author_type', 'bad_actors_collector_insertion_date', 'xml_importer_insertion_date', 'vico_dump_insertion_date', 'missing_data_complementor_insertion_date', 'bad_actors_markup_insertion_date', 'mark_missing_bad_actor_retweeters_insertion_date', 'author_sub_type', 'timeline_overlap_insertion_date', 'original_tweet_importer_insertion_date'] post_columns = ['post_id', 'author', 'guid', 'title', 'url', 'date', 'content', 'description', 'is_detailed', 'is_LB', 'is_valid', 'domain', 'author_guid', 'media_path', 'post_osn_guid', 'post_type', 'post_format', 'reblog_key', 'tags', 'is_created_via_bookmarklet', 'is_created_via_mobile', 'source_url', 'source_title', 'is_liked', 'post_state', 'post_osn_id', 'retweet_count', 'favorite_count', 'created_at', 'xml_importer_insertion_date', 'timeline_importer_insertion_date', 'original_tweet_importer_insertion_date'] claims_columns = ['claim_id', 'title', 'description', 'url', 'verdict_date', 'keywords', 'domain', 'verdict', 'category', 'sub_category'] connection_columns = ['claim_id', 'post_id'] # subprocess.call(['python','run_dataset_builder.py','configuration/config_demo.ini'],cwd= r'D:\aviad fake v3\fake-news-framework_Py3',shell=True) # ours, should write also stub class ClassifierAdapter: def __init__(self): self.sid = SentimentIntensityAnalyzer() self.i=0 def get_sentiment(self,text) -> int: snt = self.sid.polarity_scores(text) return round(snt['pos']*3-snt['neg']*3) def get_emotion(self,text): emo = te.get_emotion(text) return max(emo, key=emo.get) # The output we received, def _trends_to_csv(self, trends_dict, path="C:/fake-news-framework_Py3/data/input/tryout/"): topics = [] tweets = [] authors = [] topic_tweet_connection = [] for trend in trends_dict.keys(): for topic in trends_dict[trend].claims: topics.append({'claim_id':topic.id,'title': topic.name}) # check what is the input for tweet in topic.tweets: topic_tweet_connection.append({'claim_id': topic.id, 'post_id': tweet.id}) tweets.append({'post_id':tweet.id,'author':tweet.author_name,'content':tweet.content,'retweet_count':tweet.retweet_count, 'favorite_count':tweet.favorite_count}) authors.append({'name':tweet.author_name}) pd.DataFrame(topics, 
columns=claims_columns).to_csv(path + "claims.csv",index=False) pd.DataFrame(tweets, columns=post_columns).to_csv(path + "posts.csv",index=False) pd.DataFrame(authors, columns=author_columns).to_csv(path + "authors.csv",index=False) pd.DataFrame(topic_tweet_connection, columns=connection_columns).to_csv(path + "claim_tweet_connection.csv",index=False) self.i+=1 def _classify_topic(self): subprocess.call(['python','run_dataset_builder.py','configuration/config_demo.ini'],cwd= r'C:/fake-news-framework_Py3',shell=True) results = pd.read_csv("C:/fake-news-framework_Py3/data/output/D/labeled_predictions.csv")[['author_guid','pred']] return results def analyze_trends(self, trends_dict, callback): # trends_dict is type of dict {<trend name> : <Trend>} processed_data = {} if len(trends_dict)==0: return self._trends_to_csv(trends_dict) results = self._classify_topic() print("got classifier results\nparsing the results and running sentiment and emotion") for trend in trends_dict.keys(): print("start trend {}".format(trend)) if trend not in processed_data: processed_data[trend] = list() for topic in trends_dict[trend].claims: tweets = list() for tweet in topic.tweets: rand = randrange(100) if rand < 50: prediction = "fake" else: prediction = "true" # sentiment = randint(-3, 3) sentiment = self.get_sentiment(tweet.content) # rand = randrange(6) emotion = self.get_emotion(tweet.content) analyzed_tweet = AnalyzedTweet(tweet.id, tweet.author_name, tweet.content,tweet.location,tweet.date, tweet.trend_id,tweet.favorite_count,tweet.retweet_count, emotion, sentiment, prediction) tweets.append(analyzed_tweet) print(f"add tweet {tweet} to the topic {topic}") print(f"save the topic {topic}, with the list of tweets: {tweets}") processed_data[trend].append(Claim(topic.name, tweets,topic.id)) time.sleep(1) results['pred'] = results['pred'].apply(lambda x:"True" if x else "Fake") return callback(processed_data, trends_dict,results) def analyze_snopes(self, data, callback): # data is type of dict {<claim name> : list <tweets>} # print(data) # processed_data = {} # for key in data.keys(): # if key not in processed_data: # processed_data[key]={} # for tweet in data[key].keys(): # processed_data[key][tweet]={} # rand = randrange(100) # if rand < 50: # processed_data[key][tweet]['prediction'] = "wow it's fake" # else: # processed_data[key][tweet]['prediction'] = "100% true" # sentiment = randint(-3, 3) # processed_data[key][tweet]['sentiment'] = sentiment # rand = randrange(6) # processed_data[key][tweet]['emotional'] = get_emotion_by_id(rand) processed_data = {} for claim in data.keys(): # if claim not in processed_data: # processed_data[claim]= list() tweets = list() for tweet in data[claim]: rand = randrange(100) if rand < 50: prediction = "fake" else: prediction = "true" sentiment = randint(-3, 3) rand = randrange(6) emotion = get_emotion_by_id(rand) analyzed_tweet = AnalyzedTweet(tweet['id'], tweet['author'], tweet['content'], emotion, sentiment, prediction) tweets.append(analyzed_tweet) if claim in processed_data.keys(): processed_data[claim].append(Claim(claim, tweets)) else: processed_data[claim] = Claim(claim, tweets) time.sleep(1) return callback(processed_data) def get_claims_from_trend(self, trends_tweets): claims = {'claim1': {}, 'claim2': {}} for status in trends_tweets: rand = randrange(10) # print(status.id) # print(status.text) # print(status.author.name) if rand < 5: claims["claim1"][status.id]= {'id': status.id, 'author': status.author_name, 'content': status.content} else: # print(status) 
claims["claim2"][status.id]= {'id': status.id, 'author': status.author_name, 'content': status.content} return claims def _get_claim_from_trend(self, trends_tweets): print("topic model") df = pd.DataFrame([tweet.__dict__ for tweet in trends_tweets]) df = df[['id', 'content','author_name']] if len(df) < 15: print("less then 15 tweets, creating 1 topic") from collections import Counter claim_text = ' '.join([txt[0] for txt in Counter(" ".join(df['content'].str.replace("RT", '').values).split(' ')).most_common( 10)]) return [Claim(claim_text,trends_tweets,0)] print("build bertopic") bt = BERTopic() print("fit bertopic") topics = bt.fit_transform(df['content'].str.replace("RT", '').values) print("done fitting") df['topic_id'] = topics[0] topic_info = bt.get_topics() topics_text = {} for key in topic_info.keys(): lst = topic_info[key] topics_text[key] = ' '.join([x[0] for x in lst]) # df['topic_text'] = df['topic_id'].apply(lambda x:topics_text[x]) claims = [] print("attaching tweet object for topics") for t in topic_info.keys(): fitered = df[df['topic_id'] == t] tweets = list(filter(lambda t:t.id in fitered['id'].values,trends_tweets)) claims.append(Claim(topics_text[t], tweets,0)) return claims
[ "import subprocess\nimport time\nfrom random import random, randint, randrange\nimport uuid\nfrom bertopic import BERTopic\nimport numpy as np\nfrom BuisnessLayer.AnalysisManager.DataObjects import AnalyzedTweet, Claim\nimport pandas as pd\nimport nltk\n# nltk.download('vader_lexicon')\nfrom nltk.sentiment.vader import SentimentIntensityAnalyzer\nimport text2emotion as te\nfrom BuisnessLayer.AnalysisManager.DataObjects import *\n\ndef get_emotion_by_id(id):\n if id == 1:\n return 'Anger'\n elif id == 2:\n return 'Disgust'\n elif id == 3:\n return 'Sad'\n elif id == 4:\n return 'Happy'\n elif id == 5:\n return 'Surprise'\n else:\n return 'Fear'\n\n\nauthor_columns = ['name', 'domain', 'author_guid', 'author_screen_name',\n 'author_full_name', 'author_osn_id', 'description', 'created_at',\n 'statuses_count', 'followers_count', 'favourites_count',\n 'friends_count', 'listed_count', 'language', 'profile_background_color',\n 'profile_background_tile', 'profile_banner_url', 'profile_image_url',\n 'profile_link_color', 'profile_sidebar_fill_color',\n 'profile_text_color', 'default_profile', 'contributors_enabled',\n 'default_profile_image', 'geo_enabled', 'protected', 'location',\n 'notifications', 'time_zone', 'url', 'utc_offset', 'verified',\n 'is_suspended_or_not_exists', 'default_post_format', 'likes_count',\n 'allow_questions', 'allow_anonymous_questions', 'image_size',\n 'media_path', 'author_type', 'bad_actors_collector_insertion_date',\n 'xml_importer_insertion_date', 'vico_dump_insertion_date',\n 'missing_data_complementor_insertion_date',\n 'bad_actors_markup_insertion_date',\n 'mark_missing_bad_actor_retweeters_insertion_date', 'author_sub_type',\n 'timeline_overlap_insertion_date',\n 'original_tweet_importer_insertion_date']\n\npost_columns = ['post_id', 'author', 'guid', 'title', 'url', 'date', 'content',\n 'description', 'is_detailed', 'is_LB', 'is_valid', 'domain',\n 'author_guid', 'media_path', 'post_osn_guid', 'post_type',\n 'post_format', 'reblog_key', 'tags', 'is_created_via_bookmarklet',\n 'is_created_via_mobile', 'source_url', 'source_title', 'is_liked',\n 'post_state', 'post_osn_id', 'retweet_count', 'favorite_count',\n 'created_at', 'xml_importer_insertion_date',\n 'timeline_importer_insertion_date',\n 'original_tweet_importer_insertion_date']\n\nclaims_columns = ['claim_id', 'title', 'description', 'url', 'verdict_date', 'keywords',\n 'domain', 'verdict', 'category', 'sub_category']\n\nconnection_columns = ['claim_id', 'post_id']\n\n# subprocess.call(['python','run_dataset_builder.py','configuration/config_demo.ini'],cwd= r'D:\\aviad fake v3\\fake-news-framework_Py3',shell=True)\n# ours, should write also stub\nclass ClassifierAdapter:\n def __init__(self):\n self.sid = SentimentIntensityAnalyzer()\n self.i=0\n def get_sentiment(self,text) -> int:\n snt = self.sid.polarity_scores(text)\n return round(snt['pos']*3-snt['neg']*3)\n\n def get_emotion(self,text):\n emo = te.get_emotion(text)\n return max(emo, key=emo.get) # The output we received,\n\n def _trends_to_csv(self, trends_dict, path=\"C:/fake-news-framework_Py3/data/input/tryout/\"):\n topics = []\n tweets = []\n authors = []\n topic_tweet_connection = []\n\n for trend in trends_dict.keys():\n for topic in trends_dict[trend].claims:\n topics.append({'claim_id':topic.id,'title': topic.name}) # check what is the input\n for tweet in topic.tweets:\n topic_tweet_connection.append({'claim_id': topic.id, 'post_id': tweet.id})\n 
tweets.append({'post_id':tweet.id,'author':tweet.author_name,'content':tweet.content,'retweet_count':tweet.retweet_count, 'favorite_count':tweet.favorite_count})\n authors.append({'name':tweet.author_name})\n\n pd.DataFrame(topics, columns=claims_columns).to_csv(path + \"claims.csv\",index=False)\n pd.DataFrame(tweets, columns=post_columns).to_csv(path + \"posts.csv\",index=False)\n pd.DataFrame(authors, columns=author_columns).to_csv(path + \"authors.csv\",index=False)\n pd.DataFrame(topic_tweet_connection, columns=connection_columns).to_csv(path + \"claim_tweet_connection.csv\",index=False)\n self.i+=1\n\n def _classify_topic(self):\n subprocess.call(['python','run_dataset_builder.py','configuration/config_demo.ini'],cwd= r'C:/fake-news-framework_Py3',shell=True)\n results = pd.read_csv(\"C:/fake-news-framework_Py3/data/output/D/labeled_predictions.csv\")[['author_guid','pred']]\n return results\n\n\n def analyze_trends(self, trends_dict, callback): # trends_dict is type of dict {<trend name> : <Trend>}\n processed_data = {}\n if len(trends_dict)==0:\n return\n self._trends_to_csv(trends_dict)\n results = self._classify_topic()\n print(\"got classifier results\\nparsing the results and running sentiment and emotion\")\n for trend in trends_dict.keys():\n print(\"start trend {}\".format(trend))\n if trend not in processed_data:\n processed_data[trend] = list()\n for topic in trends_dict[trend].claims:\n tweets = list()\n for tweet in topic.tweets:\n rand = randrange(100)\n if rand < 50:\n prediction = \"fake\"\n else:\n prediction = \"true\"\n # sentiment = randint(-3, 3)\n sentiment = self.get_sentiment(tweet.content)\n # rand = randrange(6)\n emotion = self.get_emotion(tweet.content)\n\n analyzed_tweet = AnalyzedTweet(tweet.id, tweet.author_name, tweet.content,tweet.location,tweet.date,\n tweet.trend_id,tweet.favorite_count,tweet.retweet_count, emotion, sentiment,\n prediction)\n tweets.append(analyzed_tweet)\n print(f\"add tweet {tweet} to the topic {topic}\")\n print(f\"save the topic {topic}, with the list of tweets: {tweets}\")\n processed_data[trend].append(Claim(topic.name, tweets,topic.id))\n\n time.sleep(1)\n results['pred'] = results['pred'].apply(lambda x:\"True\" if x else \"Fake\")\n return callback(processed_data, trends_dict,results)\n\n def analyze_snopes(self, data, callback): # data is type of dict {<claim name> : list <tweets>}\n # print(data)\n # processed_data = {}\n # for key in data.keys():\n # if key not in processed_data:\n # processed_data[key]={}\n # for tweet in data[key].keys():\n # processed_data[key][tweet]={}\n # rand = randrange(100)\n # if rand < 50:\n # processed_data[key][tweet]['prediction'] = \"wow it's fake\"\n # else:\n # processed_data[key][tweet]['prediction'] = \"100% true\"\n # sentiment = randint(-3, 3)\n # processed_data[key][tweet]['sentiment'] = sentiment\n # rand = randrange(6)\n # processed_data[key][tweet]['emotional'] = get_emotion_by_id(rand)\n\n processed_data = {}\n for claim in data.keys():\n # if claim not in processed_data:\n # processed_data[claim]= list()\n tweets = list()\n for tweet in data[claim]:\n rand = randrange(100)\n if rand < 50:\n prediction = \"fake\"\n else:\n prediction = \"true\"\n sentiment = randint(-3, 3)\n rand = randrange(6)\n emotion = get_emotion_by_id(rand)\n\n analyzed_tweet = AnalyzedTweet(tweet['id'], tweet['author'], tweet['content'], emotion, sentiment,\n prediction)\n tweets.append(analyzed_tweet)\n if claim in processed_data.keys():\n processed_data[claim].append(Claim(claim, tweets))\n else:\n 
processed_data[claim] = Claim(claim, tweets)\n\n time.sleep(1)\n return callback(processed_data)\n\n def get_claims_from_trend(self, trends_tweets):\n claims = {'claim1': {}, 'claim2': {}}\n for status in trends_tweets:\n rand = randrange(10)\n # print(status.id)\n # print(status.text)\n # print(status.author.name)\n if rand < 5:\n claims[\"claim1\"][status.id]= {'id': status.id, 'author': status.author_name, 'content': status.content}\n else:\n # print(status)\n claims[\"claim2\"][status.id]= {'id': status.id, 'author': status.author_name, 'content': status.content}\n return claims\n\n def _get_claim_from_trend(self, trends_tweets):\n print(\"topic model\")\n df = pd.DataFrame([tweet.__dict__ for tweet in trends_tweets])\n df = df[['id', 'content','author_name']]\n if len(df) < 15:\n print(\"less then 15 tweets, creating 1 topic\")\n from collections import Counter\n claim_text = ' '.join([txt[0] for txt in\n Counter(\" \".join(df['content'].str.replace(\"RT\", '').values).split(' ')).most_common(\n 10)])\n return [Claim(claim_text,trends_tweets,0)]\n print(\"build bertopic\")\n bt = BERTopic()\n print(\"fit bertopic\")\n topics = bt.fit_transform(df['content'].str.replace(\"RT\", '').values)\n print(\"done fitting\")\n df['topic_id'] = topics[0]\n topic_info = bt.get_topics()\n topics_text = {}\n for key in topic_info.keys():\n lst = topic_info[key]\n\n topics_text[key] = ' '.join([x[0] for x in lst])\n\n # df['topic_text'] = df['topic_id'].apply(lambda x:topics_text[x])\n claims = []\n print(\"attaching tweet object for topics\")\n for t in topic_info.keys():\n\n fitered = df[df['topic_id'] == t]\n tweets = list(filter(lambda t:t.id in fitered['id'].values,trends_tweets))\n claims.append(Claim(topics_text[t], tweets,0))\n return claims\n", "import subprocess\nimport time\nfrom random import random, randint, randrange\nimport uuid\nfrom bertopic import BERTopic\nimport numpy as np\nfrom BuisnessLayer.AnalysisManager.DataObjects import AnalyzedTweet, Claim\nimport pandas as pd\nimport nltk\nfrom nltk.sentiment.vader import SentimentIntensityAnalyzer\nimport text2emotion as te\nfrom BuisnessLayer.AnalysisManager.DataObjects import *\n\n\ndef get_emotion_by_id(id):\n if id == 1:\n return 'Anger'\n elif id == 2:\n return 'Disgust'\n elif id == 3:\n return 'Sad'\n elif id == 4:\n return 'Happy'\n elif id == 5:\n return 'Surprise'\n else:\n return 'Fear'\n\n\nauthor_columns = ['name', 'domain', 'author_guid', 'author_screen_name',\n 'author_full_name', 'author_osn_id', 'description', 'created_at',\n 'statuses_count', 'followers_count', 'favourites_count',\n 'friends_count', 'listed_count', 'language', 'profile_background_color',\n 'profile_background_tile', 'profile_banner_url', 'profile_image_url',\n 'profile_link_color', 'profile_sidebar_fill_color',\n 'profile_text_color', 'default_profile', 'contributors_enabled',\n 'default_profile_image', 'geo_enabled', 'protected', 'location',\n 'notifications', 'time_zone', 'url', 'utc_offset', 'verified',\n 'is_suspended_or_not_exists', 'default_post_format', 'likes_count',\n 'allow_questions', 'allow_anonymous_questions', 'image_size',\n 'media_path', 'author_type', 'bad_actors_collector_insertion_date',\n 'xml_importer_insertion_date', 'vico_dump_insertion_date',\n 'missing_data_complementor_insertion_date',\n 'bad_actors_markup_insertion_date',\n 'mark_missing_bad_actor_retweeters_insertion_date', 'author_sub_type',\n 'timeline_overlap_insertion_date', 'original_tweet_importer_insertion_date'\n ]\npost_columns = ['post_id', 'author', 'guid', 
'title', 'url', 'date',\n 'content', 'description', 'is_detailed', 'is_LB', 'is_valid', 'domain',\n 'author_guid', 'media_path', 'post_osn_guid', 'post_type',\n 'post_format', 'reblog_key', 'tags', 'is_created_via_bookmarklet',\n 'is_created_via_mobile', 'source_url', 'source_title', 'is_liked',\n 'post_state', 'post_osn_id', 'retweet_count', 'favorite_count',\n 'created_at', 'xml_importer_insertion_date',\n 'timeline_importer_insertion_date',\n 'original_tweet_importer_insertion_date']\nclaims_columns = ['claim_id', 'title', 'description', 'url', 'verdict_date',\n 'keywords', 'domain', 'verdict', 'category', 'sub_category']\nconnection_columns = ['claim_id', 'post_id']\n\n\nclass ClassifierAdapter:\n\n def __init__(self):\n self.sid = SentimentIntensityAnalyzer()\n self.i = 0\n\n def get_sentiment(self, text) ->int:\n snt = self.sid.polarity_scores(text)\n return round(snt['pos'] * 3 - snt['neg'] * 3)\n\n def get_emotion(self, text):\n emo = te.get_emotion(text)\n return max(emo, key=emo.get)\n\n def _trends_to_csv(self, trends_dict, path=\n 'C:/fake-news-framework_Py3/data/input/tryout/'):\n topics = []\n tweets = []\n authors = []\n topic_tweet_connection = []\n for trend in trends_dict.keys():\n for topic in trends_dict[trend].claims:\n topics.append({'claim_id': topic.id, 'title': topic.name})\n for tweet in topic.tweets:\n topic_tweet_connection.append({'claim_id': topic.id,\n 'post_id': tweet.id})\n tweets.append({'post_id': tweet.id, 'author': tweet.\n author_name, 'content': tweet.content,\n 'retweet_count': tweet.retweet_count,\n 'favorite_count': tweet.favorite_count})\n authors.append({'name': tweet.author_name})\n pd.DataFrame(topics, columns=claims_columns).to_csv(path +\n 'claims.csv', index=False)\n pd.DataFrame(tweets, columns=post_columns).to_csv(path +\n 'posts.csv', index=False)\n pd.DataFrame(authors, columns=author_columns).to_csv(path +\n 'authors.csv', index=False)\n pd.DataFrame(topic_tweet_connection, columns=connection_columns\n ).to_csv(path + 'claim_tweet_connection.csv', index=False)\n self.i += 1\n\n def _classify_topic(self):\n subprocess.call(['python', 'run_dataset_builder.py',\n 'configuration/config_demo.ini'], cwd=\n 'C:/fake-news-framework_Py3', shell=True)\n results = pd.read_csv(\n 'C:/fake-news-framework_Py3/data/output/D/labeled_predictions.csv'\n )[['author_guid', 'pred']]\n return results\n\n def analyze_trends(self, trends_dict, callback):\n processed_data = {}\n if len(trends_dict) == 0:\n return\n self._trends_to_csv(trends_dict)\n results = self._classify_topic()\n print(\n 'got classifier results\\nparsing the results and running sentiment and emotion'\n )\n for trend in trends_dict.keys():\n print('start trend {}'.format(trend))\n if trend not in processed_data:\n processed_data[trend] = list()\n for topic in trends_dict[trend].claims:\n tweets = list()\n for tweet in topic.tweets:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = self.get_sentiment(tweet.content)\n emotion = self.get_emotion(tweet.content)\n analyzed_tweet = AnalyzedTweet(tweet.id, tweet.\n author_name, tweet.content, tweet.location, tweet.\n date, tweet.trend_id, tweet.favorite_count, tweet.\n retweet_count, emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n print(f'add tweet {tweet} to the topic {topic}')\n print(\n f'save the topic {topic}, with the list of tweets: {tweets}'\n )\n processed_data[trend].append(Claim(topic.name, tweets,\n topic.id))\n time.sleep(1)\n results['pred'] = 
results['pred'].apply(lambda x: 'True' if x else\n 'Fake')\n return callback(processed_data, trends_dict, results)\n\n def analyze_snopes(self, data, callback):\n processed_data = {}\n for claim in data.keys():\n tweets = list()\n for tweet in data[claim]:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = randint(-3, 3)\n rand = randrange(6)\n emotion = get_emotion_by_id(rand)\n analyzed_tweet = AnalyzedTweet(tweet['id'], tweet['author'],\n tweet['content'], emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n if claim in processed_data.keys():\n processed_data[claim].append(Claim(claim, tweets))\n else:\n processed_data[claim] = Claim(claim, tweets)\n time.sleep(1)\n return callback(processed_data)\n\n def get_claims_from_trend(self, trends_tweets):\n claims = {'claim1': {}, 'claim2': {}}\n for status in trends_tweets:\n rand = randrange(10)\n if rand < 5:\n claims['claim1'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n else:\n claims['claim2'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n return claims\n\n def _get_claim_from_trend(self, trends_tweets):\n print('topic model')\n df = pd.DataFrame([tweet.__dict__ for tweet in trends_tweets])\n df = df[['id', 'content', 'author_name']]\n if len(df) < 15:\n print('less then 15 tweets, creating 1 topic')\n from collections import Counter\n claim_text = ' '.join([txt[0] for txt in Counter(' '.join(df[\n 'content'].str.replace('RT', '').values).split(' ')).\n most_common(10)])\n return [Claim(claim_text, trends_tweets, 0)]\n print('build bertopic')\n bt = BERTopic()\n print('fit bertopic')\n topics = bt.fit_transform(df['content'].str.replace('RT', '').values)\n print('done fitting')\n df['topic_id'] = topics[0]\n topic_info = bt.get_topics()\n topics_text = {}\n for key in topic_info.keys():\n lst = topic_info[key]\n topics_text[key] = ' '.join([x[0] for x in lst])\n claims = []\n print('attaching tweet object for topics')\n for t in topic_info.keys():\n fitered = df[df['topic_id'] == t]\n tweets = list(filter(lambda t: t.id in fitered['id'].values,\n trends_tweets))\n claims.append(Claim(topics_text[t], tweets, 0))\n return claims\n", "<import token>\n\n\ndef get_emotion_by_id(id):\n if id == 1:\n return 'Anger'\n elif id == 2:\n return 'Disgust'\n elif id == 3:\n return 'Sad'\n elif id == 4:\n return 'Happy'\n elif id == 5:\n return 'Surprise'\n else:\n return 'Fear'\n\n\nauthor_columns = ['name', 'domain', 'author_guid', 'author_screen_name',\n 'author_full_name', 'author_osn_id', 'description', 'created_at',\n 'statuses_count', 'followers_count', 'favourites_count',\n 'friends_count', 'listed_count', 'language', 'profile_background_color',\n 'profile_background_tile', 'profile_banner_url', 'profile_image_url',\n 'profile_link_color', 'profile_sidebar_fill_color',\n 'profile_text_color', 'default_profile', 'contributors_enabled',\n 'default_profile_image', 'geo_enabled', 'protected', 'location',\n 'notifications', 'time_zone', 'url', 'utc_offset', 'verified',\n 'is_suspended_or_not_exists', 'default_post_format', 'likes_count',\n 'allow_questions', 'allow_anonymous_questions', 'image_size',\n 'media_path', 'author_type', 'bad_actors_collector_insertion_date',\n 'xml_importer_insertion_date', 'vico_dump_insertion_date',\n 'missing_data_complementor_insertion_date',\n 'bad_actors_markup_insertion_date',\n 'mark_missing_bad_actor_retweeters_insertion_date', 'author_sub_type',\n 
'timeline_overlap_insertion_date', 'original_tweet_importer_insertion_date'\n ]\npost_columns = ['post_id', 'author', 'guid', 'title', 'url', 'date',\n 'content', 'description', 'is_detailed', 'is_LB', 'is_valid', 'domain',\n 'author_guid', 'media_path', 'post_osn_guid', 'post_type',\n 'post_format', 'reblog_key', 'tags', 'is_created_via_bookmarklet',\n 'is_created_via_mobile', 'source_url', 'source_title', 'is_liked',\n 'post_state', 'post_osn_id', 'retweet_count', 'favorite_count',\n 'created_at', 'xml_importer_insertion_date',\n 'timeline_importer_insertion_date',\n 'original_tweet_importer_insertion_date']\nclaims_columns = ['claim_id', 'title', 'description', 'url', 'verdict_date',\n 'keywords', 'domain', 'verdict', 'category', 'sub_category']\nconnection_columns = ['claim_id', 'post_id']\n\n\nclass ClassifierAdapter:\n\n def __init__(self):\n self.sid = SentimentIntensityAnalyzer()\n self.i = 0\n\n def get_sentiment(self, text) ->int:\n snt = self.sid.polarity_scores(text)\n return round(snt['pos'] * 3 - snt['neg'] * 3)\n\n def get_emotion(self, text):\n emo = te.get_emotion(text)\n return max(emo, key=emo.get)\n\n def _trends_to_csv(self, trends_dict, path=\n 'C:/fake-news-framework_Py3/data/input/tryout/'):\n topics = []\n tweets = []\n authors = []\n topic_tweet_connection = []\n for trend in trends_dict.keys():\n for topic in trends_dict[trend].claims:\n topics.append({'claim_id': topic.id, 'title': topic.name})\n for tweet in topic.tweets:\n topic_tweet_connection.append({'claim_id': topic.id,\n 'post_id': tweet.id})\n tweets.append({'post_id': tweet.id, 'author': tweet.\n author_name, 'content': tweet.content,\n 'retweet_count': tweet.retweet_count,\n 'favorite_count': tweet.favorite_count})\n authors.append({'name': tweet.author_name})\n pd.DataFrame(topics, columns=claims_columns).to_csv(path +\n 'claims.csv', index=False)\n pd.DataFrame(tweets, columns=post_columns).to_csv(path +\n 'posts.csv', index=False)\n pd.DataFrame(authors, columns=author_columns).to_csv(path +\n 'authors.csv', index=False)\n pd.DataFrame(topic_tweet_connection, columns=connection_columns\n ).to_csv(path + 'claim_tweet_connection.csv', index=False)\n self.i += 1\n\n def _classify_topic(self):\n subprocess.call(['python', 'run_dataset_builder.py',\n 'configuration/config_demo.ini'], cwd=\n 'C:/fake-news-framework_Py3', shell=True)\n results = pd.read_csv(\n 'C:/fake-news-framework_Py3/data/output/D/labeled_predictions.csv'\n )[['author_guid', 'pred']]\n return results\n\n def analyze_trends(self, trends_dict, callback):\n processed_data = {}\n if len(trends_dict) == 0:\n return\n self._trends_to_csv(trends_dict)\n results = self._classify_topic()\n print(\n 'got classifier results\\nparsing the results and running sentiment and emotion'\n )\n for trend in trends_dict.keys():\n print('start trend {}'.format(trend))\n if trend not in processed_data:\n processed_data[trend] = list()\n for topic in trends_dict[trend].claims:\n tweets = list()\n for tweet in topic.tweets:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = self.get_sentiment(tweet.content)\n emotion = self.get_emotion(tweet.content)\n analyzed_tweet = AnalyzedTweet(tweet.id, tweet.\n author_name, tweet.content, tweet.location, tweet.\n date, tweet.trend_id, tweet.favorite_count, tweet.\n retweet_count, emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n print(f'add tweet {tweet} to the topic {topic}')\n print(\n f'save the topic {topic}, with the list of tweets: {tweets}'\n 
)\n processed_data[trend].append(Claim(topic.name, tweets,\n topic.id))\n time.sleep(1)\n results['pred'] = results['pred'].apply(lambda x: 'True' if x else\n 'Fake')\n return callback(processed_data, trends_dict, results)\n\n def analyze_snopes(self, data, callback):\n processed_data = {}\n for claim in data.keys():\n tweets = list()\n for tweet in data[claim]:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = randint(-3, 3)\n rand = randrange(6)\n emotion = get_emotion_by_id(rand)\n analyzed_tweet = AnalyzedTweet(tweet['id'], tweet['author'],\n tweet['content'], emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n if claim in processed_data.keys():\n processed_data[claim].append(Claim(claim, tweets))\n else:\n processed_data[claim] = Claim(claim, tweets)\n time.sleep(1)\n return callback(processed_data)\n\n def get_claims_from_trend(self, trends_tweets):\n claims = {'claim1': {}, 'claim2': {}}\n for status in trends_tweets:\n rand = randrange(10)\n if rand < 5:\n claims['claim1'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n else:\n claims['claim2'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n return claims\n\n def _get_claim_from_trend(self, trends_tweets):\n print('topic model')\n df = pd.DataFrame([tweet.__dict__ for tweet in trends_tweets])\n df = df[['id', 'content', 'author_name']]\n if len(df) < 15:\n print('less then 15 tweets, creating 1 topic')\n from collections import Counter\n claim_text = ' '.join([txt[0] for txt in Counter(' '.join(df[\n 'content'].str.replace('RT', '').values).split(' ')).\n most_common(10)])\n return [Claim(claim_text, trends_tweets, 0)]\n print('build bertopic')\n bt = BERTopic()\n print('fit bertopic')\n topics = bt.fit_transform(df['content'].str.replace('RT', '').values)\n print('done fitting')\n df['topic_id'] = topics[0]\n topic_info = bt.get_topics()\n topics_text = {}\n for key in topic_info.keys():\n lst = topic_info[key]\n topics_text[key] = ' '.join([x[0] for x in lst])\n claims = []\n print('attaching tweet object for topics')\n for t in topic_info.keys():\n fitered = df[df['topic_id'] == t]\n tweets = list(filter(lambda t: t.id in fitered['id'].values,\n trends_tweets))\n claims.append(Claim(topics_text[t], tweets, 0))\n return claims\n", "<import token>\n\n\ndef get_emotion_by_id(id):\n if id == 1:\n return 'Anger'\n elif id == 2:\n return 'Disgust'\n elif id == 3:\n return 'Sad'\n elif id == 4:\n return 'Happy'\n elif id == 5:\n return 'Surprise'\n else:\n return 'Fear'\n\n\n<assignment token>\n\n\nclass ClassifierAdapter:\n\n def __init__(self):\n self.sid = SentimentIntensityAnalyzer()\n self.i = 0\n\n def get_sentiment(self, text) ->int:\n snt = self.sid.polarity_scores(text)\n return round(snt['pos'] * 3 - snt['neg'] * 3)\n\n def get_emotion(self, text):\n emo = te.get_emotion(text)\n return max(emo, key=emo.get)\n\n def _trends_to_csv(self, trends_dict, path=\n 'C:/fake-news-framework_Py3/data/input/tryout/'):\n topics = []\n tweets = []\n authors = []\n topic_tweet_connection = []\n for trend in trends_dict.keys():\n for topic in trends_dict[trend].claims:\n topics.append({'claim_id': topic.id, 'title': topic.name})\n for tweet in topic.tweets:\n topic_tweet_connection.append({'claim_id': topic.id,\n 'post_id': tweet.id})\n tweets.append({'post_id': tweet.id, 'author': tweet.\n author_name, 'content': tweet.content,\n 'retweet_count': tweet.retweet_count,\n 'favorite_count': 
tweet.favorite_count})\n authors.append({'name': tweet.author_name})\n pd.DataFrame(topics, columns=claims_columns).to_csv(path +\n 'claims.csv', index=False)\n pd.DataFrame(tweets, columns=post_columns).to_csv(path +\n 'posts.csv', index=False)\n pd.DataFrame(authors, columns=author_columns).to_csv(path +\n 'authors.csv', index=False)\n pd.DataFrame(topic_tweet_connection, columns=connection_columns\n ).to_csv(path + 'claim_tweet_connection.csv', index=False)\n self.i += 1\n\n def _classify_topic(self):\n subprocess.call(['python', 'run_dataset_builder.py',\n 'configuration/config_demo.ini'], cwd=\n 'C:/fake-news-framework_Py3', shell=True)\n results = pd.read_csv(\n 'C:/fake-news-framework_Py3/data/output/D/labeled_predictions.csv'\n )[['author_guid', 'pred']]\n return results\n\n def analyze_trends(self, trends_dict, callback):\n processed_data = {}\n if len(trends_dict) == 0:\n return\n self._trends_to_csv(trends_dict)\n results = self._classify_topic()\n print(\n 'got classifier results\\nparsing the results and running sentiment and emotion'\n )\n for trend in trends_dict.keys():\n print('start trend {}'.format(trend))\n if trend not in processed_data:\n processed_data[trend] = list()\n for topic in trends_dict[trend].claims:\n tweets = list()\n for tweet in topic.tweets:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = self.get_sentiment(tweet.content)\n emotion = self.get_emotion(tweet.content)\n analyzed_tweet = AnalyzedTweet(tweet.id, tweet.\n author_name, tweet.content, tweet.location, tweet.\n date, tweet.trend_id, tweet.favorite_count, tweet.\n retweet_count, emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n print(f'add tweet {tweet} to the topic {topic}')\n print(\n f'save the topic {topic}, with the list of tweets: {tweets}'\n )\n processed_data[trend].append(Claim(topic.name, tweets,\n topic.id))\n time.sleep(1)\n results['pred'] = results['pred'].apply(lambda x: 'True' if x else\n 'Fake')\n return callback(processed_data, trends_dict, results)\n\n def analyze_snopes(self, data, callback):\n processed_data = {}\n for claim in data.keys():\n tweets = list()\n for tweet in data[claim]:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = randint(-3, 3)\n rand = randrange(6)\n emotion = get_emotion_by_id(rand)\n analyzed_tweet = AnalyzedTweet(tweet['id'], tweet['author'],\n tweet['content'], emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n if claim in processed_data.keys():\n processed_data[claim].append(Claim(claim, tweets))\n else:\n processed_data[claim] = Claim(claim, tweets)\n time.sleep(1)\n return callback(processed_data)\n\n def get_claims_from_trend(self, trends_tweets):\n claims = {'claim1': {}, 'claim2': {}}\n for status in trends_tweets:\n rand = randrange(10)\n if rand < 5:\n claims['claim1'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n else:\n claims['claim2'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n return claims\n\n def _get_claim_from_trend(self, trends_tweets):\n print('topic model')\n df = pd.DataFrame([tweet.__dict__ for tweet in trends_tweets])\n df = df[['id', 'content', 'author_name']]\n if len(df) < 15:\n print('less then 15 tweets, creating 1 topic')\n from collections import Counter\n claim_text = ' '.join([txt[0] for txt in Counter(' '.join(df[\n 'content'].str.replace('RT', '').values).split(' ')).\n 
most_common(10)])\n return [Claim(claim_text, trends_tweets, 0)]\n print('build bertopic')\n bt = BERTopic()\n print('fit bertopic')\n topics = bt.fit_transform(df['content'].str.replace('RT', '').values)\n print('done fitting')\n df['topic_id'] = topics[0]\n topic_info = bt.get_topics()\n topics_text = {}\n for key in topic_info.keys():\n lst = topic_info[key]\n topics_text[key] = ' '.join([x[0] for x in lst])\n claims = []\n print('attaching tweet object for topics')\n for t in topic_info.keys():\n fitered = df[df['topic_id'] == t]\n tweets = list(filter(lambda t: t.id in fitered['id'].values,\n trends_tweets))\n claims.append(Claim(topics_text[t], tweets, 0))\n return claims\n", "<import token>\n<function token>\n<assignment token>\n\n\nclass ClassifierAdapter:\n\n def __init__(self):\n self.sid = SentimentIntensityAnalyzer()\n self.i = 0\n\n def get_sentiment(self, text) ->int:\n snt = self.sid.polarity_scores(text)\n return round(snt['pos'] * 3 - snt['neg'] * 3)\n\n def get_emotion(self, text):\n emo = te.get_emotion(text)\n return max(emo, key=emo.get)\n\n def _trends_to_csv(self, trends_dict, path=\n 'C:/fake-news-framework_Py3/data/input/tryout/'):\n topics = []\n tweets = []\n authors = []\n topic_tweet_connection = []\n for trend in trends_dict.keys():\n for topic in trends_dict[trend].claims:\n topics.append({'claim_id': topic.id, 'title': topic.name})\n for tweet in topic.tweets:\n topic_tweet_connection.append({'claim_id': topic.id,\n 'post_id': tweet.id})\n tweets.append({'post_id': tweet.id, 'author': tweet.\n author_name, 'content': tweet.content,\n 'retweet_count': tweet.retweet_count,\n 'favorite_count': tweet.favorite_count})\n authors.append({'name': tweet.author_name})\n pd.DataFrame(topics, columns=claims_columns).to_csv(path +\n 'claims.csv', index=False)\n pd.DataFrame(tweets, columns=post_columns).to_csv(path +\n 'posts.csv', index=False)\n pd.DataFrame(authors, columns=author_columns).to_csv(path +\n 'authors.csv', index=False)\n pd.DataFrame(topic_tweet_connection, columns=connection_columns\n ).to_csv(path + 'claim_tweet_connection.csv', index=False)\n self.i += 1\n\n def _classify_topic(self):\n subprocess.call(['python', 'run_dataset_builder.py',\n 'configuration/config_demo.ini'], cwd=\n 'C:/fake-news-framework_Py3', shell=True)\n results = pd.read_csv(\n 'C:/fake-news-framework_Py3/data/output/D/labeled_predictions.csv'\n )[['author_guid', 'pred']]\n return results\n\n def analyze_trends(self, trends_dict, callback):\n processed_data = {}\n if len(trends_dict) == 0:\n return\n self._trends_to_csv(trends_dict)\n results = self._classify_topic()\n print(\n 'got classifier results\\nparsing the results and running sentiment and emotion'\n )\n for trend in trends_dict.keys():\n print('start trend {}'.format(trend))\n if trend not in processed_data:\n processed_data[trend] = list()\n for topic in trends_dict[trend].claims:\n tweets = list()\n for tweet in topic.tweets:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = self.get_sentiment(tweet.content)\n emotion = self.get_emotion(tweet.content)\n analyzed_tweet = AnalyzedTweet(tweet.id, tweet.\n author_name, tweet.content, tweet.location, tweet.\n date, tweet.trend_id, tweet.favorite_count, tweet.\n retweet_count, emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n print(f'add tweet {tweet} to the topic {topic}')\n print(\n f'save the topic {topic}, with the list of tweets: {tweets}'\n )\n processed_data[trend].append(Claim(topic.name, 
tweets,\n topic.id))\n time.sleep(1)\n results['pred'] = results['pred'].apply(lambda x: 'True' if x else\n 'Fake')\n return callback(processed_data, trends_dict, results)\n\n def analyze_snopes(self, data, callback):\n processed_data = {}\n for claim in data.keys():\n tweets = list()\n for tweet in data[claim]:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = randint(-3, 3)\n rand = randrange(6)\n emotion = get_emotion_by_id(rand)\n analyzed_tweet = AnalyzedTweet(tweet['id'], tweet['author'],\n tweet['content'], emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n if claim in processed_data.keys():\n processed_data[claim].append(Claim(claim, tweets))\n else:\n processed_data[claim] = Claim(claim, tweets)\n time.sleep(1)\n return callback(processed_data)\n\n def get_claims_from_trend(self, trends_tweets):\n claims = {'claim1': {}, 'claim2': {}}\n for status in trends_tweets:\n rand = randrange(10)\n if rand < 5:\n claims['claim1'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n else:\n claims['claim2'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n return claims\n\n def _get_claim_from_trend(self, trends_tweets):\n print('topic model')\n df = pd.DataFrame([tweet.__dict__ for tweet in trends_tweets])\n df = df[['id', 'content', 'author_name']]\n if len(df) < 15:\n print('less then 15 tweets, creating 1 topic')\n from collections import Counter\n claim_text = ' '.join([txt[0] for txt in Counter(' '.join(df[\n 'content'].str.replace('RT', '').values).split(' ')).\n most_common(10)])\n return [Claim(claim_text, trends_tweets, 0)]\n print('build bertopic')\n bt = BERTopic()\n print('fit bertopic')\n topics = bt.fit_transform(df['content'].str.replace('RT', '').values)\n print('done fitting')\n df['topic_id'] = topics[0]\n topic_info = bt.get_topics()\n topics_text = {}\n for key in topic_info.keys():\n lst = topic_info[key]\n topics_text[key] = ' '.join([x[0] for x in lst])\n claims = []\n print('attaching tweet object for topics')\n for t in topic_info.keys():\n fitered = df[df['topic_id'] == t]\n tweets = list(filter(lambda t: t.id in fitered['id'].values,\n trends_tweets))\n claims.append(Claim(topics_text[t], tweets, 0))\n return claims\n", "<import token>\n<function token>\n<assignment token>\n\n\nclass ClassifierAdapter:\n\n def __init__(self):\n self.sid = SentimentIntensityAnalyzer()\n self.i = 0\n <function token>\n\n def get_emotion(self, text):\n emo = te.get_emotion(text)\n return max(emo, key=emo.get)\n\n def _trends_to_csv(self, trends_dict, path=\n 'C:/fake-news-framework_Py3/data/input/tryout/'):\n topics = []\n tweets = []\n authors = []\n topic_tweet_connection = []\n for trend in trends_dict.keys():\n for topic in trends_dict[trend].claims:\n topics.append({'claim_id': topic.id, 'title': topic.name})\n for tweet in topic.tweets:\n topic_tweet_connection.append({'claim_id': topic.id,\n 'post_id': tweet.id})\n tweets.append({'post_id': tweet.id, 'author': tweet.\n author_name, 'content': tweet.content,\n 'retweet_count': tweet.retweet_count,\n 'favorite_count': tweet.favorite_count})\n authors.append({'name': tweet.author_name})\n pd.DataFrame(topics, columns=claims_columns).to_csv(path +\n 'claims.csv', index=False)\n pd.DataFrame(tweets, columns=post_columns).to_csv(path +\n 'posts.csv', index=False)\n pd.DataFrame(authors, columns=author_columns).to_csv(path +\n 'authors.csv', index=False)\n 
pd.DataFrame(topic_tweet_connection, columns=connection_columns\n ).to_csv(path + 'claim_tweet_connection.csv', index=False)\n self.i += 1\n\n def _classify_topic(self):\n subprocess.call(['python', 'run_dataset_builder.py',\n 'configuration/config_demo.ini'], cwd=\n 'C:/fake-news-framework_Py3', shell=True)\n results = pd.read_csv(\n 'C:/fake-news-framework_Py3/data/output/D/labeled_predictions.csv'\n )[['author_guid', 'pred']]\n return results\n\n def analyze_trends(self, trends_dict, callback):\n processed_data = {}\n if len(trends_dict) == 0:\n return\n self._trends_to_csv(trends_dict)\n results = self._classify_topic()\n print(\n 'got classifier results\\nparsing the results and running sentiment and emotion'\n )\n for trend in trends_dict.keys():\n print('start trend {}'.format(trend))\n if trend not in processed_data:\n processed_data[trend] = list()\n for topic in trends_dict[trend].claims:\n tweets = list()\n for tweet in topic.tweets:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = self.get_sentiment(tweet.content)\n emotion = self.get_emotion(tweet.content)\n analyzed_tweet = AnalyzedTweet(tweet.id, tweet.\n author_name, tweet.content, tweet.location, tweet.\n date, tweet.trend_id, tweet.favorite_count, tweet.\n retweet_count, emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n print(f'add tweet {tweet} to the topic {topic}')\n print(\n f'save the topic {topic}, with the list of tweets: {tweets}'\n )\n processed_data[trend].append(Claim(topic.name, tweets,\n topic.id))\n time.sleep(1)\n results['pred'] = results['pred'].apply(lambda x: 'True' if x else\n 'Fake')\n return callback(processed_data, trends_dict, results)\n\n def analyze_snopes(self, data, callback):\n processed_data = {}\n for claim in data.keys():\n tweets = list()\n for tweet in data[claim]:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = randint(-3, 3)\n rand = randrange(6)\n emotion = get_emotion_by_id(rand)\n analyzed_tweet = AnalyzedTweet(tweet['id'], tweet['author'],\n tweet['content'], emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n if claim in processed_data.keys():\n processed_data[claim].append(Claim(claim, tweets))\n else:\n processed_data[claim] = Claim(claim, tweets)\n time.sleep(1)\n return callback(processed_data)\n\n def get_claims_from_trend(self, trends_tweets):\n claims = {'claim1': {}, 'claim2': {}}\n for status in trends_tweets:\n rand = randrange(10)\n if rand < 5:\n claims['claim1'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n else:\n claims['claim2'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n return claims\n\n def _get_claim_from_trend(self, trends_tweets):\n print('topic model')\n df = pd.DataFrame([tweet.__dict__ for tweet in trends_tweets])\n df = df[['id', 'content', 'author_name']]\n if len(df) < 15:\n print('less then 15 tweets, creating 1 topic')\n from collections import Counter\n claim_text = ' '.join([txt[0] for txt in Counter(' '.join(df[\n 'content'].str.replace('RT', '').values).split(' ')).\n most_common(10)])\n return [Claim(claim_text, trends_tweets, 0)]\n print('build bertopic')\n bt = BERTopic()\n print('fit bertopic')\n topics = bt.fit_transform(df['content'].str.replace('RT', '').values)\n print('done fitting')\n df['topic_id'] = topics[0]\n topic_info = bt.get_topics()\n topics_text = {}\n for key in topic_info.keys():\n lst = 
topic_info[key]\n topics_text[key] = ' '.join([x[0] for x in lst])\n claims = []\n print('attaching tweet object for topics')\n for t in topic_info.keys():\n fitered = df[df['topic_id'] == t]\n tweets = list(filter(lambda t: t.id in fitered['id'].values,\n trends_tweets))\n claims.append(Claim(topics_text[t], tweets, 0))\n return claims\n", "<import token>\n<function token>\n<assignment token>\n\n\nclass ClassifierAdapter:\n\n def __init__(self):\n self.sid = SentimentIntensityAnalyzer()\n self.i = 0\n <function token>\n\n def get_emotion(self, text):\n emo = te.get_emotion(text)\n return max(emo, key=emo.get)\n <function token>\n\n def _classify_topic(self):\n subprocess.call(['python', 'run_dataset_builder.py',\n 'configuration/config_demo.ini'], cwd=\n 'C:/fake-news-framework_Py3', shell=True)\n results = pd.read_csv(\n 'C:/fake-news-framework_Py3/data/output/D/labeled_predictions.csv'\n )[['author_guid', 'pred']]\n return results\n\n def analyze_trends(self, trends_dict, callback):\n processed_data = {}\n if len(trends_dict) == 0:\n return\n self._trends_to_csv(trends_dict)\n results = self._classify_topic()\n print(\n 'got classifier results\\nparsing the results and running sentiment and emotion'\n )\n for trend in trends_dict.keys():\n print('start trend {}'.format(trend))\n if trend not in processed_data:\n processed_data[trend] = list()\n for topic in trends_dict[trend].claims:\n tweets = list()\n for tweet in topic.tweets:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = self.get_sentiment(tweet.content)\n emotion = self.get_emotion(tweet.content)\n analyzed_tweet = AnalyzedTweet(tweet.id, tweet.\n author_name, tweet.content, tweet.location, tweet.\n date, tweet.trend_id, tweet.favorite_count, tweet.\n retweet_count, emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n print(f'add tweet {tweet} to the topic {topic}')\n print(\n f'save the topic {topic}, with the list of tweets: {tweets}'\n )\n processed_data[trend].append(Claim(topic.name, tweets,\n topic.id))\n time.sleep(1)\n results['pred'] = results['pred'].apply(lambda x: 'True' if x else\n 'Fake')\n return callback(processed_data, trends_dict, results)\n\n def analyze_snopes(self, data, callback):\n processed_data = {}\n for claim in data.keys():\n tweets = list()\n for tweet in data[claim]:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = randint(-3, 3)\n rand = randrange(6)\n emotion = get_emotion_by_id(rand)\n analyzed_tweet = AnalyzedTweet(tweet['id'], tweet['author'],\n tweet['content'], emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n if claim in processed_data.keys():\n processed_data[claim].append(Claim(claim, tweets))\n else:\n processed_data[claim] = Claim(claim, tweets)\n time.sleep(1)\n return callback(processed_data)\n\n def get_claims_from_trend(self, trends_tweets):\n claims = {'claim1': {}, 'claim2': {}}\n for status in trends_tweets:\n rand = randrange(10)\n if rand < 5:\n claims['claim1'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n else:\n claims['claim2'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n return claims\n\n def _get_claim_from_trend(self, trends_tweets):\n print('topic model')\n df = pd.DataFrame([tweet.__dict__ for tweet in trends_tweets])\n df = df[['id', 'content', 'author_name']]\n if len(df) < 15:\n print('less then 15 tweets, creating 1 topic')\n from 
collections import Counter\n claim_text = ' '.join([txt[0] for txt in Counter(' '.join(df[\n 'content'].str.replace('RT', '').values).split(' ')).\n most_common(10)])\n return [Claim(claim_text, trends_tweets, 0)]\n print('build bertopic')\n bt = BERTopic()\n print('fit bertopic')\n topics = bt.fit_transform(df['content'].str.replace('RT', '').values)\n print('done fitting')\n df['topic_id'] = topics[0]\n topic_info = bt.get_topics()\n topics_text = {}\n for key in topic_info.keys():\n lst = topic_info[key]\n topics_text[key] = ' '.join([x[0] for x in lst])\n claims = []\n print('attaching tweet object for topics')\n for t in topic_info.keys():\n fitered = df[df['topic_id'] == t]\n tweets = list(filter(lambda t: t.id in fitered['id'].values,\n trends_tweets))\n claims.append(Claim(topics_text[t], tweets, 0))\n return claims\n", "<import token>\n<function token>\n<assignment token>\n\n\nclass ClassifierAdapter:\n\n def __init__(self):\n self.sid = SentimentIntensityAnalyzer()\n self.i = 0\n <function token>\n\n def get_emotion(self, text):\n emo = te.get_emotion(text)\n return max(emo, key=emo.get)\n <function token>\n\n def _classify_topic(self):\n subprocess.call(['python', 'run_dataset_builder.py',\n 'configuration/config_demo.ini'], cwd=\n 'C:/fake-news-framework_Py3', shell=True)\n results = pd.read_csv(\n 'C:/fake-news-framework_Py3/data/output/D/labeled_predictions.csv'\n )[['author_guid', 'pred']]\n return results\n\n def analyze_trends(self, trends_dict, callback):\n processed_data = {}\n if len(trends_dict) == 0:\n return\n self._trends_to_csv(trends_dict)\n results = self._classify_topic()\n print(\n 'got classifier results\\nparsing the results and running sentiment and emotion'\n )\n for trend in trends_dict.keys():\n print('start trend {}'.format(trend))\n if trend not in processed_data:\n processed_data[trend] = list()\n for topic in trends_dict[trend].claims:\n tweets = list()\n for tweet in topic.tweets:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = self.get_sentiment(tweet.content)\n emotion = self.get_emotion(tweet.content)\n analyzed_tweet = AnalyzedTweet(tweet.id, tweet.\n author_name, tweet.content, tweet.location, tweet.\n date, tweet.trend_id, tweet.favorite_count, tweet.\n retweet_count, emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n print(f'add tweet {tweet} to the topic {topic}')\n print(\n f'save the topic {topic}, with the list of tweets: {tweets}'\n )\n processed_data[trend].append(Claim(topic.name, tweets,\n topic.id))\n time.sleep(1)\n results['pred'] = results['pred'].apply(lambda x: 'True' if x else\n 'Fake')\n return callback(processed_data, trends_dict, results)\n <function token>\n\n def get_claims_from_trend(self, trends_tweets):\n claims = {'claim1': {}, 'claim2': {}}\n for status in trends_tweets:\n rand = randrange(10)\n if rand < 5:\n claims['claim1'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n else:\n claims['claim2'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n return claims\n\n def _get_claim_from_trend(self, trends_tweets):\n print('topic model')\n df = pd.DataFrame([tweet.__dict__ for tweet in trends_tweets])\n df = df[['id', 'content', 'author_name']]\n if len(df) < 15:\n print('less then 15 tweets, creating 1 topic')\n from collections import Counter\n claim_text = ' '.join([txt[0] for txt in Counter(' '.join(df[\n 'content'].str.replace('RT', '').values).split(' ')).\n 
most_common(10)])\n return [Claim(claim_text, trends_tweets, 0)]\n print('build bertopic')\n bt = BERTopic()\n print('fit bertopic')\n topics = bt.fit_transform(df['content'].str.replace('RT', '').values)\n print('done fitting')\n df['topic_id'] = topics[0]\n topic_info = bt.get_topics()\n topics_text = {}\n for key in topic_info.keys():\n lst = topic_info[key]\n topics_text[key] = ' '.join([x[0] for x in lst])\n claims = []\n print('attaching tweet object for topics')\n for t in topic_info.keys():\n fitered = df[df['topic_id'] == t]\n tweets = list(filter(lambda t: t.id in fitered['id'].values,\n trends_tweets))\n claims.append(Claim(topics_text[t], tweets, 0))\n return claims\n", "<import token>\n<function token>\n<assignment token>\n\n\nclass ClassifierAdapter:\n\n def __init__(self):\n self.sid = SentimentIntensityAnalyzer()\n self.i = 0\n <function token>\n\n def get_emotion(self, text):\n emo = te.get_emotion(text)\n return max(emo, key=emo.get)\n <function token>\n\n def _classify_topic(self):\n subprocess.call(['python', 'run_dataset_builder.py',\n 'configuration/config_demo.ini'], cwd=\n 'C:/fake-news-framework_Py3', shell=True)\n results = pd.read_csv(\n 'C:/fake-news-framework_Py3/data/output/D/labeled_predictions.csv'\n )[['author_guid', 'pred']]\n return results\n\n def analyze_trends(self, trends_dict, callback):\n processed_data = {}\n if len(trends_dict) == 0:\n return\n self._trends_to_csv(trends_dict)\n results = self._classify_topic()\n print(\n 'got classifier results\\nparsing the results and running sentiment and emotion'\n )\n for trend in trends_dict.keys():\n print('start trend {}'.format(trend))\n if trend not in processed_data:\n processed_data[trend] = list()\n for topic in trends_dict[trend].claims:\n tweets = list()\n for tweet in topic.tweets:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = self.get_sentiment(tweet.content)\n emotion = self.get_emotion(tweet.content)\n analyzed_tweet = AnalyzedTweet(tweet.id, tweet.\n author_name, tweet.content, tweet.location, tweet.\n date, tweet.trend_id, tweet.favorite_count, tweet.\n retweet_count, emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n print(f'add tweet {tweet} to the topic {topic}')\n print(\n f'save the topic {topic}, with the list of tweets: {tweets}'\n )\n processed_data[trend].append(Claim(topic.name, tweets,\n topic.id))\n time.sleep(1)\n results['pred'] = results['pred'].apply(lambda x: 'True' if x else\n 'Fake')\n return callback(processed_data, trends_dict, results)\n <function token>\n\n def get_claims_from_trend(self, trends_tweets):\n claims = {'claim1': {}, 'claim2': {}}\n for status in trends_tweets:\n rand = randrange(10)\n if rand < 5:\n claims['claim1'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n else:\n claims['claim2'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n return claims\n <function token>\n", "<import token>\n<function token>\n<assignment token>\n\n\nclass ClassifierAdapter:\n\n def __init__(self):\n self.sid = SentimentIntensityAnalyzer()\n self.i = 0\n <function token>\n <function token>\n <function token>\n\n def _classify_topic(self):\n subprocess.call(['python', 'run_dataset_builder.py',\n 'configuration/config_demo.ini'], cwd=\n 'C:/fake-news-framework_Py3', shell=True)\n results = pd.read_csv(\n 'C:/fake-news-framework_Py3/data/output/D/labeled_predictions.csv'\n )[['author_guid', 'pred']]\n return results\n\n 
def analyze_trends(self, trends_dict, callback):\n processed_data = {}\n if len(trends_dict) == 0:\n return\n self._trends_to_csv(trends_dict)\n results = self._classify_topic()\n print(\n 'got classifier results\\nparsing the results and running sentiment and emotion'\n )\n for trend in trends_dict.keys():\n print('start trend {}'.format(trend))\n if trend not in processed_data:\n processed_data[trend] = list()\n for topic in trends_dict[trend].claims:\n tweets = list()\n for tweet in topic.tweets:\n rand = randrange(100)\n if rand < 50:\n prediction = 'fake'\n else:\n prediction = 'true'\n sentiment = self.get_sentiment(tweet.content)\n emotion = self.get_emotion(tweet.content)\n analyzed_tweet = AnalyzedTweet(tweet.id, tweet.\n author_name, tweet.content, tweet.location, tweet.\n date, tweet.trend_id, tweet.favorite_count, tweet.\n retweet_count, emotion, sentiment, prediction)\n tweets.append(analyzed_tweet)\n print(f'add tweet {tweet} to the topic {topic}')\n print(\n f'save the topic {topic}, with the list of tweets: {tweets}'\n )\n processed_data[trend].append(Claim(topic.name, tweets,\n topic.id))\n time.sleep(1)\n results['pred'] = results['pred'].apply(lambda x: 'True' if x else\n 'Fake')\n return callback(processed_data, trends_dict, results)\n <function token>\n\n def get_claims_from_trend(self, trends_tweets):\n claims = {'claim1': {}, 'claim2': {}}\n for status in trends_tweets:\n rand = randrange(10)\n if rand < 5:\n claims['claim1'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n else:\n claims['claim2'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n return claims\n <function token>\n", "<import token>\n<function token>\n<assignment token>\n\n\nclass ClassifierAdapter:\n\n def __init__(self):\n self.sid = SentimentIntensityAnalyzer()\n self.i = 0\n <function token>\n <function token>\n <function token>\n\n def _classify_topic(self):\n subprocess.call(['python', 'run_dataset_builder.py',\n 'configuration/config_demo.ini'], cwd=\n 'C:/fake-news-framework_Py3', shell=True)\n results = pd.read_csv(\n 'C:/fake-news-framework_Py3/data/output/D/labeled_predictions.csv'\n )[['author_guid', 'pred']]\n return results\n <function token>\n <function token>\n\n def get_claims_from_trend(self, trends_tweets):\n claims = {'claim1': {}, 'claim2': {}}\n for status in trends_tweets:\n rand = randrange(10)\n if rand < 5:\n claims['claim1'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n else:\n claims['claim2'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n return claims\n <function token>\n", "<import token>\n<function token>\n<assignment token>\n\n\nclass ClassifierAdapter:\n\n def __init__(self):\n self.sid = SentimentIntensityAnalyzer()\n self.i = 0\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_claims_from_trend(self, trends_tweets):\n claims = {'claim1': {}, 'claim2': {}}\n for status in trends_tweets:\n rand = randrange(10)\n if rand < 5:\n claims['claim1'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n else:\n claims['claim2'][status.id] = {'id': status.id, 'author':\n status.author_name, 'content': status.content}\n return claims\n <function token>\n", "<import token>\n<function token>\n<assignment token>\n\n\nclass ClassifierAdapter:\n\n def __init__(self):\n self.sid = 
SentimentIntensityAnalyzer()\n self.i = 0\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<function token>\n<assignment token>\n\n\nclass ClassifierAdapter:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<function token>\n<assignment token>\n<class token>\n" ]
false
99,433
201dd6d498e77b6b604f512aca1af0c55688f5ae
n1 = float(input('enter a grade: '))
n2 = float(input('enter another grade: '))
m = (n1 + n2) / 2

print('with grades {:.1f} and {:.1f} your average is \n{:.1f}'.format(n1, n2, m))
[ "n1=float(input('digite uma nota :'))\nn2=float(input('digite outra nota : '))\nm = (n1+n2) /2\n\nprint('digitando notas entre {:.1f} e {:.1f} sua média é \\n{:.1f}'.format(n1,n2,m))\n", "n1 = float(input('digite uma nota :'))\nn2 = float(input('digite outra nota : '))\nm = (n1 + n2) / 2\nprint(\"\"\"digitando notas entre {:.1f} e {:.1f} sua média é \n{:.1f}\"\"\".\n format(n1, n2, m))\n", "<assignment token>\nprint(\"\"\"digitando notas entre {:.1f} e {:.1f} sua média é \n{:.1f}\"\"\".\n format(n1, n2, m))\n", "<assignment token>\n<code token>\n" ]
false
99,434
8c111c1967b0d38e31252b3193cb487e4fa3ae95
from django.db import models


# Create your models here.
class Programming_Authors(models.Model):
    programming_languages = models.CharField(max_length=20)
    authors = models.CharField(max_length=100)
    date_of_birth = models.DateField()

    def __str__(self):
        return self.authors


class ProgrammingFramework(models.Model):
    framework_name = models.CharField(max_length=40)
    framework_type = models.CharField(max_length=40)
    programming_authors = models.ForeignKey(Programming_Authors, on_delete=models.CASCADE)

    def __str__(self):
        return self.framework_name
[ "from django.db import models\n\n# Create your models here.\nclass Programming_Authors(models.Model):\n programming_languages = models.CharField(max_length=20)\n authors = models.CharField(max_length=100)\n date_of_birth = models.DateField() \n def __str__(self):\n return self.authors\n\nclass ProgrammingFramework(models.Model):\n framework_name = models.CharField(max_length=40)\n framework_type = models.CharField(max_length=40)\n programming_authors = models.ForeignKey(Programming_Authors,on_delete=models.CASCADE)\n\n def __str__(self):\n return self.framework_name\n", "from django.db import models\n\n\nclass Programming_Authors(models.Model):\n programming_languages = models.CharField(max_length=20)\n authors = models.CharField(max_length=100)\n date_of_birth = models.DateField()\n\n def __str__(self):\n return self.authors\n\n\nclass ProgrammingFramework(models.Model):\n framework_name = models.CharField(max_length=40)\n framework_type = models.CharField(max_length=40)\n programming_authors = models.ForeignKey(Programming_Authors, on_delete=\n models.CASCADE)\n\n def __str__(self):\n return self.framework_name\n", "<import token>\n\n\nclass Programming_Authors(models.Model):\n programming_languages = models.CharField(max_length=20)\n authors = models.CharField(max_length=100)\n date_of_birth = models.DateField()\n\n def __str__(self):\n return self.authors\n\n\nclass ProgrammingFramework(models.Model):\n framework_name = models.CharField(max_length=40)\n framework_type = models.CharField(max_length=40)\n programming_authors = models.ForeignKey(Programming_Authors, on_delete=\n models.CASCADE)\n\n def __str__(self):\n return self.framework_name\n", "<import token>\n\n\nclass Programming_Authors(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __str__(self):\n return self.authors\n\n\nclass ProgrammingFramework(models.Model):\n framework_name = models.CharField(max_length=40)\n framework_type = models.CharField(max_length=40)\n programming_authors = models.ForeignKey(Programming_Authors, on_delete=\n models.CASCADE)\n\n def __str__(self):\n return self.framework_name\n", "<import token>\n\n\nclass Programming_Authors(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n\nclass ProgrammingFramework(models.Model):\n framework_name = models.CharField(max_length=40)\n framework_type = models.CharField(max_length=40)\n programming_authors = models.ForeignKey(Programming_Authors, on_delete=\n models.CASCADE)\n\n def __str__(self):\n return self.framework_name\n", "<import token>\n<class token>\n\n\nclass ProgrammingFramework(models.Model):\n framework_name = models.CharField(max_length=40)\n framework_type = models.CharField(max_length=40)\n programming_authors = models.ForeignKey(Programming_Authors, on_delete=\n models.CASCADE)\n\n def __str__(self):\n return self.framework_name\n", "<import token>\n<class token>\n\n\nclass ProgrammingFramework(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __str__(self):\n return self.framework_name\n", "<import token>\n<class token>\n\n\nclass ProgrammingFramework(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n", "<import token>\n<class token>\n<class token>\n" ]
false
99,435
88e5a31ae5cd1d4224ebe650171669263b108c16
import rosbag
import matplotlib.pyplot as plt

from collections import namedtuple
from sim_parser import Parser

def main():
    filename = '_2019-09-06-10-46-56.bag'
    path = '../../../data/holodeck_sim/'
    vals = plot(filename, path)

    return vals


def plot(filename, pathname):
    parse = Parser()
    bag = rosbag.Bag(pathname + filename)
    variables = parse.get_variables(bag, filename)

    # Combine the seconds/nanoseconds stamps into a single time axis.
    sec = variables.sec
    nsec = variables.nsec
    X = variables.X
    Y = variables.Y
    Z = variables.Z
    time = [0]*len(sec)
    for i in range(0, len(sec)):
        time[i] = sec[i] + nsec[i]*1e-9

    # Duplicate each commanded value so it plots as a step (zero-order hold):
    # each command holds until the next command time, or the end of the log.
    sec_c = [0]*(2*len(variables.sec_c))
    Xc = [0]*(2*len(variables.sec_c))
    Yc = [0]*(2*len(variables.sec_c))
    Zc = [0]*(2*len(variables.sec_c))
    for i in range(0, len(variables.sec_c)):
        sec_c[2*i] = variables.sec_c[i]
        if i < len(variables.sec_c)-1:
            sec_c[2*i+1] = variables.sec_c[i+1]
        else:
            sec_c[2*i+1] = time[len(time)-1]

        Xc[2*i] = variables.Xc[i]
        Xc[2*i+1] = variables.Xc[i]
        Yc[2*i] = variables.Yc[i]
        Yc[2*i+1] = variables.Yc[i]
        Zc[2*i] = variables.Fc[i]
        Zc[2*i+1] = variables.Fc[i]

    fig = plt.figure(1)
    plt.plot(sec_c, Xc, color="blue", linewidth=1, label="Xc")
    plt.plot(time, X, color="red", linewidth=1, label="X")
    # plt.xlim([-5, 165])
    # plt.ylim([-.5, 14.5])
    plt.legend(bbox_to_anchor=(1, .4), prop={'size': 8}, frameon=True)
    plt.xlabel('Time (s)')
    plt.ylabel('X Position')
    fig.suptitle('X')
    # fig.savefig(pathname + filename + 'x')

    fig = plt.figure(2)
    plt.plot(sec_c, Yc, color="blue", linewidth=1, label="Yc")
    plt.plot(time, Y, color="red", linewidth=1, label="Y")
    # plt.xlim([-5, 165])
    # plt.ylim([-7.5, 7.5])
    plt.legend(bbox_to_anchor=(1, .4), prop={'size': 8}, frameon=True)
    plt.xlabel('Time (s)')
    plt.ylabel('Y Position')
    fig.suptitle('Y')
    # fig.savefig(pathname + filename + 'y')

    fig = plt.figure(3)
    plt.plot(sec_c, Zc, color="blue", linewidth=1, label="Zc")
    plt.plot(time, Z, color="red", linewidth=1, label="Z")
    # plt.xlim([-5, 165])
    # plt.ylim([-50, 5])
    plt.legend(bbox_to_anchor=(1, .4), prop={'size': 8}, frameon=True)
    plt.xlabel('Time (s)')
    plt.ylabel('Z Position')
    fig.suptitle('Z')
    # fig.savefig(pathname + filename + 'z')

    # Show all three figures with a single blocking call at the end, so
    # figures 2 and 3 are not created after the event loop has already blocked.
    plt.show()

    MyStruct = namedtuple("mystruct", ("sec_c", "Xc", "Yc", "Zc", "Fc", "sec", "nsec", "time", "X", "Y", "Z"))
    vals = MyStruct(variables.sec_c, variables.Xc, variables.Yc, variables.Zc, variables.Fc, sec, nsec, time, X, Y, Z)

    return vals

if __name__ == '__main__':
    vals = main()
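# A minimal sketch of the zero-order-hold trick used above (toy values):
# commands [(t=0, 1.0), (t=2, 3.0)] over a 5 s log become the step series
#   t = [0, 2, 2, 5],  v = [1.0, 1.0, 3.0, 3.0]
# which plt.plot draws as a staircase rather than a ramp between commands.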
[ "from IPython.core.debugger import set_trace\nfrom importlib import reload\nimport rosbag\nimport matplotlib.pyplot as plt\nimport numpy as np\n\nfrom collections import namedtuple\nfrom sim_parser import Parser\nfrom statistics import mode\n\ndef main():\n\tfilename = '_2019-09-06-10-46-56.bag'\n\tpath = '../../../data/holodeck_sim/'\n\tvals = plot(filename, path)\n\n\treturn vals\n\n\ndef plot(filename, pathname):\n\tparse = Parser()\n\tbag = rosbag.Bag(pathname + filename)\n\tvariables = parse.get_variables(bag, filename)\n\n\tsec = variables.sec\n\tnsec = variables.nsec\n\tX = variables.X\n\tY = variables.Y\n\tZ = variables.Z\n\ttime = [0]*len(sec)\n\tfor i in range(0,len(sec)):\n\t\ttime[i] = sec[i] + nsec[i]*1e-9\n\n\tsec_c = [0]*(2*len(variables.sec_c))\n\tXc = [0]*(2*len(variables.sec_c))\n\tYc = [0]*(2*len(variables.sec_c))\n\tZc = [0]*(2*len(variables.sec_c))\n\tfor i in range(0,len(variables.sec_c)):\n\t\tsec_c[2*i] = variables.sec_c[i]\n\t\tif i < len(variables.sec_c)-1:\n\t\t\tsec_c[2*i+1] = variables.sec_c[i+1]\n\t\telse:\n\t\t\tsec_c[2*i+1] = time[len(time)-1]\n\n\t\tXc[2*i] = variables.Xc[i]\n\t\tXc[2*i+1] = variables.Xc[i]\t\t\n\t\tYc[2*i] = variables.Yc[i]\n\t\tYc[2*i+1] = variables.Yc[i]\t\t\n\t\tZc[2*i] = variables.Fc[i]\n\t\tZc[2*i+1] = variables.Fc[i]\t\t\n\n\tfig = plt.figure(1)\n\tplt.plot(sec_c, Xc, color = \"blue\", linewidth = 1, label = \"Xc\")\n\tplt.plot(time, X, color = \"red\", linewidth = 1, label = \"X\")\n\t# plt.xlim([-5, 165])\n\t# plt.ylim([-.5, 14.5])\n\tplt.legend(bbox_to_anchor=(1, .4), prop={'size': 8}, frameon=True)\n\tplt.xlabel('Time (s)')\n\tplt.ylabel('X Position')\n\tfig.suptitle('X')\n\t# fig.savefig(path + filename + x')\n\tplt.show()\n\n\tfig = plt.figure(2)\n\tplt.plot(sec_c, Yc, color = \"blue\", linewidth = 1, label = \"Yc\")\n\tplt.plot(time, Y, color = \"red\", linewidth = 1, label = \"Y\")\n\t# plt.xlim([-5, 165])\n\t# plt.ylim([-7.5, 7.5])\n\tplt.legend(bbox_to_anchor=(1, .4), prop={'size': 8}, frameon=True)\n\tplt.xlabel('Time (s)')\n\tplt.ylabel('Y Position')\n\tfig.suptitle('Y')\n\t# fig.savefig(path + filename + 'y')\n\n\tfig = plt.figure(3)\n\tplt.plot(sec_c, Zc, color = \"blue\", linewidth = 1, label = \"Zc\")\n\tplt.plot(time, Z, color = \"red\", linewidth = 1, label = \"Z\")\n\t# plt.xlim([-5, 165])\n\t# plt.ylim([-50, 5])\n\tplt.legend(bbox_to_anchor=(1, .4), prop={'size': 8}, frameon=True)\n\tplt.xlabel('Time (s)')\n\tplt.ylabel('Z Position')\n\tfig.suptitle('Z')\n\t# fig.savefig(path + filename + 'z')\n\n\tMyStruct = namedtuple(\"mystruct\", (\"sec_c\", \"Xc\", \"Yc\", \"Zc\", \"Fc\", \"sec\", \"nsec\", \"time\", \"X\", \"Y\", \"Z\"))\n\tvals = MyStruct(variables.sec_c, variables.Xc, variables.Yc, variables.Zc, variables.Fc, sec, nsec, time, X, Y, Z)\n\n\treturn vals\t\n\nif __name__ == '__main__':\n vals = main()", "from IPython.core.debugger import set_trace\nfrom importlib import reload\nimport rosbag\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom collections import namedtuple\nfrom sim_parser import Parser\nfrom statistics import mode\n\n\ndef main():\n filename = '_2019-09-06-10-46-56.bag'\n path = '../../../data/holodeck_sim/'\n vals = plot(filename, path)\n return vals\n\n\ndef plot(filename, pathname):\n parse = Parser()\n bag = rosbag.Bag(pathname + filename)\n variables = parse.get_variables(bag, filename)\n sec = variables.sec\n nsec = variables.nsec\n X = variables.X\n Y = variables.Y\n Z = variables.Z\n time = [0] * len(sec)\n for i in range(0, len(sec)):\n time[i] = sec[i] + nsec[i] * 1e-09\n 
sec_c = [0] * (2 * len(variables.sec_c))\n Xc = [0] * (2 * len(variables.sec_c))\n Yc = [0] * (2 * len(variables.sec_c))\n Zc = [0] * (2 * len(variables.sec_c))\n for i in range(0, len(variables.sec_c)):\n sec_c[2 * i] = variables.sec_c[i]\n if i < len(variables.sec_c) - 1:\n sec_c[2 * i + 1] = variables.sec_c[i + 1]\n else:\n sec_c[2 * i + 1] = time[len(time) - 1]\n Xc[2 * i] = variables.Xc[i]\n Xc[2 * i + 1] = variables.Xc[i]\n Yc[2 * i] = variables.Yc[i]\n Yc[2 * i + 1] = variables.Yc[i]\n Zc[2 * i] = variables.Fc[i]\n Zc[2 * i + 1] = variables.Fc[i]\n fig = plt.figure(1)\n plt.plot(sec_c, Xc, color='blue', linewidth=1, label='Xc')\n plt.plot(time, X, color='red', linewidth=1, label='X')\n plt.legend(bbox_to_anchor=(1, 0.4), prop={'size': 8}, frameon=True)\n plt.xlabel('Time (s)')\n plt.ylabel('X Position')\n fig.suptitle('X')\n plt.show()\n fig = plt.figure(2)\n plt.plot(sec_c, Yc, color='blue', linewidth=1, label='Yc')\n plt.plot(time, Y, color='red', linewidth=1, label='Y')\n plt.legend(bbox_to_anchor=(1, 0.4), prop={'size': 8}, frameon=True)\n plt.xlabel('Time (s)')\n plt.ylabel('Y Position')\n fig.suptitle('Y')\n fig = plt.figure(3)\n plt.plot(sec_c, Zc, color='blue', linewidth=1, label='Zc')\n plt.plot(time, Z, color='red', linewidth=1, label='Z')\n plt.legend(bbox_to_anchor=(1, 0.4), prop={'size': 8}, frameon=True)\n plt.xlabel('Time (s)')\n plt.ylabel('Z Position')\n fig.suptitle('Z')\n MyStruct = namedtuple('mystruct', ('sec_c', 'Xc', 'Yc', 'Zc', 'Fc',\n 'sec', 'nsec', 'time', 'X', 'Y', 'Z'))\n vals = MyStruct(variables.sec_c, variables.Xc, variables.Yc, variables.\n Zc, variables.Fc, sec, nsec, time, X, Y, Z)\n return vals\n\n\nif __name__ == '__main__':\n vals = main()\n", "<import token>\n\n\ndef main():\n filename = '_2019-09-06-10-46-56.bag'\n path = '../../../data/holodeck_sim/'\n vals = plot(filename, path)\n return vals\n\n\ndef plot(filename, pathname):\n parse = Parser()\n bag = rosbag.Bag(pathname + filename)\n variables = parse.get_variables(bag, filename)\n sec = variables.sec\n nsec = variables.nsec\n X = variables.X\n Y = variables.Y\n Z = variables.Z\n time = [0] * len(sec)\n for i in range(0, len(sec)):\n time[i] = sec[i] + nsec[i] * 1e-09\n sec_c = [0] * (2 * len(variables.sec_c))\n Xc = [0] * (2 * len(variables.sec_c))\n Yc = [0] * (2 * len(variables.sec_c))\n Zc = [0] * (2 * len(variables.sec_c))\n for i in range(0, len(variables.sec_c)):\n sec_c[2 * i] = variables.sec_c[i]\n if i < len(variables.sec_c) - 1:\n sec_c[2 * i + 1] = variables.sec_c[i + 1]\n else:\n sec_c[2 * i + 1] = time[len(time) - 1]\n Xc[2 * i] = variables.Xc[i]\n Xc[2 * i + 1] = variables.Xc[i]\n Yc[2 * i] = variables.Yc[i]\n Yc[2 * i + 1] = variables.Yc[i]\n Zc[2 * i] = variables.Fc[i]\n Zc[2 * i + 1] = variables.Fc[i]\n fig = plt.figure(1)\n plt.plot(sec_c, Xc, color='blue', linewidth=1, label='Xc')\n plt.plot(time, X, color='red', linewidth=1, label='X')\n plt.legend(bbox_to_anchor=(1, 0.4), prop={'size': 8}, frameon=True)\n plt.xlabel('Time (s)')\n plt.ylabel('X Position')\n fig.suptitle('X')\n plt.show()\n fig = plt.figure(2)\n plt.plot(sec_c, Yc, color='blue', linewidth=1, label='Yc')\n plt.plot(time, Y, color='red', linewidth=1, label='Y')\n plt.legend(bbox_to_anchor=(1, 0.4), prop={'size': 8}, frameon=True)\n plt.xlabel('Time (s)')\n plt.ylabel('Y Position')\n fig.suptitle('Y')\n fig = plt.figure(3)\n plt.plot(sec_c, Zc, color='blue', linewidth=1, label='Zc')\n plt.plot(time, Z, color='red', linewidth=1, label='Z')\n plt.legend(bbox_to_anchor=(1, 0.4), prop={'size': 8}, 
frameon=True)\n plt.xlabel('Time (s)')\n plt.ylabel('Z Position')\n fig.suptitle('Z')\n MyStruct = namedtuple('mystruct', ('sec_c', 'Xc', 'Yc', 'Zc', 'Fc',\n 'sec', 'nsec', 'time', 'X', 'Y', 'Z'))\n vals = MyStruct(variables.sec_c, variables.Xc, variables.Yc, variables.\n Zc, variables.Fc, sec, nsec, time, X, Y, Z)\n return vals\n\n\nif __name__ == '__main__':\n vals = main()\n", "<import token>\n\n\ndef main():\n filename = '_2019-09-06-10-46-56.bag'\n path = '../../../data/holodeck_sim/'\n vals = plot(filename, path)\n return vals\n\n\ndef plot(filename, pathname):\n parse = Parser()\n bag = rosbag.Bag(pathname + filename)\n variables = parse.get_variables(bag, filename)\n sec = variables.sec\n nsec = variables.nsec\n X = variables.X\n Y = variables.Y\n Z = variables.Z\n time = [0] * len(sec)\n for i in range(0, len(sec)):\n time[i] = sec[i] + nsec[i] * 1e-09\n sec_c = [0] * (2 * len(variables.sec_c))\n Xc = [0] * (2 * len(variables.sec_c))\n Yc = [0] * (2 * len(variables.sec_c))\n Zc = [0] * (2 * len(variables.sec_c))\n for i in range(0, len(variables.sec_c)):\n sec_c[2 * i] = variables.sec_c[i]\n if i < len(variables.sec_c) - 1:\n sec_c[2 * i + 1] = variables.sec_c[i + 1]\n else:\n sec_c[2 * i + 1] = time[len(time) - 1]\n Xc[2 * i] = variables.Xc[i]\n Xc[2 * i + 1] = variables.Xc[i]\n Yc[2 * i] = variables.Yc[i]\n Yc[2 * i + 1] = variables.Yc[i]\n Zc[2 * i] = variables.Fc[i]\n Zc[2 * i + 1] = variables.Fc[i]\n fig = plt.figure(1)\n plt.plot(sec_c, Xc, color='blue', linewidth=1, label='Xc')\n plt.plot(time, X, color='red', linewidth=1, label='X')\n plt.legend(bbox_to_anchor=(1, 0.4), prop={'size': 8}, frameon=True)\n plt.xlabel('Time (s)')\n plt.ylabel('X Position')\n fig.suptitle('X')\n plt.show()\n fig = plt.figure(2)\n plt.plot(sec_c, Yc, color='blue', linewidth=1, label='Yc')\n plt.plot(time, Y, color='red', linewidth=1, label='Y')\n plt.legend(bbox_to_anchor=(1, 0.4), prop={'size': 8}, frameon=True)\n plt.xlabel('Time (s)')\n plt.ylabel('Y Position')\n fig.suptitle('Y')\n fig = plt.figure(3)\n plt.plot(sec_c, Zc, color='blue', linewidth=1, label='Zc')\n plt.plot(time, Z, color='red', linewidth=1, label='Z')\n plt.legend(bbox_to_anchor=(1, 0.4), prop={'size': 8}, frameon=True)\n plt.xlabel('Time (s)')\n plt.ylabel('Z Position')\n fig.suptitle('Z')\n MyStruct = namedtuple('mystruct', ('sec_c', 'Xc', 'Yc', 'Zc', 'Fc',\n 'sec', 'nsec', 'time', 'X', 'Y', 'Z'))\n vals = MyStruct(variables.sec_c, variables.Xc, variables.Yc, variables.\n Zc, variables.Fc, sec, nsec, time, X, Y, Z)\n return vals\n\n\n<code token>\n", "<import token>\n<function token>\n\n\ndef plot(filename, pathname):\n parse = Parser()\n bag = rosbag.Bag(pathname + filename)\n variables = parse.get_variables(bag, filename)\n sec = variables.sec\n nsec = variables.nsec\n X = variables.X\n Y = variables.Y\n Z = variables.Z\n time = [0] * len(sec)\n for i in range(0, len(sec)):\n time[i] = sec[i] + nsec[i] * 1e-09\n sec_c = [0] * (2 * len(variables.sec_c))\n Xc = [0] * (2 * len(variables.sec_c))\n Yc = [0] * (2 * len(variables.sec_c))\n Zc = [0] * (2 * len(variables.sec_c))\n for i in range(0, len(variables.sec_c)):\n sec_c[2 * i] = variables.sec_c[i]\n if i < len(variables.sec_c) - 1:\n sec_c[2 * i + 1] = variables.sec_c[i + 1]\n else:\n sec_c[2 * i + 1] = time[len(time) - 1]\n Xc[2 * i] = variables.Xc[i]\n Xc[2 * i + 1] = variables.Xc[i]\n Yc[2 * i] = variables.Yc[i]\n Yc[2 * i + 1] = variables.Yc[i]\n Zc[2 * i] = variables.Fc[i]\n Zc[2 * i + 1] = variables.Fc[i]\n fig = plt.figure(1)\n plt.plot(sec_c, Xc, color='blue', 
linewidth=1, label='Xc')\n plt.plot(time, X, color='red', linewidth=1, label='X')\n plt.legend(bbox_to_anchor=(1, 0.4), prop={'size': 8}, frameon=True)\n plt.xlabel('Time (s)')\n plt.ylabel('X Position')\n fig.suptitle('X')\n plt.show()\n fig = plt.figure(2)\n plt.plot(sec_c, Yc, color='blue', linewidth=1, label='Yc')\n plt.plot(time, Y, color='red', linewidth=1, label='Y')\n plt.legend(bbox_to_anchor=(1, 0.4), prop={'size': 8}, frameon=True)\n plt.xlabel('Time (s)')\n plt.ylabel('Y Position')\n fig.suptitle('Y')\n fig = plt.figure(3)\n plt.plot(sec_c, Zc, color='blue', linewidth=1, label='Zc')\n plt.plot(time, Z, color='red', linewidth=1, label='Z')\n plt.legend(bbox_to_anchor=(1, 0.4), prop={'size': 8}, frameon=True)\n plt.xlabel('Time (s)')\n plt.ylabel('Z Position')\n fig.suptitle('Z')\n MyStruct = namedtuple('mystruct', ('sec_c', 'Xc', 'Yc', 'Zc', 'Fc',\n 'sec', 'nsec', 'time', 'X', 'Y', 'Z'))\n vals = MyStruct(variables.sec_c, variables.Xc, variables.Yc, variables.\n Zc, variables.Fc, sec, nsec, time, X, Y, Z)\n return vals\n\n\n<code token>\n", "<import token>\n<function token>\n<function token>\n<code token>\n" ]
false
99,436
b8791052ff396600f23a92df2e52d3b65634ea40
from typing import List

class Solution:
    def subArrayRanges(self, nums: List[int]) -> int:
        # Brute force: for every subarray nums[i..j], keep a running min and
        # max and accumulate (max - min). O(n^2) time, O(1) extra space.
        ret = 0
        for i in range(len(nums)):
            mine = maxe = nums[i]
            for j in range(i + 1, len(nums)):
                mine = min(mine, nums[j])
                maxe = max(maxe, nums[j])
                ret += maxe - mine
        return ret
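# A quick sanity check (hypothetical input): for nums = [1, 2, 3] the
# subarray ranges are 0, 0, 0, 1, 1, and 2, so:
#
#   assert Solution().subArrayRanges([1, 2, 3]) == 4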
[ "class Solution:\n def subArrayRanges(self, nums: List[int]) -> int:\n ret = 0\n for i in range(len(nums)) :\n mine = maxe = nums[i]\n for j in range(i + 1, len(nums)) :\n mine = min(mine, nums[j])\n maxe = max(maxe, nums[j])\n ret += maxe - mine\n return ret\n", "class Solution:\n\n def subArrayRanges(self, nums: List[int]) ->int:\n ret = 0\n for i in range(len(nums)):\n mine = maxe = nums[i]\n for j in range(i + 1, len(nums)):\n mine = min(mine, nums[j])\n maxe = max(maxe, nums[j])\n ret += maxe - mine\n return ret\n", "class Solution:\n <function token>\n", "<class token>\n" ]
false
99,437
5b6d34f562efcd3126f01ef3dbf6e1bd6f8b9b69
from django.test import TestCase
from django.urls import reverse

from django.contrib.auth.models import User
from tests.models import TestGroup, TestUnit
from .models import Interview

# Create your tests here.
# View tests:
# - all views require login_required (tested with an anonymous user and with an authenticated one)
# - view interview/test/<test_id>
# - view interview/<interview_id>/question
# - view interview/<interview_id>/report
# - view interview/test/<test_id>
#   - if the test does not exist in the database -> 404
#   - if the test is complete -> redirect(report)
#   - if the test is new -> redirect(question)
#   - if the test is not complete -> redirect(question)
# - view interview/<interview_id>/question
#   - the interview is missing from the database -> 404
#   - the interview exists in the database - fetch a question
#   - the interview is past the last question (survey complete) - redirect to report
# - view interview/<interview_id>/report
#   - if the interview is missing from the database -> 404
#   - if the interview is finished - display the results
#   - if the interview is not finished -> 404


def create_test(test_owner):
    tg = TestGroup.objects.create(
        name = 'testing group',
        description = 'group for testing interview',
    )
    tu = tg.testunit_set.create(
        name = 'test unit',
        description = 'test in group',
        owner = test_owner,
    )
    q = tu.question_set.create(
        text = 'test question text',
    )
    q.answer_set.create(
        name = 'question answer 1',
        right = True,
    )
    q.answer_set.create(
        name = 'question answer 2',
        right = False,
    )
    return tu


def create_user(**test_data):
    user = User.objects.create_user(**test_data)
    return user


class TestLoginRequried(TestCase):
    def setUp(self):
        self.user = create_user(
            username='testuser',
            password='password',
        )
        self.testunit = create_test(self.user)

    def test_anonymous_interviews_open(self):
        response = self.client.get(reverse('interview:open', args=(1,)))
        self.assertEqual(response.status_code, 302)
        self.assertIn('login', response.url)

    def test_anonymous_interviews_question(self):
        response = self.client.get(reverse('interview:question', args=(1,)))
        self.assertEqual(response.status_code, 302)
        self.assertIn('login', response.url)

    def test_anonymous_interviews_report(self):
        response = self.client.get(reverse('interview:report', args=(1,)))
        self.assertEqual(response.status_code, 302)
        self.assertIn('login', response.url)


class TestViewInterviewOpen(TestCase):
    def setUp(self):
        self.user = create_user(
            username='testuser',
            password='password',
        )
        self.testunit = create_test(self.user)
        self.client.login(
            username='testuser',
            password='password',
        )

    def test_open_broken_test_interview(self):
        response = self.client.get(reverse('interview:open', args=(2,)))
        self.assertEqual(response.status_code, 404)

    def test_open_complete_test_interview(self):
        tu = TestUnit.objects.get()
        interview = Interview.objects.create(
            user = self.user,
            testunit = tu,
            is_complete = True,
        )
        response = self.client.get(reverse('interview:open', args=(tu.id,)))
        self.assertEqual(response.status_code, 302)
        self.assertIn('report', response.url)

    def test_open_new_test_interview(self):
        tu = TestUnit.objects.get()
        response = self.client.get(reverse('interview:open', args=(tu.id,)))
        self.assertEqual(response.status_code, 302)
        self.assertIn('question', response.url)

    def test_open_early_runed_test_interview(self):
        tu = TestUnit.objects.get()
        interview = Interview.objects.create(
            user = self.user,
            testunit = tu,
            is_complete = False,
        )
        response = self.client.get(reverse('interview:open', args=(tu.id,)))
        self.assertEqual(response.status_code, 302)
        self.assertIn('question', response.url)


class TestViewReplyQuestion(TestCase):
    def setUp(self):
        self.user = create_user(
            username='testuser',
            password='password',
        )
        self.testunit = create_test(self.user)
        self.client.login(
            username='testuser',
            password='password',
        )

    def test_open_broken_interview_question(self):
        response = self.client.get(reverse('interview:question', args=(2,)))
        self.assertEqual(response.status_code, 404)

    def test_open_new_interview_question(self):
        tu = TestUnit.objects.get()
        interview = Interview.objects.create(
            user = self.user,
            testunit = tu,
            is_complete = False,
        )
        response = self.client.get(reverse('interview:question', args=(tu.id,)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(
            response,
            interview.get_next_question().question.text
        )

    def test_open_report_after_reply_last_question(self):
        tu = TestUnit.objects.get()
        interview = Interview.objects.create(
            user = self.user,
            testunit = tu,
            is_complete = False,
        )
        question = interview.get_next_question().question
        answer = question.answer_set.last()
        response = self.client.post(
            reverse('interview:question', args=(interview.id,)),
            {str(answer.id): 'on'},
        )
        response = self.client.get(reverse('interview:question', args=(tu.id,)))
        self.assertEqual(response.status_code, 302)
        self.assertIn('report', response.url)


class TestViewInterviewReport(TestCase):
    def setUp(self):
        self.user = create_user(
            username='testuser',
            password='password',
        )
        self.testunit = create_test(self.user)
        self.client.login(
            username='testuser',
            password='password',
        )

    def test_report_for_broken_test_interview(self):
        response = self.client.get(reverse('interview:report', args=(2,)))
        self.assertEqual(response.status_code, 404)

    def test_report_for_complete_test_interview(self):
        tu = TestUnit.objects.get()
        interview = Interview.objects.create(
            user = self.user,
            testunit = tu,
            is_complete = False,
        )
        question = interview.get_next_question().question
        answer = question.answer_set.first()
        response = self.client.post(
            reverse('interview:question', args=(interview.id,)),
            {str(answer.id): 'on'},
            follow=True,
        )
        response = self.client.get(reverse('interview:report', args=(interview.id,)))
        self.assertEqual(response.status_code, 200)

    def test_report_for_early_runed_test_interview(self):
        tu = TestUnit.objects.get()
        interview = Interview.objects.create(
            user = self.user,
            testunit = tu,
            is_complete = False,
        )
        response = self.client.get(reverse('interview:report', args=(interview.id,)))
        self.assertEqual(response.status_code, 404)
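# To run these tests (standard Django tooling; the app name is assumed to
# be "interview" from the URL namespace used above):
#
#   python manage.py test interview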
[ "from django.test import TestCase\nfrom django.urls import reverse\n\nfrom django.contrib.auth.models import User\nfrom tests.models import TestGroup, TestUnit\nfrom .models import Interview\n\n# Create your tests here.\n# Тестирование представлений\n# - все представления требуют login_required (тестирование с анонимным пользователем и аутентифицированным)\n# - представление interview/test/<test_id>\n# - представление interview/<interview_id>/question\n# - представление interview/<interview_id>/report\n# - представление interview/test/<test_id>\n# - если тест не задан в базе данных -> 404\n# - если тест завершен -> redirect(report)\n# - если тест новый -> redirect(question)\n# - если тест не завершен -> redirect(question)\n# - представление interview/<interview_id>/question\n# - интервью отсутствует в базе данных -> 404\n# - интервью есть в базе данных - получениие вопроса\n# - интервью - после последнего вопроса (опрос завершен) перенаправление на report\n# - представление interview/<interview_id>/report\n# - если интервью отсутствует в базе данных -> 404\n# - если интервью закончено - отображение результатов\n# - если интервью не закончено -> 404\n\n\ndef create_test(test_owner):\n tg = TestGroup.objects.create(\n name = 'testing group',\n description = 'group for testing interview',\n )\n tu = tg.testunit_set.create(\n name = 'test unit',\n description = 'test in group',\n owner = test_owner,\n )\n q = tu.question_set.create(\n text = 'test question text',\n )\n q.answer_set.create(\n name = 'question answer 1',\n right = True,\n )\n q.answer_set.create(\n name = 'question answer 2',\n right = False,\n )\n return tu\n\n\ndef create_user(**test_data):\n user = User.objects.create_user(**test_data)\n return user\n\n\nclass TestLoginRequried(TestCase):\n def setUp(self):\n self.user = create_user(\n username='testuser',\n password='password',\n )\n self.testunit = create_test(self.user)\n\n def test_anonymous_interviews_open(self):\n response = self.client.get(reverse('interview:open', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n def test_anonymous_interviews_question(self):\n response = self.client.get(reverse('interview:question', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n def test_anonymous_interviews_report(self):\n response = self.client.get(reverse('interview:report', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n\nclass TestViewInterviewOpen(TestCase):\n def setUp(self):\n self.user = create_user(\n username='testuser',\n password='password',\n )\n self.testunit = create_test(self.user)\n self.client.login(\n username='testuser',\n password='password',\n )\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(\n user = self.user,\n testunit = tu,\n is_complete = True,\n )\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n def test_open_new_test_interview(self):\n tu = TestUnit.objects.get()\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n def 
test_open_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(\n user = self.user,\n testunit = tu,\n is_complete = False,\n )\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n\nclass TestViewReplyQuestion(TestCase):\n def setUp(self):\n self.user = create_user(\n username='testuser',\n password='password',\n )\n self.testunit = create_test(self.user)\n self.client.login(\n username='testuser',\n password='password',\n )\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(\n user = self.user,\n testunit = tu,\n is_complete = False,\n )\n response = self.client.get(reverse('interview:question', args=(tu.id,)))\n self.assertEqual(response.status_code, 200)\n self.assertContains(\n response,\n interview.get_next_question().question.text\n )\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(\n user = self.user,\n testunit = tu,\n is_complete = False,\n )\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(\n reverse('interview:question', args=(interview.id,)),\n {str(answer.id): 'on'},\n )\n response = self.client.get(reverse('interview:question', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\nclass TestViewInterviewReport(TestCase):\n def setUp(self):\n self.user = create_user(\n username='testuser',\n password='password',\n )\n self.testunit = create_test(self.user)\n self.client.login(\n username='testuser',\n password='password',\n )\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(\n user = self.user,\n testunit = tu,\n is_complete = False,\n )\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(\n reverse('interview:question', args=(interview.id,)),\n {str(answer.id): 'on'},\n follow=True,\n )\n response = self.client.get(reverse('interview:report', args=(interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(\n user = self.user,\n testunit = tu,\n is_complete = False,\n )\n response = self.client.get(reverse('interview:report', args=(interview.id,)))\n self.assertEqual(response.status_code, 404)", "from django.test import TestCase\nfrom django.urls import reverse\nfrom django.contrib.auth.models import User\nfrom tests.models import TestGroup, TestUnit\nfrom .models import Interview\n\n\ndef create_test(test_owner):\n tg = TestGroup.objects.create(name='testing group', description=\n 'group for testing interview')\n tu = tg.testunit_set.create(name='test unit', description=\n 'test in group', owner=test_owner)\n q = tu.question_set.create(text='test question text')\n q.answer_set.create(name='question answer 1', right=True)\n q.answer_set.create(name='question 
answer 2', right=False)\n return tu\n\n\ndef create_user(**test_data):\n user = User.objects.create_user(**test_data)\n return user\n\n\nclass TestLoginRequried(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n\n def test_anonymous_interviews_open(self):\n response = self.client.get(reverse('interview:open', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n def test_anonymous_interviews_question(self):\n response = self.client.get(reverse('interview:question', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n def test_anonymous_interviews_report(self):\n response = self.client.get(reverse('interview:report', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n\nclass TestViewInterviewOpen(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=True)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n def test_open_new_test_interview(self):\n tu = TestUnit.objects.get()\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n def test_open_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass 
TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n\n\ndef create_test(test_owner):\n tg = TestGroup.objects.create(name='testing group', description=\n 'group for testing interview')\n tu = tg.testunit_set.create(name='test unit', description=\n 'test in group', owner=test_owner)\n q = tu.question_set.create(text='test question text')\n q.answer_set.create(name='question answer 1', right=True)\n q.answer_set.create(name='question answer 2', right=False)\n return tu\n\n\ndef create_user(**test_data):\n user = User.objects.create_user(**test_data)\n return user\n\n\nclass TestLoginRequried(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n\n def test_anonymous_interviews_open(self):\n response = self.client.get(reverse('interview:open', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n def test_anonymous_interviews_question(self):\n response = self.client.get(reverse('interview:question', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n def test_anonymous_interviews_report(self):\n response = self.client.get(reverse('interview:report', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n\nclass TestViewInterviewOpen(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=True)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n def test_open_new_test_interview(self):\n tu = TestUnit.objects.get()\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n def test_open_early_runed_test_interview(self):\n tu = 
TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n\n\ndef create_test(test_owner):\n tg = TestGroup.objects.create(name='testing group', description=\n 'group for testing interview')\n tu = tg.testunit_set.create(name='test unit', description=\n 'test in group', owner=test_owner)\n q = tu.question_set.create(text='test question text')\n q.answer_set.create(name='question answer 1', right=True)\n q.answer_set.create(name='question answer 2', right=False)\n return tu\n\n\n<function token>\n\n\nclass TestLoginRequried(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n\n def test_anonymous_interviews_open(self):\n response = self.client.get(reverse('interview:open', 
args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n def test_anonymous_interviews_question(self):\n response = self.client.get(reverse('interview:question', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n def test_anonymous_interviews_report(self):\n response = self.client.get(reverse('interview:report', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n\nclass TestViewInterviewOpen(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=True)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n def test_open_new_test_interview(self):\n tu = TestUnit.objects.get()\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n def test_open_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def 
test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n\n\nclass TestLoginRequried(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n\n def test_anonymous_interviews_open(self):\n response = self.client.get(reverse('interview:open', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n def test_anonymous_interviews_question(self):\n response = self.client.get(reverse('interview:question', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n def test_anonymous_interviews_report(self):\n response = self.client.get(reverse('interview:report', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n\nclass TestViewInterviewOpen(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=True)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n def test_open_new_test_interview(self):\n tu = TestUnit.objects.get()\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n def test_open_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = 
self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n\n\nclass TestLoginRequried(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n\n def test_anonymous_interviews_open(self):\n response = self.client.get(reverse('interview:open', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n <function token>\n\n def test_anonymous_interviews_report(self):\n response = self.client.get(reverse('interview:report', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n\nclass TestViewInterviewOpen(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=True)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n def test_open_new_test_interview(self):\n tu = TestUnit.objects.get()\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', 
response.url)\n\n def test_open_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n\n\nclass TestLoginRequried(TestCase):\n <function token>\n\n def test_anonymous_interviews_open(self):\n response = self.client.get(reverse('interview:open', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n <function token>\n\n def test_anonymous_interviews_report(self):\n response = self.client.get(reverse('interview:report', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n\nclass TestViewInterviewOpen(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', 
password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=True)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n def test_open_new_test_interview(self):\n tu = TestUnit.objects.get()\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n def test_open_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, 
testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n\n\nclass TestLoginRequried(TestCase):\n <function token>\n <function token>\n <function token>\n\n def test_anonymous_interviews_report(self):\n response = self.client.get(reverse('interview:report', args=(1,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('login', response.url)\n\n\nclass TestViewInterviewOpen(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=True)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n def test_open_new_test_interview(self):\n tu = TestUnit.objects.get()\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n def test_open_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def 
test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n\n\nclass TestLoginRequried(TestCase):\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass TestViewInterviewOpen(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=True)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n def test_open_new_test_interview(self):\n tu = TestUnit.objects.get()\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n def test_open_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n 
self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n\n\nclass TestViewInterviewOpen(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=True)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n def test_open_new_test_interview(self):\n tu = TestUnit.objects.get()\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n def test_open_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n 
is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n\n\nclass TestViewInterviewOpen(TestCase):\n <function token>\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=True)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n def test_open_new_test_interview(self):\n tu = TestUnit.objects.get()\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n def test_open_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n 
question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n\n\nclass TestViewInterviewOpen(TestCase):\n <function token>\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=True)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n def test_open_new_test_interview(self):\n tu = TestUnit.objects.get()\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('question', response.url)\n <function token>\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = 
Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n\n\nclass TestViewInterviewOpen(TestCase):\n <function token>\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=True)\n response = self.client.get(reverse('interview:open', args=(tu.id,)))\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n <function token>\n <function token>\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n 
)\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n\n\nclass TestViewInterviewOpen(TestCase):\n <function token>\n\n def test_open_broken_test_interview(self):\n response = self.client.get(reverse('interview:open', args=(2,)))\n self.assertEqual(response.status_code, 404)\n <function token>\n <function token>\n <function token>\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n 
is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n\n\nclass TestViewInterviewOpen(TestCase):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n<class token>\n\n\nclass 
TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_open_new_interview_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, interview.get_next_question().\n question.text)\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n<class token>\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n <function token>\n\n def test_open_report_after_reply_last_question(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.last()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'})\n response = self.client.get(reverse('interview:question', args=(tu.id,))\n )\n self.assertEqual(response.status_code, 302)\n 
self.assertIn('report', response.url)\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n<class token>\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_open_broken_interview_question(self):\n response = self.client.get(reverse('interview:question', args=(2,)))\n self.assertEqual(response.status_code, 404)\n <function token>\n <function token>\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n<class token>\n\n\nclass TestViewReplyQuestion(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n <function token>\n <function token>\n <function token>\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit 
= create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n<class token>\n\n\nclass TestViewReplyQuestion(TestCase):\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestViewInterviewReport(TestCase):\n\n def setUp(self):\n self.user = create_user(username='testuser', password='password')\n self.testunit = create_test(self.user)\n self.client.login(username='testuser', password='password')\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def 
test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestViewInterviewReport(TestCase):\n <function token>\n\n def test_report_for_broken_test_interview(self):\n response = self.client.get(reverse('interview:report', args=(2,)))\n self.assertEqual(response.status_code, 404)\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestViewInterviewReport(TestCase):\n <function token>\n <function token>\n\n def test_report_for_complete_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n question = interview.get_next_question().question\n answer = question.answer_set.first()\n response = self.client.post(reverse('interview:question', args=(\n interview.id,)), {str(answer.id): 'on'}, follow=True)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 200)\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestViewInterviewReport(TestCase):\n <function token>\n <function token>\n <function token>\n\n def test_report_for_early_runed_test_interview(self):\n tu = TestUnit.objects.get()\n interview = Interview.objects.create(user=self.user, testunit=tu,\n is_complete=False)\n response = self.client.get(reverse('interview:report', args=(\n interview.id,)))\n self.assertEqual(response.status_code, 404)\n", "<import token>\n<function token>\n<function token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestViewInterviewReport(TestCase):\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<function token>\n<function token>\n<class token>\n<class token>\n<class token>\n<class token>\n" ]
false
99,438
d08df0175a4bbe51b1a76b8232ce8c46ef045d7f
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import itertools

import numpy as np

sys.path.append("..")  # make the local george checkout importable

import george
from george.kernels import TaskKernel


num_tasks = 10

# One-dimensional inputs, task index stored in column 0, num_tasks distinct tasks.
kernel = TaskKernel(1, 0, num_tasks)

print(kernel.vector)
kernel.vector = range(1, len(kernel.vector) + 1)
print(kernel.vector)

# Evaluate the kernel on every pair of task indices to build the
# task-covariance matrix K.
K = np.zeros([num_tasks, num_tasks])

for i, j in itertools.product(range(num_tasks), repeat=2):
    K[i, j] = kernel.value(np.array([[i]]), np.array([[j]]))[0, 0]

print(K)


print(np.linalg.cholesky(K))
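# Appended note (not in the original script): np.linalg.cholesky succeeds only
# when its argument is symmetric positive definite, so the final print doubles
# as a sanity check that this TaskKernel parameterization yields a valid task
# covariance; numpy.linalg.LinAlgError would be raised otherwise.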
[ "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport os\nimport sys\nimport numpy as np\nimport itertools\n\nsys.path.append(\"..\")\n\nimport george\nfrom george.kernels import TaskKernel\n\n\nnum_tasks = 10\n\n\nkernel = TaskKernel(1,0,num_tasks)\n\nprint(kernel.vector)\nkernel.vector=range(1, len(kernel.vector)+1)\nprint(kernel.vector)\n\n\nK = np.zeros([num_tasks, num_tasks])\n\nfor (i,j) in itertools.product(range(num_tasks), repeat=2):\n\tK[i,j] = (kernel.value(np.array([[i]]),np.array([[j]]))[0,0])\n\nprint(K)\n\n\nprint(np.linalg.cholesky(K))\n", "import os\nimport sys\nimport numpy as np\nimport itertools\nsys.path.append('..')\nimport george\nfrom george.kernels import TaskKernel\nnum_tasks = 10\nkernel = TaskKernel(1, 0, num_tasks)\nprint(kernel.vector)\nkernel.vector = range(1, len(kernel.vector) + 1)\nprint(kernel.vector)\nK = np.zeros([num_tasks, num_tasks])\nfor i, j in itertools.product(range(num_tasks), repeat=2):\n K[i, j] = kernel.value(np.array([[i]]), np.array([[j]]))[0, 0]\nprint(K)\nprint(np.linalg.cholesky(K))\n", "<import token>\nsys.path.append('..')\n<import token>\nnum_tasks = 10\nkernel = TaskKernel(1, 0, num_tasks)\nprint(kernel.vector)\nkernel.vector = range(1, len(kernel.vector) + 1)\nprint(kernel.vector)\nK = np.zeros([num_tasks, num_tasks])\nfor i, j in itertools.product(range(num_tasks), repeat=2):\n K[i, j] = kernel.value(np.array([[i]]), np.array([[j]]))[0, 0]\nprint(K)\nprint(np.linalg.cholesky(K))\n", "<import token>\nsys.path.append('..')\n<import token>\n<assignment token>\nprint(kernel.vector)\n<assignment token>\nprint(kernel.vector)\n<assignment token>\nfor i, j in itertools.product(range(num_tasks), repeat=2):\n K[i, j] = kernel.value(np.array([[i]]), np.array([[j]]))[0, 0]\nprint(K)\nprint(np.linalg.cholesky(K))\n", "<import token>\n<code token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
99,439
eda136454aba14062953bb8894923a6d340a8b2f
import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets, discriminant_analysis
from sklearn.model_selection import train_test_split

def load_data():
    iris = datasets.load_iris()
    X_train = iris.data
    y_train = iris.target
    return train_test_split(X_train, y_train, test_size=0.25, random_state=0, stratify=y_train)

def test_linearDiscriminantAnalysis(*data):
    X_train, X_test, y_train, y_test = data
    lda = discriminant_analysis.LinearDiscriminantAnalysis()
    lda.fit(X_train, y_train)
    print('Coefficients:%s, intercept %s' % (lda.coef_, lda.intercept_))
    print('Score:%.2f' % lda.score(X_test, y_test))

def plot_LDA(converted_X, y):
    from mpl_toolkits.mplot3d import Axes3D
    fig = plt.figure()
    ax = Axes3D(fig)
    colors = 'rgb'
    markers = 'o*s'
    # Distinct loop-variable names so the color/marker sequences are not shadowed.
    for target, color, marker in zip([0, 1, 2], colors, markers):
        pos = (y == target).ravel()
        X = converted_X[pos, :]
        ax.scatter(X[:, 0], X[:, 1], X[:, 2], color=color, marker=marker, label='Label %d' % target)
    ax.legend(loc='best')
    fig.suptitle('Iris After LDA')
    plt.show()

def test_linearDiscriminantAnalysis_solver(*data):
    X_train, X_test, y_train, y_test = data
    solvers = ['svd', 'lsqr', 'eigen']
    for solver in solvers:
        if solver == 'svd':
            # The 'svd' solver does not accept a shrinkage argument.
            lda = discriminant_analysis.LinearDiscriminantAnalysis(solver=solver)
        else:
            lda = discriminant_analysis.LinearDiscriminantAnalysis(solver=solver, shrinkage=None)
        lda.fit(X_train, y_train)
        print('Score at solver=%s:%.2f' % (solver, lda.score(X_test, y_test)))

def test_linearDiscriminantAnalysis_shrinkage(*data):
    X_train, X_test, y_train, y_test = data
    shrinkages = np.linspace(0.0, 1.0, num=20)
    scores = []
    for shrinkage in shrinkages:
        lda = discriminant_analysis.LinearDiscriminantAnalysis(solver='lsqr', shrinkage=shrinkage)
        lda.fit(X_train, y_train)
        scores.append(lda.score(X_test, y_test))
    # plot score as a function of the shrinkage parameter
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(shrinkages, scores)
    ax.set_xlabel('shrinkage')
    ax.set_ylabel('score')
    ax.set_ylim(0, 1.05)
    ax.set_title("LinearDiscriminantAnalysis")
    plt.show()


if __name__ == "__main__":
    X_train, X_test, y_train, y_test = load_data()
    """
    X = np.vstack((X_train,X_test))
    Y = np.vstack((y_train.reshape(y_train.size,1),y_test.reshape(y_test.size,1)))
    lda = discriminant_analysis.LinearDiscriminantAnalysis()
    lda.fit(X,Y)
    converted_X = np.dot(X,np.transpose(lda.coef_)) + lda.intercept_
    plot_LDA(converted_X,Y)
    """
    #test_linearDiscriminantAnalysis_solver(X_train,X_test,y_train,y_test)

    test_linearDiscriminantAnalysis_shrinkage(X_train, X_test, y_train, y_test)
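# Appended note (not in the original script): in scikit-learn, shrinkage is
# only honored by the 'lsqr' and 'eigen' solvers; combining it with
# solver='svd' raises an error, which is why the 'svd' branch above omits
# the shrinkage argument entirely.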
[ "import matplotlib.pyplot as plt\nimport numpy as np\nfrom sklearn import datasets, linear_model, discriminant_analysis\nfrom sklearn.model_selection import train_test_split\n\ndef load_data():\n iris = datasets.load_iris()\n X_train = iris.data\n y_train = iris.target\n return train_test_split(X_train,y_train,test_size=0.25,random_state=0,stratify=y_train)\n\ndef test_linearDiscriminantAnalysis(*data):\n X_train,X_test,y_train,y_test = data\n lda = discriminant_analysis.LinearDiscriminantAnalysis()\n lda.fit(X_train,y_train)\n print('Coefficients:%s,intercept %s' % (lda.coef_, lda.intercept_))\n print('Score:%.2f' % lda.score(X_test, y_test))\n\ndef plot_LDA(converted_X,y):\n from mpl_toolkits.mplot3d import Axes3D\n fig = plt.figure()\n ax = Axes3D(fig)\n colors = 'rgb'\n markers = 'o*s'\n for target,colors,markers in zip([0,1,2],colors,markers):\n pos = (y == target).ravel()\n X = converted_X[pos,:]\n ax.scatter(X[:,0],X[:,1],X[:,2],color=colors,marker=markers,label='Label %d'%target)\n ax.legend(loc = 'best')\n fig.suptitle('Iris After LDA')\n plt.show()\n\ndef test_linearDiscriminantAnalysis_solver(*data):\n X_train,X_test,y_train,y_test = data\n solvers = ['svd','lsqr','eigen']\n for solver in solvers:\n if(solver=='svd'):\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver = solver)\n else:\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver = solver,shrinkage=None)\n lda.fit(X_train,y_train)\n print('Score at solve=%s:%.2f'%(solver,lda.score(X_test,y_test)))\n\ndef test_linearDiscriminantAnalysis_shrinkage(*data):\n X_train,X_test,y_train,y_test = data\n shrinkages = np.linspace(0.0,1.0,num=20)\n scores = []\n for shrinkage in shrinkages:\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver='lsqr',shrinkage=shrinkage)\n lda.fit(X_train,y_train)\n scores.append(lda.score(X_test,y_test))\n ##plot\n fig = plt.figure()\n ax = fig.add_subplot(1,1,1)\n ax.plot(shrinkages,scores)\n ax.set_xlabel(r'shrinkage')\n ax.set_ylabel(r'score')\n ax.set_ylim(0,1.05)\n ax.set_title(\"LinearDiscriminanAnalysis\")\n plt.show()\n\n\nif __name__ == \"__main__\":\n X_train,X_test,y_train,y_test = load_data()\n \"\"\"\n X = np.vstack((X_train,X_test))\n Y = np.vstack((y_train.reshape(y_train.size,1),y_test.reshape(y_test.size,1)))\n lda = discriminant_analysis.LinearDiscriminantAnalysis()\n lda.fit(X,Y)\n converted_X = np.dot(X,np.transpose(lda.coef_)) + lda.intercept_\n plot_LDA(converted_X,Y)\n \"\"\"\n #test_linearDiscriminantAnalysis_solver(X_train,X_test,y_train,y_test)\n\n test_linearDiscriminantAnalysis_shrinkage(X_train,X_test,y_train,y_test)", "import matplotlib.pyplot as plt\nimport numpy as np\nfrom sklearn import datasets, linear_model, discriminant_analysis\nfrom sklearn.model_selection import train_test_split\n\n\ndef load_data():\n iris = datasets.load_iris()\n X_train = iris.data\n y_train = iris.target\n return train_test_split(X_train, y_train, test_size=0.25, random_state=\n 0, stratify=y_train)\n\n\ndef test_linearDiscriminantAnalysis(*data):\n X_train, X_test, y_train, y_test = data\n lda = discriminant_analysis.LinearDiscriminantAnalysis()\n lda.fit(X_train, y_train)\n print('Coefficients:%s,intercept %s' % (lda.coef_, lda.intercept_))\n print('Score:%.2f' % lda.score(X_test, y_test))\n\n\ndef plot_LDA(converted_X, y):\n from mpl_toolkits.mplot3d import Axes3D\n fig = plt.figure()\n ax = Axes3D(fig)\n colors = 'rgb'\n markers = 'o*s'\n for target, colors, markers in zip([0, 1, 2], colors, markers):\n pos = (y == target).ravel()\n X = 
converted_X[pos, :]\n ax.scatter(X[:, 0], X[:, 1], X[:, 2], color=colors, marker=markers,\n label='Label %d' % target)\n ax.legend(loc='best')\n fig.suptitle('Iris After LDA')\n plt.show()\n\n\ndef test_linearDiscriminantAnalysis_solver(*data):\n X_train, X_test, y_train, y_test = data\n solvers = ['svd', 'lsqr', 'eigen']\n for solver in solvers:\n if solver == 'svd':\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver=\n solver)\n else:\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver=\n solver, shrinkage=None)\n lda.fit(X_train, y_train)\n print('Score at solve=%s:%.2f' % (solver, lda.score(X_test, y_test)))\n\n\ndef test_linearDiscriminantAnalysis_shrinkage(*data):\n X_train, X_test, y_train, y_test = data\n shrinkages = np.linspace(0.0, 1.0, num=20)\n scores = []\n for shrinkage in shrinkages:\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver=\n 'lsqr', shrinkage=shrinkage)\n lda.fit(X_train, y_train)\n scores.append(lda.score(X_test, y_test))\n fig = plt.figure()\n ax = fig.add_subplot(1, 1, 1)\n ax.plot(shrinkages, scores)\n ax.set_xlabel('shrinkage')\n ax.set_ylabel('score')\n ax.set_ylim(0, 1.05)\n ax.set_title('LinearDiscriminanAnalysis')\n plt.show()\n\n\nif __name__ == '__main__':\n X_train, X_test, y_train, y_test = load_data()\n \"\"\"\n X = np.vstack((X_train,X_test))\n Y = np.vstack((y_train.reshape(y_train.size,1),y_test.reshape(y_test.size,1)))\n lda = discriminant_analysis.LinearDiscriminantAnalysis()\n lda.fit(X,Y)\n converted_X = np.dot(X,np.transpose(lda.coef_)) + lda.intercept_\n plot_LDA(converted_X,Y)\n \"\"\"\n test_linearDiscriminantAnalysis_shrinkage(X_train, X_test, y_train, y_test)\n", "<import token>\n\n\ndef load_data():\n iris = datasets.load_iris()\n X_train = iris.data\n y_train = iris.target\n return train_test_split(X_train, y_train, test_size=0.25, random_state=\n 0, stratify=y_train)\n\n\ndef test_linearDiscriminantAnalysis(*data):\n X_train, X_test, y_train, y_test = data\n lda = discriminant_analysis.LinearDiscriminantAnalysis()\n lda.fit(X_train, y_train)\n print('Coefficients:%s,intercept %s' % (lda.coef_, lda.intercept_))\n print('Score:%.2f' % lda.score(X_test, y_test))\n\n\ndef plot_LDA(converted_X, y):\n from mpl_toolkits.mplot3d import Axes3D\n fig = plt.figure()\n ax = Axes3D(fig)\n colors = 'rgb'\n markers = 'o*s'\n for target, colors, markers in zip([0, 1, 2], colors, markers):\n pos = (y == target).ravel()\n X = converted_X[pos, :]\n ax.scatter(X[:, 0], X[:, 1], X[:, 2], color=colors, marker=markers,\n label='Label %d' % target)\n ax.legend(loc='best')\n fig.suptitle('Iris After LDA')\n plt.show()\n\n\ndef test_linearDiscriminantAnalysis_solver(*data):\n X_train, X_test, y_train, y_test = data\n solvers = ['svd', 'lsqr', 'eigen']\n for solver in solvers:\n if solver == 'svd':\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver=\n solver)\n else:\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver=\n solver, shrinkage=None)\n lda.fit(X_train, y_train)\n print('Score at solve=%s:%.2f' % (solver, lda.score(X_test, y_test)))\n\n\ndef test_linearDiscriminantAnalysis_shrinkage(*data):\n X_train, X_test, y_train, y_test = data\n shrinkages = np.linspace(0.0, 1.0, num=20)\n scores = []\n for shrinkage in shrinkages:\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver=\n 'lsqr', shrinkage=shrinkage)\n lda.fit(X_train, y_train)\n scores.append(lda.score(X_test, y_test))\n fig = plt.figure()\n ax = fig.add_subplot(1, 1, 1)\n ax.plot(shrinkages, scores)\n 
ax.set_xlabel('shrinkage')\n ax.set_ylabel('score')\n ax.set_ylim(0, 1.05)\n ax.set_title('LinearDiscriminanAnalysis')\n plt.show()\n\n\nif __name__ == '__main__':\n X_train, X_test, y_train, y_test = load_data()\n \"\"\"\n X = np.vstack((X_train,X_test))\n Y = np.vstack((y_train.reshape(y_train.size,1),y_test.reshape(y_test.size,1)))\n lda = discriminant_analysis.LinearDiscriminantAnalysis()\n lda.fit(X,Y)\n converted_X = np.dot(X,np.transpose(lda.coef_)) + lda.intercept_\n plot_LDA(converted_X,Y)\n \"\"\"\n test_linearDiscriminantAnalysis_shrinkage(X_train, X_test, y_train, y_test)\n", "<import token>\n\n\ndef load_data():\n iris = datasets.load_iris()\n X_train = iris.data\n y_train = iris.target\n return train_test_split(X_train, y_train, test_size=0.25, random_state=\n 0, stratify=y_train)\n\n\ndef test_linearDiscriminantAnalysis(*data):\n X_train, X_test, y_train, y_test = data\n lda = discriminant_analysis.LinearDiscriminantAnalysis()\n lda.fit(X_train, y_train)\n print('Coefficients:%s,intercept %s' % (lda.coef_, lda.intercept_))\n print('Score:%.2f' % lda.score(X_test, y_test))\n\n\ndef plot_LDA(converted_X, y):\n from mpl_toolkits.mplot3d import Axes3D\n fig = plt.figure()\n ax = Axes3D(fig)\n colors = 'rgb'\n markers = 'o*s'\n for target, colors, markers in zip([0, 1, 2], colors, markers):\n pos = (y == target).ravel()\n X = converted_X[pos, :]\n ax.scatter(X[:, 0], X[:, 1], X[:, 2], color=colors, marker=markers,\n label='Label %d' % target)\n ax.legend(loc='best')\n fig.suptitle('Iris After LDA')\n plt.show()\n\n\ndef test_linearDiscriminantAnalysis_solver(*data):\n X_train, X_test, y_train, y_test = data\n solvers = ['svd', 'lsqr', 'eigen']\n for solver in solvers:\n if solver == 'svd':\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver=\n solver)\n else:\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver=\n solver, shrinkage=None)\n lda.fit(X_train, y_train)\n print('Score at solve=%s:%.2f' % (solver, lda.score(X_test, y_test)))\n\n\ndef test_linearDiscriminantAnalysis_shrinkage(*data):\n X_train, X_test, y_train, y_test = data\n shrinkages = np.linspace(0.0, 1.0, num=20)\n scores = []\n for shrinkage in shrinkages:\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver=\n 'lsqr', shrinkage=shrinkage)\n lda.fit(X_train, y_train)\n scores.append(lda.score(X_test, y_test))\n fig = plt.figure()\n ax = fig.add_subplot(1, 1, 1)\n ax.plot(shrinkages, scores)\n ax.set_xlabel('shrinkage')\n ax.set_ylabel('score')\n ax.set_ylim(0, 1.05)\n ax.set_title('LinearDiscriminanAnalysis')\n plt.show()\n\n\n<code token>\n", "<import token>\n\n\ndef load_data():\n iris = datasets.load_iris()\n X_train = iris.data\n y_train = iris.target\n return train_test_split(X_train, y_train, test_size=0.25, random_state=\n 0, stratify=y_train)\n\n\ndef test_linearDiscriminantAnalysis(*data):\n X_train, X_test, y_train, y_test = data\n lda = discriminant_analysis.LinearDiscriminantAnalysis()\n lda.fit(X_train, y_train)\n print('Coefficients:%s,intercept %s' % (lda.coef_, lda.intercept_))\n print('Score:%.2f' % lda.score(X_test, y_test))\n\n\ndef plot_LDA(converted_X, y):\n from mpl_toolkits.mplot3d import Axes3D\n fig = plt.figure()\n ax = Axes3D(fig)\n colors = 'rgb'\n markers = 'o*s'\n for target, colors, markers in zip([0, 1, 2], colors, markers):\n pos = (y == target).ravel()\n X = converted_X[pos, :]\n ax.scatter(X[:, 0], X[:, 1], X[:, 2], color=colors, marker=markers,\n label='Label %d' % target)\n ax.legend(loc='best')\n fig.suptitle('Iris After LDA')\n 
plt.show()\n\n\n<function token>\n\n\ndef test_linearDiscriminantAnalysis_shrinkage(*data):\n X_train, X_test, y_train, y_test = data\n shrinkages = np.linspace(0.0, 1.0, num=20)\n scores = []\n for shrinkage in shrinkages:\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver=\n 'lsqr', shrinkage=shrinkage)\n lda.fit(X_train, y_train)\n scores.append(lda.score(X_test, y_test))\n fig = plt.figure()\n ax = fig.add_subplot(1, 1, 1)\n ax.plot(shrinkages, scores)\n ax.set_xlabel('shrinkage')\n ax.set_ylabel('score')\n ax.set_ylim(0, 1.05)\n ax.set_title('LinearDiscriminanAnalysis')\n plt.show()\n\n\n<code token>\n", "<import token>\n<function token>\n\n\ndef test_linearDiscriminantAnalysis(*data):\n X_train, X_test, y_train, y_test = data\n lda = discriminant_analysis.LinearDiscriminantAnalysis()\n lda.fit(X_train, y_train)\n print('Coefficients:%s,intercept %s' % (lda.coef_, lda.intercept_))\n print('Score:%.2f' % lda.score(X_test, y_test))\n\n\ndef plot_LDA(converted_X, y):\n from mpl_toolkits.mplot3d import Axes3D\n fig = plt.figure()\n ax = Axes3D(fig)\n colors = 'rgb'\n markers = 'o*s'\n for target, colors, markers in zip([0, 1, 2], colors, markers):\n pos = (y == target).ravel()\n X = converted_X[pos, :]\n ax.scatter(X[:, 0], X[:, 1], X[:, 2], color=colors, marker=markers,\n label='Label %d' % target)\n ax.legend(loc='best')\n fig.suptitle('Iris After LDA')\n plt.show()\n\n\n<function token>\n\n\ndef test_linearDiscriminantAnalysis_shrinkage(*data):\n X_train, X_test, y_train, y_test = data\n shrinkages = np.linspace(0.0, 1.0, num=20)\n scores = []\n for shrinkage in shrinkages:\n lda = discriminant_analysis.LinearDiscriminantAnalysis(solver=\n 'lsqr', shrinkage=shrinkage)\n lda.fit(X_train, y_train)\n scores.append(lda.score(X_test, y_test))\n fig = plt.figure()\n ax = fig.add_subplot(1, 1, 1)\n ax.plot(shrinkages, scores)\n ax.set_xlabel('shrinkage')\n ax.set_ylabel('score')\n ax.set_ylim(0, 1.05)\n ax.set_title('LinearDiscriminanAnalysis')\n plt.show()\n\n\n<code token>\n", "<import token>\n<function token>\n\n\ndef test_linearDiscriminantAnalysis(*data):\n X_train, X_test, y_train, y_test = data\n lda = discriminant_analysis.LinearDiscriminantAnalysis()\n lda.fit(X_train, y_train)\n print('Coefficients:%s,intercept %s' % (lda.coef_, lda.intercept_))\n print('Score:%.2f' % lda.score(X_test, y_test))\n\n\ndef plot_LDA(converted_X, y):\n from mpl_toolkits.mplot3d import Axes3D\n fig = plt.figure()\n ax = Axes3D(fig)\n colors = 'rgb'\n markers = 'o*s'\n for target, colors, markers in zip([0, 1, 2], colors, markers):\n pos = (y == target).ravel()\n X = converted_X[pos, :]\n ax.scatter(X[:, 0], X[:, 1], X[:, 2], color=colors, marker=markers,\n label='Label %d' % target)\n ax.legend(loc='best')\n fig.suptitle('Iris After LDA')\n plt.show()\n\n\n<function token>\n<function token>\n<code token>\n", "<import token>\n<function token>\n<function token>\n\n\ndef plot_LDA(converted_X, y):\n from mpl_toolkits.mplot3d import Axes3D\n fig = plt.figure()\n ax = Axes3D(fig)\n colors = 'rgb'\n markers = 'o*s'\n for target, colors, markers in zip([0, 1, 2], colors, markers):\n pos = (y == target).ravel()\n X = converted_X[pos, :]\n ax.scatter(X[:, 0], X[:, 1], X[:, 2], color=colors, marker=markers,\n label='Label %d' % target)\n ax.legend(loc='best')\n fig.suptitle('Iris After LDA')\n plt.show()\n\n\n<function token>\n<function token>\n<code token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n" ]
false
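The record above sweeps the shrinkage parameter of scikit-learn's LinearDiscriminantAnalysis with the 'lsqr' solver. A minimal standalone sketch of the same sweep, assuming only that scikit-learn is installed (the 20-point grid mirrors the record's np.linspace call):

import numpy as np
from sklearn import datasets, discriminant_analysis
from sklearn.model_selection import train_test_split

# Load iris and split it, mirroring the record's load_data() helper.
X, y = datasets.load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=0, stratify=y)

# Sweep shrinkage over [0, 1]; only the 'lsqr' and 'eigen' solvers accept it.
for shrinkage in np.linspace(0.0, 1.0, num=20):
    lda = discriminant_analysis.LinearDiscriminantAnalysis(
        solver='lsqr', shrinkage=shrinkage)
    lda.fit(X_train, y_train)
    print('shrinkage=%.2f score=%.2f' % (shrinkage, lda.score(X_test, y_test)))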
99,440
8f09e98b7f8e0f20a6b4d0012688669d4a1932ce
import os #this script downloads the AI2020 library from github def download_AI_library(): os.system('rm -r AI2020/') os.system('git clone https://github.com/UmbertoJr/AI2020.git &> /dev/null')
[ "import os\n\n#this script downloads the AI2020 library from github\ndef download_AI_library():\n\tos.system('rm -r AI2020/')\n\tos.system('git clone https://github.com/UmbertoJr/AI2020.git &> /dev/null')", "import os\n\n\ndef download_AI_library():\n    os.system('rm -r AI2020/')\n    os.system('git clone https://github.com/UmbertoJr/AI2020.git &> /dev/null')\n", "<import token>\n\n\ndef download_AI_library():\n    os.system('rm -r AI2020/')\n    os.system('git clone https://github.com/UmbertoJr/AI2020.git &> /dev/null')\n", "<import token>\n<function token>\n" ]
false
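The record above shells out through os.system, which is fine for a notebook but fragile in general. A sketch of the same two steps with the standard library's shutil and subprocess instead ('&> /dev/null' is bash-specific, so output is silenced with DEVNULL here):

import shutil
import subprocess

def download_ai_library():
    # Drop any previous checkout, then clone quietly.
    shutil.rmtree('AI2020', ignore_errors=True)
    subprocess.run(
        ['git', 'clone', 'https://github.com/UmbertoJr/AI2020.git'],
        stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True)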
99,441
5614a3bde5a5e5860d04f3ac1124332dce8bae3f
# Importing this module will bind routes to the app. # This could be futher split up into submodules if the number of endpoints grows too large for one file. from . import app from .models import User from flask import abort, jsonify, request, session, render_template as render from flask_user import current_user, login_required from flask.ext.login import login_user # home @app.route('/') def index(): return render('index.html') # view all users @app.route('/user/list') @login_required def user_list(): return render('users.html', users=User.query.all()) # view a user's status # (should have some security on this) @app.route('/user/<uid>') @login_required def user_view(uid): return render('map.html',user=User.query.get(uid)) # view myself @app.route('/user/me') @login_required def user_view_me(): return render('map.html',user=current_user) # update my position @app.route('/api/user/update', methods=['POST']) @login_required def user_update(): current_user.set_location(request.form['lng'],request.form['lat']) return "Location updated." # get my user info @app.route('/api/user/info', methods=['GET']) @login_required def api_user(): user = current_user return jsonify({ 'id':user.id, 'name':user.name, 'updated_location':user.updated_location, 'lat':user.lat, 'lng':user.lng }) @app.route('/api/login', methods=['POST']) def api_login(): email = request.form['email'] password = request.form['password'] user, user_email = app.user_manager.find_user_by_email(email) ok = False if user and user.active: if app.user_manager.verify_password(password, user) is True: user.authenticated = True login_user(user, remember=True) ok = True return jsonify({ 'success': ok })
[ "# Importing this module will bind routes to the app.\n# This could be futher split up into submodules if the number of endpoints grows too large for one file.\n\nfrom . import app\nfrom .models import User\nfrom flask import abort, jsonify, request, session, render_template as render\nfrom flask_user import current_user, login_required\n\nfrom flask.ext.login import login_user \n\n# home\[email protected]('/')\ndef index():\n return render('index.html')\n\n# view all users\[email protected]('/user/list')\n@login_required\ndef user_list():\n return render('users.html', users=User.query.all())\n\n# view a user's status\n# (should have some security on this)\[email protected]('/user/<uid>')\n@login_required\ndef user_view(uid):\n return render('map.html',user=User.query.get(uid))\n\n# view myself\[email protected]('/user/me')\n@login_required\ndef user_view_me():\n return render('map.html',user=current_user)\n\n# update my position\[email protected]('/api/user/update', methods=['POST'])\n@login_required\ndef user_update():\n current_user.set_location(request.form['lng'],request.form['lat'])\n return \"Location updated.\"\n\n# get my user info\[email protected]('/api/user/info', methods=['GET'])\n@login_required\ndef api_user():\n user = current_user\n return jsonify({\n 'id':user.id,\n 'name':user.name,\n 'updated_location':user.updated_location,\n 'lat':user.lat,\n 'lng':user.lng\n })\n\[email protected]('/api/login', methods=['POST'])\ndef api_login():\n email = request.form['email']\n password = request.form['password']\n \n user, user_email = app.user_manager.find_user_by_email(email)\n\n ok = False\n\n if user and user.active:\n if app.user_manager.verify_password(password, user) is True:\n user.authenticated = True\n login_user(user, remember=True)\n ok = True\n\n return jsonify({ 'success': ok })\n", "from . 
import app\nfrom .models import User\nfrom flask import abort, jsonify, request, session, render_template as render\nfrom flask_user import current_user, login_required\nfrom flask.ext.login import login_user\n\n\[email protected]('/')\ndef index():\n return render('index.html')\n\n\[email protected]('/user/list')\n@login_required\ndef user_list():\n return render('users.html', users=User.query.all())\n\n\[email protected]('/user/<uid>')\n@login_required\ndef user_view(uid):\n return render('map.html', user=User.query.get(uid))\n\n\[email protected]('/user/me')\n@login_required\ndef user_view_me():\n return render('map.html', user=current_user)\n\n\[email protected]('/api/user/update', methods=['POST'])\n@login_required\ndef user_update():\n current_user.set_location(request.form['lng'], request.form['lat'])\n return 'Location updated.'\n\n\[email protected]('/api/user/info', methods=['GET'])\n@login_required\ndef api_user():\n user = current_user\n return jsonify({'id': user.id, 'name': user.name, 'updated_location':\n user.updated_location, 'lat': user.lat, 'lng': user.lng})\n\n\[email protected]('/api/login', methods=['POST'])\ndef api_login():\n email = request.form['email']\n password = request.form['password']\n user, user_email = app.user_manager.find_user_by_email(email)\n ok = False\n if user and user.active:\n if app.user_manager.verify_password(password, user) is True:\n user.authenticated = True\n login_user(user, remember=True)\n ok = True\n return jsonify({'success': ok})\n", "<import token>\n\n\[email protected]('/')\ndef index():\n return render('index.html')\n\n\[email protected]('/user/list')\n@login_required\ndef user_list():\n return render('users.html', users=User.query.all())\n\n\[email protected]('/user/<uid>')\n@login_required\ndef user_view(uid):\n return render('map.html', user=User.query.get(uid))\n\n\[email protected]('/user/me')\n@login_required\ndef user_view_me():\n return render('map.html', user=current_user)\n\n\[email protected]('/api/user/update', methods=['POST'])\n@login_required\ndef user_update():\n current_user.set_location(request.form['lng'], request.form['lat'])\n return 'Location updated.'\n\n\[email protected]('/api/user/info', methods=['GET'])\n@login_required\ndef api_user():\n user = current_user\n return jsonify({'id': user.id, 'name': user.name, 'updated_location':\n user.updated_location, 'lat': user.lat, 'lng': user.lng})\n\n\[email protected]('/api/login', methods=['POST'])\ndef api_login():\n email = request.form['email']\n password = request.form['password']\n user, user_email = app.user_manager.find_user_by_email(email)\n ok = False\n if user and user.active:\n if app.user_manager.verify_password(password, user) is True:\n user.authenticated = True\n login_user(user, remember=True)\n ok = True\n return jsonify({'success': ok})\n", "<import token>\n\n\[email protected]('/')\ndef index():\n return render('index.html')\n\n\[email protected]('/user/list')\n@login_required\ndef user_list():\n return render('users.html', users=User.query.all())\n\n\[email protected]('/user/<uid>')\n@login_required\ndef user_view(uid):\n return render('map.html', user=User.query.get(uid))\n\n\n<function token>\n\n\[email protected]('/api/user/update', methods=['POST'])\n@login_required\ndef user_update():\n current_user.set_location(request.form['lng'], request.form['lat'])\n return 'Location updated.'\n\n\[email protected]('/api/user/info', methods=['GET'])\n@login_required\ndef api_user():\n user = current_user\n return jsonify({'id': user.id, 'name': 
user.name, 'updated_location':\n user.updated_location, 'lat': user.lat, 'lng': user.lng})\n\n\[email protected]('/api/login', methods=['POST'])\ndef api_login():\n email = request.form['email']\n password = request.form['password']\n user, user_email = app.user_manager.find_user_by_email(email)\n ok = False\n if user and user.active:\n if app.user_manager.verify_password(password, user) is True:\n user.authenticated = True\n login_user(user, remember=True)\n ok = True\n return jsonify({'success': ok})\n", "<import token>\n<function token>\n\n\[email protected]('/user/list')\n@login_required\ndef user_list():\n return render('users.html', users=User.query.all())\n\n\[email protected]('/user/<uid>')\n@login_required\ndef user_view(uid):\n return render('map.html', user=User.query.get(uid))\n\n\n<function token>\n\n\[email protected]('/api/user/update', methods=['POST'])\n@login_required\ndef user_update():\n current_user.set_location(request.form['lng'], request.form['lat'])\n return 'Location updated.'\n\n\[email protected]('/api/user/info', methods=['GET'])\n@login_required\ndef api_user():\n user = current_user\n return jsonify({'id': user.id, 'name': user.name, 'updated_location':\n user.updated_location, 'lat': user.lat, 'lng': user.lng})\n\n\[email protected]('/api/login', methods=['POST'])\ndef api_login():\n email = request.form['email']\n password = request.form['password']\n user, user_email = app.user_manager.find_user_by_email(email)\n ok = False\n if user and user.active:\n if app.user_manager.verify_password(password, user) is True:\n user.authenticated = True\n login_user(user, remember=True)\n ok = True\n return jsonify({'success': ok})\n", "<import token>\n<function token>\n\n\[email protected]('/user/list')\n@login_required\ndef user_list():\n return render('users.html', users=User.query.all())\n\n\[email protected]('/user/<uid>')\n@login_required\ndef user_view(uid):\n return render('map.html', user=User.query.get(uid))\n\n\n<function token>\n\n\[email protected]('/api/user/update', methods=['POST'])\n@login_required\ndef user_update():\n current_user.set_location(request.form['lng'], request.form['lat'])\n return 'Location updated.'\n\n\[email protected]('/api/user/info', methods=['GET'])\n@login_required\ndef api_user():\n user = current_user\n return jsonify({'id': user.id, 'name': user.name, 'updated_location':\n user.updated_location, 'lat': user.lat, 'lng': user.lng})\n\n\n<function token>\n", "<import token>\n<function token>\n<function token>\n\n\[email protected]('/user/<uid>')\n@login_required\ndef user_view(uid):\n return render('map.html', user=User.query.get(uid))\n\n\n<function token>\n\n\[email protected]('/api/user/update', methods=['POST'])\n@login_required\ndef user_update():\n current_user.set_location(request.form['lng'], request.form['lat'])\n return 'Location updated.'\n\n\[email protected]('/api/user/info', methods=['GET'])\n@login_required\ndef api_user():\n user = current_user\n return jsonify({'id': user.id, 'name': user.name, 'updated_location':\n user.updated_location, 'lat': user.lat, 'lng': user.lng})\n\n\n<function token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/api/user/update', methods=['POST'])\n@login_required\ndef user_update():\n current_user.set_location(request.form['lng'], request.form['lat'])\n return 'Location updated.'\n\n\[email protected]('/api/user/info', methods=['GET'])\n@login_required\ndef api_user():\n user = current_user\n return jsonify({'id': 
user.id, 'name': user.name, 'updated_location':\n user.updated_location, 'lat': user.lat, 'lng': user.lng})\n\n\n<function token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]('/api/user/info', methods=['GET'])\n@login_required\ndef api_user():\n user = current_user\n return jsonify({'id': user.id, 'name': user.name, 'updated_location':\n user.updated_location, 'lat': user.lat, 'lng': user.lng})\n\n\n<function token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n" ]
false
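The record above binds Flask routes, including a JSON login endpoint. A sketch of exercising /api/login with Flask's built-in test client; the import path of the app object is an assumption, since the record only shows "from . import app":

from myapp import app  # hypothetical package name for the record's app object

client = app.test_client()
resp = client.post('/api/login',
                   data={'email': '[email protected]', 'password': 'secret'})
print(resp.get_json())  # {'success': True} or {'success': False}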
99,442
cf20d21c1d95e4cdf7015f609d289cc7454333dc
# -*- coding: utf-8 -*- import logging from flask import Blueprint, abort, render_template, request from flask.ext.login import login_required from troika.card.models import Card from troika.history.models import CardsHistory logger = logging.getLogger(__name__) blueprint = Blueprint("history", __name__, url_prefix='/history', static_folder="../static") @blueprint.route("/", methods=['GET']) @login_required def list(): try: page = int(request.args.get('page', 1)) card_id = int(request.args.get('card_id', 0)) except ValueError: abort(404) card = None query = CardsHistory.query.order_by('action_date desc') if card_id: card = Card.query.get(card_id) if not card: abort(404) query = query.filter_by(card_id=card_id) history = query.paginate(page, CardsHistory.PER_PAGE, False) return render_template("history/list.html", card=card, history=history, action_title=CardsHistory.ACTION_TITLE, card_id=card_id) @blueprint.route("/<int:history_id>", methods=['GET']) @login_required def show(history_id): history = CardsHistory.query.get(history_id) if not history: abort(404) return render_template("history/show.html", history=history, to_text=CardsHistory.to_text, action_title=CardsHistory.ACTION_TITLE)
[ "# -*- coding: utf-8 -*-\nimport logging\n\nfrom flask import Blueprint, abort, render_template, request\nfrom flask.ext.login import login_required\n\nfrom troika.card.models import Card\nfrom troika.history.models import CardsHistory\n\nlogger = logging.getLogger(__name__)\n\nblueprint = Blueprint(\"history\", __name__, url_prefix='/history',\n static_folder=\"../static\")\n\n\[email protected](\"/\", methods=['GET'])\n@login_required\ndef list():\n\n try:\n page = int(request.args.get('page', 1))\n card_id = int(request.args.get('card_id', 0))\n except ValueError:\n abort(404)\n\n card = None\n query = CardsHistory.query.order_by('action_date desc')\n if card_id:\n card = Card.query.get(card_id)\n if not card:\n abort(404)\n\n query = query.filter_by(card_id=card_id)\n history = query.paginate(page, CardsHistory.PER_PAGE, False)\n return render_template(\"history/list.html\",\n card=card,\n history=history,\n action_title=CardsHistory.ACTION_TITLE,\n card_id=card_id)\n\n\[email protected](\"/<int:history_id>\", methods=['GET'])\n@login_required\ndef show(history_id):\n\n history = CardsHistory.query.get(history_id)\n if not history:\n abort(404)\n\n return render_template(\"history/show.html\",\n history=history,\n to_text=CardsHistory.to_text,\n action_title=CardsHistory.ACTION_TITLE)\n", "import logging\nfrom flask import Blueprint, abort, render_template, request\nfrom flask.ext.login import login_required\nfrom troika.card.models import Card\nfrom troika.history.models import CardsHistory\nlogger = logging.getLogger(__name__)\nblueprint = Blueprint('history', __name__, url_prefix='/history',\n static_folder='../static')\n\n\[email protected]('/', methods=['GET'])\n@login_required\ndef list():\n try:\n page = int(request.args.get('page', 1))\n card_id = int(request.args.get('card_id', 0))\n except ValueError:\n abort(404)\n card = None\n query = CardsHistory.query.order_by('action_date desc')\n if card_id:\n card = Card.query.get(card_id)\n if not card:\n abort(404)\n query = query.filter_by(card_id=card_id)\n history = query.paginate(page, CardsHistory.PER_PAGE, False)\n return render_template('history/list.html', card=card, history=history,\n action_title=CardsHistory.ACTION_TITLE, card_id=card_id)\n\n\[email protected]('/<int:history_id>', methods=['GET'])\n@login_required\ndef show(history_id):\n history = CardsHistory.query.get(history_id)\n if not history:\n abort(404)\n return render_template('history/show.html', history=history, to_text=\n CardsHistory.to_text, action_title=CardsHistory.ACTION_TITLE)\n", "<import token>\nlogger = logging.getLogger(__name__)\nblueprint = Blueprint('history', __name__, url_prefix='/history',\n static_folder='../static')\n\n\[email protected]('/', methods=['GET'])\n@login_required\ndef list():\n try:\n page = int(request.args.get('page', 1))\n card_id = int(request.args.get('card_id', 0))\n except ValueError:\n abort(404)\n card = None\n query = CardsHistory.query.order_by('action_date desc')\n if card_id:\n card = Card.query.get(card_id)\n if not card:\n abort(404)\n query = query.filter_by(card_id=card_id)\n history = query.paginate(page, CardsHistory.PER_PAGE, False)\n return render_template('history/list.html', card=card, history=history,\n action_title=CardsHistory.ACTION_TITLE, card_id=card_id)\n\n\[email protected]('/<int:history_id>', methods=['GET'])\n@login_required\ndef show(history_id):\n history = CardsHistory.query.get(history_id)\n if not history:\n abort(404)\n return render_template('history/show.html', history=history, 
to_text=\n CardsHistory.to_text, action_title=CardsHistory.ACTION_TITLE)\n", "<import token>\n<assignment token>\n\n\[email protected]('/', methods=['GET'])\n@login_required\ndef list():\n try:\n page = int(request.args.get('page', 1))\n card_id = int(request.args.get('card_id', 0))\n except ValueError:\n abort(404)\n card = None\n query = CardsHistory.query.order_by('action_date desc')\n if card_id:\n card = Card.query.get(card_id)\n if not card:\n abort(404)\n query = query.filter_by(card_id=card_id)\n history = query.paginate(page, CardsHistory.PER_PAGE, False)\n return render_template('history/list.html', card=card, history=history,\n action_title=CardsHistory.ACTION_TITLE, card_id=card_id)\n\n\[email protected]('/<int:history_id>', methods=['GET'])\n@login_required\ndef show(history_id):\n history = CardsHistory.query.get(history_id)\n if not history:\n abort(404)\n return render_template('history/show.html', history=history, to_text=\n CardsHistory.to_text, action_title=CardsHistory.ACTION_TITLE)\n", "<import token>\n<assignment token>\n<function token>\n\n\[email protected]('/<int:history_id>', methods=['GET'])\n@login_required\ndef show(history_id):\n history = CardsHistory.query.get(history_id)\n if not history:\n abort(404)\n return render_template('history/show.html', history=history, to_text=\n CardsHistory.to_text, action_title=CardsHistory.ACTION_TITLE)\n", "<import token>\n<assignment token>\n<function token>\n<function token>\n" ]
false
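The record above pages query results with Flask-SQLAlchemy's paginate(page, per_page, error_out). A small sketch of how the returned Pagination object is typically consumed, with the model and page size taken from the record:

# Page 1, PER_PAGE rows per page, no 404 on out-of-range pages (error_out=False).
pagination = CardsHistory.query.order_by('action_date desc').paginate(
    1, CardsHistory.PER_PAGE, False)
for row in pagination.items:  # rows on the current page
    print(row)
print(pagination.page, pagination.pages, pagination.total)  # position / page count / total rows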
99,443
50a85d0d41a1f5c8e197659644b2f6a961035110
#coding=utf-8 ''' convert excel to python data author: cowboyyang date: 2016-12-18 ''' import xlrd import os import sys from xml.dom import minidom import optparse import json type_convert_map = {} type_convert_map["int"] = long type_convert_map["long"] = long type_convert_map["string"] = str type_convert_map["float"] = float class Excel2PythonDataConverter: def __init__(self, excelname, excelsheet, outdir, targetfilename, messagemeta, xmlfie): self.excel = excelname self.sheet = excelsheet self.outdir = outdir self.targetfile = targetfilename self.metaname = messagemeta self.xmlfile = xmlfie self.metadict = {} def build_xml_dict(self): '''build a metadata dictionary''' domtree = minidom.parse(self.xmlfile) value = domtree.documentElement for node in value.childNodes: if node.nodeName == "struct": structname = node.getAttribute("name") self.metadict[structname] = {} for child in node.childNodes: if child.nodeName == "entry": cname = child.getAttribute("cname") self.metadict[structname][cname] = {} self.metadict[structname][cname]["name"]=child.getAttribute("name") self.metadict[structname][cname]["type"]=child.getAttribute("type") self.metadict[structname][cname]["option"]=child.getAttribute("option") def set_raw_filed(self, sheet, row, meta, key, itemmsg, leftkey="", rightkey=""): '''set a single field''' keytype = meta[key].get("type") keyname = meta[key].get("name") pType = type_convert_map.get(keytype) properkey= leftkey + key + rightkey bFound = False bSetValue = False bStr = False for col in xrange(0, sheet.ncols): cname=sheet.cell_value(0, col) if cname == properkey: bFound = True value = sheet.cell_value(row, col) vlen = 0 bStr = False if keytype == "string": #value = value.encode('utf-8') vlen = len(value) bStr = True else: if str(value) == "": vlen = 0 else: vlen = len(str(pType(value))) # no data, do not write into the dict if vlen > 0 and isinstance(itemmsg, dict): if bStr: itemmsg[keyname] = value else: itemmsg[keyname] = pType(value) bSetValue = True elif vlen > 0 and isinstance(itemmsg, list): itemmsg.append(pType(value)) bSetValue = True break if bFound is False: # means the corresponding key field was not found return -1 elif bSetValue is False: # means the data for the key is empty return 1 else: # means the data for the key was written return 0 def gen_one_row_data(self, sheet, row): '''parse the given row of the sheet and generate python dict data''' metadata = self.metadict.get(self.metaname) onerowdata = {} for key in metadata: keytype = metadata[key].get("type") keyname = metadata[key].get("name") option = metadata[key].get("option") if option == "repeated": # means it is an array type #print "found repeated %s " % key array = [] structmeta = self.metadict.get(keytype) if type_convert_map.get(keytype): seq = 1 while True: ret = self.set_raw_filed(sheet, row, metadata, key, array, rightkey=str(seq)) if ret < 0: break seq += 1 else: # array of composite structures seq = 1 while True: structitem = {} for structkey in structmeta: ret = self.set_raw_filed(sheet, row, structmeta, structkey, structitem, leftkey=key+str(seq)) if ret < 0 : break if structitem: array.append(structitem) else: # not a single value was set, stop break seq += 1 if array: onerowdata[keyname] = array else: # not an array type # primitive type if type_convert_map.get(keytype): self.set_raw_filed(sheet, row, metadata, key, onerowdata) else: # struct type structmeta = self.metadict.get(keytype) structitem = {} for structkey in structmeta: self.set_raw_filed(sheet, row, structmeta, structkey, structitem, leftkey=key) if structitem: onerowdata[keyname] = structitem return onerowdata def convert_excel_to_python(self): '''convert excel into python format''' # first build a metadata dictionary self.build_xml_dict() # read the excel file workbook = xlrd.open_workbook(self.excel) sheet = workbook.sheet_by_name(self.sheet) # generate the python dicts row_array_msg = [] for row in xrange(1, sheet.nrows): onerow = self.gen_one_row_data(sheet, row) if onerow: row_array_msg.append(onerow) self.write_to_file(row_array_msg) def write_to_file(self, msg): content = json.dumps(msg, indent=2, ensure_ascii=False).encode('utf-8') visualfile = self.targetfile.split('.')[0] + ".py" realfilename = os.path.join(self.outdir, visualfile) if os.path.exists(realfilename): # if an old file exists, delete it first os.remove(realfilename) handle = open(realfilename, 'w') # write the encoding header handle.writelines("#coding=utf-8\n") dictname = "configdata_" + self.metaname + " = \\" + "\n" handle.writelines(dictname) handle.writelines(content) handle.flush() handle.close() if __name__ == "__main__": # cmdline config info parser = optparse.OptionParser() parser.add_option("--xmlfile", dest="xmlfile", help="process target xml files") parser.add_option("--outdir", dest="outdir", help="target file store dir") parser.add_option("--excelfile", dest="excelfile", help="excel file name") parser.add_option("--sheetname", dest="sheetname", help="excel sheet name") parser.add_option("--messagemeta", dest="messagemeta", help="message meta data") parser.add_option("--dataname", dest="dataname", help="convert protobuf data name") (options, args) = parser.parse_args() procxmlfilelist = [] if options.xmlfile is None: print "no input xml file" parser.print_help() exit(1) else: procxmlfilelist = options.xmlfile.split(" ") if options.outdir is None: print "need store target dir" parser.print_help() exit(1) outdir = os.path.abspath(options.outdir) excelfile = str(options.excelfile).strip() excelsheetname = str(options.sheetname).strip().decode("utf-8") targetfilename = str(options.dataname).strip().decode("utf-8") messagemeta = str(options.messagemeta).strip() msgxmlfile = procxmlfilelist[0] excelconvert = Excel2PythonDataConverter(excelfile, excelsheetname, outdir, targetfilename, messagemeta, msgxmlfile) excelconvert.convert_excel_to_python()
[ "#coding=utf-8\n\n'''\nconvert excel to python data\nauthor: cowboyyang\ndate: 2016-12-18\n'''\n\nimport xlrd\nimport os\nimport sys\nfrom xml.dom import minidom\nimport optparse\nimport json\n\ntype_convert_map = {}\ntype_convert_map[\"int\"] = long\ntype_convert_map[\"long\"] = long\ntype_convert_map[\"string\"] = str\ntype_convert_map[\"float\"] = float\n\nclass Excel2PythonDataConverter:\n def __init__(self, excelname, excelsheet, outdir, targetfilename, messagemeta, xmlfie):\n self.excel = excelname\n self.sheet = excelsheet\n self.outdir = outdir\n self.targetfile = targetfilename\n self.metaname = messagemeta\n self.xmlfile = xmlfie\n self.metadict = {}\n\n def build_xml_dict(self):\n '''构造一个元数据字典'''\n domtree = minidom.parse(self.xmlfile)\n value = domtree.documentElement\n for node in value.childNodes:\n if node.nodeName == \"struct\":\n structname = node.getAttribute(\"name\")\n self.metadict[structname] = {}\n for child in node.childNodes:\n if child.nodeName == \"entry\":\n cname = child.getAttribute(\"cname\")\n self.metadict[structname][cname] = {}\n self.metadict[structname][cname][\"name\"]=child.getAttribute(\"name\")\n self.metadict[structname][cname][\"type\"]=child.getAttribute(\"type\")\n self.metadict[structname][cname][\"option\"]=child.getAttribute(\"option\")\n\n def set_raw_filed(self, sheet, row, meta, key, itemmsg, leftkey=\"\", rightkey=\"\"):\n '''设置好一个属性'''\n keytype = meta[key].get(\"type\")\n keyname = meta[key].get(\"name\")\n pType = type_convert_map.get(keytype)\n properkey= leftkey + key + rightkey\n bFound = False\n bSetValue = False\n bStr = False\n for col in xrange(0, sheet.ncols):\n cname=sheet.cell_value(0, col)\n if cname == properkey:\n bFound = True\n value = sheet.cell_value(row, col)\n vlen = 0\n bStr = False\n if keytype == \"string\":\n #value = value.encode('utf-8')\n vlen = len(value)\n bStr = True\n else:\n if str(value) == \"\":\n vlen = 0\n else:\n vlen = len(str(pType(value)))\n\n # 无数据,不写入字典\n if vlen > 0 and isinstance(itemmsg, dict):\n if bStr:\n itemmsg[keyname] = value\n else:\n itemmsg[keyname] = pType(value)\n bSetValue = True\n elif vlen > 0 and isinstance(itemmsg, list):\n itemmsg.append(pType(value))\n bSetValue = True\n break\n\n if bFound is False:\n # 说明没有找到对应key字段\n return -1\n elif bSetValue is False:\n # 说明对应key数据为空\n return 1\n else:\n # 说明写入了对应key的数据\n return 0\n\n def gen_one_row_data(self, sheet, row):\n '''解析sheet中的第row行,生成python字典数据'''\n metadata = self.metadict.get(self.metaname)\n onerowdata = {}\n for key in metadata:\n keytype = metadata[key].get(\"type\")\n keyname = metadata[key].get(\"name\")\n option = metadata[key].get(\"option\")\n\n if option == \"repeated\":\n # 说明是数组类型\n #print \"found repeated %s \" % key\n array = []\n structmeta = self.metadict.get(keytype)\n if type_convert_map.get(keytype):\n seq = 1\n while True:\n ret = self.set_raw_filed(sheet, row, metadata, key, array, rightkey=str(seq))\n if ret < 0:\n break\n seq += 1\n else:\n # 复合结构的数组类型\n seq = 1\n while True:\n structitem = {}\n for structkey in structmeta:\n ret = self.set_raw_filed(sheet, row, structmeta, structkey, structitem, leftkey=key+str(seq))\n if ret < 0 :\n break\n if structitem:\n array.append(structitem)\n else:\n # 一个值都没有设置上,终止\n break\n seq += 1\n if array:\n onerowdata[keyname] = array\n else:\n # 非数组类型\n # 原始类型\n if type_convert_map.get(keytype):\n self.set_raw_filed(sheet, row, metadata, key, onerowdata)\n else:\n # 结构体类型\n structmeta = self.metadict.get(keytype)\n structitem = {}\n for structkey in structmeta:\n 
self.set_raw_filed(sheet, row, structmeta, structkey, structitem, leftkey=key)\n if structitem:\n onerowdata[keyname] = structitem\n return onerowdata\n\n def convert_excel_to_python(self):\n '''将excel转换成python格式'''\n # 首先构建一个元数据字典\n self.build_xml_dict()\n # 读取excel\n workbook = xlrd.open_workbook(self.excel)\n sheet = workbook.sheet_by_name(self.sheet)\n\n # 生成python字典\n row_array_msg = []\n for row in xrange(1, sheet.nrows):\n onerow = self.gen_one_row_data(sheet, row)\n if onerow:\n row_array_msg.append(onerow)\n\n self.write_to_file(row_array_msg)\n\n def write_to_file(self, msg):\n content = json.dumps(msg, indent=2, ensure_ascii=False).encode('utf-8')\n visualfile = self.targetfile.split('.')[0] + \".py\"\n realfilename = os.path.join(self.outdir, visualfile)\n if os.path.exists(realfilename):\n # 如果有旧文件,先删除\n os.remove(realfilename)\n handle = open(realfilename, 'w')\n # 写入编码格式\n handle.writelines(\"#coding=utf-8\\n\")\n dictname = \"configdata_\" + self.metaname + \" = \\\\\" + \"\\n\"\n handle.writelines(dictname)\n handle.writelines(content)\n handle.flush()\n handle.close()\n\nif __name__ == \"__main__\":\n # cmdline config info\n parser = optparse.OptionParser()\n parser.add_option(\"--xmlfile\", dest=\"xmlfile\", help=\"process target xml files\")\n parser.add_option(\"--outdir\", dest=\"outdir\", help=\"target file store dir\")\n parser.add_option(\"--excelfile\", dest=\"excelfile\", help=\"excel file name\")\n parser.add_option(\"--sheetname\", dest=\"sheetname\", help=\"excel sheet name\")\n parser.add_option(\"--messagemeta\", dest=\"messagemeta\", help=\"message meta data\")\n parser.add_option(\"--dataname\", dest=\"dataname\", help=\"convert protobuf data name\")\n\n (options, args) = parser.parse_args()\n procxmlfilelist = []\n if options.xmlfile is None:\n print \"no input xml file\"\n parser.print_help()\n exit(1)\n else:\n procxmlfilelist = options.xmlfile.split(\" \")\n\n if options.outdir is None:\n print \"need store target dir\"\n parser.print_help()\n exit(1)\n\n outdir = os.path.abspath(options.outdir)\n excelfile = str(options.excelfile).strip()\n excelsheetname = str(options.sheetname).strip().decode(\"utf-8\")\n targetfilename = str(options.dataname).strip().decode(\"utf-8\")\n messagemeta = str(options.messagemeta).strip()\n msgxmlfile = procxmlfilelist[0]\n excelconvert = Excel2PythonDataConverter(excelfile,\n excelsheetname,\n outdir,\n targetfilename,\n messagemeta,\n msgxmlfile)\n excelconvert.convert_excel_to_python()" ]
true
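The record above is flagged error: true, consistent with it being Python 2 source: bare print statements, xrange, and the long builtin all fail under a Python 3 parser. For reference, a sketch of its type map ported to Python 3, where long folds into int:

# Python 3 rendering of the record's type_convert_map.
type_convert_map = {
    "int": int,
    "long": int,     # Python 3 ints are arbitrary precision; long is gone
    "string": str,
    "float": float,
}
print(type_convert_map["long"]("42") + 1)  # 43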
99,444
4cc8a92880490c0915cafe45dcc8f8582407cbe3
version https://git-lfs.github.com/spec/v1 oid sha256:6d7020584427040bc4b4d457dbb070e50ef5121fe0cb6e4a2a75939d22dceed8 size 1003
[ "version https://git-lfs.github.com/spec/v1\noid sha256:6d7020584427040bc4b4d457dbb070e50ef5121fe0cb6e4a2a75939d22dceed8\nsize 1003\n" ]
true
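The record above is not Python at all but a Git LFS pointer file, which explains its error: true flag. The pointer is just three key-value lines and can be unpacked with the standard library alone:

# Minimal parse of the LFS pointer shown in the record.
pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:6d7020584427040bc4b4d457dbb070e50ef5121fe0cb6e4a2a75939d22dceed8\n"
    "size 1003"
)
fields = dict(line.split(' ', 1) for line in pointer.splitlines())
print(fields['oid'], fields['size'])  # the tracked blob's hash and byte size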
99,445
57d433d91139ce656cb82e3281fa10e1ff02ba7d
class InterfaceActivation: def __init__(self): self.menu_active = False self.game_active = False def get_game_active(self): return self.game_active class GameStats(): def __init__(self, ai_game): self.screen = ai_game.screen self.setting = ai_game.setting self.high_score = 0 self.score = 0 self.level = 1 self.ships_left = self.setting.ship_limit def reset_stats(self): self.ships_left = self.setting.ship_limit self.score = 0 self.level = 1
[ "class InterfaceActivation:\n def __init__(self):\n self.menu_active = False\n self.game_active = False\n\n def get_game_active(self):\n return self.game_active\n\n\nclass GameStats():\n def __init__(self, ai_game):\n self.screen = ai_game.screen\n self.setting = ai_game.setting\n self.high_score = 0\n self.score = 0\n self.level = 1\n self.ships_left = self.setting.ship_limit\n\n def reset_stats(self):\n self.ships_left = self.setting.ship_limit\n self.score = 0\n self.level = 1\n", "class InterfaceActivation:\n\n def __init__(self):\n self.menu_active = False\n self.game_active = False\n\n def get_game_active(self):\n return self.game_active\n\n\nclass GameStats:\n\n def __init__(self, ai_game):\n self.screen = ai_game.screen\n self.setting = ai_game.setting\n self.high_score = 0\n self.score = 0\n self.level = 1\n self.ships_left = self.setting.ship_limit\n\n def reset_stats(self):\n self.ships_left = self.setting.ship_limit\n self.score = 0\n self.level = 1\n", "class InterfaceActivation:\n\n def __init__(self):\n self.menu_active = False\n self.game_active = False\n <function token>\n\n\nclass GameStats:\n\n def __init__(self, ai_game):\n self.screen = ai_game.screen\n self.setting = ai_game.setting\n self.high_score = 0\n self.score = 0\n self.level = 1\n self.ships_left = self.setting.ship_limit\n\n def reset_stats(self):\n self.ships_left = self.setting.ship_limit\n self.score = 0\n self.level = 1\n", "class InterfaceActivation:\n <function token>\n <function token>\n\n\nclass GameStats:\n\n def __init__(self, ai_game):\n self.screen = ai_game.screen\n self.setting = ai_game.setting\n self.high_score = 0\n self.score = 0\n self.level = 1\n self.ships_left = self.setting.ship_limit\n\n def reset_stats(self):\n self.ships_left = self.setting.ship_limit\n self.score = 0\n self.level = 1\n", "<class token>\n\n\nclass GameStats:\n\n def __init__(self, ai_game):\n self.screen = ai_game.screen\n self.setting = ai_game.setting\n self.high_score = 0\n self.score = 0\n self.level = 1\n self.ships_left = self.setting.ship_limit\n\n def reset_stats(self):\n self.ships_left = self.setting.ship_limit\n self.score = 0\n self.level = 1\n", "<class token>\n\n\nclass GameStats:\n <function token>\n\n def reset_stats(self):\n self.ships_left = self.setting.ship_limit\n self.score = 0\n self.level = 1\n", "<class token>\n\n\nclass GameStats:\n <function token>\n <function token>\n", "<class token>\n<class token>\n" ]
false
99,446
f1a6e0a540b9b7e003c96a767c83cc7dc72919bf
# -*- coding: utf-8 -*- """ Created on Fri Apr 27 01:12:06 2018 @author: Lourenço Neto """ """ The problem consists of checking which of the figures can be drawn while meeting the requirement of passing through each edge only once. Taking into account that each of these figures can be treated as an undirected, cyclic graph, the route, as the exercise indicates, is an Eulerian path: if the number of vertices of the graph with odd degree is 0 or 2, it is possible to trace an Eulerian path through it. Therefore, to verify whether the figure can be drawn or not, it would suffice to take the graph's adjacency matrix and count how many vertices are linked to a linked list (which records which vertices they are connected to) with an odd number of elements. If the number of vertices meeting this requirement is 0 or 2, the figure can be drawn. """
[ "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Apr 27 01:12:06 2018\n\n@author: Lourenço Neto\n\"\"\"\n\n\"\"\"\nThe problem consists of checking which of the figures can be drawn while meeting the requirement\nof passing through each edge only once.\n\nTaking into account that each of these figures can be treated as an undirected, cyclic graph,\nthe route, as the exercise indicates, is an Eulerian path:\n    if the number of vertices of the graph with odd degree is 0 or 2, it is possible to trace an Eulerian path through it\nTherefore, to verify whether the figure can be drawn or not,\nit would suffice to take the graph's adjacency matrix and count how many vertices are linked to a linked list (which records which vertices they are connected to)\nwith an odd number of elements. If the number of vertices meeting this requirement is 0 or 2, the figure can be drawn.\n\"\"\"", "<docstring token>\n" ]
false
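The record above argues that a figure can be drawn in one stroke exactly when its graph has 0 or 2 odd-degree vertices. A sketch of that check over an adjacency matrix, as the docstring suggests (connectedness of the figure is assumed, as in the exercise):

def can_be_drawn(adj):
    # adj: symmetric 0/1 adjacency matrix of an undirected graph.
    odd_degree = sum(1 for row in adj if sum(row) % 2 == 1)
    return odd_degree in (0, 2)  # Eulerian path condition

# A square: every vertex has degree 2, so it is drawable.
square = [[0, 1, 0, 1],
          [1, 0, 1, 0],
          [0, 1, 0, 1],
          [1, 0, 1, 0]]
print(can_be_drawn(square))  # True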
99,447
ff78282e7ec53e97ff631b49caefae5774290043
import time import sys sys.setrecursionlimit(20999) start_time = time.time() f = open('names_1.txt', 'r') names_1 = f.read().split("\n") # List containing 10000 names f.close() f = open('names_2.txt', 'r') names_2 = f.read().split("\n") # List containing 10000 names f.close() duplicates = [] # Return the list of duplicates in this data structure # Replace the nested for loops below with your improvements # THE RUNTIME FOR THIS STARTER CODE IS O(m*n) BECAUSE THE INPUT COMES FROM TWO DIFFERENT FILES THAT COULD BE DIFFERENT SIZES. # for name_1 in names_1: # for name_2 in names_2: # if name_1 == name_2: # duplicates.append(name_1) class BinarySearchTree: def __init__(self, value): self.value = value self.left = None self.right = None def insert(self, value): if value[0] < self.value[0]: if self.left is None: self.left = BinarySearchTree(value) else: self.left.insert(value) # elif value[0] >= self.value[0]: else: if self.right is None: self.right = BinarySearchTree(value) else: self.right.insert(value) def contains(self, target): if target == self.value: return True if target[0] < self.value[0]: if self.left is None: return False else: return self.left.contains(target) else: if self.right is None: return False else: return self.right.contains(target) # def contains(self, target): # current = self # while current is not None: # # if target == current.value: # # return True # if target[0] < current.value[0]: # current = current.left # elif target[0] > current.value[0]: # current = current.right # # elif target == current.value: # # return True # else: # print(current.value) # return True # return False # name_1_tree = [] # i = 0 # for name_1 in names_1: # if i == 0: # name_1_tree = BinarySearchTree(name_1) # i += 1 # else: # name_1_tree.insert(name_1) # i += 1 # for name_2 in names_2: # if name_1_tree.contains(name_2): # duplicates.append(name_2) # First, we create a Binary Search Tree using Names as the root node. name_1_tree = BinarySearchTree("Names") # Second, we populate the tree by looping through the first file for name_1 in names_1: name_1_tree.insert(name_1) # Then we loop through the second file and check to see which names from the second list are in the tree we just created, and for any that return true, we append that name to the duplicates list. for name_2 in names_2: if name_1_tree.contains(name_2): duplicates.append(name_2) end_time = time.time() print (f"{len(duplicates)} duplicates:\n\n{', '.join(duplicates)}\n\n") print (f"runtime: {end_time - start_time} seconds") # ---------- Stretch Goal ----------- # Python has built-in tools that allow for a very efficient approach to this problem # What's the best time you can accomplish? Thare are no restrictions on techniques or data # structures, but you may not import any additional libraries that you did not write yourself.
[ "import time\nimport sys\n\nsys.setrecursionlimit(20999)\n\nstart_time = time.time()\n\nf = open('names_1.txt', 'r')\nnames_1 = f.read().split(\"\\n\") # List containing 10000 names\nf.close()\n\nf = open('names_2.txt', 'r')\nnames_2 = f.read().split(\"\\n\") # List containing 10000 names\nf.close()\n\nduplicates = [] # Return the list of duplicates in this data structure\n\n# Replace the nested for loops below with your improvements\n# THE RUNTIME FOR THIS STARTER CODE IS O(m*n) BECAUSE THE INPUT COMES FROM TWO DIFFERENT FILES THAT COULD BE DIFFERENT SIZES.\n# for name_1 in names_1:\n# for name_2 in names_2:\n# if name_1 == name_2:\n# duplicates.append(name_1)\n\nclass BinarySearchTree:\n def __init__(self, value):\n self.value = value\n self.left = None\n self.right = None\n\n def insert(self, value):\n if value[0] < self.value[0]:\n if self.left is None:\n self.left = BinarySearchTree(value)\n else:\n self.left.insert(value)\n # elif value[0] >= self.value[0]:\n else:\n if self.right is None:\n self.right = BinarySearchTree(value)\n else:\n self.right.insert(value)\n \n def contains(self, target):\n if target == self.value:\n return True\n if target[0] < self.value[0]:\n if self.left is None:\n return False\n else:\n return self.left.contains(target)\n else:\n if self.right is None:\n return False\n else:\n return self.right.contains(target)\n\n # def contains(self, target):\n # current = self\n # while current is not None:\n # # if target == current.value:\n # # return True\n # if target[0] < current.value[0]:\n # current = current.left\n # elif target[0] > current.value[0]:\n # current = current.right\n # # elif target == current.value:\n # # return True\n # else:\n # print(current.value)\n # return True\n # return False\n\n# name_1_tree = []\n# i = 0\n\n# for name_1 in names_1:\n# if i == 0:\n# name_1_tree = BinarySearchTree(name_1)\n# i += 1\n# else:\n# name_1_tree.insert(name_1)\n# i += 1\n# for name_2 in names_2:\n# if name_1_tree.contains(name_2):\n# duplicates.append(name_2)\n\n# First, we create a Binary Search Tree using Names as the root node.\nname_1_tree = BinarySearchTree(\"Names\")\n\n# Second, we populate the tree by looping through the first file\nfor name_1 in names_1:\n name_1_tree.insert(name_1)\n\n# Then we loop through the second file and check to see which names from the second list are in the tree we just created, and for any that return true, we append that name to the duplicates list.\nfor name_2 in names_2:\n if name_1_tree.contains(name_2):\n duplicates.append(name_2)\n\n\nend_time = time.time()\nprint (f\"{len(duplicates)} duplicates:\\n\\n{', '.join(duplicates)}\\n\\n\")\nprint (f\"runtime: {end_time - start_time} seconds\")\n\n# ---------- Stretch Goal -----------\n# Python has built-in tools that allow for a very efficient approach to this problem\n# What's the best time you can accomplish? 
Thare are no restrictions on techniques or data\n# structures, but you may not import any additional libraries that you did not write yourself.\n\n\n", "import time\nimport sys\nsys.setrecursionlimit(20999)\nstart_time = time.time()\nf = open('names_1.txt', 'r')\nnames_1 = f.read().split('\\n')\nf.close()\nf = open('names_2.txt', 'r')\nnames_2 = f.read().split('\\n')\nf.close()\nduplicates = []\n\n\nclass BinarySearchTree:\n\n def __init__(self, value):\n self.value = value\n self.left = None\n self.right = None\n\n def insert(self, value):\n if value[0] < self.value[0]:\n if self.left is None:\n self.left = BinarySearchTree(value)\n else:\n self.left.insert(value)\n elif self.right is None:\n self.right = BinarySearchTree(value)\n else:\n self.right.insert(value)\n\n def contains(self, target):\n if target == self.value:\n return True\n if target[0] < self.value[0]:\n if self.left is None:\n return False\n else:\n return self.left.contains(target)\n elif self.right is None:\n return False\n else:\n return self.right.contains(target)\n\n\nname_1_tree = BinarySearchTree('Names')\nfor name_1 in names_1:\n name_1_tree.insert(name_1)\nfor name_2 in names_2:\n if name_1_tree.contains(name_2):\n duplicates.append(name_2)\nend_time = time.time()\nprint(f\"\"\"{len(duplicates)} duplicates:\n\n{', '.join(duplicates)}\n\n\"\"\")\nprint(f'runtime: {end_time - start_time} seconds')\n", "<import token>\nsys.setrecursionlimit(20999)\nstart_time = time.time()\nf = open('names_1.txt', 'r')\nnames_1 = f.read().split('\\n')\nf.close()\nf = open('names_2.txt', 'r')\nnames_2 = f.read().split('\\n')\nf.close()\nduplicates = []\n\n\nclass BinarySearchTree:\n\n def __init__(self, value):\n self.value = value\n self.left = None\n self.right = None\n\n def insert(self, value):\n if value[0] < self.value[0]:\n if self.left is None:\n self.left = BinarySearchTree(value)\n else:\n self.left.insert(value)\n elif self.right is None:\n self.right = BinarySearchTree(value)\n else:\n self.right.insert(value)\n\n def contains(self, target):\n if target == self.value:\n return True\n if target[0] < self.value[0]:\n if self.left is None:\n return False\n else:\n return self.left.contains(target)\n elif self.right is None:\n return False\n else:\n return self.right.contains(target)\n\n\nname_1_tree = BinarySearchTree('Names')\nfor name_1 in names_1:\n name_1_tree.insert(name_1)\nfor name_2 in names_2:\n if name_1_tree.contains(name_2):\n duplicates.append(name_2)\nend_time = time.time()\nprint(f\"\"\"{len(duplicates)} duplicates:\n\n{', '.join(duplicates)}\n\n\"\"\")\nprint(f'runtime: {end_time - start_time} seconds')\n", "<import token>\nsys.setrecursionlimit(20999)\n<assignment token>\nf.close()\n<assignment token>\nf.close()\n<assignment token>\n\n\nclass BinarySearchTree:\n\n def __init__(self, value):\n self.value = value\n self.left = None\n self.right = None\n\n def insert(self, value):\n if value[0] < self.value[0]:\n if self.left is None:\n self.left = BinarySearchTree(value)\n else:\n self.left.insert(value)\n elif self.right is None:\n self.right = BinarySearchTree(value)\n else:\n self.right.insert(value)\n\n def contains(self, target):\n if target == self.value:\n return True\n if target[0] < self.value[0]:\n if self.left is None:\n return False\n else:\n return self.left.contains(target)\n elif self.right is None:\n return False\n else:\n return self.right.contains(target)\n\n\n<assignment token>\nfor name_1 in names_1:\n name_1_tree.insert(name_1)\nfor name_2 in names_2:\n if name_1_tree.contains(name_2):\n 
duplicates.append(name_2)\n<assignment token>\nprint(f\"\"\"{len(duplicates)} duplicates:\n\n{', '.join(duplicates)}\n\n\"\"\")\nprint(f'runtime: {end_time - start_time} seconds')\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass BinarySearchTree:\n\n def __init__(self, value):\n self.value = value\n self.left = None\n self.right = None\n\n def insert(self, value):\n if value[0] < self.value[0]:\n if self.left is None:\n self.left = BinarySearchTree(value)\n else:\n self.left.insert(value)\n elif self.right is None:\n self.right = BinarySearchTree(value)\n else:\n self.right.insert(value)\n\n def contains(self, target):\n if target == self.value:\n return True\n if target[0] < self.value[0]:\n if self.left is None:\n return False\n else:\n return self.left.contains(target)\n elif self.right is None:\n return False\n else:\n return self.right.contains(target)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass BinarySearchTree:\n <function token>\n\n def insert(self, value):\n if value[0] < self.value[0]:\n if self.left is None:\n self.left = BinarySearchTree(value)\n else:\n self.left.insert(value)\n elif self.right is None:\n self.right = BinarySearchTree(value)\n else:\n self.right.insert(value)\n\n def contains(self, target):\n if target == self.value:\n return True\n if target[0] < self.value[0]:\n if self.left is None:\n return False\n else:\n return self.left.contains(target)\n elif self.right is None:\n return False\n else:\n return self.right.contains(target)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass BinarySearchTree:\n <function token>\n <function token>\n\n def contains(self, target):\n if target == self.value:\n return True\n if target[0] < self.value[0]:\n if self.left is None:\n return False\n else:\n return self.left.contains(target)\n elif self.right is None:\n return False\n else:\n return self.right.contains(target)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass BinarySearchTree:\n <function token>\n <function token>\n <function token>\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
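The record's closing comment hints that Python built-ins beat the tree approach for the stretch goal; hashing both name lists and intersecting them runs in roughly O(m + n) on average. A sketch using the record's own file names:

# Stretch-goal sketch: set intersection instead of a binary search tree.
with open('names_1.txt') as f:
    names_1 = f.read().split('\n')
with open('names_2.txt') as f:
    names_2 = f.read().split('\n')

duplicates = sorted(set(names_1) & set(names_2))  # average O(m + n)
print(len(duplicates))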
99,448
7907f4d2cf067225a4fff288673c33b7c3072d33
import traceback import logging import dataset from technews_nlp_aggregator.common.util import extract_host, extract_normpath, extract_source_without_www, extract_start_url from technews_nlp_aggregator.scraping.main.scrapy.spiders import all_start_urls class ArticlesSpiderRepo: def get_connection(self): return self.dataset_connection def __init__(self, db_connection): self.db_connection = db_connection self.dataset_connection = dataset.connect(self.db_connection, engine_kwargs={ 'connect_args': {'charset': 'utf8'} }) self.engine = self.dataset_connection.engine def retrieve_urls_queued(self): sql_user_similar = "SELECT UTA_SPIDER, UTA_URL FROM URLS_TO_ADD WHERE UTA_PROCESSED IS NULL" similar_stories = [] con = self.get_connection() query_result= con.query(sql_user_similar ) return query_result def add_url_list(self, url_list): sql_add_user = "INSERT INTO URLS_TO_ADD (UTA_SPIDER, UTA_URL) VALUES (:uta_spider, :uta_url) " con = self.get_connection() messages = [] for url in url_list: url = extract_normpath(url) start_url = extract_start_url(url) logging.info("Starting url: {}".format(start_url)) if (start_url in all_start_urls): host = extract_source_without_www(url).lower().capitalize() if url and host: try: con.begin() con.query(sql_add_user , {"uta_spider": host, "uta_url": url.strip()}) messages.append("Added {} : {}".format(host, url)) con.commit() except: con.rollback() messages.append('Could not add {}: {}'.format(host, url)) traceback.print_stack() else: messages.append('Urls from {} cannot be parsed yet'.format(start_url)) return messages def update_to_crawled(self, con=None): sql_update = "UPDATE URLS_TO_ADD SET UTA_PROCESSED = SYSDATE()" con = self.get_connection() if not con else con try: con.begin() article_query = con.query(sql_update) con.commit() except: con.rollback() traceback.print_stack()
[ "import traceback\nimport logging\nimport dataset\nfrom technews_nlp_aggregator.common.util import extract_host, extract_normpath, extract_source_without_www, extract_start_url\nfrom technews_nlp_aggregator.scraping.main.scrapy.spiders import all_start_urls\n\nclass ArticlesSpiderRepo:\n\n def get_connection(self):\n return self.dataset_connection\n\n def __init__(self, db_connection):\n self.db_connection = db_connection\n self.dataset_connection = dataset.connect(self.db_connection, engine_kwargs={\n 'connect_args': {'charset': 'utf8'}\n })\n self.engine = self.dataset_connection.engine\n\n def retrieve_urls_queued(self):\n sql_user_similar = \"SELECT UTA_SPIDER, UTA_URL FROM URLS_TO_ADD WHERE UTA_PROCESSED IS NULL\"\n similar_stories = []\n con = self.get_connection()\n query_result= con.query(sql_user_similar )\n\n return query_result\n\n def add_url_list(self, url_list):\n sql_add_user = \"INSERT INTO URLS_TO_ADD (UTA_SPIDER, UTA_URL) VALUES (:uta_spider, :uta_url) \"\n con = self.get_connection()\n messages = []\n for url in url_list:\n url = extract_normpath(url)\n start_url = extract_start_url(url)\n logging.info(\"Starting url: {}\".format(start_url))\n if (start_url in all_start_urls):\n host = extract_source_without_www(url).lower().capitalize()\n if url and host:\n try:\n con.begin()\n con.query(sql_add_user , {\"uta_spider\": host, \"uta_url\": url.strip()})\n messages.append(\"Added {} : {}\".format(host, url))\n con.commit()\n except:\n con.rollback()\n messages.append('Could not add {}: {}'.format(host, url))\n traceback.print_stack()\n else:\n messages.append('Urls from {} cannot be parsed yet'.format(start_url))\n return messages\n\n\n def update_to_crawled(self, con=None):\n sql_update = \"UPDATE URLS_TO_ADD SET UTA_PROCESSED = SYSDATE()\"\n con = self.get_connection() if not con else con\n try:\n con.begin()\n article_query = con.query(sql_update)\n con.commit()\n except:\n con.rollback()\n traceback.print_stack()", "import traceback\nimport logging\nimport dataset\nfrom technews_nlp_aggregator.common.util import extract_host, extract_normpath, extract_source_without_www, extract_start_url\nfrom technews_nlp_aggregator.scraping.main.scrapy.spiders import all_start_urls\n\n\nclass ArticlesSpiderRepo:\n\n def get_connection(self):\n return self.dataset_connection\n\n def __init__(self, db_connection):\n self.db_connection = db_connection\n self.dataset_connection = dataset.connect(self.db_connection,\n engine_kwargs={'connect_args': {'charset': 'utf8'}})\n self.engine = self.dataset_connection.engine\n\n def retrieve_urls_queued(self):\n sql_user_similar = (\n 'SELECT UTA_SPIDER, UTA_URL FROM URLS_TO_ADD WHERE UTA_PROCESSED IS NULL'\n )\n similar_stories = []\n con = self.get_connection()\n query_result = con.query(sql_user_similar)\n return query_result\n\n def add_url_list(self, url_list):\n sql_add_user = (\n 'INSERT INTO URLS_TO_ADD (UTA_SPIDER, UTA_URL) VALUES (:uta_spider, :uta_url) '\n )\n con = self.get_connection()\n messages = []\n for url in url_list:\n url = extract_normpath(url)\n start_url = extract_start_url(url)\n logging.info('Starting url: {}'.format(start_url))\n if start_url in all_start_urls:\n host = extract_source_without_www(url).lower().capitalize()\n if url and host:\n try:\n con.begin()\n con.query(sql_add_user, {'uta_spider': host,\n 'uta_url': url.strip()})\n messages.append('Added {} : {}'.format(host, url))\n con.commit()\n except:\n con.rollback()\n messages.append('Could not add {}: {}'.format(host,\n url))\n traceback.print_stack()\n 
else:\n messages.append('Urls from {} cannot be parsed yet'.format(\n start_url))\n return messages\n\n def update_to_crawled(self, con=None):\n sql_update = 'UPDATE URLS_TO_ADD SET UTA_PROCESSED = SYSDATE()'\n con = self.get_connection() if not con else con\n try:\n con.begin()\n article_query = con.query(sql_update)\n con.commit()\n except:\n con.rollback()\n traceback.print_stack()\n", "<import token>\n\n\nclass ArticlesSpiderRepo:\n\n def get_connection(self):\n return self.dataset_connection\n\n def __init__(self, db_connection):\n self.db_connection = db_connection\n self.dataset_connection = dataset.connect(self.db_connection,\n engine_kwargs={'connect_args': {'charset': 'utf8'}})\n self.engine = self.dataset_connection.engine\n\n def retrieve_urls_queued(self):\n sql_user_similar = (\n 'SELECT UTA_SPIDER, UTA_URL FROM URLS_TO_ADD WHERE UTA_PROCESSED IS NULL'\n )\n similar_stories = []\n con = self.get_connection()\n query_result = con.query(sql_user_similar)\n return query_result\n\n def add_url_list(self, url_list):\n sql_add_user = (\n 'INSERT INTO URLS_TO_ADD (UTA_SPIDER, UTA_URL) VALUES (:uta_spider, :uta_url) '\n )\n con = self.get_connection()\n messages = []\n for url in url_list:\n url = extract_normpath(url)\n start_url = extract_start_url(url)\n logging.info('Starting url: {}'.format(start_url))\n if start_url in all_start_urls:\n host = extract_source_without_www(url).lower().capitalize()\n if url and host:\n try:\n con.begin()\n con.query(sql_add_user, {'uta_spider': host,\n 'uta_url': url.strip()})\n messages.append('Added {} : {}'.format(host, url))\n con.commit()\n except:\n con.rollback()\n messages.append('Could not add {}: {}'.format(host,\n url))\n traceback.print_stack()\n else:\n messages.append('Urls from {} cannot be parsed yet'.format(\n start_url))\n return messages\n\n def update_to_crawled(self, con=None):\n sql_update = 'UPDATE URLS_TO_ADD SET UTA_PROCESSED = SYSDATE()'\n con = self.get_connection() if not con else con\n try:\n con.begin()\n article_query = con.query(sql_update)\n con.commit()\n except:\n con.rollback()\n traceback.print_stack()\n", "<import token>\n\n\nclass ArticlesSpiderRepo:\n\n def get_connection(self):\n return self.dataset_connection\n\n def __init__(self, db_connection):\n self.db_connection = db_connection\n self.dataset_connection = dataset.connect(self.db_connection,\n engine_kwargs={'connect_args': {'charset': 'utf8'}})\n self.engine = self.dataset_connection.engine\n <function token>\n\n def add_url_list(self, url_list):\n sql_add_user = (\n 'INSERT INTO URLS_TO_ADD (UTA_SPIDER, UTA_URL) VALUES (:uta_spider, :uta_url) '\n )\n con = self.get_connection()\n messages = []\n for url in url_list:\n url = extract_normpath(url)\n start_url = extract_start_url(url)\n logging.info('Starting url: {}'.format(start_url))\n if start_url in all_start_urls:\n host = extract_source_without_www(url).lower().capitalize()\n if url and host:\n try:\n con.begin()\n con.query(sql_add_user, {'uta_spider': host,\n 'uta_url': url.strip()})\n messages.append('Added {} : {}'.format(host, url))\n con.commit()\n except:\n con.rollback()\n messages.append('Could not add {}: {}'.format(host,\n url))\n traceback.print_stack()\n else:\n messages.append('Urls from {} cannot be parsed yet'.format(\n start_url))\n return messages\n\n def update_to_crawled(self, con=None):\n sql_update = 'UPDATE URLS_TO_ADD SET UTA_PROCESSED = SYSDATE()'\n con = self.get_connection() if not con else con\n try:\n con.begin()\n article_query = con.query(sql_update)\n 
con.commit()\n except:\n con.rollback()\n traceback.print_stack()\n", "<import token>\n\n\nclass ArticlesSpiderRepo:\n\n def get_connection(self):\n return self.dataset_connection\n\n def __init__(self, db_connection):\n self.db_connection = db_connection\n self.dataset_connection = dataset.connect(self.db_connection,\n engine_kwargs={'connect_args': {'charset': 'utf8'}})\n self.engine = self.dataset_connection.engine\n <function token>\n <function token>\n\n def update_to_crawled(self, con=None):\n sql_update = 'UPDATE URLS_TO_ADD SET UTA_PROCESSED = SYSDATE()'\n con = self.get_connection() if not con else con\n try:\n con.begin()\n article_query = con.query(sql_update)\n con.commit()\n except:\n con.rollback()\n traceback.print_stack()\n", "<import token>\n\n\nclass ArticlesSpiderRepo:\n\n def get_connection(self):\n return self.dataset_connection\n <function token>\n <function token>\n <function token>\n\n def update_to_crawled(self, con=None):\n sql_update = 'UPDATE URLS_TO_ADD SET UTA_PROCESSED = SYSDATE()'\n con = self.get_connection() if not con else con\n try:\n con.begin()\n article_query = con.query(sql_update)\n con.commit()\n except:\n con.rollback()\n traceback.print_stack()\n", "<import token>\n\n\nclass ArticlesSpiderRepo:\n <function token>\n <function token>\n <function token>\n <function token>\n\n def update_to_crawled(self, con=None):\n sql_update = 'UPDATE URLS_TO_ADD SET UTA_PROCESSED = SYSDATE()'\n con = self.get_connection() if not con else con\n try:\n con.begin()\n article_query = con.query(sql_update)\n con.commit()\n except:\n con.rollback()\n traceback.print_stack()\n", "<import token>\n\n\nclass ArticlesSpiderRepo:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<class token>\n" ]
false
99,449
2e5ea871e96d8cdcf969a4e76f3551092888b7af
import logging


def logging_config():
    logging.getLogger("PIL").setLevel(logging.WARNING)
    logging.getLogger("openapi_spec_validator").setLevel(logging.WARNING)
    logging.getLogger("connexion").setLevel(logging.WARNING)
    logging.getLogger("pika").setLevel(logging.WARNING)
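
# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A minimal sketch of wiring logging_config() into application startup. The
# basicConfig level and the logger message are assumptions for demonstration,
# not taken from the source.
def _example_startup():
    logging.basicConfig(level=logging.INFO)
    logging_config()  # quiet the chatty third-party loggers listed above
    logging.getLogger(__name__).info("application logging configured")
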
[ "import logging\n\n\ndef logging_config():\n logging.getLogger(\"PIL\").setLevel(logging.WARNING)\n logging.getLogger(\"openapi_spec_validator\").setLevel(logging.WARNING)\n logging.getLogger(\"connexion\").setLevel(logging.WARNING)\n logging.getLogger(\"pika\").setLevel(logging.WARNING)\n", "import logging\n\n\ndef logging_config():\n logging.getLogger('PIL').setLevel(logging.WARNING)\n logging.getLogger('openapi_spec_validator').setLevel(logging.WARNING)\n logging.getLogger('connexion').setLevel(logging.WARNING)\n logging.getLogger('pika').setLevel(logging.WARNING)\n", "<import token>\n\n\ndef logging_config():\n logging.getLogger('PIL').setLevel(logging.WARNING)\n logging.getLogger('openapi_spec_validator').setLevel(logging.WARNING)\n logging.getLogger('connexion').setLevel(logging.WARNING)\n logging.getLogger('pika').setLevel(logging.WARNING)\n", "<import token>\n<function token>\n" ]
false
99,450
49f3e1ed0234c39fd38658c5753308b219ff3aa7
import beverages
import random


class CoffeeMachine:
    def __init__(self):
        self.serve_count = 0

    class EmptyCup(beverages.HotBeverage):
        def __init__(self, price=0.90, name="empty cup"):
            beverages.HotBeverage.__init__(self, price, name)

        def description(self):
            return "An empty cup ?! Gimme my money back!"

    class BrokenMachineException(Exception):
        def __init__(self):
            Exception.__init__(self, "This coffee machine has to be repaired.")

    def repair(self):
        self.serve_count = 0

    def serve(self, obj: beverages.HotBeverage):
        if self.serve_count > 9:
            raise self.BrokenMachineException()
        if random.randint(0, 1):
            self.serve_count = self.serve_count + 1
            return obj
        else:
            return self.EmptyCup()


if __name__ == '__main__':
    machine = CoffeeMachine()
    for i in range(0, 8):
        try:
            print(machine.serve(beverages.HotBeverage()))
            print(machine.serve(beverages.Coffee()))
            print(machine.serve(beverages.Tea()))
            print(machine.serve(beverages.Chocolate()))
            print(machine.serve(beverages.Cappuccino()))
        except Exception as a:
            print("***************** Warning ********************")
            print(a)
            print("==============================================\n")
            machine.repair()
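
# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A minimal sketch of the failure/repair cycle: after ten successful serves,
# serve() raises BrokenMachineException until repair() resets the counter.
# The loop bound of 30 is an arbitrary choice for demonstration.
def _example_repair_cycle():
    machine = CoffeeMachine()
    for _ in range(30):
        try:
            print(machine.serve(beverages.Coffee()))
        except CoffeeMachine.BrokenMachineException as exc:
            print(exc)
            machine.repair()  # reset serve_count so serving can resume
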
[ "import beverages\nimport random\n\nclass CoffeeMachine:\n def __init__(self):\n self.serve_count = 0\n\n class EmptyCup (beverages.HotBeverage):\n def __init__(self, price = 0.90, name = \"empty cup\"):\n beverages.HotBeverage.__init__(self, price, name)\n \n def description(serlf):\n return \"An empty cup ?! Gimme my money back!\"\n\n class BrokenMachineException (Exception):\n def __init__(self):\n Exception.__init__(self, \"This coffee machine has to be repaired.\")\n\n def repair(self):\n self.serve_count = 0\n\n def serve(self, obj: beverages.HotBeverage):\n if (self.serve_count > 9):\n raise self.BrokenMachineException()\n \n if random.randint(0, 1):\n self.serve_count = self.serve_count + 1\n # print (self.serve_count)\n return obj\n else:\n return self.EmptyCup()\n\nif __name__ == '__main__':\n\n machine = CoffeeMachine()\n for i in range(0, 8):\n try:\n print(machine.serve(beverages.HotBeverage()))\n print(machine.serve(beverages.Coffee()))\n print(machine.serve(beverages.Tea()))\n print(machine.serve(beverages.Chocolate()))\n print(machine.serve(beverages.Cappuccino()))\n except Exception as a:\n print(\"***************** Warning ********************\")\n print(a)\n print(\"==============================================\\n\")\n machine.repair()\n \n\n", "import beverages\nimport random\n\n\nclass CoffeeMachine:\n\n def __init__(self):\n self.serve_count = 0\n\n\n class EmptyCup(beverages.HotBeverage):\n\n def __init__(self, price=0.9, name='empty cup'):\n beverages.HotBeverage.__init__(self, price, name)\n\n def description(serlf):\n return 'An empty cup ?! Gimme my money back!'\n\n\n class BrokenMachineException(Exception):\n\n def __init__(self):\n Exception.__init__(self, 'This coffee machine has to be repaired.')\n\n def repair(self):\n self.serve_count = 0\n\n def serve(self, obj: beverages.HotBeverage):\n if self.serve_count > 9:\n raise self.BrokenMachineException()\n if random.randint(0, 1):\n self.serve_count = self.serve_count + 1\n return obj\n else:\n return self.EmptyCup()\n\n\nif __name__ == '__main__':\n machine = CoffeeMachine()\n for i in range(0, 8):\n try:\n print(machine.serve(beverages.HotBeverage()))\n print(machine.serve(beverages.Coffee()))\n print(machine.serve(beverages.Tea()))\n print(machine.serve(beverages.Chocolate()))\n print(machine.serve(beverages.Cappuccino()))\n except Exception as a:\n print('***************** Warning ********************')\n print(a)\n print('==============================================\\n')\n machine.repair()\n", "<import token>\n\n\nclass CoffeeMachine:\n\n def __init__(self):\n self.serve_count = 0\n\n\n class EmptyCup(beverages.HotBeverage):\n\n def __init__(self, price=0.9, name='empty cup'):\n beverages.HotBeverage.__init__(self, price, name)\n\n def description(serlf):\n return 'An empty cup ?! 
Gimme my money back!'\n\n\n class BrokenMachineException(Exception):\n\n def __init__(self):\n Exception.__init__(self, 'This coffee machine has to be repaired.')\n\n def repair(self):\n self.serve_count = 0\n\n def serve(self, obj: beverages.HotBeverage):\n if self.serve_count > 9:\n raise self.BrokenMachineException()\n if random.randint(0, 1):\n self.serve_count = self.serve_count + 1\n return obj\n else:\n return self.EmptyCup()\n\n\nif __name__ == '__main__':\n machine = CoffeeMachine()\n for i in range(0, 8):\n try:\n print(machine.serve(beverages.HotBeverage()))\n print(machine.serve(beverages.Coffee()))\n print(machine.serve(beverages.Tea()))\n print(machine.serve(beverages.Chocolate()))\n print(machine.serve(beverages.Cappuccino()))\n except Exception as a:\n print('***************** Warning ********************')\n print(a)\n print('==============================================\\n')\n machine.repair()\n", "<import token>\n\n\nclass CoffeeMachine:\n\n def __init__(self):\n self.serve_count = 0\n\n\n class EmptyCup(beverages.HotBeverage):\n\n def __init__(self, price=0.9, name='empty cup'):\n beverages.HotBeverage.__init__(self, price, name)\n\n def description(serlf):\n return 'An empty cup ?! Gimme my money back!'\n\n\n class BrokenMachineException(Exception):\n\n def __init__(self):\n Exception.__init__(self, 'This coffee machine has to be repaired.')\n\n def repair(self):\n self.serve_count = 0\n\n def serve(self, obj: beverages.HotBeverage):\n if self.serve_count > 9:\n raise self.BrokenMachineException()\n if random.randint(0, 1):\n self.serve_count = self.serve_count + 1\n return obj\n else:\n return self.EmptyCup()\n\n\n<code token>\n", "<import token>\n\n\nclass CoffeeMachine:\n\n def __init__(self):\n self.serve_count = 0\n\n\n class EmptyCup(beverages.HotBeverage):\n\n def __init__(self, price=0.9, name='empty cup'):\n beverages.HotBeverage.__init__(self, price, name)\n\n def description(serlf):\n return 'An empty cup ?! Gimme my money back!'\n\n\n class BrokenMachineException(Exception):\n\n def __init__(self):\n Exception.__init__(self, 'This coffee machine has to be repaired.')\n\n def repair(self):\n self.serve_count = 0\n <function token>\n\n\n<code token>\n", "<import token>\n\n\nclass CoffeeMachine:\n\n def __init__(self):\n self.serve_count = 0\n\n\n class EmptyCup(beverages.HotBeverage):\n\n def __init__(self, price=0.9, name='empty cup'):\n beverages.HotBeverage.__init__(self, price, name)\n\n def description(serlf):\n return 'An empty cup ?! Gimme my money back!'\n\n\n class BrokenMachineException(Exception):\n\n def __init__(self):\n Exception.__init__(self, 'This coffee machine has to be repaired.')\n <function token>\n <function token>\n\n\n<code token>\n", "<import token>\n\n\nclass CoffeeMachine:\n <function token>\n\n\n class EmptyCup(beverages.HotBeverage):\n\n def __init__(self, price=0.9, name='empty cup'):\n beverages.HotBeverage.__init__(self, price, name)\n\n def description(serlf):\n return 'An empty cup ?! Gimme my money back!'\n\n\n class BrokenMachineException(Exception):\n\n def __init__(self):\n Exception.__init__(self, 'This coffee machine has to be repaired.')\n <function token>\n <function token>\n\n\n<code token>\n", "<import token>\n<class token>\n<code token>\n" ]
false
99,451
ce04c698fdb95fdec1c792b19ee9aee70f914d1e
"""systemcall URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/2.1/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.conf.urls import include from django.contrib import admin from django.urls import path from rest_framework import routers from django.conf.urls import url from rest_framework_swagger.views import get_swagger_view from rest_framework.authtoken.views import obtain_auth_token from apps.registercall.views import RegisterCallViewSet from apps.phonebill.views import PhoneBillViewSet from apps.phonebill.views import RegisterViewSet schema_view = get_swagger_view(title='System Call') router = routers.DefaultRouter() router.register(r'registercall', RegisterCallViewSet, base_name='RegisterCall') router.register(r'phonebill', PhoneBillViewSet, base_name='PhoneBill') router.register(r'registers', RegisterViewSet, base_name='Registers') urlpatterns = [ url(r'^$', schema_view), path('', include(router.urls)), path('admin/', admin.site.urls), path('api-token-auth/', obtain_auth_token) ]
[ "\"\"\"systemcall URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/2.1/topics/http/urls/\nExamples:\nFunction views\n 1. Add an import: from my_app import views\n 2. Add a URL to urlpatterns: path('', views.home, name='home')\nClass-based views\n 1. Add an import: from other_app.views import Home\n 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')\nIncluding another URLconf\n 1. Import the include() function: from django.urls import include, path\n 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))\n\"\"\"\nfrom django.conf.urls import include\nfrom django.contrib import admin\nfrom django.urls import path\nfrom rest_framework import routers\nfrom django.conf.urls import url\nfrom rest_framework_swagger.views import get_swagger_view\nfrom rest_framework.authtoken.views import obtain_auth_token\n\nfrom apps.registercall.views import RegisterCallViewSet\nfrom apps.phonebill.views import PhoneBillViewSet\nfrom apps.phonebill.views import RegisterViewSet\n\nschema_view = get_swagger_view(title='System Call')\n\nrouter = routers.DefaultRouter()\nrouter.register(r'registercall', RegisterCallViewSet, base_name='RegisterCall')\nrouter.register(r'phonebill', PhoneBillViewSet, base_name='PhoneBill')\nrouter.register(r'registers', RegisterViewSet, base_name='Registers')\n\nurlpatterns = [\n url(r'^$', schema_view),\n path('', include(router.urls)),\n path('admin/', admin.site.urls),\n path('api-token-auth/', obtain_auth_token)\n]\n", "<docstring token>\nfrom django.conf.urls import include\nfrom django.contrib import admin\nfrom django.urls import path\nfrom rest_framework import routers\nfrom django.conf.urls import url\nfrom rest_framework_swagger.views import get_swagger_view\nfrom rest_framework.authtoken.views import obtain_auth_token\nfrom apps.registercall.views import RegisterCallViewSet\nfrom apps.phonebill.views import PhoneBillViewSet\nfrom apps.phonebill.views import RegisterViewSet\nschema_view = get_swagger_view(title='System Call')\nrouter = routers.DefaultRouter()\nrouter.register('registercall', RegisterCallViewSet, base_name='RegisterCall')\nrouter.register('phonebill', PhoneBillViewSet, base_name='PhoneBill')\nrouter.register('registers', RegisterViewSet, base_name='Registers')\nurlpatterns = [url('^$', schema_view), path('', include(router.urls)), path\n ('admin/', admin.site.urls), path('api-token-auth/', obtain_auth_token)]\n", "<docstring token>\n<import token>\nschema_view = get_swagger_view(title='System Call')\nrouter = routers.DefaultRouter()\nrouter.register('registercall', RegisterCallViewSet, base_name='RegisterCall')\nrouter.register('phonebill', PhoneBillViewSet, base_name='PhoneBill')\nrouter.register('registers', RegisterViewSet, base_name='Registers')\nurlpatterns = [url('^$', schema_view), path('', include(router.urls)), path\n ('admin/', admin.site.urls), path('api-token-auth/', obtain_auth_token)]\n", "<docstring token>\n<import token>\n<assignment token>\nrouter.register('registercall', RegisterCallViewSet, base_name='RegisterCall')\nrouter.register('phonebill', PhoneBillViewSet, base_name='PhoneBill')\nrouter.register('registers', RegisterViewSet, base_name='Registers')\n<assignment token>\n", "<docstring token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n" ]
false
99,452
403a0f3d6b01b226d28d7bfe97d9c1bc3199f80e
""" tessbatman.py This file contains helper functions for the tessbatman pipeline. It is divided into Batman, TESS, and Convolve functions. """ from time import time import glob import os.path as p import json import numpy as np import pandas as pd import matplotlib.pyplot as plt import scipy.signal as sig import scipy.stats as stat import astropy as ast import astropy.table as tbl import batman # Batman Functions def make_batman_config(tmin, tmax, tstep, wmin, wmax, wnum, wlog=True, suffix="", path="."): """ Write batman parameters to a JSON param file used to generate batmanCurves. Parameters ---------- tmin (num): minimum time tmax (num): maximum time tnum (num): time step wmin (num): minimum width wmax (num): maximum width wnum (num): number of widths to generate wlog (bool): use logspace for widths if True, else use linspace suffix (str): append suffix to config and curve file names """ params = {} params["curves_fname"] = p.join(path, 'batmanCurves{}.csv'.format(suffix)) params["params_fname"] = p.join(path, 'batmanParams{}.csv'.format(suffix)) params["tmin"] = tmin params["tmax"] = tmax params["tstep"] = tstep params["wmin"] = wmin params["wmax"] = wmax params["wnum"] = wnum params["wlog"] = wlog outfile = p.join(path, 'batmanConfig{}.param'.format(suffix)) with open(outfile, "w+") as f: json.dump(params, f) print("Batman config written to {}".format(outfile)) def make_lightcurve(t0, r, i, p, width, u_type, u_param, t): """ Generate a batman lightcurve with the given parameters. Parameters ---------- t0 (num): time of inferior conjunction r (num): planet radius (in stellar radii) i (num): orbital inclination (in degrees) p (num): orbital period width (num): width parameter (defined as a**3/p**2) u_type (str): limb darkening model u_param (list): parameters for limb darkening t: timesteps that you want the fluxes at assume circular orbit """ # Init batman model params = batman.TransitParams() params.rp = r params.inc = i params.w = 0 # longitude of periastron (degenerate with width) params.ecc = 0 # eccentricity (0 for circular orbits) params.per = p # orbital period params.t0 = t0 params.a = (width * p ** 2) ** (1 / 3) # semi-major axis (stellar radii) params.limb_dark = u_type params.u = u_param model = batman.TransitModel(params, t) # Generate curve flux = model.light_curve(params) # compute light curve return flux def make_batman(paramfile, outdir, norm=False, write=True, verbose=True): """ Return astropy tables of batman params and generated curves based on the parameters given in paramfile. 
def make_batman(paramfile, outdir, norm=False, write=True, verbose=True):
    """
    Return astropy tables of batman params and generated curves based on the
    parameters given in paramfile.

    Parameters
    ----------
    paramfile (str): path to JSON param file written by make_batman_config
    outdir (str): path to write output curve and param files
    norm (bool): normalize curves to unit integrated area
    write (bool): write param and curve tables to files
    verbose (bool): print logging and timing info
    """
    # read batman param file
    if verbose:
        print("Reading param file", flush=True)

    with open(paramfile, "r") as f:
        d = json.load(f)

    # init time array and parameter ranges
    if verbose:
        print("Setting param ranges", flush=True)

    t = np.arange(d['tmin'], d['tmax'], d['tstep'])

    if d['wlog']:
        widths = np.logspace(d['wmin'], d['wmax'], d['wnum'])
    else:
        widths = np.linspace(d['wmin'], d['wmax'], d['wnum'])

    nparams = len(widths)
    radii = 0.1 * np.ones(nparams)
    incs = 90 * np.ones(nparams)
    u = ['0.1 0.3'] * nparams
    ld = ['quadratic'] * nparams
    per = 100 * np.ones(nparams)
    t0 = np.zeros(nparams)
    e = np.zeros(nparams)
    w = np.zeros(nparams)

    # add params to batman param table
    curveID = ['curve{}'.format(i) for i in range(nparams)]
    cols = [curveID, radii, incs, widths, per, u, ld, t0, e, w]
    colnames = ['curveID', 'rp', 'i', 'width', 'per', 'u', 'ld', 't0', 'e', 'w']
    batmanParams = tbl.Table(cols, names=colnames)

    # generate curves
    if verbose:
        print("Generating curves", flush=True)
    start = time()
    batmanDict = {'times': t}
    err = 0  # keep track of errored curves
    for i in range(len(batmanParams)):
        p = batmanParams[i]
        cID = p['curveID']
        c = make_lightcurve(p['t0'], p['rp'], p['i'], p['per'], p['width'], p['ld'],
                            [float(val) for val in p['u'].split()], t)

        # normalize curve c
        if norm:
            cmax = np.max(c)
            cmin = np.min(c)
            c = (c - cmin) / (cmax - cmin)  # scale to [0,1]
            c = 1 - c  # flip
            c = c / np.sum(c)  # normalize area under curve to 1
            c = 1 - c  # flip back
        if np.isnan(c).any() or (sum(c == 1) < 5):
            print("Batman {} failed".format(cID), flush=True)
            err += 1
            continue

        # Save curve to dict
        batmanDict[cID] = c

        # Progress report every 100
        if verbose and (i % 100 == 0):
            elapsed = time() - start
            print("Generated {}/{} curves in {} s".format(i + 1 - err, nparams,
                                                          elapsed), flush=True)

    # add curves to table
    batmanCurves = tbl.Table(batmanDict)
    if verbose:
        elapsed = time() - start
        print("Generated {}/{} curves in {} s".format(nparams - err, nparams,
                                                      elapsed), flush=True)

    # Write batman params and curves tables to files
    if write:
        if verbose:
            start = time()
            print("Writing files", flush=True)
        ast.io.ascii.write(batmanParams, d['params_fname'], format='csv',
                           overwrite=True, comment='#', fast_writer=False)
        if verbose:
            print("Wrote params to {}".format(d['params_fname']))
        ast.io.ascii.write(batmanCurves, d['curves_fname'], format='csv',
                           overwrite=True, comment='#', fast_writer=False)
        if verbose:
            print("Wrote curves to {}".format(d['curves_fname']))
            elapsed = time() - start
            print("Wrote files in {} s".format(elapsed), flush=True)
    return batmanParams, batmanCurves
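
# --- Illustrative usage (editor's sketch, not part of the original module) ---
# An end-to-end sketch: write a config, then generate the parameter and curve
# tables from it. All numeric ranges, the "_demo" suffix, and the output paths
# are arbitrary demonstration values, not taken from the source.
def _example_generate_curves():
    make_batman_config(tmin=-5, tmax=5, tstep=0.01,
                       wmin=-3, wmax=-1, wnum=10, suffix="_demo", path=".")
    params, curves = make_batman("./batmanConfig_demo.param", outdir=".",
                                 norm=True, write=False)
    return params, curves
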
def read_batman(batmancurves_file):
    """
    Return times, curve names, and batman curves from a batmanCurves file.

    Parameters
    ----------
    batmancurves_file (str): Path to a batmanCurves file

    Return
    ------
    times (numpy Array): The times array (x axis) of all batmanCurves
    curve_names (numpy Array): The name of each batmanCurve
    batmanCurves (astropy Table): The table of batmanCurves
    """
    # Read in Batman Curves
    print("Reading batmanCurves from {}...".format(batmancurves_file))
    batmanCurves = ast.io.ascii.read(batmancurves_file, data_start=1, format='csv')
    times = np.array(batmanCurves['times'])
    curve_names = np.array(batmanCurves.colnames[1:])
    return times, curve_names, batmanCurves


# TESS Functions
def read_tess(tess_dir, sector_name, start=0, end=None):
    """
    Return list of tess .fits files in tess_dir from [start:end]. Default
    to all fits files in directory if start and end are not specified.

    Parameters
    ----------
    tess_dir (str): path to tess data directory
    sector_name (str): name of sector subdirectory (e.g. Sector1)
    start (int): (Optional) Index of file in directory to start at
    end (int): (Optional) Index of file to end at

    Return
    ------
    tess_names (list): List of file paths to tess .fits data
    """
    print("Reading TESS from {}, s:{}, e:{}...".format(sector_name, start, end))
    sector_path = p.join(tess_dir, sector_name)
    sector_files = glob.glob(p.join(sector_path, "*.fits"))
    tess_names = sector_files[start:end]
    return tess_names


def open_tess_fits(tess_fpath, norm=False):
    """Return (time, flux) arrays from a TESS FITS file, or (None, None) on error."""
    try:
        with ast.io.fits.open(tess_fpath, mode="readonly") as hdulist:
            hdr = hdulist[0].header
            tess_time = hdulist[1].data['TIME']
            tess_flux = hdulist[1].data['PDCSAP_FLUX']
            # set NaNs to median
            med = np.nanmedian(tess_flux)
            tess_flux[np.isnan(tess_flux)] = med

            if norm:
                tmin = np.min(tess_flux)
                tmax = np.max(tess_flux)
                tess_flux = (tess_flux - tmin) / (tmax - tmin)
    except Exception as e:
        print("ERROR reading file: ", tess_fpath, " with error: ", e, flush=True)
        return None, None
    return tess_time, tess_flux


# Convolve Functions
def convolve(tess_time, tess_flux, batmanCurves, curve_names, num_keep=10, plot=False):
    """Cross-correlate a TESS light curve against each batman curve, keeping the best matches."""
    conv_start = time()
    curves = []
    times = np.zeros(num_keep)
    convs = np.zeros(num_keep)
    print("Starting convolutions...", flush=True)
    for i, curvename in enumerate(curve_names):
        # do convolution
        batman_curve = batmanCurves[curvename]
        conv = np.abs(sig.fftconvolve(1 - tess_flux, (1 - batman_curve), 'same'))
        ind_max = np.argmax(conv)
        conv_max = conv[ind_max]

        # if num_keep, save only the top num_keep curves
        if num_keep < len(curve_names):
            if conv_max > convs[-1]:
                # insert in reverse sorted order
                ind = np.searchsorted(-convs, -conv_max)
                curves = curves[:ind] + [curvename] + curves[ind:-1]
                times = np.insert(times, ind, tess_time[ind_max])[:-1]
                convs = np.insert(convs, ind, conv_max)[:-1]
        else:
            curves.append(curvename)
            times[i] = tess_time[ind_max]
            convs[i] = conv_max
        if plot:
            plt.plot(tess_time, conv, label=curvename)

    conv_time = time() - conv_start
    print("Convolved {} curves in {:.3} s".format(len(curve_names), conv_time), flush=True)
    return curves, times, convs
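
# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A minimal synthetic run of convolve(): one flat "TESS" light curve with a
# single injected dip, matched against a trivial one-curve table. Real inputs
# come from open_tess_fits() and read_batman(); everything below is fabricated
# demonstration data.
def _example_convolve_synthetic():
    t = np.linspace(0, 10, 1000)
    flux = np.ones_like(t)
    flux[480:520] = 0.99  # injected dip
    template = np.ones_like(t)
    template[490:510] = 0.99
    fake_curves = tbl.Table({'times': t, 'curve0': template})
    return convolve(t, flux, fake_curves, np.array(['curve0']), num_keep=1)
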
def tbconvolve(tess_dir, batman_dir, batman_suffix, sector, start, end,
               output_dir, num_keep=10, norm_tess=False, write=True,
               writechunk=10, verbosity=0):
    """
    Convolve a range of TESS files in a sector against the batman curve table
    and write the best-matching candidates to CSV.

    Parameters
    ----------
    tess_dir (str): directory to TESS data
    batman_dir (str): directory to model data
    batman_suffix (str): suffix to append to batmanCurves file (e.g. _small)
    sector (int): sector to pull data from
    start (int): file to start at
    end (int): file to end at
    output_dir (str): directory to write candidates.csv
    num_keep (int): number of best curves to keep per TESS file (<1 keeps all)
    norm_tess (bool): min-max normalize each TESS flux array
    write (bool): write candidate tables to CSV
    writechunk (int): write a candidates file every writechunk TESS files
    verbosity (int): verbosity level (currently unused)
    """
    tconv_start = time()
    print("===START TCONVOLVE===", flush=True)

    # Handle relative paths
    tess_dir = p.abspath(tess_dir)
    batman_dir = p.abspath(batman_dir)
    output_dir = p.abspath(output_dir)

    # Read in TESS Sector data
    sector_name = "Sector{}".format(sector)
    if sector == 0:
        sector_name = "sample_" + sector_name
    tess_names = read_tess(tess_dir, sector_name, start, end)
    ntess = len(tess_names)
    print("Found {} TESS files to process".format(ntess), flush=True)
    if ntess < 1:
        print("No tess curves found, quitting....")
        return None

    # Read in Batman Curves
    batmanCurves_file = p.join(batman_dir, "batmanCurves{}.csv".format(batman_suffix))
    times, curve_names, batmanCurves = read_batman(batmanCurves_file)
    nbatman = len(curve_names)
    print("Found {} Batman curves".format(nbatman), flush=True)
    if nbatman < 1:
        print("No batman curves found, quitting....")
        return None

    # Read in Batman Params
    params = pd.read_csv(p.join(batman_dir, "batmanParams{}.csv".format(batman_suffix)))

    # Init dict for saving best batman curves
    colnames = ['sector', 'tessFile', 'curveID', 'tcorr', 'correlation', 'chisq']
    d = {key: [] for key in colnames}
    s = 0
    nerr = 0  # count number of failed files

    # Do convolution on all tess files
    for tind, tess_fpath in enumerate(tess_names):
        tess_start = time()
        tess_fname = p.basename(tess_fpath)
        print("Starting TESS file: {}".format(tess_fname), flush=True)

        # Read tess lightcurve
        tess_time, tess_flux = open_tess_fits(tess_fpath, norm_tess)
        if tess_time is None:
            nerr += 1
            continue  # skip to next iter if read failed

        # Do convolution and keep num_keep best curves
        if num_keep < 1:
            num_keep = len(curve_names)
        curves, times, convs = convolve(tess_time, tess_flux, batmanCurves,
                                        curve_names, num_keep)

        # Save this TESS curve's best batman curves to dict
        d['sector'].extend([sector_name] * num_keep)
        d['tessFile'].extend([tess_fname] * num_keep)
        d['curveID'].extend(curves)
        d['tcorr'].extend(times)
        d['correlation'].extend(convs)
        d['chisq'].extend(get_chi_sq(tess_time, tess_flux, times, params))
        print(len(d['tcorr']), len(d['chisq']))
        if write:
            # Make table every writechunk tess curves
            if (tind % writechunk == writechunk - 1) or (tind == len(tess_names) - 1):
                e = start + tind
                outname = 'candidates_sector{}_s{}_e{}.csv'.format(sector, s, e)
                outpath = p.join(output_dir, outname)
                # Convert to astropy table and write to csv
                candidates = tbl.Table(d, names=colnames)
                ast.io.ascii.write(candidates, outpath, format='csv',
                                   overwrite=True, comment='#', fast_writer=False)
                print("Wrote file {} at {} s".format(outname, time() - tess_start), flush=True)
                s = e + 1
    candidates = tbl.Table(d, names=colnames)

    # make merged table
    cdf = pd.DataFrame.from_dict(d)
    cdf = cdf[colnames]
    df = pd.merge(cdf, params, on="curveID", how="left")
    df.to_csv(p.join(output_dir, "chisq{}.csv".format(batman_suffix)))

    tconv_time = time() - tconv_start
    print("Convolved {}/{} tess files with {} curves in {:.3} s".format(
        ntess - nerr, ntess, nbatman, tconv_time), flush=True)
    print("===END TCONVOLVE===", flush=True)
    return candidates
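
# --- Illustrative usage (editor's sketch, not part of the original module) ---
# Calling the pipeline directly from Python rather than through main(). The
# directory layout (./tess, ./batman, ./out), the "_small" suffix, and the
# sector/index values are placeholders for demonstration only.
def _example_run_pipeline():
    return tbconvolve(tess_dir="./tess", batman_dir="./batman",
                      batman_suffix="_small", sector=1, start=0, end=10,
                      output_dir="./out", num_keep=-1, norm_tess=True)
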
def get_chi_sq(tess_time, tess_flux, tcorr, params):
    """Return the reduced chi-squared of each batman model against the TESS flux.

    Assumes tcorr is aligned with the rows of params (one correlation time
    per batman curve).
    """
    chi_squared = []
    # find the lightcurve minima to estimate the exoplanet period
    arr = tess_flux / np.nanmedian(tess_flux)
    arr[np.isnan(arr)] = np.nanmedian(arr)
    arr[arr == 0] = np.nanmedian(arr)
    mu, std = stat.norm.fit(1 / arr)
    peaks, _ = sig.find_peaks(1 / arr, height=mu + 4 * std, distance=1000)
    p = np.diff(tess_time[peaks])
    # define parameters
    PER = np.mean(p)
    u_type = 'quadratic'
    u_param = [0.1, 0.3]
    t = tess_time - tess_time[0]
    # normalize flux
    outcounts = np.nan_to_num(tess_flux[tess_flux > np.nanmean(tess_flux)])
    mu, sigma = stat.norm.fit(outcounts)
    normalized_fluxes = tess_flux / mu
    normalized_sigma = np.sqrt(tess_flux) / mu

    for i, row in params.iterrows():
        # get params for this row
        T0 = tcorr[i] - tess_time[0]
        RP = row["rp"]
        INC = row["i"]
        width = row["width"]

        # calculate reduced chi-squared
        model_flux = make_lightcurve(T0, RP, INC, PER, width, u_type, u_param, t)
        chi_squared.append(np.nansum(((normalized_fluxes - model_flux) ** 2
                                      / normalized_sigma ** 2) / 8))

    return chi_squared


def main():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("tess_dir", type=str)
    parser.add_argument("batman_dir", type=str)
    parser.add_argument("sector", type=int)
    parser.add_argument("start", type=int)
    parser.add_argument("end", type=int)
    parser.add_argument("output_dir", type=str)
    parser.add_argument("batman_suffix", type=str, default="")
    parser.add_argument("-v", "--verbosity", default=False,
                        action="store_true", help="Print console output")
    args = parser.parse_args()
    tbconvolve(args.tess_dir, args.batman_dir, args.batman_suffix, args.sector,
               args.start, args.end, args.output_dir, num_keep=-1,
               norm_tess=True, verbosity=args.verbosity)


if __name__ == '__main__':
    main()
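
# --- Illustrative invocation (editor's sketch, not from the original source) ---
# The positional arguments mirror main()'s argparse setup above; the paths and
# sector number below are placeholders:
#
#   python tessbatman.py ./tess ./batman 1 0 10 ./out _small -v
#
# which convolves the first ten TESS files of Sector1 against
# batmanCurves_small.csv and writes candidate tables plus chisq_small.csv
# under ./out.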
[ "\"\"\" tessbatman.py\nThis file contains helper functions for the tessbatman pipeline.\n\nIt is divided into Batman, TESS, and Convolve functions.\n\"\"\"\nfrom time import time\nimport glob\nimport os.path as p\nimport json\n\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport scipy.signal as sig\nimport scipy.stats as stat\n\nimport astropy as ast\nimport astropy.table as tbl\nimport batman\n\n\n# Batman Functions\ndef make_batman_config(tmin, tmax, tstep, wmin, wmax, wnum, wlog=True, suffix=\"\", path=\".\"):\n \"\"\"\n Write batman parameters to a JSON param file used to generate batmanCurves.\n\n Parameters\n ----------\n tmin (num): minimum time\n tmax (num): maximum time\n tnum (num): time step\n wmin (num): minimum width\n wmax (num): maximum width\n wnum (num): number of widths to generate\n wlog (bool): use logspace for widths if True, else use linspace\n suffix (str): append suffix to config and curve file names\n \"\"\"\n params = {}\n params[\"curves_fname\"] = p.join(path, 'batmanCurves{}.csv'.format(suffix))\n params[\"params_fname\"] = p.join(path, 'batmanParams{}.csv'.format(suffix))\n params[\"tmin\"] = tmin\n params[\"tmax\"] = tmax\n params[\"tstep\"] = tstep\n params[\"wmin\"] = wmin\n params[\"wmax\"] = wmax\n params[\"wnum\"] = wnum\n params[\"wlog\"] = wlog\n\n outfile = p.join(path, 'batmanConfig{}.param'.format(suffix))\n with open(outfile, \"w+\") as f:\n json.dump(params, f)\n print(\"Batman config written to {}\".format(outfile))\n\n\ndef make_lightcurve(t0, r, i, p, width, u_type, u_param, t):\n \"\"\"\n Generate a batman lightcurve with the given parameters.\n \n Parameters\n ----------\n t0 (num): time of inferior conjunction\n r (num): planet radius (in stellar radii)\n i (num): orbital inclination (in degrees)\n p (num): orbital period\n width (num): width parameter (defined as a**3/p**2)\n u_type (str): limb darkening model\n u_param (list): parameters for limb darkening\n \n t: timesteps that you want the fluxes at\n \n assume circular orbit\n \"\"\"\n # Init batman model\n params = batman.TransitParams()\n params.rp = r\n params.inc = i\n params.w = 0 # longitude of periastron (degenerate with width)\n params.ecc = 0 # eccentricity (0 for circular orbits)\n params.per = p # orbital period\n params.t0 = t0\n params.a = (width * p ** 2) ** (1 / 3) # semi-major axis (stellar radii)\n params.limb_dark = u_type\n params.u = u_param\n model = batman.TransitModel(params, t)\n \n # Generate curve\n flux = model.light_curve(params) # compute light curve\n return flux\n\n\ndef make_batman(paramfile, outdir, norm=False, write=True, verbose=True):\n \"\"\" \n Return astropy tables of batman params and generated curves based on the\n parameters given in paramfile. 
\n\n Parameters\n ----------\n paramfile (str): path to JSON param file written by make_batman_config\n outdir (str): path to write output curve and param files\n norm (bool): normalize curves to unit integrated area\n write (bool): write param and curve tables to files\n verbose (bool): print logging and timing info\n \"\"\"\n # read batman param file\n if verbose:\n print(\"Reading param file\", flush=True)\n\n with open(paramfile, \"r\") as f:\n d = json.load(f)\n\n # init time array and parameter ranges\n if verbose:\n print(\"Setting param ranges\", flush=True)\n\n t = np.arange(d['tmin'], d['tmax'], d['tstep'])\n\n if d['wlog']:\n widths = np.logspace(d['wmin'], d['wmax'], d['wnum'])\n else:\n widths = np.linspace(d['wmin'], d['wmax'], d['wnum'])\n\n nparams = len(widths)\n radii = 0.1 * np.ones(nparams)\n incs = 90 * np.ones(nparams)\n u = ['0.1 0.3'] * nparams\n ld = ['quadratic'] * nparams\n per = 100*np.ones(nparams)\n t0 = np.zeros(nparams)\n e = np.zeros(nparams)\n w = np.zeros(nparams)\n\n # Old\n # radii = []\n # widths = []\n # incs = []\n # widths_arr = np.logspace(d['wmin'], d['wmax'], d['wnum'])\n # radii_arr = np.logspace(d['rmin'], d['rmax'], d['rnum'])\n # for r in radii_arr:\n # for w in widths_arr:\n # a = (w * (100)**2)**(1.0/3.0)\n # lim = np.arccos((1 + r)/(a))/(2 * np.pi) * 360\n # inc = np.linspace(90, lim, 11)[:-1] # last inc always fails so exclude\n # for i in inc: \n # incs.append(i)\n # radii.append(r)\n # widths.append(w)\n \n # add params to batman param table\n curveID = ['curve{}'.format(i) for i in range(nparams)]\n cols = [curveID, radii, incs, widths, per, u, ld, t0, e, w]\n colnames = ['curveID', 'rp', 'i', 'width', 'per', 'u', 'ld', 't0', 'e', 'w']\n batmanParams = tbl.Table(cols, names=colnames)\n\n # generate curves\n if verbose:\n print(\"Generating curves\", flush=True)\n start = time()\n batmanDict = {'times': t}\n err = 0 # keep track of errored curves\n for i in range(len(batmanParams)): \n p = batmanParams[i]\n cID = p['curveID']\n c = make_lightcurve(p['t0'], p['rp'], p['i'], p['per'], p['width'], p['ld'], \n [float(val) for val in p['u'].split()], t)\n\n # normalize curve c\n if norm:\n cmax = np.max(c)\n cmin = np.min(c)\n c = (c-cmin)/(cmax-cmin) # scale to [0,1]\n c = 1-c # flip\n c = c / np.sum(c) # normalize area under curve to 1\n c = 1-c # flip back\n if np.isnan(c).any() or (sum(c==1) < 5):\n print(\"Batman {} failed\".format(cID), flush=True)\n err += 1\n continue \n\n # Save curve to dict\n batmanDict[cID] = c\n\n # Progress report every 100\n if verbose and (i % 100 == 0):\n elapsed = time() - start\n print(\"Generated {}/{} curves in {} s\".format(i+1-err, nparams,\n elapsed), flush=True)\n \n # add curves to table\n batmanCurves = tbl.Table(batmanDict)\n if verbose:\n elapsed = time() - start\n print(\"Generated {}/{} curves in {} s\".format(nparams-err, nparams,\n elapsed), flush=True) \n \n # Write batman params and curves tables to files\n if write:\n if verbose:\n start = time()\n print(\"Writing files\", flush=True)\n ast.io.ascii.write(batmanParams, d['params_fname'], format='csv', \n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print(\"Wrote params to {}\".format(d['params_fname']))\n ast.io.ascii.write(batmanCurves, d['curves_fname'], format='csv', \n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print(\"Wrote curves to {}\".format(d['curves_fname']))\n elapsed = time() - start\n print(\"Wrote files in {} s\".format(elapsed), flush=True)\n return(batmanParams, 
batmanCurves)\n\n\ndef read_batman(batmancurves_file):\n \"\"\"\n Return times, cureve name, and batman curves from a batmanCurves file.\n \n Parameters\n ----------\n batmancurves_file (str): Path to a batmanCurves file\n\n Return\n ------\n times (numpy Array): The times array (x axis) of all batmanCurves\n curve_names (numpy Array): The name of each batmanCurve\n batmanCurves (astropy Table): The table of batmanCurves\n \"\"\"\n # Read in Batman Curves \n print(\"Reading batmanCurves from {}...\".format(batmancurves_file))\n batmanCurves = ast.io.ascii.read(batmancurves_file, data_start=1, format='csv')\n times = np.array(batmanCurves['times'])\n curve_names = np.array(batmanCurves.colnames[1:])\n return times, curve_names, batmanCurves\n\n\n# TESS Functions\ndef read_tess(tess_dir, sector_name, start=0, end=None):\n \"\"\"\n Return list of tess .fits files in tess_dir from [start:end]. Default\n to all fits files in directory if start and end are not specified.\n\n Parameters\n ----------\n tess_dir (str): path to tess data directory\n sector_name (str): name of sector subdirectory (e.g. Sector1)\n start (int): (Optional) Index of file in directory to start at\n end (int): (Optional) Index of file to end at\n \n Return\n ------\n tess_names (list): List of file paths to tess .fits data\n \"\"\"\n print(\"Reading TESS from {}, s:{}, e:{}...\".format(sector_name, start, end))\n sector_path = p.join(tess_dir, sector_name)\n sector_files = glob.glob(p.join(sector_path,\"*.fits\"))\n tess_names = sector_files[start:end]\n return tess_names\n\n \ndef open_tess_fits(tess_fpath, norm=False):\n try:\n with ast.io.fits.open(tess_fpath, mode=\"readonly\") as hdulist:\n hdr = hdulist[0].header\n tess_time = hdulist[1].data['TIME']\n tess_flux = hdulist[1].data['PDCSAP_FLUX']\n # set NaNs to median\n med = np.nanmedian(tess_flux)\n tess_flux[np.isnan(tess_flux)] = med\n \n if norm:\n# tess_flux[tess_flux > np.median(tess_flux)] = np.median(tess_flux)\n tmin = np.min(tess_flux)\n tmax = np.max(tess_flux)\n tess_flux = (tess_flux - tmin)/(tmax-tmin)\n\n except Exception as e: \n print(\"ERROR reading file: \", tess_fpath, \" with error: \", e,flush=True)\n return None, None\n return tess_time, tess_flux\n \n \n# Convolve Fucntions\ndef convolve(tess_time, tess_flux, batmanCurves, curve_names, num_keep=10, plot=False):\n conv_start = time()\n curves = []\n times = np.zeros(num_keep)\n convs = np.zeros(num_keep)\n print(\"Starting convolutions...\",flush=True)\n for i, curvename in enumerate(curve_names):\n # do convolution\n batman_curve = batmanCurves[curvename]\n conv = np.abs(sig.fftconvolve(1-tess_flux, (1-batman_curve), 'same'))\n ind_max = np.argmax(conv)\n conv_max = conv[ind_max]\n \n # if num_keep, save only the top num_keep curves\n if num_keep < len(curve_names):\n if conv_max > convs[-1]:\n # insert in reverse sorted order\n ind = np.searchsorted(-convs, -conv_max)\n curves = curves[:ind] + [curvename] + curves[ind:-1]\n times = np.insert(times, ind, tess_time[ind_max])[:-1]\n convs = np.insert(convs, ind, conv_max)[:-1]\n else:\n curves.append(curvename)\n times[i] = tess_time[ind_max]\n convs[i] = conv_max\n if plot:\n plt.plot(tess_time, conv, label=curvename)\n\n conv_time = time() - conv_start\n print(\"Convolved {} curves in {:.3} s\".format(len(curve_names), conv_time),flush=True)\n return curves, times, convs\n\n \ndef tbconvolve(tess_dir, batman_dir, batman_suffix, sector, start, end, output_dir, num_keep=10, norm_tess=False, write=True, writechunk=10, verbosity=0):\n \"\"\"\n \n 
Parameters\n ----------\n tess_dir(str): directory to TESS data\n batman_dir (str): directory to model data\n batman_suffix(str): suffix to append to barmanCurves file (e.g. _small)\n sector (int): sector to pull data from\n start (int): file to start at\n end (int): file to end at\n output_dir (str): directory to write candidates.csv\n \"\"\" \n tconv_start = time()\n print(\"===START TCONVOLVE===\",flush=True)\n \n # Handle relative paths\n tess_dir = p.abspath(tess_dir)\n batman_dir = p.abspath(batman_dir)\n output_dir = p.abspath(output_dir)\n \n # Read in TESS Sector data\n sector_name = \"Sector{}\".format(sector)\n if sector == 0:\n sector_name = \"sample_\"+sector_name\n tess_names = read_tess(tess_dir, sector_name, start, end)\n ntess = len(tess_names)\n print(\"Found {} TESS files to process\".format(ntess),flush=True)\n if ntess < 1:\n print(\"No tess curves found, quitting....\")\n return None\n \n # Read in Batman Curves\n batmanCurves_file = p.join(batman_dir,\"batmanCurves{}.csv\".format(batman_suffix))\n times, curve_names, batmanCurves = read_batman(batmanCurves_file)\n nbatman = len(curve_names)\n print(\"Found {} Batman curves\".format(nbatman),flush=True)\n if ntess < 1:\n print(\"No batman curves found, quitting....\")\n return None\n\n # Read in Batman Params\n params = pd.read_csv(p.join(batman_dir, \"batmanParams{}.csv\".format(batman_suffix)))\n\n\n\n #Init dict for saving best batman curves \n colnames = ['sector', 'tessFile', 'curveID', 'tcorr', 'correlation', 'chisq']\n d = {key : [] for key in colnames}\n s = 0\n nerr = 0 # count number of failed files\n \n # Do convolution on all tess files\n for tind, tess_fpath in enumerate(tess_names):\n tess_start = time()\n tess_fname = p.basename(tess_fpath)\n print(\"Starting TESS file: {}\".format(tess_fname),flush=True)\n \n # Read tess lightcurve\n tess_time, tess_flux = open_tess_fits(tess_fpath, norm_tess)\n if tess_time is None:\n nerr += 1\n continue # skip to next iter if read failed\n \n # Do convolution and keep num_keep best curves\n if num_keep < 1:\n num_keep = len(curve_names)\n curves, times, convs = convolve(tess_time, tess_flux, batmanCurves, curve_names, num_keep)\n \n # Save this TESS curve's best batman curves to dict\n d['sector'].extend([sector_name]*num_keep)\n d['tessFile'].extend([tess_fname]*num_keep)\n d['curveID'].extend(curves)\n d['tcorr'].extend(times)\n d['correlation'].extend(convs)\n d['chisq'].extend(get_chi_sq(tess_time, tess_flux, times, params))\n print(len(d['tcorr']), len(d['chisq']))\n if write:\n # Make table every writechunk tess curves\n if (tind % writechunk == writechunk-1) or (tind == len(tess_names)-1):\n e = start+tind\n outname = 'candidates_sector{}_s{}_e{}.csv'.format(sector, s, e)\n outpath = p.join(output_dir, outname)\n # Convert to astropy table and write to csv\n candidates = tbl.Table(d,names=colnames)\n ast.io.ascii.write(candidates, outpath, format='csv', overwrite=True, comment='#', fast_writer=False)\n print(\"Wrote file {} at {} s\".format(outname,time()-tess_start),flush=True)\n # reset dicts\n# d = {key : [] for key in ['sector','tessFile','curveID','tcorr','correlation']}\n s=e+1\n candidates = tbl.Table(d,names=colnames)\n \n # make merged table\n cdf = pd.DataFrame.from_dict(d)\n cdf = cdf[colnames]\n df = pd.merge(cdf, params, on=\"curveID\", how=\"left\")\n df.to_csv(p.join(output_dir, \"chisq{}.csv\".format(batman_suffix)))\n \n tconv_time = time() - tconv_start\n print(\"Convolved {}/{} tess files with {} curves in {:.3} s\".format(ntess-nerr, 
ntess, nbatman, tconv_time),flush=True)\n print(\"===END TCONVOLVE===\",flush=True)\n return candidates\n\ndef get_chi_sq(tess_time, tess_flux, tcorr, params):\n current_fname = \"\"\n chi_squared = []\n #find the lightcurve minima to calculate the exoplanet period\n arr = tess_flux / np.nanmedian(tess_flux)\n arr[np.isnan(arr)] = np.nanmedian(arr)\n arr[arr==0] = np.nanmedian(arr)\n mu, std = stat.norm.fit(1 / arr)\n peaks, _ = sig.find_peaks(1 / arr, height = mu + 4 * std, distance = 1000)\n p = np.diff(tess_time[peaks])\n #define parameters\n PER = np.mean(p)\n u_type = 'quadratic'\n u_param = [0.1, 0.3]\n t = tess_time - tess_time[0]\n #normalize flux\n outcounts = np.nan_to_num(tess_flux[tess_flux > np.nanmean(tess_flux)])\n mu, sigma = stat.norm.fit(outcounts)\n normalized_fluxes = tess_flux / mu\n normalized_sigma = np.sqrt(tess_flux)/mu\n \n for i, row in params.iterrows():\n #get params for this row\n T0 = tcorr[i]- tess_time[0]\n RP = row[\"rp\"]\n INC = row[\"i\"]\n width = row[\"width\"]\n\n #calculate reduced chi-squared\n chi_squared.append(np.nansum(((normalized_fluxes - make_lightcurve(T0, RP, INC, PER, width, u_type, u_param, t)) ** 2 / normalized_sigma ** 2) / 8))\n\n return chi_squared\n \ndef main():\n import argparse\n parser = argparse.ArgumentParser()\n parser.add_argument(\"tess_dir\", type=str)\n parser.add_argument(\"batman_dir\", type=str)\n parser.add_argument(\"sector\", type=int)\n parser.add_argument(\"start\", type=int)\n parser.add_argument(\"end\", type=int)\n parser.add_argument(\"output_dir\", type=str) \n parser.add_argument(\"batman_suffix\",type=str,default=\"\")\n parser.add_argument(\"-v\", \"--verbosity\", default=False, \n action=\"store_true\", help=\"Print console output\")\n args = parser.parse_args()\n tbconvolve(args.tess_dir, args.batman_dir, args.batman_suffix, args.sector, args.start, \n args.end, args.output_dir, num_keep=-1, norm_tess=True, verbosity=args.verbosity)\n \nif __name__ == '__main__':\n main()\n", "<docstring token>\nfrom time import time\nimport glob\nimport os.path as p\nimport json\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport scipy.signal as sig\nimport scipy.stats as stat\nimport astropy as ast\nimport astropy.table as tbl\nimport batman\n\n\ndef make_batman_config(tmin, tmax, tstep, wmin, wmax, wnum, wlog=True,\n suffix='', path='.'):\n \"\"\"\n Write batman parameters to a JSON param file used to generate batmanCurves.\n\n Parameters\n ----------\n tmin (num): minimum time\n tmax (num): maximum time\n tnum (num): time step\n wmin (num): minimum width\n wmax (num): maximum width\n wnum (num): number of widths to generate\n wlog (bool): use logspace for widths if True, else use linspace\n suffix (str): append suffix to config and curve file names\n \"\"\"\n params = {}\n params['curves_fname'] = p.join(path, 'batmanCurves{}.csv'.format(suffix))\n params['params_fname'] = p.join(path, 'batmanParams{}.csv'.format(suffix))\n params['tmin'] = tmin\n params['tmax'] = tmax\n params['tstep'] = tstep\n params['wmin'] = wmin\n params['wmax'] = wmax\n params['wnum'] = wnum\n params['wlog'] = wlog\n outfile = p.join(path, 'batmanConfig{}.param'.format(suffix))\n with open(outfile, 'w+') as f:\n json.dump(params, f)\n print('Batman config written to {}'.format(outfile))\n\n\ndef make_lightcurve(t0, r, i, p, width, u_type, u_param, t):\n \"\"\"\n Generate a batman lightcurve with the given parameters.\n \n Parameters\n ----------\n t0 (num): time of inferior conjunction\n r (num): planet radius (in 
stellar radii)\n i (num): orbital inclination (in degrees)\n p (num): orbital period\n width (num): width parameter (defined as a**3/p**2)\n u_type (str): limb darkening model\n u_param (list): parameters for limb darkening\n \n t: timesteps that you want the fluxes at\n \n assume circular orbit\n \"\"\"\n params = batman.TransitParams()\n params.rp = r\n params.inc = i\n params.w = 0\n params.ecc = 0\n params.per = p\n params.t0 = t0\n params.a = (width * p ** 2) ** (1 / 3)\n params.limb_dark = u_type\n params.u = u_param\n model = batman.TransitModel(params, t)\n flux = model.light_curve(params)\n return flux\n\n\ndef make_batman(paramfile, outdir, norm=False, write=True, verbose=True):\n \"\"\" \n Return astropy tables of batman params and generated curves based on the\n parameters given in paramfile. \n\n Parameters\n ----------\n paramfile (str): path to JSON param file written by make_batman_config\n outdir (str): path to write output curve and param files\n norm (bool): normalize curves to unit integrated area\n write (bool): write param and curve tables to files\n verbose (bool): print logging and timing info\n \"\"\"\n if verbose:\n print('Reading param file', flush=True)\n with open(paramfile, 'r') as f:\n d = json.load(f)\n if verbose:\n print('Setting param ranges', flush=True)\n t = np.arange(d['tmin'], d['tmax'], d['tstep'])\n if d['wlog']:\n widths = np.logspace(d['wmin'], d['wmax'], d['wnum'])\n else:\n widths = np.linspace(d['wmin'], d['wmax'], d['wnum'])\n nparams = len(widths)\n radii = 0.1 * np.ones(nparams)\n incs = 90 * np.ones(nparams)\n u = ['0.1 0.3'] * nparams\n ld = ['quadratic'] * nparams\n per = 100 * np.ones(nparams)\n t0 = np.zeros(nparams)\n e = np.zeros(nparams)\n w = np.zeros(nparams)\n curveID = ['curve{}'.format(i) for i in range(nparams)]\n cols = [curveID, radii, incs, widths, per, u, ld, t0, e, w]\n colnames = ['curveID', 'rp', 'i', 'width', 'per', 'u', 'ld', 't0', 'e', 'w'\n ]\n batmanParams = tbl.Table(cols, names=colnames)\n if verbose:\n print('Generating curves', flush=True)\n start = time()\n batmanDict = {'times': t}\n err = 0\n for i in range(len(batmanParams)):\n p = batmanParams[i]\n cID = p['curveID']\n c = make_lightcurve(p['t0'], p['rp'], p['i'], p['per'], p['width'],\n p['ld'], [float(val) for val in p['u'].split()], t)\n if norm:\n cmax = np.max(c)\n cmin = np.min(c)\n c = (c - cmin) / (cmax - cmin)\n c = 1 - c\n c = c / np.sum(c)\n c = 1 - c\n if np.isnan(c).any() or sum(c == 1) < 5:\n print('Batman {} failed'.format(cID), flush=True)\n err += 1\n continue\n batmanDict[cID] = c\n if verbose and i % 100 == 0:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(i + 1 - err,\n nparams, elapsed), flush=True)\n batmanCurves = tbl.Table(batmanDict)\n if verbose:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(nparams - err,\n nparams, elapsed), flush=True)\n if write:\n if verbose:\n start = time()\n print('Writing files', flush=True)\n ast.io.ascii.write(batmanParams, d['params_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote params to {}'.format(d['params_fname']))\n ast.io.ascii.write(batmanCurves, d['curves_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote curves to {}'.format(d['curves_fname']))\n elapsed = time() - start\n print('Wrote files in {} s'.format(elapsed), flush=True)\n return batmanParams, batmanCurves\n\n\ndef read_batman(batmancurves_file):\n \"\"\"\n Return times, 
cureve name, and batman curves from a batmanCurves file.\n \n Parameters\n ----------\n batmancurves_file (str): Path to a batmanCurves file\n\n Return\n ------\n times (numpy Array): The times array (x axis) of all batmanCurves\n curve_names (numpy Array): The name of each batmanCurve\n batmanCurves (astropy Table): The table of batmanCurves\n \"\"\"\n print('Reading batmanCurves from {}...'.format(batmancurves_file))\n batmanCurves = ast.io.ascii.read(batmancurves_file, data_start=1,\n format='csv')\n times = np.array(batmanCurves['times'])\n curve_names = np.array(batmanCurves.colnames[1:])\n return times, curve_names, batmanCurves\n\n\ndef read_tess(tess_dir, sector_name, start=0, end=None):\n \"\"\"\n Return list of tess .fits files in tess_dir from [start:end]. Default\n to all fits files in directory if start and end are not specified.\n\n Parameters\n ----------\n tess_dir (str): path to tess data directory\n sector_name (str): name of sector subdirectory (e.g. Sector1)\n start (int): (Optional) Index of file in directory to start at\n end (int): (Optional) Index of file to end at\n \n Return\n ------\n tess_names (list): List of file paths to tess .fits data\n \"\"\"\n print('Reading TESS from {}, s:{}, e:{}...'.format(sector_name, start, end)\n )\n sector_path = p.join(tess_dir, sector_name)\n sector_files = glob.glob(p.join(sector_path, '*.fits'))\n tess_names = sector_files[start:end]\n return tess_names\n\n\ndef open_tess_fits(tess_fpath, norm=False):\n try:\n with ast.io.fits.open(tess_fpath, mode='readonly') as hdulist:\n hdr = hdulist[0].header\n tess_time = hdulist[1].data['TIME']\n tess_flux = hdulist[1].data['PDCSAP_FLUX']\n med = np.nanmedian(tess_flux)\n tess_flux[np.isnan(tess_flux)] = med\n if norm:\n tmin = np.min(tess_flux)\n tmax = np.max(tess_flux)\n tess_flux = (tess_flux - tmin) / (tmax - tmin)\n except Exception as e:\n print('ERROR reading file: ', tess_fpath, ' with error: ', e, flush\n =True)\n return None, None\n return tess_time, tess_flux\n\n\ndef convolve(tess_time, tess_flux, batmanCurves, curve_names, num_keep=10,\n plot=False):\n conv_start = time()\n curves = []\n times = np.zeros(num_keep)\n convs = np.zeros(num_keep)\n print('Starting convolutions...', flush=True)\n for i, curvename in enumerate(curve_names):\n batman_curve = batmanCurves[curvename]\n conv = np.abs(sig.fftconvolve(1 - tess_flux, 1 - batman_curve, 'same'))\n ind_max = np.argmax(conv)\n conv_max = conv[ind_max]\n if num_keep < len(curve_names):\n if conv_max > convs[-1]:\n ind = np.searchsorted(-convs, -conv_max)\n curves = curves[:ind] + [curvename] + curves[ind:-1]\n times = np.insert(times, ind, tess_time[ind_max])[:-1]\n convs = np.insert(convs, ind, conv_max)[:-1]\n else:\n curves.append(curvename)\n times[i] = tess_time[ind_max]\n convs[i] = conv_max\n if plot:\n plt.plot(tess_time, conv, label=curvename)\n conv_time = time() - conv_start\n print('Convolved {} curves in {:.3} s'.format(len(curve_names),\n conv_time), flush=True)\n return curves, times, convs\n\n\ndef tbconvolve(tess_dir, batman_dir, batman_suffix, sector, start, end,\n output_dir, num_keep=10, norm_tess=False, write=True, writechunk=10,\n verbosity=0):\n \"\"\"\n \n Parameters\n ----------\n tess_dir(str): directory to TESS data\n batman_dir (str): directory to model data\n batman_suffix(str): suffix to append to barmanCurves file (e.g. 
_small)\n sector (int): sector to pull data from\n start (int): file to start at\n end (int): file to end at\n output_dir (str): directory to write candidates.csv\n \"\"\"\n tconv_start = time()\n print('===START TCONVOLVE===', flush=True)\n tess_dir = p.abspath(tess_dir)\n batman_dir = p.abspath(batman_dir)\n output_dir = p.abspath(output_dir)\n sector_name = 'Sector{}'.format(sector)\n if sector == 0:\n sector_name = 'sample_' + sector_name\n tess_names = read_tess(tess_dir, sector_name, start, end)\n ntess = len(tess_names)\n print('Found {} TESS files to process'.format(ntess), flush=True)\n if ntess < 1:\n print('No tess curves found, quitting....')\n return None\n batmanCurves_file = p.join(batman_dir, 'batmanCurves{}.csv'.format(\n batman_suffix))\n times, curve_names, batmanCurves = read_batman(batmanCurves_file)\n nbatman = len(curve_names)\n print('Found {} Batman curves'.format(nbatman), flush=True)\n if ntess < 1:\n print('No batman curves found, quitting....')\n return None\n params = pd.read_csv(p.join(batman_dir, 'batmanParams{}.csv'.format(\n batman_suffix)))\n colnames = ['sector', 'tessFile', 'curveID', 'tcorr', 'correlation',\n 'chisq']\n d = {key: [] for key in colnames}\n s = 0\n nerr = 0\n for tind, tess_fpath in enumerate(tess_names):\n tess_start = time()\n tess_fname = p.basename(tess_fpath)\n print('Starting TESS file: {}'.format(tess_fname), flush=True)\n tess_time, tess_flux = open_tess_fits(tess_fpath, norm_tess)\n if tess_time is None:\n nerr += 1\n continue\n if num_keep < 1:\n num_keep = len(curve_names)\n curves, times, convs = convolve(tess_time, tess_flux, batmanCurves,\n curve_names, num_keep)\n d['sector'].extend([sector_name] * num_keep)\n d['tessFile'].extend([tess_fname] * num_keep)\n d['curveID'].extend(curves)\n d['tcorr'].extend(times)\n d['correlation'].extend(convs)\n d['chisq'].extend(get_chi_sq(tess_time, tess_flux, times, params))\n print(len(d['tcorr']), len(d['chisq']))\n if write:\n if tind % writechunk == writechunk - 1 or tind == len(tess_names\n ) - 1:\n e = start + tind\n outname = 'candidates_sector{}_s{}_e{}.csv'.format(sector, s, e\n )\n outpath = p.join(output_dir, outname)\n candidates = tbl.Table(d, names=colnames)\n ast.io.ascii.write(candidates, outpath, format='csv',\n overwrite=True, comment='#', fast_writer=False)\n print('Wrote file {} at {} s'.format(outname, time() -\n tess_start), flush=True)\n s = e + 1\n candidates = tbl.Table(d, names=colnames)\n cdf = pd.DataFrame.from_dict(d)\n cdf = cdf[colnames]\n df = pd.merge(cdf, params, on='curveID', how='left')\n df.to_csv(p.join(output_dir, 'chisq{}.csv'.format(batman_suffix)))\n tconv_time = time() - tconv_start\n print('Convolved {}/{} tess files with {} curves in {:.3} s'.format(\n ntess - nerr, ntess, nbatman, tconv_time), flush=True)\n print('===END TCONVOLVE===', flush=True)\n return candidates\n\n\ndef get_chi_sq(tess_time, tess_flux, tcorr, params):\n current_fname = ''\n chi_squared = []\n arr = tess_flux / np.nanmedian(tess_flux)\n arr[np.isnan(arr)] = np.nanmedian(arr)\n arr[arr == 0] = np.nanmedian(arr)\n mu, std = stat.norm.fit(1 / arr)\n peaks, _ = sig.find_peaks(1 / arr, height=mu + 4 * std, distance=1000)\n p = np.diff(tess_time[peaks])\n PER = np.mean(p)\n u_type = 'quadratic'\n u_param = [0.1, 0.3]\n t = tess_time - tess_time[0]\n outcounts = np.nan_to_num(tess_flux[tess_flux > np.nanmean(tess_flux)])\n mu, sigma = stat.norm.fit(outcounts)\n normalized_fluxes = tess_flux / mu\n normalized_sigma = np.sqrt(tess_flux) / mu\n for i, row in params.iterrows():\n 
T0 = tcorr[i] - tess_time[0]\n RP = row['rp']\n INC = row['i']\n width = row['width']\n chi_squared.append(np.nansum((normalized_fluxes - make_lightcurve(\n T0, RP, INC, PER, width, u_type, u_param, t)) ** 2 / \n normalized_sigma ** 2 / 8))\n return chi_squared\n\n\ndef main():\n import argparse\n parser = argparse.ArgumentParser()\n parser.add_argument('tess_dir', type=str)\n parser.add_argument('batman_dir', type=str)\n parser.add_argument('sector', type=int)\n parser.add_argument('start', type=int)\n parser.add_argument('end', type=int)\n parser.add_argument('output_dir', type=str)\n parser.add_argument('batman_suffix', type=str, default='')\n parser.add_argument('-v', '--verbosity', default=False, action=\n 'store_true', help='Print console output')\n args = parser.parse_args()\n tbconvolve(args.tess_dir, args.batman_dir, args.batman_suffix, args.\n sector, args.start, args.end, args.output_dir, num_keep=-1,\n norm_tess=True, verbosity=args.verbosity)\n\n\nif __name__ == '__main__':\n main()\n", "<docstring token>\n<import token>\n\n\ndef make_batman_config(tmin, tmax, tstep, wmin, wmax, wnum, wlog=True,\n suffix='', path='.'):\n \"\"\"\n Write batman parameters to a JSON param file used to generate batmanCurves.\n\n Parameters\n ----------\n tmin (num): minimum time\n tmax (num): maximum time\n tnum (num): time step\n wmin (num): minimum width\n wmax (num): maximum width\n wnum (num): number of widths to generate\n wlog (bool): use logspace for widths if True, else use linspace\n suffix (str): append suffix to config and curve file names\n \"\"\"\n params = {}\n params['curves_fname'] = p.join(path, 'batmanCurves{}.csv'.format(suffix))\n params['params_fname'] = p.join(path, 'batmanParams{}.csv'.format(suffix))\n params['tmin'] = tmin\n params['tmax'] = tmax\n params['tstep'] = tstep\n params['wmin'] = wmin\n params['wmax'] = wmax\n params['wnum'] = wnum\n params['wlog'] = wlog\n outfile = p.join(path, 'batmanConfig{}.param'.format(suffix))\n with open(outfile, 'w+') as f:\n json.dump(params, f)\n print('Batman config written to {}'.format(outfile))\n\n\ndef make_lightcurve(t0, r, i, p, width, u_type, u_param, t):\n \"\"\"\n Generate a batman lightcurve with the given parameters.\n \n Parameters\n ----------\n t0 (num): time of inferior conjunction\n r (num): planet radius (in stellar radii)\n i (num): orbital inclination (in degrees)\n p (num): orbital period\n width (num): width parameter (defined as a**3/p**2)\n u_type (str): limb darkening model\n u_param (list): parameters for limb darkening\n \n t: timesteps that you want the fluxes at\n \n assume circular orbit\n \"\"\"\n params = batman.TransitParams()\n params.rp = r\n params.inc = i\n params.w = 0\n params.ecc = 0\n params.per = p\n params.t0 = t0\n params.a = (width * p ** 2) ** (1 / 3)\n params.limb_dark = u_type\n params.u = u_param\n model = batman.TransitModel(params, t)\n flux = model.light_curve(params)\n return flux\n\n\ndef make_batman(paramfile, outdir, norm=False, write=True, verbose=True):\n \"\"\" \n Return astropy tables of batman params and generated curves based on the\n parameters given in paramfile. 
\n\n Parameters\n ----------\n paramfile (str): path to JSON param file written by make_batman_config\n outdir (str): path to write output curve and param files\n norm (bool): normalize curves to unit integrated area\n write (bool): write param and curve tables to files\n verbose (bool): print logging and timing info\n \"\"\"\n if verbose:\n print('Reading param file', flush=True)\n with open(paramfile, 'r') as f:\n d = json.load(f)\n if verbose:\n print('Setting param ranges', flush=True)\n t = np.arange(d['tmin'], d['tmax'], d['tstep'])\n if d['wlog']:\n widths = np.logspace(d['wmin'], d['wmax'], d['wnum'])\n else:\n widths = np.linspace(d['wmin'], d['wmax'], d['wnum'])\n nparams = len(widths)\n radii = 0.1 * np.ones(nparams)\n incs = 90 * np.ones(nparams)\n u = ['0.1 0.3'] * nparams\n ld = ['quadratic'] * nparams\n per = 100 * np.ones(nparams)\n t0 = np.zeros(nparams)\n e = np.zeros(nparams)\n w = np.zeros(nparams)\n curveID = ['curve{}'.format(i) for i in range(nparams)]\n cols = [curveID, radii, incs, widths, per, u, ld, t0, e, w]\n colnames = ['curveID', 'rp', 'i', 'width', 'per', 'u', 'ld', 't0', 'e', 'w'\n ]\n batmanParams = tbl.Table(cols, names=colnames)\n if verbose:\n print('Generating curves', flush=True)\n start = time()\n batmanDict = {'times': t}\n err = 0\n for i in range(len(batmanParams)):\n p = batmanParams[i]\n cID = p['curveID']\n c = make_lightcurve(p['t0'], p['rp'], p['i'], p['per'], p['width'],\n p['ld'], [float(val) for val in p['u'].split()], t)\n if norm:\n cmax = np.max(c)\n cmin = np.min(c)\n c = (c - cmin) / (cmax - cmin)\n c = 1 - c\n c = c / np.sum(c)\n c = 1 - c\n if np.isnan(c).any() or sum(c == 1) < 5:\n print('Batman {} failed'.format(cID), flush=True)\n err += 1\n continue\n batmanDict[cID] = c\n if verbose and i % 100 == 0:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(i + 1 - err,\n nparams, elapsed), flush=True)\n batmanCurves = tbl.Table(batmanDict)\n if verbose:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(nparams - err,\n nparams, elapsed), flush=True)\n if write:\n if verbose:\n start = time()\n print('Writing files', flush=True)\n ast.io.ascii.write(batmanParams, d['params_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote params to {}'.format(d['params_fname']))\n ast.io.ascii.write(batmanCurves, d['curves_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote curves to {}'.format(d['curves_fname']))\n elapsed = time() - start\n print('Wrote files in {} s'.format(elapsed), flush=True)\n return batmanParams, batmanCurves\n\n\ndef read_batman(batmancurves_file):\n \"\"\"\n Return times, cureve name, and batman curves from a batmanCurves file.\n \n Parameters\n ----------\n batmancurves_file (str): Path to a batmanCurves file\n\n Return\n ------\n times (numpy Array): The times array (x axis) of all batmanCurves\n curve_names (numpy Array): The name of each batmanCurve\n batmanCurves (astropy Table): The table of batmanCurves\n \"\"\"\n print('Reading batmanCurves from {}...'.format(batmancurves_file))\n batmanCurves = ast.io.ascii.read(batmancurves_file, data_start=1,\n format='csv')\n times = np.array(batmanCurves['times'])\n curve_names = np.array(batmanCurves.colnames[1:])\n return times, curve_names, batmanCurves\n\n\ndef read_tess(tess_dir, sector_name, start=0, end=None):\n \"\"\"\n Return list of tess .fits files in tess_dir from [start:end]. 
Default\n to all fits files in directory if start and end are not specified.\n\n Parameters\n ----------\n tess_dir (str): path to tess data directory\n sector_name (str): name of sector subdirectory (e.g. Sector1)\n start (int): (Optional) Index of file in directory to start at\n end (int): (Optional) Index of file to end at\n \n Return\n ------\n tess_names (list): List of file paths to tess .fits data\n \"\"\"\n print('Reading TESS from {}, s:{}, e:{}...'.format(sector_name, start, end)\n )\n sector_path = p.join(tess_dir, sector_name)\n sector_files = glob.glob(p.join(sector_path, '*.fits'))\n tess_names = sector_files[start:end]\n return tess_names\n\n\ndef open_tess_fits(tess_fpath, norm=False):\n try:\n with ast.io.fits.open(tess_fpath, mode='readonly') as hdulist:\n hdr = hdulist[0].header\n tess_time = hdulist[1].data['TIME']\n tess_flux = hdulist[1].data['PDCSAP_FLUX']\n med = np.nanmedian(tess_flux)\n tess_flux[np.isnan(tess_flux)] = med\n if norm:\n tmin = np.min(tess_flux)\n tmax = np.max(tess_flux)\n tess_flux = (tess_flux - tmin) / (tmax - tmin)\n except Exception as e:\n print('ERROR reading file: ', tess_fpath, ' with error: ', e, flush\n =True)\n return None, None\n return tess_time, tess_flux\n\n\ndef convolve(tess_time, tess_flux, batmanCurves, curve_names, num_keep=10,\n plot=False):\n conv_start = time()\n curves = []\n times = np.zeros(num_keep)\n convs = np.zeros(num_keep)\n print('Starting convolutions...', flush=True)\n for i, curvename in enumerate(curve_names):\n batman_curve = batmanCurves[curvename]\n conv = np.abs(sig.fftconvolve(1 - tess_flux, 1 - batman_curve, 'same'))\n ind_max = np.argmax(conv)\n conv_max = conv[ind_max]\n if num_keep < len(curve_names):\n if conv_max > convs[-1]:\n ind = np.searchsorted(-convs, -conv_max)\n curves = curves[:ind] + [curvename] + curves[ind:-1]\n times = np.insert(times, ind, tess_time[ind_max])[:-1]\n convs = np.insert(convs, ind, conv_max)[:-1]\n else:\n curves.append(curvename)\n times[i] = tess_time[ind_max]\n convs[i] = conv_max\n if plot:\n plt.plot(tess_time, conv, label=curvename)\n conv_time = time() - conv_start\n print('Convolved {} curves in {:.3} s'.format(len(curve_names),\n conv_time), flush=True)\n return curves, times, convs\n\n\ndef tbconvolve(tess_dir, batman_dir, batman_suffix, sector, start, end,\n output_dir, num_keep=10, norm_tess=False, write=True, writechunk=10,\n verbosity=0):\n \"\"\"\n \n Parameters\n ----------\n tess_dir(str): directory to TESS data\n batman_dir (str): directory to model data\n batman_suffix(str): suffix to append to barmanCurves file (e.g. 
_small)\n sector (int): sector to pull data from\n start (int): file to start at\n end (int): file to end at\n output_dir (str): directory to write candidates.csv\n \"\"\"\n tconv_start = time()\n print('===START TCONVOLVE===', flush=True)\n tess_dir = p.abspath(tess_dir)\n batman_dir = p.abspath(batman_dir)\n output_dir = p.abspath(output_dir)\n sector_name = 'Sector{}'.format(sector)\n if sector == 0:\n sector_name = 'sample_' + sector_name\n tess_names = read_tess(tess_dir, sector_name, start, end)\n ntess = len(tess_names)\n print('Found {} TESS files to process'.format(ntess), flush=True)\n if ntess < 1:\n print('No tess curves found, quitting....')\n return None\n batmanCurves_file = p.join(batman_dir, 'batmanCurves{}.csv'.format(\n batman_suffix))\n times, curve_names, batmanCurves = read_batman(batmanCurves_file)\n nbatman = len(curve_names)\n print('Found {} Batman curves'.format(nbatman), flush=True)\n if ntess < 1:\n print('No batman curves found, quitting....')\n return None\n params = pd.read_csv(p.join(batman_dir, 'batmanParams{}.csv'.format(\n batman_suffix)))\n colnames = ['sector', 'tessFile', 'curveID', 'tcorr', 'correlation',\n 'chisq']\n d = {key: [] for key in colnames}\n s = 0\n nerr = 0\n for tind, tess_fpath in enumerate(tess_names):\n tess_start = time()\n tess_fname = p.basename(tess_fpath)\n print('Starting TESS file: {}'.format(tess_fname), flush=True)\n tess_time, tess_flux = open_tess_fits(tess_fpath, norm_tess)\n if tess_time is None:\n nerr += 1\n continue\n if num_keep < 1:\n num_keep = len(curve_names)\n curves, times, convs = convolve(tess_time, tess_flux, batmanCurves,\n curve_names, num_keep)\n d['sector'].extend([sector_name] * num_keep)\n d['tessFile'].extend([tess_fname] * num_keep)\n d['curveID'].extend(curves)\n d['tcorr'].extend(times)\n d['correlation'].extend(convs)\n d['chisq'].extend(get_chi_sq(tess_time, tess_flux, times, params))\n print(len(d['tcorr']), len(d['chisq']))\n if write:\n if tind % writechunk == writechunk - 1 or tind == len(tess_names\n ) - 1:\n e = start + tind\n outname = 'candidates_sector{}_s{}_e{}.csv'.format(sector, s, e\n )\n outpath = p.join(output_dir, outname)\n candidates = tbl.Table(d, names=colnames)\n ast.io.ascii.write(candidates, outpath, format='csv',\n overwrite=True, comment='#', fast_writer=False)\n print('Wrote file {} at {} s'.format(outname, time() -\n tess_start), flush=True)\n s = e + 1\n candidates = tbl.Table(d, names=colnames)\n cdf = pd.DataFrame.from_dict(d)\n cdf = cdf[colnames]\n df = pd.merge(cdf, params, on='curveID', how='left')\n df.to_csv(p.join(output_dir, 'chisq{}.csv'.format(batman_suffix)))\n tconv_time = time() - tconv_start\n print('Convolved {}/{} tess files with {} curves in {:.3} s'.format(\n ntess - nerr, ntess, nbatman, tconv_time), flush=True)\n print('===END TCONVOLVE===', flush=True)\n return candidates\n\n\ndef get_chi_sq(tess_time, tess_flux, tcorr, params):\n current_fname = ''\n chi_squared = []\n arr = tess_flux / np.nanmedian(tess_flux)\n arr[np.isnan(arr)] = np.nanmedian(arr)\n arr[arr == 0] = np.nanmedian(arr)\n mu, std = stat.norm.fit(1 / arr)\n peaks, _ = sig.find_peaks(1 / arr, height=mu + 4 * std, distance=1000)\n p = np.diff(tess_time[peaks])\n PER = np.mean(p)\n u_type = 'quadratic'\n u_param = [0.1, 0.3]\n t = tess_time - tess_time[0]\n outcounts = np.nan_to_num(tess_flux[tess_flux > np.nanmean(tess_flux)])\n mu, sigma = stat.norm.fit(outcounts)\n normalized_fluxes = tess_flux / mu\n normalized_sigma = np.sqrt(tess_flux) / mu\n for i, row in params.iterrows():\n 
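# (Editor's note, hedged) tcorr[i] below pairs the i-th params row with the\n # i-th correlation time returned by convolve(); this alignment only holds\n # when num_keep covers every curve so both sequences share the params-file\n # order, otherwise rows and times can be mismatched (or index out of range).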
T0 = tcorr[i] - tess_time[0]\n RP = row['rp']\n INC = row['i']\n width = row['width']\n chi_squared.append(np.nansum((normalized_fluxes - make_lightcurve(\n T0, RP, INC, PER, width, u_type, u_param, t)) ** 2 / \n normalized_sigma ** 2 / 8))\n return chi_squared\n\n\ndef main():\n import argparse\n parser = argparse.ArgumentParser()\n parser.add_argument('tess_dir', type=str)\n parser.add_argument('batman_dir', type=str)\n parser.add_argument('sector', type=int)\n parser.add_argument('start', type=int)\n parser.add_argument('end', type=int)\n parser.add_argument('output_dir', type=str)\n parser.add_argument('batman_suffix', type=str, default='')\n parser.add_argument('-v', '--verbosity', default=False, action=\n 'store_true', help='Print console output')\n args = parser.parse_args()\n tbconvolve(args.tess_dir, args.batman_dir, args.batman_suffix, args.\n sector, args.start, args.end, args.output_dir, num_keep=-1,\n norm_tess=True, verbosity=args.verbosity)\n\n\nif __name__ == '__main__':\n main()\n", "<docstring token>\n<import token>\n\n\ndef make_batman_config(tmin, tmax, tstep, wmin, wmax, wnum, wlog=True,\n suffix='', path='.'):\n \"\"\"\n Write batman parameters to a JSON param file used to generate batmanCurves.\n\n Parameters\n ----------\n tmin (num): minimum time\n tmax (num): maximum time\n tnum (num): time step\n wmin (num): minimum width\n wmax (num): maximum width\n wnum (num): number of widths to generate\n wlog (bool): use logspace for widths if True, else use linspace\n suffix (str): append suffix to config and curve file names\n \"\"\"\n params = {}\n params['curves_fname'] = p.join(path, 'batmanCurves{}.csv'.format(suffix))\n params['params_fname'] = p.join(path, 'batmanParams{}.csv'.format(suffix))\n params['tmin'] = tmin\n params['tmax'] = tmax\n params['tstep'] = tstep\n params['wmin'] = wmin\n params['wmax'] = wmax\n params['wnum'] = wnum\n params['wlog'] = wlog\n outfile = p.join(path, 'batmanConfig{}.param'.format(suffix))\n with open(outfile, 'w+') as f:\n json.dump(params, f)\n print('Batman config written to {}'.format(outfile))\n\n\ndef make_lightcurve(t0, r, i, p, width, u_type, u_param, t):\n \"\"\"\n Generate a batman lightcurve with the given parameters.\n \n Parameters\n ----------\n t0 (num): time of inferior conjunction\n r (num): planet radius (in stellar radii)\n i (num): orbital inclination (in degrees)\n p (num): orbital period\n width (num): width parameter (defined as a**3/p**2)\n u_type (str): limb darkening model\n u_param (list): parameters for limb darkening\n \n t: timesteps that you want the fluxes at\n \n assume circular orbit\n \"\"\"\n params = batman.TransitParams()\n params.rp = r\n params.inc = i\n params.w = 0\n params.ecc = 0\n params.per = p\n params.t0 = t0\n params.a = (width * p ** 2) ** (1 / 3)\n params.limb_dark = u_type\n params.u = u_param\n model = batman.TransitModel(params, t)\n flux = model.light_curve(params)\n return flux\n\n\ndef make_batman(paramfile, outdir, norm=False, write=True, verbose=True):\n \"\"\" \n Return astropy tables of batman params and generated curves based on the\n parameters given in paramfile. 
\n\n Parameters\n ----------\n paramfile (str): path to JSON param file written by make_batman_config\n outdir (str): path to write output curve and param files\n norm (bool): normalize curves to unit integrated area\n write (bool): write param and curve tables to files\n verbose (bool): print logging and timing info\n \"\"\"\n if verbose:\n print('Reading param file', flush=True)\n with open(paramfile, 'r') as f:\n d = json.load(f)\n if verbose:\n print('Setting param ranges', flush=True)\n t = np.arange(d['tmin'], d['tmax'], d['tstep'])\n if d['wlog']:\n widths = np.logspace(d['wmin'], d['wmax'], d['wnum'])\n else:\n widths = np.linspace(d['wmin'], d['wmax'], d['wnum'])\n nparams = len(widths)\n radii = 0.1 * np.ones(nparams)\n incs = 90 * np.ones(nparams)\n u = ['0.1 0.3'] * nparams\n ld = ['quadratic'] * nparams\n per = 100 * np.ones(nparams)\n t0 = np.zeros(nparams)\n e = np.zeros(nparams)\n w = np.zeros(nparams)\n curveID = ['curve{}'.format(i) for i in range(nparams)]\n cols = [curveID, radii, incs, widths, per, u, ld, t0, e, w]\n colnames = ['curveID', 'rp', 'i', 'width', 'per', 'u', 'ld', 't0', 'e', 'w'\n ]\n batmanParams = tbl.Table(cols, names=colnames)\n if verbose:\n print('Generating curves', flush=True)\n start = time()\n batmanDict = {'times': t}\n err = 0\n for i in range(len(batmanParams)):\n p = batmanParams[i]\n cID = p['curveID']\n c = make_lightcurve(p['t0'], p['rp'], p['i'], p['per'], p['width'],\n p['ld'], [float(val) for val in p['u'].split()], t)\n if norm:\n cmax = np.max(c)\n cmin = np.min(c)\n c = (c - cmin) / (cmax - cmin)\n c = 1 - c\n c = c / np.sum(c)\n c = 1 - c\n if np.isnan(c).any() or sum(c == 1) < 5:\n print('Batman {} failed'.format(cID), flush=True)\n err += 1\n continue\n batmanDict[cID] = c\n if verbose and i % 100 == 0:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(i + 1 - err,\n nparams, elapsed), flush=True)\n batmanCurves = tbl.Table(batmanDict)\n if verbose:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(nparams - err,\n nparams, elapsed), flush=True)\n if write:\n if verbose:\n start = time()\n print('Writing files', flush=True)\n ast.io.ascii.write(batmanParams, d['params_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote params to {}'.format(d['params_fname']))\n ast.io.ascii.write(batmanCurves, d['curves_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote curves to {}'.format(d['curves_fname']))\n elapsed = time() - start\n print('Wrote files in {} s'.format(elapsed), flush=True)\n return batmanParams, batmanCurves\n\n\ndef read_batman(batmancurves_file):\n \"\"\"\n Return times, cureve name, and batman curves from a batmanCurves file.\n \n Parameters\n ----------\n batmancurves_file (str): Path to a batmanCurves file\n\n Return\n ------\n times (numpy Array): The times array (x axis) of all batmanCurves\n curve_names (numpy Array): The name of each batmanCurve\n batmanCurves (astropy Table): The table of batmanCurves\n \"\"\"\n print('Reading batmanCurves from {}...'.format(batmancurves_file))\n batmanCurves = ast.io.ascii.read(batmancurves_file, data_start=1,\n format='csv')\n times = np.array(batmanCurves['times'])\n curve_names = np.array(batmanCurves.colnames[1:])\n return times, curve_names, batmanCurves\n\n\ndef read_tess(tess_dir, sector_name, start=0, end=None):\n \"\"\"\n Return list of tess .fits files in tess_dir from [start:end]. 
Default\n to all fits files in directory if start and end are not specified.\n\n Parameters\n ----------\n tess_dir (str): path to tess data directory\n sector_name (str): name of sector subdirectory (e.g. Sector1)\n start (int): (Optional) Index of file in directory to start at\n end (int): (Optional) Index of file to end at\n \n Return\n ------\n tess_names (list): List of file paths to tess .fits data\n \"\"\"\n print('Reading TESS from {}, s:{}, e:{}...'.format(sector_name, start, end)\n )\n sector_path = p.join(tess_dir, sector_name)\n sector_files = glob.glob(p.join(sector_path, '*.fits'))\n tess_names = sector_files[start:end]\n return tess_names\n\n\ndef open_tess_fits(tess_fpath, norm=False):\n try:\n with ast.io.fits.open(tess_fpath, mode='readonly') as hdulist:\n hdr = hdulist[0].header\n tess_time = hdulist[1].data['TIME']\n tess_flux = hdulist[1].data['PDCSAP_FLUX']\n med = np.nanmedian(tess_flux)\n tess_flux[np.isnan(tess_flux)] = med\n if norm:\n tmin = np.min(tess_flux)\n tmax = np.max(tess_flux)\n tess_flux = (tess_flux - tmin) / (tmax - tmin)\n except Exception as e:\n print('ERROR reading file: ', tess_fpath, ' with error: ', e, flush\n =True)\n return None, None\n return tess_time, tess_flux\n\n\ndef convolve(tess_time, tess_flux, batmanCurves, curve_names, num_keep=10,\n plot=False):\n conv_start = time()\n curves = []\n times = np.zeros(num_keep)\n convs = np.zeros(num_keep)\n print('Starting convolutions...', flush=True)\n for i, curvename in enumerate(curve_names):\n batman_curve = batmanCurves[curvename]\n conv = np.abs(sig.fftconvolve(1 - tess_flux, 1 - batman_curve, 'same'))\n ind_max = np.argmax(conv)\n conv_max = conv[ind_max]\n if num_keep < len(curve_names):\n if conv_max > convs[-1]:\n ind = np.searchsorted(-convs, -conv_max)\n curves = curves[:ind] + [curvename] + curves[ind:-1]\n times = np.insert(times, ind, tess_time[ind_max])[:-1]\n convs = np.insert(convs, ind, conv_max)[:-1]\n else:\n curves.append(curvename)\n times[i] = tess_time[ind_max]\n convs[i] = conv_max\n if plot:\n plt.plot(tess_time, conv, label=curvename)\n conv_time = time() - conv_start\n print('Convolved {} curves in {:.3} s'.format(len(curve_names),\n conv_time), flush=True)\n return curves, times, convs\n\n\ndef tbconvolve(tess_dir, batman_dir, batman_suffix, sector, start, end,\n output_dir, num_keep=10, norm_tess=False, write=True, writechunk=10,\n verbosity=0):\n \"\"\"\n \n Parameters\n ----------\n tess_dir(str): directory to TESS data\n batman_dir (str): directory to model data\n batman_suffix(str): suffix to append to barmanCurves file (e.g. 
_small)\n sector (int): sector to pull data from\n start (int): file to start at\n end (int): file to end at\n output_dir (str): directory to write candidates.csv\n \"\"\"\n tconv_start = time()\n print('===START TCONVOLVE===', flush=True)\n tess_dir = p.abspath(tess_dir)\n batman_dir = p.abspath(batman_dir)\n output_dir = p.abspath(output_dir)\n sector_name = 'Sector{}'.format(sector)\n if sector == 0:\n sector_name = 'sample_' + sector_name\n tess_names = read_tess(tess_dir, sector_name, start, end)\n ntess = len(tess_names)\n print('Found {} TESS files to process'.format(ntess), flush=True)\n if ntess < 1:\n print('No tess curves found, quitting....')\n return None\n batmanCurves_file = p.join(batman_dir, 'batmanCurves{}.csv'.format(\n batman_suffix))\n times, curve_names, batmanCurves = read_batman(batmanCurves_file)\n nbatman = len(curve_names)\n print('Found {} Batman curves'.format(nbatman), flush=True)\n if ntess < 1:\n print('No batman curves found, quitting....')\n return None\n params = pd.read_csv(p.join(batman_dir, 'batmanParams{}.csv'.format(\n batman_suffix)))\n colnames = ['sector', 'tessFile', 'curveID', 'tcorr', 'correlation',\n 'chisq']\n d = {key: [] for key in colnames}\n s = 0\n nerr = 0\n for tind, tess_fpath in enumerate(tess_names):\n tess_start = time()\n tess_fname = p.basename(tess_fpath)\n print('Starting TESS file: {}'.format(tess_fname), flush=True)\n tess_time, tess_flux = open_tess_fits(tess_fpath, norm_tess)\n if tess_time is None:\n nerr += 1\n continue\n if num_keep < 1:\n num_keep = len(curve_names)\n curves, times, convs = convolve(tess_time, tess_flux, batmanCurves,\n curve_names, num_keep)\n d['sector'].extend([sector_name] * num_keep)\n d['tessFile'].extend([tess_fname] * num_keep)\n d['curveID'].extend(curves)\n d['tcorr'].extend(times)\n d['correlation'].extend(convs)\n d['chisq'].extend(get_chi_sq(tess_time, tess_flux, times, params))\n print(len(d['tcorr']), len(d['chisq']))\n if write:\n if tind % writechunk == writechunk - 1 or tind == len(tess_names\n ) - 1:\n e = start + tind\n outname = 'candidates_sector{}_s{}_e{}.csv'.format(sector, s, e\n )\n outpath = p.join(output_dir, outname)\n candidates = tbl.Table(d, names=colnames)\n ast.io.ascii.write(candidates, outpath, format='csv',\n overwrite=True, comment='#', fast_writer=False)\n print('Wrote file {} at {} s'.format(outname, time() -\n tess_start), flush=True)\n s = e + 1\n candidates = tbl.Table(d, names=colnames)\n cdf = pd.DataFrame.from_dict(d)\n cdf = cdf[colnames]\n df = pd.merge(cdf, params, on='curveID', how='left')\n df.to_csv(p.join(output_dir, 'chisq{}.csv'.format(batman_suffix)))\n tconv_time = time() - tconv_start\n print('Convolved {}/{} tess files with {} curves in {:.3} s'.format(\n ntess - nerr, ntess, nbatman, tconv_time), flush=True)\n print('===END TCONVOLVE===', flush=True)\n return candidates\n\n\ndef get_chi_sq(tess_time, tess_flux, tcorr, params):\n current_fname = ''\n chi_squared = []\n arr = tess_flux / np.nanmedian(tess_flux)\n arr[np.isnan(arr)] = np.nanmedian(arr)\n arr[arr == 0] = np.nanmedian(arr)\n mu, std = stat.norm.fit(1 / arr)\n peaks, _ = sig.find_peaks(1 / arr, height=mu + 4 * std, distance=1000)\n p = np.diff(tess_time[peaks])\n PER = np.mean(p)\n u_type = 'quadratic'\n u_param = [0.1, 0.3]\n t = tess_time - tess_time[0]\n outcounts = np.nan_to_num(tess_flux[tess_flux > np.nanmean(tess_flux)])\n mu, sigma = stat.norm.fit(outcounts)\n normalized_fluxes = tess_flux / mu\n normalized_sigma = np.sqrt(tess_flux) / mu\n for i, row in params.iterrows():\n 
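# (Editor's note, hedged) normalized_sigma above encodes a Poisson\n # photon-counting error model, sigma ~ sqrt(flux), scaled by the fitted\n # out-of-transit mean mu; for PDCSAP fluxes this is only an approximation.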
T0 = tcorr[i] - tess_time[0]\n RP = row['rp']\n INC = row['i']\n width = row['width']\n chi_squared.append(np.nansum((normalized_fluxes - make_lightcurve(\n T0, RP, INC, PER, width, u_type, u_param, t)) ** 2 / \n normalized_sigma ** 2 / 8))\n return chi_squared\n\n\ndef main():\n import argparse\n parser = argparse.ArgumentParser()\n parser.add_argument('tess_dir', type=str)\n parser.add_argument('batman_dir', type=str)\n parser.add_argument('sector', type=int)\n parser.add_argument('start', type=int)\n parser.add_argument('end', type=int)\n parser.add_argument('output_dir', type=str)\n parser.add_argument('batman_suffix', type=str, default='')\n parser.add_argument('-v', '--verbosity', default=False, action=\n 'store_true', help='Print console output')\n args = parser.parse_args()\n tbconvolve(args.tess_dir, args.batman_dir, args.batman_suffix, args.\n sector, args.start, args.end, args.output_dir, num_keep=-1,\n norm_tess=True, verbosity=args.verbosity)\n\n\n<code token>\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef make_lightcurve(t0, r, i, p, width, u_type, u_param, t):\n \"\"\"\n Generate a batman lightcurve with the given parameters.\n \n Parameters\n ----------\n t0 (num): time of inferior conjunction\n r (num): planet radius (in stellar radii)\n i (num): orbital inclination (in degrees)\n p (num): orbital period\n width (num): width parameter (defined as a**3/p**2)\n u_type (str): limb darkening model\n u_param (list): parameters for limb darkening\n \n t: timesteps that you want the fluxes at\n \n assume circular orbit\n \"\"\"\n params = batman.TransitParams()\n params.rp = r\n params.inc = i\n params.w = 0\n params.ecc = 0\n params.per = p\n params.t0 = t0\n params.a = (width * p ** 2) ** (1 / 3)\n params.limb_dark = u_type\n params.u = u_param\n model = batman.TransitModel(params, t)\n flux = model.light_curve(params)\n return flux\n\n\ndef make_batman(paramfile, outdir, norm=False, write=True, verbose=True):\n \"\"\" \n Return astropy tables of batman params and generated curves based on the\n parameters given in paramfile. 
\n\n Parameters\n ----------\n paramfile (str): path to JSON param file written by make_batman_config\n outdir (str): path to write output curve and param files\n norm (bool): normalize curves to unit integrated area\n write (bool): write param and curve tables to files\n verbose (bool): print logging and timing info\n \"\"\"\n if verbose:\n print('Reading param file', flush=True)\n with open(paramfile, 'r') as f:\n d = json.load(f)\n if verbose:\n print('Setting param ranges', flush=True)\n t = np.arange(d['tmin'], d['tmax'], d['tstep'])\n if d['wlog']:\n widths = np.logspace(d['wmin'], d['wmax'], d['wnum'])\n else:\n widths = np.linspace(d['wmin'], d['wmax'], d['wnum'])\n nparams = len(widths)\n radii = 0.1 * np.ones(nparams)\n incs = 90 * np.ones(nparams)\n u = ['0.1 0.3'] * nparams\n ld = ['quadratic'] * nparams\n per = 100 * np.ones(nparams)\n t0 = np.zeros(nparams)\n e = np.zeros(nparams)\n w = np.zeros(nparams)\n curveID = ['curve{}'.format(i) for i in range(nparams)]\n cols = [curveID, radii, incs, widths, per, u, ld, t0, e, w]\n colnames = ['curveID', 'rp', 'i', 'width', 'per', 'u', 'ld', 't0', 'e', 'w'\n ]\n batmanParams = tbl.Table(cols, names=colnames)\n if verbose:\n print('Generating curves', flush=True)\n start = time()\n batmanDict = {'times': t}\n err = 0\n for i in range(len(batmanParams)):\n p = batmanParams[i]\n cID = p['curveID']\n c = make_lightcurve(p['t0'], p['rp'], p['i'], p['per'], p['width'],\n p['ld'], [float(val) for val in p['u'].split()], t)\n if norm:\n cmax = np.max(c)\n cmin = np.min(c)\n c = (c - cmin) / (cmax - cmin)\n c = 1 - c\n c = c / np.sum(c)\n c = 1 - c\n if np.isnan(c).any() or sum(c == 1) < 5:\n print('Batman {} failed'.format(cID), flush=True)\n err += 1\n continue\n batmanDict[cID] = c\n if verbose and i % 100 == 0:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(i + 1 - err,\n nparams, elapsed), flush=True)\n batmanCurves = tbl.Table(batmanDict)\n if verbose:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(nparams - err,\n nparams, elapsed), flush=True)\n if write:\n if verbose:\n start = time()\n print('Writing files', flush=True)\n ast.io.ascii.write(batmanParams, d['params_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote params to {}'.format(d['params_fname']))\n ast.io.ascii.write(batmanCurves, d['curves_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote curves to {}'.format(d['curves_fname']))\n elapsed = time() - start\n print('Wrote files in {} s'.format(elapsed), flush=True)\n return batmanParams, batmanCurves\n\n\ndef read_batman(batmancurves_file):\n \"\"\"\n Return times, cureve name, and batman curves from a batmanCurves file.\n \n Parameters\n ----------\n batmancurves_file (str): Path to a batmanCurves file\n\n Return\n ------\n times (numpy Array): The times array (x axis) of all batmanCurves\n curve_names (numpy Array): The name of each batmanCurve\n batmanCurves (astropy Table): The table of batmanCurves\n \"\"\"\n print('Reading batmanCurves from {}...'.format(batmancurves_file))\n batmanCurves = ast.io.ascii.read(batmancurves_file, data_start=1,\n format='csv')\n times = np.array(batmanCurves['times'])\n curve_names = np.array(batmanCurves.colnames[1:])\n return times, curve_names, batmanCurves\n\n\ndef read_tess(tess_dir, sector_name, start=0, end=None):\n \"\"\"\n Return list of tess .fits files in tess_dir from [start:end]. 
Default\n to all fits files in directory if start and end are not specified.\n\n Parameters\n ----------\n tess_dir (str): path to tess data directory\n sector_name (str): name of sector subdirectory (e.g. Sector1)\n start (int): (Optional) Index of file in directory to start at\n end (int): (Optional) Index of file to end at\n \n Return\n ------\n tess_names (list): List of file paths to tess .fits data\n \"\"\"\n print('Reading TESS from {}, s:{}, e:{}...'.format(sector_name, start, end)\n )\n sector_path = p.join(tess_dir, sector_name)\n sector_files = glob.glob(p.join(sector_path, '*.fits'))\n tess_names = sector_files[start:end]\n return tess_names\n\n\ndef open_tess_fits(tess_fpath, norm=False):\n try:\n with ast.io.fits.open(tess_fpath, mode='readonly') as hdulist:\n hdr = hdulist[0].header\n tess_time = hdulist[1].data['TIME']\n tess_flux = hdulist[1].data['PDCSAP_FLUX']\n med = np.nanmedian(tess_flux)\n tess_flux[np.isnan(tess_flux)] = med\n if norm:\n tmin = np.min(tess_flux)\n tmax = np.max(tess_flux)\n tess_flux = (tess_flux - tmin) / (tmax - tmin)\n except Exception as e:\n print('ERROR reading file: ', tess_fpath, ' with error: ', e, flush\n =True)\n return None, None\n return tess_time, tess_flux\n\n\ndef convolve(tess_time, tess_flux, batmanCurves, curve_names, num_keep=10,\n plot=False):\n conv_start = time()\n curves = []\n times = np.zeros(num_keep)\n convs = np.zeros(num_keep)\n print('Starting convolutions...', flush=True)\n for i, curvename in enumerate(curve_names):\n batman_curve = batmanCurves[curvename]\n conv = np.abs(sig.fftconvolve(1 - tess_flux, 1 - batman_curve, 'same'))\n ind_max = np.argmax(conv)\n conv_max = conv[ind_max]\n if num_keep < len(curve_names):\n if conv_max > convs[-1]:\n ind = np.searchsorted(-convs, -conv_max)\n curves = curves[:ind] + [curvename] + curves[ind:-1]\n times = np.insert(times, ind, tess_time[ind_max])[:-1]\n convs = np.insert(convs, ind, conv_max)[:-1]\n else:\n curves.append(curvename)\n times[i] = tess_time[ind_max]\n convs[i] = conv_max\n if plot:\n plt.plot(tess_time, conv, label=curvename)\n conv_time = time() - conv_start\n print('Convolved {} curves in {:.3} s'.format(len(curve_names),\n conv_time), flush=True)\n return curves, times, convs\n\n\ndef tbconvolve(tess_dir, batman_dir, batman_suffix, sector, start, end,\n output_dir, num_keep=10, norm_tess=False, write=True, writechunk=10,\n verbosity=0):\n \"\"\"\n \n Parameters\n ----------\n tess_dir(str): directory to TESS data\n batman_dir (str): directory to model data\n batman_suffix(str): suffix to append to barmanCurves file (e.g. 
_small)\n sector (int): sector to pull data from\n start (int): file to start at\n end (int): file to end at\n output_dir (str): directory to write candidates.csv\n \"\"\"\n tconv_start = time()\n print('===START TCONVOLVE===', flush=True)\n tess_dir = p.abspath(tess_dir)\n batman_dir = p.abspath(batman_dir)\n output_dir = p.abspath(output_dir)\n sector_name = 'Sector{}'.format(sector)\n if sector == 0:\n sector_name = 'sample_' + sector_name\n tess_names = read_tess(tess_dir, sector_name, start, end)\n ntess = len(tess_names)\n print('Found {} TESS files to process'.format(ntess), flush=True)\n if ntess < 1:\n print('No tess curves found, quitting....')\n return None\n batmanCurves_file = p.join(batman_dir, 'batmanCurves{}.csv'.format(\n batman_suffix))\n times, curve_names, batmanCurves = read_batman(batmanCurves_file)\n nbatman = len(curve_names)\n print('Found {} Batman curves'.format(nbatman), flush=True)\n if ntess < 1:\n print('No batman curves found, quitting....')\n return None\n params = pd.read_csv(p.join(batman_dir, 'batmanParams{}.csv'.format(\n batman_suffix)))\n colnames = ['sector', 'tessFile', 'curveID', 'tcorr', 'correlation',\n 'chisq']\n d = {key: [] for key in colnames}\n s = 0\n nerr = 0\n for tind, tess_fpath in enumerate(tess_names):\n tess_start = time()\n tess_fname = p.basename(tess_fpath)\n print('Starting TESS file: {}'.format(tess_fname), flush=True)\n tess_time, tess_flux = open_tess_fits(tess_fpath, norm_tess)\n if tess_time is None:\n nerr += 1\n continue\n if num_keep < 1:\n num_keep = len(curve_names)\n curves, times, convs = convolve(tess_time, tess_flux, batmanCurves,\n curve_names, num_keep)\n d['sector'].extend([sector_name] * num_keep)\n d['tessFile'].extend([tess_fname] * num_keep)\n d['curveID'].extend(curves)\n d['tcorr'].extend(times)\n d['correlation'].extend(convs)\n d['chisq'].extend(get_chi_sq(tess_time, tess_flux, times, params))\n print(len(d['tcorr']), len(d['chisq']))\n if write:\n if tind % writechunk == writechunk - 1 or tind == len(tess_names\n ) - 1:\n e = start + tind\n outname = 'candidates_sector{}_s{}_e{}.csv'.format(sector, s, e\n )\n outpath = p.join(output_dir, outname)\n candidates = tbl.Table(d, names=colnames)\n ast.io.ascii.write(candidates, outpath, format='csv',\n overwrite=True, comment='#', fast_writer=False)\n print('Wrote file {} at {} s'.format(outname, time() -\n tess_start), flush=True)\n s = e + 1\n candidates = tbl.Table(d, names=colnames)\n cdf = pd.DataFrame.from_dict(d)\n cdf = cdf[colnames]\n df = pd.merge(cdf, params, on='curveID', how='left')\n df.to_csv(p.join(output_dir, 'chisq{}.csv'.format(batman_suffix)))\n tconv_time = time() - tconv_start\n print('Convolved {}/{} tess files with {} curves in {:.3} s'.format(\n ntess - nerr, ntess, nbatman, tconv_time), flush=True)\n print('===END TCONVOLVE===', flush=True)\n return candidates\n\n\ndef get_chi_sq(tess_time, tess_flux, tcorr, params):\n current_fname = ''\n chi_squared = []\n arr = tess_flux / np.nanmedian(tess_flux)\n arr[np.isnan(arr)] = np.nanmedian(arr)\n arr[arr == 0] = np.nanmedian(arr)\n mu, std = stat.norm.fit(1 / arr)\n peaks, _ = sig.find_peaks(1 / arr, height=mu + 4 * std, distance=1000)\n p = np.diff(tess_time[peaks])\n PER = np.mean(p)\n u_type = 'quadratic'\n u_param = [0.1, 0.3]\n t = tess_time - tess_time[0]\n outcounts = np.nan_to_num(tess_flux[tess_flux > np.nanmean(tess_flux)])\n mu, sigma = stat.norm.fit(outcounts)\n normalized_fluxes = tess_flux / mu\n normalized_sigma = np.sqrt(tess_flux) / mu\n for i, row in params.iterrows():\n 
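# (Editor's note, hedged) PER above is np.mean(np.diff(tess_time[peaks]));\n # if find_peaks returns fewer than two peaks this evaluates to nan and\n # propagates into every chi-square computed below.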
T0 = tcorr[i] - tess_time[0]\n RP = row['rp']\n INC = row['i']\n width = row['width']\n chi_squared.append(np.nansum((normalized_fluxes - make_lightcurve(\n T0, RP, INC, PER, width, u_type, u_param, t)) ** 2 / \n normalized_sigma ** 2 / 8))\n return chi_squared\n\n\ndef main():\n import argparse\n parser = argparse.ArgumentParser()\n parser.add_argument('tess_dir', type=str)\n parser.add_argument('batman_dir', type=str)\n parser.add_argument('sector', type=int)\n parser.add_argument('start', type=int)\n parser.add_argument('end', type=int)\n parser.add_argument('output_dir', type=str)\n parser.add_argument('batman_suffix', type=str, default='')\n parser.add_argument('-v', '--verbosity', default=False, action=\n 'store_true', help='Print console output')\n args = parser.parse_args()\n tbconvolve(args.tess_dir, args.batman_dir, args.batman_suffix, args.\n sector, args.start, args.end, args.output_dir, num_keep=-1,\n norm_tess=True, verbosity=args.verbosity)\n\n\n<code token>\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef make_lightcurve(t0, r, i, p, width, u_type, u_param, t):\n \"\"\"\n Generate a batman lightcurve with the given parameters.\n \n Parameters\n ----------\n t0 (num): time of inferior conjunction\n r (num): planet radius (in stellar radii)\n i (num): orbital inclination (in degrees)\n p (num): orbital period\n width (num): width parameter (defined as a**3/p**2)\n u_type (str): limb darkening model\n u_param (list): parameters for limb darkening\n \n t: timesteps that you want the fluxes at\n \n assume circular orbit\n \"\"\"\n params = batman.TransitParams()\n params.rp = r\n params.inc = i\n params.w = 0\n params.ecc = 0\n params.per = p\n params.t0 = t0\n params.a = (width * p ** 2) ** (1 / 3)\n params.limb_dark = u_type\n params.u = u_param\n model = batman.TransitModel(params, t)\n flux = model.light_curve(params)\n return flux\n\n\ndef make_batman(paramfile, outdir, norm=False, write=True, verbose=True):\n \"\"\" \n Return astropy tables of batman params and generated curves based on the\n parameters given in paramfile. 
\n\n Parameters\n ----------\n paramfile (str): path to JSON param file written by make_batman_config\n outdir (str): path to write output curve and param files\n norm (bool): normalize curves to unit integrated area\n write (bool): write param and curve tables to files\n verbose (bool): print logging and timing info\n \"\"\"\n if verbose:\n print('Reading param file', flush=True)\n with open(paramfile, 'r') as f:\n d = json.load(f)\n if verbose:\n print('Setting param ranges', flush=True)\n t = np.arange(d['tmin'], d['tmax'], d['tstep'])\n if d['wlog']:\n widths = np.logspace(d['wmin'], d['wmax'], d['wnum'])\n else:\n widths = np.linspace(d['wmin'], d['wmax'], d['wnum'])\n nparams = len(widths)\n radii = 0.1 * np.ones(nparams)\n incs = 90 * np.ones(nparams)\n u = ['0.1 0.3'] * nparams\n ld = ['quadratic'] * nparams\n per = 100 * np.ones(nparams)\n t0 = np.zeros(nparams)\n e = np.zeros(nparams)\n w = np.zeros(nparams)\n curveID = ['curve{}'.format(i) for i in range(nparams)]\n cols = [curveID, radii, incs, widths, per, u, ld, t0, e, w]\n colnames = ['curveID', 'rp', 'i', 'width', 'per', 'u', 'ld', 't0', 'e', 'w'\n ]\n batmanParams = tbl.Table(cols, names=colnames)\n if verbose:\n print('Generating curves', flush=True)\n start = time()\n batmanDict = {'times': t}\n err = 0\n for i in range(len(batmanParams)):\n p = batmanParams[i]\n cID = p['curveID']\n c = make_lightcurve(p['t0'], p['rp'], p['i'], p['per'], p['width'],\n p['ld'], [float(val) for val in p['u'].split()], t)\n if norm:\n cmax = np.max(c)\n cmin = np.min(c)\n c = (c - cmin) / (cmax - cmin)\n c = 1 - c\n c = c / np.sum(c)\n c = 1 - c\n if np.isnan(c).any() or sum(c == 1) < 5:\n print('Batman {} failed'.format(cID), flush=True)\n err += 1\n continue\n batmanDict[cID] = c\n if verbose and i % 100 == 0:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(i + 1 - err,\n nparams, elapsed), flush=True)\n batmanCurves = tbl.Table(batmanDict)\n if verbose:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(nparams - err,\n nparams, elapsed), flush=True)\n if write:\n if verbose:\n start = time()\n print('Writing files', flush=True)\n ast.io.ascii.write(batmanParams, d['params_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote params to {}'.format(d['params_fname']))\n ast.io.ascii.write(batmanCurves, d['curves_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote curves to {}'.format(d['curves_fname']))\n elapsed = time() - start\n print('Wrote files in {} s'.format(elapsed), flush=True)\n return batmanParams, batmanCurves\n\n\ndef read_batman(batmancurves_file):\n \"\"\"\n Return times, cureve name, and batman curves from a batmanCurves file.\n \n Parameters\n ----------\n batmancurves_file (str): Path to a batmanCurves file\n\n Return\n ------\n times (numpy Array): The times array (x axis) of all batmanCurves\n curve_names (numpy Array): The name of each batmanCurve\n batmanCurves (astropy Table): The table of batmanCurves\n \"\"\"\n print('Reading batmanCurves from {}...'.format(batmancurves_file))\n batmanCurves = ast.io.ascii.read(batmancurves_file, data_start=1,\n format='csv')\n times = np.array(batmanCurves['times'])\n curve_names = np.array(batmanCurves.colnames[1:])\n return times, curve_names, batmanCurves\n\n\ndef read_tess(tess_dir, sector_name, start=0, end=None):\n \"\"\"\n Return list of tess .fits files in tess_dir from [start:end]. 
Default\n to all fits files in directory if start and end are not specified.\n\n Parameters\n ----------\n tess_dir (str): path to tess data directory\n sector_name (str): name of sector subdirectory (e.g. Sector1)\n start (int): (Optional) Index of file in directory to start at\n end (int): (Optional) Index of file to end at\n \n Return\n ------\n tess_names (list): List of file paths to tess .fits data\n \"\"\"\n print('Reading TESS from {}, s:{}, e:{}...'.format(sector_name, start, end)\n )\n sector_path = p.join(tess_dir, sector_name)\n sector_files = glob.glob(p.join(sector_path, '*.fits'))\n tess_names = sector_files[start:end]\n return tess_names\n\n\ndef open_tess_fits(tess_fpath, norm=False):\n try:\n with ast.io.fits.open(tess_fpath, mode='readonly') as hdulist:\n hdr = hdulist[0].header\n tess_time = hdulist[1].data['TIME']\n tess_flux = hdulist[1].data['PDCSAP_FLUX']\n med = np.nanmedian(tess_flux)\n tess_flux[np.isnan(tess_flux)] = med\n if norm:\n tmin = np.min(tess_flux)\n tmax = np.max(tess_flux)\n tess_flux = (tess_flux - tmin) / (tmax - tmin)\n except Exception as e:\n print('ERROR reading file: ', tess_fpath, ' with error: ', e, flush\n =True)\n return None, None\n return tess_time, tess_flux\n\n\ndef convolve(tess_time, tess_flux, batmanCurves, curve_names, num_keep=10,\n plot=False):\n conv_start = time()\n curves = []\n times = np.zeros(num_keep)\n convs = np.zeros(num_keep)\n print('Starting convolutions...', flush=True)\n for i, curvename in enumerate(curve_names):\n batman_curve = batmanCurves[curvename]\n conv = np.abs(sig.fftconvolve(1 - tess_flux, 1 - batman_curve, 'same'))\n ind_max = np.argmax(conv)\n conv_max = conv[ind_max]\n if num_keep < len(curve_names):\n if conv_max > convs[-1]:\n ind = np.searchsorted(-convs, -conv_max)\n curves = curves[:ind] + [curvename] + curves[ind:-1]\n times = np.insert(times, ind, tess_time[ind_max])[:-1]\n convs = np.insert(convs, ind, conv_max)[:-1]\n else:\n curves.append(curvename)\n times[i] = tess_time[ind_max]\n convs[i] = conv_max\n if plot:\n plt.plot(tess_time, conv, label=curvename)\n conv_time = time() - conv_start\n print('Convolved {} curves in {:.3} s'.format(len(curve_names),\n conv_time), flush=True)\n return curves, times, convs\n\n\ndef tbconvolve(tess_dir, batman_dir, batman_suffix, sector, start, end,\n output_dir, num_keep=10, norm_tess=False, write=True, writechunk=10,\n verbosity=0):\n \"\"\"\n \n Parameters\n ----------\n tess_dir(str): directory to TESS data\n batman_dir (str): directory to model data\n batman_suffix(str): suffix to append to barmanCurves file (e.g. 
_small)\n sector (int): sector to pull data from\n start (int): file to start at\n end (int): file to end at\n output_dir (str): directory to write candidates.csv\n \"\"\"\n tconv_start = time()\n print('===START TCONVOLVE===', flush=True)\n tess_dir = p.abspath(tess_dir)\n batman_dir = p.abspath(batman_dir)\n output_dir = p.abspath(output_dir)\n sector_name = 'Sector{}'.format(sector)\n if sector == 0:\n sector_name = 'sample_' + sector_name\n tess_names = read_tess(tess_dir, sector_name, start, end)\n ntess = len(tess_names)\n print('Found {} TESS files to process'.format(ntess), flush=True)\n if ntess < 1:\n print('No tess curves found, quitting....')\n return None\n batmanCurves_file = p.join(batman_dir, 'batmanCurves{}.csv'.format(\n batman_suffix))\n times, curve_names, batmanCurves = read_batman(batmanCurves_file)\n nbatman = len(curve_names)\n print('Found {} Batman curves'.format(nbatman), flush=True)\n if ntess < 1:\n print('No batman curves found, quitting....')\n return None\n params = pd.read_csv(p.join(batman_dir, 'batmanParams{}.csv'.format(\n batman_suffix)))\n colnames = ['sector', 'tessFile', 'curveID', 'tcorr', 'correlation',\n 'chisq']\n d = {key: [] for key in colnames}\n s = 0\n nerr = 0\n for tind, tess_fpath in enumerate(tess_names):\n tess_start = time()\n tess_fname = p.basename(tess_fpath)\n print('Starting TESS file: {}'.format(tess_fname), flush=True)\n tess_time, tess_flux = open_tess_fits(tess_fpath, norm_tess)\n if tess_time is None:\n nerr += 1\n continue\n if num_keep < 1:\n num_keep = len(curve_names)\n curves, times, convs = convolve(tess_time, tess_flux, batmanCurves,\n curve_names, num_keep)\n d['sector'].extend([sector_name] * num_keep)\n d['tessFile'].extend([tess_fname] * num_keep)\n d['curveID'].extend(curves)\n d['tcorr'].extend(times)\n d['correlation'].extend(convs)\n d['chisq'].extend(get_chi_sq(tess_time, tess_flux, times, params))\n print(len(d['tcorr']), len(d['chisq']))\n if write:\n if tind % writechunk == writechunk - 1 or tind == len(tess_names\n ) - 1:\n e = start + tind\n outname = 'candidates_sector{}_s{}_e{}.csv'.format(sector, s, e\n )\n outpath = p.join(output_dir, outname)\n candidates = tbl.Table(d, names=colnames)\n ast.io.ascii.write(candidates, outpath, format='csv',\n overwrite=True, comment='#', fast_writer=False)\n print('Wrote file {} at {} s'.format(outname, time() -\n tess_start), flush=True)\n s = e + 1\n candidates = tbl.Table(d, names=colnames)\n cdf = pd.DataFrame.from_dict(d)\n cdf = cdf[colnames]\n df = pd.merge(cdf, params, on='curveID', how='left')\n df.to_csv(p.join(output_dir, 'chisq{}.csv'.format(batman_suffix)))\n tconv_time = time() - tconv_start\n print('Convolved {}/{} tess files with {} curves in {:.3} s'.format(\n ntess - nerr, ntess, nbatman, tconv_time), flush=True)\n print('===END TCONVOLVE===', flush=True)\n return candidates\n\n\ndef get_chi_sq(tess_time, tess_flux, tcorr, params):\n current_fname = ''\n chi_squared = []\n arr = tess_flux / np.nanmedian(tess_flux)\n arr[np.isnan(arr)] = np.nanmedian(arr)\n arr[arr == 0] = np.nanmedian(arr)\n mu, std = stat.norm.fit(1 / arr)\n peaks, _ = sig.find_peaks(1 / arr, height=mu + 4 * std, distance=1000)\n p = np.diff(tess_time[peaks])\n PER = np.mean(p)\n u_type = 'quadratic'\n u_param = [0.1, 0.3]\n t = tess_time - tess_time[0]\n outcounts = np.nan_to_num(tess_flux[tess_flux > np.nanmean(tess_flux)])\n mu, sigma = stat.norm.fit(outcounts)\n normalized_fluxes = tess_flux / mu\n normalized_sigma = np.sqrt(tess_flux) / mu\n for i, row in params.iterrows():\n 
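# (Editor's note, hedged) u_type and u_param above are hard-coded to the same\n # quadratic limb-darkening law the curve generator uses ('quadratic',\n # [0.1, 0.3]) rather than being read from the params table.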
T0 = tcorr[i] - tess_time[0]\n RP = row['rp']\n INC = row['i']\n width = row['width']\n chi_squared.append(np.nansum((normalized_fluxes - make_lightcurve(\n T0, RP, INC, PER, width, u_type, u_param, t)) ** 2 / \n normalized_sigma ** 2 / 8))\n return chi_squared\n\n\n<function token>\n<code token>\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef make_lightcurve(t0, r, i, p, width, u_type, u_param, t):\n \"\"\"\n Generate a batman lightcurve with the given parameters.\n \n Parameters\n ----------\n t0 (num): time of inferior conjunction\n r (num): planet radius (in stellar radii)\n i (num): orbital inclination (in degrees)\n p (num): orbital period\n width (num): width parameter (defined as a**3/p**2)\n u_type (str): limb darkening model\n u_param (list): parameters for limb darkening\n \n t: timesteps that you want the fluxes at\n \n assume circular orbit\n \"\"\"\n params = batman.TransitParams()\n params.rp = r\n params.inc = i\n params.w = 0\n params.ecc = 0\n params.per = p\n params.t0 = t0\n params.a = (width * p ** 2) ** (1 / 3)\n params.limb_dark = u_type\n params.u = u_param\n model = batman.TransitModel(params, t)\n flux = model.light_curve(params)\n return flux\n\n\ndef make_batman(paramfile, outdir, norm=False, write=True, verbose=True):\n \"\"\" \n Return astropy tables of batman params and generated curves based on the\n parameters given in paramfile. \n\n Parameters\n ----------\n paramfile (str): path to JSON param file written by make_batman_config\n outdir (str): path to write output curve and param files\n norm (bool): normalize curves to unit integrated area\n write (bool): write param and curve tables to files\n verbose (bool): print logging and timing info\n \"\"\"\n if verbose:\n print('Reading param file', flush=True)\n with open(paramfile, 'r') as f:\n d = json.load(f)\n if verbose:\n print('Setting param ranges', flush=True)\n t = np.arange(d['tmin'], d['tmax'], d['tstep'])\n if d['wlog']:\n widths = np.logspace(d['wmin'], d['wmax'], d['wnum'])\n else:\n widths = np.linspace(d['wmin'], d['wmax'], d['wnum'])\n nparams = len(widths)\n radii = 0.1 * np.ones(nparams)\n incs = 90 * np.ones(nparams)\n u = ['0.1 0.3'] * nparams\n ld = ['quadratic'] * nparams\n per = 100 * np.ones(nparams)\n t0 = np.zeros(nparams)\n e = np.zeros(nparams)\n w = np.zeros(nparams)\n curveID = ['curve{}'.format(i) for i in range(nparams)]\n cols = [curveID, radii, incs, widths, per, u, ld, t0, e, w]\n colnames = ['curveID', 'rp', 'i', 'width', 'per', 'u', 'ld', 't0', 'e', 'w'\n ]\n batmanParams = tbl.Table(cols, names=colnames)\n if verbose:\n print('Generating curves', flush=True)\n start = time()\n batmanDict = {'times': t}\n err = 0\n for i in range(len(batmanParams)):\n p = batmanParams[i]\n cID = p['curveID']\n c = make_lightcurve(p['t0'], p['rp'], p['i'], p['per'], p['width'],\n p['ld'], [float(val) for val in p['u'].split()], t)\n if norm:\n cmax = np.max(c)\n cmin = np.min(c)\n c = (c - cmin) / (cmax - cmin)\n c = 1 - c\n c = c / np.sum(c)\n c = 1 - c\n if np.isnan(c).any() or sum(c == 1) < 5:\n print('Batman {} failed'.format(cID), flush=True)\n err += 1\n continue\n batmanDict[cID] = c\n if verbose and i % 100 == 0:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(i + 1 - err,\n nparams, elapsed), flush=True)\n batmanCurves = tbl.Table(batmanDict)\n if verbose:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(nparams - err,\n nparams, elapsed), flush=True)\n if write:\n if verbose:\n start = time()\n print('Writing 
files', flush=True)\n ast.io.ascii.write(batmanParams, d['params_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote params to {}'.format(d['params_fname']))\n ast.io.ascii.write(batmanCurves, d['curves_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote curves to {}'.format(d['curves_fname']))\n elapsed = time() - start\n print('Wrote files in {} s'.format(elapsed), flush=True)\n return batmanParams, batmanCurves\n\n\ndef read_batman(batmancurves_file):\n \"\"\"\n Return times, cureve name, and batman curves from a batmanCurves file.\n \n Parameters\n ----------\n batmancurves_file (str): Path to a batmanCurves file\n\n Return\n ------\n times (numpy Array): The times array (x axis) of all batmanCurves\n curve_names (numpy Array): The name of each batmanCurve\n batmanCurves (astropy Table): The table of batmanCurves\n \"\"\"\n print('Reading batmanCurves from {}...'.format(batmancurves_file))\n batmanCurves = ast.io.ascii.read(batmancurves_file, data_start=1,\n format='csv')\n times = np.array(batmanCurves['times'])\n curve_names = np.array(batmanCurves.colnames[1:])\n return times, curve_names, batmanCurves\n\n\n<function token>\n\n\ndef open_tess_fits(tess_fpath, norm=False):\n try:\n with ast.io.fits.open(tess_fpath, mode='readonly') as hdulist:\n hdr = hdulist[0].header\n tess_time = hdulist[1].data['TIME']\n tess_flux = hdulist[1].data['PDCSAP_FLUX']\n med = np.nanmedian(tess_flux)\n tess_flux[np.isnan(tess_flux)] = med\n if norm:\n tmin = np.min(tess_flux)\n tmax = np.max(tess_flux)\n tess_flux = (tess_flux - tmin) / (tmax - tmin)\n except Exception as e:\n print('ERROR reading file: ', tess_fpath, ' with error: ', e, flush\n =True)\n return None, None\n return tess_time, tess_flux\n\n\ndef convolve(tess_time, tess_flux, batmanCurves, curve_names, num_keep=10,\n plot=False):\n conv_start = time()\n curves = []\n times = np.zeros(num_keep)\n convs = np.zeros(num_keep)\n print('Starting convolutions...', flush=True)\n for i, curvename in enumerate(curve_names):\n batman_curve = batmanCurves[curvename]\n conv = np.abs(sig.fftconvolve(1 - tess_flux, 1 - batman_curve, 'same'))\n ind_max = np.argmax(conv)\n conv_max = conv[ind_max]\n if num_keep < len(curve_names):\n if conv_max > convs[-1]:\n ind = np.searchsorted(-convs, -conv_max)\n curves = curves[:ind] + [curvename] + curves[ind:-1]\n times = np.insert(times, ind, tess_time[ind_max])[:-1]\n convs = np.insert(convs, ind, conv_max)[:-1]\n else:\n curves.append(curvename)\n times[i] = tess_time[ind_max]\n convs[i] = conv_max\n if plot:\n plt.plot(tess_time, conv, label=curvename)\n conv_time = time() - conv_start\n print('Convolved {} curves in {:.3} s'.format(len(curve_names),\n conv_time), flush=True)\n return curves, times, convs\n\n\ndef tbconvolve(tess_dir, batman_dir, batman_suffix, sector, start, end,\n output_dir, num_keep=10, norm_tess=False, write=True, writechunk=10,\n verbosity=0):\n \"\"\"\n \n Parameters\n ----------\n tess_dir(str): directory to TESS data\n batman_dir (str): directory to model data\n batman_suffix(str): suffix to append to barmanCurves file (e.g. 
_small)\n sector (int): sector to pull data from\n start (int): file to start at\n end (int): file to end at\n output_dir (str): directory to write candidates.csv\n \"\"\"\n tconv_start = time()\n print('===START TCONVOLVE===', flush=True)\n tess_dir = p.abspath(tess_dir)\n batman_dir = p.abspath(batman_dir)\n output_dir = p.abspath(output_dir)\n sector_name = 'Sector{}'.format(sector)\n if sector == 0:\n sector_name = 'sample_' + sector_name\n tess_names = read_tess(tess_dir, sector_name, start, end)\n ntess = len(tess_names)\n print('Found {} TESS files to process'.format(ntess), flush=True)\n if ntess < 1:\n print('No tess curves found, quitting....')\n return None\n batmanCurves_file = p.join(batman_dir, 'batmanCurves{}.csv'.format(\n batman_suffix))\n times, curve_names, batmanCurves = read_batman(batmanCurves_file)\n nbatman = len(curve_names)\n print('Found {} Batman curves'.format(nbatman), flush=True)\n if ntess < 1:\n print('No batman curves found, quitting....')\n return None\n params = pd.read_csv(p.join(batman_dir, 'batmanParams{}.csv'.format(\n batman_suffix)))\n colnames = ['sector', 'tessFile', 'curveID', 'tcorr', 'correlation',\n 'chisq']\n d = {key: [] for key in colnames}\n s = 0\n nerr = 0\n for tind, tess_fpath in enumerate(tess_names):\n tess_start = time()\n tess_fname = p.basename(tess_fpath)\n print('Starting TESS file: {}'.format(tess_fname), flush=True)\n tess_time, tess_flux = open_tess_fits(tess_fpath, norm_tess)\n if tess_time is None:\n nerr += 1\n continue\n if num_keep < 1:\n num_keep = len(curve_names)\n curves, times, convs = convolve(tess_time, tess_flux, batmanCurves,\n curve_names, num_keep)\n d['sector'].extend([sector_name] * num_keep)\n d['tessFile'].extend([tess_fname] * num_keep)\n d['curveID'].extend(curves)\n d['tcorr'].extend(times)\n d['correlation'].extend(convs)\n d['chisq'].extend(get_chi_sq(tess_time, tess_flux, times, params))\n print(len(d['tcorr']), len(d['chisq']))\n if write:\n if tind % writechunk == writechunk - 1 or tind == len(tess_names\n ) - 1:\n e = start + tind\n outname = 'candidates_sector{}_s{}_e{}.csv'.format(sector, s, e\n )\n outpath = p.join(output_dir, outname)\n candidates = tbl.Table(d, names=colnames)\n ast.io.ascii.write(candidates, outpath, format='csv',\n overwrite=True, comment='#', fast_writer=False)\n print('Wrote file {} at {} s'.format(outname, time() -\n tess_start), flush=True)\n s = e + 1\n candidates = tbl.Table(d, names=colnames)\n cdf = pd.DataFrame.from_dict(d)\n cdf = cdf[colnames]\n df = pd.merge(cdf, params, on='curveID', how='left')\n df.to_csv(p.join(output_dir, 'chisq{}.csv'.format(batman_suffix)))\n tconv_time = time() - tconv_start\n print('Convolved {}/{} tess files with {} curves in {:.3} s'.format(\n ntess - nerr, ntess, nbatman, tconv_time), flush=True)\n print('===END TCONVOLVE===', flush=True)\n return candidates\n\n\ndef get_chi_sq(tess_time, tess_flux, tcorr, params):\n current_fname = ''\n chi_squared = []\n arr = tess_flux / np.nanmedian(tess_flux)\n arr[np.isnan(arr)] = np.nanmedian(arr)\n arr[arr == 0] = np.nanmedian(arr)\n mu, std = stat.norm.fit(1 / arr)\n peaks, _ = sig.find_peaks(1 / arr, height=mu + 4 * std, distance=1000)\n p = np.diff(tess_time[peaks])\n PER = np.mean(p)\n u_type = 'quadratic'\n u_param = [0.1, 0.3]\n t = tess_time - tess_time[0]\n outcounts = np.nan_to_num(tess_flux[tess_flux > np.nanmean(tess_flux)])\n mu, sigma = stat.norm.fit(outcounts)\n normalized_fluxes = tess_flux / mu\n normalized_sigma = np.sqrt(tess_flux) / mu\n for i, row in params.iterrows():\n 
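# (Editor's note, hedged) current_fname above is assigned but never used and\n # could be dropped.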
T0 = tcorr[i] - tess_time[0]\n RP = row['rp']\n INC = row['i']\n width = row['width']\n chi_squared.append(np.nansum((normalized_fluxes - make_lightcurve(\n T0, RP, INC, PER, width, u_type, u_param, t)) ** 2 / \n normalized_sigma ** 2 / 8))\n return chi_squared\n\n\n<function token>\n<code token>\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef make_lightcurve(t0, r, i, p, width, u_type, u_param, t):\n \"\"\"\n Generate a batman lightcurve with the given parameters.\n \n Parameters\n ----------\n t0 (num): time of inferior conjunction\n r (num): planet radius (in stellar radii)\n i (num): orbital inclination (in degrees)\n p (num): orbital period\n width (num): width parameter (defined as a**3/p**2)\n u_type (str): limb darkening model\n u_param (list): parameters for limb darkening\n \n t: timesteps that you want the fluxes at\n \n assume circular orbit\n \"\"\"\n params = batman.TransitParams()\n params.rp = r\n params.inc = i\n params.w = 0\n params.ecc = 0\n params.per = p\n params.t0 = t0\n params.a = (width * p ** 2) ** (1 / 3)\n params.limb_dark = u_type\n params.u = u_param\n model = batman.TransitModel(params, t)\n flux = model.light_curve(params)\n return flux\n\n\ndef make_batman(paramfile, outdir, norm=False, write=True, verbose=True):\n \"\"\" \n Return astropy tables of batman params and generated curves based on the\n parameters given in paramfile. \n\n Parameters\n ----------\n paramfile (str): path to JSON param file written by make_batman_config\n outdir (str): path to write output curve and param files\n norm (bool): normalize curves to unit integrated area\n write (bool): write param and curve tables to files\n verbose (bool): print logging and timing info\n \"\"\"\n if verbose:\n print('Reading param file', flush=True)\n with open(paramfile, 'r') as f:\n d = json.load(f)\n if verbose:\n print('Setting param ranges', flush=True)\n t = np.arange(d['tmin'], d['tmax'], d['tstep'])\n if d['wlog']:\n widths = np.logspace(d['wmin'], d['wmax'], d['wnum'])\n else:\n widths = np.linspace(d['wmin'], d['wmax'], d['wnum'])\n nparams = len(widths)\n radii = 0.1 * np.ones(nparams)\n incs = 90 * np.ones(nparams)\n u = ['0.1 0.3'] * nparams\n ld = ['quadratic'] * nparams\n per = 100 * np.ones(nparams)\n t0 = np.zeros(nparams)\n e = np.zeros(nparams)\n w = np.zeros(nparams)\n curveID = ['curve{}'.format(i) for i in range(nparams)]\n cols = [curveID, radii, incs, widths, per, u, ld, t0, e, w]\n colnames = ['curveID', 'rp', 'i', 'width', 'per', 'u', 'ld', 't0', 'e', 'w'\n ]\n batmanParams = tbl.Table(cols, names=colnames)\n if verbose:\n print('Generating curves', flush=True)\n start = time()\n batmanDict = {'times': t}\n err = 0\n for i in range(len(batmanParams)):\n p = batmanParams[i]\n cID = p['curveID']\n c = make_lightcurve(p['t0'], p['rp'], p['i'], p['per'], p['width'],\n p['ld'], [float(val) for val in p['u'].split()], t)\n if norm:\n cmax = np.max(c)\n cmin = np.min(c)\n c = (c - cmin) / (cmax - cmin)\n c = 1 - c\n c = c / np.sum(c)\n c = 1 - c\n if np.isnan(c).any() or sum(c == 1) < 5:\n print('Batman {} failed'.format(cID), flush=True)\n err += 1\n continue\n batmanDict[cID] = c\n if verbose and i % 100 == 0:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(i + 1 - err,\n nparams, elapsed), flush=True)\n batmanCurves = tbl.Table(batmanDict)\n if verbose:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(nparams - err,\n nparams, elapsed), flush=True)\n if write:\n if verbose:\n start = time()\n print('Writing 
files', flush=True)\n ast.io.ascii.write(batmanParams, d['params_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote params to {}'.format(d['params_fname']))\n ast.io.ascii.write(batmanCurves, d['curves_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote curves to {}'.format(d['curves_fname']))\n elapsed = time() - start\n print('Wrote files in {} s'.format(elapsed), flush=True)\n return batmanParams, batmanCurves\n\n\ndef read_batman(batmancurves_file):\n \"\"\"\n Return times, curve names, and batman curves from a batmanCurves file.\n \n Parameters\n ----------\n batmancurves_file (str): Path to a batmanCurves file\n\n Return\n ------\n times (numpy Array): The times array (x axis) of all batmanCurves\n curve_names (numpy Array): The name of each batmanCurve\n batmanCurves (astropy Table): The table of batmanCurves\n \"\"\"\n print('Reading batmanCurves from {}...'.format(batmancurves_file))\n batmanCurves = ast.io.ascii.read(batmancurves_file, data_start=1,\n format='csv')\n times = np.array(batmanCurves['times'])\n curve_names = np.array(batmanCurves.colnames[1:])\n return times, curve_names, batmanCurves\n\n\n<function token>\n\n\ndef open_tess_fits(tess_fpath, norm=False):\n try:\n with ast.io.fits.open(tess_fpath, mode='readonly') as hdulist:\n hdr = hdulist[0].header\n tess_time = hdulist[1].data['TIME']\n tess_flux = hdulist[1].data['PDCSAP_FLUX']\n med = np.nanmedian(tess_flux)\n tess_flux[np.isnan(tess_flux)] = med\n if norm:\n tmin = np.min(tess_flux)\n tmax = np.max(tess_flux)\n tess_flux = (tess_flux - tmin) / (tmax - tmin)\n except Exception as e:\n print('ERROR reading file: ', tess_fpath, ' with error: ', e, flush\n =True)\n return None, None\n return tess_time, tess_flux\n\n\ndef convolve(tess_time, tess_flux, batmanCurves, curve_names, num_keep=10,\n plot=False):\n conv_start = time()\n curves = []\n times = np.zeros(num_keep)\n convs = np.zeros(num_keep)\n print('Starting convolutions...', flush=True)\n for i, curvename in enumerate(curve_names):\n batman_curve = batmanCurves[curvename]\n conv = np.abs(sig.fftconvolve(1 - tess_flux, 1 - batman_curve, 'same'))\n ind_max = np.argmax(conv)\n conv_max = conv[ind_max]\n if num_keep < len(curve_names):\n if conv_max > convs[-1]:\n ind = np.searchsorted(-convs, -conv_max)\n curves = curves[:ind] + [curvename] + curves[ind:-1]\n times = np.insert(times, ind, tess_time[ind_max])[:-1]\n convs = np.insert(convs, ind, conv_max)[:-1]\n else:\n curves.append(curvename)\n times[i] = tess_time[ind_max]\n convs[i] = conv_max\n if plot:\n plt.plot(tess_time, conv, label=curvename)\n conv_time = time() - conv_start\n print('Convolved {} curves in {:.3} s'.format(len(curve_names),\n conv_time), flush=True)\n return curves, times, convs\n\n\ndef tbconvolve(tess_dir, batman_dir, batman_suffix, sector, start, end,\n output_dir, num_keep=10, norm_tess=False, write=True, writechunk=10,\n verbosity=0):\n \"\"\"\n \n Parameters\n ----------\n tess_dir(str): directory to TESS data\n batman_dir (str): directory to model data\n batman_suffix(str): suffix to append to batmanCurves file (e.g. 
_small)\n sector (int): sector to pull data from\n start (int): file to start at\n end (int): file to end at\n output_dir (str): directory to write candidates.csv\n \"\"\"\n tconv_start = time()\n print('===START TCONVOLVE===', flush=True)\n tess_dir = p.abspath(tess_dir)\n batman_dir = p.abspath(batman_dir)\n output_dir = p.abspath(output_dir)\n sector_name = 'Sector{}'.format(sector)\n if sector == 0:\n sector_name = 'sample_' + sector_name\n tess_names = read_tess(tess_dir, sector_name, start, end)\n ntess = len(tess_names)\n print('Found {} TESS files to process'.format(ntess), flush=True)\n if ntess < 1:\n print('No tess curves found, quitting....')\n return None\n batmanCurves_file = p.join(batman_dir, 'batmanCurves{}.csv'.format(\n batman_suffix))\n times, curve_names, batmanCurves = read_batman(batmanCurves_file)\n nbatman = len(curve_names)\n print('Found {} Batman curves'.format(nbatman), flush=True)\n if nbatman < 1:\n print('No batman curves found, quitting....')\n return None\n params = pd.read_csv(p.join(batman_dir, 'batmanParams{}.csv'.format(\n batman_suffix)))\n colnames = ['sector', 'tessFile', 'curveID', 'tcorr', 'correlation',\n 'chisq']\n d = {key: [] for key in colnames}\n s = 0\n nerr = 0\n for tind, tess_fpath in enumerate(tess_names):\n tess_start = time()\n tess_fname = p.basename(tess_fpath)\n print('Starting TESS file: {}'.format(tess_fname), flush=True)\n tess_time, tess_flux = open_tess_fits(tess_fpath, norm_tess)\n if tess_time is None:\n nerr += 1\n continue\n if num_keep < 1:\n num_keep = len(curve_names)\n curves, times, convs = convolve(tess_time, tess_flux, batmanCurves,\n curve_names, num_keep)\n d['sector'].extend([sector_name] * num_keep)\n d['tessFile'].extend([tess_fname] * num_keep)\n d['curveID'].extend(curves)\n d['tcorr'].extend(times)\n d['correlation'].extend(convs)\n d['chisq'].extend(get_chi_sq(tess_time, tess_flux, times, params))\n print(len(d['tcorr']), len(d['chisq']))\n if write:\n if tind % writechunk == writechunk - 1 or tind == len(tess_names\n ) - 1:\n e = start + tind\n outname = 'candidates_sector{}_s{}_e{}.csv'.format(sector, s, e\n )\n outpath = p.join(output_dir, outname)\n candidates = tbl.Table(d, names=colnames)\n ast.io.ascii.write(candidates, outpath, format='csv',\n overwrite=True, comment='#', fast_writer=False)\n print('Wrote file {} at {} s'.format(outname, time() -\n tess_start), flush=True)\n s = e + 1\n candidates = tbl.Table(d, names=colnames)\n cdf = pd.DataFrame.from_dict(d)\n cdf = cdf[colnames]\n df = pd.merge(cdf, params, on='curveID', how='left')\n df.to_csv(p.join(output_dir, 'chisq{}.csv'.format(batman_suffix)))\n tconv_time = time() - tconv_start\n print('Convolved {}/{} tess files with {} curves in {:.3} s'.format(\n ntess - nerr, ntess, nbatman, tconv_time), flush=True)\n print('===END TCONVOLVE===', flush=True)\n return candidates\n\n\n<function token>\n<function token>\n<code token>\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef make_lightcurve(t0, r, i, p, width, u_type, u_param, t):\n \"\"\"\n Generate a batman lightcurve with the given parameters.\n \n Parameters\n ----------\n t0 (num): time of inferior conjunction\n r (num): planet radius (in stellar radii)\n i (num): orbital inclination (in degrees)\n p (num): orbital period\n width (num): width parameter (defined as a**3/p**2)\n u_type (str): limb darkening model\n u_param (list): parameters for limb darkening\n \n t: timesteps that you want the fluxes at\n \n assume circular orbit\n \"\"\"\n params = batman.TransitParams()\n 
params.rp = r\n params.inc = i\n params.w = 0\n params.ecc = 0\n params.per = p\n params.t0 = t0\n params.a = (width * p ** 2) ** (1 / 3)\n params.limb_dark = u_type\n params.u = u_param\n model = batman.TransitModel(params, t)\n flux = model.light_curve(params)\n return flux\n\n\ndef make_batman(paramfile, outdir, norm=False, write=True, verbose=True):\n \"\"\" \n Return astropy tables of batman params and generated curves based on the\n parameters given in paramfile. \n\n Parameters\n ----------\n paramfile (str): path to JSON param file written by make_batman_config\n outdir (str): path to write output curve and param files\n norm (bool): normalize curves to unit integrated area\n write (bool): write param and curve tables to files\n verbose (bool): print logging and timing info\n \"\"\"\n if verbose:\n print('Reading param file', flush=True)\n with open(paramfile, 'r') as f:\n d = json.load(f)\n if verbose:\n print('Setting param ranges', flush=True)\n t = np.arange(d['tmin'], d['tmax'], d['tstep'])\n if d['wlog']:\n widths = np.logspace(d['wmin'], d['wmax'], d['wnum'])\n else:\n widths = np.linspace(d['wmin'], d['wmax'], d['wnum'])\n nparams = len(widths)\n radii = 0.1 * np.ones(nparams)\n incs = 90 * np.ones(nparams)\n u = ['0.1 0.3'] * nparams\n ld = ['quadratic'] * nparams\n per = 100 * np.ones(nparams)\n t0 = np.zeros(nparams)\n e = np.zeros(nparams)\n w = np.zeros(nparams)\n curveID = ['curve{}'.format(i) for i in range(nparams)]\n cols = [curveID, radii, incs, widths, per, u, ld, t0, e, w]\n colnames = ['curveID', 'rp', 'i', 'width', 'per', 'u', 'ld', 't0', 'e', 'w'\n ]\n batmanParams = tbl.Table(cols, names=colnames)\n if verbose:\n print('Generating curves', flush=True)\n start = time()\n batmanDict = {'times': t}\n err = 0\n for i in range(len(batmanParams)):\n p = batmanParams[i]\n cID = p['curveID']\n c = make_lightcurve(p['t0'], p['rp'], p['i'], p['per'], p['width'],\n p['ld'], [float(val) for val in p['u'].split()], t)\n if norm:\n cmax = np.max(c)\n cmin = np.min(c)\n c = (c - cmin) / (cmax - cmin)\n c = 1 - c\n c = c / np.sum(c)\n c = 1 - c\n if np.isnan(c).any() or sum(c == 1) < 5:\n print('Batman {} failed'.format(cID), flush=True)\n err += 1\n continue\n batmanDict[cID] = c\n if verbose and i % 100 == 0:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(i + 1 - err,\n nparams, elapsed), flush=True)\n batmanCurves = tbl.Table(batmanDict)\n if verbose:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(nparams - err,\n nparams, elapsed), flush=True)\n if write:\n if verbose:\n start = time()\n print('Writing files', flush=True)\n ast.io.ascii.write(batmanParams, d['params_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote params to {}'.format(d['params_fname']))\n ast.io.ascii.write(batmanCurves, d['curves_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote curves to {}'.format(d['curves_fname']))\n elapsed = time() - start\n print('Wrote files in {} s'.format(elapsed), flush=True)\n return batmanParams, batmanCurves\n\n\n<function token>\n<function token>\n\n\ndef open_tess_fits(tess_fpath, norm=False):\n try:\n with ast.io.fits.open(tess_fpath, mode='readonly') as hdulist:\n hdr = hdulist[0].header\n tess_time = hdulist[1].data['TIME']\n tess_flux = hdulist[1].data['PDCSAP_FLUX']\n med = np.nanmedian(tess_flux)\n tess_flux[np.isnan(tess_flux)] = med\n if norm:\n tmin = np.min(tess_flux)\n tmax = np.max(tess_flux)\n 
tess_flux = (tess_flux - tmin) / (tmax - tmin)\n except Exception as e:\n print('ERROR reading file: ', tess_fpath, ' with error: ', e, flush\n =True)\n return None, None\n return tess_time, tess_flux\n\n\ndef convolve(tess_time, tess_flux, batmanCurves, curve_names, num_keep=10,\n plot=False):\n conv_start = time()\n curves = []\n times = np.zeros(num_keep)\n convs = np.zeros(num_keep)\n print('Starting convolutions...', flush=True)\n for i, curvename in enumerate(curve_names):\n batman_curve = batmanCurves[curvename]\n conv = np.abs(sig.fftconvolve(1 - tess_flux, 1 - batman_curve, 'same'))\n ind_max = np.argmax(conv)\n conv_max = conv[ind_max]\n if num_keep < len(curve_names):\n if conv_max > convs[-1]:\n ind = np.searchsorted(-convs, -conv_max)\n curves = curves[:ind] + [curvename] + curves[ind:-1]\n times = np.insert(times, ind, tess_time[ind_max])[:-1]\n convs = np.insert(convs, ind, conv_max)[:-1]\n else:\n curves.append(curvename)\n times[i] = tess_time[ind_max]\n convs[i] = conv_max\n if plot:\n plt.plot(tess_time, conv, label=curvename)\n conv_time = time() - conv_start\n print('Convolved {} curves in {:.3} s'.format(len(curve_names),\n conv_time), flush=True)\n return curves, times, convs\n\n\ndef tbconvolve(tess_dir, batman_dir, batman_suffix, sector, start, end,\n output_dir, num_keep=10, norm_tess=False, write=True, writechunk=10,\n verbosity=0):\n \"\"\"\n \n Parameters\n ----------\n tess_dir(str): directory to TESS data\n batman_dir (str): directory to model data\n batman_suffix(str): suffix to append to batmanCurves file (e.g. _small)\n sector (int): sector to pull data from\n start (int): file to start at\n end (int): file to end at\n output_dir (str): directory to write candidates.csv\n \"\"\"\n tconv_start = time()\n print('===START TCONVOLVE===', flush=True)\n tess_dir = p.abspath(tess_dir)\n batman_dir = p.abspath(batman_dir)\n output_dir = p.abspath(output_dir)\n sector_name = 'Sector{}'.format(sector)\n if sector == 0:\n sector_name = 'sample_' + sector_name\n tess_names = read_tess(tess_dir, sector_name, start, end)\n ntess = len(tess_names)\n print('Found {} TESS files to process'.format(ntess), flush=True)\n if ntess < 1:\n print('No tess curves found, quitting....')\n return None\n batmanCurves_file = p.join(batman_dir, 'batmanCurves{}.csv'.format(\n batman_suffix))\n times, curve_names, batmanCurves = read_batman(batmanCurves_file)\n nbatman = len(curve_names)\n print('Found {} Batman curves'.format(nbatman), flush=True)\n if nbatman < 1:\n print('No batman curves found, quitting....')\n return None\n params = pd.read_csv(p.join(batman_dir, 'batmanParams{}.csv'.format(\n batman_suffix)))\n colnames = ['sector', 'tessFile', 'curveID', 'tcorr', 'correlation',\n 'chisq']\n d = {key: [] for key in colnames}\n s = 0\n nerr = 0\n for tind, tess_fpath in enumerate(tess_names):\n tess_start = time()\n tess_fname = p.basename(tess_fpath)\n print('Starting TESS file: {}'.format(tess_fname), flush=True)\n tess_time, tess_flux = open_tess_fits(tess_fpath, norm_tess)\n if tess_time is None:\n nerr += 1\n continue\n if num_keep < 1:\n num_keep = len(curve_names)\n curves, times, convs = convolve(tess_time, tess_flux, batmanCurves,\n curve_names, num_keep)\n d['sector'].extend([sector_name] * num_keep)\n d['tessFile'].extend([tess_fname] * num_keep)\n d['curveID'].extend(curves)\n d['tcorr'].extend(times)\n d['correlation'].extend(convs)\n d['chisq'].extend(get_chi_sq(tess_time, tess_flux, times, params))\n print(len(d['tcorr']), len(d['chisq']))\n if write:\n if tind % 
writechunk == writechunk - 1 or tind == len(tess_names\n ) - 1:\n e = start + tind\n outname = 'candidates_sector{}_s{}_e{}.csv'.format(sector, s, e\n )\n outpath = p.join(output_dir, outname)\n candidates = tbl.Table(d, names=colnames)\n ast.io.ascii.write(candidates, outpath, format='csv',\n overwrite=True, comment='#', fast_writer=False)\n print('Wrote file {} at {} s'.format(outname, time() -\n tess_start), flush=True)\n s = e + 1\n candidates = tbl.Table(d, names=colnames)\n cdf = pd.DataFrame.from_dict(d)\n cdf = cdf[colnames]\n df = pd.merge(cdf, params, on='curveID', how='left')\n df.to_csv(p.join(output_dir, 'chisq{}.csv'.format(batman_suffix)))\n tconv_time = time() - tconv_start\n print('Convolved {}/{} tess files with {} curves in {:.3} s'.format(\n ntess - nerr, ntess, nbatman, tconv_time), flush=True)\n print('===END TCONVOLVE===', flush=True)\n return candidates\n\n\n<function token>\n<function token>\n<code token>\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef make_lightcurve(t0, r, i, p, width, u_type, u_param, t):\n \"\"\"\n Generate a batman lightcurve with the given parameters.\n \n Parameters\n ----------\n t0 (num): time of inferior conjunction\n r (num): planet radius (in stellar radii)\n i (num): orbital inclination (in degrees)\n p (num): orbital period\n width (num): width parameter (defined as a**3/p**2)\n u_type (str): limb darkening model\n u_param (list): parameters for limb darkening\n \n t: timesteps that you want the fluxes at\n \n assume circular orbit\n \"\"\"\n params = batman.TransitParams()\n params.rp = r\n params.inc = i\n params.w = 0\n params.ecc = 0\n params.per = p\n params.t0 = t0\n params.a = (width * p ** 2) ** (1 / 3)\n params.limb_dark = u_type\n params.u = u_param\n model = batman.TransitModel(params, t)\n flux = model.light_curve(params)\n return flux\n\n\ndef make_batman(paramfile, outdir, norm=False, write=True, verbose=True):\n \"\"\" \n Return astropy tables of batman params and generated curves based on the\n parameters given in paramfile. 
\n\n Parameters\n ----------\n paramfile (str): path to JSON param file written by make_batman_config\n outdir (str): path to write output curve and param files\n norm (bool): normalize curves to unit integrated area\n write (bool): write param and curve tables to files\n verbose (bool): print logging and timing info\n \"\"\"\n if verbose:\n print('Reading param file', flush=True)\n with open(paramfile, 'r') as f:\n d = json.load(f)\n if verbose:\n print('Setting param ranges', flush=True)\n t = np.arange(d['tmin'], d['tmax'], d['tstep'])\n if d['wlog']:\n widths = np.logspace(d['wmin'], d['wmax'], d['wnum'])\n else:\n widths = np.linspace(d['wmin'], d['wmax'], d['wnum'])\n nparams = len(widths)\n radii = 0.1 * np.ones(nparams)\n incs = 90 * np.ones(nparams)\n u = ['0.1 0.3'] * nparams\n ld = ['quadratic'] * nparams\n per = 100 * np.ones(nparams)\n t0 = np.zeros(nparams)\n e = np.zeros(nparams)\n w = np.zeros(nparams)\n curveID = ['curve{}'.format(i) for i in range(nparams)]\n cols = [curveID, radii, incs, widths, per, u, ld, t0, e, w]\n colnames = ['curveID', 'rp', 'i', 'width', 'per', 'u', 'ld', 't0', 'e', 'w'\n ]\n batmanParams = tbl.Table(cols, names=colnames)\n if verbose:\n print('Generating curves', flush=True)\n start = time()\n batmanDict = {'times': t}\n err = 0\n for i in range(len(batmanParams)):\n p = batmanParams[i]\n cID = p['curveID']\n c = make_lightcurve(p['t0'], p['rp'], p['i'], p['per'], p['width'],\n p['ld'], [float(val) for val in p['u'].split()], t)\n if norm:\n cmax = np.max(c)\n cmin = np.min(c)\n c = (c - cmin) / (cmax - cmin)\n c = 1 - c\n c = c / np.sum(c)\n c = 1 - c\n if np.isnan(c).any() or sum(c == 1) < 5:\n print('Batman {} failed'.format(cID), flush=True)\n err += 1\n continue\n batmanDict[cID] = c\n if verbose and i % 100 == 0:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(i + 1 - err,\n nparams, elapsed), flush=True)\n batmanCurves = tbl.Table(batmanDict)\n if verbose:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(nparams - err,\n nparams, elapsed), flush=True)\n if write:\n if verbose:\n start = time()\n print('Writing files', flush=True)\n ast.io.ascii.write(batmanParams, d['params_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote params to {}'.format(d['params_fname']))\n ast.io.ascii.write(batmanCurves, d['curves_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote curves to {}'.format(d['curves_fname']))\n elapsed = time() - start\n print('Wrote files in {} s'.format(elapsed), flush=True)\n return batmanParams, batmanCurves\n\n\n<function token>\n<function token>\n\n\ndef open_tess_fits(tess_fpath, norm=False):\n try:\n with ast.io.fits.open(tess_fpath, mode='readonly') as hdulist:\n hdr = hdulist[0].header\n tess_time = hdulist[1].data['TIME']\n tess_flux = hdulist[1].data['PDCSAP_FLUX']\n med = np.nanmedian(tess_flux)\n tess_flux[np.isnan(tess_flux)] = med\n if norm:\n tmin = np.min(tess_flux)\n tmax = np.max(tess_flux)\n tess_flux = (tess_flux - tmin) / (tmax - tmin)\n except Exception as e:\n print('ERROR reading file: ', tess_fpath, ' with error: ', e, flush\n =True)\n return None, None\n return tess_time, tess_flux\n\n\n<function token>\n\n\ndef tbconvolve(tess_dir, batman_dir, batman_suffix, sector, start, end,\n output_dir, num_keep=10, norm_tess=False, write=True, writechunk=10,\n verbosity=0):\n \"\"\"\n \n Parameters\n ----------\n tess_dir(str): directory to TESS data\n batman_dir 
(str): directory to model data\n batman_suffix(str): suffix to append to batmanCurves file (e.g. _small)\n sector (int): sector to pull data from\n start (int): file to start at\n end (int): file to end at\n output_dir (str): directory to write candidates.csv\n \"\"\"\n tconv_start = time()\n print('===START TCONVOLVE===', flush=True)\n tess_dir = p.abspath(tess_dir)\n batman_dir = p.abspath(batman_dir)\n output_dir = p.abspath(output_dir)\n sector_name = 'Sector{}'.format(sector)\n if sector == 0:\n sector_name = 'sample_' + sector_name\n tess_names = read_tess(tess_dir, sector_name, start, end)\n ntess = len(tess_names)\n print('Found {} TESS files to process'.format(ntess), flush=True)\n if ntess < 1:\n print('No tess curves found, quitting....')\n return None\n batmanCurves_file = p.join(batman_dir, 'batmanCurves{}.csv'.format(\n batman_suffix))\n times, curve_names, batmanCurves = read_batman(batmanCurves_file)\n nbatman = len(curve_names)\n print('Found {} Batman curves'.format(nbatman), flush=True)\n if nbatman < 1:\n print('No batman curves found, quitting....')\n return None\n params = pd.read_csv(p.join(batman_dir, 'batmanParams{}.csv'.format(\n batman_suffix)))\n colnames = ['sector', 'tessFile', 'curveID', 'tcorr', 'correlation',\n 'chisq']\n d = {key: [] for key in colnames}\n s = 0\n nerr = 0\n for tind, tess_fpath in enumerate(tess_names):\n tess_start = time()\n tess_fname = p.basename(tess_fpath)\n print('Starting TESS file: {}'.format(tess_fname), flush=True)\n tess_time, tess_flux = open_tess_fits(tess_fpath, norm_tess)\n if tess_time is None:\n nerr += 1\n continue\n if num_keep < 1:\n num_keep = len(curve_names)\n curves, times, convs = convolve(tess_time, tess_flux, batmanCurves,\n curve_names, num_keep)\n d['sector'].extend([sector_name] * num_keep)\n d['tessFile'].extend([tess_fname] * num_keep)\n d['curveID'].extend(curves)\n d['tcorr'].extend(times)\n d['correlation'].extend(convs)\n d['chisq'].extend(get_chi_sq(tess_time, tess_flux, times, params))\n print(len(d['tcorr']), len(d['chisq']))\n if write:\n if tind % writechunk == writechunk - 1 or tind == len(tess_names\n ) - 1:\n e = start + tind\n outname = 'candidates_sector{}_s{}_e{}.csv'.format(sector, s, e\n )\n outpath = p.join(output_dir, outname)\n candidates = tbl.Table(d, names=colnames)\n ast.io.ascii.write(candidates, outpath, format='csv',\n overwrite=True, comment='#', fast_writer=False)\n print('Wrote file {} at {} s'.format(outname, time() -\n tess_start), flush=True)\n s = e + 1\n candidates = tbl.Table(d, names=colnames)\n cdf = pd.DataFrame.from_dict(d)\n cdf = cdf[colnames]\n df = pd.merge(cdf, params, on='curveID', how='left')\n df.to_csv(p.join(output_dir, 'chisq{}.csv'.format(batman_suffix)))\n tconv_time = time() - tconv_start\n print('Convolved {}/{} tess files with {} curves in {:.3} s'.format(\n ntess - nerr, ntess, nbatman, tconv_time), flush=True)\n print('===END TCONVOLVE===', flush=True)\n return candidates\n\n\n<function token>\n<function token>\n<code token>\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef make_lightcurve(t0, r, i, p, width, u_type, u_param, t):\n \"\"\"\n Generate a batman lightcurve with the given parameters.\n \n Parameters\n ----------\n t0 (num): time of inferior conjunction\n r (num): planet radius (in stellar radii)\n i (num): orbital inclination (in degrees)\n p (num): orbital period\n width (num): width parameter (defined as a**3/p**2)\n u_type (str): limb darkening model\n u_param (list): parameters for limb darkening\n \n t: timesteps that 
you want the fluxes at\n \n assume circular orbit\n \"\"\"\n params = batman.TransitParams()\n params.rp = r\n params.inc = i\n params.w = 0\n params.ecc = 0\n params.per = p\n params.t0 = t0\n params.a = (width * p ** 2) ** (1 / 3)\n params.limb_dark = u_type\n params.u = u_param\n model = batman.TransitModel(params, t)\n flux = model.light_curve(params)\n return flux\n\n\ndef make_batman(paramfile, outdir, norm=False, write=True, verbose=True):\n \"\"\" \n Return astropy tables of batman params and generated curves based on the\n parameters given in paramfile. \n\n Parameters\n ----------\n paramfile (str): path to JSON param file written by make_batman_config\n outdir (str): path to write output curve and param files\n norm (bool): normalize curves to unit integrated area\n write (bool): write param and curve tables to files\n verbose (bool): print logging and timing info\n \"\"\"\n if verbose:\n print('Reading param file', flush=True)\n with open(paramfile, 'r') as f:\n d = json.load(f)\n if verbose:\n print('Setting param ranges', flush=True)\n t = np.arange(d['tmin'], d['tmax'], d['tstep'])\n if d['wlog']:\n widths = np.logspace(d['wmin'], d['wmax'], d['wnum'])\n else:\n widths = np.linspace(d['wmin'], d['wmax'], d['wnum'])\n nparams = len(widths)\n radii = 0.1 * np.ones(nparams)\n incs = 90 * np.ones(nparams)\n u = ['0.1 0.3'] * nparams\n ld = ['quadratic'] * nparams\n per = 100 * np.ones(nparams)\n t0 = np.zeros(nparams)\n e = np.zeros(nparams)\n w = np.zeros(nparams)\n curveID = ['curve{}'.format(i) for i in range(nparams)]\n cols = [curveID, radii, incs, widths, per, u, ld, t0, e, w]\n colnames = ['curveID', 'rp', 'i', 'width', 'per', 'u', 'ld', 't0', 'e', 'w'\n ]\n batmanParams = tbl.Table(cols, names=colnames)\n if verbose:\n print('Generating curves', flush=True)\n start = time()\n batmanDict = {'times': t}\n err = 0\n for i in range(len(batmanParams)):\n p = batmanParams[i]\n cID = p['curveID']\n c = make_lightcurve(p['t0'], p['rp'], p['i'], p['per'], p['width'],\n p['ld'], [float(val) for val in p['u'].split()], t)\n if norm:\n cmax = np.max(c)\n cmin = np.min(c)\n c = (c - cmin) / (cmax - cmin)\n c = 1 - c\n c = c / np.sum(c)\n c = 1 - c\n if np.isnan(c).any() or sum(c == 1) < 5:\n print('Batman {} failed'.format(cID), flush=True)\n err += 1\n continue\n batmanDict[cID] = c\n if verbose and i % 100 == 0:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(i + 1 - err,\n nparams, elapsed), flush=True)\n batmanCurves = tbl.Table(batmanDict)\n if verbose:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(nparams - err,\n nparams, elapsed), flush=True)\n if write:\n if verbose:\n start = time()\n print('Writing files', flush=True)\n ast.io.ascii.write(batmanParams, d['params_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote params to {}'.format(d['params_fname']))\n ast.io.ascii.write(batmanCurves, d['curves_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote curves to {}'.format(d['curves_fname']))\n elapsed = time() - start\n print('Wrote files in {} s'.format(elapsed), flush=True)\n return batmanParams, batmanCurves\n\n\n<function token>\n<function token>\n\n\ndef open_tess_fits(tess_fpath, norm=False):\n try:\n with ast.io.fits.open(tess_fpath, mode='readonly') as hdulist:\n hdr = hdulist[0].header\n tess_time = hdulist[1].data['TIME']\n tess_flux = hdulist[1].data['PDCSAP_FLUX']\n med = np.nanmedian(tess_flux)\n 
tess_flux[np.isnan(tess_flux)] = med\n if norm:\n tmin = np.min(tess_flux)\n tmax = np.max(tess_flux)\n tess_flux = (tess_flux - tmin) / (tmax - tmin)\n except Exception as e:\n print('ERROR reading file: ', tess_fpath, ' with error: ', e, flush\n =True)\n return None, None\n return tess_time, tess_flux\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef make_lightcurve(t0, r, i, p, width, u_type, u_param, t):\n \"\"\"\n Generate a batman lightcurve with the given parameters.\n \n Parameters\n ----------\n t0 (num): time of inferior conjunction\n r (num): planet radius (in stellar radii)\n i (num): orbital inclination (in degrees)\n p (num): orbital period\n width (num): width parameter (defined as a**3/p**2)\n u_type (str): limb darkening model\n u_param (list): parameters for limb darkening\n \n t: timesteps that you want the fluxes at\n \n assume circular orbit\n \"\"\"\n params = batman.TransitParams()\n params.rp = r\n params.inc = i\n params.w = 0\n params.ecc = 0\n params.per = p\n params.t0 = t0\n params.a = (width * p ** 2) ** (1 / 3)\n params.limb_dark = u_type\n params.u = u_param\n model = batman.TransitModel(params, t)\n flux = model.light_curve(params)\n return flux\n\n\ndef make_batman(paramfile, outdir, norm=False, write=True, verbose=True):\n \"\"\" \n Return astropy tables of batman params and generated curves based on the\n parameters given in paramfile. \n\n Parameters\n ----------\n paramfile (str): path to JSON param file written by make_batman_config\n outdir (str): path to write output curve and param files\n norm (bool): normalize curves to unit integrated area\n write (bool): write param and curve tables to files\n verbose (bool): print logging and timing info\n \"\"\"\n if verbose:\n print('Reading param file', flush=True)\n with open(paramfile, 'r') as f:\n d = json.load(f)\n if verbose:\n print('Setting param ranges', flush=True)\n t = np.arange(d['tmin'], d['tmax'], d['tstep'])\n if d['wlog']:\n widths = np.logspace(d['wmin'], d['wmax'], d['wnum'])\n else:\n widths = np.linspace(d['wmin'], d['wmax'], d['wnum'])\n nparams = len(widths)\n radii = 0.1 * np.ones(nparams)\n incs = 90 * np.ones(nparams)\n u = ['0.1 0.3'] * nparams\n ld = ['quadratic'] * nparams\n per = 100 * np.ones(nparams)\n t0 = np.zeros(nparams)\n e = np.zeros(nparams)\n w = np.zeros(nparams)\n curveID = ['curve{}'.format(i) for i in range(nparams)]\n cols = [curveID, radii, incs, widths, per, u, ld, t0, e, w]\n colnames = ['curveID', 'rp', 'i', 'width', 'per', 'u', 'ld', 't0', 'e', 'w'\n ]\n batmanParams = tbl.Table(cols, names=colnames)\n if verbose:\n print('Generating curves', flush=True)\n start = time()\n batmanDict = {'times': t}\n err = 0\n for i in range(len(batmanParams)):\n p = batmanParams[i]\n cID = p['curveID']\n c = make_lightcurve(p['t0'], p['rp'], p['i'], p['per'], p['width'],\n p['ld'], [float(val) for val in p['u'].split()], t)\n if norm:\n cmax = np.max(c)\n cmin = np.min(c)\n c = (c - cmin) / (cmax - cmin)\n c = 1 - c\n c = c / np.sum(c)\n c = 1 - c\n if np.isnan(c).any() or sum(c == 1) < 5:\n print('Batman {} failed'.format(cID), flush=True)\n err += 1\n continue\n batmanDict[cID] = c\n if verbose and i % 100 == 0:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(i + 1 - err,\n nparams, elapsed), flush=True)\n batmanCurves = tbl.Table(batmanDict)\n if verbose:\n elapsed = time() - start\n print('Generated {}/{} curves in {} s'.format(nparams - 
err,\n nparams, elapsed), flush=True)\n if write:\n if verbose:\n start = time()\n print('Writing files', flush=True)\n ast.io.ascii.write(batmanParams, d['params_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote params to {}'.format(d['params_fname']))\n ast.io.ascii.write(batmanCurves, d['curves_fname'], format='csv',\n overwrite=True, comment='#', fast_writer=False)\n if verbose:\n print('Wrote curves to {}'.format(d['curves_fname']))\n elapsed = time() - start\n print('Wrote files in {} s'.format(elapsed), flush=True)\n return batmanParams, batmanCurves\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef make_lightcurve(t0, r, i, p, width, u_type, u_param, t):\n \"\"\"\n Generate a batman lightcurve with the given parameters.\n \n Parameters\n ----------\n t0 (num): time of inferior conjunction\n r (num): planet radius (in stellar radii)\n i (num): orbital inclination (in degrees)\n p (num): orbital period\n width (num): width parameter (defined as a**3/p**2)\n u_type (str): limb darkening model\n u_param (list): parameters for limb darkening\n \n t: timesteps that you want the fluxes at\n \n assume circular orbit\n \"\"\"\n params = batman.TransitParams()\n params.rp = r\n params.inc = i\n params.w = 0\n params.ecc = 0\n params.per = p\n params.t0 = t0\n params.a = (width * p ** 2) ** (1 / 3)\n params.limb_dark = u_type\n params.u = u_param\n model = batman.TransitModel(params, t)\n flux = model.light_curve(params)\n return flux\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n", "<docstring token>\n<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n" ]
false
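A minimal driver sketch for the convolution pipeline stored in the record above, assuming the functions are importable from a module; the module name 'tconvolve', every path, and the sector/file numbers below are illustrative assumptions rather than values from the record, and the JSON file is whatever make_batman_config (referenced in make_batman's docstring but not shown here) produced:

# Hypothetical usage; the module name and all paths are assumptions.
from tconvolve import make_batman, tbconvolve

# Generate the model-curve grid from a JSON parameter file, normalizing each curve.
batman_params, batman_curves = make_batman('batman_config.json', 'models/', norm=True)

# Cross-correlate sector 1, files 0-99, keeping the 10 best-matching models per
# TESS file; candidate CSV chunks and the merged chi-squared CSV land in results/.
candidates = tbconvolve(tess_dir='tess_data/', batman_dir='models/', batman_suffix='',
                        sector=1, start=0, end=100, output_dir='results/', num_keep=10)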
99,453
45a27d5d0c146d37bd41e9da914f40f9362cd32e
\"\"\"\nUtility functions for storing ML-derived task annotations related to buildings.\n\nThe functions here are meant to help organize data in databases stored outside\nof HOT's Tasking Manager.\n\"\"\"\n\n\nfrom sqlalchemy import (Column, Integer, String, Float,\n ForeignKey)\nfrom sqlalchemy.orm import relationship\nfrom sqlalchemy.ext.declarative import declarative_base\n\nfrom ml_tm_utils_pub.utils_geodata import (get_tile_pyramid)\n\n#######################################\n# Set the declarative base to prep creation of SQL classes\nBase = declarative_base()\n\n\nclass Project(Base):\n \"\"\"Project class meant to hold information on mapping projects in TM.\n\n Attributes\n ----------\n id: int\n The object's UID for the relational DB\n tm_index: int\n ID of the project on Tasking Manager's servers\n md5_hash: str\n MD5 hash of the project geometry. Useful for checking if a split\n occurred\n json_geometry: str\n Stripped down version of the geojson project geometry.\n \"\"\"\n\n __tablename__ = 'ml_projects'\n id = Column(Integer, primary_key=True)\n tm_index = Column(Integer)\n md5_hash = Column(String)\n json_geometry = Column(String)\n\n # Add a relationship with the tile prediction class\n building_tiles = relationship(\n \"TilePredBA\", back_populates=\"project\")\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return \"<Project(TM index={}, md5_hash={}, {} tiles>\".format(\n self.tm_index, self.md5_hash, len(self.building_tiles))\n\n\nclass TilePredBA(Base):\n \"\"\"Tile prediction building area (storing both ML estimate and OSM)\n\n Attributes\n ----------\n id: int\n The tile object's UID for the relational DB\n project_id: int\n Project ID keyed to the project table\n tile_index: str\n Tile index in string format specifying the x/y/z tile coords.\n building_area_ml: float\n Total building area for a tile as predicted by the ML algorithm\n building_area_osm: float\n Total building area for a tile mapped in OSM\n \"\"\"\n\n __tablename__ = 'tile_pred_buildings'\n id = Column(Integer, primary_key=True)\n project_id = Column(Integer, ForeignKey('ml_projects.id'))\n tile_index = Column(String)\n building_area_ml = Column(Float)\n building_area_osm = Column(Float)\n\n # Add a relationship with the project class\n project = relationship('Project', back_populates='building_tiles')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return (\"<TilePredBA(Project={}, Tile Index={} \"\n \"Building Area ML={}, Building Area OSM={}>\").format(\n self.project.tm_index, self.tile_index,\n self.building_area_ml, self.building_area_osm)\n\n\ndef get_total_tiles_building_area(tile_ind_list, session):\n \"\"\"Get total area of all tile indices specified in a list.\n\n Parameters\n -----------\n tile_ind_list: list of str\n List of tile indices to query\n session: sqlalchemy.orm.session.Session\n Handle to database\n\n Returns\n -------\n total_area_ml: float\n Sum of predicted building area for all tiles\n total_area_osm: float\n Sum of mapped building area in OSM for all tiles\n \"\"\"\n\n total_area_ml, total_area_osm = 0, 0\n for row in session.query(TilePredBA).filter(\n TilePredBA.tile_index.in_(tile_ind_list)):\n total_area_ml += row.building_area_ml\n total_area_osm += row.building_area_osm\n\n return total_area_ml, total_area_osm\n\n\ndef augment_geojson_building_area(project, session):\n \"\"\"Add building area information to each tile in a geojson dict.\n\n 
Parameters ---------- project: dict geojson to be augmented with new information session: sqlalchemy.orm.session.Session Handle to database """ # Loop through tasks in TM visualization for ti, task in enumerate(project['tasks']['features']): # Get total area tile_dict = dict(x=task['properties']['taskX'], y=task['properties']['taskY'], z=task['properties']['taskZoom']) child_tiles = get_tile_pyramid(tile_dict, max_zoom=18) area_ml, area_osm = get_total_tiles_building_area(child_tiles, session) # Add information to geojson task['properties']['building_area_ml_pred'] = area_ml task['properties']['building_area_osm'] = area_osm project['tasks']['features'][ti] = task # Return geojson return project def update_db_project(proj_id, geojson, geojson_hash, session): """Update a project geojson and hash Parameters ---------- proj_id: int TM Project ID corresponding to database entry for updating geojson: str Geojson string of project geometry geojson_hash: str MD5 hash of geojson object session: sqlalchemy.orm.session.Session Handle to database """ project = session.query(Project).filter( Project.tm_index == proj_id).one() project.json_geometry = geojson project.md5_hash = geojson_hash
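A minimal wiring sketch for the ORM helpers in the record above, assuming an in-memory SQLite engine; the engine URL, tile-index string, and area values are illustrative assumptions, while Base, Project, TilePredBA, and get_total_tiles_building_area come from the module itself:

# Hypothetical session setup; only the classes and the query helper are from the record.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('sqlite://')  # throwaway in-memory database
Base.metadata.create_all(engine)     # creates ml_projects and tile_pred_buildings
session = sessionmaker(bind=engine)()

proj = Project(tm_index=42, md5_hash='abc123', json_geometry='{}')
proj.building_tiles.append(TilePredBA(tile_index='18-1234-5678',
                                      building_area_ml=120.5,
                                      building_area_osm=80.0))
session.add(proj)
session.commit()

ml_area, osm_area = get_total_tiles_building_area(['18-1234-5678'], session)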
[ "\"\"\"\nUtility functions for storing ML-derived task annotations related to buildings.\n\nThe functions here are meant to help organize data in databases stored outside\nof HOT's Tasking Manager.\n\"\"\"\n\n\nfrom sqlalchemy import (Column, Integer, String, Float,\n ForeignKey)\nfrom sqlalchemy.orm import relationship\nfrom sqlalchemy.ext.declarative import declarative_base\n\nfrom ml_tm_utils_pub.utils_geodata import (get_tile_pyramid)\n\n#######################################\n# Set the declarative base to prep creation of SQL classes\nBase = declarative_base()\n\n\nclass Project(Base):\n \"\"\"Project class meant to hold information on mapping projects in TM.\n\n Attributes\n ----------\n id: int\n The object's UID for the relational DB\n tm_index: int\n ID of the project on Tasking Manager's servers\n md5_hash: str\n MD5 hash of the project geometry. Useful for checking if a split\n occurred\n json_geometry: str\n Stripped down version of the geojson project geometry.\n \"\"\"\n\n __tablename__ = 'ml_projects'\n id = Column(Integer, primary_key=True)\n tm_index = Column(Integer)\n md5_hash = Column(String)\n json_geometry = Column(String)\n\n # Add a relationship with the tile prediction class\n building_tiles = relationship(\n \"TilePredBA\", back_populates=\"project\")\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return \"<Project(TM index={}, md5_hash={}, {} tiles>\".format(\n self.tm_index, self.md5_hash, len(self.building_tiles))\n\n\nclass TilePredBA(Base):\n \"\"\"Tile prediction building area (storing both ML estimate and OSM)\n\n Attributes\n ----------\n id: int\n The tile object's UID for the relational DB\n project_id: int\n Project ID keyed to the project table\n tile_index: str\n Tile index in string format specifying the x/y/z tile coords.\n building_area_ml: float\n Total building area for a tile as predicted by the ML algorithm\n building_area_osm: float\n Total building area for a tile mapped in OSM\n \"\"\"\n\n __tablename__ = 'tile_pred_buildings'\n id = Column(Integer, primary_key=True)\n project_id = Column(Integer, ForeignKey('ml_projects.id'))\n tile_index = Column(String)\n building_area_ml = Column(Float)\n building_area_osm = Column(Float)\n\n # Add a relationship with the project class\n project = relationship('Project', back_populates='building_tiles')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return (\"<TilePredBA(Project={}, Tile Index={} \"\n \"Building Area ML={}, Building Area OSM={}>\").format(\n self.project.tm_index, self.tile_index,\n self.building_area_ml, self.building_area_osm)\n\n\ndef get_total_tiles_building_area(tile_ind_list, session):\n \"\"\"Get total area of all tile indices specified in a list.\n\n Parameters\n -----------\n tile_ind_list: list of str\n List of tile indices to query\n session: sqlalchemy.orm.session.Session\n Handle to database\n\n Returns\n -------\n total_area_ml: float\n Sum of predicted building area for all tiles\n total_area_osm: float\n Sum of mapped building area in OSM for all tiles\n \"\"\"\n\n total_area_ml, total_area_osm = 0, 0\n for row in session.query(TilePredBA).filter(\n TilePredBA.tile_index.in_(tile_ind_list)):\n total_area_ml += row.building_area_ml\n total_area_osm += row.building_area_osm\n\n return total_area_ml, total_area_osm\n\n\ndef augment_geojson_building_area(project, session):\n \"\"\"Add building area information to each tile in a geojson dict.\n\n Parameters\n ----------\n project: dict\n geojson to be augmented with new 
information\n session: sqlalchemy.orm.session.Session\n Handle to database\n \"\"\"\n\n # Loop through tasks in TM visualization\n for ti, task in enumerate(project['tasks']['features']):\n\n # Get total area\n tile_dict = dict(x=task['properties']['taskX'],\n y=task['properties']['taskY'],\n z=task['properties']['taskZoom'])\n child_tiles = get_tile_pyramid(tile_dict, max_zoom=18)\n\n area_ml, area_osm = get_total_tiles_building_area(child_tiles, session)\n\n # Add information to geojson\n task['properties']['building_area_ml_pred'] = area_ml\n task['properties']['building_area_osm'] = area_osm\n project['tasks']['features'][ti] = task\n\n # Return geojson\n return project\n\n\ndef update_db_project(proj_id, geojson, geojson_hash, session):\n \"\"\"Update a project geojson and hash\n\n Parameters\n ----------\n proj_id: int\n TM Project ID corresponding to database entry for updating\n geojson: str\n Geojson string of project geometry\n geojson_hash: str\n MD5 hash of geojson object\n session: sqlalchemy.orm.session.Session\n Handle to database\n \"\"\"\n\n project = session.query(Project).filter(\n Project.tm_index == proj_id).one()\n project.json_geometry = geojson\n project.md5_hash = geojson_hash\n", "<docstring token>\nfrom sqlalchemy import Column, Integer, String, Float, ForeignKey\nfrom sqlalchemy.orm import relationship\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom ml_tm_utils_pub.utils_geodata import get_tile_pyramid\nBase = declarative_base()\n\n\nclass Project(Base):\n \"\"\"Project class meant to hold information on mapping projects in TM.\n\n Attributes\n ----------\n id: int\n The object's UID for the relational DB\n tm_index: int\n ID of the project on Tasking Manager's servers\n md5_hash: str\n MD5 hash of the project geometry. 
Useful for checking if a split\n occurred\n json_geometry: str\n Stripped down version of the geojson project geometry.\n \"\"\"\n __tablename__ = 'ml_projects'\n id = Column(Integer, primary_key=True)\n tm_index = Column(Integer)\n md5_hash = Column(String)\n json_geometry = Column(String)\n building_tiles = relationship('TilePredBA', back_populates='project')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return '<Project(TM index={}, md5_hash={}, {} tiles>'.format(self.\n tm_index, self.md5_hash, len(self.building_tiles))\n\n\nclass TilePredBA(Base):\n \"\"\"Tile prediction building area (storing both ML estimate and OSM)\n\n Attributes\n ----------\n id: int\n The tile object's UID for the relational DB\n project_id: int\n Project ID keyed to the project table\n tile_index: str\n Tile index in string format specifying the x/y/z tile coords.\n building_area_ml: float\n Total building area for a tile as predicted by the ML algorithm\n building_area_osm: float\n Total building area for a tile mapped in OSM\n \"\"\"\n __tablename__ = 'tile_pred_buildings'\n id = Column(Integer, primary_key=True)\n project_id = Column(Integer, ForeignKey('ml_projects.id'))\n tile_index = Column(String)\n building_area_ml = Column(Float)\n building_area_osm = Column(Float)\n project = relationship('Project', back_populates='building_tiles')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return (\n '<TilePredBA(Project={}, Tile Index={} Building Area ML={}, Building Area OSM={}>'\n .format(self.project.tm_index, self.tile_index, self.\n building_area_ml, self.building_area_osm))\n\n\ndef get_total_tiles_building_area(tile_ind_list, session):\n \"\"\"Get total area of all tile indices specified in a list.\n\n Parameters\n -----------\n tile_ind_list: list of str\n List of tile indices to query\n session: sqlalchemy.orm.session.Session\n Handle to database\n\n Returns\n -------\n total_area_ml: float\n Sum of predicted building area for all tiles\n total_area_osm: float\n Sum of mapped building area in OSM for all tiles\n \"\"\"\n total_area_ml, total_area_osm = 0, 0\n for row in session.query(TilePredBA).filter(TilePredBA.tile_index.in_(\n tile_ind_list)):\n total_area_ml += row.building_area_ml\n total_area_osm += row.building_area_osm\n return total_area_ml, total_area_osm\n\n\ndef augment_geojson_building_area(project, session):\n \"\"\"Add building area information to each tile in a geojson dict.\n\n Parameters\n ----------\n project: dict\n geojson to be augmented with new information\n session: sqlalchemy.orm.session.Session\n Handle to database\n \"\"\"\n for ti, task in enumerate(project['tasks']['features']):\n tile_dict = dict(x=task['properties']['taskX'], y=task['properties'\n ]['taskY'], z=task['properties']['taskZoom'])\n child_tiles = get_tile_pyramid(tile_dict, max_zoom=18)\n area_ml, area_osm = get_total_tiles_building_area(child_tiles, session)\n task['properties']['building_area_ml_pred'] = area_ml\n task['properties']['building_area_osm'] = area_osm\n project['tasks']['features'][ti] = task\n return project\n\n\ndef update_db_project(proj_id, geojson, geojson_hash, session):\n \"\"\"Update a project geojson and hash\n\n Parameters\n ----------\n proj_id: int\n TM Project ID corresponding to database entry for updating\n geojson: str\n Geojson string of project geometry\n geojson_hash: str\n MD5 hash of geojson object\n session: sqlalchemy.orm.session.Session\n Handle to database\n \"\"\"\n project = session.query(Project).filter(Project.tm_index 
== proj_id).one()\n project.json_geometry = geojson\n project.md5_hash = geojson_hash\n", "<docstring token>\n<import token>\nBase = declarative_base()\n\n\nclass Project(Base):\n \"\"\"Project class meant to hold information on mapping projects in TM.\n\n Attributes\n ----------\n id: int\n The object's UID for the relational DB\n tm_index: int\n ID of the project on Tasking Manager's servers\n md5_hash: str\n MD5 hash of the project geometry. Useful for checking if a split\n occurred\n json_geometry: str\n Stripped down version of the geojson project geometry.\n \"\"\"\n __tablename__ = 'ml_projects'\n id = Column(Integer, primary_key=True)\n tm_index = Column(Integer)\n md5_hash = Column(String)\n json_geometry = Column(String)\n building_tiles = relationship('TilePredBA', back_populates='project')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return '<Project(TM index={}, md5_hash={}, {} tiles>'.format(self.\n tm_index, self.md5_hash, len(self.building_tiles))\n\n\nclass TilePredBA(Base):\n \"\"\"Tile prediction building area (storing both ML estimate and OSM)\n\n Attributes\n ----------\n id: int\n The tile object's UID for the relational DB\n project_id: int\n Project ID keyed to the project table\n tile_index: str\n Tile index in string format specifying the x/y/z tile coords.\n building_area_ml: float\n Total building area for a tile as predicted by the ML algorithm\n building_area_osm: float\n Total building area for a tile mapped in OSM\n \"\"\"\n __tablename__ = 'tile_pred_buildings'\n id = Column(Integer, primary_key=True)\n project_id = Column(Integer, ForeignKey('ml_projects.id'))\n tile_index = Column(String)\n building_area_ml = Column(Float)\n building_area_osm = Column(Float)\n project = relationship('Project', back_populates='building_tiles')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return (\n '<TilePredBA(Project={}, Tile Index={} Building Area ML={}, Building Area OSM={}>'\n .format(self.project.tm_index, self.tile_index, self.\n building_area_ml, self.building_area_osm))\n\n\ndef get_total_tiles_building_area(tile_ind_list, session):\n \"\"\"Get total area of all tile indices specified in a list.\n\n Parameters\n -----------\n tile_ind_list: list of str\n List of tile indices to query\n session: sqlalchemy.orm.session.Session\n Handle to database\n\n Returns\n -------\n total_area_ml: float\n Sum of predicted building area for all tiles\n total_area_osm: float\n Sum of mapped building area in OSM for all tiles\n \"\"\"\n total_area_ml, total_area_osm = 0, 0\n for row in session.query(TilePredBA).filter(TilePredBA.tile_index.in_(\n tile_ind_list)):\n total_area_ml += row.building_area_ml\n total_area_osm += row.building_area_osm\n return total_area_ml, total_area_osm\n\n\ndef augment_geojson_building_area(project, session):\n \"\"\"Add building area information to each tile in a geojson dict.\n\n Parameters\n ----------\n project: dict\n geojson to be augmented with new information\n session: sqlalchemy.orm.session.Session\n Handle to database\n \"\"\"\n for ti, task in enumerate(project['tasks']['features']):\n tile_dict = dict(x=task['properties']['taskX'], y=task['properties'\n ]['taskY'], z=task['properties']['taskZoom'])\n child_tiles = get_tile_pyramid(tile_dict, max_zoom=18)\n area_ml, area_osm = get_total_tiles_building_area(child_tiles, session)\n task['properties']['building_area_ml_pred'] = area_ml\n task['properties']['building_area_osm'] = area_osm\n project['tasks']['features'][ti] = task\n return 
project\n\n\ndef update_db_project(proj_id, geojson, geojson_hash, session):\n \"\"\"Update a project geojson and hash\n\n Parameters\n ----------\n proj_id: int\n TM Project ID corresponding to database entry for updating\n geojson: str\n Geojson string of project geometry\n geojson_hash: str\n MD5 hash of geojson object\n session: sqlalchemy.orm.session.Session\n Handle to database\n \"\"\"\n project = session.query(Project).filter(Project.tm_index == proj_id).one()\n project.json_geometry = geojson\n project.md5_hash = geojson_hash\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass Project(Base):\n \"\"\"Project class meant to hold information on mapping projects in TM.\n\n Attributes\n ----------\n id: int\n The object's UID for the relational DB\n tm_index: int\n ID of the project on Tasking Manager's servers\n md5_hash: str\n MD5 hash of the project geometry. Useful for checking if a split\n occurred\n json_geometry: str\n Stripped down version of the geojson project geometry.\n \"\"\"\n __tablename__ = 'ml_projects'\n id = Column(Integer, primary_key=True)\n tm_index = Column(Integer)\n md5_hash = Column(String)\n json_geometry = Column(String)\n building_tiles = relationship('TilePredBA', back_populates='project')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return '<Project(TM index={}, md5_hash={}, {} tiles>'.format(self.\n tm_index, self.md5_hash, len(self.building_tiles))\n\n\nclass TilePredBA(Base):\n \"\"\"Tile prediction building area (storing both ML estimate and OSM)\n\n Attributes\n ----------\n id: int\n The tile object's UID for the relational DB\n project_id: int\n Project ID keyed to the project table\n tile_index: str\n Tile index in string format specifying the x/y/z tile coords.\n building_area_ml: float\n Total building area for a tile as predicted by the ML algorithm\n building_area_osm: float\n Total building area for a tile mapped in OSM\n \"\"\"\n __tablename__ = 'tile_pred_buildings'\n id = Column(Integer, primary_key=True)\n project_id = Column(Integer, ForeignKey('ml_projects.id'))\n tile_index = Column(String)\n building_area_ml = Column(Float)\n building_area_osm = Column(Float)\n project = relationship('Project', back_populates='building_tiles')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return (\n '<TilePredBA(Project={}, Tile Index={} Building Area ML={}, Building Area OSM={}>'\n .format(self.project.tm_index, self.tile_index, self.\n building_area_ml, self.building_area_osm))\n\n\ndef get_total_tiles_building_area(tile_ind_list, session):\n \"\"\"Get total area of all tile indices specified in a list.\n\n Parameters\n -----------\n tile_ind_list: list of str\n List of tile indices to query\n session: sqlalchemy.orm.session.Session\n Handle to database\n\n Returns\n -------\n total_area_ml: float\n Sum of predicted building area for all tiles\n total_area_osm: float\n Sum of mapped building area in OSM for all tiles\n \"\"\"\n total_area_ml, total_area_osm = 0, 0\n for row in session.query(TilePredBA).filter(TilePredBA.tile_index.in_(\n tile_ind_list)):\n total_area_ml += row.building_area_ml\n total_area_osm += row.building_area_osm\n return total_area_ml, total_area_osm\n\n\ndef augment_geojson_building_area(project, session):\n \"\"\"Add building area information to each tile in a geojson dict.\n\n Parameters\n ----------\n project: dict\n geojson to be augmented with new information\n session: sqlalchemy.orm.session.Session\n Handle to database\n \"\"\"\n for ti, task in 
enumerate(project['tasks']['features']):\n tile_dict = dict(x=task['properties']['taskX'], y=task['properties'\n ]['taskY'], z=task['properties']['taskZoom'])\n child_tiles = get_tile_pyramid(tile_dict, max_zoom=18)\n area_ml, area_osm = get_total_tiles_building_area(child_tiles, session)\n task['properties']['building_area_ml_pred'] = area_ml\n task['properties']['building_area_osm'] = area_osm\n project['tasks']['features'][ti] = task\n return project\n\n\ndef update_db_project(proj_id, geojson, geojson_hash, session):\n \"\"\"Update a project geojson and hash\n\n Parameters\n ----------\n proj_id: int\n TM Project ID corresponding to database entry for updating\n geojson: str\n Geojson string of project geometry\n geojson_hash: str\n MD5 hash of geojson object\n session: sqlalchemy.orm.session.Session\n Handle to database\n \"\"\"\n project = session.query(Project).filter(Project.tm_index == proj_id).one()\n project.json_geometry = geojson\n project.md5_hash = geojson_hash\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass Project(Base):\n \"\"\"Project class meant to hold information on mapping projects in TM.\n\n Attributes\n ----------\n id: int\n The object's UID for the relational DB\n tm_index: int\n ID of the project on Tasking Manager's servers\n md5_hash: str\n MD5 hash of the project geometry. Useful for checking if a split\n occurred\n json_geometry: str\n Stripped down version of the geojson project geometry.\n \"\"\"\n __tablename__ = 'ml_projects'\n id = Column(Integer, primary_key=True)\n tm_index = Column(Integer)\n md5_hash = Column(String)\n json_geometry = Column(String)\n building_tiles = relationship('TilePredBA', back_populates='project')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return '<Project(TM index={}, md5_hash={}, {} tiles>'.format(self.\n tm_index, self.md5_hash, len(self.building_tiles))\n\n\nclass TilePredBA(Base):\n \"\"\"Tile prediction building area (storing both ML estimate and OSM)\n\n Attributes\n ----------\n id: int\n The tile object's UID for the relational DB\n project_id: int\n Project ID keyed to the project table\n tile_index: str\n Tile index in string format specifying the x/y/z tile coords.\n building_area_ml: float\n Total building area for a tile as predicted by the ML algorithm\n building_area_osm: float\n Total building area for a tile mapped in OSM\n \"\"\"\n __tablename__ = 'tile_pred_buildings'\n id = Column(Integer, primary_key=True)\n project_id = Column(Integer, ForeignKey('ml_projects.id'))\n tile_index = Column(String)\n building_area_ml = Column(Float)\n building_area_osm = Column(Float)\n project = relationship('Project', back_populates='building_tiles')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return (\n '<TilePredBA(Project={}, Tile Index={} Building Area ML={}, Building Area OSM={}>'\n .format(self.project.tm_index, self.tile_index, self.\n building_area_ml, self.building_area_osm))\n\n\ndef get_total_tiles_building_area(tile_ind_list, session):\n \"\"\"Get total area of all tile indices specified in a list.\n\n Parameters\n -----------\n tile_ind_list: list of str\n List of tile indices to query\n session: sqlalchemy.orm.session.Session\n Handle to database\n\n Returns\n -------\n total_area_ml: float\n Sum of predicted building area for all tiles\n total_area_osm: float\n Sum of mapped building area in OSM for all tiles\n \"\"\"\n total_area_ml, total_area_osm = 0, 0\n for row in session.query(TilePredBA).filter(TilePredBA.tile_index.in_(\n 
tile_ind_list)):\n total_area_ml += row.building_area_ml\n total_area_osm += row.building_area_osm\n return total_area_ml, total_area_osm\n\n\ndef augment_geojson_building_area(project, session):\n \"\"\"Add building area information to each tile in a geojson dict.\n\n Parameters\n ----------\n project: dict\n geojson to be augmented with new information\n session: sqlalchemy.orm.session.Session\n Handle to database\n \"\"\"\n for ti, task in enumerate(project['tasks']['features']):\n tile_dict = dict(x=task['properties']['taskX'], y=task['properties'\n ]['taskY'], z=task['properties']['taskZoom'])\n child_tiles = get_tile_pyramid(tile_dict, max_zoom=18)\n area_ml, area_osm = get_total_tiles_building_area(child_tiles, session)\n task['properties']['building_area_ml_pred'] = area_ml\n task['properties']['building_area_osm'] = area_osm\n project['tasks']['features'][ti] = task\n return project\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass Project(Base):\n \"\"\"Project class meant to hold information on mapping projects in TM.\n\n Attributes\n ----------\n id: int\n The object's UID for the relational DB\n tm_index: int\n ID of the project on Tasking Manager's servers\n md5_hash: str\n MD5 hash of the project geometry. Useful for checking if a split\n occurred\n json_geometry: str\n Stripped down version of the geojson project geometry.\n \"\"\"\n __tablename__ = 'ml_projects'\n id = Column(Integer, primary_key=True)\n tm_index = Column(Integer)\n md5_hash = Column(String)\n json_geometry = Column(String)\n building_tiles = relationship('TilePredBA', back_populates='project')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return '<Project(TM index={}, md5_hash={}, {} tiles>'.format(self.\n tm_index, self.md5_hash, len(self.building_tiles))\n\n\nclass TilePredBA(Base):\n \"\"\"Tile prediction building area (storing both ML estimate and OSM)\n\n Attributes\n ----------\n id: int\n The tile object's UID for the relational DB\n project_id: int\n Project ID keyed to the project table\n tile_index: str\n Tile index in string format specifying the x/y/z tile coords.\n building_area_ml: float\n Total building area for a tile as predicted by the ML algorithm\n building_area_osm: float\n Total building area for a tile mapped in OSM\n \"\"\"\n __tablename__ = 'tile_pred_buildings'\n id = Column(Integer, primary_key=True)\n project_id = Column(Integer, ForeignKey('ml_projects.id'))\n tile_index = Column(String)\n building_area_ml = Column(Float)\n building_area_osm = Column(Float)\n project = relationship('Project', back_populates='building_tiles')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return (\n '<TilePredBA(Project={}, Tile Index={} Building Area ML={}, Building Area OSM={}>'\n .format(self.project.tm_index, self.tile_index, self.\n building_area_ml, self.building_area_osm))\n\n\n<function token>\n\n\ndef augment_geojson_building_area(project, session):\n \"\"\"Add building area information to each tile in a geojson dict.\n\n Parameters\n ----------\n project: dict\n geojson to be augmented with new information\n session: sqlalchemy.orm.session.Session\n Handle to database\n \"\"\"\n for ti, task in enumerate(project['tasks']['features']):\n tile_dict = dict(x=task['properties']['taskX'], y=task['properties'\n ]['taskY'], z=task['properties']['taskZoom'])\n child_tiles = get_tile_pyramid(tile_dict, max_zoom=18)\n area_ml, area_osm = get_total_tiles_building_area(child_tiles, session)\n 
task['properties']['building_area_ml_pred'] = area_ml\n task['properties']['building_area_osm'] = area_osm\n project['tasks']['features'][ti] = task\n return project\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass Project(Base):\n \"\"\"Project class meant to hold information on mapping projects in TM.\n\n Attributes\n ----------\n id: int\n The object's UID for the relational DB\n tm_index: int\n ID of the project on Tasking Manager's servers\n md5_hash: str\n MD5 hash of the project geometry. Useful for checking if a split\n occurred\n json_geometry: str\n Stripped down version of the geojson project geometry.\n \"\"\"\n __tablename__ = 'ml_projects'\n id = Column(Integer, primary_key=True)\n tm_index = Column(Integer)\n md5_hash = Column(String)\n json_geometry = Column(String)\n building_tiles = relationship('TilePredBA', back_populates='project')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return '<Project(TM index={}, md5_hash={}, {} tiles>'.format(self.\n tm_index, self.md5_hash, len(self.building_tiles))\n\n\nclass TilePredBA(Base):\n \"\"\"Tile prediction building area (storing both ML estimate and OSM)\n\n Attributes\n ----------\n id: int\n The tile object's UID for the relational DB\n project_id: int\n Project ID keyed to the project table\n tile_index: str\n Tile index in string format specifying the x/y/z tile coords.\n building_area_ml: float\n Total building area for a tile as predicted by the ML algorithm\n building_area_osm: float\n Total building area for a tile mapped in OSM\n \"\"\"\n __tablename__ = 'tile_pred_buildings'\n id = Column(Integer, primary_key=True)\n project_id = Column(Integer, ForeignKey('ml_projects.id'))\n tile_index = Column(String)\n building_area_ml = Column(Float)\n building_area_osm = Column(Float)\n project = relationship('Project', back_populates='building_tiles')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return (\n '<TilePredBA(Project={}, Tile Index={} Building Area ML={}, Building Area OSM={}>'\n .format(self.project.tm_index, self.tile_index, self.\n building_area_ml, self.building_area_osm))\n\n\n<function token>\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass Project(Base):\n <docstring token>\n __tablename__ = 'ml_projects'\n id = Column(Integer, primary_key=True)\n tm_index = Column(Integer)\n md5_hash = Column(String)\n json_geometry = Column(String)\n building_tiles = relationship('TilePredBA', back_populates='project')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return '<Project(TM index={}, md5_hash={}, {} tiles>'.format(self.\n tm_index, self.md5_hash, len(self.building_tiles))\n\n\nclass TilePredBA(Base):\n \"\"\"Tile prediction building area (storing both ML estimate and OSM)\n\n Attributes\n ----------\n id: int\n The tile object's UID for the relational DB\n project_id: int\n Project ID keyed to the project table\n tile_index: str\n Tile index in string format specifying the x/y/z tile coords.\n building_area_ml: float\n Total building area for a tile as predicted by the ML algorithm\n building_area_osm: float\n Total building area for a tile mapped in OSM\n \"\"\"\n __tablename__ = 'tile_pred_buildings'\n id = Column(Integer, primary_key=True)\n project_id = Column(Integer, ForeignKey('ml_projects.id'))\n tile_index = Column(String)\n building_area_ml = Column(Float)\n building_area_osm = Column(Float)\n project = relationship('Project', 
back_populates='building_tiles')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return (\n '<TilePredBA(Project={}, Tile Index={} Building Area ML={}, Building Area OSM={}>'\n .format(self.project.tm_index, self.tile_index, self.\n building_area_ml, self.building_area_osm))\n\n\n<function token>\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass TilePredBA(Base):\n \"\"\"Tile prediction building area (storing both ML estimate and OSM)\n\n Attributes\n ----------\n id: int\n The tile object's UID for the relational 
DB\n project_id: int\n Project ID keyed to the project table\n tile_index: str\n Tile index in string format specifying the x/y/z tile coords.\n building_area_ml: float\n Total building area for a tile as predicted by the ML algorithm\n building_area_osm: float\n Total building area for a tile mapped in OSM\n \"\"\"\n __tablename__ = 'tile_pred_buildings'\n id = Column(Integer, primary_key=True)\n project_id = Column(Integer, ForeignKey('ml_projects.id'))\n tile_index = Column(String)\n building_area_ml = Column(Float)\n building_area_osm = Column(Float)\n project = relationship('Project', back_populates='building_tiles')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return (\n '<TilePredBA(Project={}, Tile Index={} Building Area ML={}, Building Area OSM={}>'\n .format(self.project.tm_index, self.tile_index, self.\n building_area_ml, self.building_area_osm))\n\n\n<function token>\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass TilePredBA(Base):\n <docstring token>\n __tablename__ = 'tile_pred_buildings'\n id = Column(Integer, primary_key=True)\n project_id = Column(Integer, ForeignKey('ml_projects.id'))\n tile_index = Column(String)\n building_area_ml = Column(Float)\n building_area_osm = Column(Float)\n project = relationship('Project', back_populates='building_tiles')\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return (\n '<TilePredBA(Project={}, Tile Index={} Building Area ML={}, Building Area OSM={}>'\n .format(self.project.tm_index, self.tile_index, self.\n building_area_ml, self.building_area_osm))\n\n\n<function token>\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass TilePredBA(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __repr__(self):\n \"\"\"Define string representation.\"\"\"\n return (\n '<TilePredBA(Project={}, Tile Index={} Building Area ML={}, Building Area OSM={}>'\n .format(self.project.tm_index, self.tile_index, self.\n building_area_ml, self.building_area_osm))\n\n\n<function token>\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass TilePredBA(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n<function token>\n<function token>\n<function token>\n" ]
false
99,454
516aa7b8b1dca8d4075842b03e99d23d9d38a76b
class Qaslkdjfenurbasdfkjalsrke(object): pass def g(a: Qaslkdjfenurbasdfkjalsrke, b: Qaslkdjfenurbasdfkjalsrke = 100) -> int: pass class Goddamn(object): __metaclass__= type @staticmethod def u (a , b ): if a < b: return b- a else: return a -b class Hey(Goddamn, Qaslkdjfenurbasdfkjalsrke): pass print "Hello world"; print("Hello world")
[ "class Qaslkdjfenurbasdfkjalsrke(object): pass\ndef g(a: Qaslkdjfenurbasdfkjalsrke, b: Qaslkdjfenurbasdfkjalsrke = 100) -> int: pass\nclass Goddamn(object):\n __metaclass__= type\n @staticmethod\n def u (a , b ):\n if a < b: return b- a\n else:\n return a -b\n\n\n\n\nclass Hey(Goddamn, Qaslkdjfenurbasdfkjalsrke): pass\n\nprint \"Hello world\"; print(\"Hello world\")\n" ]
true
99,455
5841061d81e2049ddbedbedb4d044647cb9ac7ff
############################################################################## # Johnathan Clementi # Advanced Python Programming for GIS - PSU GEOG 489 # Prof. James O’Brien, Grading Assistant Rossana Grzinic # Final Project Deliverables # Purpose: NJ Highlands Region annual preserved lands breakdown ############################################################################## ''' Import necessary libraries ''' import os, sys import re import arcpy arcpy.env.overwriteOutput = True # For testing purposes, allows us to overwrite old outputs import multiprocessing from workers import worker import time startTime = time.time() # Set workspace to in memory to increase efficiency arcpy.env.workspace = r'in_memory' ''' Data Input/Output''' # Municipalities of New Jersey: # https://njogis-newjersey.opendata.arcgis.com/datasets/3d5d1db8a1b34b418c331f4ce1fd0fef_2 njMuni = r'C:\Users\Johnathan\Google Drive\Grad School\PSU_GIS_Cert\GEOG 489\FinalPrj\data\HighlandsProtectedLands.gdb\NJ_Municipalities' # Highlands Region # http://highlands-data-njhighlands.opendata.arcgis.com/datasets/highlands-boundary highlandsBoundary = r'C:\Users\Johnathan\Google Drive\Grad School\PSU_GIS_Cert\GEOG 489\FinalPrj\data\HighlandsProtectedLands.gdb\Highlands_Boundary' # Municipalities of the Highlands Region (NJ_Municipalities clipped to Highlands_Boundary) # Note: There are two 'Washington Townships' within the Highlands Region highlandsMuni = r'C:\Users\Johnathan\Google Drive\Grad School\PSU_GIS_Cert\GEOG 489\FinalPrj\data\HighlandsProtectedLands.gdb\highlandsMuni' # Planning and Preservation Designations # http://highlands-data-njhighlands.opendata.arcgis.com/datasets/preservation-and-planning-area planPresPoly = r'C:\Users\Johnathan\Google Drive\Grad School\PSU_GIS_Cert\GEOG 489\FinalPrj\data\HighlandsProtectedLands.gdb\Preservation_and_Planning_Area' # Preserved Lands within the Highlands Region # http://highlands-data-njhighlands.opendata.arcgis.com/datasets/preserved-lands presLands = r'C:\Users\Johnathan\Google Drive\Grad School\PSU_GIS_Cert\GEOG 489\FinalPrj\data\HighlandsProtectedLands.gdb\Preserved_Lands' # Input feature classes - on disk # clipper = highlandsMuni # tobeclipped = [presLands, planPresPoly] # Output directory outFolder = r'C:\Users\Johnathan\Google Drive\Grad School\PSU_GIS_Cert\GEOG 489\FinalPrj\data\output' # Check if output directory exists. 
Create a directory if one does not exist if os.path.exists(outFolder): if os.path.isdir(outFolder): print('The proper output folder exists, moving on') else: os.mkdir(outFolder) print('Created the output directory') else: os.mkdir(outFolder) print('Created the output directory') ''' In Memory Data ''' # Make an in_memory feature layer for clip feature which is the Highlands Municipalities clipper = "in_memory" + "\\" + "highlandsMuni" arcpy.MakeFeatureLayer_management(highlandsMuni, clipper) # Make an in_memory feature layer for Preserved lands inMemPresLands = "in_memory" + "\\" + "Preserved_Lands" arcpy.MakeFeatureLayer_management(presLands, inMemPresLands) # Make an in_memory feature layer for Planning/Preservation Regions inMemPlanPresPoly = "in_memory" + "\\" + "Preservation_and_Planning_Area" arcpy.MakeFeatureLayer_management(planPresPoly, inMemPlanPresPoly) # Add in memory preserved lands and planning/preservation regions to tobeclipped list tobeclipped = [inMemPresLands, inMemPlanPresPoly] ''' Check for and use 64 bit processing ''' def get_install_path(): ''' Return 64bit python install path from registry (if installed and registered), otherwise fall back to current 32bit process install path. ''' if sys.maxsize > 2**32: return sys.exec_prefix #We're running in a 64bit process #We're 32 bit so see if there's a 64bit install path = r'SOFTWARE\Python\PythonCore\2.7' from _winreg import OpenKey, QueryValue from _winreg import HKEY_LOCAL_MACHINE, KEY_READ, KEY_WOW64_64KEY try: with OpenKey(HKEY_LOCAL_MACHINE, path, 0, KEY_READ | KEY_WOW64_64KEY) as key: return QueryValue(key, "InstallPath").strip(os.sep) #We have a 64bit install, so return that. except: return sys.exec_prefix #No 64bit, so return 32bit path ''' Multiprocessing Handler Function ''' def mp_handler(): try: print("Creating Polygon OID list...") # These are the fields we want to grab from the clip feature layer field = ['OID@', 'MUN_LABEL'] # Create a list of object IDs for clipper polygons idList = [] # Initialize list of municipality names (municipalities are used as clip features) clipperNameList = [] # Iterate through the rows of the municipality feature layer (clipper) and return the OID and name field data with arcpy.da.SearchCursor(clipper, field) as cursor: for row in cursor: id = row[0] # Retrieve OID from first element in row name = row[1] # Retrieve Municipality name from second element in row name = name.replace(" ", "_") # Replace illegal characters so we can use this field as the name of the output file later on name = name.replace("-", "_") idList.append(id) clipperNameList.append(name) print("There are " + str(len(idList)) + " object IDs (polygons) to process.") # Reset field variable to just that of the OIDFieldName of the municipality feature layer clipperDescObj = arcpy.Describe(clipper) field = clipperDescObj.OIDFieldName # Initialize tuples (not list because tuples are immutable) of tasks that will be sent to workers jobs = [] ''' Nested loop creates job list for each input feature layer of clip (preserved lands and planning/preservation regions) and each feature of clip feature layer Use enumerate to get index of tobeclipped list then assign value at that index to a variable holding one element (instead of a list) ''' for i, item in enumerate (tobeclipped): tobeclippeditem = tobeclipped[i] # Get just one clip input feature layer j = 0 # Initialize index used for retrieving municipality name for id in idList: name = clipperNameList[j] # Get municipality name from current index j += 1 # Advance 
municipality name index jobs.append((clipper,tobeclippeditem,field,id,outFolder, name)) # Add tuples of the parameters that need to be given to the worker function to the jobs list print("Job list has " + str(len(jobs)) + " elements.") ''' Multiprocessing Pool ''' # Create and run multiprocessing pool. multiprocessing.set_executable(os.path.join(get_install_path(), 'pythonw.exe')) # make sure Python environment is used for running processes, even when this is run as a script tool print("Sending to pool") cpuNum = multiprocessing.cpu_count() # determine number of cores to use print("There are: " + str(cpuNum) + " cpu cores on this machine") with multiprocessing.Pool(processes=cpuNum) as pool: # Create the pool object res = pool.starmap(worker, jobs) # run jobs in job list; res is a list with return values of the worker function ''' Error Reporting if successful try ''' failed = res.count(False) # count how many times False appears in the list with the return values if failed > 0: arcpy.AddError("{} workers failed!".format(failed)) print("{} workers failed!".format(failed)) # If the process was completed, print a message arcpy.AddMessage("Finished multiprocessing!") print("Finished multiprocessing!") # Clean up in_memory arcpy.Delete_management("in_memory") # Print processing time arcpy.AddMessage("Total time: %s seconds" % (time.time() - startTime)) # Error Reporting if unsuccessful try except arcpy.ExecuteError: # Geoprocessor threw an error arcpy.AddError(arcpy.GetMessages(2)) print("Execute Error:", arcpy.ExecuteError) except Exception as e: # Capture all other errors arcpy.AddError(str(e)) print("Exception:", e) # Clean up in_memory arcpy.Delete_management("in_memory") # Print processing time arcpy.AddMessage("Total time: %s seconds" % (time.time() - startTime)) ''' Call multiprocessing handler function ''' if __name__ == '__main__': mp_handler()
[ "##############################################################################\n# Johnathan Clementi\n# Advanced Python Programming for GIS - PSU GEOG 489\n# Prof. James O’Brien, Grading Assistant Rossana Grzinic\n# Final Project Deliverables\n# Purpose: NJ Highlands Region annual preserved lands breakdown\n##############################################################################\n\n''' Import necessary libraries '''\nimport os, sys\nimport re\nimport arcpy\narcpy.env.overwriteOutput = True # For testing purposes, allows us to overwrite old outputs\nimport multiprocessing\nfrom workers import worker\nimport time\nstartTime = time.time()\n\n# Set workspace to in memory to increase efficiency\narcpy.env.workspace = r'in_memory'\n\n\n''' Data Input/Output'''\n\n# Municipalities of New Jersey:\n# https://njogis-newjersey.opendata.arcgis.com/datasets/3d5d1db8a1b34b418c331f4ce1fd0fef_2\nnjMuni = r'C:\\Users\\Johnathan\\Google Drive\\Grad School\\PSU_GIS_Cert\\GEOG 489\\FinalPrj\\data\\HighlandsProtectedLands.gdb\\NJ_Municipalities'\n\n# Highlands Region\n# http://highlands-data-njhighlands.opendata.arcgis.com/datasets/highlands-boundary\nhighlandsBoundary = r'C:\\Users\\Johnathan\\Google Drive\\Grad School\\PSU_GIS_Cert\\GEOG 489\\FinalPrj\\data\\HighlandsProtectedLands.gdb\\Highlands_Boundary'\n\n# Municipalities of the Highlands Region (NJ_Municipalities clipped to Highlands_Boundary)\n# Note: There are two 'Washington Townships' within the Highlands Region\nhighlandsMuni = r'C:\\Users\\Johnathan\\Google Drive\\Grad School\\PSU_GIS_Cert\\GEOG 489\\FinalPrj\\data\\HighlandsProtectedLands.gdb\\highlandsMuni'\n\n# Planning and Preservation Designations\n# http://highlands-data-njhighlands.opendata.arcgis.com/datasets/preservation-and-planning-area\nplanPresPoly = r'C:\\Users\\Johnathan\\Google Drive\\Grad School\\PSU_GIS_Cert\\GEOG 489\\FinalPrj\\data\\HighlandsProtectedLands.gdb\\Preservation_and_Planning_Area'\n\n# Preserved Lands within the Highlands Region\n# http://highlands-data-njhighlands.opendata.arcgis.com/datasets/preserved-lands\npresLands = r'C:\\Users\\Johnathan\\Google Drive\\Grad School\\PSU_GIS_Cert\\GEOG 489\\FinalPrj\\data\\HighlandsProtectedLands.gdb\\Preserved_Lands'\n\n\n# Input feature classes - on disk\n# clipper = highlandsMuni \n# tobeclipped = [presLands, planPresPoly]\n\n# Output directory\noutFolder = r'C:\\Users\\Johnathan\\Google Drive\\Grad School\\PSU_GIS_Cert\\GEOG 489\\FinalPrj\\data\\output'\n\n# Check if output directory exists. 
Create a directory if one does not exist\nif os.path.exists(outFolder):\n if os.path.isdir(outFolder):\n print('The proper output folder exists, moving on')\n else:\n os.mkdir(outFolder)\n print('Created the output directory')\nelse: \n os.mkdir(outFolder)\n print('Created the output directory')\n\n\n\n''' In Memory Data '''\n\n# Make an in_memory feature layer for clip feature which is the Highlands Municipalities\nclipper = \"in_memory\" + \"\\\\\" + \"highlandsMuni\"\narcpy.MakeFeatureLayer_management(highlandsMuni, clipper)\n\n# Make an in_memory feature layer for Preserved lands\ninMemPresLands = \"in_memory\" + \"\\\\\" + \"Preserved_Lands\"\narcpy.MakeFeatureLayer_management(presLands, inMemPresLands)\n\n# Make an in_memory feature layer for Planning/Preservation Regions\ninMemPlanPresPoly = \"in_memory\" + \"\\\\\" + \"Preservation_and_Planning_Area\"\narcpy.MakeFeatureLayer_management(planPresPoly, inMemPlanPresPoly)\n\n# Add in memory preserved lands and planning/preservation regions to tobeclipped list\ntobeclipped = [inMemPresLands, inMemPlanPresPoly]\n\n\n''' Check for and use 64 bit processing '''\n\ndef get_install_path():\n ''' Return 64bit python install path from registry (if installed and registered),\n otherwise fall back to current 32bit process install path.\n '''\n if sys.maxsize > 2**32: return sys.exec_prefix #We're running in a 64bit process\n \n #We're 32 bit so see if there's a 64bit install\n path = r'SOFTWARE\\Python\\PythonCore\\2.7'\n \n from _winreg import OpenKey, QueryValue\n from _winreg import HKEY_LOCAL_MACHINE, KEY_READ, KEY_WOW64_64KEY\n \n try:\n with OpenKey(HKEY_LOCAL_MACHINE, path, 0, KEY_READ | KEY_WOW64_64KEY) as key:\n return QueryValue(key, \"InstallPath\").strip(os.sep) #We have a 64bit install, so return that.\n except: return sys.exec_prefix #No 64bit, so return 32bit path \n\n\n''' Multiprocessing Handler Function '''\n\ndef mp_handler():\n \n try:\n \n print(\"Creating Polygon OID list...\") \n \n # These are the fields we want to grab from the clip feature layer\n field = ['OID@', 'MUN_LABEL']\n \n # Create a list of object IDs for clipper polygons\n idList = []\n\n # Initialize list of municipality names (municipalities are used as clip features)\n clipperNameList = []\n\n # Iterate through the rows of the municipality feature layer (clipper) and return the OID and name field data\n with arcpy.da.SearchCursor(clipper, field) as cursor:\n for row in cursor:\n id = row[0] # Retrieve OID from first element in row \n name = row[1] # Retrieve Municipality name from second element in row\n name = name.replace(\" \", \"_\") # Replace illegal characters so we can use this field as the name of the output file later on\n name = name.replace(\"-\", \"_\")\n idList.append(id)\n clipperNameList.append(name)\n \n print(\"There are \" + str(len(idList)) + \" object IDs (polygons) to process.\") \n\n\n # Reset field variable to just that of the OIDFieldName of the municipality feature layer\n clipperDescObj = arcpy.Describe(clipper) \n field = clipperDescObj.OIDFieldName\n\n\n # Initialize tuples (not list because tuples are immutable) of tasks that will be sent to workers \n jobs = []\n\n '''\n Nested loop creates job list for each input feature layer of clip (preserved lands and planning/preservation regions) and each feature of clip feature layer\n Use enumerate to get index of tobeclipped list then assign value at that index to a variable holding one element (instead of a list)\n '''\n for i, item in enumerate (tobeclipped):\n tobeclippeditem = 
tobeclipped[i] # Get just one clip input feature layer\n j = 0 # Initialize index used for retrieving municipality name \n for id in idList:\n name = clipperNameList[j] # Get municipality name from current index\n j += 1 # Advance municipality name index\n jobs.append((clipper,tobeclippeditem,field,id,outFolder, name)) # Add tuples of the parameters that need to be given to the worker function to the jobs list\n\n print(\"Job list has \" + str(len(jobs)) + \" elements.\") \n\n\n ''' Multiprocessing Pool '''\n\n # Create and run multiprocessing pool.\n multiprocessing.set_executable(os.path.join(get_install_path(), 'pythonw.exe')) # make sure Python environment is used for running processes, even when this is run as a script tool\n\n print(\"Sending to pool\") \n\n cpuNum = multiprocessing.cpu_count() # determine number of cores to use\n print(\"There are: \" + str(cpuNum) + \" cpu cores on this machine\") \n\n with multiprocessing.Pool(processes=cpuNum) as pool: # Create the pool object \n res = pool.starmap(worker, jobs) # run jobs in job list; res is a list with return values of the worker function\n\n\n ''' Error Reporting if successful try '''\n \n failed = res.count(False) # count how many times False appears in the list with the return values\n if failed > 0:\n arcpy.AddError(\"{} workers failed!\".format(failed)) \n print(\"{} workers failed!\".format(failed)) \n\n\n # If the process was completed, print a message \n arcpy.AddMessage(\"Finished multiprocessing!\") \n print(\"Finished multiprocessing!\")\n\n # Clean up in_memory\n arcpy.Delete_management(\"in_memory\") \n\n # Print processing time\n arcpy.AddMessage(\"Total time: %s seconds\" % (time.time() - startTime))\n \n\n \n # Error Reporting if unsuccessful try \n except arcpy.ExecuteError:\n # Geoprocessor threw an error \n arcpy.AddError(arcpy.GetMessages(2)) \n print(\"Execute Error:\", arcpy.ExecuteError) \n except Exception as e: \n # Capture all other errors \n arcpy.AddError(str(e)) \n print(\"Exception:\", e)\n\n\n # Clean up in_memory\n arcpy.Delete_management(\"in_memory\") \n\n # Print processing time\n arcpy.AddMessage(\"Total time: %s seconds\" % (time.time() - startTime))\n\n\n\n''' Call multiprocessing handler function ''' \nif __name__ == '__main__': \n mp_handler() \n", "<docstring token>\nimport os, sys\nimport re\nimport arcpy\narcpy.env.overwriteOutput = True\nimport multiprocessing\nfrom workers import worker\nimport time\nstartTime = time.time()\narcpy.env.workspace = 'in_memory'\n<docstring token>\nnjMuni = (\n 'C:\\\\Users\\\\Johnathan\\\\Google Drive\\\\Grad School\\\\PSU_GIS_Cert\\\\GEOG 489\\\\FinalPrj\\\\data\\\\HighlandsProtectedLands.gdb\\\\NJ_Municipalities'\n )\nhighlandsBoundary = (\n 'C:\\\\Users\\\\Johnathan\\\\Google Drive\\\\Grad School\\\\PSU_GIS_Cert\\\\GEOG 489\\\\FinalPrj\\\\data\\\\HighlandsProtectedLands.gdb\\\\Highlands_Boundary'\n )\nhighlandsMuni = (\n 'C:\\\\Users\\\\Johnathan\\\\Google Drive\\\\Grad School\\\\PSU_GIS_Cert\\\\GEOG 489\\\\FinalPrj\\\\data\\\\HighlandsProtectedLands.gdb\\\\highlandsMuni'\n )\nplanPresPoly = (\n 'C:\\\\Users\\\\Johnathan\\\\Google Drive\\\\Grad School\\\\PSU_GIS_Cert\\\\GEOG 489\\\\FinalPrj\\\\data\\\\HighlandsProtectedLands.gdb\\\\Preservation_and_Planning_Area'\n )\npresLands = (\n 'C:\\\\Users\\\\Johnathan\\\\Google Drive\\\\Grad School\\\\PSU_GIS_Cert\\\\GEOG 489\\\\FinalPrj\\\\data\\\\HighlandsProtectedLands.gdb\\\\Preserved_Lands'\n )\noutFolder = (\n 'C:\\\\Users\\\\Johnathan\\\\Google Drive\\\\Grad School\\\\PSU_GIS_Cert\\\\GEOG 
489\\\\FinalPrj\\\\data\\\\output'\n )\nif os.path.exists(outFolder):\n if os.path.isdir(outFolder):\n print('The proper output folder exists, moving on')\n else:\n os.mkdir(outFolder)\n print('Created the output directory')\nelse:\n os.mkdir(outFolder)\n print('Created the output directory')\n<docstring token>\nclipper = 'in_memory' + '\\\\' + 'highlandsMuni'\narcpy.MakeFeatureLayer_management(highlandsMuni, clipper)\ninMemPresLands = 'in_memory' + '\\\\' + 'Preserved_Lands'\narcpy.MakeFeatureLayer_management(presLands, inMemPresLands)\ninMemPlanPresPoly = 'in_memory' + '\\\\' + 'Preservation_and_Planning_Area'\narcpy.MakeFeatureLayer_management(planPresPoly, inMemPlanPresPoly)\ntobeclipped = [inMemPresLands, inMemPlanPresPoly]\n<docstring token>\n\n\ndef get_install_path():\n \"\"\" Return 64bit python install path from registry (if installed and registered),\n otherwise fall back to current 32bit process install path.\n \"\"\"\n if sys.maxsize > 2 ** 32:\n return sys.exec_prefix\n path = 'SOFTWARE\\\\Python\\\\PythonCore\\\\2.7'\n from _winreg import OpenKey, QueryValue\n from _winreg import HKEY_LOCAL_MACHINE, KEY_READ, KEY_WOW64_64KEY\n try:\n with OpenKey(HKEY_LOCAL_MACHINE, path, 0, KEY_READ | KEY_WOW64_64KEY\n ) as key:\n return QueryValue(key, 'InstallPath').strip(os.sep)\n except:\n return sys.exec_prefix\n\n\n<docstring token>\n\n\ndef mp_handler():\n try:\n print('Creating Polygon OID list...')\n field = ['OID@', 'MUN_LABEL']\n idList = []\n clipperNameList = []\n with arcpy.da.SearchCursor(clipper, field) as cursor:\n for row in cursor:\n id = row[0]\n name = row[1]\n name = name.replace(' ', '_')\n name = name.replace('-', '_')\n idList.append(id)\n clipperNameList.append(name)\n print('There are ' + str(len(idList)) +\n ' object IDs (polygons) to process.')\n clipperDescObj = arcpy.Describe(clipper)\n field = clipperDescObj.OIDFieldName\n jobs = []\n \"\"\"\n Nested loop creates job list for each input feature layer of clip (preserved lands and planning/preservation regions) and each feature of clip feature layer\n Use enumerate to get index of tobeclipped list then assign value at that index to a variable holding one element (instead of a list)\n \"\"\"\n for i, item in enumerate(tobeclipped):\n tobeclippeditem = tobeclipped[i]\n j = 0\n for id in idList:\n name = clipperNameList[j]\n j += 1\n jobs.append((clipper, tobeclippeditem, field, id, outFolder,\n name))\n print('Job list has ' + str(len(jobs)) + ' elements.')\n \"\"\" Multiprocessing Pool \"\"\"\n multiprocessing.set_executable(os.path.join(get_install_path(),\n 'pythonw.exe'))\n print('Sending to pool')\n cpuNum = multiprocessing.cpu_count()\n print('There are: ' + str(cpuNum) + ' cpu cores on this machine')\n with multiprocessing.Pool(processes=cpuNum) as pool:\n res = pool.starmap(worker, jobs)\n \"\"\" Error Reporting if successful try \"\"\"\n failed = res.count(False)\n if failed > 0:\n arcpy.AddError('{} workers failed!'.format(failed))\n print('{} workers failed!'.format(failed))\n arcpy.AddMessage('Finished multiprocessing!')\n print('Finished multiprocessing!')\n arcpy.Delete_management('in_memory')\n arcpy.AddMessage('Total time: %s seconds' % (time.time() - startTime))\n except arcpy.ExecuteError:\n arcpy.AddError(arcpy.GetMessages(2))\n print('Execute Error:', arcpy.ExecuteError)\n except Exception as e:\n arcpy.AddError(str(e))\n print('Exception:', e)\n arcpy.Delete_management('in_memory')\n arcpy.AddMessage('Total time: %s seconds' % (time.time() - startTime))\n\n\n<docstring token>\nif __name__ == 
'__main__':\n mp_handler()\n", "<docstring token>\n<import token>\narcpy.env.overwriteOutput = True\n<import token>\nstartTime = time.time()\narcpy.env.workspace = 'in_memory'\n<docstring token>\nnjMuni = (\n 'C:\\\\Users\\\\Johnathan\\\\Google Drive\\\\Grad School\\\\PSU_GIS_Cert\\\\GEOG 489\\\\FinalPrj\\\\data\\\\HighlandsProtectedLands.gdb\\\\NJ_Municipalities'\n )\nhighlandsBoundary = (\n 'C:\\\\Users\\\\Johnathan\\\\Google Drive\\\\Grad School\\\\PSU_GIS_Cert\\\\GEOG 489\\\\FinalPrj\\\\data\\\\HighlandsProtectedLands.gdb\\\\Highlands_Boundary'\n )\nhighlandsMuni = (\n 'C:\\\\Users\\\\Johnathan\\\\Google Drive\\\\Grad School\\\\PSU_GIS_Cert\\\\GEOG 489\\\\FinalPrj\\\\data\\\\HighlandsProtectedLands.gdb\\\\highlandsMuni'\n )\nplanPresPoly = (\n 'C:\\\\Users\\\\Johnathan\\\\Google Drive\\\\Grad School\\\\PSU_GIS_Cert\\\\GEOG 489\\\\FinalPrj\\\\data\\\\HighlandsProtectedLands.gdb\\\\Preservation_and_Planning_Area'\n )\npresLands = (\n 'C:\\\\Users\\\\Johnathan\\\\Google Drive\\\\Grad School\\\\PSU_GIS_Cert\\\\GEOG 489\\\\FinalPrj\\\\data\\\\HighlandsProtectedLands.gdb\\\\Preserved_Lands'\n )\noutFolder = (\n 'C:\\\\Users\\\\Johnathan\\\\Google Drive\\\\Grad School\\\\PSU_GIS_Cert\\\\GEOG 489\\\\FinalPrj\\\\data\\\\output'\n )\nif os.path.exists(outFolder):\n if os.path.isdir(outFolder):\n print('The proper output folder exists, moving on')\n else:\n os.mkdir(outFolder)\n print('Created the output directory')\nelse:\n os.mkdir(outFolder)\n print('Created the output directory')\n<docstring token>\nclipper = 'in_memory' + '\\\\' + 'highlandsMuni'\narcpy.MakeFeatureLayer_management(highlandsMuni, clipper)\ninMemPresLands = 'in_memory' + '\\\\' + 'Preserved_Lands'\narcpy.MakeFeatureLayer_management(presLands, inMemPresLands)\ninMemPlanPresPoly = 'in_memory' + '\\\\' + 'Preservation_and_Planning_Area'\narcpy.MakeFeatureLayer_management(planPresPoly, inMemPlanPresPoly)\ntobeclipped = [inMemPresLands, inMemPlanPresPoly]\n<docstring token>\n\n\ndef get_install_path():\n \"\"\" Return 64bit python install path from registry (if installed and registered),\n otherwise fall back to current 32bit process install path.\n \"\"\"\n if sys.maxsize > 2 ** 32:\n return sys.exec_prefix\n path = 'SOFTWARE\\\\Python\\\\PythonCore\\\\2.7'\n from _winreg import OpenKey, QueryValue\n from _winreg import HKEY_LOCAL_MACHINE, KEY_READ, KEY_WOW64_64KEY\n try:\n with OpenKey(HKEY_LOCAL_MACHINE, path, 0, KEY_READ | KEY_WOW64_64KEY\n ) as key:\n return QueryValue(key, 'InstallPath').strip(os.sep)\n except:\n return sys.exec_prefix\n\n\n<docstring token>\n\n\ndef mp_handler():\n try:\n print('Creating Polygon OID list...')\n field = ['OID@', 'MUN_LABEL']\n idList = []\n clipperNameList = []\n with arcpy.da.SearchCursor(clipper, field) as cursor:\n for row in cursor:\n id = row[0]\n name = row[1]\n name = name.replace(' ', '_')\n name = name.replace('-', '_')\n idList.append(id)\n clipperNameList.append(name)\n print('There are ' + str(len(idList)) +\n ' object IDs (polygons) to process.')\n clipperDescObj = arcpy.Describe(clipper)\n field = clipperDescObj.OIDFieldName\n jobs = []\n \"\"\"\n Nested loop creates job list for each input feature layer of clip (preserved lands and planning/preservation regions) and each feature of clip feature layer\n Use enumerate to get index of tobeclipped list then assign value at that index to a variable holding one element (instead of a list)\n \"\"\"\n for i, item in enumerate(tobeclipped):\n tobeclippeditem = tobeclipped[i]\n j = 0\n for id in idList:\n name = clipperNameList[j]\n j += 
1\n jobs.append((clipper, tobeclippeditem, field, id, outFolder,\n name))\n print('Job list has ' + str(len(jobs)) + ' elements.')\n \"\"\" Multiprocessing Pool \"\"\"\n multiprocessing.set_executable(os.path.join(get_install_path(),\n 'pythonw.exe'))\n print('Sending to pool')\n cpuNum = multiprocessing.cpu_count()\n print('There are: ' + str(cpuNum) + ' cpu cores on this machine')\n with multiprocessing.Pool(processes=cpuNum) as pool:\n res = pool.starmap(worker, jobs)\n \"\"\" Error Reporting if successful try \"\"\"\n failed = res.count(False)\n if failed > 0:\n arcpy.AddError('{} workers failed!'.format(failed))\n print('{} workers failed!'.format(failed))\n arcpy.AddMessage('Finished multiprocessing!')\n print('Finished multiprocessing!')\n arcpy.Delete_management('in_memory')\n arcpy.AddMessage('Total time: %s seconds' % (time.time() - startTime))\n except arcpy.ExecuteError:\n arcpy.AddError(arcpy.GetMessages(2))\n print('Execute Error:', arcpy.ExecuteError)\n except Exception as e:\n arcpy.AddError(str(e))\n print('Exception:', e)\n arcpy.Delete_management('in_memory')\n arcpy.AddMessage('Total time: %s seconds' % (time.time() - startTime))\n\n\n<docstring token>\nif __name__ == '__main__':\n mp_handler()\n", "<docstring token>\n<import token>\n<assignment token>\n<import token>\n<assignment token>\n<docstring token>\n<assignment token>\nif os.path.exists(outFolder):\n if os.path.isdir(outFolder):\n print('The proper output folder exists, moving on')\n else:\n os.mkdir(outFolder)\n print('Created the output directory')\nelse:\n os.mkdir(outFolder)\n print('Created the output directory')\n<docstring token>\n<assignment token>\narcpy.MakeFeatureLayer_management(highlandsMuni, clipper)\n<assignment token>\narcpy.MakeFeatureLayer_management(presLands, inMemPresLands)\n<assignment token>\narcpy.MakeFeatureLayer_management(planPresPoly, inMemPlanPresPoly)\n<assignment token>\n<docstring token>\n\n\ndef get_install_path():\n \"\"\" Return 64bit python install path from registry (if installed and registered),\n otherwise fall back to current 32bit process install path.\n \"\"\"\n if sys.maxsize > 2 ** 32:\n return sys.exec_prefix\n path = 'SOFTWARE\\\\Python\\\\PythonCore\\\\2.7'\n from _winreg import OpenKey, QueryValue\n from _winreg import HKEY_LOCAL_MACHINE, KEY_READ, KEY_WOW64_64KEY\n try:\n with OpenKey(HKEY_LOCAL_MACHINE, path, 0, KEY_READ | KEY_WOW64_64KEY\n ) as key:\n return QueryValue(key, 'InstallPath').strip(os.sep)\n except:\n return sys.exec_prefix\n\n\n<docstring token>\n\n\ndef mp_handler():\n try:\n print('Creating Polygon OID list...')\n field = ['OID@', 'MUN_LABEL']\n idList = []\n clipperNameList = []\n with arcpy.da.SearchCursor(clipper, field) as cursor:\n for row in cursor:\n id = row[0]\n name = row[1]\n name = name.replace(' ', '_')\n name = name.replace('-', '_')\n idList.append(id)\n clipperNameList.append(name)\n print('There are ' + str(len(idList)) +\n ' object IDs (polygons) to process.')\n clipperDescObj = arcpy.Describe(clipper)\n field = clipperDescObj.OIDFieldName\n jobs = []\n \"\"\"\n Nested loop creates job list for each input feature layer of clip (preserved lands and planning/preservation regions) and each feature of clip feature layer\n Use enumerate to get index of tobeclipped list then assign value at that index to a variable holding one element (instead of a list)\n \"\"\"\n for i, item in enumerate(tobeclipped):\n tobeclippeditem = tobeclipped[i]\n j = 0\n for id in idList:\n name = clipperNameList[j]\n j += 1\n jobs.append((clipper, 
tobeclippeditem, field, id, outFolder,\n name))\n print('Job list has ' + str(len(jobs)) + ' elements.')\n \"\"\" Multiprocessing Pool \"\"\"\n multiprocessing.set_executable(os.path.join(get_install_path(),\n 'pythonw.exe'))\n print('Sending to pool')\n cpuNum = multiprocessing.cpu_count()\n print('There are: ' + str(cpuNum) + ' cpu cores on this machine')\n with multiprocessing.Pool(processes=cpuNum) as pool:\n res = pool.starmap(worker, jobs)\n \"\"\" Error Reporting if successful try \"\"\"\n failed = res.count(False)\n if failed > 0:\n arcpy.AddError('{} workers failed!'.format(failed))\n print('{} workers failed!'.format(failed))\n arcpy.AddMessage('Finished multiprocessing!')\n print('Finished multiprocessing!')\n arcpy.Delete_management('in_memory')\n arcpy.AddMessage('Total time: %s seconds' % (time.time() - startTime))\n except arcpy.ExecuteError:\n arcpy.AddError(arcpy.GetMessages(2))\n print('Execute Error:', arcpy.ExecuteError)\n except Exception as e:\n arcpy.AddError(str(e))\n print('Exception:', e)\n arcpy.Delete_management('in_memory')\n arcpy.AddMessage('Total time: %s seconds' % (time.time() - startTime))\n\n\n<docstring token>\nif __name__ == '__main__':\n mp_handler()\n", "<docstring token>\n<import token>\n<assignment token>\n<import token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<docstring token>\n\n\ndef get_install_path():\n \"\"\" Return 64bit python install path from registry (if installed and registered),\n otherwise fall back to current 32bit process install path.\n \"\"\"\n if sys.maxsize > 2 ** 32:\n return sys.exec_prefix\n path = 'SOFTWARE\\\\Python\\\\PythonCore\\\\2.7'\n from _winreg import OpenKey, QueryValue\n from _winreg import HKEY_LOCAL_MACHINE, KEY_READ, KEY_WOW64_64KEY\n try:\n with OpenKey(HKEY_LOCAL_MACHINE, path, 0, KEY_READ | KEY_WOW64_64KEY\n ) as key:\n return QueryValue(key, 'InstallPath').strip(os.sep)\n except:\n return sys.exec_prefix\n\n\n<docstring token>\n\n\ndef mp_handler():\n try:\n print('Creating Polygon OID list...')\n field = ['OID@', 'MUN_LABEL']\n idList = []\n clipperNameList = []\n with arcpy.da.SearchCursor(clipper, field) as cursor:\n for row in cursor:\n id = row[0]\n name = row[1]\n name = name.replace(' ', '_')\n name = name.replace('-', '_')\n idList.append(id)\n clipperNameList.append(name)\n print('There are ' + str(len(idList)) +\n ' object IDs (polygons) to process.')\n clipperDescObj = arcpy.Describe(clipper)\n field = clipperDescObj.OIDFieldName\n jobs = []\n \"\"\"\n Nested loop creates job list for each input feature layer of clip (preserved lands and planning/preservation regions) and each feature of clip feature layer\n Use enumerate to get index of tobeclipped list then assign value at that index to a variable holding one element (instead of a list)\n \"\"\"\n for i, item in enumerate(tobeclipped):\n tobeclippeditem = tobeclipped[i]\n j = 0\n for id in idList:\n name = clipperNameList[j]\n j += 1\n jobs.append((clipper, tobeclippeditem, field, id, outFolder,\n name))\n print('Job list has ' + str(len(jobs)) + ' elements.')\n \"\"\" Multiprocessing Pool \"\"\"\n multiprocessing.set_executable(os.path.join(get_install_path(),\n 'pythonw.exe'))\n print('Sending to pool')\n cpuNum = multiprocessing.cpu_count()\n print('There are: ' + str(cpuNum) + ' cpu cores on this machine')\n with multiprocessing.Pool(processes=cpuNum) as pool:\n res = 
pool.starmap(worker, jobs)\n \"\"\" Error Reporting if successful try \"\"\"\n failed = res.count(False)\n if failed > 0:\n arcpy.AddError('{} workers failed!'.format(failed))\n print('{} workers failed!'.format(failed))\n arcpy.AddMessage('Finished multiprocessing!')\n print('Finished multiprocessing!')\n arcpy.Delete_management('in_memory')\n arcpy.AddMessage('Total time: %s seconds' % (time.time() - startTime))\n except arcpy.ExecuteError:\n arcpy.AddError(arcpy.GetMessages(2))\n print('Execute Error:', arcpy.ExecuteError)\n except Exception as e:\n arcpy.AddError(str(e))\n print('Exception:', e)\n arcpy.Delete_management('in_memory')\n arcpy.AddMessage('Total time: %s seconds' % (time.time() - startTime))\n\n\n<docstring token>\n<code token>\n", "<docstring token>\n<import token>\n<assignment token>\n<import token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<docstring token>\n<function token>\n<docstring token>\n\n\ndef mp_handler():\n try:\n print('Creating Polygon OID list...')\n field = ['OID@', 'MUN_LABEL']\n idList = []\n clipperNameList = []\n with arcpy.da.SearchCursor(clipper, field) as cursor:\n for row in cursor:\n id = row[0]\n name = row[1]\n name = name.replace(' ', '_')\n name = name.replace('-', '_')\n idList.append(id)\n clipperNameList.append(name)\n print('There are ' + str(len(idList)) +\n ' object IDs (polygons) to process.')\n clipperDescObj = arcpy.Describe(clipper)\n field = clipperDescObj.OIDFieldName\n jobs = []\n \"\"\"\n Nested loop creates job list for each input feature layer of clip (preserved lands and planning/preservation regions) and each feature of clip feature layer\n Use enumerate to get index of tobeclipped list then assign value at that index to a variable holding one element (instead of a list)\n \"\"\"\n for i, item in enumerate(tobeclipped):\n tobeclippeditem = tobeclipped[i]\n j = 0\n for id in idList:\n name = clipperNameList[j]\n j += 1\n jobs.append((clipper, tobeclippeditem, field, id, outFolder,\n name))\n print('Job list has ' + str(len(jobs)) + ' elements.')\n \"\"\" Multiprocessing Pool \"\"\"\n multiprocessing.set_executable(os.path.join(get_install_path(),\n 'pythonw.exe'))\n print('Sending to pool')\n cpuNum = multiprocessing.cpu_count()\n print('There are: ' + str(cpuNum) + ' cpu cores on this machine')\n with multiprocessing.Pool(processes=cpuNum) as pool:\n res = pool.starmap(worker, jobs)\n \"\"\" Error Reporting if successful try \"\"\"\n failed = res.count(False)\n if failed > 0:\n arcpy.AddError('{} workers failed!'.format(failed))\n print('{} workers failed!'.format(failed))\n arcpy.AddMessage('Finished multiprocessing!')\n print('Finished multiprocessing!')\n arcpy.Delete_management('in_memory')\n arcpy.AddMessage('Total time: %s seconds' % (time.time() - startTime))\n except arcpy.ExecuteError:\n arcpy.AddError(arcpy.GetMessages(2))\n print('Execute Error:', arcpy.ExecuteError)\n except Exception as e:\n arcpy.AddError(str(e))\n print('Exception:', e)\n arcpy.Delete_management('in_memory')\n arcpy.AddMessage('Total time: %s seconds' % (time.time() - startTime))\n\n\n<docstring token>\n<code token>\n", "<docstring token>\n<import token>\n<assignment token>\n<import token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code 
token>\n<assignment token>\n<docstring token>\n<function token>\n<docstring token>\n<function token>\n<docstring token>\n<code token>\n" ]
false
99,456
9b551fbcd94e2ce2ebe7dd777d642477bdca94d1
from django.contrib.auth.models import User from userbetting.models import Game, Team, Bet from rest_framework import routers, serializers, viewsets # Serializers define the API representation. class BetSerializer(serializers.HyperlinkedModelSerializer): user = serializers.StringRelatedField(many=False) chosen_team = serializers.StringRelatedField(many=False) class Meta: model = Bet fields = ('bet_id', 'user', 'chosen_team', 'amount') class UserSerializer(serializers.HyperlinkedModelSerializer): user_bets = BetSerializer(many=True, read_only=True) class Meta: model = User fields = ('url', 'username', 'email', 'is_staff', 'user_bets') class GameSerializer(serializers.HyperlinkedModelSerializer): team_a = serializers.StringRelatedField(many=False) team_b = serializers.StringRelatedField(many=False) game_bets = BetSerializer(many=True, read_only=True) class Meta: model = Game fields = ('game_id', 'team_a', 'team_b', 'videogame', 'game_date', 'winning_team', 'game_bets') # ViewSets define the view behavior. class UserViewSet(viewsets.ModelViewSet): queryset = User.objects.all() serializer_class = UserSerializer class GameViewSet(viewsets.ModelViewSet): queryset = Game.objects.all() serializer_class = GameSerializer # Routers provide an easy way of automatically determining the URL conf. router = routers.DefaultRouter() router.register(r'users', UserViewSet) router.register(r'games', GameViewSet)
[ "from django.contrib.auth.models import User\nfrom userbetting.models import Game, Team, Bet\nfrom rest_framework import routers, serializers, viewsets\n\n\n# Serializers define the API representation.\n\n\nclass BetSerializer(serializers.HyperlinkedModelSerializer):\n user = serializers.StringRelatedField(many=False)\n chosen_team = serializers.StringRelatedField(many=False)\n\n class Meta:\n model = Bet\n fields = ('bet_id', 'user', 'chosen_team', 'amount')\n\nclass UserSerializer(serializers.HyperlinkedModelSerializer):\n user_bets = BetSerializer(many=True, read_only=True)\n\n class Meta:\n model = User\n fields = ('url', 'username', 'email', 'is_staff', 'user_bets')\n\nclass GameSerializer(serializers.HyperlinkedModelSerializer):\n team_a = serializers.StringRelatedField(many=False)\n team_b = serializers.StringRelatedField(many=False)\n game_bets = BetSerializer(many=True, read_only=True)\n\n class Meta:\n model = Game\n fields = ('game_id', 'team_a', 'team_b', 'videogame', 'game_date', 'winning_team', 'game_bets')\n\n# ViewSets define the view behavior.\nclass UserViewSet(viewsets.ModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n\nclass GameViewSet(viewsets.ModelViewSet):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n\n# Routers provide an easy way of automatically determining the URL conf.\nrouter = routers.DefaultRouter()\nrouter.register(r'users', UserViewSet)\nrouter.register(r'games', GameViewSet)\n", "from django.contrib.auth.models import User\nfrom userbetting.models import Game, Team, Bet\nfrom rest_framework import routers, serializers, viewsets\n\n\nclass BetSerializer(serializers.HyperlinkedModelSerializer):\n user = serializers.StringRelatedField(many=False)\n chosen_team = serializers.StringRelatedField(many=False)\n\n\n class Meta:\n model = Bet\n fields = 'bet_id', 'user', 'chosen_team', 'amount'\n\n\nclass UserSerializer(serializers.HyperlinkedModelSerializer):\n user_bets = BetSerializer(many=True, read_only=True)\n\n\n class Meta:\n model = User\n fields = 'url', 'username', 'email', 'is_staff', 'user_bets'\n\n\nclass GameSerializer(serializers.HyperlinkedModelSerializer):\n team_a = serializers.StringRelatedField(many=False)\n team_b = serializers.StringRelatedField(many=False)\n game_bets = BetSerializer(many=True, read_only=True)\n\n\n class Meta:\n model = Game\n fields = ('game_id', 'team_a', 'team_b', 'videogame', 'game_date',\n 'winning_team', 'game_bets')\n\n\nclass UserViewSet(viewsets.ModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n\n\nclass GameViewSet(viewsets.ModelViewSet):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n\n\nrouter = routers.DefaultRouter()\nrouter.register('users', UserViewSet)\nrouter.register('games', GameViewSet)\n", "<import token>\n\n\nclass BetSerializer(serializers.HyperlinkedModelSerializer):\n user = serializers.StringRelatedField(many=False)\n chosen_team = serializers.StringRelatedField(many=False)\n\n\n class Meta:\n model = Bet\n fields = 'bet_id', 'user', 'chosen_team', 'amount'\n\n\nclass UserSerializer(serializers.HyperlinkedModelSerializer):\n user_bets = BetSerializer(many=True, read_only=True)\n\n\n class Meta:\n model = User\n fields = 'url', 'username', 'email', 'is_staff', 'user_bets'\n\n\nclass GameSerializer(serializers.HyperlinkedModelSerializer):\n team_a = serializers.StringRelatedField(many=False)\n team_b = serializers.StringRelatedField(many=False)\n game_bets = BetSerializer(many=True, 
read_only=True)\n\n\n class Meta:\n model = Game\n fields = ('game_id', 'team_a', 'team_b', 'videogame', 'game_date',\n 'winning_team', 'game_bets')\n\n\nclass UserViewSet(viewsets.ModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n\n\nclass GameViewSet(viewsets.ModelViewSet):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n\n\nrouter = routers.DefaultRouter()\nrouter.register('users', UserViewSet)\nrouter.register('games', GameViewSet)\n", "<import token>\n\n\nclass BetSerializer(serializers.HyperlinkedModelSerializer):\n user = serializers.StringRelatedField(many=False)\n chosen_team = serializers.StringRelatedField(many=False)\n\n\n class Meta:\n model = Bet\n fields = 'bet_id', 'user', 'chosen_team', 'amount'\n\n\nclass UserSerializer(serializers.HyperlinkedModelSerializer):\n user_bets = BetSerializer(many=True, read_only=True)\n\n\n class Meta:\n model = User\n fields = 'url', 'username', 'email', 'is_staff', 'user_bets'\n\n\nclass GameSerializer(serializers.HyperlinkedModelSerializer):\n team_a = serializers.StringRelatedField(many=False)\n team_b = serializers.StringRelatedField(many=False)\n game_bets = BetSerializer(many=True, read_only=True)\n\n\n class Meta:\n model = Game\n fields = ('game_id', 'team_a', 'team_b', 'videogame', 'game_date',\n 'winning_team', 'game_bets')\n\n\nclass UserViewSet(viewsets.ModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n\n\nclass GameViewSet(viewsets.ModelViewSet):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n\n\n<assignment token>\nrouter.register('users', UserViewSet)\nrouter.register('games', GameViewSet)\n", "<import token>\n\n\nclass BetSerializer(serializers.HyperlinkedModelSerializer):\n user = serializers.StringRelatedField(many=False)\n chosen_team = serializers.StringRelatedField(many=False)\n\n\n class Meta:\n model = Bet\n fields = 'bet_id', 'user', 'chosen_team', 'amount'\n\n\nclass UserSerializer(serializers.HyperlinkedModelSerializer):\n user_bets = BetSerializer(many=True, read_only=True)\n\n\n class Meta:\n model = User\n fields = 'url', 'username', 'email', 'is_staff', 'user_bets'\n\n\nclass GameSerializer(serializers.HyperlinkedModelSerializer):\n team_a = serializers.StringRelatedField(many=False)\n team_b = serializers.StringRelatedField(many=False)\n game_bets = BetSerializer(many=True, read_only=True)\n\n\n class Meta:\n model = Game\n fields = ('game_id', 'team_a', 'team_b', 'videogame', 'game_date',\n 'winning_team', 'game_bets')\n\n\nclass UserViewSet(viewsets.ModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n\n\nclass GameViewSet(viewsets.ModelViewSet):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n\n\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass BetSerializer(serializers.HyperlinkedModelSerializer):\n <assignment token>\n <assignment token>\n\n\n class Meta:\n model = Bet\n fields = 'bet_id', 'user', 'chosen_team', 'amount'\n\n\nclass UserSerializer(serializers.HyperlinkedModelSerializer):\n user_bets = BetSerializer(many=True, read_only=True)\n\n\n class Meta:\n model = User\n fields = 'url', 'username', 'email', 'is_staff', 'user_bets'\n\n\nclass GameSerializer(serializers.HyperlinkedModelSerializer):\n team_a = serializers.StringRelatedField(many=False)\n team_b = serializers.StringRelatedField(many=False)\n game_bets = BetSerializer(many=True, read_only=True)\n\n\n class Meta:\n model = Game\n fields = ('game_id', 'team_a', 
'team_b', 'videogame', 'game_date',\n 'winning_team', 'game_bets')\n\n\nclass UserViewSet(viewsets.ModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n\n\nclass GameViewSet(viewsets.ModelViewSet):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n\n\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n\n\nclass UserSerializer(serializers.HyperlinkedModelSerializer):\n user_bets = BetSerializer(many=True, read_only=True)\n\n\n class Meta:\n model = User\n fields = 'url', 'username', 'email', 'is_staff', 'user_bets'\n\n\nclass GameSerializer(serializers.HyperlinkedModelSerializer):\n team_a = serializers.StringRelatedField(many=False)\n team_b = serializers.StringRelatedField(many=False)\n game_bets = BetSerializer(many=True, read_only=True)\n\n\n class Meta:\n model = Game\n fields = ('game_id', 'team_a', 'team_b', 'videogame', 'game_date',\n 'winning_team', 'game_bets')\n\n\nclass UserViewSet(viewsets.ModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n\n\nclass GameViewSet(viewsets.ModelViewSet):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n\n\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n\n\nclass UserSerializer(serializers.HyperlinkedModelSerializer):\n <assignment token>\n\n\n class Meta:\n model = User\n fields = 'url', 'username', 'email', 'is_staff', 'user_bets'\n\n\nclass GameSerializer(serializers.HyperlinkedModelSerializer):\n team_a = serializers.StringRelatedField(many=False)\n team_b = serializers.StringRelatedField(many=False)\n game_bets = BetSerializer(many=True, read_only=True)\n\n\n class Meta:\n model = Game\n fields = ('game_id', 'team_a', 'team_b', 'videogame', 'game_date',\n 'winning_team', 'game_bets')\n\n\nclass UserViewSet(viewsets.ModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n\n\nclass GameViewSet(viewsets.ModelViewSet):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n\n\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n<class token>\n\n\nclass GameSerializer(serializers.HyperlinkedModelSerializer):\n team_a = serializers.StringRelatedField(many=False)\n team_b = serializers.StringRelatedField(many=False)\n game_bets = BetSerializer(many=True, read_only=True)\n\n\n class Meta:\n model = Game\n fields = ('game_id', 'team_a', 'team_b', 'videogame', 'game_date',\n 'winning_team', 'game_bets')\n\n\nclass UserViewSet(viewsets.ModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n\n\nclass GameViewSet(viewsets.ModelViewSet):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n\n\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n<class token>\n\n\nclass GameSerializer(serializers.HyperlinkedModelSerializer):\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n model = Game\n fields = ('game_id', 'team_a', 'team_b', 'videogame', 'game_date',\n 'winning_team', 'game_bets')\n\n\nclass UserViewSet(viewsets.ModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n\n\nclass GameViewSet(viewsets.ModelViewSet):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n\n\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserViewSet(viewsets.ModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n\n\nclass 
GameViewSet(viewsets.ModelViewSet):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n\n\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserViewSet(viewsets.ModelViewSet):\n <assignment token>\n <assignment token>\n\n\nclass GameViewSet(viewsets.ModelViewSet):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n\n\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass GameViewSet(viewsets.ModelViewSet):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n\n\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass GameViewSet(viewsets.ModelViewSet):\n <assignment token>\n <assignment token>\n\n\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<assignment token>\n<code token>\n" ]
false
99,457
581cba92406db5624b8f86356f0aaade55c56180
token = '1159459407:AAH1Gj1BXjiy88GMxpMdyZHbV5UeO_tcirU' help_text = 'Этот бот поможет тебе узнать текущую температру воздуха в твоем городе. Также он подберёт тебе одежду ' \ 'под погоду. Просто введи название своего города '
[ "token = '1159459407:AAH1Gj1BXjiy88GMxpMdyZHbV5UeO_tcirU'\nhelp_text = 'Этот бот поможет тебе узнать текущую температру воздуха в твоем городе. Также он подберёт тебе одежду ' \\\n 'под погоду. Просто введи название своего города '\n", "token = '1159459407:AAH1Gj1BXjiy88GMxpMdyZHbV5UeO_tcirU'\nhelp_text = (\n 'Этот бот поможет тебе узнать текущую температру воздуха в твоем городе. Также он подберёт тебе одежду под погоду. Просто введи название своего города '\n )\n", "<assignment token>\n" ]
false
99,458
026306035403ba02e7a5d266373f76cf21098bfc
#Python tiene funciones para crear, leer, actualizar y eliminar archivos #Abrir un archivo myFile= open('myfile.txt', 'w') #Conseguir informacion print('Nombre: ', myFile.name) print('Cerrado: ', myFile.closed) print('Modo de apertura: ', myFile.mode) #Escribir en el archivo myFile.write('Estoy aprendiendo Python,') myFile.write(' es un lenguaje chido') myFile.close() #Adjuntar al archivo myFile= open('myfile.txt', 'a') myFile.write('\nDespues inicio Django') myFile.close() #Leer un archivo myFile= open('myfile.txt', 'r+') texto= myFile.read() print(texto)
[ "#Python tiene funciones para crear, leer, actualizar y eliminar archivos\r\n\r\n#Abrir un archivo\r\nmyFile= open('myfile.txt', 'w')\r\n\r\n#Conseguir informacion\r\nprint('Nombre: ', myFile.name)\r\nprint('Cerrado: ', myFile.closed)\r\nprint('Modo de apertura: ', myFile.mode)\r\n\r\n#Escribir en el archivo\r\nmyFile.write('Estoy aprendiendo Python,')\r\nmyFile.write(' es un lenguaje chido')\r\nmyFile.close()\r\n\r\n#Adjuntar al archivo\r\nmyFile= open('myfile.txt', 'a')\r\nmyFile.write('\\nDespues inicio Django')\r\nmyFile.close()\r\n\r\n#Leer un archivo\r\nmyFile= open('myfile.txt', 'r+')\r\ntexto= myFile.read()\r\nprint(texto)\r\n", "myFile = open('myfile.txt', 'w')\nprint('Nombre: ', myFile.name)\nprint('Cerrado: ', myFile.closed)\nprint('Modo de apertura: ', myFile.mode)\nmyFile.write('Estoy aprendiendo Python,')\nmyFile.write(' es un lenguaje chido')\nmyFile.close()\nmyFile = open('myfile.txt', 'a')\nmyFile.write(\"\"\"\nDespues inicio Django\"\"\")\nmyFile.close()\nmyFile = open('myfile.txt', 'r+')\ntexto = myFile.read()\nprint(texto)\n", "<assignment token>\nprint('Nombre: ', myFile.name)\nprint('Cerrado: ', myFile.closed)\nprint('Modo de apertura: ', myFile.mode)\nmyFile.write('Estoy aprendiendo Python,')\nmyFile.write(' es un lenguaje chido')\nmyFile.close()\n<assignment token>\nmyFile.write(\"\"\"\nDespues inicio Django\"\"\")\nmyFile.close()\n<assignment token>\nprint(texto)\n", "<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
99,459
5dd164ea920fbcb9be4b3846b7aaee2e245c6923
from setuptools import setup pypi_classifiers = [ 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', "Development Status :: 4 - Beta", "Environment :: Console", "Operating System :: OS Independent", 'Intended Audience :: Science/Research', 'Natural Language :: English', 'Topic :: Scientific/Engineering :: Bio-Informatics', "Topic :: Software Development :: Libraries :: Python Modules", 'License :: OSI Approved :: MIT License', ] install_requires = [ 'biopython>=1.70', ] desc = """For a set of reference transposons, collect instances from one or more genomes and write as multi-FASTA files.""" setup(name='reunite', version='1.0.0', description=desc, url='https://github.com/Adamtaranto/TE-Reunite', author='Adam Taranto', author_email='[email protected]', license='MIT', packages=['reunite'], classifiers=pypi_classifiers, keywords=["Transposon","TE","repeat","transposon"], install_requires=install_requires, include_package_data=True, zip_safe=False, entry_points={ 'console_scripts': [ 'reunite=reunite.run_cmd:main', ], }, )
[ "from setuptools import setup\n\npypi_classifiers = [\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n \"Development Status :: 4 - Beta\",\n \"Environment :: Console\",\n \"Operating System :: OS Independent\",\n 'Intended Audience :: Science/Research',\n 'Natural Language :: English',\n 'Topic :: Scientific/Engineering :: Bio-Informatics',\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n 'License :: OSI Approved :: MIT License',\n]\n\ninstall_requires = [\n 'biopython>=1.70',\n]\n\ndesc = \"\"\"For a set of reference transposons, collect instances from one or more genomes and write as multi-FASTA files.\"\"\"\n\nsetup(name='reunite',\n version='1.0.0',\n description=desc,\n url='https://github.com/Adamtaranto/TE-Reunite',\n author='Adam Taranto',\n author_email='[email protected]',\n license='MIT',\n packages=['reunite'],\n classifiers=pypi_classifiers,\n keywords=[\"Transposon\",\"TE\",\"repeat\",\"transposon\"],\n install_requires=install_requires,\n include_package_data=True,\n zip_safe=False,\n entry_points={\n 'console_scripts': [\n 'reunite=reunite.run_cmd:main',\n ],\n },\n )\n", "from setuptools import setup\npypi_classifiers = ['Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3', 'Development Status :: 4 - Beta',\n 'Environment :: Console', 'Operating System :: OS Independent',\n 'Intended Audience :: Science/Research', 'Natural Language :: English',\n 'Topic :: Scientific/Engineering :: Bio-Informatics',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'License :: OSI Approved :: MIT License']\ninstall_requires = ['biopython>=1.70']\ndesc = (\n 'For a set of reference transposons, collect instances from one or more genomes and write as multi-FASTA files.'\n )\nsetup(name='reunite', version='1.0.0', description=desc, url=\n 'https://github.com/Adamtaranto/TE-Reunite', author='Adam Taranto',\n author_email='[email protected]', license='MIT', packages=[\n 'reunite'], classifiers=pypi_classifiers, keywords=['Transposon', 'TE',\n 'repeat', 'transposon'], install_requires=install_requires,\n include_package_data=True, zip_safe=False, entry_points={\n 'console_scripts': ['reunite=reunite.run_cmd:main']})\n", "<import token>\npypi_classifiers = ['Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3', 'Development Status :: 4 - Beta',\n 'Environment :: Console', 'Operating System :: OS Independent',\n 'Intended Audience :: Science/Research', 'Natural Language :: English',\n 'Topic :: Scientific/Engineering :: Bio-Informatics',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'License :: OSI Approved :: MIT License']\ninstall_requires = ['biopython>=1.70']\ndesc = (\n 'For a set of reference transposons, collect instances from one or more genomes and write as multi-FASTA files.'\n )\nsetup(name='reunite', version='1.0.0', description=desc, url=\n 'https://github.com/Adamtaranto/TE-Reunite', author='Adam Taranto',\n author_email='[email protected]', license='MIT', packages=[\n 'reunite'], classifiers=pypi_classifiers, keywords=['Transposon', 'TE',\n 'repeat', 'transposon'], install_requires=install_requires,\n include_package_data=True, zip_safe=False, entry_points={\n 'console_scripts': ['reunite=reunite.run_cmd:main']})\n", "<import token>\n<assignment token>\nsetup(name='reunite', version='1.0.0', description=desc, url=\n 'https://github.com/Adamtaranto/TE-Reunite', author='Adam Taranto',\n author_email='[email protected]', 
license='MIT', packages=[\n 'reunite'], classifiers=pypi_classifiers, keywords=['Transposon', 'TE',\n 'repeat', 'transposon'], install_requires=install_requires,\n include_package_data=True, zip_safe=False, entry_points={\n 'console_scripts': ['reunite=reunite.run_cmd:main']})\n", "<import token>\n<assignment token>\n<code token>\n" ]
false
99,460
d6764c500c486f8c61d8b0983dead70e28b50c07
#!/usr/bin/env python # -*- coding:utf8 -*- # auther; 18793 # Date:2019/5/22 10:49 # filename: pymysql_有条件的查询.py import pymysql # 1.建立数据库连接 connection = pymysql.connect(host='localhost', user='root', password='admin#123', database='hujianli2', charset='utf8') # 2.创建游标对象 try: with connection.cursor() as cursor: # 3.执行SQL操作 sql = 'select name,userid from user where userid > %(id)s' cursor.execute(sql, {'id': 0}) # 4.提取结果集 result_set = cursor.fetchall() for row in result_set: print("id:{0} - name:{1}".format(row[1], row[0])) # 5.with代码块结束,关闭游标 finally: # 6.关闭数据连接 connection.close()
[ "#!/usr/bin/env python\n# -*- coding:utf8 -*-\n# auther; 18793\n# Date:2019/5/22 10:49\n# filename: pymysql_有条件的查询.py\nimport pymysql\n\n# 1.建立数据库连接\nconnection = pymysql.connect(host='localhost',\n user='root',\n password='admin#123',\n database='hujianli2',\n charset='utf8')\n\n# 2.创建游标对象\n\ntry:\n with connection.cursor() as cursor:\n # 3.执行SQL操作\n sql = 'select name,userid from user where userid > %(id)s'\n cursor.execute(sql, {'id': 0})\n\n # 4.提取结果集\n result_set = cursor.fetchall()\n\n for row in result_set:\n print(\"id:{0} - name:{1}\".format(row[1], row[0]))\n\n # 5.with代码块结束,关闭游标\nfinally:\n # 6.关闭数据连接\n connection.close()\n", "import pymysql\nconnection = pymysql.connect(host='localhost', user='root', password=\n 'admin#123', database='hujianli2', charset='utf8')\ntry:\n with connection.cursor() as cursor:\n sql = 'select name,userid from user where userid > %(id)s'\n cursor.execute(sql, {'id': 0})\n result_set = cursor.fetchall()\n for row in result_set:\n print('id:{0} - name:{1}'.format(row[1], row[0]))\nfinally:\n connection.close()\n", "<import token>\nconnection = pymysql.connect(host='localhost', user='root', password=\n 'admin#123', database='hujianli2', charset='utf8')\ntry:\n with connection.cursor() as cursor:\n sql = 'select name,userid from user where userid > %(id)s'\n cursor.execute(sql, {'id': 0})\n result_set = cursor.fetchall()\n for row in result_set:\n print('id:{0} - name:{1}'.format(row[1], row[0]))\nfinally:\n connection.close()\n", "<import token>\n<assignment token>\ntry:\n with connection.cursor() as cursor:\n sql = 'select name,userid from user where userid > %(id)s'\n cursor.execute(sql, {'id': 0})\n result_set = cursor.fetchall()\n for row in result_set:\n print('id:{0} - name:{1}'.format(row[1], row[0]))\nfinally:\n connection.close()\n", "<import token>\n<assignment token>\n<code token>\n" ]
false
99,461
c07f97c83c4af66e752544be149e68121d8a88cf
from output.models.ms_data.errata10.err_e002_xsd.err_e002 import ( Root, TestElement, ) __all__ = [ "Root", "TestElement", ]
[ "from output.models.ms_data.errata10.err_e002_xsd.err_e002 import (\n Root,\n TestElement,\n)\n\n__all__ = [\n \"Root\",\n \"TestElement\",\n]\n", "from output.models.ms_data.errata10.err_e002_xsd.err_e002 import Root, TestElement\n__all__ = ['Root', 'TestElement']\n", "<import token>\n__all__ = ['Root', 'TestElement']\n", "<import token>\n<assignment token>\n" ]
false
99,462
5be9cc511ed96d0d398c5abcd0b400612fa2d1ba
# Generated by Django 2.1.7 on 2019-03-27 00:25 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('donuts', '0005_auto_20190327_0023'), ] operations = [ migrations.AlterModelOptions( name='savory', options={'verbose_name_plural': 'savories'}, ), ]
[ "# Generated by Django 2.1.7 on 2019-03-27 00:25\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('donuts', '0005_auto_20190327_0023'),\n ]\n\n operations = [\n migrations.AlterModelOptions(\n name='savory',\n options={'verbose_name_plural': 'savories'},\n ),\n ]\n", "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('donuts', '0005_auto_20190327_0023')]\n operations = [migrations.AlterModelOptions(name='savory', options={\n 'verbose_name_plural': 'savories'})]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('donuts', '0005_auto_20190327_0023')]\n operations = [migrations.AlterModelOptions(name='savory', options={\n 'verbose_name_plural': 'savories'})]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n" ]
false
99,463
18543e8d3989bf115daa1d8fba8f26bf12a87c05
#!/usr/bin/env python import socket import ssl import shutil #envia um arquivo def send_file(socket, filename): with open('test.txt','rb') as inp: out = socket.makefile('wb') shutil.copyfileobj(inp, out) TCP_IP = '127.0.0.1' TCP_PORT = 7000 BUFFER_SIZE = 20 # Normally 1024, but we want fast response #socket para o server s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #ligacao com tp e porta s.bind((TCP_IP, TCP_PORT)) #atende chamadas s.listen(1) #recebeu uma chamada conn, addr = s.accept() #wrap de ssl connstream = ssl.wrap_socket(conn, server_side=True, certfile="selfsigned.cert", keyfile="selfsigned.key") print 'Connection address:', addr #recebe dados do cliente data = connstream.recv(BUFFER_SIZE) if data == 'download': print "Sending file..." data = 'ok' #confirma a requisicao do cliente connstream.send(data) send_file(connstream, "test.txt") connstream.close()
[ "#!/usr/bin/env python\n\nimport socket\nimport ssl\nimport shutil\n\n#envia um arquivo\ndef send_file(socket, filename):\n with open('test.txt','rb') as inp:\n out = socket.makefile('wb')\n shutil.copyfileobj(inp, out)\n\n\nTCP_IP = '127.0.0.1'\nTCP_PORT = 7000\nBUFFER_SIZE = 20 # Normally 1024, but we want fast response\n\n#socket para o server\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n#ligacao com tp e porta\ns.bind((TCP_IP, TCP_PORT))\n#atende chamadas\ns.listen(1)\n\n#recebeu uma chamada\nconn, addr = s.accept()\n#wrap de ssl\nconnstream = ssl.wrap_socket(conn,\n server_side=True,\n certfile=\"selfsigned.cert\",\n keyfile=\"selfsigned.key\")\nprint 'Connection address:', addr\n\n#recebe dados do cliente\ndata = connstream.recv(BUFFER_SIZE)\n\nif data == 'download':\n print \"Sending file...\"\n data = 'ok'\n #confirma a requisicao do cliente\n connstream.send(data)\n send_file(connstream, \"test.txt\")\n\nconnstream.close()\n" ]
true
99,464
23328911b618a2982fdf8aec67005459cb39a0c5
#!C:\Users\dell\AppData\Local\Programs\Python\Python35 python # -*- coding: utf-8 -*- import sys, os class Fileoperation: # def __init__(self): # self.case_path = sys.path[0] # self.case_path = os.getcwd() def filewrite(self, fpath, filename, writeword): filepath = os.path.join(fpath, filename) with open(filepath, 'w', encoding='utf-8') as f: f.write(writeword) f.close() def fileread(self, fpath, filename): filepath = os.path.join(fpath, filename) with open(filepath, 'r', encoding='utf-8') as f: try: all_the_text = f.read() finally: f.close() return (all_the_text) # class Fileoperation: # def __init__(self): # self.case_path = sys.path[0] # def filewrite(self,filename,writeword): # filepath = os.path.join(self.case_path, filename) # with open(filepath, 'w', encoding='utf-8') as f: # f.write(writeword) # f.close() # def fileread(self,filename): # filepath = os.path.join(self.case_path,filename) # with open(filepath, 'r', encoding='utf-8') as f: # try: # all_the_text = f.read() # finally: # f.close() # return(all_the_text)
[ "#!C:\\Users\\dell\\AppData\\Local\\Programs\\Python\\Python35 python\n# -*- coding: utf-8 -*-\nimport sys, os\n\nclass Fileoperation:\n # def __init__(self):\n # self.case_path = sys.path[0]\n # self.case_path = os.getcwd()\n\n def filewrite(self, fpath, filename, writeword):\n filepath = os.path.join(fpath, filename)\n with open(filepath, 'w', encoding='utf-8') as f:\n f.write(writeword)\n f.close()\n\n def fileread(self, fpath, filename):\n filepath = os.path.join(fpath, filename)\n with open(filepath, 'r', encoding='utf-8') as f:\n try:\n all_the_text = f.read()\n finally:\n f.close()\n return (all_the_text)\n\n# class Fileoperation:\n# def __init__(self):\n# self.case_path = sys.path[0]\n# def filewrite(self,filename,writeword):\n# filepath = os.path.join(self.case_path, filename)\n# with open(filepath, 'w', encoding='utf-8') as f:\n# f.write(writeword)\n# f.close()\n# def fileread(self,filename):\n# filepath = os.path.join(self.case_path,filename)\n# with open(filepath, 'r', encoding='utf-8') as f:\n# try:\n# all_the_text = f.read()\n# finally:\n# f.close()\n# return(all_the_text)\n", "import sys, os\n\n\nclass Fileoperation:\n\n def filewrite(self, fpath, filename, writeword):\n filepath = os.path.join(fpath, filename)\n with open(filepath, 'w', encoding='utf-8') as f:\n f.write(writeword)\n f.close()\n\n def fileread(self, fpath, filename):\n filepath = os.path.join(fpath, filename)\n with open(filepath, 'r', encoding='utf-8') as f:\n try:\n all_the_text = f.read()\n finally:\n f.close()\n return all_the_text\n", "<import token>\n\n\nclass Fileoperation:\n\n def filewrite(self, fpath, filename, writeword):\n filepath = os.path.join(fpath, filename)\n with open(filepath, 'w', encoding='utf-8') as f:\n f.write(writeword)\n f.close()\n\n def fileread(self, fpath, filename):\n filepath = os.path.join(fpath, filename)\n with open(filepath, 'r', encoding='utf-8') as f:\n try:\n all_the_text = f.read()\n finally:\n f.close()\n return all_the_text\n", "<import token>\n\n\nclass Fileoperation:\n <function token>\n\n def fileread(self, fpath, filename):\n filepath = os.path.join(fpath, filename)\n with open(filepath, 'r', encoding='utf-8') as f:\n try:\n all_the_text = f.read()\n finally:\n f.close()\n return all_the_text\n", "<import token>\n\n\nclass Fileoperation:\n <function token>\n <function token>\n", "<import token>\n<class token>\n" ]
false
99,465
68e6435bc1b03f373871c41e20488533293e57d5
#------------------------------------------------------- #Description : while loop #syntax : # while(condition): # statements; #About : Iterating through a given condition #------------------------------------------------------- print("-------------Iterating through given condition----------------"); i = 7 while i!=12: print(i); i += 1; #------------------------------------------------------- #Description : while loop #syntax : # while(condition): # statements; #About : Iterating through a list #------------------------------------------------------- print("-------------Iterating through a list----------------"); j=0; teams = ['RCB','MI','KXIP','RR','CSK']; while j!=len(teams): print(teams[j]); j += 1;
[ "#-------------------------------------------------------\n#Description : while loop\n#syntax : \n# while(condition):\n# statements;\n#About : Iterating through a given condition\n#-------------------------------------------------------\n\nprint(\"-------------Iterating through given condition----------------\");\ni = 7\nwhile i!=12:\n print(i);\n i += 1; \n\n#-------------------------------------------------------\n#Description : while loop\n#syntax : \n# while(condition):\n# statements;\n#About : Iterating through a list\n#-------------------------------------------------------\n\nprint(\"-------------Iterating through a list----------------\");\nj=0;\nteams = ['RCB','MI','KXIP','RR','CSK'];\nwhile j!=len(teams):\n print(teams[j]);\n j += 1;\n\n", "print('-------------Iterating through given condition----------------')\ni = 7\nwhile i != 12:\n print(i)\n i += 1\nprint('-------------Iterating through a list----------------')\nj = 0\nteams = ['RCB', 'MI', 'KXIP', 'RR', 'CSK']\nwhile j != len(teams):\n print(teams[j])\n j += 1\n", "print('-------------Iterating through given condition----------------')\n<assignment token>\nwhile i != 12:\n print(i)\n i += 1\nprint('-------------Iterating through a list----------------')\n<assignment token>\nwhile j != len(teams):\n print(teams[j])\n j += 1\n", "<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
99,466
7b26eb93e8da779c61ae90a88b453e0ff250c639
from django.db import models from fs_user.models import UserInfo from fs_goods.models import GoodsInfo class CartModel(models.Model): """ 购物车商品记录表: 用户和购物车记录数据:一对多关系,同一个用户可以添加多个商品到记录表中; 商品和购物车记录数据:一对多关系,不同的用户可以添加相同的产品; """ user = models.ForeignKey(UserInfo, on_delete=models.CASCADE) good = models.ForeignKey(GoodsInfo, on_delete=models.CASCADE) count = models.IntegerField(default=1)
[ "from django.db import models\nfrom fs_user.models import UserInfo\nfrom fs_goods.models import GoodsInfo\n\n\nclass CartModel(models.Model):\n \"\"\"\n 购物车商品记录表:\n 用户和购物车记录数据:一对多关系,同一个用户可以添加多个商品到记录表中;\n 商品和购物车记录数据:一对多关系,不同的用户可以添加相同的产品;\n \"\"\"\n user = models.ForeignKey(UserInfo, on_delete=models.CASCADE)\n good = models.ForeignKey(GoodsInfo, on_delete=models.CASCADE)\n count = models.IntegerField(default=1)\n", "<import token>\n\n\nclass CartModel(models.Model):\n \"\"\"\n 购物车商品记录表:\n 用户和购物车记录数据:一对多关系,同一个用户可以添加多个商品到记录表中;\n 商品和购物车记录数据:一对多关系,不同的用户可以添加相同的产品;\n \"\"\"\n user = models.ForeignKey(UserInfo, on_delete=models.CASCADE)\n good = models.ForeignKey(GoodsInfo, on_delete=models.CASCADE)\n count = models.IntegerField(default=1)\n", "<import token>\n\n\nclass CartModel(models.Model):\n <docstring token>\n user = models.ForeignKey(UserInfo, on_delete=models.CASCADE)\n good = models.ForeignKey(GoodsInfo, on_delete=models.CASCADE)\n count = models.IntegerField(default=1)\n", "<import token>\n\n\nclass CartModel(models.Model):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n" ]
false
99,467
eb5f4c316ee444491f34525817ff88f047027322
from typing import Optional import unittest from ..layout import ( match_layouts_to_screens, ScreenTileLayout, VirtualScreenArea, ) class MatchLayoutsTest(unittest.TestCase): def test_match_one_exact(self) -> None: layout1 = mk_screen_tile_layout('a', 100, 200) screen1 = mk_virtual_screen_area('1', 100, 200) mpack, errs = match_layouts_to_screens( [[layout1]], [screen1] ) i, m = mpack self.assertEqual(list(errs), []) self.assertEqual( m, ((layout1, screen1,),) ) def test_match_two_distant_layouts_one_screen(self) -> None: layout1 = mk_screen_tile_layout('a', 100, 200) layout2 = mk_screen_tile_layout('b', 200, 100) screen1 = mk_virtual_screen_area('1', 100, 200) mpack, errs = match_layouts_to_screens( [[layout1], [layout2]], [screen1] ) i, m = mpack self.assertEqual(list(errs), []) self.assertEqual( m, ((layout1, screen1,),) ) def test_match_two_screens_index_match(self) -> None: self.maxDiff = None layout1 = mk_screen_tile_layout('a', 100, 200) layout2 = mk_screen_tile_layout('b', 200, 100) screen1 = mk_virtual_screen_area('1', 100, 200) screen2 = mk_virtual_screen_area('2', 200, 100) mpack, errs = match_layouts_to_screens( [[layout1, layout2], [layout2, layout1]], [screen1, screen2] ) i, m = mpack self.assertEqual(list(errs), []) self.assertEqual( m, ((layout1, screen1,), (layout2, screen2),) ) mpack, errs = match_layouts_to_screens( [[layout2, layout1], [layout1, layout2]], [screen1, screen2] ) i, m = mpack self.assertEqual(list(errs), []) self.assertEqual( m, ((layout1, screen1,), (layout2, screen2),) ) def test_std_config(self) -> None: screen1 = mk_virtual_screen_area('primary', 1024, 768) layout1 = mk_screen_tile_layout(None, 0, 0, False, True) mpack, errs = match_layouts_to_screens( [[layout1]], [screen1] ) i, m = mpack self.assertEqual(list(errs), []) self.assertEqual( m, ((layout1, screen1,),) ) def test_two_layouts_one_screen_vs_two(self) -> None: screen = mk_virtual_screen_area('primary', 1024, 768) layout1_screen1 = mk_screen_tile_layout(None, 2440, 1980, False, True) layout1_screen2 = mk_screen_tile_layout(None, 1080, 1920, False, False) layout2_screen1 = mk_screen_tile_layout(None, 1920, 1080, False, True) mpack, errs = match_layouts_to_screens( [[layout1_screen1, layout1_screen2], [layout2_screen1]], [screen] ) i, m = mpack self.assertEqual(list(errs), []) self.assertEqual( m, ((layout2_screen1, screen,),) ) def mk_screen_tile_layout( name: Optional[str], w: int, h: int, direct: bool = True, primary: bool = True ) -> ScreenTileLayout: return ScreenTileLayout(name, direct, primary, (w, h)) def mk_virtual_screen_area( name: str, w: int, h: int, x: int = 0, y: int = 0, primary: bool = True ) -> VirtualScreenArea: return VirtualScreenArea(name, (x, y, w, h,), primary)
[ "\nfrom typing import Optional\nimport unittest\nfrom ..layout import (\n match_layouts_to_screens,\n ScreenTileLayout, VirtualScreenArea,\n)\n\n\nclass MatchLayoutsTest(unittest.TestCase):\n def test_match_one_exact(self) -> None:\n layout1 = mk_screen_tile_layout('a', 100, 200)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n mpack, errs = match_layouts_to_screens(\n [[layout1]],\n [screen1]\n )\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(\n m,\n ((layout1, screen1,),)\n )\n\n def test_match_two_distant_layouts_one_screen(self) -> None:\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n mpack, errs = match_layouts_to_screens(\n [[layout1], [layout2]],\n [screen1]\n )\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(\n m,\n ((layout1, screen1,),)\n )\n\n def test_match_two_screens_index_match(self) -> None:\n self.maxDiff = None\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n screen2 = mk_virtual_screen_area('2', 200, 100)\n mpack, errs = match_layouts_to_screens(\n [[layout1, layout2], [layout2, layout1]],\n [screen1, screen2]\n )\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(\n m,\n ((layout1, screen1,), (layout2, screen2),)\n )\n mpack, errs = match_layouts_to_screens(\n [[layout2, layout1], [layout1, layout2]],\n [screen1, screen2]\n )\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(\n m,\n ((layout1, screen1,), (layout2, screen2),)\n )\n\n def test_std_config(self) -> None:\n screen1 = mk_virtual_screen_area('primary', 1024, 768)\n layout1 = mk_screen_tile_layout(None, 0, 0, False, True)\n mpack, errs = match_layouts_to_screens(\n [[layout1]],\n [screen1]\n )\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(\n m,\n ((layout1, screen1,),)\n )\n\n def test_two_layouts_one_screen_vs_two(self) -> None:\n screen = mk_virtual_screen_area('primary', 1024, 768)\n layout1_screen1 = mk_screen_tile_layout(None, 2440, 1980, False, True)\n layout1_screen2 = mk_screen_tile_layout(None, 1080, 1920, False, False)\n layout2_screen1 = mk_screen_tile_layout(None, 1920, 1080, False, True)\n mpack, errs = match_layouts_to_screens(\n [[layout1_screen1, layout1_screen2], [layout2_screen1]],\n [screen]\n )\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(\n m,\n ((layout2_screen1, screen,),)\n )\n\n\ndef mk_screen_tile_layout(\n name: Optional[str], w: int, h: int, direct: bool = True, primary: bool = True\n) -> ScreenTileLayout:\n return ScreenTileLayout(name, direct, primary, (w, h))\n\n\ndef mk_virtual_screen_area(\n name: str, w: int, h: int, x: int = 0, y: int = 0, primary: bool = True\n) -> VirtualScreenArea:\n return VirtualScreenArea(name, (x, y, w, h,), primary)\n", "from typing import Optional\nimport unittest\nfrom ..layout import match_layouts_to_screens, ScreenTileLayout, VirtualScreenArea\n\n\nclass MatchLayoutsTest(unittest.TestCase):\n\n def test_match_one_exact(self) ->None:\n layout1 = mk_screen_tile_layout('a', 100, 200)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n mpack, errs = match_layouts_to_screens([[layout1]], [screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_match_two_distant_layouts_one_screen(self) ->None:\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = 
mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n mpack, errs = match_layouts_to_screens([[layout1], [layout2]], [\n screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_match_two_screens_index_match(self) ->None:\n self.maxDiff = None\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n screen2 = mk_virtual_screen_area('2', 200, 100)\n mpack, errs = match_layouts_to_screens([[layout1, layout2], [\n layout2, layout1]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n mpack, errs = match_layouts_to_screens([[layout2, layout1], [\n layout1, layout2]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n\n def test_std_config(self) ->None:\n screen1 = mk_virtual_screen_area('primary', 1024, 768)\n layout1 = mk_screen_tile_layout(None, 0, 0, False, True)\n mpack, errs = match_layouts_to_screens([[layout1]], [screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_two_layouts_one_screen_vs_two(self) ->None:\n screen = mk_virtual_screen_area('primary', 1024, 768)\n layout1_screen1 = mk_screen_tile_layout(None, 2440, 1980, False, True)\n layout1_screen2 = mk_screen_tile_layout(None, 1080, 1920, False, False)\n layout2_screen1 = mk_screen_tile_layout(None, 1920, 1080, False, True)\n mpack, errs = match_layouts_to_screens([[layout1_screen1,\n layout1_screen2], [layout2_screen1]], [screen])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout2_screen1, screen),))\n\n\ndef mk_screen_tile_layout(name: Optional[str], w: int, h: int, direct: bool\n =True, primary: bool=True) ->ScreenTileLayout:\n return ScreenTileLayout(name, direct, primary, (w, h))\n\n\ndef mk_virtual_screen_area(name: str, w: int, h: int, x: int=0, y: int=0,\n primary: bool=True) ->VirtualScreenArea:\n return VirtualScreenArea(name, (x, y, w, h), primary)\n", "<import token>\n\n\nclass MatchLayoutsTest(unittest.TestCase):\n\n def test_match_one_exact(self) ->None:\n layout1 = mk_screen_tile_layout('a', 100, 200)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n mpack, errs = match_layouts_to_screens([[layout1]], [screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_match_two_distant_layouts_one_screen(self) ->None:\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n mpack, errs = match_layouts_to_screens([[layout1], [layout2]], [\n screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_match_two_screens_index_match(self) ->None:\n self.maxDiff = None\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n screen2 = mk_virtual_screen_area('2', 200, 100)\n mpack, errs = match_layouts_to_screens([[layout1, layout2], [\n layout2, layout1]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n mpack, errs = match_layouts_to_screens([[layout2, layout1], [\n layout1, layout2]], [screen1, 
screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n\n def test_std_config(self) ->None:\n screen1 = mk_virtual_screen_area('primary', 1024, 768)\n layout1 = mk_screen_tile_layout(None, 0, 0, False, True)\n mpack, errs = match_layouts_to_screens([[layout1]], [screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_two_layouts_one_screen_vs_two(self) ->None:\n screen = mk_virtual_screen_area('primary', 1024, 768)\n layout1_screen1 = mk_screen_tile_layout(None, 2440, 1980, False, True)\n layout1_screen2 = mk_screen_tile_layout(None, 1080, 1920, False, False)\n layout2_screen1 = mk_screen_tile_layout(None, 1920, 1080, False, True)\n mpack, errs = match_layouts_to_screens([[layout1_screen1,\n layout1_screen2], [layout2_screen1]], [screen])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout2_screen1, screen),))\n\n\ndef mk_screen_tile_layout(name: Optional[str], w: int, h: int, direct: bool\n =True, primary: bool=True) ->ScreenTileLayout:\n return ScreenTileLayout(name, direct, primary, (w, h))\n\n\ndef mk_virtual_screen_area(name: str, w: int, h: int, x: int=0, y: int=0,\n primary: bool=True) ->VirtualScreenArea:\n return VirtualScreenArea(name, (x, y, w, h), primary)\n", "<import token>\n\n\nclass MatchLayoutsTest(unittest.TestCase):\n\n def test_match_one_exact(self) ->None:\n layout1 = mk_screen_tile_layout('a', 100, 200)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n mpack, errs = match_layouts_to_screens([[layout1]], [screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_match_two_distant_layouts_one_screen(self) ->None:\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n mpack, errs = match_layouts_to_screens([[layout1], [layout2]], [\n screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_match_two_screens_index_match(self) ->None:\n self.maxDiff = None\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n screen2 = mk_virtual_screen_area('2', 200, 100)\n mpack, errs = match_layouts_to_screens([[layout1, layout2], [\n layout2, layout1]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n mpack, errs = match_layouts_to_screens([[layout2, layout1], [\n layout1, layout2]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n\n def test_std_config(self) ->None:\n screen1 = mk_virtual_screen_area('primary', 1024, 768)\n layout1 = mk_screen_tile_layout(None, 0, 0, False, True)\n mpack, errs = match_layouts_to_screens([[layout1]], [screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_two_layouts_one_screen_vs_two(self) ->None:\n screen = mk_virtual_screen_area('primary', 1024, 768)\n layout1_screen1 = mk_screen_tile_layout(None, 2440, 1980, False, True)\n layout1_screen2 = mk_screen_tile_layout(None, 1080, 1920, False, False)\n layout2_screen1 = mk_screen_tile_layout(None, 1920, 1080, False, True)\n mpack, errs = match_layouts_to_screens([[layout1_screen1,\n layout1_screen2], 
[layout2_screen1]], [screen])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout2_screen1, screen),))\n\n\ndef mk_screen_tile_layout(name: Optional[str], w: int, h: int, direct: bool\n =True, primary: bool=True) ->ScreenTileLayout:\n return ScreenTileLayout(name, direct, primary, (w, h))\n\n\n<function token>\n", "<import token>\n\n\nclass MatchLayoutsTest(unittest.TestCase):\n\n def test_match_one_exact(self) ->None:\n layout1 = mk_screen_tile_layout('a', 100, 200)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n mpack, errs = match_layouts_to_screens([[layout1]], [screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_match_two_distant_layouts_one_screen(self) ->None:\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n mpack, errs = match_layouts_to_screens([[layout1], [layout2]], [\n screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_match_two_screens_index_match(self) ->None:\n self.maxDiff = None\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n screen2 = mk_virtual_screen_area('2', 200, 100)\n mpack, errs = match_layouts_to_screens([[layout1, layout2], [\n layout2, layout1]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n mpack, errs = match_layouts_to_screens([[layout2, layout1], [\n layout1, layout2]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n\n def test_std_config(self) ->None:\n screen1 = mk_virtual_screen_area('primary', 1024, 768)\n layout1 = mk_screen_tile_layout(None, 0, 0, False, True)\n mpack, errs = match_layouts_to_screens([[layout1]], [screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_two_layouts_one_screen_vs_two(self) ->None:\n screen = mk_virtual_screen_area('primary', 1024, 768)\n layout1_screen1 = mk_screen_tile_layout(None, 2440, 1980, False, True)\n layout1_screen2 = mk_screen_tile_layout(None, 1080, 1920, False, False)\n layout2_screen1 = mk_screen_tile_layout(None, 1920, 1080, False, True)\n mpack, errs = match_layouts_to_screens([[layout1_screen1,\n layout1_screen2], [layout2_screen1]], [screen])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout2_screen1, screen),))\n\n\n<function token>\n<function token>\n", "<import token>\n\n\nclass MatchLayoutsTest(unittest.TestCase):\n\n def test_match_one_exact(self) ->None:\n layout1 = mk_screen_tile_layout('a', 100, 200)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n mpack, errs = match_layouts_to_screens([[layout1]], [screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_match_two_distant_layouts_one_screen(self) ->None:\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n mpack, errs = match_layouts_to_screens([[layout1], [layout2]], [\n screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_match_two_screens_index_match(self) ->None:\n self.maxDiff = None\n 
layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n screen2 = mk_virtual_screen_area('2', 200, 100)\n mpack, errs = match_layouts_to_screens([[layout1, layout2], [\n layout2, layout1]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n mpack, errs = match_layouts_to_screens([[layout2, layout1], [\n layout1, layout2]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n <function token>\n\n def test_two_layouts_one_screen_vs_two(self) ->None:\n screen = mk_virtual_screen_area('primary', 1024, 768)\n layout1_screen1 = mk_screen_tile_layout(None, 2440, 1980, False, True)\n layout1_screen2 = mk_screen_tile_layout(None, 1080, 1920, False, False)\n layout2_screen1 = mk_screen_tile_layout(None, 1920, 1080, False, True)\n mpack, errs = match_layouts_to_screens([[layout1_screen1,\n layout1_screen2], [layout2_screen1]], [screen])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout2_screen1, screen),))\n\n\n<function token>\n<function token>\n", "<import token>\n\n\nclass MatchLayoutsTest(unittest.TestCase):\n <function token>\n\n def test_match_two_distant_layouts_one_screen(self) ->None:\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n mpack, errs = match_layouts_to_screens([[layout1], [layout2]], [\n screen1])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1),))\n\n def test_match_two_screens_index_match(self) ->None:\n self.maxDiff = None\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n screen2 = mk_virtual_screen_area('2', 200, 100)\n mpack, errs = match_layouts_to_screens([[layout1, layout2], [\n layout2, layout1]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n mpack, errs = match_layouts_to_screens([[layout2, layout1], [\n layout1, layout2]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n <function token>\n\n def test_two_layouts_one_screen_vs_two(self) ->None:\n screen = mk_virtual_screen_area('primary', 1024, 768)\n layout1_screen1 = mk_screen_tile_layout(None, 2440, 1980, False, True)\n layout1_screen2 = mk_screen_tile_layout(None, 1080, 1920, False, False)\n layout2_screen1 = mk_screen_tile_layout(None, 1920, 1080, False, True)\n mpack, errs = match_layouts_to_screens([[layout1_screen1,\n layout1_screen2], [layout2_screen1]], [screen])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout2_screen1, screen),))\n\n\n<function token>\n<function token>\n", "<import token>\n\n\nclass MatchLayoutsTest(unittest.TestCase):\n <function token>\n <function token>\n\n def test_match_two_screens_index_match(self) ->None:\n self.maxDiff = None\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n screen2 = mk_virtual_screen_area('2', 200, 100)\n mpack, errs = match_layouts_to_screens([[layout1, layout2], [\n layout2, layout1]], [screen1, screen2])\n i, m = mpack\n 
self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n mpack, errs = match_layouts_to_screens([[layout2, layout1], [\n layout1, layout2]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n <function token>\n\n def test_two_layouts_one_screen_vs_two(self) ->None:\n screen = mk_virtual_screen_area('primary', 1024, 768)\n layout1_screen1 = mk_screen_tile_layout(None, 2440, 1980, False, True)\n layout1_screen2 = mk_screen_tile_layout(None, 1080, 1920, False, False)\n layout2_screen1 = mk_screen_tile_layout(None, 1920, 1080, False, True)\n mpack, errs = match_layouts_to_screens([[layout1_screen1,\n layout1_screen2], [layout2_screen1]], [screen])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout2_screen1, screen),))\n\n\n<function token>\n<function token>\n", "<import token>\n\n\nclass MatchLayoutsTest(unittest.TestCase):\n <function token>\n <function token>\n\n def test_match_two_screens_index_match(self) ->None:\n self.maxDiff = None\n layout1 = mk_screen_tile_layout('a', 100, 200)\n layout2 = mk_screen_tile_layout('b', 200, 100)\n screen1 = mk_virtual_screen_area('1', 100, 200)\n screen2 = mk_virtual_screen_area('2', 200, 100)\n mpack, errs = match_layouts_to_screens([[layout1, layout2], [\n layout2, layout1]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n mpack, errs = match_layouts_to_screens([[layout2, layout1], [\n layout1, layout2]], [screen1, screen2])\n i, m = mpack\n self.assertEqual(list(errs), [])\n self.assertEqual(m, ((layout1, screen1), (layout2, screen2)))\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n", "<import token>\n\n\nclass MatchLayoutsTest(unittest.TestCase):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n", "<import token>\n<class token>\n<function token>\n<function token>\n" ]
false
99,468
3eb270da7a115046266d9565a5d980cf8ae62842
#NOME: LEONARDO ANDRADE #DATA: 23/04/2019 #DESCRIÇAO: print('Entre com o valor do salário bruto', end = ': ') salBruto = float(input()) print('Entre com o desconto do Imposto de Renda (porcentagem)', end = ': ') valor_IR = float(input()) print('Entre com o desconto do INSS', end = ': ') valor__INSS = float(input()) print('Entre com o valor do Auxilio Moradia', end = ': ') moradiaValue = float(input()) print ('Entre com o valor do Auxílio Alimentação', end = ': ') alimentValue = float(input()) print() print ('*********************************************************') print ('Salario Bruto = ',salBruto) print ('*********************************************************') print('DESCONTOS') if salBruto < 2500 : desc_IR = 0 elif 6500 > salBruto >= 2500 : desc_IR = (18 * salBruto) / 100 elif salBruto >= 6500 : desc_IR = (27.5 * salBruto) / 100 print('Imposto de Renda = ',desc_IR) desc_INSS = (11 * salBruto) / 100 if 600 >= desc_INSS : print('Previdencia Social = ',desc_INSS) else: print('Previdencia Social = ', 600) print() print('Auxílios') print('Moradia = ',moradiaValue) print('Alimetação = ',alimentValue) print('**********************************************************') if 600 >= desc_INSS : salLiquido = salBruto - desc_IR - desc_INSS + moradiaValue + alimentValue else : salLiquido = salBruto -desc_IR - 600 + moradiaValue + alimentValue print('Salário Líquido = ', salLiquido)
[ "#NOME: LEONARDO ANDRADE\n#DATA: 23/04/2019\n#DESCRIÇAO: \n\nprint('Entre com o valor do salário bruto', end = ': ')\nsalBruto = float(input())\n\nprint('Entre com o desconto do Imposto de Renda (porcentagem)', end = ': ')\nvalor_IR = float(input())\n\nprint('Entre com o desconto do INSS', end = ': ')\nvalor__INSS = float(input())\n\nprint('Entre com o valor do Auxilio Moradia', end = ': ')\nmoradiaValue = float(input())\n\nprint ('Entre com o valor do Auxílio Alimentação', end = ': ')\nalimentValue = float(input())\n\nprint()\n\nprint ('*********************************************************')\n\nprint ('Salario Bruto = ',salBruto)\n\nprint ('*********************************************************')\n\nprint('DESCONTOS')\n\nif salBruto < 2500 :\n desc_IR = 0\n\nelif 6500 > salBruto >= 2500 :\n desc_IR = (18 * salBruto) / 100\n\nelif salBruto >= 6500 :\n desc_IR = (27.5 * salBruto) / 100\n\nprint('Imposto de Renda = ',desc_IR)\n\ndesc_INSS = (11 * salBruto) / 100\n\nif 600 >= desc_INSS :\n print('Previdencia Social = ',desc_INSS)\n\nelse:\n print('Previdencia Social = ', 600)\n\nprint()\n\nprint('Auxílios')\nprint('Moradia = ',moradiaValue)\nprint('Alimetação = ',alimentValue)\n\nprint('**********************************************************')\n\nif 600 >= desc_INSS : \n salLiquido = salBruto - desc_IR - desc_INSS + moradiaValue + alimentValue\n\nelse :\n salLiquido = salBruto -desc_IR - 600 + moradiaValue + alimentValue\n\nprint('Salário Líquido = ', salLiquido)\n", "print('Entre com o valor do salário bruto', end=': ')\nsalBruto = float(input())\nprint('Entre com o desconto do Imposto de Renda (porcentagem)', end=': ')\nvalor_IR = float(input())\nprint('Entre com o desconto do INSS', end=': ')\nvalor__INSS = float(input())\nprint('Entre com o valor do Auxilio Moradia', end=': ')\nmoradiaValue = float(input())\nprint('Entre com o valor do Auxílio Alimentação', end=': ')\nalimentValue = float(input())\nprint()\nprint('*********************************************************')\nprint('Salario Bruto = ', salBruto)\nprint('*********************************************************')\nprint('DESCONTOS')\nif salBruto < 2500:\n desc_IR = 0\nelif 6500 > salBruto >= 2500:\n desc_IR = 18 * salBruto / 100\nelif salBruto >= 6500:\n desc_IR = 27.5 * salBruto / 100\nprint('Imposto de Renda = ', desc_IR)\ndesc_INSS = 11 * salBruto / 100\nif 600 >= desc_INSS:\n print('Previdencia Social = ', desc_INSS)\nelse:\n print('Previdencia Social = ', 600)\nprint()\nprint('Auxílios')\nprint('Moradia = ', moradiaValue)\nprint('Alimetação = ', alimentValue)\nprint('**********************************************************')\nif 600 >= desc_INSS:\n salLiquido = salBruto - desc_IR - desc_INSS + moradiaValue + alimentValue\nelse:\n salLiquido = salBruto - desc_IR - 600 + moradiaValue + alimentValue\nprint('Salário Líquido = ', salLiquido)\n", "print('Entre com o valor do salário bruto', end=': ')\n<assignment token>\nprint('Entre com o desconto do Imposto de Renda (porcentagem)', end=': ')\n<assignment token>\nprint('Entre com o desconto do INSS', end=': ')\n<assignment token>\nprint('Entre com o valor do Auxilio Moradia', end=': ')\n<assignment token>\nprint('Entre com o valor do Auxílio Alimentação', end=': ')\n<assignment token>\nprint()\nprint('*********************************************************')\nprint('Salario Bruto = ', salBruto)\nprint('*********************************************************')\nprint('DESCONTOS')\nif salBruto < 2500:\n desc_IR = 0\nelif 6500 > salBruto >= 2500:\n desc_IR = 18 
* salBruto / 100\nelif salBruto >= 6500:\n desc_IR = 27.5 * salBruto / 100\nprint('Imposto de Renda = ', desc_IR)\n<assignment token>\nif 600 >= desc_INSS:\n print('Previdencia Social = ', desc_INSS)\nelse:\n print('Previdencia Social = ', 600)\nprint()\nprint('Auxílios')\nprint('Moradia = ', moradiaValue)\nprint('Alimetação = ', alimentValue)\nprint('**********************************************************')\nif 600 >= desc_INSS:\n salLiquido = salBruto - desc_IR - desc_INSS + moradiaValue + alimentValue\nelse:\n salLiquido = salBruto - desc_IR - 600 + moradiaValue + alimentValue\nprint('Salário Líquido = ', salLiquido)\n", "<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
99,469
f00c5fc115b93f55f3e91fac20c52f180a5e7644
#-*-coding:utf-8-*- #socket编程示例 import time,datetime from selenium import webdriver from selenium import selenium import os import json from email.mime.text import MIMEText import smtplib from test.test_imageop import AAAAA # wd=webdriver.Ie() # wd.get("http://mytestlink.vicp.net:8001/redirect1.html") # wd.execute_script("alert('呵呵')".decode('gbk')) # wd.execute_script("if('呵呵'==='呵呵'){alert(1)}".decode('gbk')) # wd.quit() # print "中文123".decode('gbk') # wd.find_element_by_css_selector("#cm").click() #wd.switch_to_alert().accept() #wd.find_element_by_css_selector("a").click() # import urllib # str = '{json_name:PythonTab中文网,json_age:22}' # #str = str.encode('utf-8') # d = {'name':str,'age':'"18'} # print len(d) # q = urllib.urlencode(d) # print q # # tmp='[{"field1":"[email protected]","field2":"","field3":"","text":"吴锦涛/wujt3","type":"","value":"31535033"},{"field1":"[email protected]","field2":"","field3":"","text":"wujt3344/wujt3344","type":"","value":"35897406"}]' # tmp1=eval(tmp) # print tmp1[0]['text'] # import cx_Oracle # conn=cx_Oracle.connect("base/[email protected]:9403/nctstdb") # cursor=conn.cursor() # sql="select user_id,cust_id from so1.ins_user_771 a where a.bill_id = '13481118910' and a.state='1'" # cursor.execute(sql) # r=cursor.fetchall() # print r print '2091912938'[-1:] if 'xxx' not in locals().keys(): print 'hehe' print time.strftime("%Y%m") import traceback import sys import logging logging.basicConfig(level=10,format='%(asctime)s %(levelname)s %(message)s',datefmt='%Y-%m-%d %H:%M:%S') try: print 1+"1" #print 1/0 except ZeroDivisionError,e: logging.debug('hehe') logging.exception(e) logging.error('haha') #print e #traceback.print_exc(file=sys.stdout) print 'start next' except TypeError: print 'type error' print '===' print map(lambda x: x[10:14], ['AT_SCRIPT_0003.py']) for i in range(0,1): print 'iiii' import random print time.strftime("%Y%m%d") print str(random.randint(100, 200)) print '新增成功!集团号为:7717149428'[-10:] a='''pass| err| pass| ''' print a.count('\n')
[ "#-*-coding:utf-8-*-\r\n#socket编程示例\r\n\r\n \r\nimport time,datetime\r\nfrom selenium import webdriver\r\nfrom selenium import selenium\r\nimport os\r\nimport json\r\n\r\nfrom email.mime.text import MIMEText\r\nimport smtplib\r\nfrom test.test_imageop import AAAAA\r\n\r\n \r\n# wd=webdriver.Ie()\r\n# wd.get(\"http://mytestlink.vicp.net:8001/redirect1.html\")\r\n# wd.execute_script(\"alert('呵呵')\".decode('gbk'))\r\n# wd.execute_script(\"if('呵呵'==='呵呵'){alert(1)}\".decode('gbk'))\r\n# wd.quit()\r\n# print \"中文123\".decode('gbk')\r\n# wd.find_element_by_css_selector(\"#cm\").click()\r\n\r\n#wd.switch_to_alert().accept()\r\n#wd.find_element_by_css_selector(\"a\").click()\r\n\r\n\r\n\r\n# import urllib\r\n# str = '{json_name:PythonTab中文网,json_age:22}'\r\n# #str = str.encode('utf-8')\r\n# d = {'name':str,'age':'\"18'}\r\n# print len(d)\r\n# q = urllib.urlencode(d)\r\n# print q\r\n# \r\n# tmp='[{\"field1\":\"[email protected]\",\"field2\":\"\",\"field3\":\"\",\"text\":\"吴锦涛/wujt3\",\"type\":\"\",\"value\":\"31535033\"},{\"field1\":\"[email protected]\",\"field2\":\"\",\"field3\":\"\",\"text\":\"wujt3344/wujt3344\",\"type\":\"\",\"value\":\"35897406\"}]'\r\n# tmp1=eval(tmp)\r\n# print tmp1[0]['text']\r\n\r\n# import cx_Oracle\r\n# conn=cx_Oracle.connect(\"base/[email protected]:9403/nctstdb\")\r\n# cursor=conn.cursor()\r\n# sql=\"select user_id,cust_id from so1.ins_user_771 a where a.bill_id = '13481118910' and a.state='1'\"\r\n# cursor.execute(sql)\r\n# r=cursor.fetchall()\r\n# print r\r\nprint '2091912938'[-1:]\r\nif 'xxx' not in locals().keys():\r\n print 'hehe'\r\nprint time.strftime(\"%Y%m\")\r\nimport traceback\r\nimport sys\r\nimport logging\r\nlogging.basicConfig(level=10,format='%(asctime)s %(levelname)s %(message)s',datefmt='%Y-%m-%d %H:%M:%S')\r\ntry:\r\n print 1+\"1\"\r\n #print 1/0\r\nexcept ZeroDivisionError,e:\r\n logging.debug('hehe')\r\n logging.exception(e)\r\n logging.error('haha')\r\n #print e\r\n #traceback.print_exc(file=sys.stdout)\r\n print 'start next'\r\nexcept TypeError:\r\n print 'type error'\r\nprint '==='\r\n\r\nprint map(lambda x: x[10:14], ['AT_SCRIPT_0003.py'])\r\nfor i in range(0,1):\r\n print 'iiii'\r\n\r\nimport random\r\nprint time.strftime(\"%Y%m%d\")\r\nprint str(random.randint(100, 200))\r\n\r\nprint '新增成功!集团号为:7717149428'[-10:]\r\n\r\na='''pass|\r\nerr|\r\npass|\r\n'''\r\nprint a.count('\\n')\r\n\r\n" ]
true
99,470
f4e5dcc404212a371b5e7ed5146e42782364b4df
from django.db import models from django.core.urlresolvers import reverse from redactor.fields import RedactorField from django.utils.html import strip_tags class Page(models.Model): title = models.CharField(max_length=255, unique=True) subtitle = models.CharField(max_length=255, null=True, blank=True) slug = models.SlugField(max_length=255, unique=True) order = models.IntegerField(unique=True) content = RedactorField(verbose_name=u'Text') published = models.BooleanField(default=True) class Meta: ordering = ['order'] def get_absolute_url(self): return reverse('blog.views.page',args=[self.slug]) def __unicode__(self): return u'%s' % self.title class Tag(models.Model): name = models.CharField(max_length=255, unique=True) slug = models.SlugField(max_length=255, unique=True) class Meta: ordering = ['name'] def __str__(self): return self.name def get_absolute_url(self): return reverse("blog.views.tag",args=[self.slug]) class Post(models.Model): title = models.CharField(max_length=255) slug = models.SlugField(unique=True, max_length=255) description = models.CharField(max_length=255, null=True, blank=True,help_text="Leave blank for auto-fill") author = models.CharField(max_length=255,default="Honestly Curated") content = RedactorField(verbose_name=u'Text') published = models.BooleanField(default=True) created = models.DateTimeField(auto_now_add=True) tag = models.ManyToManyField(Tag, related_name="posts", related_query_name="post", blank=True) class Meta: ordering = ['-created'] def __unicode__(self): return u'%s' % self.title def get_absolute_url(self): return reverse('blog.views.post',args=[self.slug]) def save(self, *args, **kwargs): if self.content and (self.description is None or self.description == ""): suffix = "..." length = 100 content = strip_tags(self.content) self.description = content if len(content) <= length else content[:length-len(suffix)].rsplit(' ', 1)[0] + suffix super(Post, self).save(*args, **kwargs)
[ "from django.db import models\nfrom django.core.urlresolvers import reverse\nfrom redactor.fields import RedactorField\nfrom django.utils.html import strip_tags\n\nclass Page(models.Model):\n title = models.CharField(max_length=255, unique=True)\n subtitle = models.CharField(max_length=255, null=True, blank=True)\n slug = models.SlugField(max_length=255, unique=True)\n order = models.IntegerField(unique=True)\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n \n class Meta:\n ordering = ['order']\n \n def get_absolute_url(self):\n return reverse('blog.views.page',args=[self.slug])\n \n def __unicode__(self):\n return u'%s' % self.title\n\nclass Tag(models.Model):\n name = models.CharField(max_length=255, unique=True)\n slug = models.SlugField(max_length=255, unique=True)\n\n class Meta:\n ordering = ['name']\n \n def __str__(self):\n return self.name\n\n def get_absolute_url(self):\n return reverse(\"blog.views.tag\",args=[self.slug])\n\nclass Post(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(unique=True,\n max_length=255)\n description = models.CharField(max_length=255, null=True, blank=True,help_text=\"Leave blank for auto-fill\")\n author = models.CharField(max_length=255,default=\"Honestly Curated\")\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n created = models.DateTimeField(auto_now_add=True)\n tag = models.ManyToManyField(Tag, related_name=\"posts\", related_query_name=\"post\", blank=True)\n \n class Meta:\n ordering = ['-created']\n \n def __unicode__(self):\n return u'%s' % self.title\n \n def get_absolute_url(self):\n return reverse('blog.views.post',args=[self.slug])\n \n def save(self, *args, **kwargs):\n if self.content and (self.description is None or self.description == \"\"):\n suffix = \"...\"\n length = 100\n content = strip_tags(self.content)\n self.description = content if len(content) <= length else content[:length-len(suffix)].rsplit(' ', 1)[0] + suffix\n super(Post, self).save(*args, **kwargs)\n", "from django.db import models\nfrom django.core.urlresolvers import reverse\nfrom redactor.fields import RedactorField\nfrom django.utils.html import strip_tags\n\n\nclass Page(models.Model):\n title = models.CharField(max_length=255, unique=True)\n subtitle = models.CharField(max_length=255, null=True, blank=True)\n slug = models.SlugField(max_length=255, unique=True)\n order = models.IntegerField(unique=True)\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n\n\n class Meta:\n ordering = ['order']\n\n def get_absolute_url(self):\n return reverse('blog.views.page', args=[self.slug])\n\n def __unicode__(self):\n return u'%s' % self.title\n\n\nclass Tag(models.Model):\n name = models.CharField(max_length=255, unique=True)\n slug = models.SlugField(max_length=255, unique=True)\n\n\n class Meta:\n ordering = ['name']\n\n def __str__(self):\n return self.name\n\n def get_absolute_url(self):\n return reverse('blog.views.tag', args=[self.slug])\n\n\nclass Post(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(unique=True, max_length=255)\n description = models.CharField(max_length=255, null=True, blank=True,\n help_text='Leave blank for auto-fill')\n author = models.CharField(max_length=255, default='Honestly Curated')\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n created = 
models.DateTimeField(auto_now_add=True)\n tag = models.ManyToManyField(Tag, related_name='posts',\n related_query_name='post', blank=True)\n\n\n class Meta:\n ordering = ['-created']\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('blog.views.post', args=[self.slug])\n\n def save(self, *args, **kwargs):\n if self.content and (self.description is None or self.description == ''\n ):\n suffix = '...'\n length = 100\n content = strip_tags(self.content)\n self.description = content if len(content) <= length else content[:\n length - len(suffix)].rsplit(' ', 1)[0] + suffix\n super(Post, self).save(*args, **kwargs)\n", "<import token>\n\n\nclass Page(models.Model):\n title = models.CharField(max_length=255, unique=True)\n subtitle = models.CharField(max_length=255, null=True, blank=True)\n slug = models.SlugField(max_length=255, unique=True)\n order = models.IntegerField(unique=True)\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n\n\n class Meta:\n ordering = ['order']\n\n def get_absolute_url(self):\n return reverse('blog.views.page', args=[self.slug])\n\n def __unicode__(self):\n return u'%s' % self.title\n\n\nclass Tag(models.Model):\n name = models.CharField(max_length=255, unique=True)\n slug = models.SlugField(max_length=255, unique=True)\n\n\n class Meta:\n ordering = ['name']\n\n def __str__(self):\n return self.name\n\n def get_absolute_url(self):\n return reverse('blog.views.tag', args=[self.slug])\n\n\nclass Post(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(unique=True, max_length=255)\n description = models.CharField(max_length=255, null=True, blank=True,\n help_text='Leave blank for auto-fill')\n author = models.CharField(max_length=255, default='Honestly Curated')\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n created = models.DateTimeField(auto_now_add=True)\n tag = models.ManyToManyField(Tag, related_name='posts',\n related_query_name='post', blank=True)\n\n\n class Meta:\n ordering = ['-created']\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('blog.views.post', args=[self.slug])\n\n def save(self, *args, **kwargs):\n if self.content and (self.description is None or self.description == ''\n ):\n suffix = '...'\n length = 100\n content = strip_tags(self.content)\n self.description = content if len(content) <= length else content[:\n length - len(suffix)].rsplit(' ', 1)[0] + suffix\n super(Post, self).save(*args, **kwargs)\n", "<import token>\n\n\nclass Page(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n ordering = ['order']\n\n def get_absolute_url(self):\n return reverse('blog.views.page', args=[self.slug])\n\n def __unicode__(self):\n return u'%s' % self.title\n\n\nclass Tag(models.Model):\n name = models.CharField(max_length=255, unique=True)\n slug = models.SlugField(max_length=255, unique=True)\n\n\n class Meta:\n ordering = ['name']\n\n def __str__(self):\n return self.name\n\n def get_absolute_url(self):\n return reverse('blog.views.tag', args=[self.slug])\n\n\nclass Post(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(unique=True, max_length=255)\n description = models.CharField(max_length=255, null=True, blank=True,\n help_text='Leave blank for auto-fill')\n author = 
models.CharField(max_length=255, default='Honestly Curated')\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n created = models.DateTimeField(auto_now_add=True)\n tag = models.ManyToManyField(Tag, related_name='posts',\n related_query_name='post', blank=True)\n\n\n class Meta:\n ordering = ['-created']\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('blog.views.post', args=[self.slug])\n\n def save(self, *args, **kwargs):\n if self.content and (self.description is None or self.description == ''\n ):\n suffix = '...'\n length = 100\n content = strip_tags(self.content)\n self.description = content if len(content) <= length else content[:\n length - len(suffix)].rsplit(' ', 1)[0] + suffix\n super(Post, self).save(*args, **kwargs)\n", "<import token>\n\n\nclass Page(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n ordering = ['order']\n <function token>\n\n def __unicode__(self):\n return u'%s' % self.title\n\n\nclass Tag(models.Model):\n name = models.CharField(max_length=255, unique=True)\n slug = models.SlugField(max_length=255, unique=True)\n\n\n class Meta:\n ordering = ['name']\n\n def __str__(self):\n return self.name\n\n def get_absolute_url(self):\n return reverse('blog.views.tag', args=[self.slug])\n\n\nclass Post(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(unique=True, max_length=255)\n description = models.CharField(max_length=255, null=True, blank=True,\n help_text='Leave blank for auto-fill')\n author = models.CharField(max_length=255, default='Honestly Curated')\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n created = models.DateTimeField(auto_now_add=True)\n tag = models.ManyToManyField(Tag, related_name='posts',\n related_query_name='post', blank=True)\n\n\n class Meta:\n ordering = ['-created']\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('blog.views.post', args=[self.slug])\n\n def save(self, *args, **kwargs):\n if self.content and (self.description is None or self.description == ''\n ):\n suffix = '...'\n length = 100\n content = strip_tags(self.content)\n self.description = content if len(content) <= length else content[:\n length - len(suffix)].rsplit(' ', 1)[0] + suffix\n super(Post, self).save(*args, **kwargs)\n", "<import token>\n\n\nclass Page(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n ordering = ['order']\n <function token>\n <function token>\n\n\nclass Tag(models.Model):\n name = models.CharField(max_length=255, unique=True)\n slug = models.SlugField(max_length=255, unique=True)\n\n\n class Meta:\n ordering = ['name']\n\n def __str__(self):\n return self.name\n\n def get_absolute_url(self):\n return reverse('blog.views.tag', args=[self.slug])\n\n\nclass Post(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(unique=True, max_length=255)\n description = models.CharField(max_length=255, null=True, blank=True,\n help_text='Leave blank for auto-fill')\n author = models.CharField(max_length=255, default='Honestly Curated')\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n created = 
models.DateTimeField(auto_now_add=True)\n tag = models.ManyToManyField(Tag, related_name='posts',\n related_query_name='post', blank=True)\n\n\n class Meta:\n ordering = ['-created']\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('blog.views.post', args=[self.slug])\n\n def save(self, *args, **kwargs):\n if self.content and (self.description is None or self.description == ''\n ):\n suffix = '...'\n length = 100\n content = strip_tags(self.content)\n self.description = content if len(content) <= length else content[:\n length - len(suffix)].rsplit(' ', 1)[0] + suffix\n super(Post, self).save(*args, **kwargs)\n", "<import token>\n<class token>\n\n\nclass Tag(models.Model):\n name = models.CharField(max_length=255, unique=True)\n slug = models.SlugField(max_length=255, unique=True)\n\n\n class Meta:\n ordering = ['name']\n\n def __str__(self):\n return self.name\n\n def get_absolute_url(self):\n return reverse('blog.views.tag', args=[self.slug])\n\n\nclass Post(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(unique=True, max_length=255)\n description = models.CharField(max_length=255, null=True, blank=True,\n help_text='Leave blank for auto-fill')\n author = models.CharField(max_length=255, default='Honestly Curated')\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n created = models.DateTimeField(auto_now_add=True)\n tag = models.ManyToManyField(Tag, related_name='posts',\n related_query_name='post', blank=True)\n\n\n class Meta:\n ordering = ['-created']\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('blog.views.post', args=[self.slug])\n\n def save(self, *args, **kwargs):\n if self.content and (self.description is None or self.description == ''\n ):\n suffix = '...'\n length = 100\n content = strip_tags(self.content)\n self.description = content if len(content) <= length else content[:\n length - len(suffix)].rsplit(' ', 1)[0] + suffix\n super(Post, self).save(*args, **kwargs)\n", "<import token>\n<class token>\n\n\nclass Tag(models.Model):\n <assignment token>\n <assignment token>\n\n\n class Meta:\n ordering = ['name']\n\n def __str__(self):\n return self.name\n\n def get_absolute_url(self):\n return reverse('blog.views.tag', args=[self.slug])\n\n\nclass Post(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(unique=True, max_length=255)\n description = models.CharField(max_length=255, null=True, blank=True,\n help_text='Leave blank for auto-fill')\n author = models.CharField(max_length=255, default='Honestly Curated')\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n created = models.DateTimeField(auto_now_add=True)\n tag = models.ManyToManyField(Tag, related_name='posts',\n related_query_name='post', blank=True)\n\n\n class Meta:\n ordering = ['-created']\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('blog.views.post', args=[self.slug])\n\n def save(self, *args, **kwargs):\n if self.content and (self.description is None or self.description == ''\n ):\n suffix = '...'\n length = 100\n content = strip_tags(self.content)\n self.description = content if len(content) <= length else content[:\n length - len(suffix)].rsplit(' ', 1)[0] + suffix\n super(Post, self).save(*args, **kwargs)\n", "<import token>\n<class token>\n\n\nclass Tag(models.Model):\n <assignment 
token>\n <assignment token>\n\n\n class Meta:\n ordering = ['name']\n\n def __str__(self):\n return self.name\n <function token>\n\n\nclass Post(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(unique=True, max_length=255)\n description = models.CharField(max_length=255, null=True, blank=True,\n help_text='Leave blank for auto-fill')\n author = models.CharField(max_length=255, default='Honestly Curated')\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n created = models.DateTimeField(auto_now_add=True)\n tag = models.ManyToManyField(Tag, related_name='posts',\n related_query_name='post', blank=True)\n\n\n class Meta:\n ordering = ['-created']\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('blog.views.post', args=[self.slug])\n\n def save(self, *args, **kwargs):\n if self.content and (self.description is None or self.description == ''\n ):\n suffix = '...'\n length = 100\n content = strip_tags(self.content)\n self.description = content if len(content) <= length else content[:\n length - len(suffix)].rsplit(' ', 1)[0] + suffix\n super(Post, self).save(*args, **kwargs)\n", "<import token>\n<class token>\n\n\nclass Tag(models.Model):\n <assignment token>\n <assignment token>\n\n\n class Meta:\n ordering = ['name']\n <function token>\n <function token>\n\n\nclass Post(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(unique=True, max_length=255)\n description = models.CharField(max_length=255, null=True, blank=True,\n help_text='Leave blank for auto-fill')\n author = models.CharField(max_length=255, default='Honestly Curated')\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n created = models.DateTimeField(auto_now_add=True)\n tag = models.ManyToManyField(Tag, related_name='posts',\n related_query_name='post', blank=True)\n\n\n class Meta:\n ordering = ['-created']\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('blog.views.post', args=[self.slug])\n\n def save(self, *args, **kwargs):\n if self.content and (self.description is None or self.description == ''\n ):\n suffix = '...'\n length = 100\n content = strip_tags(self.content)\n self.description = content if len(content) <= length else content[:\n length - len(suffix)].rsplit(' ', 1)[0] + suffix\n super(Post, self).save(*args, **kwargs)\n", "<import token>\n<class token>\n<class token>\n\n\nclass Post(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(unique=True, max_length=255)\n description = models.CharField(max_length=255, null=True, blank=True,\n help_text='Leave blank for auto-fill')\n author = models.CharField(max_length=255, default='Honestly Curated')\n content = RedactorField(verbose_name=u'Text')\n published = models.BooleanField(default=True)\n created = models.DateTimeField(auto_now_add=True)\n tag = models.ManyToManyField(Tag, related_name='posts',\n related_query_name='post', blank=True)\n\n\n class Meta:\n ordering = ['-created']\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('blog.views.post', args=[self.slug])\n\n def save(self, *args, **kwargs):\n if self.content and (self.description is None or self.description == ''\n ):\n suffix = '...'\n length = 100\n content = strip_tags(self.content)\n self.description = content if len(content) <= length else content[:\n 
length - len(suffix)].rsplit(' ', 1)[0] + suffix\n super(Post, self).save(*args, **kwargs)\n", "<import token>\n<class token>\n<class token>\n\n\nclass Post(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n ordering = ['-created']\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('blog.views.post', args=[self.slug])\n\n def save(self, *args, **kwargs):\n if self.content and (self.description is None or self.description == ''\n ):\n suffix = '...'\n length = 100\n content = strip_tags(self.content)\n self.description = content if len(content) <= length else content[:\n length - len(suffix)].rsplit(' ', 1)[0] + suffix\n super(Post, self).save(*args, **kwargs)\n", "<import token>\n<class token>\n<class token>\n\n\nclass Post(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n ordering = ['-created']\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('blog.views.post', args=[self.slug])\n <function token>\n", "<import token>\n<class token>\n<class token>\n\n\nclass Post(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n ordering = ['-created']\n\n def __unicode__(self):\n return u'%s' % self.title\n <function token>\n <function token>\n", "<import token>\n<class token>\n<class token>\n\n\nclass Post(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n ordering = ['-created']\n <function token>\n <function token>\n <function token>\n", "<import token>\n<class token>\n<class token>\n<class token>\n" ]
false
99,471
3810d0ba664e77d03c2c8e5d2c289333b4060d24
import numpy as np
import torch
from scipy import stats as stats
from sklearn.metrics import f1_score
from torch import nn as nn

from my_functions import precision_k, print_num_on_tqdm, tqdm_with_num


def training(params, model, train_loader, optimizer):
    device = params["device"]
    batch_total = params["train_batch_total"]
    loss_func = nn.BCELoss()

    model.train()
    losses = []

    # Show loss with tqdm
    with tqdm_with_num(train_loader, batch_total) as loader:
        loader.set_description("Training  ")

        # Batch Loop
        for idx, batch in enumerate(loader):
            # ---------------------- Main Process -----------------------
            data, target = (batch.text.to(device), batch.label.to(device))

            optimizer.zero_grad()

            outputs = model(data)
            outputs = torch.sigmoid(outputs)
            loss = loss_func(outputs, target)

            loss.backward()
            optimizer.step()
            # -----------------------------------------------------------

            # Print training progress
            losses.append(loss.item())

            if idx < batch_total - 1:
                print_num_on_tqdm(loader, loss)
            else:
                loss_epoch = np.mean(losses)
                print_num_on_tqdm(loader, loss_epoch, last=True)


def validating_testing(params, model, data_loader, is_valid=True):
    device = params["device"]
    measure = params["measure"]
    doc_key = is_valid and "valid" or "test"
    batch_total = params[doc_key + "_batch_total"]

    model.eval()

    eval_epoch = 0.0
    target_all = np.empty((0, params["num_of_class"]), dtype=np.int8)
    eval_all = np.empty((0, params["num_of_class"]), dtype=np.float32)

    # Show p@k with tqdm
    with tqdm_with_num(data_loader, batch_total) as loader:
        # Set description to tqdm
        is_valid and loader.set_description("Validating")
        is_valid or loader.set_description("Testing   ")

        with torch.no_grad():
            # Batch Loop
            for idx, batch in enumerate(loader):
                # ---------------------- Main Process -----------------------
                data, target = (batch.text.to(device), batch.label.to("cpu"))
                target = target.detach().numpy().copy()

                outputs = model(data)
                outputs = torch.sigmoid(outputs)
                # -----------------------------------------------------------

                # Print some progress
                outputs = outputs.to("cpu").detach().numpy().copy()


                if "f1" in measure:
                    outputs = outputs >= 0.5

                target_all = np.concatenate([target_all, target])
                eval_all = np.concatenate([eval_all, outputs])
                if idx < batch_total - 1:
                    if "f1" in measure:
                        avg = measure[:-3]
                        eval_batch = f1_score(target, outputs, average=avg)
                    else:
                        k = int(measure[-1])
                        eval_batch = precision_k(target, outputs, k)
                    print_num_on_tqdm(loader, eval_batch, measure)
                else:
                    if "f1" in measure:
                        avg = measure[:-3]
                        eval_epoch = f1_score(target_all, eval_all, average=avg)
                    else:
                        k = int(measure[-1])
                        eval_epoch = precision_k(target_all, eval_all, k)
                    print_num_on_tqdm(loader, eval_epoch, measure, True)

    return eval_epoch


def custom_evaluation(params, model, data_loader, is_valid=True):
    device = params["device"]
    measure = params["measure"]
    doc_key = is_valid and "valid" or "test"
    batch_total = params[doc_key + "_batch_total"]

    model.eval()

    eval_epoch = 0.0
    target_all = np.empty((0, params["num_of_class"]), dtype=np.int8)
    eval_all = np.empty((0, params["num_of_class"]), dtype=np.float32)

    sum_recall_1 = 0
    sum_recall_2 = 0
    sum_recall_3 = 0
    sum_precision_1 = 0
    sum_precision_2 = 0
    sum_precision_3 = 0
    num_test_data = 0
    total_labels = 0

    # Show p@k with tqdm
    with tqdm_with_num(data_loader, batch_total) as loader:
        # Set description to tqdm
        is_valid and loader.set_description("Validating")
        is_valid or loader.set_description("Testing   ")

        with torch.no_grad():
            # Batch Loop
            for idx, batch in enumerate(loader):
                # ---------------------- Main Process -----------------------
                data, target = (batch.text.to(device), batch.label.to("cpu"))
                target = target.detach().numpy().copy()

                outputs = model(data)
                outputs = torch.sigmoid(outputs)
                # -----------------------------------------------------------

                # Print some progress
                outputs = outputs.to("cpu").detach().numpy().copy()

                num_test_data += len(outputs)
                # loop through current batch
                for i in range(len(outputs)):
                    actual_labels = np.nonzero(target[i])[0]
                    predicted_labels = np.argsort(outputs[i])[-3:][::-1]

                    correct_prediction = 0
                    if predicted_labels[0] in actual_labels:
                        correct_prediction += 1
                    sum_precision_1 += (correct_prediction / 1)
                    sum_recall_1 += (correct_prediction / len(actual_labels))

                    # K = 2
                    if predicted_labels[1] in actual_labels:
                        correct_prediction += 1
                    sum_precision_2 += (correct_prediction / 2)
                    sum_recall_2 += (correct_prediction / len(actual_labels))

                    # K = 3
                    if predicted_labels[2] in actual_labels:
                        correct_prediction += 1
                    sum_precision_3 += (correct_prediction / 3)
                    sum_recall_3 += (correct_prediction / len(actual_labels))

    precision_1 = sum_precision_1 / num_test_data
    precision_2 = sum_precision_2 / num_test_data
    precision_3 = sum_precision_3 / num_test_data
    recall_1 = sum_recall_1 / num_test_data
    recall_2 = sum_recall_2 / num_test_data
    recall_3 = sum_recall_3 / num_test_data
    f1_1 = 2 * precision_1 * recall_1 / (precision_1 + recall_1)
    f1_2 = 2 * precision_2 * recall_2 / (precision_2 + recall_2)
    f1_3 = 2 * precision_3 * recall_3 / (precision_3 + recall_3)

    print("K = 1")
    print("P@1 = " + precision_1.__str__())
    print("R@1 = " + recall_1.__str__())
    print("F@1 = " + f1_1.__str__())

    print("K = 2")
    print("P@2 = " + precision_2.__str__())
    print("R@2 = " + recall_2.__str__())
    print("F@2 = " + f1_2.__str__())

    print("K = 3")
    print("P@3 = " + precision_3.__str__())
    print("R@3 = " + recall_3.__str__())
    print("F@3 = " + f1_3.__str__())

    return eval_epoch
[ "import numpy as np\r\nimport torch\r\nfrom scipy import stats as stats\r\nfrom sklearn.metrics import f1_score\r\nfrom torch import nn as nn\r\n\r\nfrom my_functions import precision_k, print_num_on_tqdm, tqdm_with_num\r\n\r\n\r\ndef training(params, model, train_loader, optimizer):\r\n device = params[\"device\"]\r\n batch_total = params[\"train_batch_total\"]\r\n loss_func = nn.BCELoss()\r\n\r\n model.train()\r\n losses = []\r\n\r\n # Show loss with tqdm\r\n with tqdm_with_num(train_loader, batch_total) as loader:\r\n loader.set_description(\"Training \")\r\n\r\n # Batch Loop\r\n for idx, batch in enumerate(loader):\r\n # ---------------------- Main Process -----------------------\r\n data, target = (batch.text.to(device), batch.label.to(device))\r\n\r\n optimizer.zero_grad()\r\n\r\n outputs = model(data)\r\n outputs = torch.sigmoid(outputs)\r\n loss = loss_func(outputs, target)\r\n\r\n loss.backward()\r\n optimizer.step()\r\n # -----------------------------------------------------------\r\n\r\n # Print training progress\r\n losses.append(loss.item())\r\n\r\n if idx < batch_total - 1:\r\n print_num_on_tqdm(loader, loss)\r\n else:\r\n loss_epoch = np.mean(losses)\r\n print_num_on_tqdm(loader, loss_epoch, last=True)\r\n\r\n\r\ndef validating_testing(params, model, data_loader, is_valid=True):\r\n device = params[\"device\"]\r\n measure = params[\"measure\"]\r\n doc_key = is_valid and \"valid\" or \"test\"\r\n batch_total = params[doc_key + \"_batch_total\"]\r\n\r\n model.eval()\r\n\r\n eval_epoch = 0.0\r\n target_all = np.empty((0, params[\"num_of_class\"]), dtype=np.int8)\r\n eval_all = np.empty((0, params[\"num_of_class\"]), dtype=np.float32)\r\n\r\n # Show p@k with tqdm\r\n with tqdm_with_num(data_loader, batch_total) as loader:\r\n # Set description to tqdm\r\n is_valid and loader.set_description(\"Validating\")\r\n is_valid or loader.set_description(\"Testing \")\r\n\r\n with torch.no_grad():\r\n # Batch Loop\r\n for idx, batch in enumerate(loader):\r\n # ---------------------- Main Process -----------------------\r\n data, target = (batch.text.to(device), batch.label.to(\"cpu\"))\r\n target = target.detach().numpy().copy()\r\n\r\n outputs = model(data)\r\n outputs = torch.sigmoid(outputs)\r\n # -----------------------------------------------------------\r\n\r\n # Print some progress\r\n outputs = outputs.to(\"cpu\").detach().numpy().copy()\r\n\r\n\r\n if \"f1\" in measure:\r\n outputs = outputs >= 0.5\r\n\r\n target_all = np.concatenate([target_all, target])\r\n eval_all = np.concatenate([eval_all, outputs])\r\n if idx < batch_total - 1:\r\n if \"f1\" in measure:\r\n avg = measure[:-3]\r\n eval_batch = f1_score(target, outputs, average=avg)\r\n else:\r\n k = int(measure[-1])\r\n eval_batch = precision_k(target, outputs, k)\r\n print_num_on_tqdm(loader, eval_batch, measure)\r\n else:\r\n if \"f1\" in measure:\r\n avg = measure[:-3]\r\n eval_epoch = f1_score(target_all, eval_all, average=avg)\r\n else:\r\n k = int(measure[-1])\r\n eval_epoch = precision_k(target_all, eval_all, k)\r\n print_num_on_tqdm(loader, eval_epoch, measure, True)\r\n\r\n return eval_epoch\r\n\r\n\r\n\r\ndef custom_evaluation(params, model, data_loader, is_valid=True):\r\n device = params[\"device\"]\r\n measure = params[\"measure\"]\r\n doc_key = is_valid and \"valid\" or \"test\"\r\n batch_total = params[doc_key + \"_batch_total\"]\r\n\r\n model.eval()\r\n\r\n eval_epoch = 0.0\r\n target_all = np.empty((0, params[\"num_of_class\"]), dtype=np.int8)\r\n eval_all = np.empty((0, params[\"num_of_class\"]), 
dtype=np.float32)\r\n\r\n sum_recall_1 = 0\r\n sum_recall_2 = 0\r\n sum_recall_3 = 0\r\n sum_precision_1 = 0\r\n sum_precision_2 = 0\r\n sum_precision_3 = 0\r\n num_test_data = 0\r\n total_labels = 0\r\n\r\n # Show p@k with tqdm\r\n with tqdm_with_num(data_loader, batch_total) as loader:\r\n # Set description to tqdm\r\n is_valid and loader.set_description(\"Validating\")\r\n is_valid or loader.set_description(\"Testing \")\r\n\r\n with torch.no_grad():\r\n # Batch Loop\r\n for idx, batch in enumerate(loader):\r\n # ---------------------- Main Process -----------------------\r\n data, target = (batch.text.to(device), batch.label.to(\"cpu\"))\r\n target = target.detach().numpy().copy()\r\n\r\n outputs = model(data)\r\n outputs = torch.sigmoid(outputs)\r\n # -----------------------------------------------------------\r\n\r\n # Print some progress\r\n outputs = outputs.to(\"cpu\").detach().numpy().copy()\r\n\r\n num_test_data += len(outputs)\r\n # loop through current batch\r\n for i in range(len(outputs)):\r\n actual_labels = np.nonzero(target[i])[0]\r\n predicted_labels = np.argsort(outputs[i])[-3:][::-1]\r\n\r\n correct_prediction = 0\r\n if predicted_labels[0] in actual_labels:\r\n correct_prediction += 1\r\n sum_precision_1 += (correct_prediction / 1)\r\n sum_recall_1 += (correct_prediction / len(actual_labels))\r\n\r\n # K = 2\r\n if predicted_labels[1] in actual_labels:\r\n correct_prediction += 1\r\n sum_precision_2 += (correct_prediction / 2)\r\n sum_recall_2 += (correct_prediction / len(actual_labels))\r\n\r\n # K = 3\r\n if predicted_labels[2] in actual_labels:\r\n correct_prediction += 1\r\n sum_precision_3 += (correct_prediction / 3)\r\n sum_recall_3 += (correct_prediction / len(actual_labels))\r\n\r\n precision_1 = sum_precision_1 / num_test_data\r\n precision_2 = sum_precision_2 / num_test_data\r\n precision_3 = sum_precision_3 / num_test_data\r\n recall_1 = sum_recall_1 / num_test_data\r\n recall_2 = sum_recall_2 / num_test_data\r\n recall_3 = sum_recall_3 / num_test_data\r\n f1_1 = 2 * precision_1 * recall_1 / (precision_1 + recall_1)\r\n f1_2 = 2 * precision_2 * recall_2 / (precision_2 + recall_2)\r\n f1_3 = 2 * precision_3 * recall_3 / (precision_3 + recall_3)\r\n\r\n print(\"K = 1\")\r\n print(\"P@1 = \" + precision_1.__str__())\r\n print(\"R@1 = \" + recall_1.__str__())\r\n print(\"F@1 = \" + f1_1.__str__())\r\n\r\n print(\"K = 2\")\r\n print(\"P@2 = \" + precision_2.__str__())\r\n print(\"R@2 = \" + recall_2.__str__())\r\n print(\"F@2 = \" + f1_2.__str__())\r\n\r\n print(\"K = 3\")\r\n print(\"P@3 = \" + precision_3.__str__())\r\n print(\"R@3 = \" + recall_3.__str__())\r\n print(\"F@3 = \" + f1_3.__str__())\r\n\r\n return eval_epoch\r\n", "import numpy as np\nimport torch\nfrom scipy import stats as stats\nfrom sklearn.metrics import f1_score\nfrom torch import nn as nn\nfrom my_functions import precision_k, print_num_on_tqdm, tqdm_with_num\n\n\ndef training(params, model, train_loader, optimizer):\n device = params['device']\n batch_total = params['train_batch_total']\n loss_func = nn.BCELoss()\n model.train()\n losses = []\n with tqdm_with_num(train_loader, batch_total) as loader:\n loader.set_description('Training ')\n for idx, batch in enumerate(loader):\n data, target = batch.text.to(device), batch.label.to(device)\n optimizer.zero_grad()\n outputs = model(data)\n outputs = torch.sigmoid(outputs)\n loss = loss_func(outputs, target)\n loss.backward()\n optimizer.step()\n losses.append(loss.item())\n if idx < batch_total - 1:\n print_num_on_tqdm(loader, loss)\n 
else:\n loss_epoch = np.mean(losses)\n print_num_on_tqdm(loader, loss_epoch, last=True)\n\n\ndef validating_testing(params, model, data_loader, is_valid=True):\n device = params['device']\n measure = params['measure']\n doc_key = is_valid and 'valid' or 'test'\n batch_total = params[doc_key + '_batch_total']\n model.eval()\n eval_epoch = 0.0\n target_all = np.empty((0, params['num_of_class']), dtype=np.int8)\n eval_all = np.empty((0, params['num_of_class']), dtype=np.float32)\n with tqdm_with_num(data_loader, batch_total) as loader:\n is_valid and loader.set_description('Validating')\n is_valid or loader.set_description('Testing ')\n with torch.no_grad():\n for idx, batch in enumerate(loader):\n data, target = batch.text.to(device), batch.label.to('cpu')\n target = target.detach().numpy().copy()\n outputs = model(data)\n outputs = torch.sigmoid(outputs)\n outputs = outputs.to('cpu').detach().numpy().copy()\n if 'f1' in measure:\n outputs = outputs >= 0.5\n target_all = np.concatenate([target_all, target])\n eval_all = np.concatenate([eval_all, outputs])\n if idx < batch_total - 1:\n if 'f1' in measure:\n avg = measure[:-3]\n eval_batch = f1_score(target, outputs, average=avg)\n else:\n k = int(measure[-1])\n eval_batch = precision_k(target, outputs, k)\n print_num_on_tqdm(loader, eval_batch, measure)\n else:\n if 'f1' in measure:\n avg = measure[:-3]\n eval_epoch = f1_score(target_all, eval_all, average=avg\n )\n else:\n k = int(measure[-1])\n eval_epoch = precision_k(target_all, eval_all, k)\n print_num_on_tqdm(loader, eval_epoch, measure, True)\n return eval_epoch\n\n\ndef custom_evaluation(params, model, data_loader, is_valid=True):\n device = params['device']\n measure = params['measure']\n doc_key = is_valid and 'valid' or 'test'\n batch_total = params[doc_key + '_batch_total']\n model.eval()\n eval_epoch = 0.0\n target_all = np.empty((0, params['num_of_class']), dtype=np.int8)\n eval_all = np.empty((0, params['num_of_class']), dtype=np.float32)\n sum_recall_1 = 0\n sum_recall_2 = 0\n sum_recall_3 = 0\n sum_precision_1 = 0\n sum_precision_2 = 0\n sum_precision_3 = 0\n num_test_data = 0\n total_labels = 0\n with tqdm_with_num(data_loader, batch_total) as loader:\n is_valid and loader.set_description('Validating')\n is_valid or loader.set_description('Testing ')\n with torch.no_grad():\n for idx, batch in enumerate(loader):\n data, target = batch.text.to(device), batch.label.to('cpu')\n target = target.detach().numpy().copy()\n outputs = model(data)\n outputs = torch.sigmoid(outputs)\n outputs = outputs.to('cpu').detach().numpy().copy()\n num_test_data += len(outputs)\n for i in range(len(outputs)):\n actual_labels = np.nonzero(target[i])[0]\n predicted_labels = np.argsort(outputs[i])[-3:][::-1]\n correct_prediction = 0\n if predicted_labels[0] in actual_labels:\n correct_prediction += 1\n sum_precision_1 += correct_prediction / 1\n sum_recall_1 += correct_prediction / len(actual_labels)\n if predicted_labels[1] in actual_labels:\n correct_prediction += 1\n sum_precision_2 += correct_prediction / 2\n sum_recall_2 += correct_prediction / len(actual_labels)\n if predicted_labels[2] in actual_labels:\n correct_prediction += 1\n sum_precision_3 += correct_prediction / 3\n sum_recall_3 += correct_prediction / len(actual_labels)\n precision_1 = sum_precision_1 / num_test_data\n precision_2 = sum_precision_2 / num_test_data\n precision_3 = sum_precision_3 / num_test_data\n recall_1 = sum_recall_1 / num_test_data\n recall_2 = sum_recall_2 / num_test_data\n recall_3 = sum_recall_3 / 
num_test_data\n f1_1 = 2 * precision_1 * recall_1 / (precision_1 + recall_1)\n f1_2 = 2 * precision_2 * recall_2 / (precision_2 + recall_2)\n f1_3 = 2 * precision_3 * recall_3 / (precision_3 + recall_3)\n print('K = 1')\n print('P@1 = ' + precision_1.__str__())\n print('R@1 = ' + recall_1.__str__())\n print('F@1 = ' + f1_1.__str__())\n print('K = 2')\n print('P@2 = ' + precision_2.__str__())\n print('R@2 = ' + recall_2.__str__())\n print('F@2 = ' + f1_2.__str__())\n print('K = 3')\n print('P@3 = ' + precision_3.__str__())\n print('R@3 = ' + recall_3.__str__())\n print('F@3 = ' + f1_3.__str__())\n return eval_epoch\n", "<import token>\n\n\ndef training(params, model, train_loader, optimizer):\n device = params['device']\n batch_total = params['train_batch_total']\n loss_func = nn.BCELoss()\n model.train()\n losses = []\n with tqdm_with_num(train_loader, batch_total) as loader:\n loader.set_description('Training ')\n for idx, batch in enumerate(loader):\n data, target = batch.text.to(device), batch.label.to(device)\n optimizer.zero_grad()\n outputs = model(data)\n outputs = torch.sigmoid(outputs)\n loss = loss_func(outputs, target)\n loss.backward()\n optimizer.step()\n losses.append(loss.item())\n if idx < batch_total - 1:\n print_num_on_tqdm(loader, loss)\n else:\n loss_epoch = np.mean(losses)\n print_num_on_tqdm(loader, loss_epoch, last=True)\n\n\ndef validating_testing(params, model, data_loader, is_valid=True):\n device = params['device']\n measure = params['measure']\n doc_key = is_valid and 'valid' or 'test'\n batch_total = params[doc_key + '_batch_total']\n model.eval()\n eval_epoch = 0.0\n target_all = np.empty((0, params['num_of_class']), dtype=np.int8)\n eval_all = np.empty((0, params['num_of_class']), dtype=np.float32)\n with tqdm_with_num(data_loader, batch_total) as loader:\n is_valid and loader.set_description('Validating')\n is_valid or loader.set_description('Testing ')\n with torch.no_grad():\n for idx, batch in enumerate(loader):\n data, target = batch.text.to(device), batch.label.to('cpu')\n target = target.detach().numpy().copy()\n outputs = model(data)\n outputs = torch.sigmoid(outputs)\n outputs = outputs.to('cpu').detach().numpy().copy()\n if 'f1' in measure:\n outputs = outputs >= 0.5\n target_all = np.concatenate([target_all, target])\n eval_all = np.concatenate([eval_all, outputs])\n if idx < batch_total - 1:\n if 'f1' in measure:\n avg = measure[:-3]\n eval_batch = f1_score(target, outputs, average=avg)\n else:\n k = int(measure[-1])\n eval_batch = precision_k(target, outputs, k)\n print_num_on_tqdm(loader, eval_batch, measure)\n else:\n if 'f1' in measure:\n avg = measure[:-3]\n eval_epoch = f1_score(target_all, eval_all, average=avg\n )\n else:\n k = int(measure[-1])\n eval_epoch = precision_k(target_all, eval_all, k)\n print_num_on_tqdm(loader, eval_epoch, measure, True)\n return eval_epoch\n\n\ndef custom_evaluation(params, model, data_loader, is_valid=True):\n device = params['device']\n measure = params['measure']\n doc_key = is_valid and 'valid' or 'test'\n batch_total = params[doc_key + '_batch_total']\n model.eval()\n eval_epoch = 0.0\n target_all = np.empty((0, params['num_of_class']), dtype=np.int8)\n eval_all = np.empty((0, params['num_of_class']), dtype=np.float32)\n sum_recall_1 = 0\n sum_recall_2 = 0\n sum_recall_3 = 0\n sum_precision_1 = 0\n sum_precision_2 = 0\n sum_precision_3 = 0\n num_test_data = 0\n total_labels = 0\n with tqdm_with_num(data_loader, batch_total) as loader:\n is_valid and loader.set_description('Validating')\n is_valid or 
loader.set_description('Testing ')\n with torch.no_grad():\n for idx, batch in enumerate(loader):\n data, target = batch.text.to(device), batch.label.to('cpu')\n target = target.detach().numpy().copy()\n outputs = model(data)\n outputs = torch.sigmoid(outputs)\n outputs = outputs.to('cpu').detach().numpy().copy()\n num_test_data += len(outputs)\n for i in range(len(outputs)):\n actual_labels = np.nonzero(target[i])[0]\n predicted_labels = np.argsort(outputs[i])[-3:][::-1]\n correct_prediction = 0\n if predicted_labels[0] in actual_labels:\n correct_prediction += 1\n sum_precision_1 += correct_prediction / 1\n sum_recall_1 += correct_prediction / len(actual_labels)\n if predicted_labels[1] in actual_labels:\n correct_prediction += 1\n sum_precision_2 += correct_prediction / 2\n sum_recall_2 += correct_prediction / len(actual_labels)\n if predicted_labels[2] in actual_labels:\n correct_prediction += 1\n sum_precision_3 += correct_prediction / 3\n sum_recall_3 += correct_prediction / len(actual_labels)\n precision_1 = sum_precision_1 / num_test_data\n precision_2 = sum_precision_2 / num_test_data\n precision_3 = sum_precision_3 / num_test_data\n recall_1 = sum_recall_1 / num_test_data\n recall_2 = sum_recall_2 / num_test_data\n recall_3 = sum_recall_3 / num_test_data\n f1_1 = 2 * precision_1 * recall_1 / (precision_1 + recall_1)\n f1_2 = 2 * precision_2 * recall_2 / (precision_2 + recall_2)\n f1_3 = 2 * precision_3 * recall_3 / (precision_3 + recall_3)\n print('K = 1')\n print('P@1 = ' + precision_1.__str__())\n print('R@1 = ' + recall_1.__str__())\n print('F@1 = ' + f1_1.__str__())\n print('K = 2')\n print('P@2 = ' + precision_2.__str__())\n print('R@2 = ' + recall_2.__str__())\n print('F@2 = ' + f1_2.__str__())\n print('K = 3')\n print('P@3 = ' + precision_3.__str__())\n print('R@3 = ' + recall_3.__str__())\n print('F@3 = ' + f1_3.__str__())\n return eval_epoch\n", "<import token>\n\n\ndef training(params, model, train_loader, optimizer):\n device = params['device']\n batch_total = params['train_batch_total']\n loss_func = nn.BCELoss()\n model.train()\n losses = []\n with tqdm_with_num(train_loader, batch_total) as loader:\n loader.set_description('Training ')\n for idx, batch in enumerate(loader):\n data, target = batch.text.to(device), batch.label.to(device)\n optimizer.zero_grad()\n outputs = model(data)\n outputs = torch.sigmoid(outputs)\n loss = loss_func(outputs, target)\n loss.backward()\n optimizer.step()\n losses.append(loss.item())\n if idx < batch_total - 1:\n print_num_on_tqdm(loader, loss)\n else:\n loss_epoch = np.mean(losses)\n print_num_on_tqdm(loader, loss_epoch, last=True)\n\n\n<function token>\n\n\ndef custom_evaluation(params, model, data_loader, is_valid=True):\n device = params['device']\n measure = params['measure']\n doc_key = is_valid and 'valid' or 'test'\n batch_total = params[doc_key + '_batch_total']\n model.eval()\n eval_epoch = 0.0\n target_all = np.empty((0, params['num_of_class']), dtype=np.int8)\n eval_all = np.empty((0, params['num_of_class']), dtype=np.float32)\n sum_recall_1 = 0\n sum_recall_2 = 0\n sum_recall_3 = 0\n sum_precision_1 = 0\n sum_precision_2 = 0\n sum_precision_3 = 0\n num_test_data = 0\n total_labels = 0\n with tqdm_with_num(data_loader, batch_total) as loader:\n is_valid and loader.set_description('Validating')\n is_valid or loader.set_description('Testing ')\n with torch.no_grad():\n for idx, batch in enumerate(loader):\n data, target = batch.text.to(device), batch.label.to('cpu')\n target = target.detach().numpy().copy()\n outputs = 
model(data)\n outputs = torch.sigmoid(outputs)\n outputs = outputs.to('cpu').detach().numpy().copy()\n num_test_data += len(outputs)\n for i in range(len(outputs)):\n actual_labels = np.nonzero(target[i])[0]\n predicted_labels = np.argsort(outputs[i])[-3:][::-1]\n correct_prediction = 0\n if predicted_labels[0] in actual_labels:\n correct_prediction += 1\n sum_precision_1 += correct_prediction / 1\n sum_recall_1 += correct_prediction / len(actual_labels)\n if predicted_labels[1] in actual_labels:\n correct_prediction += 1\n sum_precision_2 += correct_prediction / 2\n sum_recall_2 += correct_prediction / len(actual_labels)\n if predicted_labels[2] in actual_labels:\n correct_prediction += 1\n sum_precision_3 += correct_prediction / 3\n sum_recall_3 += correct_prediction / len(actual_labels)\n precision_1 = sum_precision_1 / num_test_data\n precision_2 = sum_precision_2 / num_test_data\n precision_3 = sum_precision_3 / num_test_data\n recall_1 = sum_recall_1 / num_test_data\n recall_2 = sum_recall_2 / num_test_data\n recall_3 = sum_recall_3 / num_test_data\n f1_1 = 2 * precision_1 * recall_1 / (precision_1 + recall_1)\n f1_2 = 2 * precision_2 * recall_2 / (precision_2 + recall_2)\n f1_3 = 2 * precision_3 * recall_3 / (precision_3 + recall_3)\n print('K = 1')\n print('P@1 = ' + precision_1.__str__())\n print('R@1 = ' + recall_1.__str__())\n print('F@1 = ' + f1_1.__str__())\n print('K = 2')\n print('P@2 = ' + precision_2.__str__())\n print('R@2 = ' + recall_2.__str__())\n print('F@2 = ' + f1_2.__str__())\n print('K = 3')\n print('P@3 = ' + precision_3.__str__())\n print('R@3 = ' + recall_3.__str__())\n print('F@3 = ' + f1_3.__str__())\n return eval_epoch\n", "<import token>\n<function token>\n<function token>\n\n\ndef custom_evaluation(params, model, data_loader, is_valid=True):\n device = params['device']\n measure = params['measure']\n doc_key = is_valid and 'valid' or 'test'\n batch_total = params[doc_key + '_batch_total']\n model.eval()\n eval_epoch = 0.0\n target_all = np.empty((0, params['num_of_class']), dtype=np.int8)\n eval_all = np.empty((0, params['num_of_class']), dtype=np.float32)\n sum_recall_1 = 0\n sum_recall_2 = 0\n sum_recall_3 = 0\n sum_precision_1 = 0\n sum_precision_2 = 0\n sum_precision_3 = 0\n num_test_data = 0\n total_labels = 0\n with tqdm_with_num(data_loader, batch_total) as loader:\n is_valid and loader.set_description('Validating')\n is_valid or loader.set_description('Testing ')\n with torch.no_grad():\n for idx, batch in enumerate(loader):\n data, target = batch.text.to(device), batch.label.to('cpu')\n target = target.detach().numpy().copy()\n outputs = model(data)\n outputs = torch.sigmoid(outputs)\n outputs = outputs.to('cpu').detach().numpy().copy()\n num_test_data += len(outputs)\n for i in range(len(outputs)):\n actual_labels = np.nonzero(target[i])[0]\n predicted_labels = np.argsort(outputs[i])[-3:][::-1]\n correct_prediction = 0\n if predicted_labels[0] in actual_labels:\n correct_prediction += 1\n sum_precision_1 += correct_prediction / 1\n sum_recall_1 += correct_prediction / len(actual_labels)\n if predicted_labels[1] in actual_labels:\n correct_prediction += 1\n sum_precision_2 += correct_prediction / 2\n sum_recall_2 += correct_prediction / len(actual_labels)\n if predicted_labels[2] in actual_labels:\n correct_prediction += 1\n sum_precision_3 += correct_prediction / 3\n sum_recall_3 += correct_prediction / len(actual_labels)\n precision_1 = sum_precision_1 / num_test_data\n precision_2 = sum_precision_2 / num_test_data\n precision_3 = 
sum_precision_3 / num_test_data\n recall_1 = sum_recall_1 / num_test_data\n recall_2 = sum_recall_2 / num_test_data\n recall_3 = sum_recall_3 / num_test_data\n f1_1 = 2 * precision_1 * recall_1 / (precision_1 + recall_1)\n f1_2 = 2 * precision_2 * recall_2 / (precision_2 + recall_2)\n f1_3 = 2 * precision_3 * recall_3 / (precision_3 + recall_3)\n print('K = 1')\n print('P@1 = ' + precision_1.__str__())\n print('R@1 = ' + recall_1.__str__())\n print('F@1 = ' + f1_1.__str__())\n print('K = 2')\n print('P@2 = ' + precision_2.__str__())\n print('R@2 = ' + recall_2.__str__())\n print('F@2 = ' + f1_2.__str__())\n print('K = 3')\n print('P@3 = ' + precision_3.__str__())\n print('R@3 = ' + recall_3.__str__())\n print('F@3 = ' + f1_3.__str__())\n return eval_epoch\n", "<import token>\n<function token>\n<function token>\n<function token>\n" ]
false
99,472
94f92d55ad47a380d443d58f687fefe4a099025c
def diff21(n):
    if n <= 21 :
        return abs(n-21)
    else:
        return abs(n-21)*2
[ "def diff21(n):\n if n <= 21 : \n return abs(n-21) \n else:\n return abs(n-21)*2\n\n", "def diff21(n):\n if n <= 21:\n return abs(n - 21)\n else:\n return abs(n - 21) * 2\n", "<function token>\n" ]
false
99,473
6947c1da87874e2123ed153a3d641dceeffc6a52
##import sys
##sys.path.append("C:\\Users\\Satyam\\Desktop\\test")
from testing import b
from testing import test
b()
test.c()
[ "##import sys\n##sys.path.append(\"C:\\\\Users\\\\Satyam\\\\Desktop\\\\test\")\nfrom testing import b\nfrom testing import test\nb()\ntest.c()\n\n", "from testing import b\nfrom testing import test\nb()\ntest.c()\n", "<import token>\nb()\ntest.c()\n", "<import token>\n<code token>\n" ]
false
99,474
d2ae5b4007217f1a653a875a14d497cdb910b684
#!/usr/bin/python

from mayavi import mlab
from scipy.special import gamma as Gamma
import numpy as np
import math

def dirichlet(a1, a2, a3):
	x = [[t/10.0 for i in range(10)] for t in range(11)]
	y = [[(1-xr[0])/10.0*i for i in range(1,11)] for xr in x]
	x = np.array(x)
	y = np.array(y)

	k = Gamma(a1+a2+a3)/(Gamma(a1)*Gamma(a2)*Gamma(a3))
	z = k*pow(x, a1-1)*pow(y, a2-1)*pow(1-x-y, a3-1)
	for i in range(len(z)):
		for j in range(len(z[i])):
			if np.isinf(z[i][j]):
				z[i][j] = np.nan

	mlab.mesh(x, y, z)
	mlab.show()

if __name__ == '__main__':
	dirichlet(2, 2, 2)
[ "#!/usr/bin/python\n\nfrom mayavi import mlab\nfrom scipy.special import gamma as Gamma\nimport numpy as np\nimport math\n\ndef dirichlet(a1, a2, a3):\n\tx = [[t/10.0 for i in range(10)] for t in range(11)]\n\ty = [[(1-xr[0])/10.0*i for i in range(1,11)] for xr in x]\n\tx = np.array(x)\n\ty = np.array(y)\n\n\tk = Gamma(a1+a2+a3)/(Gamma(a1)*Gamma(a2)*Gamma(a3))\n\tz = k*pow(x, a1-1)*pow(y, a2-1)*pow(1-x-y, a3-1)\n\tfor i in range(len(z)):\n\t\tfor j in range(len(z[i])):\n\t\t\tif np.isinf(z[i][j]):\n\t\t\t\tz[i][j] = np.nan\n\n\n\tmlab.mesh(x, y, z)\n\tmlab.show()\n\nif __name__ == '__main__':\n\tdirichlet(2, 2, 2)\n", "from mayavi import mlab\nfrom scipy.special import gamma as Gamma\nimport numpy as np\nimport math\n\n\ndef dirichlet(a1, a2, a3):\n x = [[(t / 10.0) for i in range(10)] for t in range(11)]\n y = [[((1 - xr[0]) / 10.0 * i) for i in range(1, 11)] for xr in x]\n x = np.array(x)\n y = np.array(y)\n k = Gamma(a1 + a2 + a3) / (Gamma(a1) * Gamma(a2) * Gamma(a3))\n z = k * pow(x, a1 - 1) * pow(y, a2 - 1) * pow(1 - x - y, a3 - 1)\n for i in range(len(z)):\n for j in range(len(z[i])):\n if np.isinf(z[i][j]):\n z[i][j] = np.nan\n mlab.mesh(x, y, z)\n mlab.show()\n\n\nif __name__ == '__main__':\n dirichlet(2, 2, 2)\n", "<import token>\n\n\ndef dirichlet(a1, a2, a3):\n x = [[(t / 10.0) for i in range(10)] for t in range(11)]\n y = [[((1 - xr[0]) / 10.0 * i) for i in range(1, 11)] for xr in x]\n x = np.array(x)\n y = np.array(y)\n k = Gamma(a1 + a2 + a3) / (Gamma(a1) * Gamma(a2) * Gamma(a3))\n z = k * pow(x, a1 - 1) * pow(y, a2 - 1) * pow(1 - x - y, a3 - 1)\n for i in range(len(z)):\n for j in range(len(z[i])):\n if np.isinf(z[i][j]):\n z[i][j] = np.nan\n mlab.mesh(x, y, z)\n mlab.show()\n\n\nif __name__ == '__main__':\n dirichlet(2, 2, 2)\n", "<import token>\n\n\ndef dirichlet(a1, a2, a3):\n x = [[(t / 10.0) for i in range(10)] for t in range(11)]\n y = [[((1 - xr[0]) / 10.0 * i) for i in range(1, 11)] for xr in x]\n x = np.array(x)\n y = np.array(y)\n k = Gamma(a1 + a2 + a3) / (Gamma(a1) * Gamma(a2) * Gamma(a3))\n z = k * pow(x, a1 - 1) * pow(y, a2 - 1) * pow(1 - x - y, a3 - 1)\n for i in range(len(z)):\n for j in range(len(z[i])):\n if np.isinf(z[i][j]):\n z[i][j] = np.nan\n mlab.mesh(x, y, z)\n mlab.show()\n\n\n<code token>\n", "<import token>\n<function token>\n<code token>\n" ]
false
99,475
196f46c79f09c5cfed7c1495f9353e34fead6e2b
import bs_main


def test_main():
    map = bs_main.run()
    assert len(map.keys()) == 20
[ "import bs_main\n\n\ndef test_main():\n map = bs_main.run()\n assert len(map.keys()) == 20\n", "<import token>\n\n\ndef test_main():\n map = bs_main.run()\n assert len(map.keys()) == 20\n", "<import token>\n<function token>\n" ]
false
99,476
4e651ab3c46f0f2fe558834d6ebe7612ec256507
import csv
import os

def is_non_zero_file(fpath):
    return True if os.path.isfile(fpath) and os.path.getsize(fpath) > 0 else False

number = 0

with open('tadawul_data.csv', 'rbU') as csvfile:
	spamreader = csv.reader(csvfile, delimiter=',')
	next(spamreader)

	for row in spamreader:
		with open(row[0]+'.csv','a+') as datafile:
			d = csv.writer(datafile)
			if not is_non_zero_file(row[0]+'.csv'):
				d.writerow(['Date','Open','High','Low','Close','Volume', 'Adj Close','Num_deals','Value','Change','Change_per'])
			d = csv.writer(datafile)
			d.writerow(row[1:])



print "####Done"
[ "\nimport csv\nimport os\n\ndef is_non_zero_file(fpath): \n return True if os.path.isfile(fpath) and os.path.getsize(fpath) > 0 else False\n\nnumber = 0\n\nwith open('tadawul_data.csv', 'rbU') as csvfile:\n\tspamreader = csv.reader(csvfile, delimiter=',')\n\tnext(spamreader)\n\n\tfor row in spamreader:\n\t\twith open(row[0]+'.csv','a+') as datafile:\n\t\t\td = csv.writer(datafile)\n\t\t\tif not is_non_zero_file(row[0]+'.csv'):\n\t\t\t\td.writerow(['Date','Open','High','Low','Close','Volume', 'Adj Close','Num_deals','Value','Change','Change_per'])\n\t\t\td = csv.writer(datafile)\n\t\t\td.writerow(row[1:])\n\n\n\nprint \"####Done\"" ]
true
99,477
d9230ea7d293a61146b47ad7b7cea23c3644b7d6
from django.contrib import admin
from .models import Pet, Feeding, Toy, Photo
# Register your models here.

admin.site.register(Pet)
admin.site.register(Feeding)
admin.site.register(Toy)
admin.site.register(Photo)
[ "from django.contrib import admin\nfrom .models import Pet, Feeding, Toy, Photo\n# Register your models here.\n\nadmin.site.register(Pet)\nadmin.site.register(Feeding)\nadmin.site.register(Toy)\nadmin.site.register(Photo)", "from django.contrib import admin\nfrom .models import Pet, Feeding, Toy, Photo\nadmin.site.register(Pet)\nadmin.site.register(Feeding)\nadmin.site.register(Toy)\nadmin.site.register(Photo)\n", "<import token>\nadmin.site.register(Pet)\nadmin.site.register(Feeding)\nadmin.site.register(Toy)\nadmin.site.register(Photo)\n", "<import token>\n<code token>\n" ]
false
99,478
0c3ea483d0f12cdd44962cfce25faa8893737163
""" Write a Python program to append a list to the second list. """ list1=[1,2,3,4] list2=[5,6,7,8] list1+=list2 print(list1)
[ "\"\"\"\r\nWrite a Python program to append a list to the second list.\r\n\"\"\"\r\n\r\nlist1=[1,2,3,4]\r\nlist2=[5,6,7,8]\r\n\r\nlist1+=list2\r\nprint(list1)", "<docstring token>\nlist1 = [1, 2, 3, 4]\nlist2 = [5, 6, 7, 8]\nlist1 += list2\nprint(list1)\n", "<docstring token>\n<assignment token>\nlist1 += list2\nprint(list1)\n", "<docstring token>\n<assignment token>\n<code token>\n" ]
false
99,479
15e0b5c7d9636f53b85c4def9eb3acf8a7f230eb
my_name = 'Zed A. Shaw'
my_age = 35 # not a lie
my_height = 74 # inches, Study Drill 2: 0.0254 m to a inche
#height = 74 * 0.0254 # height of Zed in meters.
my_weight = 180 # lbs, Study Drill 2: 0.45 kg to a lb
#weight = 180 * 0.45 # weight of Zed in kilograms.
my_eyes = 'Blue'
my_teeth = 'White'
my_hair = 'Brown'

print("Let's talk about %s." % my_name)
print("He's %d inches tall." % my_height)
print("He's %d punds heavy." % my_weight)
print("Actually that's not too heavy.")
print("He's got %s eyes and %s hair." % (my_eyes, my_hair))
print("His teeth are usually %s depending on the coffee." % my_teeth)

# this line is tricky, try to get it exactly right
print("If I add %d, %d, and %d I get %d." % (
    my_age, my_height, my_weight, my_age + my_height + my_weight))
[ "my_name = 'Zed A. Shaw'\nmy_age = 35 # not a lie\nmy_height = 74 # inches, Study Drill 2: 0.0254 m to a inche\n#height = 74 * 0.0254 # height of Zed in meters.\nmy_weight = 180 # lbs, Study Drill 2: 0.45 kg to a lb\n#weight = 180 * 0.45 # weight of Zed in kilograms.\nmy_eyes = 'Blue'\nmy_teeth = 'White'\nmy_hair = 'Brown'\n\nprint(\"Let's talk about %s.\" % my_name)\nprint(\"He's %d inches tall.\" % my_height)\nprint(\"He's %d punds heavy.\" % my_weight)\nprint(\"Actually that's not too heavy.\")\nprint(\"He's got %s eyes and %s hair.\" % (my_eyes, my_hair))\nprint(\"His teeth are usually %s depending on the coffee.\" % my_teeth)\n\n# this line is tricky, try to get it exactly right\nprint(\"If I add %d, %d, and %d I get %d.\" % (\n my_age, my_height, my_weight, my_age + my_height + my_weight))\n", "my_name = 'Zed A. Shaw'\nmy_age = 35\nmy_height = 74\nmy_weight = 180\nmy_eyes = 'Blue'\nmy_teeth = 'White'\nmy_hair = 'Brown'\nprint(\"Let's talk about %s.\" % my_name)\nprint(\"He's %d inches tall.\" % my_height)\nprint(\"He's %d punds heavy.\" % my_weight)\nprint(\"Actually that's not too heavy.\")\nprint(\"He's got %s eyes and %s hair.\" % (my_eyes, my_hair))\nprint('His teeth are usually %s depending on the coffee.' % my_teeth)\nprint('If I add %d, %d, and %d I get %d.' % (my_age, my_height, my_weight, \n my_age + my_height + my_weight))\n", "<assignment token>\nprint(\"Let's talk about %s.\" % my_name)\nprint(\"He's %d inches tall.\" % my_height)\nprint(\"He's %d punds heavy.\" % my_weight)\nprint(\"Actually that's not too heavy.\")\nprint(\"He's got %s eyes and %s hair.\" % (my_eyes, my_hair))\nprint('His teeth are usually %s depending on the coffee.' % my_teeth)\nprint('If I add %d, %d, and %d I get %d.' % (my_age, my_height, my_weight, \n my_age + my_height + my_weight))\n", "<assignment token>\n<code token>\n" ]
false
99,480
1d8c380d3d6bdaed4e956ff3127ddced5f23d1d3
from .od_route_extractor import OriginDestinationRouteExtractor # expose the feature extraction utility
[ "from .od_route_extractor import OriginDestinationRouteExtractor # expose the feature extraction utility\n", "from .od_route_extractor import OriginDestinationRouteExtractor\n", "<import token>\n" ]
false
99,481
29261e47e1ee6c5901f6914380ebf11a97690e1d
# -*- coding: utf-8 -*- from Tkinter import * from tkFileDialog import * from Tools import * class Clusterization(): def __init__ (self): self.colors = ['black', 'green', 'blue', 'red', 'magenta', 'grey', 'cyan'] self.dots = list() self.centers = list() self.window = Tk() self.window.title('Kohonen') self.window.geometry('1000x900+100+100') self.window.resizable(False, False) self.drawing_area = Canvas(self.window, width=970, height=750, bd=2, cursor = 'dot', relief=RIDGE) self.drawing_area.place(x = 15, y = 25, width = 970) self.drawing_area.bind("<ButtonPress-1>", self.draw_dots) self.drawing_area.bind("<ButtonPress-3>", self.draw_centers) self.button_manhattan = Button(self.window, bd = 2, text = 'Manhattan', width = 30, height = 1, relief=RIDGE) self.button_manhattan.place(x = 300, y = 800, width = 105) self.button_manhattan.bind('<Button-1>', self.manhattan) self.button_chebyshev = Button(self.window, bd = 2, text = 'Chebyshev', width = 30, height = 1, relief=RIDGE) self.button_chebyshev.place(x = 410, y = 800, width = 105) self.button_chebyshev.bind('<Button-1>', self.chebyshev) self.button_upload_dots = Button(self.window, bd = 2, text = 'Upload dots', width = 30, height = 1, relief=RIDGE) self.button_upload_dots.place(x = 520, y = 800, width = 105) self.button_upload_dots.bind('<Button-1>', self.upload_dots) self.button_upload_centers = Button(self.window, bd = 2, text = 'Upload centers', width = 30, height = 1, relief=RIDGE) self.button_upload_centers.place(x = 630, y = 800, width = 105) self.button_upload_centers.bind('<Button-1>', self.upload_centers) self.button_clear_all = Button(self.window, bd = 2, text = 'C L E A R A L L', width = 30, height = 1, relief=RIDGE) self.button_clear_all.place(x = 300, y = 850, width = 435) self.button_clear_all.bind('<Button-1>', self.reset) # self.button_write = Button(self.window, bd = 2, text = 'Write data to file', width = 30, height = 1, relief=RIDGE) # self.button_write.place(x = 510, y = 850, width = 200) #self.button_upload_centers.bind('<Button-1>', self.write_to_file) def manhattan(self, event): clusters = kohonen(self.dots, self.centers, 'Manhattan') self.drawing_area.delete('all') for i,center in enumerate(self.centers): cl = list() for dot,cluster in zip(self.dots,clusters): if cluster == center: cl.append(dot) cl.insert(0, center) x_center = cl[0][0] y_center = cl[0][1] self.drawing_area.create_oval(x_center, y_center, x_center + 7, y_center + 7, width=1, fill=self.colors[0]) for c in cl[1:]: x = c[0] y = c[1] self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill=self.colors[i+1]) def chebyshev(self, event): clusters = kohonen(self.dots, self.centers, 'Chebyshev') self.drawing_area.delete('all') for i,center in enumerate(self.centers): cl = list() for dot,cluster in zip(self.dots,clusters): if cluster == center: cl.append(dot) cl.insert(0, center) x_center = cl[0][0] y_center = cl[0][1] self.drawing_area.create_oval(x_center, y_center, x_center + 7, y_center + 7, width=1, fill=self.colors[0]) for c in cl[1:]: x = c[0] y = c[1] self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill=self.colors[i+1]) def draw_dots(self, event): event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7, width=1, fill=self.colors[0]) self.dots.append([event.x, event.y]) def draw_centers(self, event): event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7, width=1, fill=self.colors[1]) self.centers.append([event.x, event.y]) def upload_dots(self, event): Tk().withdraw() filename = askopenfilename() self.dots += 
list(read_file(filename)) for dot in self.dots: x = dot[0] y = dot[1] self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill=self.colors[0]) def upload_centers(self, event): Tk().withdraw() filename = askopenfilename() self.centers += list(read_file(filename)) for center in self.centers: x = center[0] y = center[1] self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill=self.colors[1]) def reset (self, event): self.drawing_area.delete('all') self.dots = list() self.centers = list() def run (self): self.window.mainloop()
[ "# -*- coding: utf-8 -*-\nfrom Tkinter import *\nfrom tkFileDialog import *\nfrom Tools import *\nclass Clusterization():\n def __init__ (self):\n\n self.colors = ['black', 'green', 'blue', 'red', 'magenta', 'grey', 'cyan']\n\n self.dots = list()\n self.centers = list()\n\n self.window = Tk()\n self.window.title('Kohonen')\n self.window.geometry('1000x900+100+100')\n self.window.resizable(False, False)\n\n self.drawing_area = Canvas(self.window, width=970, height=750, bd=2, cursor = 'dot', relief=RIDGE)\n self.drawing_area.place(x = 15, y = 25, width = 970)\n self.drawing_area.bind(\"<ButtonPress-1>\", self.draw_dots)\n self.drawing_area.bind(\"<ButtonPress-3>\", self.draw_centers)\n\n self.button_manhattan = Button(self.window, bd = 2, text = 'Manhattan', width = 30, height = 1, relief=RIDGE)\n self.button_manhattan.place(x = 300, y = 800, width = 105)\n self.button_manhattan.bind('<Button-1>', self.manhattan)\n\n self.button_chebyshev = Button(self.window, bd = 2, text = 'Chebyshev', width = 30, height = 1, relief=RIDGE)\n self.button_chebyshev.place(x = 410, y = 800, width = 105)\n self.button_chebyshev.bind('<Button-1>', self.chebyshev)\n\n self.button_upload_dots = Button(self.window, bd = 2, text = 'Upload dots', width = 30, height = 1, relief=RIDGE)\n self.button_upload_dots.place(x = 520, y = 800, width = 105)\n self.button_upload_dots.bind('<Button-1>', self.upload_dots)\n\n self.button_upload_centers = Button(self.window, bd = 2, text = 'Upload centers', width = 30, height = 1, relief=RIDGE)\n self.button_upload_centers.place(x = 630, y = 800, width = 105)\n self.button_upload_centers.bind('<Button-1>', self.upload_centers)\n\n self.button_clear_all = Button(self.window, bd = 2, text = 'C L E A R A L L', width = 30, height = 1, relief=RIDGE)\n self.button_clear_all.place(x = 300, y = 850, width = 435)\n self.button_clear_all.bind('<Button-1>', self.reset)\n\n # self.button_write = Button(self.window, bd = 2, text = 'Write data to file', width = 30, height = 1, relief=RIDGE)\n # self.button_write.place(x = 510, y = 850, width = 200)\n #self.button_upload_centers.bind('<Button-1>', self.write_to_file)\n\n def manhattan(self, event):\n clusters = kohonen(self.dots, self.centers, 'Manhattan')\n self.drawing_area.delete('all')\n for i,center in enumerate(self.centers):\n cl = list()\n for dot,cluster in zip(self.dots,clusters):\n if cluster == center:\n cl.append(dot)\n cl.insert(0, center)\n x_center = cl[0][0]\n y_center = cl[0][1]\n self.drawing_area.create_oval(x_center, y_center, x_center + 7, y_center + 7, width=1, fill=self.colors[0])\n for c in cl[1:]:\n x = c[0]\n y = c[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill=self.colors[i+1])\n\n def chebyshev(self, event):\n clusters = kohonen(self.dots, self.centers, 'Chebyshev')\n self.drawing_area.delete('all')\n for i,center in enumerate(self.centers):\n cl = list()\n for dot,cluster in zip(self.dots,clusters):\n if cluster == center:\n cl.append(dot)\n cl.insert(0, center)\n x_center = cl[0][0]\n y_center = cl[0][1]\n self.drawing_area.create_oval(x_center, y_center, x_center + 7, y_center + 7, width=1, fill=self.colors[0])\n for c in cl[1:]:\n x = c[0]\n y = c[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill=self.colors[i+1])\n\n def draw_dots(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7, width=1, fill=self.colors[0])\n self.dots.append([event.x, event.y])\n\n def draw_centers(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 
7, event.y + 7, width=1, fill=self.colors[1])\n self.centers.append([event.x, event.y])\n\n def upload_dots(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.dots += list(read_file(filename))\n for dot in self.dots:\n x = dot[0]\n y = dot[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill=self.colors[0])\n\n def upload_centers(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.centers += list(read_file(filename))\n for center in self.centers:\n x = center[0]\n y = center[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill=self.colors[1])\n\n def reset (self, event):\n self.drawing_area.delete('all')\n self.dots = list()\n self.centers = list()\n\n def run (self):\n self.window.mainloop()\n", "from Tkinter import *\nfrom tkFileDialog import *\nfrom Tools import *\n\n\nclass Clusterization:\n\n def __init__(self):\n self.colors = ['black', 'green', 'blue', 'red', 'magenta', 'grey',\n 'cyan']\n self.dots = list()\n self.centers = list()\n self.window = Tk()\n self.window.title('Kohonen')\n self.window.geometry('1000x900+100+100')\n self.window.resizable(False, False)\n self.drawing_area = Canvas(self.window, width=970, height=750, bd=2,\n cursor='dot', relief=RIDGE)\n self.drawing_area.place(x=15, y=25, width=970)\n self.drawing_area.bind('<ButtonPress-1>', self.draw_dots)\n self.drawing_area.bind('<ButtonPress-3>', self.draw_centers)\n self.button_manhattan = Button(self.window, bd=2, text='Manhattan',\n width=30, height=1, relief=RIDGE)\n self.button_manhattan.place(x=300, y=800, width=105)\n self.button_manhattan.bind('<Button-1>', self.manhattan)\n self.button_chebyshev = Button(self.window, bd=2, text='Chebyshev',\n width=30, height=1, relief=RIDGE)\n self.button_chebyshev.place(x=410, y=800, width=105)\n self.button_chebyshev.bind('<Button-1>', self.chebyshev)\n self.button_upload_dots = Button(self.window, bd=2, text=\n 'Upload dots', width=30, height=1, relief=RIDGE)\n self.button_upload_dots.place(x=520, y=800, width=105)\n self.button_upload_dots.bind('<Button-1>', self.upload_dots)\n self.button_upload_centers = Button(self.window, bd=2, text=\n 'Upload centers', width=30, height=1, relief=RIDGE)\n self.button_upload_centers.place(x=630, y=800, width=105)\n self.button_upload_centers.bind('<Button-1>', self.upload_centers)\n self.button_clear_all = Button(self.window, bd=2, text=\n 'C L E A R A L L', width=30, height=1, relief=RIDGE)\n self.button_clear_all.place(x=300, y=850, width=435)\n self.button_clear_all.bind('<Button-1>', self.reset)\n\n def manhattan(self, event):\n clusters = kohonen(self.dots, self.centers, 'Manhattan')\n self.drawing_area.delete('all')\n for i, center in enumerate(self.centers):\n cl = list()\n for dot, cluster in zip(self.dots, clusters):\n if cluster == center:\n cl.append(dot)\n cl.insert(0, center)\n x_center = cl[0][0]\n y_center = cl[0][1]\n self.drawing_area.create_oval(x_center, y_center, x_center + 7,\n y_center + 7, width=1, fill=self.colors[0])\n for c in cl[1:]:\n x = c[0]\n y = c[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1,\n fill=self.colors[i + 1])\n\n def chebyshev(self, event):\n clusters = kohonen(self.dots, self.centers, 'Chebyshev')\n self.drawing_area.delete('all')\n for i, center in enumerate(self.centers):\n cl = list()\n for dot, cluster in zip(self.dots, clusters):\n if cluster == center:\n cl.append(dot)\n cl.insert(0, center)\n x_center = cl[0][0]\n y_center = cl[0][1]\n self.drawing_area.create_oval(x_center, y_center, x_center + 7,\n 
y_center + 7, width=1, fill=self.colors[0])\n for c in cl[1:]:\n x = c[0]\n y = c[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1,\n fill=self.colors[i + 1])\n\n def draw_dots(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7,\n width=1, fill=self.colors[0])\n self.dots.append([event.x, event.y])\n\n def draw_centers(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7,\n width=1, fill=self.colors[1])\n self.centers.append([event.x, event.y])\n\n def upload_dots(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.dots += list(read_file(filename))\n for dot in self.dots:\n x = dot[0]\n y = dot[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[0])\n\n def upload_centers(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.centers += list(read_file(filename))\n for center in self.centers:\n x = center[0]\n y = center[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[1])\n\n def reset(self, event):\n self.drawing_area.delete('all')\n self.dots = list()\n self.centers = list()\n\n def run(self):\n self.window.mainloop()\n", "<import token>\n\n\nclass Clusterization:\n\n def __init__(self):\n self.colors = ['black', 'green', 'blue', 'red', 'magenta', 'grey',\n 'cyan']\n self.dots = list()\n self.centers = list()\n self.window = Tk()\n self.window.title('Kohonen')\n self.window.geometry('1000x900+100+100')\n self.window.resizable(False, False)\n self.drawing_area = Canvas(self.window, width=970, height=750, bd=2,\n cursor='dot', relief=RIDGE)\n self.drawing_area.place(x=15, y=25, width=970)\n self.drawing_area.bind('<ButtonPress-1>', self.draw_dots)\n self.drawing_area.bind('<ButtonPress-3>', self.draw_centers)\n self.button_manhattan = Button(self.window, bd=2, text='Manhattan',\n width=30, height=1, relief=RIDGE)\n self.button_manhattan.place(x=300, y=800, width=105)\n self.button_manhattan.bind('<Button-1>', self.manhattan)\n self.button_chebyshev = Button(self.window, bd=2, text='Chebyshev',\n width=30, height=1, relief=RIDGE)\n self.button_chebyshev.place(x=410, y=800, width=105)\n self.button_chebyshev.bind('<Button-1>', self.chebyshev)\n self.button_upload_dots = Button(self.window, bd=2, text=\n 'Upload dots', width=30, height=1, relief=RIDGE)\n self.button_upload_dots.place(x=520, y=800, width=105)\n self.button_upload_dots.bind('<Button-1>', self.upload_dots)\n self.button_upload_centers = Button(self.window, bd=2, text=\n 'Upload centers', width=30, height=1, relief=RIDGE)\n self.button_upload_centers.place(x=630, y=800, width=105)\n self.button_upload_centers.bind('<Button-1>', self.upload_centers)\n self.button_clear_all = Button(self.window, bd=2, text=\n 'C L E A R A L L', width=30, height=1, relief=RIDGE)\n self.button_clear_all.place(x=300, y=850, width=435)\n self.button_clear_all.bind('<Button-1>', self.reset)\n\n def manhattan(self, event):\n clusters = kohonen(self.dots, self.centers, 'Manhattan')\n self.drawing_area.delete('all')\n for i, center in enumerate(self.centers):\n cl = list()\n for dot, cluster in zip(self.dots, clusters):\n if cluster == center:\n cl.append(dot)\n cl.insert(0, center)\n x_center = cl[0][0]\n y_center = cl[0][1]\n self.drawing_area.create_oval(x_center, y_center, x_center + 7,\n y_center + 7, width=1, fill=self.colors[0])\n for c in cl[1:]:\n x = c[0]\n y = c[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1,\n fill=self.colors[i + 1])\n\n def 
chebyshev(self, event):\n clusters = kohonen(self.dots, self.centers, 'Chebyshev')\n self.drawing_area.delete('all')\n for i, center in enumerate(self.centers):\n cl = list()\n for dot, cluster in zip(self.dots, clusters):\n if cluster == center:\n cl.append(dot)\n cl.insert(0, center)\n x_center = cl[0][0]\n y_center = cl[0][1]\n self.drawing_area.create_oval(x_center, y_center, x_center + 7,\n y_center + 7, width=1, fill=self.colors[0])\n for c in cl[1:]:\n x = c[0]\n y = c[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1,\n fill=self.colors[i + 1])\n\n def draw_dots(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7,\n width=1, fill=self.colors[0])\n self.dots.append([event.x, event.y])\n\n def draw_centers(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7,\n width=1, fill=self.colors[1])\n self.centers.append([event.x, event.y])\n\n def upload_dots(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.dots += list(read_file(filename))\n for dot in self.dots:\n x = dot[0]\n y = dot[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[0])\n\n def upload_centers(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.centers += list(read_file(filename))\n for center in self.centers:\n x = center[0]\n y = center[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[1])\n\n def reset(self, event):\n self.drawing_area.delete('all')\n self.dots = list()\n self.centers = list()\n\n def run(self):\n self.window.mainloop()\n", "<import token>\n\n\nclass Clusterization:\n\n def __init__(self):\n self.colors = ['black', 'green', 'blue', 'red', 'magenta', 'grey',\n 'cyan']\n self.dots = list()\n self.centers = list()\n self.window = Tk()\n self.window.title('Kohonen')\n self.window.geometry('1000x900+100+100')\n self.window.resizable(False, False)\n self.drawing_area = Canvas(self.window, width=970, height=750, bd=2,\n cursor='dot', relief=RIDGE)\n self.drawing_area.place(x=15, y=25, width=970)\n self.drawing_area.bind('<ButtonPress-1>', self.draw_dots)\n self.drawing_area.bind('<ButtonPress-3>', self.draw_centers)\n self.button_manhattan = Button(self.window, bd=2, text='Manhattan',\n width=30, height=1, relief=RIDGE)\n self.button_manhattan.place(x=300, y=800, width=105)\n self.button_manhattan.bind('<Button-1>', self.manhattan)\n self.button_chebyshev = Button(self.window, bd=2, text='Chebyshev',\n width=30, height=1, relief=RIDGE)\n self.button_chebyshev.place(x=410, y=800, width=105)\n self.button_chebyshev.bind('<Button-1>', self.chebyshev)\n self.button_upload_dots = Button(self.window, bd=2, text=\n 'Upload dots', width=30, height=1, relief=RIDGE)\n self.button_upload_dots.place(x=520, y=800, width=105)\n self.button_upload_dots.bind('<Button-1>', self.upload_dots)\n self.button_upload_centers = Button(self.window, bd=2, text=\n 'Upload centers', width=30, height=1, relief=RIDGE)\n self.button_upload_centers.place(x=630, y=800, width=105)\n self.button_upload_centers.bind('<Button-1>', self.upload_centers)\n self.button_clear_all = Button(self.window, bd=2, text=\n 'C L E A R A L L', width=30, height=1, relief=RIDGE)\n self.button_clear_all.place(x=300, y=850, width=435)\n self.button_clear_all.bind('<Button-1>', self.reset)\n\n def manhattan(self, event):\n clusters = kohonen(self.dots, self.centers, 'Manhattan')\n self.drawing_area.delete('all')\n for i, center in enumerate(self.centers):\n cl = list()\n for dot, 
cluster in zip(self.dots, clusters):\n if cluster == center:\n cl.append(dot)\n cl.insert(0, center)\n x_center = cl[0][0]\n y_center = cl[0][1]\n self.drawing_area.create_oval(x_center, y_center, x_center + 7,\n y_center + 7, width=1, fill=self.colors[0])\n for c in cl[1:]:\n x = c[0]\n y = c[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1,\n fill=self.colors[i + 1])\n <function token>\n\n def draw_dots(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7,\n width=1, fill=self.colors[0])\n self.dots.append([event.x, event.y])\n\n def draw_centers(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7,\n width=1, fill=self.colors[1])\n self.centers.append([event.x, event.y])\n\n def upload_dots(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.dots += list(read_file(filename))\n for dot in self.dots:\n x = dot[0]\n y = dot[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[0])\n\n def upload_centers(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.centers += list(read_file(filename))\n for center in self.centers:\n x = center[0]\n y = center[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[1])\n\n def reset(self, event):\n self.drawing_area.delete('all')\n self.dots = list()\n self.centers = list()\n\n def run(self):\n self.window.mainloop()\n", "<import token>\n\n\nclass Clusterization:\n\n def __init__(self):\n self.colors = ['black', 'green', 'blue', 'red', 'magenta', 'grey',\n 'cyan']\n self.dots = list()\n self.centers = list()\n self.window = Tk()\n self.window.title('Kohonen')\n self.window.geometry('1000x900+100+100')\n self.window.resizable(False, False)\n self.drawing_area = Canvas(self.window, width=970, height=750, bd=2,\n cursor='dot', relief=RIDGE)\n self.drawing_area.place(x=15, y=25, width=970)\n self.drawing_area.bind('<ButtonPress-1>', self.draw_dots)\n self.drawing_area.bind('<ButtonPress-3>', self.draw_centers)\n self.button_manhattan = Button(self.window, bd=2, text='Manhattan',\n width=30, height=1, relief=RIDGE)\n self.button_manhattan.place(x=300, y=800, width=105)\n self.button_manhattan.bind('<Button-1>', self.manhattan)\n self.button_chebyshev = Button(self.window, bd=2, text='Chebyshev',\n width=30, height=1, relief=RIDGE)\n self.button_chebyshev.place(x=410, y=800, width=105)\n self.button_chebyshev.bind('<Button-1>', self.chebyshev)\n self.button_upload_dots = Button(self.window, bd=2, text=\n 'Upload dots', width=30, height=1, relief=RIDGE)\n self.button_upload_dots.place(x=520, y=800, width=105)\n self.button_upload_dots.bind('<Button-1>', self.upload_dots)\n self.button_upload_centers = Button(self.window, bd=2, text=\n 'Upload centers', width=30, height=1, relief=RIDGE)\n self.button_upload_centers.place(x=630, y=800, width=105)\n self.button_upload_centers.bind('<Button-1>', self.upload_centers)\n self.button_clear_all = Button(self.window, bd=2, text=\n 'C L E A R A L L', width=30, height=1, relief=RIDGE)\n self.button_clear_all.place(x=300, y=850, width=435)\n self.button_clear_all.bind('<Button-1>', self.reset)\n\n def manhattan(self, event):\n clusters = kohonen(self.dots, self.centers, 'Manhattan')\n self.drawing_area.delete('all')\n for i, center in enumerate(self.centers):\n cl = list()\n for dot, cluster in zip(self.dots, clusters):\n if cluster == center:\n cl.append(dot)\n cl.insert(0, center)\n x_center = cl[0][0]\n y_center = cl[0][1]\n 
self.drawing_area.create_oval(x_center, y_center, x_center + 7,\n y_center + 7, width=1, fill=self.colors[0])\n for c in cl[1:]:\n x = c[0]\n y = c[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1,\n fill=self.colors[i + 1])\n <function token>\n\n def draw_dots(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7,\n width=1, fill=self.colors[0])\n self.dots.append([event.x, event.y])\n\n def draw_centers(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7,\n width=1, fill=self.colors[1])\n self.centers.append([event.x, event.y])\n\n def upload_dots(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.dots += list(read_file(filename))\n for dot in self.dots:\n x = dot[0]\n y = dot[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[0])\n\n def upload_centers(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.centers += list(read_file(filename))\n for center in self.centers:\n x = center[0]\n y = center[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[1])\n\n def reset(self, event):\n self.drawing_area.delete('all')\n self.dots = list()\n self.centers = list()\n <function token>\n", "<import token>\n\n\nclass Clusterization:\n\n def __init__(self):\n self.colors = ['black', 'green', 'blue', 'red', 'magenta', 'grey',\n 'cyan']\n self.dots = list()\n self.centers = list()\n self.window = Tk()\n self.window.title('Kohonen')\n self.window.geometry('1000x900+100+100')\n self.window.resizable(False, False)\n self.drawing_area = Canvas(self.window, width=970, height=750, bd=2,\n cursor='dot', relief=RIDGE)\n self.drawing_area.place(x=15, y=25, width=970)\n self.drawing_area.bind('<ButtonPress-1>', self.draw_dots)\n self.drawing_area.bind('<ButtonPress-3>', self.draw_centers)\n self.button_manhattan = Button(self.window, bd=2, text='Manhattan',\n width=30, height=1, relief=RIDGE)\n self.button_manhattan.place(x=300, y=800, width=105)\n self.button_manhattan.bind('<Button-1>', self.manhattan)\n self.button_chebyshev = Button(self.window, bd=2, text='Chebyshev',\n width=30, height=1, relief=RIDGE)\n self.button_chebyshev.place(x=410, y=800, width=105)\n self.button_chebyshev.bind('<Button-1>', self.chebyshev)\n self.button_upload_dots = Button(self.window, bd=2, text=\n 'Upload dots', width=30, height=1, relief=RIDGE)\n self.button_upload_dots.place(x=520, y=800, width=105)\n self.button_upload_dots.bind('<Button-1>', self.upload_dots)\n self.button_upload_centers = Button(self.window, bd=2, text=\n 'Upload centers', width=30, height=1, relief=RIDGE)\n self.button_upload_centers.place(x=630, y=800, width=105)\n self.button_upload_centers.bind('<Button-1>', self.upload_centers)\n self.button_clear_all = Button(self.window, bd=2, text=\n 'C L E A R A L L', width=30, height=1, relief=RIDGE)\n self.button_clear_all.place(x=300, y=850, width=435)\n self.button_clear_all.bind('<Button-1>', self.reset)\n\n def manhattan(self, event):\n clusters = kohonen(self.dots, self.centers, 'Manhattan')\n self.drawing_area.delete('all')\n for i, center in enumerate(self.centers):\n cl = list()\n for dot, cluster in zip(self.dots, clusters):\n if cluster == center:\n cl.append(dot)\n cl.insert(0, center)\n x_center = cl[0][0]\n y_center = cl[0][1]\n self.drawing_area.create_oval(x_center, y_center, x_center + 7,\n y_center + 7, width=1, fill=self.colors[0])\n for c in cl[1:]:\n x = c[0]\n y = c[1]\n self.drawing_area.create_oval(x, y, x + 7, y 
+ 7, width=1,\n fill=self.colors[i + 1])\n <function token>\n\n def draw_dots(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7,\n width=1, fill=self.colors[0])\n self.dots.append([event.x, event.y])\n <function token>\n\n def upload_dots(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.dots += list(read_file(filename))\n for dot in self.dots:\n x = dot[0]\n y = dot[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[0])\n\n def upload_centers(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.centers += list(read_file(filename))\n for center in self.centers:\n x = center[0]\n y = center[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[1])\n\n def reset(self, event):\n self.drawing_area.delete('all')\n self.dots = list()\n self.centers = list()\n <function token>\n", "<import token>\n\n\nclass Clusterization:\n <function token>\n\n def manhattan(self, event):\n clusters = kohonen(self.dots, self.centers, 'Manhattan')\n self.drawing_area.delete('all')\n for i, center in enumerate(self.centers):\n cl = list()\n for dot, cluster in zip(self.dots, clusters):\n if cluster == center:\n cl.append(dot)\n cl.insert(0, center)\n x_center = cl[0][0]\n y_center = cl[0][1]\n self.drawing_area.create_oval(x_center, y_center, x_center + 7,\n y_center + 7, width=1, fill=self.colors[0])\n for c in cl[1:]:\n x = c[0]\n y = c[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1,\n fill=self.colors[i + 1])\n <function token>\n\n def draw_dots(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7,\n width=1, fill=self.colors[0])\n self.dots.append([event.x, event.y])\n <function token>\n\n def upload_dots(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.dots += list(read_file(filename))\n for dot in self.dots:\n x = dot[0]\n y = dot[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[0])\n\n def upload_centers(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.centers += list(read_file(filename))\n for center in self.centers:\n x = center[0]\n y = center[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[1])\n\n def reset(self, event):\n self.drawing_area.delete('all')\n self.dots = list()\n self.centers = list()\n <function token>\n", "<import token>\n\n\nclass Clusterization:\n <function token>\n <function token>\n <function token>\n\n def draw_dots(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7,\n width=1, fill=self.colors[0])\n self.dots.append([event.x, event.y])\n <function token>\n\n def upload_dots(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.dots += list(read_file(filename))\n for dot in self.dots:\n x = dot[0]\n y = dot[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[0])\n\n def upload_centers(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.centers += list(read_file(filename))\n for center in self.centers:\n x = center[0]\n y = center[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[1])\n\n def reset(self, event):\n self.drawing_area.delete('all')\n self.dots = list()\n self.centers = list()\n <function token>\n", "<import token>\n\n\nclass Clusterization:\n <function token>\n <function token>\n <function token>\n\n def draw_dots(self, event):\n 
event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7,\n width=1, fill=self.colors[0])\n self.dots.append([event.x, event.y])\n <function token>\n\n def upload_dots(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.dots += list(read_file(filename))\n for dot in self.dots:\n x = dot[0]\n y = dot[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[0])\n\n def upload_centers(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.centers += list(read_file(filename))\n for center in self.centers:\n x = center[0]\n y = center[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[1])\n <function token>\n <function token>\n", "<import token>\n\n\nclass Clusterization:\n <function token>\n <function token>\n <function token>\n\n def draw_dots(self, event):\n event.widget.create_oval(event.x, event.y, event.x + 7, event.y + 7,\n width=1, fill=self.colors[0])\n self.dots.append([event.x, event.y])\n <function token>\n <function token>\n\n def upload_centers(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.centers += list(read_file(filename))\n for center in self.centers:\n x = center[0]\n y = center[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[1])\n <function token>\n <function token>\n", "<import token>\n\n\nclass Clusterization:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def upload_centers(self, event):\n Tk().withdraw()\n filename = askopenfilename()\n self.centers += list(read_file(filename))\n for center in self.centers:\n x = center[0]\n y = center[1]\n self.drawing_area.create_oval(x, y, x + 7, y + 7, width=1, fill\n =self.colors[1])\n <function token>\n <function token>\n", "<import token>\n\n\nclass Clusterization:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<class token>\n" ]
false
99,482
3ea7b7df0277669f301895732b3801b639e74829
#!/usr/bin/env python # tf_reducer.py # Syed Rafayal # A reducer program for calculating TF by counting number for file_name, word import sys # declare and assign global variable current_word = None prev_filename = None current_count = 0 word = None N = 0 filename_count = {} lines = [] # input comes from STDIN for line in sys.stdin: # remove leading and trailing whitespace line = line.strip() # adding line in the list lines.append(line) # split the line into file_name, word, count by tab file_name, word, count = line.split('\t') # convert count (currently a string) to int count = int(count) # check previous file name with current file name # if same then add count with N # otherwise change previous file name and reset N value by 0 # and update dictionary 'filename_count' by previous filename if prev_filename == file_name: N = N + count else: if prev_filename != None: filename_count[prev_filename] = N N = 0 prev_filename = file_name # update last value filename_count[prev_filename] = N # read all the line from list for line in lines: # split the line into file_name, word, count by tab file_name, word, count = line.split('\t') for name in filename_count: if file_name == name: # calculate Term Frequency tf = float(count)/float(filename_count[name]) # write the results to STDOUT (standard output); # what we output here will be the input for the # next Mapper step, i.e. the input for idf_mapper.py # tab-delimited; print "%s\t%s\t%s" % (word, file_name, str(tf))
[ "#!/usr/bin/env python\n# tf_reducer.py\n# Syed Rafayal\n# A reducer program for calculating TF by counting number for file_name, word\nimport sys\n\n# declare and assign global variable\ncurrent_word = None\nprev_filename = None\ncurrent_count = 0\nword = None\nN = 0\nfilename_count = {}\nlines = []\n\n# input comes from STDIN\nfor line in sys.stdin:\n # remove leading and trailing whitespace\n line = line.strip()\n # adding line in the list\n lines.append(line)\n # split the line into file_name, word, count by tab\n file_name, word, count = line.split('\\t')\n # convert count (currently a string) to int\n count = int(count)\n # check previous file name with current file name\n # if same then add count with N\n # otherwise change previous file name and reset N value by 0\n # and update dictionary 'filename_count' by previous filename\n if prev_filename == file_name:\n N = N + count\n else:\n if prev_filename != None:\n filename_count[prev_filename] = N\n N = 0\n prev_filename = file_name\n# update last value\nfilename_count[prev_filename] = N\n\n# read all the line from list\nfor line in lines:\n # split the line into file_name, word, count by tab\n file_name, word, count = line.split('\\t')\n for name in filename_count:\n if file_name == name:\n # calculate Term Frequency\n tf = float(count)/float(filename_count[name])\n # write the results to STDOUT (standard output);\n # what we output here will be the input for the\n # next Mapper step, i.e. the input for idf_mapper.py\n # tab-delimited;\n print \"%s\\t%s\\t%s\" % (word, file_name, str(tf))\n" ]
true
99,483
390dc5c2a0e51ae0345e83638b4c6a4d8cea7b6f
# Generated by Django 2.2.1 on 2019-05-08 21:54 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('github', '0007_auto_20190508_2149'), ] operations = [ migrations.AddField( model_name='repository', name='forks', field=models.IntegerField(default=1), preserve_default=False, ), migrations.AddField( model_name='repository', name='stars', field=models.IntegerField(default=1), preserve_default=False, ), ]
[ "# Generated by Django 2.2.1 on 2019-05-08 21:54\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('github', '0007_auto_20190508_2149'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='repository',\n name='forks',\n field=models.IntegerField(default=1),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='repository',\n name='stars',\n field=models.IntegerField(default=1),\n preserve_default=False,\n ),\n ]\n", "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('github', '0007_auto_20190508_2149')]\n operations = [migrations.AddField(model_name='repository', name='forks',\n field=models.IntegerField(default=1), preserve_default=False),\n migrations.AddField(model_name='repository', name='stars', field=\n models.IntegerField(default=1), preserve_default=False)]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('github', '0007_auto_20190508_2149')]\n operations = [migrations.AddField(model_name='repository', name='forks',\n field=models.IntegerField(default=1), preserve_default=False),\n migrations.AddField(model_name='repository', name='stars', field=\n models.IntegerField(default=1), preserve_default=False)]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n" ]
false
99,484
cb88f7ed0e44519032066a81ac3dad3bbfc9eaba
# coding=utf-8 # author huxh # time 2020/4/7 10:29 AM def isStraight(nums): nums = [num for num in nums if num] return len(set(nums)) == len(nums) and max(nums) - min(nums) < 5
[ "# coding=utf-8\n# author huxh\n# time 2020/4/7 10:29 AM\n\n\ndef isStraight(nums):\n nums = [num for num in nums if num]\n return len(set(nums)) == len(nums) and max(nums) - min(nums) < 5\n\n", "def isStraight(nums):\n nums = [num for num in nums if num]\n return len(set(nums)) == len(nums) and max(nums) - min(nums) < 5\n", "<function token>\n" ]
false
99,485
355af566411cd3c017d14d3856561b62b0834833
''' Given a non-negative integer numRows, generate the first numRows of Pascal's triangle. In Pascal's triangle, each number is the sum of the two numbers directly above it. Example: Input: 5 Output: [ [1], [1,1], [1,2,1], [1,3,3,1], [1,4,6,4,1] ] ''' class Solution(object): def generate(self, n): """ :type numRows: int :rtype: List[List[int]] """ ans=[] #temp1=[1] #temp2=[1,1] #ans.append(temp1) # ans.append(temp2) for i in range (0, n): temp=[] temp.append(1) for j in range (1,i): temp.append(ans[i-1][j-1]+ans[i-1][j]) #print("yui") if i!=0: temp.append(1) ans.append(temp) return ans
[ "'''\nGiven a non-negative integer numRows, generate the first numRows of Pascal's triangle.\n\n\nIn Pascal's triangle, each number is the sum of the two numbers directly above it.\n\nExample:\n\nInput: 5\nOutput:\n[\n [1],\n [1,1],\n [1,2,1],\n [1,3,3,1],\n [1,4,6,4,1]\n]\n'''\n\nclass Solution(object):\n def generate(self, n):\n \"\"\"\n :type numRows: int\n :rtype: List[List[int]]\n \"\"\"\n ans=[]\n \n \n #temp1=[1]\n #temp2=[1,1]\n #ans.append(temp1)\n # ans.append(temp2)\n \n for i in range (0, n):\n temp=[]\n temp.append(1)\n for j in range (1,i):\n temp.append(ans[i-1][j-1]+ans[i-1][j])\n #print(\"yui\")\n if i!=0:\n temp.append(1)\n ans.append(temp)\n \n return ans\n", "<docstring token>\n\n\nclass Solution(object):\n\n def generate(self, n):\n \"\"\"\n :type numRows: int\n :rtype: List[List[int]]\n \"\"\"\n ans = []\n for i in range(0, n):\n temp = []\n temp.append(1)\n for j in range(1, i):\n temp.append(ans[i - 1][j - 1] + ans[i - 1][j])\n if i != 0:\n temp.append(1)\n ans.append(temp)\n return ans\n", "<docstring token>\n\n\nclass Solution(object):\n <function token>\n", "<docstring token>\n<class token>\n" ]
false
99,486
3890af36e64abaa0742d829fd03efcb61ce18d6a
# Generated by Django 2.2.4 on 2019-09-11 03:12 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('clients', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='product', name='priceUSD', ), migrations.AddField( model_name='client', name='source', field=models.CharField(default='OTHER', max_length=10, verbose_name='Source'), ), migrations.AddField( model_name='product', name='currency', field=models.CharField(choices=[('USD', 'USD'), ('RMB', 'RMB'), ('EUR', 'EUR')], default='USD', max_length=5), ), migrations.AddField( model_name='product', name='price', field=models.DecimalField(decimal_places=2, default=0, max_digits=6, verbose_name='Price'), preserve_default=False, ), migrations.AddField( model_name='product', name='priceRMB', field=models.DecimalField(decimal_places=2, default=0, max_digits=6, verbose_name='Price(RMB)'), preserve_default=False, ), ]
[ "# Generated by Django 2.2.4 on 2019-09-11 03:12\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('clients', '0001_initial'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='product',\n name='priceUSD',\n ),\n migrations.AddField(\n model_name='client',\n name='source',\n field=models.CharField(default='OTHER', max_length=10, verbose_name='Source'),\n ),\n migrations.AddField(\n model_name='product',\n name='currency',\n field=models.CharField(choices=[('USD', 'USD'), ('RMB', 'RMB'), ('EUR', 'EUR')], default='USD', max_length=5),\n ),\n migrations.AddField(\n model_name='product',\n name='price',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=6, verbose_name='Price'),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='product',\n name='priceRMB',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=6, verbose_name='Price(RMB)'),\n preserve_default=False,\n ),\n ]\n", "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('clients', '0001_initial')]\n operations = [migrations.RemoveField(model_name='product', name=\n 'priceUSD'), migrations.AddField(model_name='client', name='source',\n field=models.CharField(default='OTHER', max_length=10, verbose_name\n ='Source')), migrations.AddField(model_name='product', name=\n 'currency', field=models.CharField(choices=[('USD', 'USD'), ('RMB',\n 'RMB'), ('EUR', 'EUR')], default='USD', max_length=5)), migrations.\n AddField(model_name='product', name='price', field=models.\n DecimalField(decimal_places=2, default=0, max_digits=6,\n verbose_name='Price'), preserve_default=False), migrations.AddField\n (model_name='product', name='priceRMB', field=models.DecimalField(\n decimal_places=2, default=0, max_digits=6, verbose_name=\n 'Price(RMB)'), preserve_default=False)]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('clients', '0001_initial')]\n operations = [migrations.RemoveField(model_name='product', name=\n 'priceUSD'), migrations.AddField(model_name='client', name='source',\n field=models.CharField(default='OTHER', max_length=10, verbose_name\n ='Source')), migrations.AddField(model_name='product', name=\n 'currency', field=models.CharField(choices=[('USD', 'USD'), ('RMB',\n 'RMB'), ('EUR', 'EUR')], default='USD', max_length=5)), migrations.\n AddField(model_name='product', name='price', field=models.\n DecimalField(decimal_places=2, default=0, max_digits=6,\n verbose_name='Price'), preserve_default=False), migrations.AddField\n (model_name='product', name='priceRMB', field=models.DecimalField(\n decimal_places=2, default=0, max_digits=6, verbose_name=\n 'Price(RMB)'), preserve_default=False)]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n" ]
false
99,487
56e6360f47d12243d4a99863b58d798e1fc21e86
""" Binary Search Tree Node """ class BinarySearchTreeNode: """ Elemental Node in a Binary Search Tree Attributes ---------- element: any type with comparable function parent: BinarySearchTreeNode left: BinarySearchTreeNode right: BinarySearchTreeNode Methods ------- has_parent() returns True if there is a node parent, False otherwise has_left() returns True if this node has a left node, False otherwise has_right() returns True if this node has a right node, False otherwise """ def __init__(self, element, parent=None, left=None, right=None): """ Node Constructor Parameters ---------- element : any type with comparable function Node element parent=None : BinarySearchTreeNode Node parent left=None : BinarySearchTreeNode Node in left right=None : BinarySearchTreeNode Node in right """ self.element = element self.parent = parent self.left = left self.right = right def __str__(self): """ Returns a String representation of this node. """ return 'BinarySearchTreeNode(' + str(self.element) + ')' def __repr__(self): """ Returns a String representation of this node. """ return str(self) def __eq__(self, node): """ Returns a Boolean depending if this and other node are equal. """ if node == None or self.element != node.element: return False return self.left == node.left and self.right == node.right # All getters and setters def get_element(self): """ Get the node element Returns ------- element node element """ return self.element def set_element(self, element): """ Set the node element Parameters ---------- element : any type with comparable function node element """ self.element = element def get_parent(self): """ Get the node element Returns ------- parent : BinarySearchTreeNode Node parent """ return self.parent def set_parent(self, parent): """ Set the parent node Parameters ---------- parent : BinarySearchTreeNode Parent node """ self.parent = parent def get_left(self): """ Get the node in left Returns ------- element : BinarySearchTreeNode Node in left """ return self.left def set_left(self, left): """ Set the node in right Parameters ---------- left : BinarySearchTreeNode Left node """ self.left = left def get_right(self): """ Get the node in right Returns ------- right : BinarySearchTreeNode Node in right """ return self.right def set_right(self, right): """ Set the node in right Parameters ---------- right : BinarySearchTreeNode Right node """ self.right = right def has_parent(self): """ Check if there is a parent Returns ------- Boolean True if this node has a parent, False otherwise """ return self.parent != None def has_left(self): """ Check if there is a left node Returns ------- Boolean True if this node has a left node, False otherwise """ return self.left != None def has_right(self): """ Check if there is a right node Returns ------- Boolean True if this node has a right node, False otherwise """ return self.right != None def is_left_child(self): """ Check if this node is a left child. Returns ------- Boolean True if this node is a Left child, False otherwise """ if self.parent == None: return False return self.parent.left == self def is_right_child(self): """ Check if this node is a right child. Returns ------- Boolean True if this node is a right child, False otherwise """ if self.parent == None: return False return self.parent.right == self
[ "\"\"\" Binary Search Tree Node \"\"\"\n\nclass BinarySearchTreeNode:\n \"\"\"\n Elemental Node in a Binary Search Tree\n\n Attributes\n ----------\n element: any type with comparable function\n parent: BinarySearchTreeNode\n left: BinarySearchTreeNode\n right: BinarySearchTreeNode\n\n Methods\n -------\n has_parent()\n returns True if there is a node parent, False otherwise\n\n has_left()\n returns True if this node has a left node, False otherwise\n\n has_right()\n returns True if this node has a right node, False otherwise\n \"\"\"\n \n def __init__(self, element, parent=None, left=None, right=None):\n \"\"\"\n Node Constructor\n \n Parameters\n ----------\n element : any type with comparable function\n Node element\n\n parent=None : BinarySearchTreeNode\n Node parent\n\n left=None : BinarySearchTreeNode\n Node in left\n\n right=None : BinarySearchTreeNode\n Node in right\n \"\"\"\n self.element = element\n self.parent = parent\n self.left = left\n self.right = right\n \n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n\n def __eq__(self, node):\n \"\"\"\n Returns a Boolean depending if this and other node are equal.\n \"\"\"\n if node == None or self.element != node.element:\n return False\n return self.left == node.left and self.right == node.right\n\n # All getters and setters\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n\n def get_parent(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n parent : BinarySearchTreeNode\n Node parent\n \"\"\"\n return self.parent\n\n def set_parent(self, parent):\n \"\"\"\n Set the parent node\n \n Parameters\n ----------\n parent : BinarySearchTreeNode\n Parent node\n \"\"\"\n self.parent = parent\n\n def get_left(self):\n \"\"\"\n Get the node in left\n \n Returns\n -------\n element : BinarySearchTreeNode\n Node in left\n \"\"\"\n return self.left\n\n def set_left(self, left):\n \"\"\"\n Set the node in right\n \n Parameters\n ----------\n left : BinarySearchTreeNode\n Left node\n \"\"\"\n self.left = left\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n\n def set_right(self, right):\n \"\"\"\n Set the node in right\n \n Parameters\n ----------\n right : BinarySearchTreeNode\n Right node\n \"\"\"\n self.right = right\n\n def has_parent(self):\n \"\"\"\n Check if there is a parent\n \n Returns\n -------\n Boolean\n True if this node has a parent, False otherwise\n \"\"\"\n return self.parent != None\n\n def has_left(self):\n \"\"\"\n Check if there is a left node\n \n Returns\n -------\n Boolean\n True if this node has a left node, False otherwise\n \"\"\"\n return self.left != None\n\n def has_right(self):\n \"\"\"\n Check if there is a right node\n \n Returns\n -------\n Boolean\n True if this node has a right node, False otherwise\n \"\"\"\n return self.right != None\n\n def is_left_child(self):\n \"\"\"\n Check if this node is a left child.\n\n Returns\n -------\n Boolean\n True if this node is a Left child, False otherwise\n \"\"\"\n if 
self.parent == None:\n return False\n\n return self.parent.left == self\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n \"\"\"\n Elemental Node in a Binary Search Tree\n\n Attributes\n ----------\n element: any type with comparable function\n parent: BinarySearchTreeNode\n left: BinarySearchTreeNode\n right: BinarySearchTreeNode\n\n Methods\n -------\n has_parent()\n returns True if there is a node parent, False otherwise\n\n has_left()\n returns True if this node has a left node, False otherwise\n\n has_right()\n returns True if this node has a right node, False otherwise\n \"\"\"\n\n def __init__(self, element, parent=None, left=None, right=None):\n \"\"\"\n Node Constructor\n \n Parameters\n ----------\n element : any type with comparable function\n Node element\n\n parent=None : BinarySearchTreeNode\n Node parent\n\n left=None : BinarySearchTreeNode\n Node in left\n\n right=None : BinarySearchTreeNode\n Node in right\n \"\"\"\n self.element = element\n self.parent = parent\n self.left = left\n self.right = right\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n\n def __eq__(self, node):\n \"\"\"\n Returns a Boolean depending if this and other node are equal.\n \"\"\"\n if node == None or self.element != node.element:\n return False\n return self.left == node.left and self.right == node.right\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n\n def get_parent(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n parent : BinarySearchTreeNode\n Node parent\n \"\"\"\n return self.parent\n\n def set_parent(self, parent):\n \"\"\"\n Set the parent node\n \n Parameters\n ----------\n parent : BinarySearchTreeNode\n Parent node\n \"\"\"\n self.parent = parent\n\n def get_left(self):\n \"\"\"\n Get the node in left\n \n Returns\n -------\n element : BinarySearchTreeNode\n Node in left\n \"\"\"\n return self.left\n\n def set_left(self, left):\n \"\"\"\n Set the node in right\n \n Parameters\n ----------\n left : BinarySearchTreeNode\n Left node\n \"\"\"\n self.left = left\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n\n def set_right(self, right):\n \"\"\"\n Set the node in right\n \n Parameters\n ----------\n right : BinarySearchTreeNode\n Right node\n \"\"\"\n self.right = right\n\n def has_parent(self):\n \"\"\"\n Check if there is a parent\n \n Returns\n -------\n Boolean\n True if this node has a parent, False otherwise\n \"\"\"\n return self.parent != None\n\n def has_left(self):\n \"\"\"\n Check if there is a left node\n \n Returns\n -------\n Boolean\n True if this node has a left node, False otherwise\n \"\"\"\n return self.left != None\n\n def has_right(self):\n \"\"\"\n Check if there is a right node\n \n Returns\n -------\n 
Boolean\n True if this node has a right node, False otherwise\n \"\"\"\n return self.right != None\n\n def is_left_child(self):\n \"\"\"\n Check if this node is a left child.\n\n Returns\n -------\n Boolean\n True if this node is a Left child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.left == self\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n\n def __init__(self, element, parent=None, left=None, right=None):\n \"\"\"\n Node Constructor\n \n Parameters\n ----------\n element : any type with comparable function\n Node element\n\n parent=None : BinarySearchTreeNode\n Node parent\n\n left=None : BinarySearchTreeNode\n Node in left\n\n right=None : BinarySearchTreeNode\n Node in right\n \"\"\"\n self.element = element\n self.parent = parent\n self.left = left\n self.right = right\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n\n def __eq__(self, node):\n \"\"\"\n Returns a Boolean depending if this and other node are equal.\n \"\"\"\n if node == None or self.element != node.element:\n return False\n return self.left == node.left and self.right == node.right\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n\n def get_parent(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n parent : BinarySearchTreeNode\n Node parent\n \"\"\"\n return self.parent\n\n def set_parent(self, parent):\n \"\"\"\n Set the parent node\n \n Parameters\n ----------\n parent : BinarySearchTreeNode\n Parent node\n \"\"\"\n self.parent = parent\n\n def get_left(self):\n \"\"\"\n Get the node in left\n \n Returns\n -------\n element : BinarySearchTreeNode\n Node in left\n \"\"\"\n return self.left\n\n def set_left(self, left):\n \"\"\"\n Set the node in right\n \n Parameters\n ----------\n left : BinarySearchTreeNode\n Left node\n \"\"\"\n self.left = left\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n\n def set_right(self, right):\n \"\"\"\n Set the node in right\n \n Parameters\n ----------\n right : BinarySearchTreeNode\n Right node\n \"\"\"\n self.right = right\n\n def has_parent(self):\n \"\"\"\n Check if there is a parent\n \n Returns\n -------\n Boolean\n True if this node has a parent, False otherwise\n \"\"\"\n return self.parent != None\n\n def has_left(self):\n \"\"\"\n Check if there is a left node\n \n Returns\n -------\n Boolean\n True if this node has a left node, False otherwise\n \"\"\"\n return self.left != None\n\n def has_right(self):\n \"\"\"\n Check if there is a right node\n \n Returns\n -------\n Boolean\n True if this node has a right node, False otherwise\n \"\"\"\n return self.right != None\n\n def is_left_child(self):\n \"\"\"\n Check if this node is a left child.\n\n 
Returns\n -------\n Boolean\n True if this node is a Left child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.left == self\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n\n def __init__(self, element, parent=None, left=None, right=None):\n \"\"\"\n Node Constructor\n \n Parameters\n ----------\n element : any type with comparable function\n Node element\n\n parent=None : BinarySearchTreeNode\n Node parent\n\n left=None : BinarySearchTreeNode\n Node in left\n\n right=None : BinarySearchTreeNode\n Node in right\n \"\"\"\n self.element = element\n self.parent = parent\n self.left = left\n self.right = right\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n\n def __eq__(self, node):\n \"\"\"\n Returns a Boolean depending if this and other node are equal.\n \"\"\"\n if node == None or self.element != node.element:\n return False\n return self.left == node.left and self.right == node.right\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n\n def set_parent(self, parent):\n \"\"\"\n Set the parent node\n \n Parameters\n ----------\n parent : BinarySearchTreeNode\n Parent node\n \"\"\"\n self.parent = parent\n\n def get_left(self):\n \"\"\"\n Get the node in left\n \n Returns\n -------\n element : BinarySearchTreeNode\n Node in left\n \"\"\"\n return self.left\n\n def set_left(self, left):\n \"\"\"\n Set the node in right\n \n Parameters\n ----------\n left : BinarySearchTreeNode\n Left node\n \"\"\"\n self.left = left\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n\n def set_right(self, right):\n \"\"\"\n Set the node in right\n \n Parameters\n ----------\n right : BinarySearchTreeNode\n Right node\n \"\"\"\n self.right = right\n\n def has_parent(self):\n \"\"\"\n Check if there is a parent\n \n Returns\n -------\n Boolean\n True if this node has a parent, False otherwise\n \"\"\"\n return self.parent != None\n\n def has_left(self):\n \"\"\"\n Check if there is a left node\n \n Returns\n -------\n Boolean\n True if this node has a left node, False otherwise\n \"\"\"\n return self.left != None\n\n def has_right(self):\n \"\"\"\n Check if there is a right node\n \n Returns\n -------\n Boolean\n True if this node has a right node, False otherwise\n \"\"\"\n return self.right != None\n\n def is_left_child(self):\n \"\"\"\n Check if this node is a left child.\n\n Returns\n -------\n Boolean\n True if this node is a Left child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.left == self\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False 
otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n\n def __init__(self, element, parent=None, left=None, right=None):\n \"\"\"\n Node Constructor\n \n Parameters\n ----------\n element : any type with comparable function\n Node element\n\n parent=None : BinarySearchTreeNode\n Node parent\n\n left=None : BinarySearchTreeNode\n Node in left\n\n right=None : BinarySearchTreeNode\n Node in right\n \"\"\"\n self.element = element\n self.parent = parent\n self.left = left\n self.right = right\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n\n def __eq__(self, node):\n \"\"\"\n Returns a Boolean depending if this and other node are equal.\n \"\"\"\n if node == None or self.element != node.element:\n return False\n return self.left == node.left and self.right == node.right\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n\n def set_parent(self, parent):\n \"\"\"\n Set the parent node\n \n Parameters\n ----------\n parent : BinarySearchTreeNode\n Parent node\n \"\"\"\n self.parent = parent\n\n def get_left(self):\n \"\"\"\n Get the node in left\n \n Returns\n -------\n element : BinarySearchTreeNode\n Node in left\n \"\"\"\n return self.left\n\n def set_left(self, left):\n \"\"\"\n Set the node in right\n \n Parameters\n ----------\n left : BinarySearchTreeNode\n Left node\n \"\"\"\n self.left = left\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n <function token>\n\n def has_parent(self):\n \"\"\"\n Check if there is a parent\n \n Returns\n -------\n Boolean\n True if this node has a parent, False otherwise\n \"\"\"\n return self.parent != None\n\n def has_left(self):\n \"\"\"\n Check if there is a left node\n \n Returns\n -------\n Boolean\n True if this node has a left node, False otherwise\n \"\"\"\n return self.left != None\n\n def has_right(self):\n \"\"\"\n Check if there is a right node\n \n Returns\n -------\n Boolean\n True if this node has a right node, False otherwise\n \"\"\"\n return self.right != None\n\n def is_left_child(self):\n \"\"\"\n Check if this node is a left child.\n\n Returns\n -------\n Boolean\n True if this node is a Left child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.left == self\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n\n def __init__(self, element, parent=None, left=None, right=None):\n \"\"\"\n Node Constructor\n \n Parameters\n ----------\n element : any type with comparable function\n Node element\n\n parent=None : BinarySearchTreeNode\n Node parent\n\n left=None : BinarySearchTreeNode\n Node in left\n\n 
right=None : BinarySearchTreeNode\n Node in right\n \"\"\"\n self.element = element\n self.parent = parent\n self.left = left\n self.right = right\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n\n def __eq__(self, node):\n \"\"\"\n Returns a Boolean depending if this and other node are equal.\n \"\"\"\n if node == None or self.element != node.element:\n return False\n return self.left == node.left and self.right == node.right\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n\n def set_parent(self, parent):\n \"\"\"\n Set the parent node\n \n Parameters\n ----------\n parent : BinarySearchTreeNode\n Parent node\n \"\"\"\n self.parent = parent\n\n def get_left(self):\n \"\"\"\n Get the node in left\n \n Returns\n -------\n element : BinarySearchTreeNode\n Node in left\n \"\"\"\n return self.left\n\n def set_left(self, left):\n \"\"\"\n Set the node in right\n \n Parameters\n ----------\n left : BinarySearchTreeNode\n Left node\n \"\"\"\n self.left = left\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n <function token>\n <function token>\n\n def has_left(self):\n \"\"\"\n Check if there is a left node\n \n Returns\n -------\n Boolean\n True if this node has a left node, False otherwise\n \"\"\"\n return self.left != None\n\n def has_right(self):\n \"\"\"\n Check if there is a right node\n \n Returns\n -------\n Boolean\n True if this node has a right node, False otherwise\n \"\"\"\n return self.right != None\n\n def is_left_child(self):\n \"\"\"\n Check if this node is a left child.\n\n Returns\n -------\n Boolean\n True if this node is a Left child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.left == self\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n\n def __eq__(self, node):\n \"\"\"\n Returns a Boolean depending if this and other node are equal.\n \"\"\"\n if node == None or self.element != node.element:\n return False\n return self.left == node.left and self.right == node.right\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n\n def set_parent(self, parent):\n \"\"\"\n Set the parent node\n \n 
Parameters\n ----------\n parent : BinarySearchTreeNode\n Parent node\n \"\"\"\n self.parent = parent\n\n def get_left(self):\n \"\"\"\n Get the node in left\n \n Returns\n -------\n element : BinarySearchTreeNode\n Node in left\n \"\"\"\n return self.left\n\n def set_left(self, left):\n \"\"\"\n Set the node in right\n \n Parameters\n ----------\n left : BinarySearchTreeNode\n Left node\n \"\"\"\n self.left = left\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n <function token>\n <function token>\n\n def has_left(self):\n \"\"\"\n Check if there is a left node\n \n Returns\n -------\n Boolean\n True if this node has a left node, False otherwise\n \"\"\"\n return self.left != None\n\n def has_right(self):\n \"\"\"\n Check if there is a right node\n \n Returns\n -------\n Boolean\n True if this node has a right node, False otherwise\n \"\"\"\n return self.right != None\n\n def is_left_child(self):\n \"\"\"\n Check if this node is a left child.\n\n Returns\n -------\n Boolean\n True if this node is a Left child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.left == self\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n\n def __eq__(self, node):\n \"\"\"\n Returns a Boolean depending if this and other node are equal.\n \"\"\"\n if node == None or self.element != node.element:\n return False\n return self.left == node.left and self.right == node.right\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n\n def set_parent(self, parent):\n \"\"\"\n Set the parent node\n \n Parameters\n ----------\n parent : BinarySearchTreeNode\n Parent node\n \"\"\"\n self.parent = parent\n\n def get_left(self):\n \"\"\"\n Get the node in left\n \n Returns\n -------\n element : BinarySearchTreeNode\n Node in left\n \"\"\"\n return self.left\n\n def set_left(self, left):\n \"\"\"\n Set the node in right\n \n Parameters\n ----------\n left : BinarySearchTreeNode\n Left node\n \"\"\"\n self.left = left\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n <function token>\n <function token>\n\n def has_left(self):\n \"\"\"\n Check if there is a left node\n \n Returns\n -------\n Boolean\n True if this node has a left node, False otherwise\n \"\"\"\n return self.left != None\n\n def has_right(self):\n \"\"\"\n Check if there is a right node\n \n Returns\n -------\n Boolean\n True if this node has a right node, False otherwise\n \"\"\"\n return self.right != None\n <function token>\n\n def is_right_child(self):\n \"\"\"\n Check if 
this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n\n def __eq__(self, node):\n \"\"\"\n Returns a Boolean depending if this and other node are equal.\n \"\"\"\n if node == None or self.element != node.element:\n return False\n return self.left == node.left and self.right == node.right\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n\n def set_parent(self, parent):\n \"\"\"\n Set the parent node\n \n Parameters\n ----------\n parent : BinarySearchTreeNode\n Parent node\n \"\"\"\n self.parent = parent\n\n def get_left(self):\n \"\"\"\n Get the node in left\n \n Returns\n -------\n element : BinarySearchTreeNode\n Node in left\n \"\"\"\n return self.left\n <function token>\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n <function token>\n <function token>\n\n def has_left(self):\n \"\"\"\n Check if there is a left node\n \n Returns\n -------\n Boolean\n True if this node has a left node, False otherwise\n \"\"\"\n return self.left != None\n\n def has_right(self):\n \"\"\"\n Check if there is a right node\n \n Returns\n -------\n Boolean\n True if this node has a right node, False otherwise\n \"\"\"\n return self.right != None\n <function token>\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n\n def __eq__(self, node):\n \"\"\"\n Returns a Boolean depending if this and other node are equal.\n \"\"\"\n if node == None or self.element != node.element:\n return False\n return self.left == node.left and self.right == node.right\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n <function token>\n\n def get_left(self):\n \"\"\"\n Get the node in left\n \n Returns\n -------\n element : BinarySearchTreeNode\n Node in left\n \"\"\"\n return self.left\n <function token>\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : 
BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n <function token>\n <function token>\n\n def has_left(self):\n \"\"\"\n Check if there is a left node\n \n Returns\n -------\n Boolean\n True if this node has a left node, False otherwise\n \"\"\"\n return self.left != None\n\n def has_right(self):\n \"\"\"\n Check if there is a right node\n \n Returns\n -------\n Boolean\n True if this node has a right node, False otherwise\n \"\"\"\n return self.right != None\n <function token>\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n\n def __eq__(self, node):\n \"\"\"\n Returns a Boolean depending if this and other node are equal.\n \"\"\"\n if node == None or self.element != node.element:\n return False\n return self.left == node.left and self.right == node.right\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n <function token>\n\n def get_left(self):\n \"\"\"\n Get the node in left\n \n Returns\n -------\n element : BinarySearchTreeNode\n Node in left\n \"\"\"\n return self.left\n <function token>\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n <function token>\n <function token>\n <function token>\n\n def has_right(self):\n \"\"\"\n Check if there is a right node\n \n Returns\n -------\n Boolean\n True if this node has a right node, False otherwise\n \"\"\"\n return self.right != None\n <function token>\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n\n def __eq__(self, node):\n \"\"\"\n Returns a Boolean depending if this and other node are equal.\n \"\"\"\n if node == None or self.element != node.element:\n return False\n return self.left == node.left and self.right == node.right\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n <function token>\n\n def get_left(self):\n \"\"\"\n Get the 
node in left\n \n Returns\n -------\n element : BinarySearchTreeNode\n Node in left\n \"\"\"\n return self.left\n <function token>\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n <function token>\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n <function token>\n\n def get_left(self):\n \"\"\"\n Get the node in left\n \n Returns\n -------\n element : BinarySearchTreeNode\n Node in left\n \"\"\"\n return self.left\n <function token>\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + str(self.element) + ')'\n\n def __repr__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return str(self)\n <function token>\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n\n def __str__(self):\n \"\"\"\n Returns a String representation of this node.\n \"\"\"\n return 'BinarySearchTreeNode(' + 
str(self.element) + ')'\n <function token>\n <function token>\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_right(self):\n \"\"\"\n Get the node in right\n \n Returns\n -------\n right : BinarySearchTreeNode\n Node in right\n \"\"\"\n return self.right\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def is_right_child(self):\n \"\"\"\n Check if this node is a right child.\n\n Returns\n -------\n Boolean\n True if this node is a right child, False otherwise\n \"\"\"\n if self.parent == None:\n return False\n return self.parent.right == self\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_element(self):\n \"\"\"\n Get the node element\n \n Returns\n -------\n element\n node element\n \"\"\"\n return self.element\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n 
<function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def set_element(self, element):\n \"\"\"\n Set the node element\n \n Parameters\n ----------\n element : any type with comparable function\n node element\n \"\"\"\n self.element = element\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<docstring token>\n\n\nclass BinarySearchTreeNode:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<docstring token>\n<class token>\n" ]
false
99,488
2f4766993847a2e4aac1a6f5c5aea786b690fd60
#Definition for singly-linked list.
class ListNode:
    def __init__(self, x, y):
        self.val = x
        self.next = None
        self.id = y


class Solution:
    # @param {ListNode} head
    # @return {ListNode}
    def deleteDuplicates(self, head):
        if not head:
            return
        node = head

        prev = ListNode(0, -1)  # dummy head so the real head can be removed uniformly
        prev.next = head
        head = prev

        while node.next:
            flag = False
            while node.next and node.val == node.next.val:
                node.next = node.next.next
                flag = True
            node = node.next
            if flag and prev.next:
                prev.next = prev.next.next
            else:
                #node = node.next
                prev = prev.next
            if not node:
                return head.next
        return head.next


def print_nodes(l):
    s = ''
    while l:
        s += '->' + str(l.val)
        l = l.next
    print(s)


l1 = ListNode(1, 0)
l2 = ListNode(1, 1)
l3 = ListNode(2, 2)
l4 = ListNode(2, 3)

l5 = ListNode(2, 4)
l6 = ListNode(3, 5)
l7 = ListNode(3, 6)
l8 = ListNode(4, 7)

l1.next = l2
#l2.next = l3
l3.next = l4
l4.next = l5
l5.next = l6
l6.next = l7
l7.next = l8
solution = Solution()
print_nodes(solution.deleteDuplicates(l1))

#print_nodes(l)

class Solution:
    # @param {ListNode} head
    # @return {ListNode}
    def deleteDuplicates(self, head):
        node = head
        prev = ListNode(0, -1)  # this file's ListNode takes (val, id), so pass both
        prev.next = head
        head = prev

        while node:
            #Check if current node is a duplicated node
            flag = False
            while node.next and node.val == node.next.val:
                node.next = node.next.next
                flag = True
            #Node is now pointer of a list with each element appears only once
            node = node.next
            #Delete the current node if its value appeared more than once
            if flag and prev.next:
                prev.next = prev.next.next
            #Put the node at the end of the result
            else:
                prev = prev.next

        return head.next
[ "#Definition for singly-linked list.\nclass ListNode:\n def __init__(self, x, y):\n self.val = x\n self.next = None\n self.id = y\n\n\nclass Solution:\n # @param {ListNode} head\n # @return {ListNode}\n def deleteDuplicates(self, head):\n if not head:\n return\n node = head\n\n prev = ListNode(0,-1)\n prev.next = head\n head = prev\n\n while node.next:\n flag = False\n while node.next and node.val == node.next.val:\n node.next = node.next.next\n flag = True\n node = node.next\n if flag and prev.next:\n prev.next = prev.next.next\n else:\n #node = node.next\n prev = prev.next\n if not node:\n return head.next\n return head.next\n\n\ndef print_nodes(l):\n s = ''\n while l:\n s += '->' + str(l.val)\n l = l.next\n print s\n\n\nl1 = ListNode(1,0)\nl2 =ListNode(1,1)\nl3 =ListNode(2,2)\nl4 =ListNode(2,3)\n\nl5 =ListNode(2,4)\nl6 =ListNode(3,5)\nl7 =ListNode(3,6)\nl8 =ListNode(4,7)\n\nl1.next = l2\n#l2.next = l3\nl3.next = l4\nl4.next = l5\nl5.next = l6\nl6.next = l7\nl7.next = l8\nsolution = Solution()\nprint_nodes(solution.deleteDuplicates(l1))\n\n#print_nodes(l)\n\nclass Solution:\n # @param {ListNode} head\n # @return {ListNode}\n def deleteDuplicates(self, head):\n node = head\n prev = ListNode(0)\n prev.next = head\n head = prev\n\n while node:\n #Check if current node is a duplicated node\n flag = False\n while node.next and node.val == node.next.val:\n node.next = node.next.next\n flag = True\n #Node is now pointer of a list with each element appears only once\n node = node.next\n #Delete the current node if its value appeared more than once\n if flag and prev.next:\n prev.next = prev.next.next\n #Put the node at the end of the result\n else:\n prev = prev.next\n\n return head.next" ]
true
99,489
e031d4e5a98bde7da46592e766fcdffbb73cf349
# Generated by Django 2.2.2 on 2019-06-12 14:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('dbadmin', '0003_auto_20190611_1451'), ] operations = [ migrations.CreateModel( name='Outcomes', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), #('OutcomeID', models.IntegerField()), ('OutcomeDescription', models.CharField(max_length=1024)), ('CourseNumber', models.CharField(max_length=128)), ], ), migrations.CreateModel( name='Reviewer', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), #('ReviewerID', models.IntegerField()), ('ReviewerName', models.CharField(max_length=128)), ('ReviewerPhone', models.CharField(max_length=16)), ('ReviewerEmail', models.CharField(max_length=128)), ('ReviewerDepartment', models.CharField(max_length=8)), ], ), ]
[ "# Generated by Django 2.2.2 on 2019-06-12 14:59\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('dbadmin', '0003_auto_20190611_1451'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Outcomes',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n #('OutcomeID', models.IntegerField()),\n ('OutcomeDescription', models.CharField(max_length=1024)),\n ('CourseNumber', models.CharField(max_length=128)),\n ],\n ),\n migrations.CreateModel(\n name='Reviewer',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n #('ReviewerID', models.IntegerField()),\n ('ReviewerName', models.CharField(max_length=128)),\n ('ReviewerPhone', models.CharField(max_length=16)),\n ('ReviewerEmail', models.CharField(max_length=128)),\n ('ReviewerDepartment', models.CharField(max_length=8)),\n ],\n ),\n ]\n", "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('dbadmin', '0003_auto_20190611_1451')]\n operations = [migrations.CreateModel(name='Outcomes', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('OutcomeDescription', models.CharField\n (max_length=1024)), ('CourseNumber', models.CharField(max_length=\n 128))]), migrations.CreateModel(name='Reviewer', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('ReviewerName', models.CharField(\n max_length=128)), ('ReviewerPhone', models.CharField(max_length=16)\n ), ('ReviewerEmail', models.CharField(max_length=128)), (\n 'ReviewerDepartment', models.CharField(max_length=8))])]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('dbadmin', '0003_auto_20190611_1451')]\n operations = [migrations.CreateModel(name='Outcomes', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('OutcomeDescription', models.CharField\n (max_length=1024)), ('CourseNumber', models.CharField(max_length=\n 128))]), migrations.CreateModel(name='Reviewer', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('ReviewerName', models.CharField(\n max_length=128)), ('ReviewerPhone', models.CharField(max_length=16)\n ), ('ReviewerEmail', models.CharField(max_length=128)), (\n 'ReviewerDepartment', models.CharField(max_length=8))])]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n" ]
false
99,490
f86f6f9015b98d5b5fb058c57964a8af26931525
from fractions import gcd def solve(): N, *A = map(int, open(0).read().split()) ans = 0 left = [0 for _ in range(N+1)] right = [0 for _ in range(N+1)] for i in range(N): left[i+1] = gcd(left[i], A[i]) for i in range(N-1, -1, -1): right[i] = gcd(right[i+1], A[i]) for i in range(N): ans = max(ans, gcd(left[i], right[i+1])) print(ans) if __name__ == '__main__': solve()
[ "from fractions import gcd\n\n\ndef solve():\n N, *A = map(int, open(0).read().split())\n \n ans = 0\n left = [0 for _ in range(N+1)]\n right = [0 for _ in range(N+1)]\n for i in range(N):\n left[i+1] = gcd(left[i], A[i])\n for i in range(N-1, -1, -1):\n right[i] = gcd(right[i+1], A[i])\n for i in range(N):\n ans = max(ans, gcd(left[i], right[i+1]))\n print(ans)\n\n\nif __name__ == '__main__':\n solve()\n", "from fractions import gcd\n\n\ndef solve():\n N, *A = map(int, open(0).read().split())\n ans = 0\n left = [(0) for _ in range(N + 1)]\n right = [(0) for _ in range(N + 1)]\n for i in range(N):\n left[i + 1] = gcd(left[i], A[i])\n for i in range(N - 1, -1, -1):\n right[i] = gcd(right[i + 1], A[i])\n for i in range(N):\n ans = max(ans, gcd(left[i], right[i + 1]))\n print(ans)\n\n\nif __name__ == '__main__':\n solve()\n", "<import token>\n\n\ndef solve():\n N, *A = map(int, open(0).read().split())\n ans = 0\n left = [(0) for _ in range(N + 1)]\n right = [(0) for _ in range(N + 1)]\n for i in range(N):\n left[i + 1] = gcd(left[i], A[i])\n for i in range(N - 1, -1, -1):\n right[i] = gcd(right[i + 1], A[i])\n for i in range(N):\n ans = max(ans, gcd(left[i], right[i + 1]))\n print(ans)\n\n\nif __name__ == '__main__':\n solve()\n", "<import token>\n\n\ndef solve():\n N, *A = map(int, open(0).read().split())\n ans = 0\n left = [(0) for _ in range(N + 1)]\n right = [(0) for _ in range(N + 1)]\n for i in range(N):\n left[i + 1] = gcd(left[i], A[i])\n for i in range(N - 1, -1, -1):\n right[i] = gcd(right[i + 1], A[i])\n for i in range(N):\n ans = max(ans, gcd(left[i], right[i + 1]))\n print(ans)\n\n\n<code token>\n", "<import token>\n<function token>\n<code token>\n" ]
false
99,491
09f8a1cb74704d7367513ebe5147cd5370fce208
import asyncio import logging from functools import partial from os import getenv from aiofile import AIOFile, Writer from chat_common import get_logged_message, chat_connector, get_argparser, _non_empty_printable, run_client, _port, setup_general_log reader_log = logging.getLogger("Chat reader") async def read_chat(filename, chat_connector): assert bool(filename) and filename.isprintable(), AssertionError("Filename has to be non-empty printable.") async with AIOFile(filename, mode="a", encoding="utf-8") as file: writer = Writer(file) try: await chat_connector(writer) await file.fsync() except asyncio.CancelledError: await file.fsync() raise async def read_write_lines(reader, _, writer): data = await reader.readline() while data: await writer(get_logged_message(data.decode("utf-8", "ignore"))) data = await reader.readline() def get_args(): parser = get_argparser() parser.add_argument("-p", "--port", action="store", type=_port, help="chat port, default is 5000", default=int(getenv("CHAT_PORT", 5000))) parser.add_argument("-H", "--history", action="store", type=_non_empty_printable, help="messages history, default is ./messages.history", default=getenv("CHAT_HISTORY", "./messages.history")) return parser.parse_args() if __name__ == '__main__': options = get_args() # logger settings log_level = options.loglevel * 10 setup_general_log(options.log, log_level) reader_log.setLevel(log_level) connector = partial(chat_connector, options.host, options.port, options.delay, options.retries, read_write_lines) chat_handler = partial(read_chat, options.history, connector) reader_log.info(f"Chat reader is starting with options: {options}") run_client(chat_handler)
[ "import asyncio\nimport logging\nfrom functools import partial\nfrom os import getenv\n\nfrom aiofile import AIOFile, Writer\n\nfrom chat_common import get_logged_message, chat_connector, get_argparser, _non_empty_printable, run_client, _port, setup_general_log\n\n\nreader_log = logging.getLogger(\"Chat reader\")\n\n\nasync def read_chat(filename, chat_connector):\n\n assert bool(filename) and filename.isprintable(), AssertionError(\"Filename has to be non-empty printable.\")\n\n async with AIOFile(filename, mode=\"a\", encoding=\"utf-8\") as file:\n writer = Writer(file)\n\n try:\n await chat_connector(writer)\n\n await file.fsync()\n\n except asyncio.CancelledError:\n await file.fsync()\n raise\n\n\nasync def read_write_lines(reader, _, writer):\n\n data = await reader.readline()\n\n while data:\n await writer(get_logged_message(data.decode(\"utf-8\", \"ignore\")))\n data = await reader.readline()\n\n\ndef get_args():\n\n parser = get_argparser()\n\n parser.add_argument(\"-p\", \"--port\", action=\"store\", type=_port,\n help=\"chat port, default is 5000\",\n default=int(getenv(\"CHAT_PORT\", 5000)))\n\n parser.add_argument(\"-H\", \"--history\", action=\"store\", type=_non_empty_printable,\n help=\"messages history, default is ./messages.history\",\n default=getenv(\"CHAT_HISTORY\", \"./messages.history\"))\n\n return parser.parse_args()\n\n\nif __name__ == '__main__':\n options = get_args()\n\n # logger settings\n log_level = options.loglevel * 10\n setup_general_log(options.log, log_level)\n reader_log.setLevel(log_level)\n\n connector = partial(chat_connector, options.host, options.port, options.delay, options.retries, read_write_lines)\n chat_handler = partial(read_chat, options.history, connector)\n\n reader_log.info(f\"Chat reader is starting with options: {options}\")\n run_client(chat_handler)\n", "import asyncio\nimport logging\nfrom functools import partial\nfrom os import getenv\nfrom aiofile import AIOFile, Writer\nfrom chat_common import get_logged_message, chat_connector, get_argparser, _non_empty_printable, run_client, _port, setup_general_log\nreader_log = logging.getLogger('Chat reader')\n\n\nasync def read_chat(filename, chat_connector):\n assert bool(filename) and filename.isprintable(), AssertionError(\n 'Filename has to be non-empty printable.')\n async with AIOFile(filename, mode='a', encoding='utf-8') as file:\n writer = Writer(file)\n try:\n await chat_connector(writer)\n await file.fsync()\n except asyncio.CancelledError:\n await file.fsync()\n raise\n\n\nasync def read_write_lines(reader, _, writer):\n data = await reader.readline()\n while data:\n await writer(get_logged_message(data.decode('utf-8', 'ignore')))\n data = await reader.readline()\n\n\ndef get_args():\n parser = get_argparser()\n parser.add_argument('-p', '--port', action='store', type=_port, help=\n 'chat port, default is 5000', default=int(getenv('CHAT_PORT', 5000)))\n parser.add_argument('-H', '--history', action='store', type=\n _non_empty_printable, help=\n 'messages history, default is ./messages.history', default=getenv(\n 'CHAT_HISTORY', './messages.history'))\n return parser.parse_args()\n\n\nif __name__ == '__main__':\n options = get_args()\n log_level = options.loglevel * 10\n setup_general_log(options.log, log_level)\n reader_log.setLevel(log_level)\n connector = partial(chat_connector, options.host, options.port, options\n .delay, options.retries, read_write_lines)\n chat_handler = partial(read_chat, options.history, connector)\n reader_log.info(f'Chat reader is starting with 
options: {options}')\n run_client(chat_handler)\n", "<import token>\nreader_log = logging.getLogger('Chat reader')\n\n\nasync def read_chat(filename, chat_connector):\n assert bool(filename) and filename.isprintable(), AssertionError(\n 'Filename has to be non-empty printable.')\n async with AIOFile(filename, mode='a', encoding='utf-8') as file:\n writer = Writer(file)\n try:\n await chat_connector(writer)\n await file.fsync()\n except asyncio.CancelledError:\n await file.fsync()\n raise\n\n\nasync def read_write_lines(reader, _, writer):\n data = await reader.readline()\n while data:\n await writer(get_logged_message(data.decode('utf-8', 'ignore')))\n data = await reader.readline()\n\n\ndef get_args():\n parser = get_argparser()\n parser.add_argument('-p', '--port', action='store', type=_port, help=\n 'chat port, default is 5000', default=int(getenv('CHAT_PORT', 5000)))\n parser.add_argument('-H', '--history', action='store', type=\n _non_empty_printable, help=\n 'messages history, default is ./messages.history', default=getenv(\n 'CHAT_HISTORY', './messages.history'))\n return parser.parse_args()\n\n\nif __name__ == '__main__':\n options = get_args()\n log_level = options.loglevel * 10\n setup_general_log(options.log, log_level)\n reader_log.setLevel(log_level)\n connector = partial(chat_connector, options.host, options.port, options\n .delay, options.retries, read_write_lines)\n chat_handler = partial(read_chat, options.history, connector)\n reader_log.info(f'Chat reader is starting with options: {options}')\n run_client(chat_handler)\n", "<import token>\n<assignment token>\n\n\nasync def read_chat(filename, chat_connector):\n assert bool(filename) and filename.isprintable(), AssertionError(\n 'Filename has to be non-empty printable.')\n async with AIOFile(filename, mode='a', encoding='utf-8') as file:\n writer = Writer(file)\n try:\n await chat_connector(writer)\n await file.fsync()\n except asyncio.CancelledError:\n await file.fsync()\n raise\n\n\nasync def read_write_lines(reader, _, writer):\n data = await reader.readline()\n while data:\n await writer(get_logged_message(data.decode('utf-8', 'ignore')))\n data = await reader.readline()\n\n\ndef get_args():\n parser = get_argparser()\n parser.add_argument('-p', '--port', action='store', type=_port, help=\n 'chat port, default is 5000', default=int(getenv('CHAT_PORT', 5000)))\n parser.add_argument('-H', '--history', action='store', type=\n _non_empty_printable, help=\n 'messages history, default is ./messages.history', default=getenv(\n 'CHAT_HISTORY', './messages.history'))\n return parser.parse_args()\n\n\nif __name__ == '__main__':\n options = get_args()\n log_level = options.loglevel * 10\n setup_general_log(options.log, log_level)\n reader_log.setLevel(log_level)\n connector = partial(chat_connector, options.host, options.port, options\n .delay, options.retries, read_write_lines)\n chat_handler = partial(read_chat, options.history, connector)\n reader_log.info(f'Chat reader is starting with options: {options}')\n run_client(chat_handler)\n", "<import token>\n<assignment token>\n<code token>\n\n\ndef get_args():\n parser = get_argparser()\n parser.add_argument('-p', '--port', action='store', type=_port, help=\n 'chat port, default is 5000', default=int(getenv('CHAT_PORT', 5000)))\n parser.add_argument('-H', '--history', action='store', type=\n _non_empty_printable, help=\n 'messages history, default is ./messages.history', default=getenv(\n 'CHAT_HISTORY', './messages.history'))\n return parser.parse_args()\n\n\n<code token>\n", 
"<import token>\n<assignment token>\n<code token>\n<function token>\n<code token>\n" ]
false
99,492
d8b12127cf7f0093bf3c4951c6131f6760cc27c0
from groupy import Client import os GROUP_ID = 48071223 client = Client.from_token(os.environ["GROUPME_ACCESS_TOKEN"]) group = client.groups.get(id=GROUP_ID) group.leave() while True: group.post(text=input("> "))
[ "from groupy import Client\nimport os\n\nGROUP_ID = 48071223\n\n\nclient = Client.from_token(os.environ[\"GROUPME_ACCESS_TOKEN\"])\ngroup = client.groups.get(id=GROUP_ID)\ngroup.leave()\nwhile True:\n group.post(text=input(\"> \"))\n", "from groupy import Client\nimport os\nGROUP_ID = 48071223\nclient = Client.from_token(os.environ['GROUPME_ACCESS_TOKEN'])\ngroup = client.groups.get(id=GROUP_ID)\ngroup.leave()\nwhile True:\n group.post(text=input('> '))\n", "<import token>\nGROUP_ID = 48071223\nclient = Client.from_token(os.environ['GROUPME_ACCESS_TOKEN'])\ngroup = client.groups.get(id=GROUP_ID)\ngroup.leave()\nwhile True:\n group.post(text=input('> '))\n", "<import token>\n<assignment token>\ngroup.leave()\nwhile True:\n group.post(text=input('> '))\n", "<import token>\n<assignment token>\n<code token>\n" ]
false
99,493
f1d04a86a825f717715b20e99d4614aa87eff647
#!/usr/bin/env python # -*-coding:utf-8 -*- ''' @author: daishilong @contact: [email protected] ''' import time import numpy as np import matplotlib.pyplot as plt from sklearn.externals import joblib from sklearn.gaussian_process import GaussianProcessRegressor from sklearn.gaussian_process.kernels import WhiteKernel, ExpSineSquared, Matern, ConstantKernel, RBF, RationalQuadratic from sklearn.model_selection import train_test_split from mpl_toolkits.mplot3d import Axes3D from restore import Restore from sklearn.preprocessing import StandardScaler rng = np.random.RandomState(0) # data1 = np.loadtxt("E:/WiFi/day1/3F/0 m per s.csv", dtype=float, delimiter=',')[:,1:] # # data3 = np.loadtxt("E:/WiFi/day3/1.5 m per s.txt", dtype=float, delimiter=',')[:,1:] # # data4 = np.loadtxt("E:/WiFi/day4/1.5 m per s.txt", dtype=float, delimiter=',')[:,1:] # # data5 = np.loadtxt("E:/WiFi/day5/1.5 m per s.txt", dtype=float, delimiter=',')[:,1:] data1 = np.loadtxt("E:/WiFi/day2/1.5 m per s.txt", dtype=float, delimiter=',')[:,1:] data2 = np.loadtxt("E:/project/PycharmProjects/wifiServer/3F/GP/0/meanRe.csv", dtype=float, delimiter=',') dataAll = np.r_[data1, data2] TrainChoice = range(0, len(dataAll), 1) dataAll=dataAll[TrainChoice] grid = np.loadtxt("candidate.csv", dtype=float, delimiter=',') #grid = np.array(dataAll[:,:2]) gridMean = np.array(grid) gridStd = np.array(grid) default = -90 # dataAll[dataAll[:,:]==-100] = default # testdataAll = np.loadtxt("E:/WiFi/实验室6楼/wifiData/行人验证/lastpdr.csv", dtype=float, delimiter=',') # testdataAll = testdataAll[testdataAll[:,1]==2,2:] testdataAll = np.loadtxt("E:/WiFi/day1/3F/2 m per s.csv", dtype=float, delimiter=',')[:,1:] # testdataAll = np.loadtxt("E:/WiFi/实验室6楼/wifiData/实验/1.2m_human.csv", dtype=float, delimiter=',')[:,3:] font1 = {'family' : 'Times New Roman', 'weight' : 'normal', 'size' : 20,} modelPath = 'model/GP/' inputNum = 2 interval = 19 scaler = StandardScaler().fit(dataAll[:,:inputNum]) ax = [] err = np.zeros([len(testdataAll),len(testdataAll[0])-2]) for Ap in range(0,interval): testAP = inputNum + Ap testAPBand = testAP + interval testdata = testdataAll[testdataAll[:,testAP]!=-100,:] dataRaw = dataAll[dataAll[:, testAP] != -100] y,dy,reData = Restore(dataAll=dataAll,gap=interval,inputN=inputNum,num=Ap) y[y[:]==-100]=default X = dataAll[:,:inputNum] kernel = 1.0* RBF(length_scale=1.0, length_scale_bounds=(1e-2, 1e3)) \ +WhiteKernel(noise_level=1) gpr = GaussianProcessRegressor(kernel=kernel, normalize_y=True) stime = time.time() gpr.fit(scaler.transform(X), y) print("Time for GPR fitting: %.3f" % (time.time() - stime)) X_predict =grid[:,:2] # Predict using gaussian process regressor stime = time.time() y_gpr, y_std = gpr.predict(scaler.transform(X_predict), return_std=True) gridMean = np.c_[gridMean,y_gpr] gridStd = np.c_[gridStd, y_std] print("Time for GPR prediction with standard-deviation: %.3f" % (time.time() - stime)) print(gpr.kernel_) #print(y_gpr-testdata[:,testAPBand]) # Plot results ax.append(plt.figure().add_subplot(111, projection='3d')) ax[Ap].scatter(dataRaw[:, 0], dataRaw[:, 1], dataRaw[:,testAP], c='r') dataUndetect = dataAll[dataAll[:, testAP] == -100] # ax[Ap].scatter(reData[:, 0], reData[:, 1], reData[:, testAP], c='black') ax[Ap].scatter(dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:, testAP], c='b') ax[Ap].scatter(X_predict[:, 0], X_predict[:, 1], y_gpr[:], c='g') # if Ap ==9: # np.savetxt("csv/measured.csv", np.array(np.c_[dataRaw[:, 0], dataRaw[:, 1], dataRaw[:,testAP]]), fmt='%f', delimiter=',', newline='\r\n') # 
np.savetxt("csv/recovered.csv", np.array(np.c_[reData[:, 0], reData[:, 1], reData[:, testAP]]), fmt='%f', # delimiter=',', newline='\r\n') # np.savetxt("csv/undetected.csv", np.array(np.c_[dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:, testAP]]), fmt='%f', # delimiter=',', newline='\r\n') # np.savetxt("csv/fingerprint.csv", np.array(np.c_[X_predict[:, 0], X_predict[:, 1], y_gpr[:]]), fmt='%f', # delimiter=',', newline='\r\n') # ax[Ap].scatter(testdata[:, 0], testdata[:, 1], testdata[:,testAP], c='b') ax[Ap].set_zlabel('RSSI (dBm)',font1) # 坐标轴 ax[Ap].set_ylabel('Y (m)',font1) ax[Ap].set_xlabel('X (m)',font1) ax[Ap].legend(['measured data','undetected data','fingerprint map'],prop=font1,loc = 'lower center', bbox_to_anchor=(0.6,0.95)) plt.xticks(fontsize=15) plt.yticks(fontsize=15) joblib.dump(gpr, modelPath + "ap" + str(Ap) + ".pkl") for Ap in range(interval,2*interval): testAP = inputNum + Ap # ignore the default RSSI data = dataAll[:, :] data = data[:] # trainingData, testingData = train_test_split(data, test_size=0.2) # trainingData=trainingData[trainingData[:,0].argsort()] # testingData = testingData[testingData[:, 0].argsort()] testdata = testdataAll[testdataAll[:, testAP] != -100, :] X = np.r_[data[:, 0:2]] # X_show = np.r_[trainingData[:, 0].reshape(-1,1),testingData[:1,0].reshape(-1,1)] # X_show = X_show[X_show[:, 0].argsort()] # y = data[:,testAP] dataRaw = dataAll[dataAll[:, testAP] != -100] y, dy, reData = Restore(dataAll=dataAll, gap=-interval, inputN=inputNum, num=Ap) y[y[:]==-100]=default # dy = np.zeros(data[:, testAP].shape) + 4 kernel = 1.0* RBF(length_scale=1.0, length_scale_bounds=(1e-2, 1e3)) \ +WhiteKernel(noise_level=1) gpr = GaussianProcessRegressor(kernel=kernel, normalize_y=True) stime = time.time() gpr.fit(scaler.transform(X), y) print("Time for GPR fitting: %.3f" % (time.time() - stime)) X_predict = grid[:, :2] # Predict using gaussian process regressor stime = time.time() y_gpr, y_std = gpr.predict(scaler.transform(X_predict), return_std=True) gridMean = np.c_[gridMean, y_gpr] gridStd = np.c_[gridStd, y_std] print("Time for GPR prediction with standard-deviation: %.3f" % (time.time() - stime)) ax.append(plt.figure().add_subplot(111, projection='3d')) ax[Ap].scatter(dataRaw[:, 0], dataRaw[:, 1], dataRaw[:, testAP], c='r') dataUndetect = dataAll[dataAll[:, testAP] == -100] # ax[Ap].scatter(reData[:, 0], reData[:, 1], reData[:, testAP], c='black') ax[Ap].scatter(dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:, testAP], c='b') ax[Ap].scatter(X_predict[:, 0], X_predict[:, 1], y_gpr[:], c='g') # ax[Ap].scatter(testdata[:, 0], testdata[:, 1], testdata[:,testAP], c='b') ax[Ap].set_zlabel('RSSI (dBm)', font1) # 坐标轴 ax[Ap].set_ylabel('Y (m)', font1) ax[Ap].set_xlabel('X (m)', font1) ax[Ap].legend(['measured data', 'undetected data', 'fingerprint map'], prop=font1, loc='lower center',bbox_to_anchor=(0.6,0.95)) if Ap ==2*interval-1: np.savetxt("csv/measured.csv", np.array(np.c_[dataRaw[:, 0], dataRaw[:, 1], dataRaw[:,testAP]]), fmt='%f', delimiter=',', newline='\r\n') np.savetxt("csv/recovered.csv", np.array(np.c_[reData[:, 0], reData[:, 1], reData[:, testAP]]), fmt='%f', delimiter=',', newline='\r\n') np.savetxt("csv/undetected.csv", np.array(np.c_[dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:, testAP]]), fmt='%f', delimiter=',', newline='\r\n') np.savetxt("csv/fingerprint.csv", np.array(np.c_[X_predict[:, 0], X_predict[:, 1], y_gpr[:]]), fmt='%f', delimiter=',', newline='\r\n') # err[:len(y_gpr),Ap]=y_gpr-grid[:,testAP] plt.xticks(fontsize=15) 
plt.yticks(fontsize=15) print(gpr.kernel_) np.savetxt("6b/GP/sample/3/means.csv", np.array(gridMean), fmt='%f', delimiter=',', newline='\r\n') np.savetxt("6b/GP/sample/3/stds.csv", np.array(gridStd), fmt='%f', delimiter=',', newline='\r\n') # plt.xticks(()) # plt.yticks(()) plt.show()
[ "#!/usr/bin/env python\n# -*-coding:utf-8 -*- \n'''\n@author: daishilong\n@contact: [email protected]\n'''\n\nimport time\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.externals import joblib\nfrom sklearn.gaussian_process import GaussianProcessRegressor\nfrom sklearn.gaussian_process.kernels import WhiteKernel, ExpSineSquared, Matern, ConstantKernel, RBF, RationalQuadratic\nfrom sklearn.model_selection import train_test_split\nfrom mpl_toolkits.mplot3d import Axes3D\nfrom restore import Restore\nfrom sklearn.preprocessing import StandardScaler\nrng = np.random.RandomState(0)\n# data1 = np.loadtxt(\"E:/WiFi/day1/3F/0 m per s.csv\", dtype=float, delimiter=',')[:,1:]\n#\n# data3 = np.loadtxt(\"E:/WiFi/day3/1.5 m per s.txt\", dtype=float, delimiter=',')[:,1:]\n#\n# data4 = np.loadtxt(\"E:/WiFi/day4/1.5 m per s.txt\", dtype=float, delimiter=',')[:,1:]\n#\n# data5 = np.loadtxt(\"E:/WiFi/day5/1.5 m per s.txt\", dtype=float, delimiter=',')[:,1:]\ndata1 = np.loadtxt(\"E:/WiFi/day2/1.5 m per s.txt\", dtype=float, delimiter=',')[:,1:]\ndata2 = np.loadtxt(\"E:/project/PycharmProjects/wifiServer/3F/GP/0/meanRe.csv\", dtype=float, delimiter=',')\ndataAll = np.r_[data1, data2]\nTrainChoice = range(0, len(dataAll), 1)\ndataAll=dataAll[TrainChoice]\ngrid = np.loadtxt(\"candidate.csv\", dtype=float, delimiter=',')\n#grid = np.array(dataAll[:,:2])\ngridMean = np.array(grid)\ngridStd = np.array(grid)\ndefault = -90\n# dataAll[dataAll[:,:]==-100] = default\n\n# testdataAll = np.loadtxt(\"E:/WiFi/实验室6楼/wifiData/行人验证/lastpdr.csv\", dtype=float, delimiter=',')\n# testdataAll = testdataAll[testdataAll[:,1]==2,2:]\ntestdataAll = np.loadtxt(\"E:/WiFi/day1/3F/2 m per s.csv\", dtype=float, delimiter=',')[:,1:]\n# testdataAll = np.loadtxt(\"E:/WiFi/实验室6楼/wifiData/实验/1.2m_human.csv\", dtype=float, delimiter=',')[:,3:]\nfont1 = {'family' : 'Times New Roman',\n'weight' : 'normal',\n'size' : 20,}\nmodelPath = 'model/GP/'\ninputNum = 2\ninterval = 19\nscaler = StandardScaler().fit(dataAll[:,:inputNum])\nax = []\nerr = np.zeros([len(testdataAll),len(testdataAll[0])-2])\nfor Ap in range(0,interval):\n testAP = inputNum + Ap\n testAPBand = testAP + interval\n testdata = testdataAll[testdataAll[:,testAP]!=-100,:]\n dataRaw = dataAll[dataAll[:, testAP] != -100]\n y,dy,reData = Restore(dataAll=dataAll,gap=interval,inputN=inputNum,num=Ap)\n y[y[:]==-100]=default\n\n X = dataAll[:,:inputNum]\n\n kernel = 1.0* RBF(length_scale=1.0, length_scale_bounds=(1e-2, 1e3)) \\\n +WhiteKernel(noise_level=1)\n gpr = GaussianProcessRegressor(kernel=kernel, normalize_y=True)\n stime = time.time()\n gpr.fit(scaler.transform(X), y)\n print(\"Time for GPR fitting: %.3f\" % (time.time() - stime))\n\n X_predict =grid[:,:2]\n # Predict using gaussian process regressor\n\n stime = time.time()\n y_gpr, y_std = gpr.predict(scaler.transform(X_predict), return_std=True)\n gridMean = np.c_[gridMean,y_gpr]\n gridStd = np.c_[gridStd, y_std]\n print(\"Time for GPR prediction with standard-deviation: %.3f\"\n % (time.time() - stime))\n print(gpr.kernel_)\n #print(y_gpr-testdata[:,testAPBand])\n\n # Plot results\n ax.append(plt.figure().add_subplot(111, projection='3d'))\n\n ax[Ap].scatter(dataRaw[:, 0], dataRaw[:, 1], dataRaw[:,testAP], c='r')\n dataUndetect = dataAll[dataAll[:, testAP] == -100]\n # ax[Ap].scatter(reData[:, 0], reData[:, 1], reData[:, testAP], c='black')\n\n ax[Ap].scatter(dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:, testAP], c='b')\n\n ax[Ap].scatter(X_predict[:, 0], X_predict[:, 1], y_gpr[:], c='g')\n # if Ap 
==9:\n # np.savetxt(\"csv/measured.csv\", np.array(np.c_[dataRaw[:, 0], dataRaw[:, 1], dataRaw[:,testAP]]), fmt='%f', delimiter=',', newline='\\r\\n')\n # np.savetxt(\"csv/recovered.csv\", np.array(np.c_[reData[:, 0], reData[:, 1], reData[:, testAP]]), fmt='%f',\n # delimiter=',', newline='\\r\\n')\n # np.savetxt(\"csv/undetected.csv\", np.array(np.c_[dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:, testAP]]), fmt='%f',\n # delimiter=',', newline='\\r\\n')\n # np.savetxt(\"csv/fingerprint.csv\", np.array(np.c_[X_predict[:, 0], X_predict[:, 1], y_gpr[:]]), fmt='%f',\n # delimiter=',', newline='\\r\\n')\n\n\n # ax[Ap].scatter(testdata[:, 0], testdata[:, 1], testdata[:,testAP], c='b')\n ax[Ap].set_zlabel('RSSI (dBm)',font1) # 坐标轴\n ax[Ap].set_ylabel('Y (m)',font1)\n ax[Ap].set_xlabel('X (m)',font1)\n ax[Ap].legend(['measured data','undetected data','fingerprint map'],prop=font1,loc = 'lower center', bbox_to_anchor=(0.6,0.95))\n\n plt.xticks(fontsize=15)\n plt.yticks(fontsize=15)\n joblib.dump(gpr, modelPath + \"ap\" + str(Ap) + \".pkl\")\nfor Ap in range(interval,2*interval):\n testAP = inputNum + Ap\n\n # ignore the default RSSI\n data = dataAll[:, :]\n data = data[:]\n # trainingData, testingData = train_test_split(data, test_size=0.2)\n # trainingData=trainingData[trainingData[:,0].argsort()]\n # testingData = testingData[testingData[:, 0].argsort()]\n\n\n\n\n testdata = testdataAll[testdataAll[:, testAP] != -100, :]\n\n X = np.r_[data[:, 0:2]]\n # X_show = np.r_[trainingData[:, 0].reshape(-1,1),testingData[:1,0].reshape(-1,1)]\n # X_show = X_show[X_show[:, 0].argsort()]\n # y = data[:,testAP]\n dataRaw = dataAll[dataAll[:, testAP] != -100]\n y, dy, reData = Restore(dataAll=dataAll, gap=-interval, inputN=inputNum, num=Ap)\n y[y[:]==-100]=default\n # dy = np.zeros(data[:, testAP].shape) + 4\n kernel = 1.0* RBF(length_scale=1.0, length_scale_bounds=(1e-2, 1e3)) \\\n +WhiteKernel(noise_level=1)\n gpr = GaussianProcessRegressor(kernel=kernel, normalize_y=True)\n stime = time.time()\n gpr.fit(scaler.transform(X), y)\n print(\"Time for GPR fitting: %.3f\" % (time.time() - stime))\n\n\n X_predict = grid[:, :2]\n # Predict using gaussian process regressor\n\n stime = time.time()\n y_gpr, y_std = gpr.predict(scaler.transform(X_predict), return_std=True)\n gridMean = np.c_[gridMean, y_gpr]\n gridStd = np.c_[gridStd, y_std]\n print(\"Time for GPR prediction with standard-deviation: %.3f\"\n % (time.time() - stime))\n ax.append(plt.figure().add_subplot(111, projection='3d'))\n ax[Ap].scatter(dataRaw[:, 0], dataRaw[:, 1], dataRaw[:, testAP], c='r')\n dataUndetect = dataAll[dataAll[:, testAP] == -100]\n # ax[Ap].scatter(reData[:, 0], reData[:, 1], reData[:, testAP], c='black')\n\n ax[Ap].scatter(dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:, testAP], c='b')\n\n ax[Ap].scatter(X_predict[:, 0], X_predict[:, 1], y_gpr[:], c='g')\n # ax[Ap].scatter(testdata[:, 0], testdata[:, 1], testdata[:,testAP], c='b')\n ax[Ap].set_zlabel('RSSI (dBm)', font1) # 坐标轴\n ax[Ap].set_ylabel('Y (m)', font1)\n ax[Ap].set_xlabel('X (m)', font1)\n ax[Ap].legend(['measured data', 'undetected data', 'fingerprint map'], prop=font1,\n loc='lower center',bbox_to_anchor=(0.6,0.95))\n if Ap ==2*interval-1:\n np.savetxt(\"csv/measured.csv\", np.array(np.c_[dataRaw[:, 0], dataRaw[:, 1], dataRaw[:,testAP]]), fmt='%f', delimiter=',', newline='\\r\\n')\n np.savetxt(\"csv/recovered.csv\", np.array(np.c_[reData[:, 0], reData[:, 1], reData[:, testAP]]), fmt='%f',\n delimiter=',', newline='\\r\\n')\n 
np.savetxt(\"csv/undetected.csv\", np.array(np.c_[dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:, testAP]]), fmt='%f',\n delimiter=',', newline='\\r\\n')\n np.savetxt(\"csv/fingerprint.csv\", np.array(np.c_[X_predict[:, 0], X_predict[:, 1], y_gpr[:]]), fmt='%f',\n delimiter=',', newline='\\r\\n')\n # err[:len(y_gpr),Ap]=y_gpr-grid[:,testAP]\n plt.xticks(fontsize=15)\n plt.yticks(fontsize=15)\n print(gpr.kernel_)\n\nnp.savetxt(\"6b/GP/sample/3/means.csv\", np.array(gridMean), fmt='%f', delimiter=',', newline='\\r\\n')\nnp.savetxt(\"6b/GP/sample/3/stds.csv\", np.array(gridStd), fmt='%f', delimiter=',', newline='\\r\\n')\n# plt.xticks(())\n# plt.yticks(())\nplt.show()\n\n\n", "<docstring token>\nimport time\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.externals import joblib\nfrom sklearn.gaussian_process import GaussianProcessRegressor\nfrom sklearn.gaussian_process.kernels import WhiteKernel, ExpSineSquared, Matern, ConstantKernel, RBF, RationalQuadratic\nfrom sklearn.model_selection import train_test_split\nfrom mpl_toolkits.mplot3d import Axes3D\nfrom restore import Restore\nfrom sklearn.preprocessing import StandardScaler\nrng = np.random.RandomState(0)\ndata1 = np.loadtxt('E:/WiFi/day2/1.5 m per s.txt', dtype=float, delimiter=','\n )[:, 1:]\ndata2 = np.loadtxt('E:/project/PycharmProjects/wifiServer/3F/GP/0/meanRe.csv',\n dtype=float, delimiter=',')\ndataAll = np.r_[data1, data2]\nTrainChoice = range(0, len(dataAll), 1)\ndataAll = dataAll[TrainChoice]\ngrid = np.loadtxt('candidate.csv', dtype=float, delimiter=',')\ngridMean = np.array(grid)\ngridStd = np.array(grid)\ndefault = -90\ntestdataAll = np.loadtxt('E:/WiFi/day1/3F/2 m per s.csv', dtype=float,\n delimiter=',')[:, 1:]\nfont1 = {'family': 'Times New Roman', 'weight': 'normal', 'size': 20}\nmodelPath = 'model/GP/'\ninputNum = 2\ninterval = 19\nscaler = StandardScaler().fit(dataAll[:, :inputNum])\nax = []\nerr = np.zeros([len(testdataAll), len(testdataAll[0]) - 2])\nfor Ap in range(0, interval):\n testAP = inputNum + Ap\n testAPBand = testAP + interval\n testdata = testdataAll[testdataAll[:, testAP] != -100, :]\n dataRaw = dataAll[dataAll[:, testAP] != -100]\n y, dy, reData = Restore(dataAll=dataAll, gap=interval, inputN=inputNum,\n num=Ap)\n y[y[:] == -100] = default\n X = dataAll[:, :inputNum]\n kernel = 1.0 * RBF(length_scale=1.0, length_scale_bounds=(0.01, 1000.0)\n ) + WhiteKernel(noise_level=1)\n gpr = GaussianProcessRegressor(kernel=kernel, normalize_y=True)\n stime = time.time()\n gpr.fit(scaler.transform(X), y)\n print('Time for GPR fitting: %.3f' % (time.time() - stime))\n X_predict = grid[:, :2]\n stime = time.time()\n y_gpr, y_std = gpr.predict(scaler.transform(X_predict), return_std=True)\n gridMean = np.c_[gridMean, y_gpr]\n gridStd = np.c_[gridStd, y_std]\n print('Time for GPR prediction with standard-deviation: %.3f' % (time.\n time() - stime))\n print(gpr.kernel_)\n ax.append(plt.figure().add_subplot(111, projection='3d'))\n ax[Ap].scatter(dataRaw[:, 0], dataRaw[:, 1], dataRaw[:, testAP], c='r')\n dataUndetect = dataAll[dataAll[:, testAP] == -100]\n ax[Ap].scatter(dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:,\n testAP], c='b')\n ax[Ap].scatter(X_predict[:, 0], X_predict[:, 1], y_gpr[:], c='g')\n ax[Ap].set_zlabel('RSSI (dBm)', font1)\n ax[Ap].set_ylabel('Y (m)', font1)\n ax[Ap].set_xlabel('X (m)', font1)\n ax[Ap].legend(['measured data', 'undetected data', 'fingerprint map'],\n prop=font1, loc='lower center', bbox_to_anchor=(0.6, 0.95))\n plt.xticks(fontsize=15)\n 
plt.yticks(fontsize=15)\n joblib.dump(gpr, modelPath + 'ap' + str(Ap) + '.pkl')\nfor Ap in range(interval, 2 * interval):\n testAP = inputNum + Ap\n data = dataAll[:, :]\n data = data[:]\n testdata = testdataAll[testdataAll[:, testAP] != -100, :]\n X = np.r_[data[:, 0:2]]\n dataRaw = dataAll[dataAll[:, testAP] != -100]\n y, dy, reData = Restore(dataAll=dataAll, gap=-interval, inputN=inputNum,\n num=Ap)\n y[y[:] == -100] = default\n kernel = 1.0 * RBF(length_scale=1.0, length_scale_bounds=(0.01, 1000.0)\n ) + WhiteKernel(noise_level=1)\n gpr = GaussianProcessRegressor(kernel=kernel, normalize_y=True)\n stime = time.time()\n gpr.fit(scaler.transform(X), y)\n print('Time for GPR fitting: %.3f' % (time.time() - stime))\n X_predict = grid[:, :2]\n stime = time.time()\n y_gpr, y_std = gpr.predict(scaler.transform(X_predict), return_std=True)\n gridMean = np.c_[gridMean, y_gpr]\n gridStd = np.c_[gridStd, y_std]\n print('Time for GPR prediction with standard-deviation: %.3f' % (time.\n time() - stime))\n ax.append(plt.figure().add_subplot(111, projection='3d'))\n ax[Ap].scatter(dataRaw[:, 0], dataRaw[:, 1], dataRaw[:, testAP], c='r')\n dataUndetect = dataAll[dataAll[:, testAP] == -100]\n ax[Ap].scatter(dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:,\n testAP], c='b')\n ax[Ap].scatter(X_predict[:, 0], X_predict[:, 1], y_gpr[:], c='g')\n ax[Ap].set_zlabel('RSSI (dBm)', font1)\n ax[Ap].set_ylabel('Y (m)', font1)\n ax[Ap].set_xlabel('X (m)', font1)\n ax[Ap].legend(['measured data', 'undetected data', 'fingerprint map'],\n prop=font1, loc='lower center', bbox_to_anchor=(0.6, 0.95))\n if Ap == 2 * interval - 1:\n np.savetxt('csv/measured.csv', np.array(np.c_[dataRaw[:, 0],\n dataRaw[:, 1], dataRaw[:, testAP]]), fmt='%f', delimiter=',',\n newline='\\r\\n')\n np.savetxt('csv/recovered.csv', np.array(np.c_[reData[:, 0], reData\n [:, 1], reData[:, testAP]]), fmt='%f', delimiter=',', newline=\n '\\r\\n')\n np.savetxt('csv/undetected.csv', np.array(np.c_[dataUndetect[:, 0],\n dataUndetect[:, 1], dataUndetect[:, testAP]]), fmt='%f',\n delimiter=',', newline='\\r\\n')\n np.savetxt('csv/fingerprint.csv', np.array(np.c_[X_predict[:, 0],\n X_predict[:, 1], y_gpr[:]]), fmt='%f', delimiter=',', newline=\n '\\r\\n')\n plt.xticks(fontsize=15)\n plt.yticks(fontsize=15)\n print(gpr.kernel_)\nnp.savetxt('6b/GP/sample/3/means.csv', np.array(gridMean), fmt='%f',\n delimiter=',', newline='\\r\\n')\nnp.savetxt('6b/GP/sample/3/stds.csv', np.array(gridStd), fmt='%f',\n delimiter=',', newline='\\r\\n')\nplt.show()\n", "<docstring token>\n<import token>\nrng = np.random.RandomState(0)\ndata1 = np.loadtxt('E:/WiFi/day2/1.5 m per s.txt', dtype=float, delimiter=','\n )[:, 1:]\ndata2 = np.loadtxt('E:/project/PycharmProjects/wifiServer/3F/GP/0/meanRe.csv',\n dtype=float, delimiter=',')\ndataAll = np.r_[data1, data2]\nTrainChoice = range(0, len(dataAll), 1)\ndataAll = dataAll[TrainChoice]\ngrid = np.loadtxt('candidate.csv', dtype=float, delimiter=',')\ngridMean = np.array(grid)\ngridStd = np.array(grid)\ndefault = -90\ntestdataAll = np.loadtxt('E:/WiFi/day1/3F/2 m per s.csv', dtype=float,\n delimiter=',')[:, 1:]\nfont1 = {'family': 'Times New Roman', 'weight': 'normal', 'size': 20}\nmodelPath = 'model/GP/'\ninputNum = 2\ninterval = 19\nscaler = StandardScaler().fit(dataAll[:, :inputNum])\nax = []\nerr = np.zeros([len(testdataAll), len(testdataAll[0]) - 2])\nfor Ap in range(0, interval):\n testAP = inputNum + Ap\n testAPBand = testAP + interval\n testdata = testdataAll[testdataAll[:, testAP] != -100, :]\n dataRaw = 
dataAll[dataAll[:, testAP] != -100]\n y, dy, reData = Restore(dataAll=dataAll, gap=interval, inputN=inputNum,\n num=Ap)\n y[y[:] == -100] = default\n X = dataAll[:, :inputNum]\n kernel = 1.0 * RBF(length_scale=1.0, length_scale_bounds=(0.01, 1000.0)\n ) + WhiteKernel(noise_level=1)\n gpr = GaussianProcessRegressor(kernel=kernel, normalize_y=True)\n stime = time.time()\n gpr.fit(scaler.transform(X), y)\n print('Time for GPR fitting: %.3f' % (time.time() - stime))\n X_predict = grid[:, :2]\n stime = time.time()\n y_gpr, y_std = gpr.predict(scaler.transform(X_predict), return_std=True)\n gridMean = np.c_[gridMean, y_gpr]\n gridStd = np.c_[gridStd, y_std]\n print('Time for GPR prediction with standard-deviation: %.3f' % (time.\n time() - stime))\n print(gpr.kernel_)\n ax.append(plt.figure().add_subplot(111, projection='3d'))\n ax[Ap].scatter(dataRaw[:, 0], dataRaw[:, 1], dataRaw[:, testAP], c='r')\n dataUndetect = dataAll[dataAll[:, testAP] == -100]\n ax[Ap].scatter(dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:,\n testAP], c='b')\n ax[Ap].scatter(X_predict[:, 0], X_predict[:, 1], y_gpr[:], c='g')\n ax[Ap].set_zlabel('RSSI (dBm)', font1)\n ax[Ap].set_ylabel('Y (m)', font1)\n ax[Ap].set_xlabel('X (m)', font1)\n ax[Ap].legend(['measured data', 'undetected data', 'fingerprint map'],\n prop=font1, loc='lower center', bbox_to_anchor=(0.6, 0.95))\n plt.xticks(fontsize=15)\n plt.yticks(fontsize=15)\n joblib.dump(gpr, modelPath + 'ap' + str(Ap) + '.pkl')\nfor Ap in range(interval, 2 * interval):\n testAP = inputNum + Ap\n data = dataAll[:, :]\n data = data[:]\n testdata = testdataAll[testdataAll[:, testAP] != -100, :]\n X = np.r_[data[:, 0:2]]\n dataRaw = dataAll[dataAll[:, testAP] != -100]\n y, dy, reData = Restore(dataAll=dataAll, gap=-interval, inputN=inputNum,\n num=Ap)\n y[y[:] == -100] = default\n kernel = 1.0 * RBF(length_scale=1.0, length_scale_bounds=(0.01, 1000.0)\n ) + WhiteKernel(noise_level=1)\n gpr = GaussianProcessRegressor(kernel=kernel, normalize_y=True)\n stime = time.time()\n gpr.fit(scaler.transform(X), y)\n print('Time for GPR fitting: %.3f' % (time.time() - stime))\n X_predict = grid[:, :2]\n stime = time.time()\n y_gpr, y_std = gpr.predict(scaler.transform(X_predict), return_std=True)\n gridMean = np.c_[gridMean, y_gpr]\n gridStd = np.c_[gridStd, y_std]\n print('Time for GPR prediction with standard-deviation: %.3f' % (time.\n time() - stime))\n ax.append(plt.figure().add_subplot(111, projection='3d'))\n ax[Ap].scatter(dataRaw[:, 0], dataRaw[:, 1], dataRaw[:, testAP], c='r')\n dataUndetect = dataAll[dataAll[:, testAP] == -100]\n ax[Ap].scatter(dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:,\n testAP], c='b')\n ax[Ap].scatter(X_predict[:, 0], X_predict[:, 1], y_gpr[:], c='g')\n ax[Ap].set_zlabel('RSSI (dBm)', font1)\n ax[Ap].set_ylabel('Y (m)', font1)\n ax[Ap].set_xlabel('X (m)', font1)\n ax[Ap].legend(['measured data', 'undetected data', 'fingerprint map'],\n prop=font1, loc='lower center', bbox_to_anchor=(0.6, 0.95))\n if Ap == 2 * interval - 1:\n np.savetxt('csv/measured.csv', np.array(np.c_[dataRaw[:, 0],\n dataRaw[:, 1], dataRaw[:, testAP]]), fmt='%f', delimiter=',',\n newline='\\r\\n')\n np.savetxt('csv/recovered.csv', np.array(np.c_[reData[:, 0], reData\n [:, 1], reData[:, testAP]]), fmt='%f', delimiter=',', newline=\n '\\r\\n')\n np.savetxt('csv/undetected.csv', np.array(np.c_[dataUndetect[:, 0],\n dataUndetect[:, 1], dataUndetect[:, testAP]]), fmt='%f',\n delimiter=',', newline='\\r\\n')\n np.savetxt('csv/fingerprint.csv', np.array(np.c_[X_predict[:, 
0],\n X_predict[:, 1], y_gpr[:]]), fmt='%f', delimiter=',', newline=\n '\\r\\n')\n plt.xticks(fontsize=15)\n plt.yticks(fontsize=15)\n print(gpr.kernel_)\nnp.savetxt('6b/GP/sample/3/means.csv', np.array(gridMean), fmt='%f',\n delimiter=',', newline='\\r\\n')\nnp.savetxt('6b/GP/sample/3/stds.csv', np.array(gridStd), fmt='%f',\n delimiter=',', newline='\\r\\n')\nplt.show()\n", "<docstring token>\n<import token>\n<assignment token>\nfor Ap in range(0, interval):\n testAP = inputNum + Ap\n testAPBand = testAP + interval\n testdata = testdataAll[testdataAll[:, testAP] != -100, :]\n dataRaw = dataAll[dataAll[:, testAP] != -100]\n y, dy, reData = Restore(dataAll=dataAll, gap=interval, inputN=inputNum,\n num=Ap)\n y[y[:] == -100] = default\n X = dataAll[:, :inputNum]\n kernel = 1.0 * RBF(length_scale=1.0, length_scale_bounds=(0.01, 1000.0)\n ) + WhiteKernel(noise_level=1)\n gpr = GaussianProcessRegressor(kernel=kernel, normalize_y=True)\n stime = time.time()\n gpr.fit(scaler.transform(X), y)\n print('Time for GPR fitting: %.3f' % (time.time() - stime))\n X_predict = grid[:, :2]\n stime = time.time()\n y_gpr, y_std = gpr.predict(scaler.transform(X_predict), return_std=True)\n gridMean = np.c_[gridMean, y_gpr]\n gridStd = np.c_[gridStd, y_std]\n print('Time for GPR prediction with standard-deviation: %.3f' % (time.\n time() - stime))\n print(gpr.kernel_)\n ax.append(plt.figure().add_subplot(111, projection='3d'))\n ax[Ap].scatter(dataRaw[:, 0], dataRaw[:, 1], dataRaw[:, testAP], c='r')\n dataUndetect = dataAll[dataAll[:, testAP] == -100]\n ax[Ap].scatter(dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:,\n testAP], c='b')\n ax[Ap].scatter(X_predict[:, 0], X_predict[:, 1], y_gpr[:], c='g')\n ax[Ap].set_zlabel('RSSI (dBm)', font1)\n ax[Ap].set_ylabel('Y (m)', font1)\n ax[Ap].set_xlabel('X (m)', font1)\n ax[Ap].legend(['measured data', 'undetected data', 'fingerprint map'],\n prop=font1, loc='lower center', bbox_to_anchor=(0.6, 0.95))\n plt.xticks(fontsize=15)\n plt.yticks(fontsize=15)\n joblib.dump(gpr, modelPath + 'ap' + str(Ap) + '.pkl')\nfor Ap in range(interval, 2 * interval):\n testAP = inputNum + Ap\n data = dataAll[:, :]\n data = data[:]\n testdata = testdataAll[testdataAll[:, testAP] != -100, :]\n X = np.r_[data[:, 0:2]]\n dataRaw = dataAll[dataAll[:, testAP] != -100]\n y, dy, reData = Restore(dataAll=dataAll, gap=-interval, inputN=inputNum,\n num=Ap)\n y[y[:] == -100] = default\n kernel = 1.0 * RBF(length_scale=1.0, length_scale_bounds=(0.01, 1000.0)\n ) + WhiteKernel(noise_level=1)\n gpr = GaussianProcessRegressor(kernel=kernel, normalize_y=True)\n stime = time.time()\n gpr.fit(scaler.transform(X), y)\n print('Time for GPR fitting: %.3f' % (time.time() - stime))\n X_predict = grid[:, :2]\n stime = time.time()\n y_gpr, y_std = gpr.predict(scaler.transform(X_predict), return_std=True)\n gridMean = np.c_[gridMean, y_gpr]\n gridStd = np.c_[gridStd, y_std]\n print('Time for GPR prediction with standard-deviation: %.3f' % (time.\n time() - stime))\n ax.append(plt.figure().add_subplot(111, projection='3d'))\n ax[Ap].scatter(dataRaw[:, 0], dataRaw[:, 1], dataRaw[:, testAP], c='r')\n dataUndetect = dataAll[dataAll[:, testAP] == -100]\n ax[Ap].scatter(dataUndetect[:, 0], dataUndetect[:, 1], dataUndetect[:,\n testAP], c='b')\n ax[Ap].scatter(X_predict[:, 0], X_predict[:, 1], y_gpr[:], c='g')\n ax[Ap].set_zlabel('RSSI (dBm)', font1)\n ax[Ap].set_ylabel('Y (m)', font1)\n ax[Ap].set_xlabel('X (m)', font1)\n ax[Ap].legend(['measured data', 'undetected data', 'fingerprint map'],\n prop=font1, 
loc='lower center', bbox_to_anchor=(0.6, 0.95))\n if Ap == 2 * interval - 1:\n np.savetxt('csv/measured.csv', np.array(np.c_[dataRaw[:, 0],\n dataRaw[:, 1], dataRaw[:, testAP]]), fmt='%f', delimiter=',',\n newline='\\r\\n')\n np.savetxt('csv/recovered.csv', np.array(np.c_[reData[:, 0], reData\n [:, 1], reData[:, testAP]]), fmt='%f', delimiter=',', newline=\n '\\r\\n')\n np.savetxt('csv/undetected.csv', np.array(np.c_[dataUndetect[:, 0],\n dataUndetect[:, 1], dataUndetect[:, testAP]]), fmt='%f',\n delimiter=',', newline='\\r\\n')\n np.savetxt('csv/fingerprint.csv', np.array(np.c_[X_predict[:, 0],\n X_predict[:, 1], y_gpr[:]]), fmt='%f', delimiter=',', newline=\n '\\r\\n')\n plt.xticks(fontsize=15)\n plt.yticks(fontsize=15)\n print(gpr.kernel_)\nnp.savetxt('6b/GP/sample/3/means.csv', np.array(gridMean), fmt='%f',\n delimiter=',', newline='\\r\\n')\nnp.savetxt('6b/GP/sample/3/stds.csv', np.array(gridStd), fmt='%f',\n delimiter=',', newline='\\r\\n')\nplt.show()\n", "<docstring token>\n<import token>\n<assignment token>\n<code token>\n" ]
false
99,494
6abdf9fde9b86cd9be2591c387837f41ac3aef7f
# -*- coding: utf-8 -*-
from tkinter import *

root = Tk()

root.title('RadioButton')

root.geometry('400x300+400+20')

# Create the Frame
fm = Frame(root, width=50, height=8)
fm.pack()

# Create the text box inside the frame
text = Text(fm, width=50, height=8)
text.pack(side=LEFT, fill=Y)

str = 'On October 18, TÜV Rheinland, a global leader for independent inspection services, unveiled the plaque for its Internet-of-Things Excellence Center in Longhua, Shenzhen to mark its official opening.On October 18, TÜV Rheinland, a global leader for independent inspection services, unveiled the plaque for its Internet-of-Things Excellence Center in Longhua, Shenzhen to mark its official opening.On October 18, TÜV Rheinland, a global leader for independent inspection services, unveiled the plaque for its Internet-of-Things Excellence Center in Longhua, Shenzhen to mark its official opening.'

# Insert the string
text.insert(INSERT, str)

# Delete the text content
# text.delete('1.0', 'end')

# Create the scrollbar inside the frame
scroll = Scrollbar(fm)
scroll.pack(side=RIGHT, fill=Y)

# Make the text box and the scrollbar work with each other
scroll.config(command=text.yview)
text.config(yscrollcommand=scroll.set)



root.mainloop()
[ "# -*- coding: utf-8 -*-\nfrom tkinter import *\n\nroot = Tk()\n\nroot.title('RadioButton')\n\nroot.geometry('400x300+400+20')\n\n# 创建Frame\nfm = Frame(root, width=50, height=8)\nfm.pack()\n\n# 文本框在frame内创建\ntext = Text(fm, width=50, height=8)\ntext.pack(side=LEFT, fill=Y)\n\nstr = 'On October 18, TÜV Rheinland, a global leader for independent inspection services, unveiled the plaque for its Internet-of-Things Excellence Center in Longhua, Shenzhen to mark its official opening.On October 18, TÜV Rheinland, a global leader for independent inspection services, unveiled the plaque for its Internet-of-Things Excellence Center in Longhua, Shenzhen to mark its official opening.On October 18, TÜV Rheinland, a global leader for independent inspection services, unveiled the plaque for its Internet-of-Things Excellence Center in Longhua, Shenzhen to mark its official opening.'\n\n# 插入字符串\ntext.insert(INSERT, str)\n\n# 删除文本内容\n# text.delete('1.0', 'end')\n\n# 滚动条在frame内创建\nscroll = Scrollbar(fm)\nscroll.pack(side=RIGHT, fill=Y)\n\n# 是文本和滚动条相互生效\nscroll.config(command=text.yview)\ntext.config(yscrollcommand=scroll.set)\n\n\n\nroot.mainloop()", "from tkinter import *\nroot = Tk()\nroot.title('RadioButton')\nroot.geometry('400x300+400+20')\nfm = Frame(root, width=50, height=8)\nfm.pack()\ntext = Text(fm, width=50, height=8)\ntext.pack(side=LEFT, fill=Y)\nstr = (\n 'On October 18, TÜV Rheinland, a global leader for independent inspection services, unveiled the plaque for its Internet-of-Things Excellence Center in Longhua, Shenzhen to mark its official opening.On October 18, TÜV Rheinland, a global leader for independent inspection services, unveiled the plaque for its Internet-of-Things Excellence Center in Longhua, Shenzhen to mark its official opening.On October 18, TÜV Rheinland, a global leader for independent inspection services, unveiled the plaque for its Internet-of-Things Excellence Center in Longhua, Shenzhen to mark its official opening.'\n )\ntext.insert(INSERT, str)\nscroll = Scrollbar(fm)\nscroll.pack(side=RIGHT, fill=Y)\nscroll.config(command=text.yview)\ntext.config(yscrollcommand=scroll.set)\nroot.mainloop()\n", "<import token>\nroot = Tk()\nroot.title('RadioButton')\nroot.geometry('400x300+400+20')\nfm = Frame(root, width=50, height=8)\nfm.pack()\ntext = Text(fm, width=50, height=8)\ntext.pack(side=LEFT, fill=Y)\nstr = (\n 'On October 18, TÜV Rheinland, a global leader for independent inspection services, unveiled the plaque for its Internet-of-Things Excellence Center in Longhua, Shenzhen to mark its official opening.On October 18, TÜV Rheinland, a global leader for independent inspection services, unveiled the plaque for its Internet-of-Things Excellence Center in Longhua, Shenzhen to mark its official opening.On October 18, TÜV Rheinland, a global leader for independent inspection services, unveiled the plaque for its Internet-of-Things Excellence Center in Longhua, Shenzhen to mark its official opening.'\n )\ntext.insert(INSERT, str)\nscroll = Scrollbar(fm)\nscroll.pack(side=RIGHT, fill=Y)\nscroll.config(command=text.yview)\ntext.config(yscrollcommand=scroll.set)\nroot.mainloop()\n", "<import token>\n<assignment token>\nroot.title('RadioButton')\nroot.geometry('400x300+400+20')\n<assignment token>\nfm.pack()\n<assignment token>\ntext.pack(side=LEFT, fill=Y)\n<assignment token>\ntext.insert(INSERT, str)\n<assignment token>\nscroll.pack(side=RIGHT, fill=Y)\nscroll.config(command=text.yview)\ntext.config(yscrollcommand=scroll.set)\nroot.mainloop()\n", "<import token>\n<assignment 
token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
99,495
bb5be6ae74ccc71edf6055883fc9ccdfa48ee6e4
import csv import json def pair_entity_ratio(found_pair_set_len, entity_count): return found_pair_set_len / entity_count def precision_and_recall(found_pair_set, pos_pair_set, neg_pair_set=None): # if a neg_pair_set is provided, # consider the "universe" to be only the what's inside pos_pair_set and neg_pair_set, # because this means a previous blocking was applied if neg_pair_set is not None: found_pair_set = found_pair_set & (pos_pair_set | neg_pair_set) true_positives = found_pair_set & pos_pair_set false_positives = found_pair_set - pos_pair_set if true_positives: precision = len(true_positives) / (len(true_positives) + len(false_positives)) else: precision = 0.0 recall = len(true_positives) / len(pos_pair_set) return precision, recall def f1_score(precision, recall): if precision or recall: return (2 * precision * recall) / (precision + recall) else: return 0.0 def evaluate_output_json( unlabeled_csv_filepath, output_json_filepath, pos_pair_json_filepath, csv_encoding="utf-8" ): with open( unlabeled_csv_filepath, "r", newline="", encoding=csv_encoding ) as record_dict_csv_file: record_count = sum(1 for __ in csv.DictReader(record_dict_csv_file)) with open(output_json_filepath, "r") as f: found_pair_set = json.load(f) found_pair_set = set(tuple(t) for t in found_pair_set) with open(pos_pair_json_filepath, "r") as f: pos_pair_set = json.load(f) pos_pair_set = set(tuple(t) for t in pos_pair_set) precision, recall = precision_and_recall(found_pair_set, pos_pair_set) return ( precision, recall, f1_score(precision, recall), pair_entity_ratio(len(found_pair_set), record_count), )
[ "import csv\nimport json\n\n\ndef pair_entity_ratio(found_pair_set_len, entity_count):\n return found_pair_set_len / entity_count\n\n\ndef precision_and_recall(found_pair_set, pos_pair_set, neg_pair_set=None):\n # if a neg_pair_set is provided,\n # consider the \"universe\" to be only the what's inside pos_pair_set and neg_pair_set,\n # because this means a previous blocking was applied\n if neg_pair_set is not None:\n found_pair_set = found_pair_set & (pos_pair_set | neg_pair_set)\n\n true_positives = found_pair_set & pos_pair_set\n false_positives = found_pair_set - pos_pair_set\n if true_positives:\n precision = len(true_positives) / (len(true_positives) + len(false_positives))\n else:\n precision = 0.0\n recall = len(true_positives) / len(pos_pair_set)\n return precision, recall\n\n\ndef f1_score(precision, recall):\n if precision or recall:\n return (2 * precision * recall) / (precision + recall)\n else:\n return 0.0\n\n\ndef evaluate_output_json(\n unlabeled_csv_filepath, output_json_filepath, pos_pair_json_filepath, csv_encoding=\"utf-8\"\n):\n with open(\n unlabeled_csv_filepath, \"r\", newline=\"\", encoding=csv_encoding\n ) as record_dict_csv_file:\n record_count = sum(1 for __ in csv.DictReader(record_dict_csv_file))\n\n with open(output_json_filepath, \"r\") as f:\n found_pair_set = json.load(f)\n found_pair_set = set(tuple(t) for t in found_pair_set)\n\n with open(pos_pair_json_filepath, \"r\") as f:\n pos_pair_set = json.load(f)\n pos_pair_set = set(tuple(t) for t in pos_pair_set)\n\n precision, recall = precision_and_recall(found_pair_set, pos_pair_set)\n return (\n precision,\n recall,\n f1_score(precision, recall),\n pair_entity_ratio(len(found_pair_set), record_count),\n )\n", "import csv\nimport json\n\n\ndef pair_entity_ratio(found_pair_set_len, entity_count):\n return found_pair_set_len / entity_count\n\n\ndef precision_and_recall(found_pair_set, pos_pair_set, neg_pair_set=None):\n if neg_pair_set is not None:\n found_pair_set = found_pair_set & (pos_pair_set | neg_pair_set)\n true_positives = found_pair_set & pos_pair_set\n false_positives = found_pair_set - pos_pair_set\n if true_positives:\n precision = len(true_positives) / (len(true_positives) + len(\n false_positives))\n else:\n precision = 0.0\n recall = len(true_positives) / len(pos_pair_set)\n return precision, recall\n\n\ndef f1_score(precision, recall):\n if precision or recall:\n return 2 * precision * recall / (precision + recall)\n else:\n return 0.0\n\n\ndef evaluate_output_json(unlabeled_csv_filepath, output_json_filepath,\n pos_pair_json_filepath, csv_encoding='utf-8'):\n with open(unlabeled_csv_filepath, 'r', newline='', encoding=csv_encoding\n ) as record_dict_csv_file:\n record_count = sum(1 for __ in csv.DictReader(record_dict_csv_file))\n with open(output_json_filepath, 'r') as f:\n found_pair_set = json.load(f)\n found_pair_set = set(tuple(t) for t in found_pair_set)\n with open(pos_pair_json_filepath, 'r') as f:\n pos_pair_set = json.load(f)\n pos_pair_set = set(tuple(t) for t in pos_pair_set)\n precision, recall = precision_and_recall(found_pair_set, pos_pair_set)\n return precision, recall, f1_score(precision, recall), pair_entity_ratio(\n len(found_pair_set), record_count)\n", "<import token>\n\n\ndef pair_entity_ratio(found_pair_set_len, entity_count):\n return found_pair_set_len / entity_count\n\n\ndef precision_and_recall(found_pair_set, pos_pair_set, neg_pair_set=None):\n if neg_pair_set is not None:\n found_pair_set = found_pair_set & (pos_pair_set | neg_pair_set)\n true_positives = 
found_pair_set & pos_pair_set\n false_positives = found_pair_set - pos_pair_set\n if true_positives:\n precision = len(true_positives) / (len(true_positives) + len(\n false_positives))\n else:\n precision = 0.0\n recall = len(true_positives) / len(pos_pair_set)\n return precision, recall\n\n\ndef f1_score(precision, recall):\n if precision or recall:\n return 2 * precision * recall / (precision + recall)\n else:\n return 0.0\n\n\ndef evaluate_output_json(unlabeled_csv_filepath, output_json_filepath,\n pos_pair_json_filepath, csv_encoding='utf-8'):\n with open(unlabeled_csv_filepath, 'r', newline='', encoding=csv_encoding\n ) as record_dict_csv_file:\n record_count = sum(1 for __ in csv.DictReader(record_dict_csv_file))\n with open(output_json_filepath, 'r') as f:\n found_pair_set = json.load(f)\n found_pair_set = set(tuple(t) for t in found_pair_set)\n with open(pos_pair_json_filepath, 'r') as f:\n pos_pair_set = json.load(f)\n pos_pair_set = set(tuple(t) for t in pos_pair_set)\n precision, recall = precision_and_recall(found_pair_set, pos_pair_set)\n return precision, recall, f1_score(precision, recall), pair_entity_ratio(\n len(found_pair_set), record_count)\n", "<import token>\n\n\ndef pair_entity_ratio(found_pair_set_len, entity_count):\n return found_pair_set_len / entity_count\n\n\ndef precision_and_recall(found_pair_set, pos_pair_set, neg_pair_set=None):\n if neg_pair_set is not None:\n found_pair_set = found_pair_set & (pos_pair_set | neg_pair_set)\n true_positives = found_pair_set & pos_pair_set\n false_positives = found_pair_set - pos_pair_set\n if true_positives:\n precision = len(true_positives) / (len(true_positives) + len(\n false_positives))\n else:\n precision = 0.0\n recall = len(true_positives) / len(pos_pair_set)\n return precision, recall\n\n\ndef f1_score(precision, recall):\n if precision or recall:\n return 2 * precision * recall / (precision + recall)\n else:\n return 0.0\n\n\n<function token>\n", "<import token>\n\n\ndef pair_entity_ratio(found_pair_set_len, entity_count):\n return found_pair_set_len / entity_count\n\n\ndef precision_and_recall(found_pair_set, pos_pair_set, neg_pair_set=None):\n if neg_pair_set is not None:\n found_pair_set = found_pair_set & (pos_pair_set | neg_pair_set)\n true_positives = found_pair_set & pos_pair_set\n false_positives = found_pair_set - pos_pair_set\n if true_positives:\n precision = len(true_positives) / (len(true_positives) + len(\n false_positives))\n else:\n precision = 0.0\n recall = len(true_positives) / len(pos_pair_set)\n return precision, recall\n\n\n<function token>\n<function token>\n", "<import token>\n<function token>\n\n\ndef precision_and_recall(found_pair_set, pos_pair_set, neg_pair_set=None):\n if neg_pair_set is not None:\n found_pair_set = found_pair_set & (pos_pair_set | neg_pair_set)\n true_positives = found_pair_set & pos_pair_set\n false_positives = found_pair_set - pos_pair_set\n if true_positives:\n precision = len(true_positives) / (len(true_positives) + len(\n false_positives))\n else:\n precision = 0.0\n recall = len(true_positives) / len(pos_pair_set)\n return precision, recall\n\n\n<function token>\n<function token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n" ]
false
99,496
2d58ba49074e5af44fa7979ca2c0c8c98c38f9d2
import os import sys import battlefield import bombfield import ship import player nth = { 1: "primero", 2: "segundo", 3: "terceiro", 4: "quarto", 5: "quinto", 6: "sexto", 7: "setimo", 8: "oitavo" } rowlist = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14"] class Game: def clear(self): os.system('cls' if os.name == 'nt' else 'clear') def __init__(self): self.p1 = "" self.p2 = "" self.p1Field = battlefield.Battlefield() self.p2Field = battlefield.Battlefield() self.p1BombField = bombfield.Bombfield() self.p2BombField = bombfield.Bombfield() self.ships = [] self.ships.append(ship.Ship(5)) self.ships.append(ship.Ship(4)) self.ships.append(ship.Ship(4)) self.ships.append(ship.Ship(2)) self.ships.append(ship.Ship(2)) self.ships.append(ship.Ship(2)) self.ships.append(ship.Ship(1)) self.ships.append(ship.Ship(1)) def columnExist(self, column): if ("A" <= column <= "N"): return True else: return False def rowExist(self, row): if (1 <= row <= 14): return True else: return False def printfield(self, f): l = [' ', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N'] spacing = ' '.join(['{:<2}'] * len(l)) text = spacing.format(*l) for v in range(1, len(l)): text += "\n" + spacing.format(v, f['A'][v], f['B'][v], f['C'][v], f['D'][v], f['E'][v], f['F'][v], f['G'][v], f['H'][v], f['I'][v], f['J'][v], f['K'][v], f['L'][v], f['M'][v], f['N'][ v]) return text def placeShips(self, player): counter = 1 print(player.name + ", coloque seus navios na posição inicial,\n") print("Depois diga a direção (right, left, up ou down)\n") print(self.printfield(player.field.field)) for x in player.ships: column = "" row = "" direction = "" cellBusy = True pff = player.field.field while self.columnExist(column) == False or row not in rowlist or cellBusy == True: userInput = input( player.name + ", em que celula (A-N)(1-14) você quer colocar sua " + nth[counter] + " barca?\n") if (len(userInput) >= 2): column = userInput[0].upper() row = userInput[1] if len(userInput) >= 3: row += userInput[2] if (self.columnExist(column) and row in rowlist): cellBusy = pff[column][int(row)] row = int(row) newrow = row newcolumn = column if len(x.parts)==1: pff[newcolumn][newrow] = True else: while ( direction != "right" and direction != "left" and direction != "up" and direction != "down") or self.rowExist( newrow) == False or self.columnExist(newcolumn) == False or cellBusy == True: direction = input(player.name + ", qual direção (right, left, up or down) seu barco " + nth[ counter] + " está virado?\n") cellBusy = False partCounter = 0 for y in range(len(x.parts)): newcolumn = column newrow = row if (direction == "down"): newrow = row + partCounter elif (direction == "up"): newrow = row - partCounter elif (direction == "left"): newcolumn = chr(ord(column) - partCounter) elif (direction == "right"): newcolumn = chr(ord(column) + partCounter) partCounter += 1 if self.columnExist(newcolumn) and self.rowExist(newrow): if pff[newcolumn][newrow] == True: cellBusy = pff[newcolumn][newrow] elif pff[newcolumn][newrow] == False and partCounter == len(x.parts): for p in range(0, partCounter): if (ord(newcolumn) < ord(column)): pff[chr(ord(column) - p)][newrow] = True elif (ord(newcolumn) > ord(column)): pff[chr(ord(column) + p)][newrow] = True elif (newrow < row): pff[newcolumn][newrow + p] = True elif (newrow > row): pff[newcolumn][newrow - p] = True self.clear() print(self.printfield(player.field.field)) counter += 1 def newPlayer(self, n, ships, field, bombfield): newName = input("Player " + str(n) + 
",qual teu nick?\n") while newName == "": newName = input("Digita ai mano\n") self.clear() p = player.Player(newName, ships[:], field, bombfield) self.placeShips(p) return p def anythingLeft(self, d): newList = [] def myprint(d): for k, v in d.items(): if isinstance(v, dict): myprint(v) else: newList.append(v) myprint(d) return True in newList def selectCell(self, player): column = "" row = "" while self.columnExist(column) == False or row not in rowlist: userInput = input(player.name + ", onde (A-N)(1-14) tu quer mandar o pipoco?\n") if (len(userInput) < 2): column = "" row = "" else: column = userInput[0].upper() row = userInput[1] if len(userInput) == 3: row += userInput[2] return [column, row] def bomb(self, player, enemy, column, row): eff = enemy.field.field self.result = '' row = int(row) if (eff[column][row] == True): self.result = 'X' eff[column][row] = 'X' player.bombfield.field[column][row] = 'X' if self.anythingLeft(eff) == False: self.result = player.name + " wins!" else: self.result = 'O' eff[column][row] = '@' if player.bombfield.field[column][row] != 'X': player.bombfield.field[column][row] = 'O' def start(self): while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self.p2.field.field): print('Teu campo:\n') print(self.printfield(self.p1.field.field)) print('\nCampo delas:\n') print(self.printfield(self.p1.bombfield.field)) cell = self.selectCell(self.p1) self.bomb(self.p1, self.p2, cell[0], cell[1]) self.clear() if self.result == 'X': print('ACERTOU CARA!') elif self.result == 'O': print('ERROOOOOU!') else: print(self.result) sys.exit() # Exit the application print(self.printfield(self.p1.bombfield.field)) input('aperta enter men') self.clear() if self.anythingLeft(self.p1.field.field) and self.anythingLeft(self.p2.field.field): print('Teu campo:\n') print(self.printfield(self.p2.field.field)) print('\nCampo do babaca la:\n') print(self.printfield(self.p2.bombfield.field)) cell = self.selectCell(self.p2) self.bomb(self.p2, self.p1, cell[0], cell[1]) self.clear() if self.result == 'X': print('Acertou, mizera!') elif self.result == 'O': print('Errou de novo pora!') else: print(self.result) sys.exit() input('Aperta enter parça') self.clear()
[ "import os\r\nimport sys\r\nimport battlefield\r\nimport bombfield\r\nimport ship\r\nimport player\r\nnth = {\r\n 1: \"primero\",\r\n 2: \"segundo\",\r\n 3: \"terceiro\",\r\n 4: \"quarto\",\r\n 5: \"quinto\",\r\n 6: \"sexto\",\r\n 7: \"setimo\",\r\n 8: \"oitavo\"\r\n}\r\n\r\nrowlist = [\"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\", \"9\", \"10\", \"11\", \"12\", \"13\", \"14\"]\r\n\r\n\r\nclass Game:\r\n\r\n def clear(self):\r\n os.system('cls' if os.name == 'nt' else 'clear')\r\n\r\n def __init__(self):\r\n self.p1 = \"\"\r\n self.p2 = \"\"\r\n\r\n self.p1Field = battlefield.Battlefield()\r\n self.p2Field = battlefield.Battlefield()\r\n self.p1BombField = bombfield.Bombfield()\r\n self.p2BombField = bombfield.Bombfield()\r\n\r\n self.ships = []\r\n self.ships.append(ship.Ship(5))\r\n self.ships.append(ship.Ship(4))\r\n self.ships.append(ship.Ship(4))\r\n self.ships.append(ship.Ship(2))\r\n self.ships.append(ship.Ship(2))\r\n self.ships.append(ship.Ship(2))\r\n self.ships.append(ship.Ship(1))\r\n self.ships.append(ship.Ship(1))\r\n\r\n def columnExist(self, column):\r\n if (\"A\" <= column <= \"N\"):\r\n return True\r\n else:\r\n return False\r\n\r\n def rowExist(self, row):\r\n if (1 <= row <= 14):\r\n return True\r\n else:\r\n return False\r\n\r\n def printfield(self, f):\r\n\r\n l = [' ', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N']\r\n spacing = ' '.join(['{:<2}'] * len(l))\r\n text = spacing.format(*l)\r\n for v in range(1, len(l)):\r\n text += \"\\n\" + spacing.format(v, f['A'][v], f['B'][v], f['C'][v], f['D'][v], f['E'][v], f['F'][v],\r\n f['G'][v], f['H'][v], f['I'][v], f['J'][v], f['K'][v], f['L'][v], f['M'][v],\r\n f['N'][\r\n v])\r\n\r\n return text\r\n\r\n def placeShips(self, player):\r\n counter = 1\r\n\r\n print(player.name + \", coloque seus navios na posição inicial,\\n\")\r\n print(\"Depois diga a direção (right, left, up ou down)\\n\")\r\n\r\n print(self.printfield(player.field.field))\r\n\r\n for x in player.ships:\r\n column = \"\"\r\n row = \"\"\r\n direction = \"\"\r\n cellBusy = True\r\n pff = player.field.field\r\n while self.columnExist(column) == False or row not in rowlist or cellBusy == True:\r\n userInput = input(\r\n player.name + \", em que celula (A-N)(1-14) você quer colocar sua \" + nth[counter] + \" barca?\\n\")\r\n if (len(userInput) >= 2):\r\n column = userInput[0].upper()\r\n row = userInput[1]\r\n if len(userInput) >= 3:\r\n row += userInput[2]\r\n if (self.columnExist(column) and row in rowlist):\r\n cellBusy = pff[column][int(row)]\r\n\r\n row = int(row)\r\n\r\n newrow = row\r\n newcolumn = column\r\n if len(x.parts)==1:\r\n pff[newcolumn][newrow] = True\r\n else:\r\n while (\r\n direction != \"right\" and direction != \"left\" and direction != \"up\" and direction != \"down\") or self.rowExist(\r\n newrow) == False or self.columnExist(newcolumn) == False or cellBusy == True:\r\n direction = input(player.name + \", qual direção (right, left, up or down) seu barco \" + nth[\r\n counter] + \" está virado?\\n\")\r\n cellBusy = False\r\n partCounter = 0\r\n\r\n for y in range(len(x.parts)):\r\n newcolumn = column\r\n newrow = row\r\n if (direction == \"down\"):\r\n newrow = row + partCounter\r\n\r\n elif (direction == \"up\"):\r\n newrow = row - partCounter\r\n\r\n elif (direction == \"left\"):\r\n newcolumn = chr(ord(column) - partCounter)\r\n\r\n elif (direction == \"right\"):\r\n newcolumn = chr(ord(column) + partCounter)\r\n\r\n partCounter += 1\r\n if self.columnExist(newcolumn) and self.rowExist(newrow):\r\n if 
pff[newcolumn][newrow] == True:\r\n cellBusy = pff[newcolumn][newrow]\r\n\r\n elif pff[newcolumn][newrow] == False and partCounter == len(x.parts):\r\n for p in range(0, partCounter):\r\n if (ord(newcolumn) < ord(column)):\r\n pff[chr(ord(column) - p)][newrow] = True\r\n elif (ord(newcolumn) > ord(column)):\r\n pff[chr(ord(column) + p)][newrow] = True\r\n elif (newrow < row):\r\n pff[newcolumn][newrow + p] = True\r\n elif (newrow > row):\r\n pff[newcolumn][newrow - p] = True\r\n\r\n self.clear()\r\n print(self.printfield(player.field.field))\r\n counter += 1\r\n\r\n def newPlayer(self, n, ships, field, bombfield):\r\n newName = input(\"Player \" + str(n) + \",qual teu nick?\\n\")\r\n while newName == \"\":\r\n newName = input(\"Digita ai mano\\n\")\r\n self.clear()\r\n p = player.Player(newName, ships[:], field, bombfield)\r\n\r\n self.placeShips(p)\r\n return p\r\n\r\n def anythingLeft(self, d):\r\n newList = []\r\n\r\n def myprint(d):\r\n for k, v in d.items():\r\n if isinstance(v, dict):\r\n myprint(v)\r\n else:\r\n newList.append(v)\r\n\r\n myprint(d)\r\n return True in newList\r\n\r\n def selectCell(self, player):\r\n column = \"\"\r\n row = \"\"\r\n while self.columnExist(column) == False or row not in rowlist:\r\n userInput = input(player.name + \", onde (A-N)(1-14) tu quer mandar o pipoco?\\n\")\r\n\r\n if (len(userInput) < 2):\r\n column = \"\"\r\n row = \"\"\r\n else:\r\n column = userInput[0].upper()\r\n row = userInput[1]\r\n if len(userInput) == 3:\r\n row += userInput[2]\r\n\r\n return [column, row]\r\n\r\n def bomb(self, player, enemy, column, row):\r\n eff = enemy.field.field\r\n self.result = ''\r\n\r\n row = int(row)\r\n if (eff[column][row] == True):\r\n self.result = 'X'\r\n eff[column][row] = 'X'\r\n player.bombfield.field[column][row] = 'X'\r\n\r\n if self.anythingLeft(eff) == False:\r\n self.result = player.name + \" wins!\"\r\n else:\r\n self.result = 'O'\r\n eff[column][row] = '@'\r\n if player.bombfield.field[column][row] != 'X':\r\n player.bombfield.field[column][row] = 'O'\r\n\r\n def start(self):\r\n while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self.p2.field.field):\r\n print('Teu campo:\\n')\r\n print(self.printfield(self.p1.field.field))\r\n print('\\nCampo delas:\\n')\r\n print(self.printfield(self.p1.bombfield.field))\r\n cell = self.selectCell(self.p1)\r\n self.bomb(self.p1, self.p2, cell[0], cell[1])\r\n self.clear()\r\n\r\n if self.result == 'X':\r\n print('ACERTOU CARA!')\r\n elif self.result == 'O':\r\n print('ERROOOOOU!')\r\n else:\r\n print(self.result)\r\n sys.exit() # Exit the application\r\n\r\n print(self.printfield(self.p1.bombfield.field))\r\n\r\n input('aperta enter men')\r\n self.clear()\r\n\r\n if self.anythingLeft(self.p1.field.field) and self.anythingLeft(self.p2.field.field):\r\n print('Teu campo:\\n')\r\n print(self.printfield(self.p2.field.field))\r\n print('\\nCampo do babaca la:\\n')\r\n print(self.printfield(self.p2.bombfield.field))\r\n cell = self.selectCell(self.p2)\r\n self.bomb(self.p2, self.p1, cell[0], cell[1])\r\n self.clear()\r\n\r\n if self.result == 'X':\r\n print('Acertou, mizera!')\r\n elif self.result == 'O':\r\n print('Errou de novo pora!')\r\n else:\r\n print(self.result)\r\n sys.exit()\r\n\r\n input('Aperta enter parça')\r\n self.clear()", "import os\nimport sys\nimport battlefield\nimport bombfield\nimport ship\nimport player\nnth = {(1): 'primero', (2): 'segundo', (3): 'terceiro', (4): 'quarto', (5):\n 'quinto', (6): 'sexto', (7): 'setimo', (8): 'oitavo'}\nrowlist = ['1', '2', '3', '4', '5', 
'6', '7', '8', '9', '10', '11', '12',\n '13', '14']\n\n\nclass Game:\n\n def clear(self):\n os.system('cls' if os.name == 'nt' else 'clear')\n\n def __init__(self):\n self.p1 = ''\n self.p2 = ''\n self.p1Field = battlefield.Battlefield()\n self.p2Field = battlefield.Battlefield()\n self.p1BombField = bombfield.Bombfield()\n self.p2BombField = bombfield.Bombfield()\n self.ships = []\n self.ships.append(ship.Ship(5))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(1))\n self.ships.append(ship.Ship(1))\n\n def columnExist(self, column):\n if 'A' <= column <= 'N':\n return True\n else:\n return False\n\n def rowExist(self, row):\n if 1 <= row <= 14:\n return True\n else:\n return False\n\n def printfield(self, f):\n l = [' ', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K',\n 'L', 'M', 'N']\n spacing = ' '.join(['{:<2}'] * len(l))\n text = spacing.format(*l)\n for v in range(1, len(l)):\n text += '\\n' + spacing.format(v, f['A'][v], f['B'][v], f['C'][v\n ], f['D'][v], f['E'][v], f['F'][v], f['G'][v], f['H'][v], f\n ['I'][v], f['J'][v], f['K'][v], f['L'][v], f['M'][v], f['N'][v]\n )\n return text\n\n def placeShips(self, player):\n counter = 1\n print(player.name + ', coloque seus navios na posição inicial,\\n')\n print('Depois diga a direção (right, left, up ou down)\\n')\n print(self.printfield(player.field.field))\n for x in player.ships:\n column = ''\n row = ''\n direction = ''\n cellBusy = True\n pff = player.field.field\n while self.columnExist(column\n ) == False or row not in rowlist or cellBusy == True:\n userInput = input(player.name +\n ', em que celula (A-N)(1-14) você quer colocar sua ' +\n nth[counter] + ' barca?\\n')\n if len(userInput) >= 2:\n column = userInput[0].upper()\n row = userInput[1]\n if len(userInput) >= 3:\n row += userInput[2]\n if self.columnExist(column) and row in rowlist:\n cellBusy = pff[column][int(row)]\n row = int(row)\n newrow = row\n newcolumn = column\n if len(x.parts) == 1:\n pff[newcolumn][newrow] = True\n else:\n while (direction != 'right' and direction != 'left' and \n direction != 'up' and direction != 'down' or self.\n rowExist(newrow) == False or self.columnExist(newcolumn\n ) == False or cellBusy == True):\n direction = input(player.name +\n ', qual direção (right, left, up or down) seu barco ' +\n nth[counter] + ' está virado?\\n')\n cellBusy = False\n partCounter = 0\n for y in range(len(x.parts)):\n newcolumn = column\n newrow = row\n if direction == 'down':\n newrow = row + partCounter\n elif direction == 'up':\n newrow = row - partCounter\n elif direction == 'left':\n newcolumn = chr(ord(column) - partCounter)\n elif direction == 'right':\n newcolumn = chr(ord(column) + partCounter)\n partCounter += 1\n if self.columnExist(newcolumn) and self.rowExist(newrow\n ):\n if pff[newcolumn][newrow] == True:\n cellBusy = pff[newcolumn][newrow]\n elif pff[newcolumn][newrow\n ] == False and partCounter == len(x.parts):\n for p in range(0, partCounter):\n if ord(newcolumn) < ord(column):\n pff[chr(ord(column) - p)][newrow] = True\n elif ord(newcolumn) > ord(column):\n pff[chr(ord(column) + p)][newrow] = True\n elif newrow < row:\n pff[newcolumn][newrow + p] = True\n elif newrow > row:\n pff[newcolumn][newrow - p] = True\n self.clear()\n print(self.printfield(player.field.field))\n counter += 1\n\n def newPlayer(self, n, ships, field, bombfield):\n newName = input('Player ' + str(n) + ',qual teu 
nick?\\n')\n while newName == '':\n newName = input('Digita ai mano\\n')\n self.clear()\n p = player.Player(newName, ships[:], field, bombfield)\n self.placeShips(p)\n return p\n\n def anythingLeft(self, d):\n newList = []\n\n def myprint(d):\n for k, v in d.items():\n if isinstance(v, dict):\n myprint(v)\n else:\n newList.append(v)\n myprint(d)\n return True in newList\n\n def selectCell(self, player):\n column = ''\n row = ''\n while self.columnExist(column) == False or row not in rowlist:\n userInput = input(player.name +\n ', onde (A-N)(1-14) tu quer mandar o pipoco?\\n')\n if len(userInput) < 2:\n column = ''\n row = ''\n else:\n column = userInput[0].upper()\n row = userInput[1]\n if len(userInput) == 3:\n row += userInput[2]\n return [column, row]\n\n def bomb(self, player, enemy, column, row):\n eff = enemy.field.field\n self.result = ''\n row = int(row)\n if eff[column][row] == True:\n self.result = 'X'\n eff[column][row] = 'X'\n player.bombfield.field[column][row] = 'X'\n if self.anythingLeft(eff) == False:\n self.result = player.name + ' wins!'\n else:\n self.result = 'O'\n eff[column][row] = '@'\n if player.bombfield.field[column][row] != 'X':\n player.bombfield.field[column][row] = 'O'\n\n def start(self):\n while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self\n .p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p1.field.field))\n print('\\nCampo delas:\\n')\n print(self.printfield(self.p1.bombfield.field))\n cell = self.selectCell(self.p1)\n self.bomb(self.p1, self.p2, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('ACERTOU CARA!')\n elif self.result == 'O':\n print('ERROOOOOU!')\n else:\n print(self.result)\n sys.exit()\n print(self.printfield(self.p1.bombfield.field))\n input('aperta enter men')\n self.clear()\n if self.anythingLeft(self.p1.field.field) and self.anythingLeft(\n self.p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p2.field.field))\n print('\\nCampo do babaca la:\\n')\n print(self.printfield(self.p2.bombfield.field))\n cell = self.selectCell(self.p2)\n self.bomb(self.p2, self.p1, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('Acertou, mizera!')\n elif self.result == 'O':\n print('Errou de novo pora!')\n else:\n print(self.result)\n sys.exit()\n input('Aperta enter parça')\n self.clear()\n", "<import token>\nnth = {(1): 'primero', (2): 'segundo', (3): 'terceiro', (4): 'quarto', (5):\n 'quinto', (6): 'sexto', (7): 'setimo', (8): 'oitavo'}\nrowlist = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12',\n '13', '14']\n\n\nclass Game:\n\n def clear(self):\n os.system('cls' if os.name == 'nt' else 'clear')\n\n def __init__(self):\n self.p1 = ''\n self.p2 = ''\n self.p1Field = battlefield.Battlefield()\n self.p2Field = battlefield.Battlefield()\n self.p1BombField = bombfield.Bombfield()\n self.p2BombField = bombfield.Bombfield()\n self.ships = []\n self.ships.append(ship.Ship(5))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(1))\n self.ships.append(ship.Ship(1))\n\n def columnExist(self, column):\n if 'A' <= column <= 'N':\n return True\n else:\n return False\n\n def rowExist(self, row):\n if 1 <= row <= 14:\n return True\n else:\n return False\n\n def printfield(self, f):\n l = [' ', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K',\n 'L', 'M', 'N']\n spacing = ' '.join(['{:<2}'] * len(l))\n text = 
spacing.format(*l)\n for v in range(1, len(l)):\n text += '\\n' + spacing.format(v, f['A'][v], f['B'][v], f['C'][v\n ], f['D'][v], f['E'][v], f['F'][v], f['G'][v], f['H'][v], f\n ['I'][v], f['J'][v], f['K'][v], f['L'][v], f['M'][v], f['N'][v]\n )\n return text\n\n def placeShips(self, player):\n counter = 1\n print(player.name + ', coloque seus navios na posição inicial,\\n')\n print('Depois diga a direção (right, left, up ou down)\\n')\n print(self.printfield(player.field.field))\n for x in player.ships:\n column = ''\n row = ''\n direction = ''\n cellBusy = True\n pff = player.field.field\n while self.columnExist(column\n ) == False or row not in rowlist or cellBusy == True:\n userInput = input(player.name +\n ', em que celula (A-N)(1-14) você quer colocar sua ' +\n nth[counter] + ' barca?\\n')\n if len(userInput) >= 2:\n column = userInput[0].upper()\n row = userInput[1]\n if len(userInput) >= 3:\n row += userInput[2]\n if self.columnExist(column) and row in rowlist:\n cellBusy = pff[column][int(row)]\n row = int(row)\n newrow = row\n newcolumn = column\n if len(x.parts) == 1:\n pff[newcolumn][newrow] = True\n else:\n while (direction != 'right' and direction != 'left' and \n direction != 'up' and direction != 'down' or self.\n rowExist(newrow) == False or self.columnExist(newcolumn\n ) == False or cellBusy == True):\n direction = input(player.name +\n ', qual direção (right, left, up or down) seu barco ' +\n nth[counter] + ' está virado?\\n')\n cellBusy = False\n partCounter = 0\n for y in range(len(x.parts)):\n newcolumn = column\n newrow = row\n if direction == 'down':\n newrow = row + partCounter\n elif direction == 'up':\n newrow = row - partCounter\n elif direction == 'left':\n newcolumn = chr(ord(column) - partCounter)\n elif direction == 'right':\n newcolumn = chr(ord(column) + partCounter)\n partCounter += 1\n if self.columnExist(newcolumn) and self.rowExist(newrow\n ):\n if pff[newcolumn][newrow] == True:\n cellBusy = pff[newcolumn][newrow]\n elif pff[newcolumn][newrow\n ] == False and partCounter == len(x.parts):\n for p in range(0, partCounter):\n if ord(newcolumn) < ord(column):\n pff[chr(ord(column) - p)][newrow] = True\n elif ord(newcolumn) > ord(column):\n pff[chr(ord(column) + p)][newrow] = True\n elif newrow < row:\n pff[newcolumn][newrow + p] = True\n elif newrow > row:\n pff[newcolumn][newrow - p] = True\n self.clear()\n print(self.printfield(player.field.field))\n counter += 1\n\n def newPlayer(self, n, ships, field, bombfield):\n newName = input('Player ' + str(n) + ',qual teu nick?\\n')\n while newName == '':\n newName = input('Digita ai mano\\n')\n self.clear()\n p = player.Player(newName, ships[:], field, bombfield)\n self.placeShips(p)\n return p\n\n def anythingLeft(self, d):\n newList = []\n\n def myprint(d):\n for k, v in d.items():\n if isinstance(v, dict):\n myprint(v)\n else:\n newList.append(v)\n myprint(d)\n return True in newList\n\n def selectCell(self, player):\n column = ''\n row = ''\n while self.columnExist(column) == False or row not in rowlist:\n userInput = input(player.name +\n ', onde (A-N)(1-14) tu quer mandar o pipoco?\\n')\n if len(userInput) < 2:\n column = ''\n row = ''\n else:\n column = userInput[0].upper()\n row = userInput[1]\n if len(userInput) == 3:\n row += userInput[2]\n return [column, row]\n\n def bomb(self, player, enemy, column, row):\n eff = enemy.field.field\n self.result = ''\n row = int(row)\n if eff[column][row] == True:\n self.result = 'X'\n eff[column][row] = 'X'\n player.bombfield.field[column][row] = 'X'\n if 
self.anythingLeft(eff) == False:\n self.result = player.name + ' wins!'\n else:\n self.result = 'O'\n eff[column][row] = '@'\n if player.bombfield.field[column][row] != 'X':\n player.bombfield.field[column][row] = 'O'\n\n def start(self):\n while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self\n .p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p1.field.field))\n print('\\nCampo delas:\\n')\n print(self.printfield(self.p1.bombfield.field))\n cell = self.selectCell(self.p1)\n self.bomb(self.p1, self.p2, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('ACERTOU CARA!')\n elif self.result == 'O':\n print('ERROOOOOU!')\n else:\n print(self.result)\n sys.exit()\n print(self.printfield(self.p1.bombfield.field))\n input('aperta enter men')\n self.clear()\n if self.anythingLeft(self.p1.field.field) and self.anythingLeft(\n self.p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p2.field.field))\n print('\\nCampo do babaca la:\\n')\n print(self.printfield(self.p2.bombfield.field))\n cell = self.selectCell(self.p2)\n self.bomb(self.p2, self.p1, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('Acertou, mizera!')\n elif self.result == 'O':\n print('Errou de novo pora!')\n else:\n print(self.result)\n sys.exit()\n input('Aperta enter parça')\n self.clear()\n", "<import token>\n<assignment token>\n\n\nclass Game:\n\n def clear(self):\n os.system('cls' if os.name == 'nt' else 'clear')\n\n def __init__(self):\n self.p1 = ''\n self.p2 = ''\n self.p1Field = battlefield.Battlefield()\n self.p2Field = battlefield.Battlefield()\n self.p1BombField = bombfield.Bombfield()\n self.p2BombField = bombfield.Bombfield()\n self.ships = []\n self.ships.append(ship.Ship(5))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(1))\n self.ships.append(ship.Ship(1))\n\n def columnExist(self, column):\n if 'A' <= column <= 'N':\n return True\n else:\n return False\n\n def rowExist(self, row):\n if 1 <= row <= 14:\n return True\n else:\n return False\n\n def printfield(self, f):\n l = [' ', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K',\n 'L', 'M', 'N']\n spacing = ' '.join(['{:<2}'] * len(l))\n text = spacing.format(*l)\n for v in range(1, len(l)):\n text += '\\n' + spacing.format(v, f['A'][v], f['B'][v], f['C'][v\n ], f['D'][v], f['E'][v], f['F'][v], f['G'][v], f['H'][v], f\n ['I'][v], f['J'][v], f['K'][v], f['L'][v], f['M'][v], f['N'][v]\n )\n return text\n\n def placeShips(self, player):\n counter = 1\n print(player.name + ', coloque seus navios na posição inicial,\\n')\n print('Depois diga a direção (right, left, up ou down)\\n')\n print(self.printfield(player.field.field))\n for x in player.ships:\n column = ''\n row = ''\n direction = ''\n cellBusy = True\n pff = player.field.field\n while self.columnExist(column\n ) == False or row not in rowlist or cellBusy == True:\n userInput = input(player.name +\n ', em que celula (A-N)(1-14) você quer colocar sua ' +\n nth[counter] + ' barca?\\n')\n if len(userInput) >= 2:\n column = userInput[0].upper()\n row = userInput[1]\n if len(userInput) >= 3:\n row += userInput[2]\n if self.columnExist(column) and row in rowlist:\n cellBusy = pff[column][int(row)]\n row = int(row)\n newrow = row\n newcolumn = column\n if len(x.parts) == 1:\n pff[newcolumn][newrow] = True\n else:\n while (direction != 'right' and direction != 'left' and \n 
direction != 'up' and direction != 'down' or self.\n rowExist(newrow) == False or self.columnExist(newcolumn\n ) == False or cellBusy == True):\n direction = input(player.name +\n ', qual direção (right, left, up or down) seu barco ' +\n nth[counter] + ' está virado?\\n')\n cellBusy = False\n partCounter = 0\n for y in range(len(x.parts)):\n newcolumn = column\n newrow = row\n if direction == 'down':\n newrow = row + partCounter\n elif direction == 'up':\n newrow = row - partCounter\n elif direction == 'left':\n newcolumn = chr(ord(column) - partCounter)\n elif direction == 'right':\n newcolumn = chr(ord(column) + partCounter)\n partCounter += 1\n if self.columnExist(newcolumn) and self.rowExist(newrow\n ):\n if pff[newcolumn][newrow] == True:\n cellBusy = pff[newcolumn][newrow]\n elif pff[newcolumn][newrow\n ] == False and partCounter == len(x.parts):\n for p in range(0, partCounter):\n if ord(newcolumn) < ord(column):\n pff[chr(ord(column) - p)][newrow] = True\n elif ord(newcolumn) > ord(column):\n pff[chr(ord(column) + p)][newrow] = True\n elif newrow < row:\n pff[newcolumn][newrow + p] = True\n elif newrow > row:\n pff[newcolumn][newrow - p] = True\n self.clear()\n print(self.printfield(player.field.field))\n counter += 1\n\n def newPlayer(self, n, ships, field, bombfield):\n newName = input('Player ' + str(n) + ',qual teu nick?\\n')\n while newName == '':\n newName = input('Digita ai mano\\n')\n self.clear()\n p = player.Player(newName, ships[:], field, bombfield)\n self.placeShips(p)\n return p\n\n def anythingLeft(self, d):\n newList = []\n\n def myprint(d):\n for k, v in d.items():\n if isinstance(v, dict):\n myprint(v)\n else:\n newList.append(v)\n myprint(d)\n return True in newList\n\n def selectCell(self, player):\n column = ''\n row = ''\n while self.columnExist(column) == False or row not in rowlist:\n userInput = input(player.name +\n ', onde (A-N)(1-14) tu quer mandar o pipoco?\\n')\n if len(userInput) < 2:\n column = ''\n row = ''\n else:\n column = userInput[0].upper()\n row = userInput[1]\n if len(userInput) == 3:\n row += userInput[2]\n return [column, row]\n\n def bomb(self, player, enemy, column, row):\n eff = enemy.field.field\n self.result = ''\n row = int(row)\n if eff[column][row] == True:\n self.result = 'X'\n eff[column][row] = 'X'\n player.bombfield.field[column][row] = 'X'\n if self.anythingLeft(eff) == False:\n self.result = player.name + ' wins!'\n else:\n self.result = 'O'\n eff[column][row] = '@'\n if player.bombfield.field[column][row] != 'X':\n player.bombfield.field[column][row] = 'O'\n\n def start(self):\n while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self\n .p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p1.field.field))\n print('\\nCampo delas:\\n')\n print(self.printfield(self.p1.bombfield.field))\n cell = self.selectCell(self.p1)\n self.bomb(self.p1, self.p2, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('ACERTOU CARA!')\n elif self.result == 'O':\n print('ERROOOOOU!')\n else:\n print(self.result)\n sys.exit()\n print(self.printfield(self.p1.bombfield.field))\n input('aperta enter men')\n self.clear()\n if self.anythingLeft(self.p1.field.field) and self.anythingLeft(\n self.p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p2.field.field))\n print('\\nCampo do babaca la:\\n')\n print(self.printfield(self.p2.bombfield.field))\n cell = self.selectCell(self.p2)\n self.bomb(self.p2, self.p1, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('Acertou, 
mizera!')\n elif self.result == 'O':\n print('Errou de novo pora!')\n else:\n print(self.result)\n sys.exit()\n input('Aperta enter parça')\n self.clear()\n", "<import token>\n<assignment token>\n\n\nclass Game:\n\n def clear(self):\n os.system('cls' if os.name == 'nt' else 'clear')\n\n def __init__(self):\n self.p1 = ''\n self.p2 = ''\n self.p1Field = battlefield.Battlefield()\n self.p2Field = battlefield.Battlefield()\n self.p1BombField = bombfield.Bombfield()\n self.p2BombField = bombfield.Bombfield()\n self.ships = []\n self.ships.append(ship.Ship(5))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(1))\n self.ships.append(ship.Ship(1))\n\n def columnExist(self, column):\n if 'A' <= column <= 'N':\n return True\n else:\n return False\n\n def rowExist(self, row):\n if 1 <= row <= 14:\n return True\n else:\n return False\n\n def printfield(self, f):\n l = [' ', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K',\n 'L', 'M', 'N']\n spacing = ' '.join(['{:<2}'] * len(l))\n text = spacing.format(*l)\n for v in range(1, len(l)):\n text += '\\n' + spacing.format(v, f['A'][v], f['B'][v], f['C'][v\n ], f['D'][v], f['E'][v], f['F'][v], f['G'][v], f['H'][v], f\n ['I'][v], f['J'][v], f['K'][v], f['L'][v], f['M'][v], f['N'][v]\n )\n return text\n <function token>\n\n def newPlayer(self, n, ships, field, bombfield):\n newName = input('Player ' + str(n) + ',qual teu nick?\\n')\n while newName == '':\n newName = input('Digita ai mano\\n')\n self.clear()\n p = player.Player(newName, ships[:], field, bombfield)\n self.placeShips(p)\n return p\n\n def anythingLeft(self, d):\n newList = []\n\n def myprint(d):\n for k, v in d.items():\n if isinstance(v, dict):\n myprint(v)\n else:\n newList.append(v)\n myprint(d)\n return True in newList\n\n def selectCell(self, player):\n column = ''\n row = ''\n while self.columnExist(column) == False or row not in rowlist:\n userInput = input(player.name +\n ', onde (A-N)(1-14) tu quer mandar o pipoco?\\n')\n if len(userInput) < 2:\n column = ''\n row = ''\n else:\n column = userInput[0].upper()\n row = userInput[1]\n if len(userInput) == 3:\n row += userInput[2]\n return [column, row]\n\n def bomb(self, player, enemy, column, row):\n eff = enemy.field.field\n self.result = ''\n row = int(row)\n if eff[column][row] == True:\n self.result = 'X'\n eff[column][row] = 'X'\n player.bombfield.field[column][row] = 'X'\n if self.anythingLeft(eff) == False:\n self.result = player.name + ' wins!'\n else:\n self.result = 'O'\n eff[column][row] = '@'\n if player.bombfield.field[column][row] != 'X':\n player.bombfield.field[column][row] = 'O'\n\n def start(self):\n while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self\n .p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p1.field.field))\n print('\\nCampo delas:\\n')\n print(self.printfield(self.p1.bombfield.field))\n cell = self.selectCell(self.p1)\n self.bomb(self.p1, self.p2, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('ACERTOU CARA!')\n elif self.result == 'O':\n print('ERROOOOOU!')\n else:\n print(self.result)\n sys.exit()\n print(self.printfield(self.p1.bombfield.field))\n input('aperta enter men')\n self.clear()\n if self.anythingLeft(self.p1.field.field) and self.anythingLeft(\n self.p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p2.field.field))\n print('\\nCampo do babaca la:\\n')\n 
print(self.printfield(self.p2.bombfield.field))\n cell = self.selectCell(self.p2)\n self.bomb(self.p2, self.p1, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('Acertou, mizera!')\n elif self.result == 'O':\n print('Errou de novo pora!')\n else:\n print(self.result)\n sys.exit()\n input('Aperta enter parça')\n self.clear()\n", "<import token>\n<assignment token>\n\n\nclass Game:\n\n def clear(self):\n os.system('cls' if os.name == 'nt' else 'clear')\n\n def __init__(self):\n self.p1 = ''\n self.p2 = ''\n self.p1Field = battlefield.Battlefield()\n self.p2Field = battlefield.Battlefield()\n self.p1BombField = bombfield.Bombfield()\n self.p2BombField = bombfield.Bombfield()\n self.ships = []\n self.ships.append(ship.Ship(5))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(1))\n self.ships.append(ship.Ship(1))\n\n def columnExist(self, column):\n if 'A' <= column <= 'N':\n return True\n else:\n return False\n\n def rowExist(self, row):\n if 1 <= row <= 14:\n return True\n else:\n return False\n\n def printfield(self, f):\n l = [' ', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K',\n 'L', 'M', 'N']\n spacing = ' '.join(['{:<2}'] * len(l))\n text = spacing.format(*l)\n for v in range(1, len(l)):\n text += '\\n' + spacing.format(v, f['A'][v], f['B'][v], f['C'][v\n ], f['D'][v], f['E'][v], f['F'][v], f['G'][v], f['H'][v], f\n ['I'][v], f['J'][v], f['K'][v], f['L'][v], f['M'][v], f['N'][v]\n )\n return text\n <function token>\n\n def newPlayer(self, n, ships, field, bombfield):\n newName = input('Player ' + str(n) + ',qual teu nick?\\n')\n while newName == '':\n newName = input('Digita ai mano\\n')\n self.clear()\n p = player.Player(newName, ships[:], field, bombfield)\n self.placeShips(p)\n return p\n\n def anythingLeft(self, d):\n newList = []\n\n def myprint(d):\n for k, v in d.items():\n if isinstance(v, dict):\n myprint(v)\n else:\n newList.append(v)\n myprint(d)\n return True in newList\n <function token>\n\n def bomb(self, player, enemy, column, row):\n eff = enemy.field.field\n self.result = ''\n row = int(row)\n if eff[column][row] == True:\n self.result = 'X'\n eff[column][row] = 'X'\n player.bombfield.field[column][row] = 'X'\n if self.anythingLeft(eff) == False:\n self.result = player.name + ' wins!'\n else:\n self.result = 'O'\n eff[column][row] = '@'\n if player.bombfield.field[column][row] != 'X':\n player.bombfield.field[column][row] = 'O'\n\n def start(self):\n while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self\n .p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p1.field.field))\n print('\\nCampo delas:\\n')\n print(self.printfield(self.p1.bombfield.field))\n cell = self.selectCell(self.p1)\n self.bomb(self.p1, self.p2, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('ACERTOU CARA!')\n elif self.result == 'O':\n print('ERROOOOOU!')\n else:\n print(self.result)\n sys.exit()\n print(self.printfield(self.p1.bombfield.field))\n input('aperta enter men')\n self.clear()\n if self.anythingLeft(self.p1.field.field) and self.anythingLeft(\n self.p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p2.field.field))\n print('\\nCampo do babaca la:\\n')\n print(self.printfield(self.p2.bombfield.field))\n cell = self.selectCell(self.p2)\n self.bomb(self.p2, self.p1, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('Acertou, 
mizera!')\n elif self.result == 'O':\n print('Errou de novo pora!')\n else:\n print(self.result)\n sys.exit()\n input('Aperta enter parça')\n self.clear()\n", "<import token>\n<assignment token>\n\n\nclass Game:\n <function token>\n\n def __init__(self):\n self.p1 = ''\n self.p2 = ''\n self.p1Field = battlefield.Battlefield()\n self.p2Field = battlefield.Battlefield()\n self.p1BombField = bombfield.Bombfield()\n self.p2BombField = bombfield.Bombfield()\n self.ships = []\n self.ships.append(ship.Ship(5))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(1))\n self.ships.append(ship.Ship(1))\n\n def columnExist(self, column):\n if 'A' <= column <= 'N':\n return True\n else:\n return False\n\n def rowExist(self, row):\n if 1 <= row <= 14:\n return True\n else:\n return False\n\n def printfield(self, f):\n l = [' ', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K',\n 'L', 'M', 'N']\n spacing = ' '.join(['{:<2}'] * len(l))\n text = spacing.format(*l)\n for v in range(1, len(l)):\n text += '\\n' + spacing.format(v, f['A'][v], f['B'][v], f['C'][v\n ], f['D'][v], f['E'][v], f['F'][v], f['G'][v], f['H'][v], f\n ['I'][v], f['J'][v], f['K'][v], f['L'][v], f['M'][v], f['N'][v]\n )\n return text\n <function token>\n\n def newPlayer(self, n, ships, field, bombfield):\n newName = input('Player ' + str(n) + ',qual teu nick?\\n')\n while newName == '':\n newName = input('Digita ai mano\\n')\n self.clear()\n p = player.Player(newName, ships[:], field, bombfield)\n self.placeShips(p)\n return p\n\n def anythingLeft(self, d):\n newList = []\n\n def myprint(d):\n for k, v in d.items():\n if isinstance(v, dict):\n myprint(v)\n else:\n newList.append(v)\n myprint(d)\n return True in newList\n <function token>\n\n def bomb(self, player, enemy, column, row):\n eff = enemy.field.field\n self.result = ''\n row = int(row)\n if eff[column][row] == True:\n self.result = 'X'\n eff[column][row] = 'X'\n player.bombfield.field[column][row] = 'X'\n if self.anythingLeft(eff) == False:\n self.result = player.name + ' wins!'\n else:\n self.result = 'O'\n eff[column][row] = '@'\n if player.bombfield.field[column][row] != 'X':\n player.bombfield.field[column][row] = 'O'\n\n def start(self):\n while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self\n .p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p1.field.field))\n print('\\nCampo delas:\\n')\n print(self.printfield(self.p1.bombfield.field))\n cell = self.selectCell(self.p1)\n self.bomb(self.p1, self.p2, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('ACERTOU CARA!')\n elif self.result == 'O':\n print('ERROOOOOU!')\n else:\n print(self.result)\n sys.exit()\n print(self.printfield(self.p1.bombfield.field))\n input('aperta enter men')\n self.clear()\n if self.anythingLeft(self.p1.field.field) and self.anythingLeft(\n self.p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p2.field.field))\n print('\\nCampo do babaca la:\\n')\n print(self.printfield(self.p2.bombfield.field))\n cell = self.selectCell(self.p2)\n self.bomb(self.p2, self.p1, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('Acertou, mizera!')\n elif self.result == 'O':\n print('Errou de novo pora!')\n else:\n print(self.result)\n sys.exit()\n input('Aperta enter parça')\n self.clear()\n", "<import token>\n<assignment token>\n\n\nclass Game:\n <function token>\n\n def 
__init__(self):\n self.p1 = ''\n self.p2 = ''\n self.p1Field = battlefield.Battlefield()\n self.p2Field = battlefield.Battlefield()\n self.p1BombField = bombfield.Bombfield()\n self.p2BombField = bombfield.Bombfield()\n self.ships = []\n self.ships.append(ship.Ship(5))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(1))\n self.ships.append(ship.Ship(1))\n\n def columnExist(self, column):\n if 'A' <= column <= 'N':\n return True\n else:\n return False\n <function token>\n\n def printfield(self, f):\n l = [' ', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K',\n 'L', 'M', 'N']\n spacing = ' '.join(['{:<2}'] * len(l))\n text = spacing.format(*l)\n for v in range(1, len(l)):\n text += '\\n' + spacing.format(v, f['A'][v], f['B'][v], f['C'][v\n ], f['D'][v], f['E'][v], f['F'][v], f['G'][v], f['H'][v], f\n ['I'][v], f['J'][v], f['K'][v], f['L'][v], f['M'][v], f['N'][v]\n )\n return text\n <function token>\n\n def newPlayer(self, n, ships, field, bombfield):\n newName = input('Player ' + str(n) + ',qual teu nick?\\n')\n while newName == '':\n newName = input('Digita ai mano\\n')\n self.clear()\n p = player.Player(newName, ships[:], field, bombfield)\n self.placeShips(p)\n return p\n\n def anythingLeft(self, d):\n newList = []\n\n def myprint(d):\n for k, v in d.items():\n if isinstance(v, dict):\n myprint(v)\n else:\n newList.append(v)\n myprint(d)\n return True in newList\n <function token>\n\n def bomb(self, player, enemy, column, row):\n eff = enemy.field.field\n self.result = ''\n row = int(row)\n if eff[column][row] == True:\n self.result = 'X'\n eff[column][row] = 'X'\n player.bombfield.field[column][row] = 'X'\n if self.anythingLeft(eff) == False:\n self.result = player.name + ' wins!'\n else:\n self.result = 'O'\n eff[column][row] = '@'\n if player.bombfield.field[column][row] != 'X':\n player.bombfield.field[column][row] = 'O'\n\n def start(self):\n while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self\n .p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p1.field.field))\n print('\\nCampo delas:\\n')\n print(self.printfield(self.p1.bombfield.field))\n cell = self.selectCell(self.p1)\n self.bomb(self.p1, self.p2, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('ACERTOU CARA!')\n elif self.result == 'O':\n print('ERROOOOOU!')\n else:\n print(self.result)\n sys.exit()\n print(self.printfield(self.p1.bombfield.field))\n input('aperta enter men')\n self.clear()\n if self.anythingLeft(self.p1.field.field) and self.anythingLeft(\n self.p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p2.field.field))\n print('\\nCampo do babaca la:\\n')\n print(self.printfield(self.p2.bombfield.field))\n cell = self.selectCell(self.p2)\n self.bomb(self.p2, self.p1, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('Acertou, mizera!')\n elif self.result == 'O':\n print('Errou de novo pora!')\n else:\n print(self.result)\n sys.exit()\n input('Aperta enter parça')\n self.clear()\n", "<import token>\n<assignment token>\n\n\nclass Game:\n <function token>\n\n def __init__(self):\n self.p1 = ''\n self.p2 = ''\n self.p1Field = battlefield.Battlefield()\n self.p2Field = battlefield.Battlefield()\n self.p1BombField = bombfield.Bombfield()\n self.p2BombField = bombfield.Bombfield()\n self.ships = []\n self.ships.append(ship.Ship(5))\n self.ships.append(ship.Ship(4))\n 
self.ships.append(ship.Ship(4))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(2))\n self.ships.append(ship.Ship(1))\n self.ships.append(ship.Ship(1))\n\n def columnExist(self, column):\n if 'A' <= column <= 'N':\n return True\n else:\n return False\n <function token>\n <function token>\n <function token>\n\n def newPlayer(self, n, ships, field, bombfield):\n newName = input('Player ' + str(n) + ',qual teu nick?\\n')\n while newName == '':\n newName = input('Digita ai mano\\n')\n self.clear()\n p = player.Player(newName, ships[:], field, bombfield)\n self.placeShips(p)\n return p\n\n def anythingLeft(self, d):\n newList = []\n\n def myprint(d):\n for k, v in d.items():\n if isinstance(v, dict):\n myprint(v)\n else:\n newList.append(v)\n myprint(d)\n return True in newList\n <function token>\n\n def bomb(self, player, enemy, column, row):\n eff = enemy.field.field\n self.result = ''\n row = int(row)\n if eff[column][row] == True:\n self.result = 'X'\n eff[column][row] = 'X'\n player.bombfield.field[column][row] = 'X'\n if self.anythingLeft(eff) == False:\n self.result = player.name + ' wins!'\n else:\n self.result = 'O'\n eff[column][row] = '@'\n if player.bombfield.field[column][row] != 'X':\n player.bombfield.field[column][row] = 'O'\n\n def start(self):\n while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self\n .p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p1.field.field))\n print('\\nCampo delas:\\n')\n print(self.printfield(self.p1.bombfield.field))\n cell = self.selectCell(self.p1)\n self.bomb(self.p1, self.p2, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('ACERTOU CARA!')\n elif self.result == 'O':\n print('ERROOOOOU!')\n else:\n print(self.result)\n sys.exit()\n print(self.printfield(self.p1.bombfield.field))\n input('aperta enter men')\n self.clear()\n if self.anythingLeft(self.p1.field.field) and self.anythingLeft(\n self.p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p2.field.field))\n print('\\nCampo do babaca la:\\n')\n print(self.printfield(self.p2.bombfield.field))\n cell = self.selectCell(self.p2)\n self.bomb(self.p2, self.p1, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('Acertou, mizera!')\n elif self.result == 'O':\n print('Errou de novo pora!')\n else:\n print(self.result)\n sys.exit()\n input('Aperta enter parça')\n self.clear()\n", "<import token>\n<assignment token>\n\n\nclass Game:\n <function token>\n <function token>\n\n def columnExist(self, column):\n if 'A' <= column <= 'N':\n return True\n else:\n return False\n <function token>\n <function token>\n <function token>\n\n def newPlayer(self, n, ships, field, bombfield):\n newName = input('Player ' + str(n) + ',qual teu nick?\\n')\n while newName == '':\n newName = input('Digita ai mano\\n')\n self.clear()\n p = player.Player(newName, ships[:], field, bombfield)\n self.placeShips(p)\n return p\n\n def anythingLeft(self, d):\n newList = []\n\n def myprint(d):\n for k, v in d.items():\n if isinstance(v, dict):\n myprint(v)\n else:\n newList.append(v)\n myprint(d)\n return True in newList\n <function token>\n\n def bomb(self, player, enemy, column, row):\n eff = enemy.field.field\n self.result = ''\n row = int(row)\n if eff[column][row] == True:\n self.result = 'X'\n eff[column][row] = 'X'\n player.bombfield.field[column][row] = 'X'\n if self.anythingLeft(eff) == False:\n self.result = player.name + ' wins!'\n else:\n self.result = 'O'\n eff[column][row] = '@'\n 
if player.bombfield.field[column][row] != 'X':\n player.bombfield.field[column][row] = 'O'\n\n def start(self):\n while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self\n .p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p1.field.field))\n print('\\nCampo delas:\\n')\n print(self.printfield(self.p1.bombfield.field))\n cell = self.selectCell(self.p1)\n self.bomb(self.p1, self.p2, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('ACERTOU CARA!')\n elif self.result == 'O':\n print('ERROOOOOU!')\n else:\n print(self.result)\n sys.exit()\n print(self.printfield(self.p1.bombfield.field))\n input('aperta enter men')\n self.clear()\n if self.anythingLeft(self.p1.field.field) and self.anythingLeft(\n self.p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p2.field.field))\n print('\\nCampo do babaca la:\\n')\n print(self.printfield(self.p2.bombfield.field))\n cell = self.selectCell(self.p2)\n self.bomb(self.p2, self.p1, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('Acertou, mizera!')\n elif self.result == 'O':\n print('Errou de novo pora!')\n else:\n print(self.result)\n sys.exit()\n input('Aperta enter parça')\n self.clear()\n", "<import token>\n<assignment token>\n\n\nclass Game:\n <function token>\n <function token>\n\n def columnExist(self, column):\n if 'A' <= column <= 'N':\n return True\n else:\n return False\n <function token>\n <function token>\n <function token>\n\n def newPlayer(self, n, ships, field, bombfield):\n newName = input('Player ' + str(n) + ',qual teu nick?\\n')\n while newName == '':\n newName = input('Digita ai mano\\n')\n self.clear()\n p = player.Player(newName, ships[:], field, bombfield)\n self.placeShips(p)\n return p\n\n def anythingLeft(self, d):\n newList = []\n\n def myprint(d):\n for k, v in d.items():\n if isinstance(v, dict):\n myprint(v)\n else:\n newList.append(v)\n myprint(d)\n return True in newList\n <function token>\n <function token>\n\n def start(self):\n while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self\n .p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p1.field.field))\n print('\\nCampo delas:\\n')\n print(self.printfield(self.p1.bombfield.field))\n cell = self.selectCell(self.p1)\n self.bomb(self.p1, self.p2, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('ACERTOU CARA!')\n elif self.result == 'O':\n print('ERROOOOOU!')\n else:\n print(self.result)\n sys.exit()\n print(self.printfield(self.p1.bombfield.field))\n input('aperta enter men')\n self.clear()\n if self.anythingLeft(self.p1.field.field) and self.anythingLeft(\n self.p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p2.field.field))\n print('\\nCampo do babaca la:\\n')\n print(self.printfield(self.p2.bombfield.field))\n cell = self.selectCell(self.p2)\n self.bomb(self.p2, self.p1, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('Acertou, mizera!')\n elif self.result == 'O':\n print('Errou de novo pora!')\n else:\n print(self.result)\n sys.exit()\n input('Aperta enter parça')\n self.clear()\n", "<import token>\n<assignment token>\n\n\nclass Game:\n <function token>\n <function token>\n\n def columnExist(self, column):\n if 'A' <= column <= 'N':\n return True\n else:\n return False\n <function token>\n <function token>\n <function token>\n <function token>\n\n def anythingLeft(self, d):\n newList = []\n\n def myprint(d):\n for k, v in d.items():\n if isinstance(v, dict):\n myprint(v)\n else:\n 
newList.append(v)\n myprint(d)\n return True in newList\n <function token>\n <function token>\n\n def start(self):\n while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self\n .p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p1.field.field))\n print('\\nCampo delas:\\n')\n print(self.printfield(self.p1.bombfield.field))\n cell = self.selectCell(self.p1)\n self.bomb(self.p1, self.p2, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('ACERTOU CARA!')\n elif self.result == 'O':\n print('ERROOOOOU!')\n else:\n print(self.result)\n sys.exit()\n print(self.printfield(self.p1.bombfield.field))\n input('aperta enter men')\n self.clear()\n if self.anythingLeft(self.p1.field.field) and self.anythingLeft(\n self.p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p2.field.field))\n print('\\nCampo do babaca la:\\n')\n print(self.printfield(self.p2.bombfield.field))\n cell = self.selectCell(self.p2)\n self.bomb(self.p2, self.p1, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('Acertou, mizera!')\n elif self.result == 'O':\n print('Errou de novo pora!')\n else:\n print(self.result)\n sys.exit()\n input('Aperta enter parça')\n self.clear()\n", "<import token>\n<assignment token>\n\n\nclass Game:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def anythingLeft(self, d):\n newList = []\n\n def myprint(d):\n for k, v in d.items():\n if isinstance(v, dict):\n myprint(v)\n else:\n newList.append(v)\n myprint(d)\n return True in newList\n <function token>\n <function token>\n\n def start(self):\n while self.anythingLeft(self.p1.field.field) and self.anythingLeft(self\n .p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p1.field.field))\n print('\\nCampo delas:\\n')\n print(self.printfield(self.p1.bombfield.field))\n cell = self.selectCell(self.p1)\n self.bomb(self.p1, self.p2, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('ACERTOU CARA!')\n elif self.result == 'O':\n print('ERROOOOOU!')\n else:\n print(self.result)\n sys.exit()\n print(self.printfield(self.p1.bombfield.field))\n input('aperta enter men')\n self.clear()\n if self.anythingLeft(self.p1.field.field) and self.anythingLeft(\n self.p2.field.field):\n print('Teu campo:\\n')\n print(self.printfield(self.p2.field.field))\n print('\\nCampo do babaca la:\\n')\n print(self.printfield(self.p2.bombfield.field))\n cell = self.selectCell(self.p2)\n self.bomb(self.p2, self.p1, cell[0], cell[1])\n self.clear()\n if self.result == 'X':\n print('Acertou, mizera!')\n elif self.result == 'O':\n print('Errou de novo pora!')\n else:\n print(self.result)\n sys.exit()\n input('Aperta enter parça')\n self.clear()\n", "<import token>\n<assignment token>\n\n\nclass Game:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def anythingLeft(self, d):\n newList = []\n\n def myprint(d):\n for k, v in d.items():\n if isinstance(v, dict):\n myprint(v)\n else:\n newList.append(v)\n myprint(d)\n return True in newList\n <function token>\n <function token>\n <function token>\n", "<import token>\n<assignment token>\n\n\nclass Game:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<assignment 
token>\n<class token>\n" ]
false
99,497
702b72a34a79e84cd6065c29c659c86d41b44dd2
# -*- coding: UTF-8 -*- ''' Authorized by vlon Jang Created on May 15, 2016 Email:[email protected] From Institute of Computing Technology All Rights Reserved. ''' featueName = 'keys' from gen_X_features import fromDateTrain, toDateTrain, fromDateTest, toDateTest, fromDateSubmit, toDateSubmit def gen(fromDate = None, toDate = None, table_name = None): sqlTemplate = """ drop table if exists {table_name}; create table {table_name} as SELECT user_id, song_id, date_format(date_add(str_to_date(ds, '%Y%m%d'), interval 2 month), '%Y%m%d') as ds from user_actions where action_type = '1' and ds>='{fromDate}' and ds <= '{toDate}' GROUP BY user_id, song_id, ds order by ds desc; create index IDX_{table_name} on {table_name}(user_id); """ return sqlTemplate.format(table_name=table_name, fromDate=fromDate, toDate=toDate) def genAll(): ''' This function is used to generate keys which is used as the origin table of left join on train, test, submit dataset. ''' return (gen(fromDateTrain, toDateTrain, 'user_%s_train' %featueName), gen(fromDateTest, toDateTest, 'user_%s_test' %featueName), gen(fromDateSubmit, toDateSubmit, 'user_%s_submit' %featueName)) if __name__ == '__main__': for sql in genAll(): print sql
[ "# -*- coding: UTF-8 -*- \n'''\nAuthorized by vlon Jang\nCreated on May 15, 2016\nEmail:[email protected]\nFrom Institute of Computing Technology\nAll Rights Reserved.\n'''\n\nfeatueName = 'keys'\nfrom gen_X_features import fromDateTrain, toDateTrain, fromDateTest, toDateTest, fromDateSubmit, toDateSubmit\n\ndef gen(fromDate = None, toDate = None, table_name = None):\n sqlTemplate = \"\"\"\n drop table if exists {table_name};\n create table {table_name} as\n SELECT user_id, song_id, \n date_format(date_add(str_to_date(ds, '%Y%m%d'), interval 2 month), '%Y%m%d') as ds \n from user_actions \n where action_type = '1' and ds>='{fromDate}' and ds <= '{toDate}'\n GROUP BY user_id, song_id, ds\n order by ds desc;\n create index IDX_{table_name} on {table_name}(user_id);\n \"\"\"\n return sqlTemplate.format(table_name=table_name, fromDate=fromDate, toDate=toDate)\n \n\ndef genAll():\n '''\n This function is used to generate keys which is used as the origin table of \n left join on train, test, submit dataset.\n '''\n return (gen(fromDateTrain, toDateTrain, 'user_%s_train' %featueName), \n gen(fromDateTest, toDateTest, 'user_%s_test' %featueName),\n gen(fromDateSubmit, toDateSubmit, 'user_%s_submit' %featueName))\n \nif __name__ == '__main__':\n for sql in genAll():\n print sql" ]
true
99,498
9f64fe84e84155895d7258b9a691211a6137d289
#!/usr/bin/env python import warnings warnings.filterwarnings("ignore", category=DeprecationWarning) import os, sys, re, time, json, traceback from datetime import datetime from ftplib import FTP, error_temp import cPickle from StringIO import StringIO import numpy as np import redis, cv2 from skimage import feature from email_send import send_email MODELLIST = ['inside_empty', 'open_close'] MODELPATH = 'models' REDIS_INPUT_LIST = 'garage_files2label' REDIS_FAIL_LIST = 'garage_failed_files' REDIS_OUTPUT_PREFIX = 'garage_label_' NO_LABEL = '_' NO_LABEL_QUEUE_PREFIX = 'no_label_' def dbprint(text): print >>sys.__stderr__, '[%s]:%s' % (datetime.fromtimestamp(time.time()).strftime('%d/%m/%Y %H:%M:%S.%f'), text) def detect_label(model, image): image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) hist = feature.hog(image, orientations=9, pixels_per_cell=[4, 4], cells_per_block=[2, 2], transform_sqrt=True) hist[hist < 0] = 0 labels = model.predict_proba(hist) labels = zip(model.classes_, labels[0]) labels.sort(key=lambda x: x[1], reverse=True) rs = labels[0][0] if abs(labels[0][1] - labels[1][1]) > 0.4 else '_' dbprint('%s -> %s' % (labels, rs)) return rs def detect_image_label(model, ftp_h, fpath): t = time.time() reader = StringIO() ftp_h.retrbinary("RETR %s" % fpath, reader.write) imgdata = reader.getvalue() img_array = np.asarray(bytearray(imgdata), dtype=np.uint8) image = cv2.imdecode(img_array, cv2.CV_LOAD_IMAGE_UNCHANGED) rs = (detect_label(model, image), imgdata) tdiff = int(time.time() - t) dbprint('image process time: %d:%02d' % (tdiff//60, tdiff%60)) return rs #00D6FB009223(n800sau)_1_20160516142921_30928.jpg failed_file = '' ftp_h = None models = None r = re.compile('^[0-9A-F]+\(.*\)_\d_(\d+)_\d+\.jpg') try: redis = redis.Redis() for i in range(2): fpath = redis.lpop(REDIS_INPUT_LIST) if fpath is None: print 'End of files' break bname = os.path.basename(fpath) dbprint('popped %s' % fpath) m = r.match(bname) if m: dt = datetime.strptime(m.groups()[0], '%Y%m%d%H%M%S') ts = time.mktime(dt.timetuple()) try: if ftp_h is None: ftp_h = FTP('192.168.1.1', timeout=30) ftp_h.login('writer', 'pfgbcm') if models is None: t = time.time() models = {} for mname in MODELLIST: models[mname] = cPickle.loads(open(os.path.join(MODELPATH, mname + '.svc')).read()) # cPickle.dump(models[mname], open(os.path.join(MODELPATH, mname + '.svc.new'), 'w'), protocol=cPickle.HIGHEST_PROTOCOL) tdiff = int(time.time() - t) dbprint('models load time: %d:%02d' % (tdiff//60, tdiff%60)) msglist = [] labellist = [] dbprint('model names: %s' % models.keys()) for mname,model in models.items(): output_name = REDIS_OUTPUT_PREFIX + mname dbprint('Start %s' % bname) label,imgdata = detect_image_label(model, ftp_h, fpath) if label == NO_LABEL: queue_pfx = NO_LABEL_QUEUE_PREFIX + mname redis.rpush(queue_pfx, bname) redis.ltrim(queue_pfx, max(0, redis.llen(queue_pfx) - 100), -1) elif label in ('open', 'close'): redis.set('gate', json.dumps({'label': label, 'ts': time.time()})) if label != NO_LABEL: last_rec = redis.lrange(output_name, -1, -1) if last_rec: last_rec = json.loads(last_rec[0]) if last_rec['ts'] < ts and last_rec['label'] != label: msg = '%s changed at %s from %s to %s (diff=%d), %s' % (mname, dt.strftime('%d/%m %H:%M:%S'), last_rec['label'], label, ts - last_rec['ts'], bname) dbprint('%s %s' % (bname, msg)) msglist.append(msg) labellist.append((mname, label)) else: msg = 'Initial at %s %s' % (dt.strftime('%d/%m %H:%M:%S'), label) dbprint('%s %s' % (bname, msg)) msglist.append(msg) labellist.append((mname, label)) 
dbprint(bname) redis.rpush(output_name, json.dumps({'label': label, 'ts': ts, 'name': fpath})) redis.ltrim(output_name, max(0, redis.llen(output_name) - 100), -1) if msglist: labellist = [label for mname,label in labellist if label != NO_LABEL] if not labellist: labellist = ['_'] send_email('[email protected]', '%s: %s' % (dt.strftime('%H:%M:%S %d/%m'), ','.join(labellist)), '\n'.join(msglist), [imgdata]) except: # return fpath back to redis list redis.rpush(REDIS_FAIL_LIST, fpath) failed_file = fpath raise redis.rpush(REDIS_FAIL_LIST, fpath) break except Exception, e: if not isinstance(e, error_temp): send_email('[email protected]', '%s error occured: %s' % (failed_file, str(e)), 'Error details: %s' % traceback.format_exc()) traceback.print_exc(sys.stderr) if not ftp_h is None: ftp_h.quit() print 'Finished at %s' % time.strftime('%d/%m %H:%M:%S')
[ "#!/usr/bin/env python\n\nimport warnings\nwarnings.filterwarnings(\"ignore\", category=DeprecationWarning)\nimport os, sys, re, time, json, traceback\nfrom datetime import datetime\nfrom ftplib import FTP, error_temp\nimport cPickle\nfrom StringIO import StringIO\nimport numpy as np\nimport redis, cv2\nfrom skimage import feature\n\nfrom email_send import send_email\n\nMODELLIST = ['inside_empty', 'open_close']\n\nMODELPATH = 'models'\nREDIS_INPUT_LIST = 'garage_files2label'\nREDIS_FAIL_LIST = 'garage_failed_files'\nREDIS_OUTPUT_PREFIX = 'garage_label_'\nNO_LABEL = '_'\nNO_LABEL_QUEUE_PREFIX = 'no_label_'\n\ndef dbprint(text):\n\tprint >>sys.__stderr__, '[%s]:%s' % (datetime.fromtimestamp(time.time()).strftime('%d/%m/%Y %H:%M:%S.%f'), text)\n\ndef detect_label(model, image):\n\timage = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)\n\thist = feature.hog(image,\n\t\torientations=9,\n\t\tpixels_per_cell=[4, 4],\n\t\tcells_per_block=[2, 2],\n\t\ttransform_sqrt=True)\n\thist[hist < 0] = 0\n\tlabels = model.predict_proba(hist)\n\tlabels = zip(model.classes_, labels[0])\n\tlabels.sort(key=lambda x: x[1], reverse=True)\n\trs = labels[0][0] if abs(labels[0][1] - labels[1][1]) > 0.4 else '_'\n\tdbprint('%s -> %s' % (labels, rs))\n\treturn rs\n\n\ndef detect_image_label(model, ftp_h, fpath):\n\tt = time.time()\n\treader = StringIO()\n\tftp_h.retrbinary(\"RETR %s\" % fpath, reader.write)\n\timgdata = reader.getvalue()\n\timg_array = np.asarray(bytearray(imgdata), dtype=np.uint8)\n\timage = cv2.imdecode(img_array, cv2.CV_LOAD_IMAGE_UNCHANGED)\n\trs = (detect_label(model, image), imgdata)\n\ttdiff = int(time.time() - t)\n\tdbprint('image process time: %d:%02d' % (tdiff//60, tdiff%60))\n\treturn rs\n\n#00D6FB009223(n800sau)_1_20160516142921_30928.jpg\nfailed_file = ''\nftp_h = None\nmodels = None\nr = re.compile('^[0-9A-F]+\\(.*\\)_\\d_(\\d+)_\\d+\\.jpg')\ntry:\n\tredis = redis.Redis()\n\tfor i in range(2):\n\t\tfpath = redis.lpop(REDIS_INPUT_LIST)\n\t\tif fpath is None:\n\t\t\tprint 'End of files'\n\t\t\tbreak\n\t\tbname = os.path.basename(fpath)\n\t\tdbprint('popped %s' % fpath)\n\t\tm = r.match(bname)\n\t\tif m:\n\t\t\tdt = datetime.strptime(m.groups()[0], '%Y%m%d%H%M%S')\n\t\t\tts = time.mktime(dt.timetuple())\n\t\t\ttry:\n\t\t\t\tif ftp_h is None:\n\t\t\t\t\tftp_h = FTP('192.168.1.1', timeout=30)\n\t\t\t\t\tftp_h.login('writer', 'pfgbcm')\n\t\t\t\tif models is None:\n\t\t\t\t\tt = time.time()\n\t\t\t\t\tmodels = {}\n\t\t\t\t\tfor mname in MODELLIST:\n\t\t\t\t\t\tmodels[mname] = cPickle.loads(open(os.path.join(MODELPATH, mname + '.svc')).read())\n#\t\t\t\t\t\tcPickle.dump(models[mname], open(os.path.join(MODELPATH, mname + '.svc.new'), 'w'), protocol=cPickle.HIGHEST_PROTOCOL)\n\t\t\t\t\ttdiff = int(time.time() - t)\n\t\t\t\t\tdbprint('models load time: %d:%02d' % (tdiff//60, tdiff%60))\n\t\t\t\tmsglist = []\n\t\t\t\tlabellist = []\n\t\t\t\tdbprint('model names: %s' % models.keys())\n\t\t\t\tfor mname,model in models.items():\n\t\t\t\t\toutput_name = REDIS_OUTPUT_PREFIX + mname\n\t\t\t\t\tdbprint('Start %s' % bname)\n\t\t\t\t\tlabel,imgdata = detect_image_label(model, ftp_h, fpath)\n\t\t\t\t\tif label == NO_LABEL:\n\t\t\t\t\t\tqueue_pfx = NO_LABEL_QUEUE_PREFIX + mname\n\t\t\t\t\t\tredis.rpush(queue_pfx, bname)\n\t\t\t\t\t\tredis.ltrim(queue_pfx, max(0, redis.llen(queue_pfx) - 100), -1)\n\t\t\t\t\telif label in ('open', 'close'):\n\t\t\t\t\t\tredis.set('gate', json.dumps({'label': label, 'ts': time.time()}))\n\t\t\t\t\tif label != NO_LABEL:\n\t\t\t\t\t\tlast_rec = redis.lrange(output_name, -1, 
-1)\n\t\t\t\t\t\tif last_rec:\n\t\t\t\t\t\t\tlast_rec = json.loads(last_rec[0])\n\t\t\t\t\t\t\tif last_rec['ts'] < ts and last_rec['label'] != label:\n\t\t\t\t\t\t\t\tmsg = '%s changed at %s from %s to %s (diff=%d), %s' % (mname, dt.strftime('%d/%m %H:%M:%S'), last_rec['label'], label, ts - last_rec['ts'], bname)\n\t\t\t\t\t\t\t\tdbprint('%s %s' % (bname, msg))\n\t\t\t\t\t\t\t\tmsglist.append(msg)\n\t\t\t\t\t\t\t\tlabellist.append((mname, label))\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tmsg = 'Initial at %s %s' % (dt.strftime('%d/%m %H:%M:%S'), label)\n\t\t\t\t\t\t\tdbprint('%s %s' % (bname, msg))\n\t\t\t\t\t\t\tmsglist.append(msg)\n\t\t\t\t\t\t\tlabellist.append((mname, label))\n\t\t\t\t\t\tdbprint(bname)\n\t\t\t\t\t\tredis.rpush(output_name, json.dumps({'label': label, 'ts': ts, 'name': fpath}))\n\t\t\t\t\t\tredis.ltrim(output_name, max(0, redis.llen(output_name) - 100), -1)\n\t\t\t\tif msglist:\n\t\t\t\t\tlabellist = [label for mname,label in labellist if label != NO_LABEL]\n\t\t\t\t\tif not labellist:\n\t\t\t\t\t\tlabellist = ['_']\n\t\t\t\t\tsend_email('[email protected]', '%s: %s' % (dt.strftime('%H:%M:%S %d/%m'), ','.join(labellist)), '\\n'.join(msglist), [imgdata])\n\t\t\texcept:\n\t\t\t\t# return fpath back to redis list\n\t\t\t\tredis.rpush(REDIS_FAIL_LIST, fpath)\n\t\t\t\tfailed_file = fpath\n\t\t\t\traise\n\t\tredis.rpush(REDIS_FAIL_LIST, fpath)\n\t\tbreak\nexcept Exception, e:\n\tif not isinstance(e, error_temp):\n\t\tsend_email('[email protected]', '%s error occured: %s' % (failed_file, str(e)), 'Error details: %s' % traceback.format_exc())\n\ttraceback.print_exc(sys.stderr)\n\nif not ftp_h is None:\n\tftp_h.quit()\n\n\nprint 'Finished at %s' % time.strftime('%d/%m %H:%M:%S')\n" ]
true
99,499
29eb2d76ead787c82e2de1c3f2ab127ea214d813
# Generated by Django 3.0.4 on 2020-03-21 15:37 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Car', fields=[ ('car_make', models.CharField(max_length=50)), ('car_model', models.CharField(max_length=50)), ('car_color', models.CharField(max_length=50)), ('id_car', models.CharField(max_length=50, primary_key=True, serialize=False)), ], ), migrations.CreateModel( name='Car_owner', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('owner_name', models.CharField(max_length=50)), ('owner_surname', models.CharField(max_length=50)), ('date_of_birth', models.DateField()), ], ), migrations.CreateModel( name='Owning', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('date_of_start_owning', models.DateField()), ('date_of_end_owning', models.DateField()), ('car', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project_first_app.Car')), ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project_first_app.Car_owner')), ], ), migrations.CreateModel( name='Driver_license', fields=[ ('id_license', models.CharField(max_length=50, primary_key=True, serialize=False)), ('date_of_issue', models.DateField()), ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project_first_app.Car_owner')), ], ), ]
[ "# Generated by Django 3.0.4 on 2020-03-21 15:37\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Car',\n fields=[\n ('car_make', models.CharField(max_length=50)),\n ('car_model', models.CharField(max_length=50)),\n ('car_color', models.CharField(max_length=50)),\n ('id_car', models.CharField(max_length=50, primary_key=True, serialize=False)),\n ],\n ),\n migrations.CreateModel(\n name='Car_owner',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('owner_name', models.CharField(max_length=50)),\n ('owner_surname', models.CharField(max_length=50)),\n ('date_of_birth', models.DateField()),\n ],\n ),\n migrations.CreateModel(\n name='Owning',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('date_of_start_owning', models.DateField()),\n ('date_of_end_owning', models.DateField()),\n ('car', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project_first_app.Car')),\n ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project_first_app.Car_owner')),\n ],\n ),\n migrations.CreateModel(\n name='Driver_license',\n fields=[\n ('id_license', models.CharField(max_length=50, primary_key=True, serialize=False)),\n ('date_of_issue', models.DateField()),\n ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project_first_app.Car_owner')),\n ],\n ),\n ]\n", "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Car', fields=[('car_make',\n models.CharField(max_length=50)), ('car_model', models.CharField(\n max_length=50)), ('car_color', models.CharField(max_length=50)), (\n 'id_car', models.CharField(max_length=50, primary_key=True,\n serialize=False))]), migrations.CreateModel(name='Car_owner',\n fields=[('id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('owner_name', models.\n CharField(max_length=50)), ('owner_surname', models.CharField(\n max_length=50)), ('date_of_birth', models.DateField())]),\n migrations.CreateModel(name='Owning', fields=[('id', models.\n AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('date_of_start_owning', models.DateField()),\n ('date_of_end_owning', models.DateField()), ('car', models.\n ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'project_first_app.Car')), ('owner', models.ForeignKey(on_delete=\n django.db.models.deletion.CASCADE, to='project_first_app.Car_owner'\n ))]), migrations.CreateModel(name='Driver_license', fields=[(\n 'id_license', models.CharField(max_length=50, primary_key=True,\n serialize=False)), ('date_of_issue', models.DateField()), ('owner',\n models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'project_first_app.Car_owner'))])]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Car', fields=[('car_make',\n models.CharField(max_length=50)), ('car_model', models.CharField(\n max_length=50)), ('car_color', models.CharField(max_length=50)), (\n 'id_car', models.CharField(max_length=50, primary_key=True,\n serialize=False))]), 
migrations.CreateModel(name='Car_owner',\n fields=[('id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('owner_name', models.\n CharField(max_length=50)), ('owner_surname', models.CharField(\n max_length=50)), ('date_of_birth', models.DateField())]),\n migrations.CreateModel(name='Owning', fields=[('id', models.\n AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('date_of_start_owning', models.DateField()),\n ('date_of_end_owning', models.DateField()), ('car', models.\n ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'project_first_app.Car')), ('owner', models.ForeignKey(on_delete=\n django.db.models.deletion.CASCADE, to='project_first_app.Car_owner'\n ))]), migrations.CreateModel(name='Driver_license', fields=[(\n 'id_license', models.CharField(max_length=50, primary_key=True,\n serialize=False)), ('date_of_issue', models.DateField()), ('owner',\n models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'project_first_app.Car_owner'))])]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n" ]
false