index
int64
0
100k
blob_id
stringlengths
40
40
code
stringlengths
7
7.27M
steps
listlengths
1
1.25k
error
bool
2 classes
98,900
bf8f6fcc17231f215c95ba45cceb7dd9ce02087c
from Utils import * from os import path def add_score(diff): current_score = '' new_score = '' # open file for writting in append mode if exist if (path.exists(SCORES_FILE_NAME)): current_score = get_score_from_file(SCORES_FILE_NAME) if (current_score != ''): new_score = int(current_score) + (diff * 3) + 5 else: new_score = (diff * 3) + 5 f = open(SCORES_FILE_NAME, "w", encoding='utf-8') f.write(str(new_score)) else: f = open(SCORES_FILE_NAME, "x", encoding='utf-8') f.write(str((diff * 3) + 5)) f.close() def get_score_from_file(file_path): try: f = open(SCORES_FILE_NAME, 'r') score = f.readline() return score except: print(" File cannot be found") finally: f.close()
[ "from Utils import *\nfrom os import path\n\n\ndef add_score(diff):\n current_score = ''\n new_score = ''\n # open file for writting in append mode if exist\n if (path.exists(SCORES_FILE_NAME)):\n current_score = get_score_from_file(SCORES_FILE_NAME)\n if (current_score != ''):\n new_score = int(current_score) + (diff * 3) + 5\n else:\n new_score = (diff * 3) + 5\n f = open(SCORES_FILE_NAME, \"w\", encoding='utf-8')\n f.write(str(new_score))\n else:\n f = open(SCORES_FILE_NAME, \"x\", encoding='utf-8')\n f.write(str((diff * 3) + 5))\n f.close()\n\n\ndef get_score_from_file(file_path):\n try:\n f = open(SCORES_FILE_NAME, 'r')\n score = f.readline()\n return score\n except:\n print(\" File cannot be found\")\n finally:\n f.close()\n", "from Utils import *\nfrom os import path\n\n\ndef add_score(diff):\n current_score = ''\n new_score = ''\n if path.exists(SCORES_FILE_NAME):\n current_score = get_score_from_file(SCORES_FILE_NAME)\n if current_score != '':\n new_score = int(current_score) + diff * 3 + 5\n else:\n new_score = diff * 3 + 5\n f = open(SCORES_FILE_NAME, 'w', encoding='utf-8')\n f.write(str(new_score))\n else:\n f = open(SCORES_FILE_NAME, 'x', encoding='utf-8')\n f.write(str(diff * 3 + 5))\n f.close()\n\n\ndef get_score_from_file(file_path):\n try:\n f = open(SCORES_FILE_NAME, 'r')\n score = f.readline()\n return score\n except:\n print(' File cannot be found')\n finally:\n f.close()\n", "<import token>\n\n\ndef add_score(diff):\n current_score = ''\n new_score = ''\n if path.exists(SCORES_FILE_NAME):\n current_score = get_score_from_file(SCORES_FILE_NAME)\n if current_score != '':\n new_score = int(current_score) + diff * 3 + 5\n else:\n new_score = diff * 3 + 5\n f = open(SCORES_FILE_NAME, 'w', encoding='utf-8')\n f.write(str(new_score))\n else:\n f = open(SCORES_FILE_NAME, 'x', encoding='utf-8')\n f.write(str(diff * 3 + 5))\n f.close()\n\n\ndef get_score_from_file(file_path):\n try:\n f = open(SCORES_FILE_NAME, 'r')\n score = f.readline()\n return score\n except:\n print(' File cannot be found')\n finally:\n f.close()\n", "<import token>\n<function token>\n\n\ndef get_score_from_file(file_path):\n try:\n f = open(SCORES_FILE_NAME, 'r')\n score = f.readline()\n return score\n except:\n print(' File cannot be found')\n finally:\n f.close()\n", "<import token>\n<function token>\n<function token>\n" ]
false
98,901
acbf4f2c76685740e7a5da5524f9d4fcbadd4a12
#! /usr/bin/env python import os from os.path import abspath, basename import sys from gc3libs import Application from gc3libs.cmdline import SessionBasedScript if __name__ == '__main__': from ex2b import GrayscalingScript GrayscalingScript().run() # alternatively, you can just copy+paste # the code for `GrayscaleApp` here from grayscale_app import GrayscaleApp class GrayscalingScript(SessionBasedScript): """ Convert an image to grayscale. """ def __init__(self): super(GrayscalingScript, self).__init__(version='1.0') def new_tasks(self, extra): input_file = abspath(self.params.args[0]) apps_to_run = [ GrayscaleApp(input_file) ] return apps_to_run
[ "#! /usr/bin/env python\n\nimport os\nfrom os.path import abspath, basename\nimport sys\n\nfrom gc3libs import Application\nfrom gc3libs.cmdline import SessionBasedScript\n\n\nif __name__ == '__main__':\n from ex2b import GrayscalingScript\n GrayscalingScript().run()\n\n\n# alternatively, you can just copy+paste\n# the code for `GrayscaleApp` here\nfrom grayscale_app import GrayscaleApp\n\n\nclass GrayscalingScript(SessionBasedScript):\n \"\"\"\n Convert an image to grayscale.\n \"\"\"\n def __init__(self):\n super(GrayscalingScript, self).__init__(version='1.0')\n def new_tasks(self, extra):\n input_file = abspath(self.params.args[0])\n apps_to_run = [ GrayscaleApp(input_file) ]\n return apps_to_run\n", "import os\nfrom os.path import abspath, basename\nimport sys\nfrom gc3libs import Application\nfrom gc3libs.cmdline import SessionBasedScript\nif __name__ == '__main__':\n from ex2b import GrayscalingScript\n GrayscalingScript().run()\nfrom grayscale_app import GrayscaleApp\n\n\nclass GrayscalingScript(SessionBasedScript):\n \"\"\"\n Convert an image to grayscale.\n \"\"\"\n\n def __init__(self):\n super(GrayscalingScript, self).__init__(version='1.0')\n\n def new_tasks(self, extra):\n input_file = abspath(self.params.args[0])\n apps_to_run = [GrayscaleApp(input_file)]\n return apps_to_run\n", "<import token>\nif __name__ == '__main__':\n from ex2b import GrayscalingScript\n GrayscalingScript().run()\n<import token>\n\n\nclass GrayscalingScript(SessionBasedScript):\n \"\"\"\n Convert an image to grayscale.\n \"\"\"\n\n def __init__(self):\n super(GrayscalingScript, self).__init__(version='1.0')\n\n def new_tasks(self, extra):\n input_file = abspath(self.params.args[0])\n apps_to_run = [GrayscaleApp(input_file)]\n return apps_to_run\n", "<import token>\n<code token>\n<import token>\n\n\nclass GrayscalingScript(SessionBasedScript):\n \"\"\"\n Convert an image to grayscale.\n \"\"\"\n\n def __init__(self):\n super(GrayscalingScript, self).__init__(version='1.0')\n\n def new_tasks(self, extra):\n input_file = abspath(self.params.args[0])\n apps_to_run = [GrayscaleApp(input_file)]\n return apps_to_run\n", "<import token>\n<code token>\n<import token>\n\n\nclass GrayscalingScript(SessionBasedScript):\n <docstring token>\n\n def __init__(self):\n super(GrayscalingScript, self).__init__(version='1.0')\n\n def new_tasks(self, extra):\n input_file = abspath(self.params.args[0])\n apps_to_run = [GrayscaleApp(input_file)]\n return apps_to_run\n", "<import token>\n<code token>\n<import token>\n\n\nclass GrayscalingScript(SessionBasedScript):\n <docstring token>\n <function token>\n\n def new_tasks(self, extra):\n input_file = abspath(self.params.args[0])\n apps_to_run = [GrayscaleApp(input_file)]\n return apps_to_run\n", "<import token>\n<code token>\n<import token>\n\n\nclass GrayscalingScript(SessionBasedScript):\n <docstring token>\n <function token>\n <function token>\n", "<import token>\n<code token>\n<import token>\n<class token>\n" ]
false
98,902
38b6a63bc9dea3d291eb8a0b681f4e1b40085e4c
class Solution: def countPrimes(self, n: int) -> int: cnt = 0 for i in range(2, n): for j in range(2, i): if i % j == 0: # flag = False break else: cnt += 1 # # if flag == True: # cnt += 1 return cnt def shaifa(self, n): isPrim = [True]*n for i in range(2, int(n**0.5) + 1): if isPrim[i]: for j in range(i**2, n, i): isPrim[j] = False cnt = 0 for i in range(2, len(isPrim)): if isPrim[i]: cnt += 1 return cnt if __name__ == '__main__': S = Solution() print(S.shaifa(12))
[ "class Solution:\n def countPrimes(self, n: int) -> int:\n cnt = 0\n for i in range(2, n):\n for j in range(2, i):\n if i % j == 0:\n # flag = False\n break\n else:\n cnt += 1\n #\n # if flag == True:\n # cnt += 1\n\n return cnt\n\n def shaifa(self, n):\n isPrim = [True]*n\n for i in range(2, int(n**0.5) + 1):\n if isPrim[i]:\n for j in range(i**2, n, i):\n isPrim[j] = False\n\n cnt = 0\n for i in range(2, len(isPrim)):\n if isPrim[i]:\n cnt += 1\n\n return cnt\n\nif __name__ == '__main__':\n S = Solution()\n print(S.shaifa(12))", "class Solution:\n\n def countPrimes(self, n: int) ->int:\n cnt = 0\n for i in range(2, n):\n for j in range(2, i):\n if i % j == 0:\n break\n else:\n cnt += 1\n return cnt\n\n def shaifa(self, n):\n isPrim = [True] * n\n for i in range(2, int(n ** 0.5) + 1):\n if isPrim[i]:\n for j in range(i ** 2, n, i):\n isPrim[j] = False\n cnt = 0\n for i in range(2, len(isPrim)):\n if isPrim[i]:\n cnt += 1\n return cnt\n\n\nif __name__ == '__main__':\n S = Solution()\n print(S.shaifa(12))\n", "class Solution:\n\n def countPrimes(self, n: int) ->int:\n cnt = 0\n for i in range(2, n):\n for j in range(2, i):\n if i % j == 0:\n break\n else:\n cnt += 1\n return cnt\n\n def shaifa(self, n):\n isPrim = [True] * n\n for i in range(2, int(n ** 0.5) + 1):\n if isPrim[i]:\n for j in range(i ** 2, n, i):\n isPrim[j] = False\n cnt = 0\n for i in range(2, len(isPrim)):\n if isPrim[i]:\n cnt += 1\n return cnt\n\n\n<code token>\n", "class Solution:\n\n def countPrimes(self, n: int) ->int:\n cnt = 0\n for i in range(2, n):\n for j in range(2, i):\n if i % j == 0:\n break\n else:\n cnt += 1\n return cnt\n <function token>\n\n\n<code token>\n", "class Solution:\n <function token>\n <function token>\n\n\n<code token>\n", "<class token>\n<code token>\n" ]
false
98,903
bce091e6450cb71a49081949b5863f2b3e19ecc7
from sqlalchemy import * from migrate import * from migrate.changeset import schema pre_meta = MetaData() post_meta = MetaData() skit = Table('skit', post_meta, Column('id', Integer, primary_key=True, nullable=False), Column('created_on', DateTime, default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x1052c3dd0; now>)), Column('updated_on', DateTime, onupdate=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f1d0; now>), default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f090; now>)), Column('title', Text), Column('story', Text), Column('team_id', Integer), ) member = Table('member', pre_meta, Column('id', INTEGER, primary_key=True, nullable=False), Column('first_name', VARCHAR(length=50)), Column('last_name', VARCHAR(length=50)), Column('gender', VARCHAR(length=10)), Column('mobile_number', INTEGER), Column('email', VARCHAR(length=120)), Column('birthdate', TIMESTAMP), Column('created_at', TIMESTAMP), Column('modified_at', TIMESTAMP), ) member = Table('member', post_meta, Column('id', Integer, primary_key=True, nullable=False), Column('created_on', DateTime, default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x1052c3dd0; now>)), Column('updated_on', DateTime, onupdate=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f1d0; now>), default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f090; now>)), Column('first_name', String(length=50)), Column('last_name', String(length=50)), Column('gender', String(length=10)), Column('mobile_number', Integer), Column('email', String(length=120)), Column('birthdate', DateTime), ) user = Table('user', post_meta, Column('id', Integer, primary_key=True, nullable=False), Column('created_on', DateTime, default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x1052c3dd0; now>)), Column('updated_on', DateTime, onupdate=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f1d0; now>), default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f090; now>)), Column('username', String(length=40)), Column('password_hash', String(length=128)), Column('first_name', String(length=50)), Column('last_name', String(length=50)), Column('email', String(length=120)), ) team = Table('team', pre_meta, Column('id', INTEGER, primary_key=True, nullable=False), Column('year', INTEGER), Column('theme', TEXT), Column('verse', TEXT), Column('email', VARCHAR(length=120)), Column('created_at', TIMESTAMP), Column('modified_at', TIMESTAMP), ) team = Table('team', post_meta, Column('id', Integer, primary_key=True, nullable=False), Column('created_on', DateTime, default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x1052c3dd0; now>)), Column('updated_on', DateTime, onupdate=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f1d0; now>), default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f090; now>)), Column('year', Integer), Column('theme', Text), Column('verse', Text), Column('email', String(length=120)), ) post = Table('post', pre_meta, Column('id', INTEGER, primary_key=True, nullable=False), Column('title', VARCHAR(length=200)), Column('body', TEXT), Column('created_by', INTEGER), Column('created_at', TIMESTAMP), Column('modified_at', TIMESTAMP), ) post = Table('post', post_meta, Column('id', Integer, primary_key=True, nullable=False), Column('created_on', DateTime, default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x1052c3dd0; now>)), Column('updated_on', DateTime, onupdate=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f1d0; now>), default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f090; now>)), Column('title', String(length=200)), Column('body', Text), Column('created_by', Integer), ) kid = Table('kid', pre_meta, Column('id', INTEGER, primary_key=True, nullable=False), Column('first_name', VARCHAR(length=50)), Column('last_name', VARCHAR(length=50)), Column('gender', VARCHAR(length=10)), Column('birthdate', TIMESTAMP), Column('year', TIMESTAMP), Column('tribe', VARCHAR(length=50)), Column('prayer', TEXT), Column('notes', TEXT), Column('created_at', TIMESTAMP), Column('modified_at', TIMESTAMP), ) kid = Table('kid', post_meta, Column('id', Integer, primary_key=True, nullable=False), Column('created_on', DateTime, default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x1052c3dd0; now>)), Column('updated_on', DateTime, onupdate=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f1d0; now>), default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f090; now>)), Column('first_name', String(length=50)), Column('last_name', String(length=50)), Column('gender', String(length=10)), Column('birthdate', DateTime), Column('year', DateTime), Column('tribe', String(length=50)), Column('prayer', Text), Column('notes', Text), ) def upgrade(migrate_engine): # Upgrade operations go here. Don't create your own engine; bind # migrate_engine to your metadata pre_meta.bind = migrate_engine post_meta.bind = migrate_engine post_meta.tables['skit'].columns['created_on'].create() post_meta.tables['skit'].columns['updated_on'].create() pre_meta.tables['member'].columns['created_at'].drop() pre_meta.tables['member'].columns['modified_at'].drop() post_meta.tables['member'].columns['created_on'].create() post_meta.tables['member'].columns['updated_on'].create() post_meta.tables['user'].columns['created_on'].create() post_meta.tables['user'].columns['updated_on'].create() pre_meta.tables['team'].columns['created_at'].drop() pre_meta.tables['team'].columns['modified_at'].drop() post_meta.tables['team'].columns['created_on'].create() post_meta.tables['team'].columns['updated_on'].create() pre_meta.tables['post'].columns['created_at'].drop() pre_meta.tables['post'].columns['modified_at'].drop() post_meta.tables['post'].columns['created_on'].create() post_meta.tables['post'].columns['updated_on'].create() pre_meta.tables['kid'].columns['created_at'].drop() pre_meta.tables['kid'].columns['modified_at'].drop() post_meta.tables['kid'].columns['created_on'].create() post_meta.tables['kid'].columns['updated_on'].create() def downgrade(migrate_engine): # Operations to reverse the above upgrade go here. pre_meta.bind = migrate_engine post_meta.bind = migrate_engine post_meta.tables['skit'].columns['created_on'].drop() post_meta.tables['skit'].columns['updated_on'].drop() pre_meta.tables['member'].columns['created_at'].create() pre_meta.tables['member'].columns['modified_at'].create() post_meta.tables['member'].columns['created_on'].drop() post_meta.tables['member'].columns['updated_on'].drop() post_meta.tables['user'].columns['created_on'].drop() post_meta.tables['user'].columns['updated_on'].drop() pre_meta.tables['team'].columns['created_at'].create() pre_meta.tables['team'].columns['modified_at'].create() post_meta.tables['team'].columns['created_on'].drop() post_meta.tables['team'].columns['updated_on'].drop() pre_meta.tables['post'].columns['created_at'].create() pre_meta.tables['post'].columns['modified_at'].create() post_meta.tables['post'].columns['created_on'].drop() post_meta.tables['post'].columns['updated_on'].drop() pre_meta.tables['kid'].columns['created_at'].create() pre_meta.tables['kid'].columns['modified_at'].create() post_meta.tables['kid'].columns['created_on'].drop() post_meta.tables['kid'].columns['updated_on'].drop()
[ "from sqlalchemy import *\nfrom migrate import *\n\n\nfrom migrate.changeset import schema\npre_meta = MetaData()\npost_meta = MetaData()\nskit = Table('skit', post_meta,\n Column('id', Integer, primary_key=True, nullable=False),\n Column('created_on', DateTime, default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x1052c3dd0; now>)),\n Column('updated_on', DateTime, onupdate=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f1d0; now>), default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f090; now>)),\n Column('title', Text),\n Column('story', Text),\n Column('team_id', Integer),\n)\n\nmember = Table('member', pre_meta,\n Column('id', INTEGER, primary_key=True, nullable=False),\n Column('first_name', VARCHAR(length=50)),\n Column('last_name', VARCHAR(length=50)),\n Column('gender', VARCHAR(length=10)),\n Column('mobile_number', INTEGER),\n Column('email', VARCHAR(length=120)),\n Column('birthdate', TIMESTAMP),\n Column('created_at', TIMESTAMP),\n Column('modified_at', TIMESTAMP),\n)\n\nmember = Table('member', post_meta,\n Column('id', Integer, primary_key=True, nullable=False),\n Column('created_on', DateTime, default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x1052c3dd0; now>)),\n Column('updated_on', DateTime, onupdate=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f1d0; now>), default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f090; now>)),\n Column('first_name', String(length=50)),\n Column('last_name', String(length=50)),\n Column('gender', String(length=10)),\n Column('mobile_number', Integer),\n Column('email', String(length=120)),\n Column('birthdate', DateTime),\n)\n\nuser = Table('user', post_meta,\n Column('id', Integer, primary_key=True, nullable=False),\n Column('created_on', DateTime, default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x1052c3dd0; now>)),\n Column('updated_on', DateTime, onupdate=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f1d0; now>), default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f090; now>)),\n Column('username', String(length=40)),\n Column('password_hash', String(length=128)),\n Column('first_name', String(length=50)),\n Column('last_name', String(length=50)),\n Column('email', String(length=120)),\n)\n\nteam = Table('team', pre_meta,\n Column('id', INTEGER, primary_key=True, nullable=False),\n Column('year', INTEGER),\n Column('theme', TEXT),\n Column('verse', TEXT),\n Column('email', VARCHAR(length=120)),\n Column('created_at', TIMESTAMP),\n Column('modified_at', TIMESTAMP),\n)\n\nteam = Table('team', post_meta,\n Column('id', Integer, primary_key=True, nullable=False),\n Column('created_on', DateTime, default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x1052c3dd0; now>)),\n Column('updated_on', DateTime, onupdate=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f1d0; now>), default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f090; now>)),\n Column('year', Integer),\n Column('theme', Text),\n Column('verse', Text),\n Column('email', String(length=120)),\n)\n\npost = Table('post', pre_meta,\n Column('id', INTEGER, primary_key=True, nullable=False),\n Column('title', VARCHAR(length=200)),\n Column('body', TEXT),\n Column('created_by', INTEGER),\n Column('created_at', TIMESTAMP),\n Column('modified_at', TIMESTAMP),\n)\n\npost = Table('post', post_meta,\n Column('id', Integer, primary_key=True, nullable=False),\n Column('created_on', DateTime, default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x1052c3dd0; now>)),\n Column('updated_on', DateTime, onupdate=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f1d0; now>), default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f090; now>)),\n Column('title', String(length=200)),\n Column('body', Text),\n Column('created_by', Integer),\n)\n\nkid = Table('kid', pre_meta,\n Column('id', INTEGER, primary_key=True, nullable=False),\n Column('first_name', VARCHAR(length=50)),\n Column('last_name', VARCHAR(length=50)),\n Column('gender', VARCHAR(length=10)),\n Column('birthdate', TIMESTAMP),\n Column('year', TIMESTAMP),\n Column('tribe', VARCHAR(length=50)),\n Column('prayer', TEXT),\n Column('notes', TEXT),\n Column('created_at', TIMESTAMP),\n Column('modified_at', TIMESTAMP),\n)\n\nkid = Table('kid', post_meta,\n Column('id', Integer, primary_key=True, nullable=False),\n Column('created_on', DateTime, default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x1052c3dd0; now>)),\n Column('updated_on', DateTime, onupdate=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f1d0; now>), default=ColumnDefault(<sqlalchemy.sql.functions.now at 0x10530f090; now>)),\n Column('first_name', String(length=50)),\n Column('last_name', String(length=50)),\n Column('gender', String(length=10)),\n Column('birthdate', DateTime),\n Column('year', DateTime),\n Column('tribe', String(length=50)),\n Column('prayer', Text),\n Column('notes', Text),\n)\n\n\ndef upgrade(migrate_engine):\n # Upgrade operations go here. Don't create your own engine; bind\n # migrate_engine to your metadata\n pre_meta.bind = migrate_engine\n post_meta.bind = migrate_engine\n post_meta.tables['skit'].columns['created_on'].create()\n post_meta.tables['skit'].columns['updated_on'].create()\n pre_meta.tables['member'].columns['created_at'].drop()\n pre_meta.tables['member'].columns['modified_at'].drop()\n post_meta.tables['member'].columns['created_on'].create()\n post_meta.tables['member'].columns['updated_on'].create()\n post_meta.tables['user'].columns['created_on'].create()\n post_meta.tables['user'].columns['updated_on'].create()\n pre_meta.tables['team'].columns['created_at'].drop()\n pre_meta.tables['team'].columns['modified_at'].drop()\n post_meta.tables['team'].columns['created_on'].create()\n post_meta.tables['team'].columns['updated_on'].create()\n pre_meta.tables['post'].columns['created_at'].drop()\n pre_meta.tables['post'].columns['modified_at'].drop()\n post_meta.tables['post'].columns['created_on'].create()\n post_meta.tables['post'].columns['updated_on'].create()\n pre_meta.tables['kid'].columns['created_at'].drop()\n pre_meta.tables['kid'].columns['modified_at'].drop()\n post_meta.tables['kid'].columns['created_on'].create()\n post_meta.tables['kid'].columns['updated_on'].create()\n\n\ndef downgrade(migrate_engine):\n # Operations to reverse the above upgrade go here.\n pre_meta.bind = migrate_engine\n post_meta.bind = migrate_engine\n post_meta.tables['skit'].columns['created_on'].drop()\n post_meta.tables['skit'].columns['updated_on'].drop()\n pre_meta.tables['member'].columns['created_at'].create()\n pre_meta.tables['member'].columns['modified_at'].create()\n post_meta.tables['member'].columns['created_on'].drop()\n post_meta.tables['member'].columns['updated_on'].drop()\n post_meta.tables['user'].columns['created_on'].drop()\n post_meta.tables['user'].columns['updated_on'].drop()\n pre_meta.tables['team'].columns['created_at'].create()\n pre_meta.tables['team'].columns['modified_at'].create()\n post_meta.tables['team'].columns['created_on'].drop()\n post_meta.tables['team'].columns['updated_on'].drop()\n pre_meta.tables['post'].columns['created_at'].create()\n pre_meta.tables['post'].columns['modified_at'].create()\n post_meta.tables['post'].columns['created_on'].drop()\n post_meta.tables['post'].columns['updated_on'].drop()\n pre_meta.tables['kid'].columns['created_at'].create()\n pre_meta.tables['kid'].columns['modified_at'].create()\n post_meta.tables['kid'].columns['created_on'].drop()\n post_meta.tables['kid'].columns['updated_on'].drop()\n" ]
true
98,904
9349aa717469b6a538453d1fb2fb2942b384473a
from simplemaths.simplemaths import SimpleMaths as sm test = sm(6) print(sm(6))
[ "from simplemaths.simplemaths import SimpleMaths as sm\n\ntest = sm(6)\n\n\nprint(sm(6))", "from simplemaths.simplemaths import SimpleMaths as sm\ntest = sm(6)\nprint(sm(6))\n", "<import token>\ntest = sm(6)\nprint(sm(6))\n", "<import token>\n<assignment token>\nprint(sm(6))\n", "<import token>\n<assignment token>\n<code token>\n" ]
false
98,905
31de58bedbbe090f63555d75a435cccfd65ba08f
# Generated by Django 2.1.2 on 2019-01-18 06:41 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('movie', '0006_auto_20190118_0255'), ] operations = [ migrations.RenameField( model_name='classification', old_name='movies', new_name='movie', ), ]
[ "# Generated by Django 2.1.2 on 2019-01-18 06:41\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('movie', '0006_auto_20190118_0255'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='classification',\n old_name='movies',\n new_name='movie',\n ),\n ]\n", "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('movie', '0006_auto_20190118_0255')]\n operations = [migrations.RenameField(model_name='classification',\n old_name='movies', new_name='movie')]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('movie', '0006_auto_20190118_0255')]\n operations = [migrations.RenameField(model_name='classification',\n old_name='movies', new_name='movie')]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n" ]
false
98,906
9cbb8d8bbb12581507deb8d27d388b2b2552b65c
# Implement a class to hold room information. This should have name and # description attributes. class Room: def __init__(self, name, description, loot, n, s, w, e): self.description = description self.name = name self.loot = loot self.n = n self.s = s self.w = w self.e = e
[ "# Implement a class to hold room information. This should have name and\n# description attributes.\n\n\nclass Room:\n def __init__(self, name, description, loot, n, s, w, e):\n self.description = description\n self.name = name\n self.loot = loot\n self.n = n\n self.s = s\n self.w = w\n self.e = e\n", "class Room:\n\n def __init__(self, name, description, loot, n, s, w, e):\n self.description = description\n self.name = name\n self.loot = loot\n self.n = n\n self.s = s\n self.w = w\n self.e = e\n", "class Room:\n <function token>\n", "<class token>\n" ]
false
98,907
5fb9d3e0acc97bb168f6f1f3bca6c627859b5d3d
# This is a comment line, which is ignored by Python print("Hello World")
[ "# This is a comment line, which is ignored by Python\r\n\r\nprint(\"Hello World\")\r\n", "print('Hello World')\n", "<code token>\n" ]
false
98,908
a2c18a0c1c720274dfcc0d13d358589cd9ff0b03
import docker client = docker.DockerClient('unix://var/run/docker.sock') container = client.containers.run("nginx", detach=True, ports={'80/tcp': 80}) for container in client.containers.list(): print(container.name) for image in client.images.list(): print(image.tag)
[ "import docker\n\nclient = docker.DockerClient('unix://var/run/docker.sock')\n\ncontainer = client.containers.run(\"nginx\", detach=True, ports={'80/tcp': 80})\n\nfor container in client.containers.list():\n print(container.name)\n\nfor image in client.images.list():\n print(image.tag) ", "import docker\nclient = docker.DockerClient('unix://var/run/docker.sock')\ncontainer = client.containers.run('nginx', detach=True, ports={'80/tcp': 80})\nfor container in client.containers.list():\n print(container.name)\nfor image in client.images.list():\n print(image.tag)\n", "<import token>\nclient = docker.DockerClient('unix://var/run/docker.sock')\ncontainer = client.containers.run('nginx', detach=True, ports={'80/tcp': 80})\nfor container in client.containers.list():\n print(container.name)\nfor image in client.images.list():\n print(image.tag)\n", "<import token>\n<assignment token>\nfor container in client.containers.list():\n print(container.name)\nfor image in client.images.list():\n print(image.tag)\n", "<import token>\n<assignment token>\n<code token>\n" ]
false
98,909
b31e9ce14b11ab5d97a3e37a00c35c23f4ac5b18
# coding=utf-8 from django.shortcuts import render from django.http import HttpResponseRedirect from datetime import datetime from .forms import form_user, form_forgot from django.contrib.auth import authenticate, login, logout from django.contrib.auth.models import User from posto.models import credor from divida.models import divida as divida_model from django.contrib.auth.decorators import login_required, user_passes_test import string import random from django.core.mail import send_mail from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger def login_view(request): if request.method == 'POST': form = form_user(request.POST) if form.is_valid(): email = form.cleaned_data['email'] pwd = form.cleaned_data['password'] user = authenticate(username=email.strip(), password=pwd) if user is not None: if user.is_active: login(request, user) return HttpResponseRedirect(request.POST.get('next')) else: form_errors = form.errors else: form_errors = form.errors else: form_errors = form.errors else: form = form_user() form_errors = '' return render( request, 'usuario/logon.html', { 'form': form, 'form_errors': form_errors } ) def logout_view(request): logout(request) return HttpResponseRedirect('/') def user_check(request, user_id): if user_id == request.user.id: return True else: return False @login_required def user_view(request, user_id): if int(request.user.id) == int(user_id): user = User.objects.get(id=user_id) user_profile = credor.objects.get(email=user) user_credor = credor.objects.get(email=request.user) dividas = divida_model.objects.filter(credor_cnpj=user).order_by('data_add') dividas_count = divida_model.objects.filter(credor_cnpj=user).count() dividas_enc_count = divida_model.objects.filter(credor_cnpj=user).filter(is_open=False).count() dividas_abertas_count = divida_model.objects.filter(credor_cnpj=user).filter(is_open=True).count() paginator = Paginator(dividas, 20) page = request.GET.get('page') try: div_page = paginator.page(page) except PageNotAnInteger: div_page = paginator.page(1) except EmptyPage: div_page = paginator.page(paginator.num_pages) context = { 'user_profile': user_profile, 'credor': user_credor, 'div_page': div_page, 'dividas_enc_count': dividas_enc_count, 'dividas_abertas_count': dividas_abertas_count, 'dividas_count': dividas_count } else: context = {} template = 'usuario/detail.html' return render(request, template, context) def pass_generator(size=6, chars=string.ascii_uppercase + string.digits): return ''.join(random.choice(chars) for _ in range(size)) def forgot_pass_view(request): if request.method == 'POST': form = form_forgot(request.POST) if form.is_valid(): email = form.cleaned_data['email'] user = User.objects.get(username=email) print(user) senha = pass_generator() user.set_password(senha) send_mail('Senha desabono', 'Sua nova senha do desabono.com é {pwd}'.format(pwd=senha), '[email protected]', [user.username, ], fail_silently=False) user.save() else: form = form_forgot() context = { 'form': form } template = 'usuario/forgot_pwd.html' return render(request, template, context)
[ "# coding=utf-8\nfrom django.shortcuts import render\nfrom django.http import HttpResponseRedirect\nfrom datetime import datetime\nfrom .forms import form_user, form_forgot\nfrom django.contrib.auth import authenticate, login, logout\nfrom django.contrib.auth.models import User\nfrom posto.models import credor\nfrom divida.models import divida as divida_model\nfrom django.contrib.auth.decorators import login_required, user_passes_test\nimport string\nimport random\nfrom django.core.mail import send_mail\nfrom django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n\n\ndef login_view(request):\n if request.method == 'POST':\n form = form_user(request.POST)\n if form.is_valid():\n email = form.cleaned_data['email']\n pwd = form.cleaned_data['password']\n user = authenticate(username=email.strip(), password=pwd)\n if user is not None:\n if user.is_active:\n login(request, user)\n return HttpResponseRedirect(request.POST.get('next'))\n else:\n form_errors = form.errors\n else:\n form_errors = form.errors\n else:\n form_errors = form.errors\n else:\n form = form_user()\n form_errors = ''\n return render(\n request,\n 'usuario/logon.html',\n {\n 'form': form,\n 'form_errors': form_errors\n }\n )\n\n\ndef logout_view(request):\n logout(request)\n return HttpResponseRedirect('/')\n\n\ndef user_check(request, user_id):\n if user_id == request.user.id:\n return True\n else:\n return False\n\n\n@login_required\ndef user_view(request, user_id):\n if int(request.user.id) == int(user_id):\n user = User.objects.get(id=user_id)\n user_profile = credor.objects.get(email=user)\n user_credor = credor.objects.get(email=request.user)\n dividas = divida_model.objects.filter(credor_cnpj=user).order_by('data_add')\n dividas_count = divida_model.objects.filter(credor_cnpj=user).count()\n dividas_enc_count = divida_model.objects.filter(credor_cnpj=user).filter(is_open=False).count()\n dividas_abertas_count = divida_model.objects.filter(credor_cnpj=user).filter(is_open=True).count()\n\n paginator = Paginator(dividas, 20)\n\n page = request.GET.get('page')\n try:\n div_page = paginator.page(page)\n except PageNotAnInteger:\n div_page = paginator.page(1)\n except EmptyPage:\n div_page = paginator.page(paginator.num_pages)\n\n context = {\n 'user_profile': user_profile,\n 'credor': user_credor,\n 'div_page': div_page,\n 'dividas_enc_count': dividas_enc_count,\n 'dividas_abertas_count': dividas_abertas_count,\n 'dividas_count': dividas_count\n }\n else:\n context = {}\n template = 'usuario/detail.html'\n return render(request, template, context)\n\n\ndef pass_generator(size=6, chars=string.ascii_uppercase + string.digits):\n return ''.join(random.choice(chars) for _ in range(size))\n\n\ndef forgot_pass_view(request):\n if request.method == 'POST':\n form = form_forgot(request.POST)\n if form.is_valid():\n email = form.cleaned_data['email']\n user = User.objects.get(username=email)\n print(user)\n senha = pass_generator()\n user.set_password(senha)\n send_mail('Senha desabono', 'Sua nova senha do desabono.com é {pwd}'.format(pwd=senha), '[email protected]',\n [user.username, ], fail_silently=False)\n user.save()\n else:\n form = form_forgot()\n context = {\n 'form': form\n }\n template = 'usuario/forgot_pwd.html'\n return render(request, template, context)\n\n", "from django.shortcuts import render\nfrom django.http import HttpResponseRedirect\nfrom datetime import datetime\nfrom .forms import form_user, form_forgot\nfrom django.contrib.auth import authenticate, login, logout\nfrom django.contrib.auth.models import User\nfrom posto.models import credor\nfrom divida.models import divida as divida_model\nfrom django.contrib.auth.decorators import login_required, user_passes_test\nimport string\nimport random\nfrom django.core.mail import send_mail\nfrom django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n\n\ndef login_view(request):\n if request.method == 'POST':\n form = form_user(request.POST)\n if form.is_valid():\n email = form.cleaned_data['email']\n pwd = form.cleaned_data['password']\n user = authenticate(username=email.strip(), password=pwd)\n if user is not None:\n if user.is_active:\n login(request, user)\n return HttpResponseRedirect(request.POST.get('next'))\n else:\n form_errors = form.errors\n else:\n form_errors = form.errors\n else:\n form_errors = form.errors\n else:\n form = form_user()\n form_errors = ''\n return render(request, 'usuario/logon.html', {'form': form,\n 'form_errors': form_errors})\n\n\ndef logout_view(request):\n logout(request)\n return HttpResponseRedirect('/')\n\n\ndef user_check(request, user_id):\n if user_id == request.user.id:\n return True\n else:\n return False\n\n\n@login_required\ndef user_view(request, user_id):\n if int(request.user.id) == int(user_id):\n user = User.objects.get(id=user_id)\n user_profile = credor.objects.get(email=user)\n user_credor = credor.objects.get(email=request.user)\n dividas = divida_model.objects.filter(credor_cnpj=user).order_by(\n 'data_add')\n dividas_count = divida_model.objects.filter(credor_cnpj=user).count()\n dividas_enc_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=False).count()\n dividas_abertas_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=True).count()\n paginator = Paginator(dividas, 20)\n page = request.GET.get('page')\n try:\n div_page = paginator.page(page)\n except PageNotAnInteger:\n div_page = paginator.page(1)\n except EmptyPage:\n div_page = paginator.page(paginator.num_pages)\n context = {'user_profile': user_profile, 'credor': user_credor,\n 'div_page': div_page, 'dividas_enc_count': dividas_enc_count,\n 'dividas_abertas_count': dividas_abertas_count, 'dividas_count':\n dividas_count}\n else:\n context = {}\n template = 'usuario/detail.html'\n return render(request, template, context)\n\n\ndef pass_generator(size=6, chars=string.ascii_uppercase + string.digits):\n return ''.join(random.choice(chars) for _ in range(size))\n\n\ndef forgot_pass_view(request):\n if request.method == 'POST':\n form = form_forgot(request.POST)\n if form.is_valid():\n email = form.cleaned_data['email']\n user = User.objects.get(username=email)\n print(user)\n senha = pass_generator()\n user.set_password(senha)\n send_mail('Senha desabono',\n 'Sua nova senha do desabono.com é {pwd}'.format(pwd=senha),\n '[email protected]', [user.username], fail_silently=False)\n user.save()\n else:\n form = form_forgot()\n context = {'form': form}\n template = 'usuario/forgot_pwd.html'\n return render(request, template, context)\n", "<import token>\n\n\ndef login_view(request):\n if request.method == 'POST':\n form = form_user(request.POST)\n if form.is_valid():\n email = form.cleaned_data['email']\n pwd = form.cleaned_data['password']\n user = authenticate(username=email.strip(), password=pwd)\n if user is not None:\n if user.is_active:\n login(request, user)\n return HttpResponseRedirect(request.POST.get('next'))\n else:\n form_errors = form.errors\n else:\n form_errors = form.errors\n else:\n form_errors = form.errors\n else:\n form = form_user()\n form_errors = ''\n return render(request, 'usuario/logon.html', {'form': form,\n 'form_errors': form_errors})\n\n\ndef logout_view(request):\n logout(request)\n return HttpResponseRedirect('/')\n\n\ndef user_check(request, user_id):\n if user_id == request.user.id:\n return True\n else:\n return False\n\n\n@login_required\ndef user_view(request, user_id):\n if int(request.user.id) == int(user_id):\n user = User.objects.get(id=user_id)\n user_profile = credor.objects.get(email=user)\n user_credor = credor.objects.get(email=request.user)\n dividas = divida_model.objects.filter(credor_cnpj=user).order_by(\n 'data_add')\n dividas_count = divida_model.objects.filter(credor_cnpj=user).count()\n dividas_enc_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=False).count()\n dividas_abertas_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=True).count()\n paginator = Paginator(dividas, 20)\n page = request.GET.get('page')\n try:\n div_page = paginator.page(page)\n except PageNotAnInteger:\n div_page = paginator.page(1)\n except EmptyPage:\n div_page = paginator.page(paginator.num_pages)\n context = {'user_profile': user_profile, 'credor': user_credor,\n 'div_page': div_page, 'dividas_enc_count': dividas_enc_count,\n 'dividas_abertas_count': dividas_abertas_count, 'dividas_count':\n dividas_count}\n else:\n context = {}\n template = 'usuario/detail.html'\n return render(request, template, context)\n\n\ndef pass_generator(size=6, chars=string.ascii_uppercase + string.digits):\n return ''.join(random.choice(chars) for _ in range(size))\n\n\ndef forgot_pass_view(request):\n if request.method == 'POST':\n form = form_forgot(request.POST)\n if form.is_valid():\n email = form.cleaned_data['email']\n user = User.objects.get(username=email)\n print(user)\n senha = pass_generator()\n user.set_password(senha)\n send_mail('Senha desabono',\n 'Sua nova senha do desabono.com é {pwd}'.format(pwd=senha),\n '[email protected]', [user.username], fail_silently=False)\n user.save()\n else:\n form = form_forgot()\n context = {'form': form}\n template = 'usuario/forgot_pwd.html'\n return render(request, template, context)\n", "<import token>\n<function token>\n\n\ndef logout_view(request):\n logout(request)\n return HttpResponseRedirect('/')\n\n\ndef user_check(request, user_id):\n if user_id == request.user.id:\n return True\n else:\n return False\n\n\n@login_required\ndef user_view(request, user_id):\n if int(request.user.id) == int(user_id):\n user = User.objects.get(id=user_id)\n user_profile = credor.objects.get(email=user)\n user_credor = credor.objects.get(email=request.user)\n dividas = divida_model.objects.filter(credor_cnpj=user).order_by(\n 'data_add')\n dividas_count = divida_model.objects.filter(credor_cnpj=user).count()\n dividas_enc_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=False).count()\n dividas_abertas_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=True).count()\n paginator = Paginator(dividas, 20)\n page = request.GET.get('page')\n try:\n div_page = paginator.page(page)\n except PageNotAnInteger:\n div_page = paginator.page(1)\n except EmptyPage:\n div_page = paginator.page(paginator.num_pages)\n context = {'user_profile': user_profile, 'credor': user_credor,\n 'div_page': div_page, 'dividas_enc_count': dividas_enc_count,\n 'dividas_abertas_count': dividas_abertas_count, 'dividas_count':\n dividas_count}\n else:\n context = {}\n template = 'usuario/detail.html'\n return render(request, template, context)\n\n\ndef pass_generator(size=6, chars=string.ascii_uppercase + string.digits):\n return ''.join(random.choice(chars) for _ in range(size))\n\n\ndef forgot_pass_view(request):\n if request.method == 'POST':\n form = form_forgot(request.POST)\n if form.is_valid():\n email = form.cleaned_data['email']\n user = User.objects.get(username=email)\n print(user)\n senha = pass_generator()\n user.set_password(senha)\n send_mail('Senha desabono',\n 'Sua nova senha do desabono.com é {pwd}'.format(pwd=senha),\n '[email protected]', [user.username], fail_silently=False)\n user.save()\n else:\n form = form_forgot()\n context = {'form': form}\n template = 'usuario/forgot_pwd.html'\n return render(request, template, context)\n", "<import token>\n<function token>\n<function token>\n\n\ndef user_check(request, user_id):\n if user_id == request.user.id:\n return True\n else:\n return False\n\n\n@login_required\ndef user_view(request, user_id):\n if int(request.user.id) == int(user_id):\n user = User.objects.get(id=user_id)\n user_profile = credor.objects.get(email=user)\n user_credor = credor.objects.get(email=request.user)\n dividas = divida_model.objects.filter(credor_cnpj=user).order_by(\n 'data_add')\n dividas_count = divida_model.objects.filter(credor_cnpj=user).count()\n dividas_enc_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=False).count()\n dividas_abertas_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=True).count()\n paginator = Paginator(dividas, 20)\n page = request.GET.get('page')\n try:\n div_page = paginator.page(page)\n except PageNotAnInteger:\n div_page = paginator.page(1)\n except EmptyPage:\n div_page = paginator.page(paginator.num_pages)\n context = {'user_profile': user_profile, 'credor': user_credor,\n 'div_page': div_page, 'dividas_enc_count': dividas_enc_count,\n 'dividas_abertas_count': dividas_abertas_count, 'dividas_count':\n dividas_count}\n else:\n context = {}\n template = 'usuario/detail.html'\n return render(request, template, context)\n\n\ndef pass_generator(size=6, chars=string.ascii_uppercase + string.digits):\n return ''.join(random.choice(chars) for _ in range(size))\n\n\ndef forgot_pass_view(request):\n if request.method == 'POST':\n form = form_forgot(request.POST)\n if form.is_valid():\n email = form.cleaned_data['email']\n user = User.objects.get(username=email)\n print(user)\n senha = pass_generator()\n user.set_password(senha)\n send_mail('Senha desabono',\n 'Sua nova senha do desabono.com é {pwd}'.format(pwd=senha),\n '[email protected]', [user.username], fail_silently=False)\n user.save()\n else:\n form = form_forgot()\n context = {'form': form}\n template = 'usuario/forgot_pwd.html'\n return render(request, template, context)\n", "<import token>\n<function token>\n<function token>\n\n\ndef user_check(request, user_id):\n if user_id == request.user.id:\n return True\n else:\n return False\n\n\n@login_required\ndef user_view(request, user_id):\n if int(request.user.id) == int(user_id):\n user = User.objects.get(id=user_id)\n user_profile = credor.objects.get(email=user)\n user_credor = credor.objects.get(email=request.user)\n dividas = divida_model.objects.filter(credor_cnpj=user).order_by(\n 'data_add')\n dividas_count = divida_model.objects.filter(credor_cnpj=user).count()\n dividas_enc_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=False).count()\n dividas_abertas_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=True).count()\n paginator = Paginator(dividas, 20)\n page = request.GET.get('page')\n try:\n div_page = paginator.page(page)\n except PageNotAnInteger:\n div_page = paginator.page(1)\n except EmptyPage:\n div_page = paginator.page(paginator.num_pages)\n context = {'user_profile': user_profile, 'credor': user_credor,\n 'div_page': div_page, 'dividas_enc_count': dividas_enc_count,\n 'dividas_abertas_count': dividas_abertas_count, 'dividas_count':\n dividas_count}\n else:\n context = {}\n template = 'usuario/detail.html'\n return render(request, template, context)\n\n\ndef pass_generator(size=6, chars=string.ascii_uppercase + string.digits):\n return ''.join(random.choice(chars) for _ in range(size))\n\n\n<function token>\n", "<import token>\n<function token>\n<function token>\n\n\ndef user_check(request, user_id):\n if user_id == request.user.id:\n return True\n else:\n return False\n\n\n@login_required\ndef user_view(request, user_id):\n if int(request.user.id) == int(user_id):\n user = User.objects.get(id=user_id)\n user_profile = credor.objects.get(email=user)\n user_credor = credor.objects.get(email=request.user)\n dividas = divida_model.objects.filter(credor_cnpj=user).order_by(\n 'data_add')\n dividas_count = divida_model.objects.filter(credor_cnpj=user).count()\n dividas_enc_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=False).count()\n dividas_abertas_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=True).count()\n paginator = Paginator(dividas, 20)\n page = request.GET.get('page')\n try:\n div_page = paginator.page(page)\n except PageNotAnInteger:\n div_page = paginator.page(1)\n except EmptyPage:\n div_page = paginator.page(paginator.num_pages)\n context = {'user_profile': user_profile, 'credor': user_credor,\n 'div_page': div_page, 'dividas_enc_count': dividas_enc_count,\n 'dividas_abertas_count': dividas_abertas_count, 'dividas_count':\n dividas_count}\n else:\n context = {}\n template = 'usuario/detail.html'\n return render(request, template, context)\n\n\n<function token>\n<function token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n\n\n@login_required\ndef user_view(request, user_id):\n if int(request.user.id) == int(user_id):\n user = User.objects.get(id=user_id)\n user_profile = credor.objects.get(email=user)\n user_credor = credor.objects.get(email=request.user)\n dividas = divida_model.objects.filter(credor_cnpj=user).order_by(\n 'data_add')\n dividas_count = divida_model.objects.filter(credor_cnpj=user).count()\n dividas_enc_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=False).count()\n dividas_abertas_count = divida_model.objects.filter(credor_cnpj=user\n ).filter(is_open=True).count()\n paginator = Paginator(dividas, 20)\n page = request.GET.get('page')\n try:\n div_page = paginator.page(page)\n except PageNotAnInteger:\n div_page = paginator.page(1)\n except EmptyPage:\n div_page = paginator.page(paginator.num_pages)\n context = {'user_profile': user_profile, 'credor': user_credor,\n 'div_page': div_page, 'dividas_enc_count': dividas_enc_count,\n 'dividas_abertas_count': dividas_abertas_count, 'dividas_count':\n dividas_count}\n else:\n context = {}\n template = 'usuario/detail.html'\n return render(request, template, context)\n\n\n<function token>\n<function token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n" ]
false
98,910
6ed110d36896f9d7a22a8478281799d3fe564e1b
# -*- coding: utf-8 -*- """ Created on Fri Mar 12 23:45:02 2021 @author: Doğukan Bozkurt """ goal= float(input("Enter goal: ")) dist= float(input("Enter distance: ")) if goal==1: if dist >= 16.5: print("He scores, absolutely brilliant!") else: if dist >= 5.5: print("A fantastic move and good finish!") else: print("He finds the net with ease!") else: print("He should have scored!")
[ "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Mar 12 23:45:02 2021\r\n\r\n@author: Doğukan Bozkurt\r\n\"\"\"\r\n\r\ngoal= float(input(\"Enter goal: \"))\r\ndist= float(input(\"Enter distance: \"))\r\n\r\nif goal==1:\r\n if dist >= 16.5:\r\n print(\"He scores, absolutely brilliant!\")\r\n else:\r\n if dist >= 5.5:\r\n print(\"A fantastic move and good finish!\")\r\n else:\r\n print(\"He finds the net with ease!\")\r\nelse:\r\n print(\"He should have scored!\")", "<docstring token>\ngoal = float(input('Enter goal: '))\ndist = float(input('Enter distance: '))\nif goal == 1:\n if dist >= 16.5:\n print('He scores, absolutely brilliant!')\n elif dist >= 5.5:\n print('A fantastic move and good finish!')\n else:\n print('He finds the net with ease!')\nelse:\n print('He should have scored!')\n", "<docstring token>\n<assignment token>\nif goal == 1:\n if dist >= 16.5:\n print('He scores, absolutely brilliant!')\n elif dist >= 5.5:\n print('A fantastic move and good finish!')\n else:\n print('He finds the net with ease!')\nelse:\n print('He should have scored!')\n", "<docstring token>\n<assignment token>\n<code token>\n" ]
false
98,911
60b516b3624aa410ff949b33c0628b8105fb543c
from rest_framework import status, generics from rest_framework.decorators import permission_classes from rest_framework.permissions import IsAdminUser from rest_framework.response import Response from rest_framework.views import APIView from posts.models import Member from posts.serializers import MemberSerializer class MemberList(APIView): def get(self, request, format=None): member = Member.objects.all().order_by('-email') serializer = MemberSerializer(member, many=True) return Response(serializer.data) @permission_classes((IsAdminUser, )) def post(self, request, format=None): user = request.user serializer = MemberSerializer( data=request.data, context={'user': user}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) class MemberDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Member.objects.all() serializer_class = MemberSerializer
[ "from rest_framework import status, generics\nfrom rest_framework.decorators import permission_classes\nfrom rest_framework.permissions import IsAdminUser\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\n\nfrom posts.models import Member\nfrom posts.serializers import MemberSerializer\n\n\nclass MemberList(APIView):\n def get(self, request, format=None):\n member = Member.objects.all().order_by('-email')\n serializer = MemberSerializer(member, many=True)\n return Response(serializer.data)\n\n @permission_classes((IsAdminUser, ))\n def post(self, request, format=None):\n user = request.user\n serializer = MemberSerializer(\n data=request.data, context={'user': user})\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n\nclass MemberDetail(generics.RetrieveUpdateDestroyAPIView):\n queryset = Member.objects.all()\n serializer_class = MemberSerializer\n", "from rest_framework import status, generics\nfrom rest_framework.decorators import permission_classes\nfrom rest_framework.permissions import IsAdminUser\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\nfrom posts.models import Member\nfrom posts.serializers import MemberSerializer\n\n\nclass MemberList(APIView):\n\n def get(self, request, format=None):\n member = Member.objects.all().order_by('-email')\n serializer = MemberSerializer(member, many=True)\n return Response(serializer.data)\n\n @permission_classes((IsAdminUser,))\n def post(self, request, format=None):\n user = request.user\n serializer = MemberSerializer(data=request.data, context={'user': user}\n )\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n\nclass MemberDetail(generics.RetrieveUpdateDestroyAPIView):\n queryset = Member.objects.all()\n serializer_class = MemberSerializer\n", "<import token>\n\n\nclass MemberList(APIView):\n\n def get(self, request, format=None):\n member = Member.objects.all().order_by('-email')\n serializer = MemberSerializer(member, many=True)\n return Response(serializer.data)\n\n @permission_classes((IsAdminUser,))\n def post(self, request, format=None):\n user = request.user\n serializer = MemberSerializer(data=request.data, context={'user': user}\n )\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n\nclass MemberDetail(generics.RetrieveUpdateDestroyAPIView):\n queryset = Member.objects.all()\n serializer_class = MemberSerializer\n", "<import token>\n\n\nclass MemberList(APIView):\n <function token>\n\n @permission_classes((IsAdminUser,))\n def post(self, request, format=None):\n user = request.user\n serializer = MemberSerializer(data=request.data, context={'user': user}\n )\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n\nclass MemberDetail(generics.RetrieveUpdateDestroyAPIView):\n queryset = Member.objects.all()\n serializer_class = MemberSerializer\n", "<import token>\n\n\nclass MemberList(APIView):\n <function token>\n <function token>\n\n\nclass MemberDetail(generics.RetrieveUpdateDestroyAPIView):\n queryset = Member.objects.all()\n serializer_class = MemberSerializer\n", "<import token>\n<class token>\n\n\nclass MemberDetail(generics.RetrieveUpdateDestroyAPIView):\n queryset = Member.objects.all()\n serializer_class = MemberSerializer\n", "<import token>\n<class token>\n\n\nclass MemberDetail(generics.RetrieveUpdateDestroyAPIView):\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n<class token>\n" ]
false
98,912
a455b7fd1f2538cd932deed2acd8bd53128f6e10
import base64 as sys_base64 class Base64(object): @staticmethod def encode(src: str, encoding="utf8") -> str: """ base64编码 :param src: 待编码文本 :param encoding: 编码类型 :return: """ return sys_base64.b64encode((bytes(src, encoding=encoding))).decode() @staticmethod def decode(src: str, encoding="utf8") -> str: """ base64解码 :param src: 待解码文本 :param encoding: 编码类型 :return: """ return sys_base64.b64decode(bytes(src, encoding=encoding)).decode() @staticmethod def encode_multilines(src_lines: [], encoding="utf8") -> []: """ base64多行编码 :param src_lines: 待编码的多行文本 :param encoding: 编码类型 :return: """ base64_lines = [] for line in src_lines: base64_str = sys_base64.b64encode(bytes(line, encoding=encoding)).decode() # 编码 base64_lines.append(base64_str) return base64_lines @staticmethod def decode_multilines(src_lines: [], encoding="utf8") -> []: """ base64多行解码 :param src_lines: 待解码的多行文本 :param encoding: 编码类型 :return: """ base64_lines = [] for line in src_lines: base64_str = sys_base64.b64decode(bytes(line, encoding=encoding)).decode() # 解码 base64_lines.append(base64_str) return base64_lines
[ "import base64 as sys_base64\n\n\nclass Base64(object):\n\n @staticmethod\n def encode(src: str, encoding=\"utf8\") -> str:\n \"\"\"\n base64编码\n :param src: 待编码文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n return sys_base64.b64encode((bytes(src, encoding=encoding))).decode()\n\n @staticmethod\n def decode(src: str, encoding=\"utf8\") -> str:\n \"\"\"\n base64解码\n :param src: 待解码文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n return sys_base64.b64decode(bytes(src, encoding=encoding)).decode()\n\n @staticmethod\n def encode_multilines(src_lines: [], encoding=\"utf8\") -> []:\n \"\"\"\n base64多行编码\n :param src_lines: 待编码的多行文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n base64_lines = []\n for line in src_lines:\n base64_str = sys_base64.b64encode(bytes(line, encoding=encoding)).decode() # 编码\n base64_lines.append(base64_str)\n return base64_lines\n\n @staticmethod\n def decode_multilines(src_lines: [], encoding=\"utf8\") -> []:\n \"\"\"\n base64多行解码\n :param src_lines: 待解码的多行文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n base64_lines = []\n for line in src_lines:\n base64_str = sys_base64.b64decode(bytes(line, encoding=encoding)).decode() # 解码\n base64_lines.append(base64_str)\n return base64_lines\n", "import base64 as sys_base64\n\n\nclass Base64(object):\n\n @staticmethod\n def encode(src: str, encoding='utf8') ->str:\n \"\"\"\n base64编码\n :param src: 待编码文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n return sys_base64.b64encode(bytes(src, encoding=encoding)).decode()\n\n @staticmethod\n def decode(src: str, encoding='utf8') ->str:\n \"\"\"\n base64解码\n :param src: 待解码文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n return sys_base64.b64decode(bytes(src, encoding=encoding)).decode()\n\n @staticmethod\n def encode_multilines(src_lines: [], encoding='utf8') ->[]:\n \"\"\"\n base64多行编码\n :param src_lines: 待编码的多行文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n base64_lines = []\n for line in src_lines:\n base64_str = sys_base64.b64encode(bytes(line, encoding=encoding)\n ).decode()\n base64_lines.append(base64_str)\n return base64_lines\n\n @staticmethod\n def decode_multilines(src_lines: [], encoding='utf8') ->[]:\n \"\"\"\n base64多行解码\n :param src_lines: 待解码的多行文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n base64_lines = []\n for line in src_lines:\n base64_str = sys_base64.b64decode(bytes(line, encoding=encoding)\n ).decode()\n base64_lines.append(base64_str)\n return base64_lines\n", "<import token>\n\n\nclass Base64(object):\n\n @staticmethod\n def encode(src: str, encoding='utf8') ->str:\n \"\"\"\n base64编码\n :param src: 待编码文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n return sys_base64.b64encode(bytes(src, encoding=encoding)).decode()\n\n @staticmethod\n def decode(src: str, encoding='utf8') ->str:\n \"\"\"\n base64解码\n :param src: 待解码文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n return sys_base64.b64decode(bytes(src, encoding=encoding)).decode()\n\n @staticmethod\n def encode_multilines(src_lines: [], encoding='utf8') ->[]:\n \"\"\"\n base64多行编码\n :param src_lines: 待编码的多行文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n base64_lines = []\n for line in src_lines:\n base64_str = sys_base64.b64encode(bytes(line, encoding=encoding)\n ).decode()\n base64_lines.append(base64_str)\n return base64_lines\n\n @staticmethod\n def decode_multilines(src_lines: [], encoding='utf8') ->[]:\n \"\"\"\n base64多行解码\n :param src_lines: 待解码的多行文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n base64_lines = []\n for line in src_lines:\n base64_str = sys_base64.b64decode(bytes(line, encoding=encoding)\n ).decode()\n base64_lines.append(base64_str)\n return base64_lines\n", "<import token>\n\n\nclass Base64(object):\n\n @staticmethod\n def encode(src: str, encoding='utf8') ->str:\n \"\"\"\n base64编码\n :param src: 待编码文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n return sys_base64.b64encode(bytes(src, encoding=encoding)).decode()\n\n @staticmethod\n def decode(src: str, encoding='utf8') ->str:\n \"\"\"\n base64解码\n :param src: 待解码文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n return sys_base64.b64decode(bytes(src, encoding=encoding)).decode()\n\n @staticmethod\n def encode_multilines(src_lines: [], encoding='utf8') ->[]:\n \"\"\"\n base64多行编码\n :param src_lines: 待编码的多行文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n base64_lines = []\n for line in src_lines:\n base64_str = sys_base64.b64encode(bytes(line, encoding=encoding)\n ).decode()\n base64_lines.append(base64_str)\n return base64_lines\n <function token>\n", "<import token>\n\n\nclass Base64(object):\n\n @staticmethod\n def encode(src: str, encoding='utf8') ->str:\n \"\"\"\n base64编码\n :param src: 待编码文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n return sys_base64.b64encode(bytes(src, encoding=encoding)).decode()\n\n @staticmethod\n def decode(src: str, encoding='utf8') ->str:\n \"\"\"\n base64解码\n :param src: 待解码文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n return sys_base64.b64decode(bytes(src, encoding=encoding)).decode()\n <function token>\n <function token>\n", "<import token>\n\n\nclass Base64(object):\n <function token>\n\n @staticmethod\n def decode(src: str, encoding='utf8') ->str:\n \"\"\"\n base64解码\n :param src: 待解码文本\n :param encoding: 编码类型\n :return:\n \"\"\"\n return sys_base64.b64decode(bytes(src, encoding=encoding)).decode()\n <function token>\n <function token>\n", "<import token>\n\n\nclass Base64(object):\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<class token>\n" ]
false
98,913
8a190e9d8dca44e82cd9125d171ab54f6018d10f
from __future__ import unicode_literals from django.apps import AppConfig class NewstockConfig(AppConfig): name = 'newstock'
[ "from __future__ import unicode_literals\n\nfrom django.apps import AppConfig\n\n\nclass NewstockConfig(AppConfig):\n name = 'newstock'\n", "from __future__ import unicode_literals\nfrom django.apps import AppConfig\n\n\nclass NewstockConfig(AppConfig):\n name = 'newstock'\n", "<import token>\n\n\nclass NewstockConfig(AppConfig):\n name = 'newstock'\n", "<import token>\n\n\nclass NewstockConfig(AppConfig):\n <assignment token>\n", "<import token>\n<class token>\n" ]
false
98,914
9c701874b799fb1b7f90a510d1bac4e98972e5da
import numpy as np import matplotlib.pyplot as plt fig = plt.figure(1) ax = fig.add_subplot(111)# 111 denotes 1 row, 1 column, 1 graph fig.tight_layout() ## the data N = 4 #--- Total packets Reduced totalpacketsHN = sum(np.loadtxt('D:/Datasets/Normal/Reduced/nflstathwn.txt', usecols=(0,), delimiter=',')) totalpacketsISCX = sum(np.loadtxt('D:/Datasets/Normal/ISCX/nflstathwn.txt', usecols=(0,), delimiter=',')) totalpacketsUNIBS = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/nflstatunibs.txt', usecols=(0,), delimiter=',')) totalpacketsCAIDA = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/aflstathwn_prev.txt', usecols=(0,), delimiter=',')) '''totalpacketsBF = sum(np.loadtxt('D:/Datasets/Attack/ISCX/BruteForceSSH/aflstathwn.txt', usecols=(0,), delimiter=',')) totalpacketsBotnet = sum(np.loadtxt('D:/Datasets/Attack/ISCX/DDoSBotnet/aflstathwn.txt', usecols=(0,), delimiter=',')) totalpacketsHTTP = sum(np.loadtxt('D:/Datasets/Attack/ISCX/HTTPDDoS/aflstathwn.txt', usecols=(0,), delimiter=',')) totalpacketsInf = sum(np.loadtxt('D:/Datasets/Attack/ISCX/infiltration/aflstathwn.txt', usecols=(0,), delimiter=','))''' print(totalpacketsHN) print(totalpacketsISCX) print(totalpacketsUNIBS) print(totalpacketsCAIDA) #--- ICMP data = sum(np.loadtxt('D:/Datasets/Normal/Reduced/icmpstat.txt', usecols=(0,), delimiter=',')) data1 = sum(np.loadtxt('D:/Datasets/Normal/ISCX/icmpstat.txt', usecols=(0,), delimiter=',')) data2 = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/icmpstat.txt', usecols=(0,), delimiter=',')) dataA1 = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/icmpstat_prev.txt', usecols=(0,), delimiter=',')) '''dataA4 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/BruteForceSSH/icmpstat.txt', usecols=(0,), delimiter=',')) dataA7= sum(np.loadtxt('D:/Datasets/Attack/ISCX/DDoSBotnet/icmpstat.txt', usecols=(0,), delimiter=',')) dataA10= sum(np.loadtxt('D:/Datasets/Attack/ISCX/HTTPDDoS/icmpstat.txt', usecols=(0,), delimiter=',')) dataA13= sum(np.loadtxt('D:/Datasets/Attack/ISCX/infiltration/icmpstat.txt', usecols=(0,), delimiter=','))''' icmp = (data/totalpacketsHN)*100 icmp1 = (data1/totalpacketsISCX)*100 icmp2 = (data2/totalpacketsUNIBS)*100 icmp3 = (dataA1/totalpacketsCAIDA)*100 '''icmp4 = (dataA4/totalpacketsBF)*100 icmp5 = (dataA7/totalpacketsBotnet)*100 icmp6 = (dataA10/totalpacketsHTTP)*100 icmp7 = (dataA13/totalpacketsInf)*100''' print(icmp) print(icmp1) print(icmp2) #--- TCP data3 = sum(np.loadtxt('D:/Datasets/Normal/Reduced/tcpstat.txt', usecols=(0,), delimiter=',')) data4 = sum(np.loadtxt('D:/Datasets/Normal/ISCX/tcpstat.txt', usecols=(0,), delimiter=',')) data5 = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/tcpstat.txt', usecols=(0,), delimiter=',')) dataA2 = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/tcpstat_prev.txt', usecols=(0,), delimiter=',')) '''dataA5 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/BruteForceSSH/tcpstat.txt', usecols=(0,), delimiter=',')) dataA8 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/DDoSBotnet/tcpstat.txt', usecols=(0,), delimiter=',')) dataA11 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/HTTPDDoS/tcpstat.txt', usecols=(0,), delimiter=',')) dataA14 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/infiltration/tcpstat.txt', usecols=(0,), delimiter=','))''' tcp = (data3/totalpacketsHN)*100 tcp1 = (data4/totalpacketsISCX)*100 tcp2 = (data5/totalpacketsUNIBS)*100 tcp3 = (dataA2/totalpacketsCAIDA)*100 '''tcp4 = (dataA5/totalpacketsBF)*100 tcp5 = (dataA8/totalpacketsBotnet)*100 tcp6 = (dataA11/totalpacketsHTTP)*100 tcp7 = (dataA14/totalpacketsInf)*100''' print(tcp2) #--- UDP data6 = sum(np.loadtxt('D:/Datasets/Normal/Reduced/udpstat.txt', usecols=(0,), delimiter=',')) data7 = sum(np.loadtxt('D:/Datasets/Normal/ISCX/udpstat.txt', usecols=(0,), delimiter=',')) data8 = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/udpstat.txt', usecols=(0,), delimiter=',')) dataA3 = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/udpstat_prev.txt', usecols=(0,), delimiter=',')) '''dataA6 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/BruteForceSSH/udpstat.txt', usecols=(0,), delimiter=',')) dataA9 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/DDoSBotnet/udpstat.txt', usecols=(0,), delimiter=',')) dataA12 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/HTTPDDoS/udpstat.txt', usecols=(0,), delimiter=',')) dataA15 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/infiltration/udpstat.txt', usecols=(0,), delimiter=','))''' udp = (data6/totalpacketsHN)*100 udp1 = (data7/totalpacketsISCX)*100 udp2 = (data8/totalpacketsUNIBS)*100 udp3 = (dataA3/totalpacketsCAIDA)*100 '''udp4 = (dataA6/totalpacketsBF)*100 udp5 = (dataA9/totalpacketsBotnet)*100 udp6 = (dataA12/totalpacketsHTTP)*100 udp7 = (dataA15/totalpacketsInf)*100''' ICMP = [icmp, icmp1, icmp2,icmp3] TCP = [tcp,tcp1,tcp2,tcp3] UDP = [udp, udp1, udp2,udp3] ## necessary variables ind = np.arange(N) # the x locations for the groups width = 0.30 # the width of the bars ## the bars rects1 = ax.bar(ind, ICMP, width, color='#FFFFFF' ) rects2 = ax.bar(ind+width, TCP, width, color='#C0C0C0') #, #yerr=womenStd, #error_kw=dict(elinewidth=3,ecolor='black') rects3 = ax.bar(ind+width+width,UDP, width, color='#000000') # axes and labels ax.set_xlim(-width/2,len(ind)+width) ax.set_ylim(0,120) ax.set_ylabel('Packet Percentage') #ax.set_title('Normal and Attack Datasets') #xTickMarks = ['Home\n Network' , 'ISCX', 'UNIBS','CAIDA', 'ISCX \n Brute Force\n SSH', 'ISCX \n DDoS\n Botnet','ISCX \n HTTP\n DDoS','ISCX \n Infiltration \nfrom Inside'] xTickMarks = ['Home\n Network' , 'ISCX', 'UNIBS','CAIDA'] ax.set_xticks(ind+width) xtickNames = ax.set_xticklabels(xTickMarks) plt.setp(xtickNames, rotation=0, fontsize=14) ## add a legend ax.legend( (rects1[0], rects2[0],rects3[0]), ('ICMP', 'TCP','UDP'), loc='upper left' ) def autolabel(rects): # attach some text labels for rect in rects: height = rect.get_height() ax.text(rect.get_x() + rect.get_width()/2., 1.0*height, '%.2f' % float(height), ha='center', va='bottom',fontsize='12') autolabel(rects1) autolabel(rects2) autolabel(rects3) plt.show()
[ "import numpy as np\r\nimport matplotlib.pyplot as plt\r\n\r\nfig = plt.figure(1)\r\nax = fig.add_subplot(111)# 111 denotes 1 row, 1 column, 1 graph\r\nfig.tight_layout()\r\n## the data\r\n\r\nN = 4\r\n#--- Total packets Reduced\r\ntotalpacketsHN = sum(np.loadtxt('D:/Datasets/Normal/Reduced/nflstathwn.txt', usecols=(0,), delimiter=','))\r\ntotalpacketsISCX = sum(np.loadtxt('D:/Datasets/Normal/ISCX/nflstathwn.txt', usecols=(0,), delimiter=','))\r\ntotalpacketsUNIBS = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/nflstatunibs.txt', usecols=(0,), delimiter=','))\r\ntotalpacketsCAIDA = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/aflstathwn_prev.txt', usecols=(0,), delimiter=','))\r\n'''totalpacketsBF = sum(np.loadtxt('D:/Datasets/Attack/ISCX/BruteForceSSH/aflstathwn.txt', usecols=(0,), delimiter=','))\r\ntotalpacketsBotnet = sum(np.loadtxt('D:/Datasets/Attack/ISCX/DDoSBotnet/aflstathwn.txt', usecols=(0,), delimiter=','))\r\ntotalpacketsHTTP = sum(np.loadtxt('D:/Datasets/Attack/ISCX/HTTPDDoS/aflstathwn.txt', usecols=(0,), delimiter=','))\r\ntotalpacketsInf = sum(np.loadtxt('D:/Datasets/Attack/ISCX/infiltration/aflstathwn.txt', usecols=(0,), delimiter=','))'''\r\n\r\n\r\n\r\nprint(totalpacketsHN)\r\nprint(totalpacketsISCX)\r\nprint(totalpacketsUNIBS)\r\nprint(totalpacketsCAIDA)\r\n#--- ICMP\r\ndata = sum(np.loadtxt('D:/Datasets/Normal/Reduced/icmpstat.txt', usecols=(0,), delimiter=','))\r\ndata1 = sum(np.loadtxt('D:/Datasets/Normal/ISCX/icmpstat.txt', usecols=(0,), delimiter=','))\r\ndata2 = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/icmpstat.txt', usecols=(0,), delimiter=','))\r\ndataA1 = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/icmpstat_prev.txt', usecols=(0,), delimiter=','))\r\n'''dataA4 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/BruteForceSSH/icmpstat.txt', usecols=(0,), delimiter=','))\r\ndataA7= sum(np.loadtxt('D:/Datasets/Attack/ISCX/DDoSBotnet/icmpstat.txt', usecols=(0,), delimiter=','))\r\ndataA10= sum(np.loadtxt('D:/Datasets/Attack/ISCX/HTTPDDoS/icmpstat.txt', usecols=(0,), delimiter=','))\r\ndataA13= sum(np.loadtxt('D:/Datasets/Attack/ISCX/infiltration/icmpstat.txt', usecols=(0,), delimiter=','))'''\r\n\r\n\r\n\r\nicmp = (data/totalpacketsHN)*100\r\nicmp1 = (data1/totalpacketsISCX)*100\r\nicmp2 = (data2/totalpacketsUNIBS)*100\r\nicmp3 = (dataA1/totalpacketsCAIDA)*100\r\n'''icmp4 = (dataA4/totalpacketsBF)*100\r\nicmp5 = (dataA7/totalpacketsBotnet)*100\r\nicmp6 = (dataA10/totalpacketsHTTP)*100\r\nicmp7 = (dataA13/totalpacketsInf)*100'''\r\n\r\nprint(icmp)\r\nprint(icmp1)\r\nprint(icmp2)\r\n\r\n\r\n#--- TCP\r\ndata3 = sum(np.loadtxt('D:/Datasets/Normal/Reduced/tcpstat.txt', usecols=(0,), delimiter=','))\r\ndata4 = sum(np.loadtxt('D:/Datasets/Normal/ISCX/tcpstat.txt', usecols=(0,), delimiter=','))\r\ndata5 = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/tcpstat.txt', usecols=(0,), delimiter=','))\r\ndataA2 = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/tcpstat_prev.txt', usecols=(0,), delimiter=','))\r\n'''dataA5 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/BruteForceSSH/tcpstat.txt', usecols=(0,), delimiter=','))\r\ndataA8 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/DDoSBotnet/tcpstat.txt', usecols=(0,), delimiter=','))\r\ndataA11 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/HTTPDDoS/tcpstat.txt', usecols=(0,), delimiter=','))\r\ndataA14 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/infiltration/tcpstat.txt', usecols=(0,), delimiter=','))'''\r\n\r\ntcp = (data3/totalpacketsHN)*100\r\ntcp1 = (data4/totalpacketsISCX)*100\r\ntcp2 = (data5/totalpacketsUNIBS)*100\r\ntcp3 = (dataA2/totalpacketsCAIDA)*100\r\n'''tcp4 = (dataA5/totalpacketsBF)*100\r\ntcp5 = (dataA8/totalpacketsBotnet)*100\r\ntcp6 = (dataA11/totalpacketsHTTP)*100\r\ntcp7 = (dataA14/totalpacketsInf)*100'''\r\n\r\nprint(tcp2)\r\n\r\n\r\n#--- UDP\r\ndata6 = sum(np.loadtxt('D:/Datasets/Normal/Reduced/udpstat.txt', usecols=(0,), delimiter=','))\r\ndata7 = sum(np.loadtxt('D:/Datasets/Normal/ISCX/udpstat.txt', usecols=(0,), delimiter=','))\r\ndata8 = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/udpstat.txt', usecols=(0,), delimiter=','))\r\ndataA3 = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/udpstat_prev.txt', usecols=(0,), delimiter=','))\r\n'''dataA6 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/BruteForceSSH/udpstat.txt', usecols=(0,), delimiter=','))\r\ndataA9 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/DDoSBotnet/udpstat.txt', usecols=(0,), delimiter=','))\r\ndataA12 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/HTTPDDoS/udpstat.txt', usecols=(0,), delimiter=','))\r\ndataA15 = sum(np.loadtxt('D:/Datasets/Attack/ISCX/infiltration/udpstat.txt', usecols=(0,), delimiter=','))'''\r\n\r\nudp = (data6/totalpacketsHN)*100\r\nudp1 = (data7/totalpacketsISCX)*100\r\nudp2 = (data8/totalpacketsUNIBS)*100\r\nudp3 = (dataA3/totalpacketsCAIDA)*100\r\n'''udp4 = (dataA6/totalpacketsBF)*100\r\nudp5 = (dataA9/totalpacketsBotnet)*100\r\nudp6 = (dataA12/totalpacketsHTTP)*100\r\nudp7 = (dataA15/totalpacketsInf)*100'''\r\n\r\n\r\n\r\nICMP = [icmp, icmp1, icmp2,icmp3]\r\n\r\nTCP = [tcp,tcp1,tcp2,tcp3]\r\n\r\nUDP = [udp, udp1, udp2,udp3]\r\n\r\n\r\n## necessary variables\r\nind = np.arange(N) # the x locations for the groups\r\nwidth = 0.30 # the width of the bars\r\n\r\n## the bars\r\nrects1 = ax.bar(ind, ICMP, width,\r\n color='#FFFFFF'\r\n )\r\n\r\nrects2 = ax.bar(ind+width, TCP, width,\r\n color='#C0C0C0')\r\n\t#,\r\n #yerr=womenStd,\r\n #error_kw=dict(elinewidth=3,ecolor='black')\t\t\t\t\r\nrects3 = ax.bar(ind+width+width,UDP, width,\r\n color='#000000')\r\n\t\t\t\t\t\r\n\r\n# axes and labels\r\nax.set_xlim(-width/2,len(ind)+width)\r\nax.set_ylim(0,120)\r\nax.set_ylabel('Packet Percentage')\r\n#ax.set_title('Normal and Attack Datasets')\r\n#xTickMarks = ['Home\\n Network' , 'ISCX', 'UNIBS','CAIDA', 'ISCX \\n Brute Force\\n SSH', 'ISCX \\n DDoS\\n Botnet','ISCX \\n HTTP\\n DDoS','ISCX \\n Infiltration \\nfrom Inside']\r\nxTickMarks = ['Home\\n Network' , 'ISCX', 'UNIBS','CAIDA']\r\n\r\nax.set_xticks(ind+width)\r\nxtickNames = ax.set_xticklabels(xTickMarks)\r\nplt.setp(xtickNames, rotation=0, fontsize=14)\r\n\r\n\r\n\r\n## add a legend\r\nax.legend( (rects1[0], rects2[0],rects3[0]), ('ICMP', 'TCP','UDP'), loc='upper left' )\r\n\r\ndef autolabel(rects):\r\n # attach some text labels\r\n for rect in rects:\r\n height = rect.get_height()\r\n ax.text(rect.get_x() + rect.get_width()/2., 1.0*height,\r\n '%.2f' % float(height),\r\n ha='center', va='bottom',fontsize='12')\r\nautolabel(rects1)\r\nautolabel(rects2)\r\nautolabel(rects3)\r\n\r\n\r\n\r\n\r\nplt.show()\r\n\r\n\r\n\r\n\r\n", "import numpy as np\nimport matplotlib.pyplot as plt\nfig = plt.figure(1)\nax = fig.add_subplot(111)\nfig.tight_layout()\nN = 4\ntotalpacketsHN = sum(np.loadtxt('D:/Datasets/Normal/Reduced/nflstathwn.txt',\n usecols=(0,), delimiter=','))\ntotalpacketsISCX = sum(np.loadtxt('D:/Datasets/Normal/ISCX/nflstathwn.txt',\n usecols=(0,), delimiter=','))\ntotalpacketsUNIBS = sum(np.loadtxt(\n 'D:/Datasets/Normal/UNIBS/nflstatunibs.txt', usecols=(0,), delimiter=','))\ntotalpacketsCAIDA = sum(np.loadtxt(\n 'D:/Datasets/Attack/CAIDA/aflstathwn_prev.txt', usecols=(0,), delimiter\n =','))\n<docstring token>\nprint(totalpacketsHN)\nprint(totalpacketsISCX)\nprint(totalpacketsUNIBS)\nprint(totalpacketsCAIDA)\ndata = sum(np.loadtxt('D:/Datasets/Normal/Reduced/icmpstat.txt', usecols=(0\n ,), delimiter=','))\ndata1 = sum(np.loadtxt('D:/Datasets/Normal/ISCX/icmpstat.txt', usecols=(0,),\n delimiter=','))\ndata2 = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/icmpstat.txt', usecols=(0,\n ), delimiter=','))\ndataA1 = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/icmpstat_prev.txt',\n usecols=(0,), delimiter=','))\n<docstring token>\nicmp = data / totalpacketsHN * 100\nicmp1 = data1 / totalpacketsISCX * 100\nicmp2 = data2 / totalpacketsUNIBS * 100\nicmp3 = dataA1 / totalpacketsCAIDA * 100\n<docstring token>\nprint(icmp)\nprint(icmp1)\nprint(icmp2)\ndata3 = sum(np.loadtxt('D:/Datasets/Normal/Reduced/tcpstat.txt', usecols=(0\n ,), delimiter=','))\ndata4 = sum(np.loadtxt('D:/Datasets/Normal/ISCX/tcpstat.txt', usecols=(0,),\n delimiter=','))\ndata5 = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/tcpstat.txt', usecols=(0,),\n delimiter=','))\ndataA2 = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/tcpstat_prev.txt',\n usecols=(0,), delimiter=','))\n<docstring token>\ntcp = data3 / totalpacketsHN * 100\ntcp1 = data4 / totalpacketsISCX * 100\ntcp2 = data5 / totalpacketsUNIBS * 100\ntcp3 = dataA2 / totalpacketsCAIDA * 100\n<docstring token>\nprint(tcp2)\ndata6 = sum(np.loadtxt('D:/Datasets/Normal/Reduced/udpstat.txt', usecols=(0\n ,), delimiter=','))\ndata7 = sum(np.loadtxt('D:/Datasets/Normal/ISCX/udpstat.txt', usecols=(0,),\n delimiter=','))\ndata8 = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/udpstat.txt', usecols=(0,),\n delimiter=','))\ndataA3 = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/udpstat_prev.txt',\n usecols=(0,), delimiter=','))\n<docstring token>\nudp = data6 / totalpacketsHN * 100\nudp1 = data7 / totalpacketsISCX * 100\nudp2 = data8 / totalpacketsUNIBS * 100\nudp3 = dataA3 / totalpacketsCAIDA * 100\n<docstring token>\nICMP = [icmp, icmp1, icmp2, icmp3]\nTCP = [tcp, tcp1, tcp2, tcp3]\nUDP = [udp, udp1, udp2, udp3]\nind = np.arange(N)\nwidth = 0.3\nrects1 = ax.bar(ind, ICMP, width, color='#FFFFFF')\nrects2 = ax.bar(ind + width, TCP, width, color='#C0C0C0')\nrects3 = ax.bar(ind + width + width, UDP, width, color='#000000')\nax.set_xlim(-width / 2, len(ind) + width)\nax.set_ylim(0, 120)\nax.set_ylabel('Packet Percentage')\nxTickMarks = ['Home\\n Network', 'ISCX', 'UNIBS', 'CAIDA']\nax.set_xticks(ind + width)\nxtickNames = ax.set_xticklabels(xTickMarks)\nplt.setp(xtickNames, rotation=0, fontsize=14)\nax.legend((rects1[0], rects2[0], rects3[0]), ('ICMP', 'TCP', 'UDP'), loc=\n 'upper left')\n\n\ndef autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width() / 2.0, 1.0 * height, '%.2f' %\n float(height), ha='center', va='bottom', fontsize='12')\n\n\nautolabel(rects1)\nautolabel(rects2)\nautolabel(rects3)\nplt.show()\n", "<import token>\nfig = plt.figure(1)\nax = fig.add_subplot(111)\nfig.tight_layout()\nN = 4\ntotalpacketsHN = sum(np.loadtxt('D:/Datasets/Normal/Reduced/nflstathwn.txt',\n usecols=(0,), delimiter=','))\ntotalpacketsISCX = sum(np.loadtxt('D:/Datasets/Normal/ISCX/nflstathwn.txt',\n usecols=(0,), delimiter=','))\ntotalpacketsUNIBS = sum(np.loadtxt(\n 'D:/Datasets/Normal/UNIBS/nflstatunibs.txt', usecols=(0,), delimiter=','))\ntotalpacketsCAIDA = sum(np.loadtxt(\n 'D:/Datasets/Attack/CAIDA/aflstathwn_prev.txt', usecols=(0,), delimiter\n =','))\n<docstring token>\nprint(totalpacketsHN)\nprint(totalpacketsISCX)\nprint(totalpacketsUNIBS)\nprint(totalpacketsCAIDA)\ndata = sum(np.loadtxt('D:/Datasets/Normal/Reduced/icmpstat.txt', usecols=(0\n ,), delimiter=','))\ndata1 = sum(np.loadtxt('D:/Datasets/Normal/ISCX/icmpstat.txt', usecols=(0,),\n delimiter=','))\ndata2 = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/icmpstat.txt', usecols=(0,\n ), delimiter=','))\ndataA1 = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/icmpstat_prev.txt',\n usecols=(0,), delimiter=','))\n<docstring token>\nicmp = data / totalpacketsHN * 100\nicmp1 = data1 / totalpacketsISCX * 100\nicmp2 = data2 / totalpacketsUNIBS * 100\nicmp3 = dataA1 / totalpacketsCAIDA * 100\n<docstring token>\nprint(icmp)\nprint(icmp1)\nprint(icmp2)\ndata3 = sum(np.loadtxt('D:/Datasets/Normal/Reduced/tcpstat.txt', usecols=(0\n ,), delimiter=','))\ndata4 = sum(np.loadtxt('D:/Datasets/Normal/ISCX/tcpstat.txt', usecols=(0,),\n delimiter=','))\ndata5 = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/tcpstat.txt', usecols=(0,),\n delimiter=','))\ndataA2 = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/tcpstat_prev.txt',\n usecols=(0,), delimiter=','))\n<docstring token>\ntcp = data3 / totalpacketsHN * 100\ntcp1 = data4 / totalpacketsISCX * 100\ntcp2 = data5 / totalpacketsUNIBS * 100\ntcp3 = dataA2 / totalpacketsCAIDA * 100\n<docstring token>\nprint(tcp2)\ndata6 = sum(np.loadtxt('D:/Datasets/Normal/Reduced/udpstat.txt', usecols=(0\n ,), delimiter=','))\ndata7 = sum(np.loadtxt('D:/Datasets/Normal/ISCX/udpstat.txt', usecols=(0,),\n delimiter=','))\ndata8 = sum(np.loadtxt('D:/Datasets/Normal/UNIBS/udpstat.txt', usecols=(0,),\n delimiter=','))\ndataA3 = sum(np.loadtxt('D:/Datasets/Attack/CAIDA/udpstat_prev.txt',\n usecols=(0,), delimiter=','))\n<docstring token>\nudp = data6 / totalpacketsHN * 100\nudp1 = data7 / totalpacketsISCX * 100\nudp2 = data8 / totalpacketsUNIBS * 100\nudp3 = dataA3 / totalpacketsCAIDA * 100\n<docstring token>\nICMP = [icmp, icmp1, icmp2, icmp3]\nTCP = [tcp, tcp1, tcp2, tcp3]\nUDP = [udp, udp1, udp2, udp3]\nind = np.arange(N)\nwidth = 0.3\nrects1 = ax.bar(ind, ICMP, width, color='#FFFFFF')\nrects2 = ax.bar(ind + width, TCP, width, color='#C0C0C0')\nrects3 = ax.bar(ind + width + width, UDP, width, color='#000000')\nax.set_xlim(-width / 2, len(ind) + width)\nax.set_ylim(0, 120)\nax.set_ylabel('Packet Percentage')\nxTickMarks = ['Home\\n Network', 'ISCX', 'UNIBS', 'CAIDA']\nax.set_xticks(ind + width)\nxtickNames = ax.set_xticklabels(xTickMarks)\nplt.setp(xtickNames, rotation=0, fontsize=14)\nax.legend((rects1[0], rects2[0], rects3[0]), ('ICMP', 'TCP', 'UDP'), loc=\n 'upper left')\n\n\ndef autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width() / 2.0, 1.0 * height, '%.2f' %\n float(height), ha='center', va='bottom', fontsize='12')\n\n\nautolabel(rects1)\nautolabel(rects2)\nautolabel(rects3)\nplt.show()\n", "<import token>\n<assignment token>\nfig.tight_layout()\n<assignment token>\n<docstring token>\nprint(totalpacketsHN)\nprint(totalpacketsISCX)\nprint(totalpacketsUNIBS)\nprint(totalpacketsCAIDA)\n<assignment token>\n<docstring token>\n<assignment token>\n<docstring token>\nprint(icmp)\nprint(icmp1)\nprint(icmp2)\n<assignment token>\n<docstring token>\n<assignment token>\n<docstring token>\nprint(tcp2)\n<assignment token>\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\nax.set_xlim(-width / 2, len(ind) + width)\nax.set_ylim(0, 120)\nax.set_ylabel('Packet Percentage')\n<assignment token>\nax.set_xticks(ind + width)\n<assignment token>\nplt.setp(xtickNames, rotation=0, fontsize=14)\nax.legend((rects1[0], rects2[0], rects3[0]), ('ICMP', 'TCP', 'UDP'), loc=\n 'upper left')\n\n\ndef autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width() / 2.0, 1.0 * height, '%.2f' %\n float(height), ha='center', va='bottom', fontsize='12')\n\n\nautolabel(rects1)\nautolabel(rects2)\nautolabel(rects3)\nplt.show()\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<docstring token>\n<code token>\n<assignment token>\n<docstring token>\n<assignment token>\n<docstring token>\n<code token>\n<assignment token>\n<docstring token>\n<assignment token>\n<docstring token>\n<code token>\n<assignment token>\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n\n\ndef autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width() / 2.0, 1.0 * height, '%.2f' %\n float(height), ha='center', va='bottom', fontsize='12')\n\n\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<docstring token>\n<code token>\n<assignment token>\n<docstring token>\n<assignment token>\n<docstring token>\n<code token>\n<assignment token>\n<docstring token>\n<assignment token>\n<docstring token>\n<code token>\n<assignment token>\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<function token>\n<code token>\n" ]
false
98,915
ed42f4f7eae8ab271369daaca3c1d88ae4d3e0fa
import os import torch import time from core.utils import * from datasets.meters import AVAMeter def train_ava(cfg, epoch, model, train_loader, loss_module, optimizer): print("training function!!!!!!!!!!") t0 = time.time() loss_module.reset_meters() l_loader = len(train_loader) print(" lenth l_loader: ", l_loader) model.train() for batch_idx, batch in enumerate(train_loader): print("&&&&&&&&&&&&&&&&batch_idx: ", batch_idx) data = batch['clip'].cuda() target = {'cls': batch['cls'], 'boxes': batch['boxes']} output = model(data) loss = loss_module(output, target, epoch, batch_idx, l_loader) loss.backward() steps = cfg.TRAIN.TOTAL_BATCH_SIZE // cfg.TRAIN.BATCH_SIZE if batch_idx % steps == 0: optimizer.step() optimizer.zero_grad() # save result every 1000 batches if batch_idx % 2000 == 0: # From time to time, reset averagemeters to see improvements loss_module.reset_meters() t1 = time.time() logging('trained with %f samples/s' % (len(train_loader.dataset)/(t1-t0))) print('') def train_ucf24_jhmdb21(cfg, epoch, model, train_loader, loss_module, optimizer): t0 = time.time() loss_module.reset_meters() l_loader = len(train_loader) model.train() for batch_idx, (data, target) in enumerate(train_loader): data = data.cuda() output = model(data) loss = loss_module(output, target, epoch, batch_idx, l_loader) loss.backward() steps = cfg.TRAIN.TOTAL_BATCH_SIZE // cfg.TRAIN.BATCH_SIZE if batch_idx % steps == 0: optimizer.step() optimizer.zero_grad() # save result every 1000 batches if batch_idx % 2000 == 0: # From time to time, reset averagemeters to see improvements loss_module.reset_meters() t1 = time.time() logging('trained with %f samples/s' % (len(train_loader.dataset)/(t1-t0))) print('') @torch.no_grad() def test_ava(cfg, epoch, model, test_loader): # Test parameters num_classes = cfg.MODEL.NUM_CLASSES anchors = [float(i) for i in cfg.SOLVER.ANCHORS] num_anchors = cfg.SOLVER.NUM_ANCHORS nms_thresh = 0.5 conf_thresh_valid = 0.005 nbatch = len(test_loader) meter = AVAMeter(cfg, cfg.TRAIN.MODE, 'latest_detection.json') model.eval() for batch_idx, batch in enumerate(test_loader): data = batch['clip'].cuda() target = {'cls': batch['cls'], 'boxes': batch['boxes']} with torch.no_grad(): output = model(data) metadata = batch['metadata'].cpu().numpy() preds = [] all_boxes = get_region_boxes_ava(output, conf_thresh_valid, num_classes, anchors, num_anchors, 0, 1) for i in range(output.size(0)): boxes = all_boxes[i] boxes = nms(boxes, nms_thresh) for box in boxes: x1 = float(box[0]-box[2]/2.0) y1 = float(box[1]-box[3]/2.0) x2 = float(box[0]+box[2]/2.0) y2 = float(box[1]+box[3]/2.0) det_conf = float(box[4]) cls_out = [det_conf * x.cpu().numpy() for x in box[5]] preds.append([[x1,y1,x2,y2], cls_out, metadata[i][:2].tolist()]) meter.update_stats(preds) logging("[%d/%d]" % (batch_idx, nbatch)) mAP = meter.evaluate_ava() logging("mode: {} -- mAP: {}".format(meter.mode, mAP)) return mAP @torch.no_grad() def test_ucf24_jhmdb21(cfg, epoch, model, test_loader): def truths_length(truths): for i in range(50): if truths[i][1] == 0: # print("i: ", i) return i # Test parameters # nms_thresh = 0.4 # iou_thresh = 0.5 nms_thresh = 0.1 iou_thresh = 0.2 eps = 1e-5 num_classes = cfg.MODEL.NUM_CLASSES anchors = [float(i) for i in cfg.SOLVER.ANCHORS] num_anchors = cfg.SOLVER.NUM_ANCHORS conf_thresh_valid = 0.005 total = 0.0 proposals = 0.0 correct = 0.0 fscore = 0.0 print("num_classes: {}".format(num_classes)) print("anchors: {}".format(anchors)) print("num_anchors: {}".format(num_anchors)) # print("num_classes: {}".format()) correct_classification = 0.0 total_detected = 0.0 nbatch = len(test_loader) print("nbatch: ", nbatch) model.eval() print("1111111111111111111111111111111111111111111111111111111111111111111111") for batch_idx, (frame_idx, data, target) in enumerate(test_loader): # related to class UCF_JHMDB_Dataset __getitem__ # print("@@@@@@@@@@@@@@@@@@@@@@ batch_idx: ", batch_idx) # 0~11 if len is 12 # print("@@@@@@ frame_idx: ", frame_idx) # ['Basketball_v_Basketball_g01_c01_00009.txt', 'Basketball_v_Basketball_g01_c01_00010.txt'] # print("data: ", data.shape) # torch.Size([2, 3, 16, 224, 224]) # print("target: ", target.shape) # torch.Size([2, 250]) # print("data: ", data) # print("target: ", target) data = data.cuda() with torch.no_grad(): output = model(data).data # model output, 4 ∗ 145 ∗ 7 ∗ 7 all_boxes = get_region_boxes(output, conf_thresh_valid, num_classes, anchors, num_anchors, 0, 1) for i in range(output.size(0)): boxes = all_boxes[i] boxes = nms(boxes, nms_thresh) # print(" len of boxes: ", len(boxes)) if cfg.TRAIN.DATASET == 'ucf24': detection_path = os.path.join('ucf_detections', 'detections_'+str(epoch), frame_idx[i]) print("detection_path: ", detection_path) current_dir = os.path.join('ucf_detections', 'detections_'+str(epoch)) if not os.path.exists('ucf_detections'): os.mkdir('ucf_detections') if not os.path.exists(current_dir): os.mkdir(current_dir) else: detection_path = os.path.join('jhmdb_detections', 'detections_'+str(epoch), frame_idx[i]) current_dir = os.path.join('jhmdb_detections', 'detections_'+str(epoch)) if not os.path.exists('jhmdb_detections'): os.mkdir('jhmdb_detections') if not os.path.exists(current_dir): os.mkdir(current_dir) with open(detection_path, 'w+') as f_detect: for box in boxes: x1 = round(float(box[0]-box[2]/2.0) * 320.0) y1 = round(float(box[1]-box[3]/2.0) * 240.0) x2 = round(float(box[0]+box[2]/2.0) * 320.0) y2 = round(float(box[1]+box[3]/2.0) * 240.0) det_conf = float(box[4]) for j in range((len(box)-5)//2): cls_conf = float(box[5+2*j].item()) prob = det_conf * cls_conf f_detect.write(str(int(box[6])+1) + ' ' + str(prob) + ' ' + str(x1) + ' ' + str(y1) + ' ' + str(x2) + ' ' + str(y2) + '\n') # 其shape为50∗5 通过truths_length获取真实的target数量。 truths = target[i].view(-1, 5) # print("opt truths:", truths) num_gts = truths_length(truths) print("num_gts: ", num_gts) total = total + num_gts pred_list = [] # LIST OF CONFIDENT BOX INDICES for i in range(len(boxes)): if boxes[i][4] > 0.25: # print("######### boxes[i]: ", boxes[i]) proposals = proposals+1 pred_list.append(i) for i in range(num_gts): box_gt = [truths[i][1], truths[i][2], truths[i][3], truths[i][4], 1.0, 1.0, truths[i][0]] print("box_gt: ", box_gt) print("len(pred_list): ", len(pred_list)) best_iou = 0 best_j = -1 for j in pred_list: # ITERATE THROUGH ONLY CONFIDENT BOXES iou = bbox_iou(box_gt, boxes[j], x1y1x2y2=False) if iou > best_iou: best_j = j best_iou = iou if best_iou > iou_thresh: total_detected += 1 if int(boxes[best_j][6]) == box_gt[6]: correct_classification += 1 if best_iou > iou_thresh and int(boxes[best_j][6]) == box_gt[6]: correct = correct+1 precision = 1.0*correct/(proposals+eps) recall = 1.0*correct/(total+eps) fscore = 2.0*precision*recall/(precision+recall+eps) logging("[%d/%d] precision: %f, recall: %f, fscore: %f" % (batch_idx, nbatch, precision, recall, fscore)) classification_accuracy = 1.0 * correct_classification / (total_detected + eps) locolization_recall = 1.0 * total_detected / (total + eps) print("Classification accuracy: %.3f" % classification_accuracy) print("Locolization recall: %.3f" % locolization_recall) return fscore
[ "import os\nimport torch\nimport time\nfrom core.utils import *\nfrom datasets.meters import AVAMeter\n\n\n\ndef train_ava(cfg, epoch, model, train_loader, loss_module, optimizer):\n print(\"training function!!!!!!!!!!\")\n t0 = time.time()\n loss_module.reset_meters()\n l_loader = len(train_loader)\n print(\" lenth l_loader: \", l_loader)\n\n model.train()\n for batch_idx, batch in enumerate(train_loader):\n print(\"&&&&&&&&&&&&&&&&batch_idx: \", batch_idx)\n data = batch['clip'].cuda()\n target = {'cls': batch['cls'], 'boxes': batch['boxes']}\n output = model(data)\n loss = loss_module(output, target, epoch, batch_idx, l_loader)\n\n loss.backward()\n steps = cfg.TRAIN.TOTAL_BATCH_SIZE // cfg.TRAIN.BATCH_SIZE\n if batch_idx % steps == 0:\n optimizer.step()\n optimizer.zero_grad()\n\n # save result every 1000 batches\n if batch_idx % 2000 == 0: # From time to time, reset averagemeters to see improvements\n loss_module.reset_meters()\n t1 = time.time()\n logging('trained with %f samples/s' % (len(train_loader.dataset)/(t1-t0)))\n print('')\n\n\ndef train_ucf24_jhmdb21(cfg, epoch, model, train_loader, loss_module, optimizer):\n t0 = time.time()\n loss_module.reset_meters()\n l_loader = len(train_loader)\n\n model.train()\n for batch_idx, (data, target) in enumerate(train_loader):\n data = data.cuda()\n output = model(data)\n loss = loss_module(output, target, epoch, batch_idx, l_loader)\n\n loss.backward()\n steps = cfg.TRAIN.TOTAL_BATCH_SIZE // cfg.TRAIN.BATCH_SIZE\n if batch_idx % steps == 0:\n optimizer.step()\n optimizer.zero_grad()\n\n # save result every 1000 batches\n if batch_idx % 2000 == 0: # From time to time, reset averagemeters to see improvements\n loss_module.reset_meters()\n\n t1 = time.time()\n logging('trained with %f samples/s' % (len(train_loader.dataset)/(t1-t0)))\n print('')\n\n\n\[email protected]_grad()\ndef test_ava(cfg, epoch, model, test_loader):\n # Test parameters\n num_classes = cfg.MODEL.NUM_CLASSES\n anchors = [float(i) for i in cfg.SOLVER.ANCHORS]\n num_anchors = cfg.SOLVER.NUM_ANCHORS\n nms_thresh = 0.5\n conf_thresh_valid = 0.005\n\n nbatch = len(test_loader)\n meter = AVAMeter(cfg, cfg.TRAIN.MODE, 'latest_detection.json')\n\n model.eval()\n for batch_idx, batch in enumerate(test_loader):\n data = batch['clip'].cuda()\n target = {'cls': batch['cls'], 'boxes': batch['boxes']}\n\n with torch.no_grad():\n output = model(data)\n metadata = batch['metadata'].cpu().numpy()\n\n preds = []\n all_boxes = get_region_boxes_ava(output, conf_thresh_valid, num_classes, anchors, num_anchors, 0, 1)\n for i in range(output.size(0)):\n boxes = all_boxes[i]\n boxes = nms(boxes, nms_thresh)\n \n for box in boxes:\n x1 = float(box[0]-box[2]/2.0)\n y1 = float(box[1]-box[3]/2.0)\n x2 = float(box[0]+box[2]/2.0)\n y2 = float(box[1]+box[3]/2.0)\n det_conf = float(box[4])\n cls_out = [det_conf * x.cpu().numpy() for x in box[5]]\n preds.append([[x1,y1,x2,y2], cls_out, metadata[i][:2].tolist()])\n\n meter.update_stats(preds)\n logging(\"[%d/%d]\" % (batch_idx, nbatch))\n\n mAP = meter.evaluate_ava()\n logging(\"mode: {} -- mAP: {}\".format(meter.mode, mAP))\n\n return mAP\n\n\n\[email protected]_grad()\ndef test_ucf24_jhmdb21(cfg, epoch, model, test_loader):\n\n def truths_length(truths):\n for i in range(50):\n if truths[i][1] == 0:\n # print(\"i: \", i)\n return i\n\n # Test parameters\n # nms_thresh = 0.4\n # iou_thresh = 0.5\n nms_thresh = 0.1\n iou_thresh = 0.2\n eps = 1e-5\n num_classes = cfg.MODEL.NUM_CLASSES\n anchors = [float(i) for i in cfg.SOLVER.ANCHORS]\n num_anchors = cfg.SOLVER.NUM_ANCHORS\n conf_thresh_valid = 0.005\n total = 0.0\n proposals = 0.0\n correct = 0.0\n fscore = 0.0\n\n print(\"num_classes: {}\".format(num_classes))\n print(\"anchors: {}\".format(anchors))\n print(\"num_anchors: {}\".format(num_anchors))\n # print(\"num_classes: {}\".format())\n correct_classification = 0.0\n total_detected = 0.0\n nbatch = len(test_loader)\n print(\"nbatch: \", nbatch)\n model.eval()\n print(\"1111111111111111111111111111111111111111111111111111111111111111111111\")\n\n for batch_idx, (frame_idx, data, target) in enumerate(test_loader): # related to class UCF_JHMDB_Dataset __getitem__\n # print(\"@@@@@@@@@@@@@@@@@@@@@@ batch_idx: \", batch_idx) # 0~11 if len is 12\n # print(\"@@@@@@ frame_idx: \", frame_idx) # ['Basketball_v_Basketball_g01_c01_00009.txt', 'Basketball_v_Basketball_g01_c01_00010.txt']\n # print(\"data: \", data.shape) # torch.Size([2, 3, 16, 224, 224])\n # print(\"target: \", target.shape) # torch.Size([2, 250])\n # print(\"data: \", data)\n # print(\"target: \", target)\n data = data.cuda()\n with torch.no_grad():\n output = model(data).data # model output, 4 ∗ 145 ∗ 7 ∗ 7\n all_boxes = get_region_boxes(output, conf_thresh_valid, num_classes, anchors, num_anchors, 0, 1)\n for i in range(output.size(0)):\n boxes = all_boxes[i]\n boxes = nms(boxes, nms_thresh)\n # print(\" len of boxes: \", len(boxes))\n\n if cfg.TRAIN.DATASET == 'ucf24':\n detection_path = os.path.join('ucf_detections', 'detections_'+str(epoch), frame_idx[i])\n print(\"detection_path: \", detection_path)\n current_dir = os.path.join('ucf_detections', 'detections_'+str(epoch))\n if not os.path.exists('ucf_detections'):\n os.mkdir('ucf_detections')\n if not os.path.exists(current_dir):\n os.mkdir(current_dir)\n else:\n detection_path = os.path.join('jhmdb_detections', 'detections_'+str(epoch), frame_idx[i])\n current_dir = os.path.join('jhmdb_detections', 'detections_'+str(epoch))\n if not os.path.exists('jhmdb_detections'):\n os.mkdir('jhmdb_detections')\n if not os.path.exists(current_dir):\n os.mkdir(current_dir)\n\n with open(detection_path, 'w+') as f_detect:\n for box in boxes:\n x1 = round(float(box[0]-box[2]/2.0) * 320.0)\n y1 = round(float(box[1]-box[3]/2.0) * 240.0)\n x2 = round(float(box[0]+box[2]/2.0) * 320.0)\n y2 = round(float(box[1]+box[3]/2.0) * 240.0)\n\n det_conf = float(box[4])\n for j in range((len(box)-5)//2):\n cls_conf = float(box[5+2*j].item())\n prob = det_conf * cls_conf\n\n f_detect.write(str(int(box[6])+1) + ' ' + str(prob) + ' ' + str(x1) + ' ' + str(y1) + ' ' + str(x2) + ' ' + str(y2) + '\\n')\n # 其shape为50∗5 通过truths_length获取真实的target数量。\n truths = target[i].view(-1, 5)\n # print(\"opt truths:\", truths)\n num_gts = truths_length(truths)\n print(\"num_gts: \", num_gts)\n \n total = total + num_gts\n pred_list = [] # LIST OF CONFIDENT BOX INDICES\n for i in range(len(boxes)):\n if boxes[i][4] > 0.25:\n # print(\"######### boxes[i]: \", boxes[i])\n proposals = proposals+1\n pred_list.append(i)\n\n for i in range(num_gts):\n box_gt = [truths[i][1], truths[i][2], truths[i][3], truths[i][4], 1.0, 1.0, truths[i][0]]\n print(\"box_gt: \", box_gt)\n print(\"len(pred_list): \", len(pred_list))\n best_iou = 0\n best_j = -1\n for j in pred_list: # ITERATE THROUGH ONLY CONFIDENT BOXES\n iou = bbox_iou(box_gt, boxes[j], x1y1x2y2=False)\n if iou > best_iou:\n best_j = j\n best_iou = iou\n\n if best_iou > iou_thresh:\n total_detected += 1\n if int(boxes[best_j][6]) == box_gt[6]:\n correct_classification += 1\n\n if best_iou > iou_thresh and int(boxes[best_j][6]) == box_gt[6]:\n correct = correct+1\n\n precision = 1.0*correct/(proposals+eps)\n recall = 1.0*correct/(total+eps)\n fscore = 2.0*precision*recall/(precision+recall+eps)\n logging(\"[%d/%d] precision: %f, recall: %f, fscore: %f\" % (batch_idx, nbatch, precision, recall, fscore))\n\n classification_accuracy = 1.0 * correct_classification / (total_detected + eps)\n locolization_recall = 1.0 * total_detected / (total + eps)\n\n print(\"Classification accuracy: %.3f\" % classification_accuracy)\n print(\"Locolization recall: %.3f\" % locolization_recall)\n\n return fscore\n", "import os\nimport torch\nimport time\nfrom core.utils import *\nfrom datasets.meters import AVAMeter\n\n\ndef train_ava(cfg, epoch, model, train_loader, loss_module, optimizer):\n print('training function!!!!!!!!!!')\n t0 = time.time()\n loss_module.reset_meters()\n l_loader = len(train_loader)\n print(' lenth l_loader: ', l_loader)\n model.train()\n for batch_idx, batch in enumerate(train_loader):\n print('&&&&&&&&&&&&&&&&batch_idx: ', batch_idx)\n data = batch['clip'].cuda()\n target = {'cls': batch['cls'], 'boxes': batch['boxes']}\n output = model(data)\n loss = loss_module(output, target, epoch, batch_idx, l_loader)\n loss.backward()\n steps = cfg.TRAIN.TOTAL_BATCH_SIZE // cfg.TRAIN.BATCH_SIZE\n if batch_idx % steps == 0:\n optimizer.step()\n optimizer.zero_grad()\n if batch_idx % 2000 == 0:\n loss_module.reset_meters()\n t1 = time.time()\n logging('trained with %f samples/s' % (len(train_loader.dataset) / (t1 -\n t0)))\n print('')\n\n\ndef train_ucf24_jhmdb21(cfg, epoch, model, train_loader, loss_module, optimizer\n ):\n t0 = time.time()\n loss_module.reset_meters()\n l_loader = len(train_loader)\n model.train()\n for batch_idx, (data, target) in enumerate(train_loader):\n data = data.cuda()\n output = model(data)\n loss = loss_module(output, target, epoch, batch_idx, l_loader)\n loss.backward()\n steps = cfg.TRAIN.TOTAL_BATCH_SIZE // cfg.TRAIN.BATCH_SIZE\n if batch_idx % steps == 0:\n optimizer.step()\n optimizer.zero_grad()\n if batch_idx % 2000 == 0:\n loss_module.reset_meters()\n t1 = time.time()\n logging('trained with %f samples/s' % (len(train_loader.dataset) / (t1 -\n t0)))\n print('')\n\n\[email protected]_grad()\ndef test_ava(cfg, epoch, model, test_loader):\n num_classes = cfg.MODEL.NUM_CLASSES\n anchors = [float(i) for i in cfg.SOLVER.ANCHORS]\n num_anchors = cfg.SOLVER.NUM_ANCHORS\n nms_thresh = 0.5\n conf_thresh_valid = 0.005\n nbatch = len(test_loader)\n meter = AVAMeter(cfg, cfg.TRAIN.MODE, 'latest_detection.json')\n model.eval()\n for batch_idx, batch in enumerate(test_loader):\n data = batch['clip'].cuda()\n target = {'cls': batch['cls'], 'boxes': batch['boxes']}\n with torch.no_grad():\n output = model(data)\n metadata = batch['metadata'].cpu().numpy()\n preds = []\n all_boxes = get_region_boxes_ava(output, conf_thresh_valid,\n num_classes, anchors, num_anchors, 0, 1)\n for i in range(output.size(0)):\n boxes = all_boxes[i]\n boxes = nms(boxes, nms_thresh)\n for box in boxes:\n x1 = float(box[0] - box[2] / 2.0)\n y1 = float(box[1] - box[3] / 2.0)\n x2 = float(box[0] + box[2] / 2.0)\n y2 = float(box[1] + box[3] / 2.0)\n det_conf = float(box[4])\n cls_out = [(det_conf * x.cpu().numpy()) for x in box[5]]\n preds.append([[x1, y1, x2, y2], cls_out, metadata[i][:2\n ].tolist()])\n meter.update_stats(preds)\n logging('[%d/%d]' % (batch_idx, nbatch))\n mAP = meter.evaluate_ava()\n logging('mode: {} -- mAP: {}'.format(meter.mode, mAP))\n return mAP\n\n\[email protected]_grad()\ndef test_ucf24_jhmdb21(cfg, epoch, model, test_loader):\n\n def truths_length(truths):\n for i in range(50):\n if truths[i][1] == 0:\n return i\n nms_thresh = 0.1\n iou_thresh = 0.2\n eps = 1e-05\n num_classes = cfg.MODEL.NUM_CLASSES\n anchors = [float(i) for i in cfg.SOLVER.ANCHORS]\n num_anchors = cfg.SOLVER.NUM_ANCHORS\n conf_thresh_valid = 0.005\n total = 0.0\n proposals = 0.0\n correct = 0.0\n fscore = 0.0\n print('num_classes: {}'.format(num_classes))\n print('anchors: {}'.format(anchors))\n print('num_anchors: {}'.format(num_anchors))\n correct_classification = 0.0\n total_detected = 0.0\n nbatch = len(test_loader)\n print('nbatch: ', nbatch)\n model.eval()\n print(\n '1111111111111111111111111111111111111111111111111111111111111111111111'\n )\n for batch_idx, (frame_idx, data, target) in enumerate(test_loader):\n data = data.cuda()\n with torch.no_grad():\n output = model(data).data\n all_boxes = get_region_boxes(output, conf_thresh_valid,\n num_classes, anchors, num_anchors, 0, 1)\n for i in range(output.size(0)):\n boxes = all_boxes[i]\n boxes = nms(boxes, nms_thresh)\n if cfg.TRAIN.DATASET == 'ucf24':\n detection_path = os.path.join('ucf_detections', \n 'detections_' + str(epoch), frame_idx[i])\n print('detection_path: ', detection_path)\n current_dir = os.path.join('ucf_detections', \n 'detections_' + str(epoch))\n if not os.path.exists('ucf_detections'):\n os.mkdir('ucf_detections')\n if not os.path.exists(current_dir):\n os.mkdir(current_dir)\n else:\n detection_path = os.path.join('jhmdb_detections', \n 'detections_' + str(epoch), frame_idx[i])\n current_dir = os.path.join('jhmdb_detections', \n 'detections_' + str(epoch))\n if not os.path.exists('jhmdb_detections'):\n os.mkdir('jhmdb_detections')\n if not os.path.exists(current_dir):\n os.mkdir(current_dir)\n with open(detection_path, 'w+') as f_detect:\n for box in boxes:\n x1 = round(float(box[0] - box[2] / 2.0) * 320.0)\n y1 = round(float(box[1] - box[3] / 2.0) * 240.0)\n x2 = round(float(box[0] + box[2] / 2.0) * 320.0)\n y2 = round(float(box[1] + box[3] / 2.0) * 240.0)\n det_conf = float(box[4])\n for j in range((len(box) - 5) // 2):\n cls_conf = float(box[5 + 2 * j].item())\n prob = det_conf * cls_conf\n f_detect.write(str(int(box[6]) + 1) + ' ' + str\n (prob) + ' ' + str(x1) + ' ' + str(y1) +\n ' ' + str(x2) + ' ' + str(y2) + '\\n')\n truths = target[i].view(-1, 5)\n num_gts = truths_length(truths)\n print('num_gts: ', num_gts)\n total = total + num_gts\n pred_list = []\n for i in range(len(boxes)):\n if boxes[i][4] > 0.25:\n proposals = proposals + 1\n pred_list.append(i)\n for i in range(num_gts):\n box_gt = [truths[i][1], truths[i][2], truths[i][3],\n truths[i][4], 1.0, 1.0, truths[i][0]]\n print('box_gt: ', box_gt)\n print('len(pred_list): ', len(pred_list))\n best_iou = 0\n best_j = -1\n for j in pred_list:\n iou = bbox_iou(box_gt, boxes[j], x1y1x2y2=False)\n if iou > best_iou:\n best_j = j\n best_iou = iou\n if best_iou > iou_thresh:\n total_detected += 1\n if int(boxes[best_j][6]) == box_gt[6]:\n correct_classification += 1\n if best_iou > iou_thresh and int(boxes[best_j][6]\n ) == box_gt[6]:\n correct = correct + 1\n precision = 1.0 * correct / (proposals + eps)\n recall = 1.0 * correct / (total + eps)\n fscore = 2.0 * precision * recall / (precision + recall + eps)\n logging('[%d/%d] precision: %f, recall: %f, fscore: %f' % (\n batch_idx, nbatch, precision, recall, fscore))\n classification_accuracy = 1.0 * correct_classification / (total_detected +\n eps)\n locolization_recall = 1.0 * total_detected / (total + eps)\n print('Classification accuracy: %.3f' % classification_accuracy)\n print('Locolization recall: %.3f' % locolization_recall)\n return fscore\n", "<import token>\n\n\ndef train_ava(cfg, epoch, model, train_loader, loss_module, optimizer):\n print('training function!!!!!!!!!!')\n t0 = time.time()\n loss_module.reset_meters()\n l_loader = len(train_loader)\n print(' lenth l_loader: ', l_loader)\n model.train()\n for batch_idx, batch in enumerate(train_loader):\n print('&&&&&&&&&&&&&&&&batch_idx: ', batch_idx)\n data = batch['clip'].cuda()\n target = {'cls': batch['cls'], 'boxes': batch['boxes']}\n output = model(data)\n loss = loss_module(output, target, epoch, batch_idx, l_loader)\n loss.backward()\n steps = cfg.TRAIN.TOTAL_BATCH_SIZE // cfg.TRAIN.BATCH_SIZE\n if batch_idx % steps == 0:\n optimizer.step()\n optimizer.zero_grad()\n if batch_idx % 2000 == 0:\n loss_module.reset_meters()\n t1 = time.time()\n logging('trained with %f samples/s' % (len(train_loader.dataset) / (t1 -\n t0)))\n print('')\n\n\ndef train_ucf24_jhmdb21(cfg, epoch, model, train_loader, loss_module, optimizer\n ):\n t0 = time.time()\n loss_module.reset_meters()\n l_loader = len(train_loader)\n model.train()\n for batch_idx, (data, target) in enumerate(train_loader):\n data = data.cuda()\n output = model(data)\n loss = loss_module(output, target, epoch, batch_idx, l_loader)\n loss.backward()\n steps = cfg.TRAIN.TOTAL_BATCH_SIZE // cfg.TRAIN.BATCH_SIZE\n if batch_idx % steps == 0:\n optimizer.step()\n optimizer.zero_grad()\n if batch_idx % 2000 == 0:\n loss_module.reset_meters()\n t1 = time.time()\n logging('trained with %f samples/s' % (len(train_loader.dataset) / (t1 -\n t0)))\n print('')\n\n\[email protected]_grad()\ndef test_ava(cfg, epoch, model, test_loader):\n num_classes = cfg.MODEL.NUM_CLASSES\n anchors = [float(i) for i in cfg.SOLVER.ANCHORS]\n num_anchors = cfg.SOLVER.NUM_ANCHORS\n nms_thresh = 0.5\n conf_thresh_valid = 0.005\n nbatch = len(test_loader)\n meter = AVAMeter(cfg, cfg.TRAIN.MODE, 'latest_detection.json')\n model.eval()\n for batch_idx, batch in enumerate(test_loader):\n data = batch['clip'].cuda()\n target = {'cls': batch['cls'], 'boxes': batch['boxes']}\n with torch.no_grad():\n output = model(data)\n metadata = batch['metadata'].cpu().numpy()\n preds = []\n all_boxes = get_region_boxes_ava(output, conf_thresh_valid,\n num_classes, anchors, num_anchors, 0, 1)\n for i in range(output.size(0)):\n boxes = all_boxes[i]\n boxes = nms(boxes, nms_thresh)\n for box in boxes:\n x1 = float(box[0] - box[2] / 2.0)\n y1 = float(box[1] - box[3] / 2.0)\n x2 = float(box[0] + box[2] / 2.0)\n y2 = float(box[1] + box[3] / 2.0)\n det_conf = float(box[4])\n cls_out = [(det_conf * x.cpu().numpy()) for x in box[5]]\n preds.append([[x1, y1, x2, y2], cls_out, metadata[i][:2\n ].tolist()])\n meter.update_stats(preds)\n logging('[%d/%d]' % (batch_idx, nbatch))\n mAP = meter.evaluate_ava()\n logging('mode: {} -- mAP: {}'.format(meter.mode, mAP))\n return mAP\n\n\[email protected]_grad()\ndef test_ucf24_jhmdb21(cfg, epoch, model, test_loader):\n\n def truths_length(truths):\n for i in range(50):\n if truths[i][1] == 0:\n return i\n nms_thresh = 0.1\n iou_thresh = 0.2\n eps = 1e-05\n num_classes = cfg.MODEL.NUM_CLASSES\n anchors = [float(i) for i in cfg.SOLVER.ANCHORS]\n num_anchors = cfg.SOLVER.NUM_ANCHORS\n conf_thresh_valid = 0.005\n total = 0.0\n proposals = 0.0\n correct = 0.0\n fscore = 0.0\n print('num_classes: {}'.format(num_classes))\n print('anchors: {}'.format(anchors))\n print('num_anchors: {}'.format(num_anchors))\n correct_classification = 0.0\n total_detected = 0.0\n nbatch = len(test_loader)\n print('nbatch: ', nbatch)\n model.eval()\n print(\n '1111111111111111111111111111111111111111111111111111111111111111111111'\n )\n for batch_idx, (frame_idx, data, target) in enumerate(test_loader):\n data = data.cuda()\n with torch.no_grad():\n output = model(data).data\n all_boxes = get_region_boxes(output, conf_thresh_valid,\n num_classes, anchors, num_anchors, 0, 1)\n for i in range(output.size(0)):\n boxes = all_boxes[i]\n boxes = nms(boxes, nms_thresh)\n if cfg.TRAIN.DATASET == 'ucf24':\n detection_path = os.path.join('ucf_detections', \n 'detections_' + str(epoch), frame_idx[i])\n print('detection_path: ', detection_path)\n current_dir = os.path.join('ucf_detections', \n 'detections_' + str(epoch))\n if not os.path.exists('ucf_detections'):\n os.mkdir('ucf_detections')\n if not os.path.exists(current_dir):\n os.mkdir(current_dir)\n else:\n detection_path = os.path.join('jhmdb_detections', \n 'detections_' + str(epoch), frame_idx[i])\n current_dir = os.path.join('jhmdb_detections', \n 'detections_' + str(epoch))\n if not os.path.exists('jhmdb_detections'):\n os.mkdir('jhmdb_detections')\n if not os.path.exists(current_dir):\n os.mkdir(current_dir)\n with open(detection_path, 'w+') as f_detect:\n for box in boxes:\n x1 = round(float(box[0] - box[2] / 2.0) * 320.0)\n y1 = round(float(box[1] - box[3] / 2.0) * 240.0)\n x2 = round(float(box[0] + box[2] / 2.0) * 320.0)\n y2 = round(float(box[1] + box[3] / 2.0) * 240.0)\n det_conf = float(box[4])\n for j in range((len(box) - 5) // 2):\n cls_conf = float(box[5 + 2 * j].item())\n prob = det_conf * cls_conf\n f_detect.write(str(int(box[6]) + 1) + ' ' + str\n (prob) + ' ' + str(x1) + ' ' + str(y1) +\n ' ' + str(x2) + ' ' + str(y2) + '\\n')\n truths = target[i].view(-1, 5)\n num_gts = truths_length(truths)\n print('num_gts: ', num_gts)\n total = total + num_gts\n pred_list = []\n for i in range(len(boxes)):\n if boxes[i][4] > 0.25:\n proposals = proposals + 1\n pred_list.append(i)\n for i in range(num_gts):\n box_gt = [truths[i][1], truths[i][2], truths[i][3],\n truths[i][4], 1.0, 1.0, truths[i][0]]\n print('box_gt: ', box_gt)\n print('len(pred_list): ', len(pred_list))\n best_iou = 0\n best_j = -1\n for j in pred_list:\n iou = bbox_iou(box_gt, boxes[j], x1y1x2y2=False)\n if iou > best_iou:\n best_j = j\n best_iou = iou\n if best_iou > iou_thresh:\n total_detected += 1\n if int(boxes[best_j][6]) == box_gt[6]:\n correct_classification += 1\n if best_iou > iou_thresh and int(boxes[best_j][6]\n ) == box_gt[6]:\n correct = correct + 1\n precision = 1.0 * correct / (proposals + eps)\n recall = 1.0 * correct / (total + eps)\n fscore = 2.0 * precision * recall / (precision + recall + eps)\n logging('[%d/%d] precision: %f, recall: %f, fscore: %f' % (\n batch_idx, nbatch, precision, recall, fscore))\n classification_accuracy = 1.0 * correct_classification / (total_detected +\n eps)\n locolization_recall = 1.0 * total_detected / (total + eps)\n print('Classification accuracy: %.3f' % classification_accuracy)\n print('Locolization recall: %.3f' % locolization_recall)\n return fscore\n", "<import token>\n<function token>\n\n\ndef train_ucf24_jhmdb21(cfg, epoch, model, train_loader, loss_module, optimizer\n ):\n t0 = time.time()\n loss_module.reset_meters()\n l_loader = len(train_loader)\n model.train()\n for batch_idx, (data, target) in enumerate(train_loader):\n data = data.cuda()\n output = model(data)\n loss = loss_module(output, target, epoch, batch_idx, l_loader)\n loss.backward()\n steps = cfg.TRAIN.TOTAL_BATCH_SIZE // cfg.TRAIN.BATCH_SIZE\n if batch_idx % steps == 0:\n optimizer.step()\n optimizer.zero_grad()\n if batch_idx % 2000 == 0:\n loss_module.reset_meters()\n t1 = time.time()\n logging('trained with %f samples/s' % (len(train_loader.dataset) / (t1 -\n t0)))\n print('')\n\n\[email protected]_grad()\ndef test_ava(cfg, epoch, model, test_loader):\n num_classes = cfg.MODEL.NUM_CLASSES\n anchors = [float(i) for i in cfg.SOLVER.ANCHORS]\n num_anchors = cfg.SOLVER.NUM_ANCHORS\n nms_thresh = 0.5\n conf_thresh_valid = 0.005\n nbatch = len(test_loader)\n meter = AVAMeter(cfg, cfg.TRAIN.MODE, 'latest_detection.json')\n model.eval()\n for batch_idx, batch in enumerate(test_loader):\n data = batch['clip'].cuda()\n target = {'cls': batch['cls'], 'boxes': batch['boxes']}\n with torch.no_grad():\n output = model(data)\n metadata = batch['metadata'].cpu().numpy()\n preds = []\n all_boxes = get_region_boxes_ava(output, conf_thresh_valid,\n num_classes, anchors, num_anchors, 0, 1)\n for i in range(output.size(0)):\n boxes = all_boxes[i]\n boxes = nms(boxes, nms_thresh)\n for box in boxes:\n x1 = float(box[0] - box[2] / 2.0)\n y1 = float(box[1] - box[3] / 2.0)\n x2 = float(box[0] + box[2] / 2.0)\n y2 = float(box[1] + box[3] / 2.0)\n det_conf = float(box[4])\n cls_out = [(det_conf * x.cpu().numpy()) for x in box[5]]\n preds.append([[x1, y1, x2, y2], cls_out, metadata[i][:2\n ].tolist()])\n meter.update_stats(preds)\n logging('[%d/%d]' % (batch_idx, nbatch))\n mAP = meter.evaluate_ava()\n logging('mode: {} -- mAP: {}'.format(meter.mode, mAP))\n return mAP\n\n\[email protected]_grad()\ndef test_ucf24_jhmdb21(cfg, epoch, model, test_loader):\n\n def truths_length(truths):\n for i in range(50):\n if truths[i][1] == 0:\n return i\n nms_thresh = 0.1\n iou_thresh = 0.2\n eps = 1e-05\n num_classes = cfg.MODEL.NUM_CLASSES\n anchors = [float(i) for i in cfg.SOLVER.ANCHORS]\n num_anchors = cfg.SOLVER.NUM_ANCHORS\n conf_thresh_valid = 0.005\n total = 0.0\n proposals = 0.0\n correct = 0.0\n fscore = 0.0\n print('num_classes: {}'.format(num_classes))\n print('anchors: {}'.format(anchors))\n print('num_anchors: {}'.format(num_anchors))\n correct_classification = 0.0\n total_detected = 0.0\n nbatch = len(test_loader)\n print('nbatch: ', nbatch)\n model.eval()\n print(\n '1111111111111111111111111111111111111111111111111111111111111111111111'\n )\n for batch_idx, (frame_idx, data, target) in enumerate(test_loader):\n data = data.cuda()\n with torch.no_grad():\n output = model(data).data\n all_boxes = get_region_boxes(output, conf_thresh_valid,\n num_classes, anchors, num_anchors, 0, 1)\n for i in range(output.size(0)):\n boxes = all_boxes[i]\n boxes = nms(boxes, nms_thresh)\n if cfg.TRAIN.DATASET == 'ucf24':\n detection_path = os.path.join('ucf_detections', \n 'detections_' + str(epoch), frame_idx[i])\n print('detection_path: ', detection_path)\n current_dir = os.path.join('ucf_detections', \n 'detections_' + str(epoch))\n if not os.path.exists('ucf_detections'):\n os.mkdir('ucf_detections')\n if not os.path.exists(current_dir):\n os.mkdir(current_dir)\n else:\n detection_path = os.path.join('jhmdb_detections', \n 'detections_' + str(epoch), frame_idx[i])\n current_dir = os.path.join('jhmdb_detections', \n 'detections_' + str(epoch))\n if not os.path.exists('jhmdb_detections'):\n os.mkdir('jhmdb_detections')\n if not os.path.exists(current_dir):\n os.mkdir(current_dir)\n with open(detection_path, 'w+') as f_detect:\n for box in boxes:\n x1 = round(float(box[0] - box[2] / 2.0) * 320.0)\n y1 = round(float(box[1] - box[3] / 2.0) * 240.0)\n x2 = round(float(box[0] + box[2] / 2.0) * 320.0)\n y2 = round(float(box[1] + box[3] / 2.0) * 240.0)\n det_conf = float(box[4])\n for j in range((len(box) - 5) // 2):\n cls_conf = float(box[5 + 2 * j].item())\n prob = det_conf * cls_conf\n f_detect.write(str(int(box[6]) + 1) + ' ' + str\n (prob) + ' ' + str(x1) + ' ' + str(y1) +\n ' ' + str(x2) + ' ' + str(y2) + '\\n')\n truths = target[i].view(-1, 5)\n num_gts = truths_length(truths)\n print('num_gts: ', num_gts)\n total = total + num_gts\n pred_list = []\n for i in range(len(boxes)):\n if boxes[i][4] > 0.25:\n proposals = proposals + 1\n pred_list.append(i)\n for i in range(num_gts):\n box_gt = [truths[i][1], truths[i][2], truths[i][3],\n truths[i][4], 1.0, 1.0, truths[i][0]]\n print('box_gt: ', box_gt)\n print('len(pred_list): ', len(pred_list))\n best_iou = 0\n best_j = -1\n for j in pred_list:\n iou = bbox_iou(box_gt, boxes[j], x1y1x2y2=False)\n if iou > best_iou:\n best_j = j\n best_iou = iou\n if best_iou > iou_thresh:\n total_detected += 1\n if int(boxes[best_j][6]) == box_gt[6]:\n correct_classification += 1\n if best_iou > iou_thresh and int(boxes[best_j][6]\n ) == box_gt[6]:\n correct = correct + 1\n precision = 1.0 * correct / (proposals + eps)\n recall = 1.0 * correct / (total + eps)\n fscore = 2.0 * precision * recall / (precision + recall + eps)\n logging('[%d/%d] precision: %f, recall: %f, fscore: %f' % (\n batch_idx, nbatch, precision, recall, fscore))\n classification_accuracy = 1.0 * correct_classification / (total_detected +\n eps)\n locolization_recall = 1.0 * total_detected / (total + eps)\n print('Classification accuracy: %.3f' % classification_accuracy)\n print('Locolization recall: %.3f' % locolization_recall)\n return fscore\n", "<import token>\n<function token>\n\n\ndef train_ucf24_jhmdb21(cfg, epoch, model, train_loader, loss_module, optimizer\n ):\n t0 = time.time()\n loss_module.reset_meters()\n l_loader = len(train_loader)\n model.train()\n for batch_idx, (data, target) in enumerate(train_loader):\n data = data.cuda()\n output = model(data)\n loss = loss_module(output, target, epoch, batch_idx, l_loader)\n loss.backward()\n steps = cfg.TRAIN.TOTAL_BATCH_SIZE // cfg.TRAIN.BATCH_SIZE\n if batch_idx % steps == 0:\n optimizer.step()\n optimizer.zero_grad()\n if batch_idx % 2000 == 0:\n loss_module.reset_meters()\n t1 = time.time()\n logging('trained with %f samples/s' % (len(train_loader.dataset) / (t1 -\n t0)))\n print('')\n\n\n<function token>\n\n\[email protected]_grad()\ndef test_ucf24_jhmdb21(cfg, epoch, model, test_loader):\n\n def truths_length(truths):\n for i in range(50):\n if truths[i][1] == 0:\n return i\n nms_thresh = 0.1\n iou_thresh = 0.2\n eps = 1e-05\n num_classes = cfg.MODEL.NUM_CLASSES\n anchors = [float(i) for i in cfg.SOLVER.ANCHORS]\n num_anchors = cfg.SOLVER.NUM_ANCHORS\n conf_thresh_valid = 0.005\n total = 0.0\n proposals = 0.0\n correct = 0.0\n fscore = 0.0\n print('num_classes: {}'.format(num_classes))\n print('anchors: {}'.format(anchors))\n print('num_anchors: {}'.format(num_anchors))\n correct_classification = 0.0\n total_detected = 0.0\n nbatch = len(test_loader)\n print('nbatch: ', nbatch)\n model.eval()\n print(\n '1111111111111111111111111111111111111111111111111111111111111111111111'\n )\n for batch_idx, (frame_idx, data, target) in enumerate(test_loader):\n data = data.cuda()\n with torch.no_grad():\n output = model(data).data\n all_boxes = get_region_boxes(output, conf_thresh_valid,\n num_classes, anchors, num_anchors, 0, 1)\n for i in range(output.size(0)):\n boxes = all_boxes[i]\n boxes = nms(boxes, nms_thresh)\n if cfg.TRAIN.DATASET == 'ucf24':\n detection_path = os.path.join('ucf_detections', \n 'detections_' + str(epoch), frame_idx[i])\n print('detection_path: ', detection_path)\n current_dir = os.path.join('ucf_detections', \n 'detections_' + str(epoch))\n if not os.path.exists('ucf_detections'):\n os.mkdir('ucf_detections')\n if not os.path.exists(current_dir):\n os.mkdir(current_dir)\n else:\n detection_path = os.path.join('jhmdb_detections', \n 'detections_' + str(epoch), frame_idx[i])\n current_dir = os.path.join('jhmdb_detections', \n 'detections_' + str(epoch))\n if not os.path.exists('jhmdb_detections'):\n os.mkdir('jhmdb_detections')\n if not os.path.exists(current_dir):\n os.mkdir(current_dir)\n with open(detection_path, 'w+') as f_detect:\n for box in boxes:\n x1 = round(float(box[0] - box[2] / 2.0) * 320.0)\n y1 = round(float(box[1] - box[3] / 2.0) * 240.0)\n x2 = round(float(box[0] + box[2] / 2.0) * 320.0)\n y2 = round(float(box[1] + box[3] / 2.0) * 240.0)\n det_conf = float(box[4])\n for j in range((len(box) - 5) // 2):\n cls_conf = float(box[5 + 2 * j].item())\n prob = det_conf * cls_conf\n f_detect.write(str(int(box[6]) + 1) + ' ' + str\n (prob) + ' ' + str(x1) + ' ' + str(y1) +\n ' ' + str(x2) + ' ' + str(y2) + '\\n')\n truths = target[i].view(-1, 5)\n num_gts = truths_length(truths)\n print('num_gts: ', num_gts)\n total = total + num_gts\n pred_list = []\n for i in range(len(boxes)):\n if boxes[i][4] > 0.25:\n proposals = proposals + 1\n pred_list.append(i)\n for i in range(num_gts):\n box_gt = [truths[i][1], truths[i][2], truths[i][3],\n truths[i][4], 1.0, 1.0, truths[i][0]]\n print('box_gt: ', box_gt)\n print('len(pred_list): ', len(pred_list))\n best_iou = 0\n best_j = -1\n for j in pred_list:\n iou = bbox_iou(box_gt, boxes[j], x1y1x2y2=False)\n if iou > best_iou:\n best_j = j\n best_iou = iou\n if best_iou > iou_thresh:\n total_detected += 1\n if int(boxes[best_j][6]) == box_gt[6]:\n correct_classification += 1\n if best_iou > iou_thresh and int(boxes[best_j][6]\n ) == box_gt[6]:\n correct = correct + 1\n precision = 1.0 * correct / (proposals + eps)\n recall = 1.0 * correct / (total + eps)\n fscore = 2.0 * precision * recall / (precision + recall + eps)\n logging('[%d/%d] precision: %f, recall: %f, fscore: %f' % (\n batch_idx, nbatch, precision, recall, fscore))\n classification_accuracy = 1.0 * correct_classification / (total_detected +\n eps)\n locolization_recall = 1.0 * total_detected / (total + eps)\n print('Classification accuracy: %.3f' % classification_accuracy)\n print('Locolization recall: %.3f' % locolization_recall)\n return fscore\n", "<import token>\n<function token>\n\n\ndef train_ucf24_jhmdb21(cfg, epoch, model, train_loader, loss_module, optimizer\n ):\n t0 = time.time()\n loss_module.reset_meters()\n l_loader = len(train_loader)\n model.train()\n for batch_idx, (data, target) in enumerate(train_loader):\n data = data.cuda()\n output = model(data)\n loss = loss_module(output, target, epoch, batch_idx, l_loader)\n loss.backward()\n steps = cfg.TRAIN.TOTAL_BATCH_SIZE // cfg.TRAIN.BATCH_SIZE\n if batch_idx % steps == 0:\n optimizer.step()\n optimizer.zero_grad()\n if batch_idx % 2000 == 0:\n loss_module.reset_meters()\n t1 = time.time()\n logging('trained with %f samples/s' % (len(train_loader.dataset) / (t1 -\n t0)))\n print('')\n\n\n<function token>\n<function token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n" ]
false
98,916
8b46179803486a5f5a38a85850e46c60248a3b6d
list1 = [1, 3, 5, 7, 9, 10] list2 = [2, 4, 6, 8] list1[-1:] = list2 print(f"New list: {list1}")
[ "list1 = [1, 3, 5, 7, 9, 10]\r\nlist2 = [2, 4, 6, 8]\r\nlist1[-1:] = list2\r\nprint(f\"New list: {list1}\")\r\n", "list1 = [1, 3, 5, 7, 9, 10]\nlist2 = [2, 4, 6, 8]\nlist1[-1:] = list2\nprint(f'New list: {list1}')\n", "<assignment token>\nprint(f'New list: {list1}')\n", "<assignment token>\n<code token>\n" ]
false
98,917
9827b854637693ca6e7876f9c141b1689a206ff8
from django.contrib import admin from ..constants import ANNUAL from ..forms import HivTestedForm from ..models import HivTested from .subject_admin_exclude_mixin import SubjectAdminExcludeMixin from .subject_visit_model_admin import SubjectVisitModelAdmin class HivTestedAdmin(SubjectAdminExcludeMixin, SubjectVisitModelAdmin): form = HivTestedForm fields = [ "subject_visit", 'num_hiv_tests', 'where_hiv_test', 'where_hiv_test_other', 'why_hiv_test', 'hiv_pills', 'arvs_hiv_test'] custom_exclude = {ANNUAL: [ 'num_hiv_tests', 'hiv_pills', 'arvs_hiv_test', 'why_hiv_test'] } radio_fields = { "where_hiv_test": admin.VERTICAL, "why_hiv_test": admin.VERTICAL, "hiv_pills": admin.VERTICAL, "arvs_hiv_test": admin.VERTICAL, } admin.site.register(HivTested, HivTestedAdmin)
[ "from django.contrib import admin\n\nfrom ..constants import ANNUAL\nfrom ..forms import HivTestedForm\nfrom ..models import HivTested\n\nfrom .subject_admin_exclude_mixin import SubjectAdminExcludeMixin\nfrom .subject_visit_model_admin import SubjectVisitModelAdmin\n\n\nclass HivTestedAdmin(SubjectAdminExcludeMixin, SubjectVisitModelAdmin):\n\n form = HivTestedForm\n fields = [\n \"subject_visit\",\n 'num_hiv_tests',\n 'where_hiv_test',\n 'where_hiv_test_other',\n 'why_hiv_test',\n 'hiv_pills',\n 'arvs_hiv_test']\n custom_exclude = {ANNUAL: [\n 'num_hiv_tests', 'hiv_pills', 'arvs_hiv_test', 'why_hiv_test']\n }\n\n radio_fields = {\n \"where_hiv_test\": admin.VERTICAL,\n \"why_hiv_test\": admin.VERTICAL,\n \"hiv_pills\": admin.VERTICAL,\n \"arvs_hiv_test\": admin.VERTICAL, }\n\nadmin.site.register(HivTested, HivTestedAdmin)\n", "from django.contrib import admin\nfrom ..constants import ANNUAL\nfrom ..forms import HivTestedForm\nfrom ..models import HivTested\nfrom .subject_admin_exclude_mixin import SubjectAdminExcludeMixin\nfrom .subject_visit_model_admin import SubjectVisitModelAdmin\n\n\nclass HivTestedAdmin(SubjectAdminExcludeMixin, SubjectVisitModelAdmin):\n form = HivTestedForm\n fields = ['subject_visit', 'num_hiv_tests', 'where_hiv_test',\n 'where_hiv_test_other', 'why_hiv_test', 'hiv_pills', 'arvs_hiv_test']\n custom_exclude = {ANNUAL: ['num_hiv_tests', 'hiv_pills',\n 'arvs_hiv_test', 'why_hiv_test']}\n radio_fields = {'where_hiv_test': admin.VERTICAL, 'why_hiv_test': admin\n .VERTICAL, 'hiv_pills': admin.VERTICAL, 'arvs_hiv_test': admin.VERTICAL\n }\n\n\nadmin.site.register(HivTested, HivTestedAdmin)\n", "<import token>\n\n\nclass HivTestedAdmin(SubjectAdminExcludeMixin, SubjectVisitModelAdmin):\n form = HivTestedForm\n fields = ['subject_visit', 'num_hiv_tests', 'where_hiv_test',\n 'where_hiv_test_other', 'why_hiv_test', 'hiv_pills', 'arvs_hiv_test']\n custom_exclude = {ANNUAL: ['num_hiv_tests', 'hiv_pills',\n 'arvs_hiv_test', 'why_hiv_test']}\n radio_fields = {'where_hiv_test': admin.VERTICAL, 'why_hiv_test': admin\n .VERTICAL, 'hiv_pills': admin.VERTICAL, 'arvs_hiv_test': admin.VERTICAL\n }\n\n\nadmin.site.register(HivTested, HivTestedAdmin)\n", "<import token>\n\n\nclass HivTestedAdmin(SubjectAdminExcludeMixin, SubjectVisitModelAdmin):\n form = HivTestedForm\n fields = ['subject_visit', 'num_hiv_tests', 'where_hiv_test',\n 'where_hiv_test_other', 'why_hiv_test', 'hiv_pills', 'arvs_hiv_test']\n custom_exclude = {ANNUAL: ['num_hiv_tests', 'hiv_pills',\n 'arvs_hiv_test', 'why_hiv_test']}\n radio_fields = {'where_hiv_test': admin.VERTICAL, 'why_hiv_test': admin\n .VERTICAL, 'hiv_pills': admin.VERTICAL, 'arvs_hiv_test': admin.VERTICAL\n }\n\n\n<code token>\n", "<import token>\n\n\nclass HivTestedAdmin(SubjectAdminExcludeMixin, SubjectVisitModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n<code token>\n", "<import token>\n<class token>\n<code token>\n" ]
false
98,918
71bb9996d81acfb4c2b6596c34aff009ac0d2e20
# -*- coding:utf-8 -*- import os import sys import configparser import threading class ConfigSingleton(object): _instance_lock = threading.Lock() def __init__(self): pass def parser(self): cur_dir = os.path.dirname(__file__) file_name = cur_dir + '/pack.cfg' if not os.path.exists(file_name): print("%s not exist, and exit()" % file_name) sys.exit(1) return # 生成config对象 conf = configparser.ConfigParser() # 用config对象读取配置文件 conf.read(file_name) self.back_up_dir = conf.get("back_up", "back-up-dir") self.base_dir = conf.get("pack","base-dir") print("config info = ", self.base_dir, self.back_up_dir) def __new__(cls, *args, **kwargs): if not hasattr(ConfigSingleton, "_instance"): with ConfigSingleton._instance_lock: if not hasattr(ConfigSingleton, "_instance"): ConfigSingleton._instance = object.__new__(cls) ConfigSingleton._instance.parser() return ConfigSingleton._instance if __name__ == '__main__': config = ConfigSingleton(); print(config.back_up_dir) config1 = ConfigSingleton(); config2 = ConfigSingleton(); print(config,config1,config2)
[ "# -*- coding:utf-8 -*-\nimport os\nimport sys\n\nimport configparser\nimport threading\nclass ConfigSingleton(object):\n _instance_lock = threading.Lock()\n\n def __init__(self):\n pass\n\n def parser(self):\n cur_dir = os.path.dirname(__file__)\n file_name = cur_dir + '/pack.cfg'\n if not os.path.exists(file_name):\n print(\"%s not exist, and exit()\" % file_name)\n sys.exit(1)\n return\n # 生成config对象\n conf = configparser.ConfigParser()\n # 用config对象读取配置文件\n conf.read(file_name)\n self.back_up_dir = conf.get(\"back_up\", \"back-up-dir\")\n self.base_dir = conf.get(\"pack\",\"base-dir\")\n print(\"config info = \", self.base_dir, self.back_up_dir)\n\n def __new__(cls, *args, **kwargs):\n if not hasattr(ConfigSingleton, \"_instance\"):\n with ConfigSingleton._instance_lock:\n if not hasattr(ConfigSingleton, \"_instance\"):\n ConfigSingleton._instance = object.__new__(cls)\n ConfigSingleton._instance.parser()\n return ConfigSingleton._instance\n\n\nif __name__ == '__main__':\n config = ConfigSingleton();\n print(config.back_up_dir)\n config1 = ConfigSingleton();\n config2 = ConfigSingleton();\n print(config,config1,config2)", "import os\nimport sys\nimport configparser\nimport threading\n\n\nclass ConfigSingleton(object):\n _instance_lock = threading.Lock()\n\n def __init__(self):\n pass\n\n def parser(self):\n cur_dir = os.path.dirname(__file__)\n file_name = cur_dir + '/pack.cfg'\n if not os.path.exists(file_name):\n print('%s not exist, and exit()' % file_name)\n sys.exit(1)\n return\n conf = configparser.ConfigParser()\n conf.read(file_name)\n self.back_up_dir = conf.get('back_up', 'back-up-dir')\n self.base_dir = conf.get('pack', 'base-dir')\n print('config info = ', self.base_dir, self.back_up_dir)\n\n def __new__(cls, *args, **kwargs):\n if not hasattr(ConfigSingleton, '_instance'):\n with ConfigSingleton._instance_lock:\n if not hasattr(ConfigSingleton, '_instance'):\n ConfigSingleton._instance = object.__new__(cls)\n ConfigSingleton._instance.parser()\n return ConfigSingleton._instance\n\n\nif __name__ == '__main__':\n config = ConfigSingleton()\n print(config.back_up_dir)\n config1 = ConfigSingleton()\n config2 = ConfigSingleton()\n print(config, config1, config2)\n", "<import token>\n\n\nclass ConfigSingleton(object):\n _instance_lock = threading.Lock()\n\n def __init__(self):\n pass\n\n def parser(self):\n cur_dir = os.path.dirname(__file__)\n file_name = cur_dir + '/pack.cfg'\n if not os.path.exists(file_name):\n print('%s not exist, and exit()' % file_name)\n sys.exit(1)\n return\n conf = configparser.ConfigParser()\n conf.read(file_name)\n self.back_up_dir = conf.get('back_up', 'back-up-dir')\n self.base_dir = conf.get('pack', 'base-dir')\n print('config info = ', self.base_dir, self.back_up_dir)\n\n def __new__(cls, *args, **kwargs):\n if not hasattr(ConfigSingleton, '_instance'):\n with ConfigSingleton._instance_lock:\n if not hasattr(ConfigSingleton, '_instance'):\n ConfigSingleton._instance = object.__new__(cls)\n ConfigSingleton._instance.parser()\n return ConfigSingleton._instance\n\n\nif __name__ == '__main__':\n config = ConfigSingleton()\n print(config.back_up_dir)\n config1 = ConfigSingleton()\n config2 = ConfigSingleton()\n print(config, config1, config2)\n", "<import token>\n\n\nclass ConfigSingleton(object):\n _instance_lock = threading.Lock()\n\n def __init__(self):\n pass\n\n def parser(self):\n cur_dir = os.path.dirname(__file__)\n file_name = cur_dir + '/pack.cfg'\n if not os.path.exists(file_name):\n print('%s not exist, and exit()' % file_name)\n sys.exit(1)\n return\n conf = configparser.ConfigParser()\n conf.read(file_name)\n self.back_up_dir = conf.get('back_up', 'back-up-dir')\n self.base_dir = conf.get('pack', 'base-dir')\n print('config info = ', self.base_dir, self.back_up_dir)\n\n def __new__(cls, *args, **kwargs):\n if not hasattr(ConfigSingleton, '_instance'):\n with ConfigSingleton._instance_lock:\n if not hasattr(ConfigSingleton, '_instance'):\n ConfigSingleton._instance = object.__new__(cls)\n ConfigSingleton._instance.parser()\n return ConfigSingleton._instance\n\n\n<code token>\n", "<import token>\n\n\nclass ConfigSingleton(object):\n <assignment token>\n\n def __init__(self):\n pass\n\n def parser(self):\n cur_dir = os.path.dirname(__file__)\n file_name = cur_dir + '/pack.cfg'\n if not os.path.exists(file_name):\n print('%s not exist, and exit()' % file_name)\n sys.exit(1)\n return\n conf = configparser.ConfigParser()\n conf.read(file_name)\n self.back_up_dir = conf.get('back_up', 'back-up-dir')\n self.base_dir = conf.get('pack', 'base-dir')\n print('config info = ', self.base_dir, self.back_up_dir)\n\n def __new__(cls, *args, **kwargs):\n if not hasattr(ConfigSingleton, '_instance'):\n with ConfigSingleton._instance_lock:\n if not hasattr(ConfigSingleton, '_instance'):\n ConfigSingleton._instance = object.__new__(cls)\n ConfigSingleton._instance.parser()\n return ConfigSingleton._instance\n\n\n<code token>\n", "<import token>\n\n\nclass ConfigSingleton(object):\n <assignment token>\n <function token>\n\n def parser(self):\n cur_dir = os.path.dirname(__file__)\n file_name = cur_dir + '/pack.cfg'\n if not os.path.exists(file_name):\n print('%s not exist, and exit()' % file_name)\n sys.exit(1)\n return\n conf = configparser.ConfigParser()\n conf.read(file_name)\n self.back_up_dir = conf.get('back_up', 'back-up-dir')\n self.base_dir = conf.get('pack', 'base-dir')\n print('config info = ', self.base_dir, self.back_up_dir)\n\n def __new__(cls, *args, **kwargs):\n if not hasattr(ConfigSingleton, '_instance'):\n with ConfigSingleton._instance_lock:\n if not hasattr(ConfigSingleton, '_instance'):\n ConfigSingleton._instance = object.__new__(cls)\n ConfigSingleton._instance.parser()\n return ConfigSingleton._instance\n\n\n<code token>\n", "<import token>\n\n\nclass ConfigSingleton(object):\n <assignment token>\n <function token>\n <function token>\n\n def __new__(cls, *args, **kwargs):\n if not hasattr(ConfigSingleton, '_instance'):\n with ConfigSingleton._instance_lock:\n if not hasattr(ConfigSingleton, '_instance'):\n ConfigSingleton._instance = object.__new__(cls)\n ConfigSingleton._instance.parser()\n return ConfigSingleton._instance\n\n\n<code token>\n", "<import token>\n\n\nclass ConfigSingleton(object):\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n\n<code token>\n", "<import token>\n<class token>\n<code token>\n" ]
false
98,919
5674f78f8ae5d687d40a284d00ba8a8c9210f3f2
def insertion_sort(a_list): for index in range(1,len(a_list)): current_value = a_list[index] current_position = index while current_position > 0 and a_list[current_position-1] > current_value: a_list[current_position] = a_list[current_position - 1] current_position = current_position - 1 a_list[current_position] = current_value a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20] insertion_sort(a_list) print(a_list)
[ "def insertion_sort(a_list):\n for index in range(1,len(a_list)):\n current_value = a_list[index]\n current_position = index\n while current_position > 0 and a_list[current_position-1] > current_value:\n a_list[current_position] = a_list[current_position - 1]\n current_position = current_position - 1\n\n a_list[current_position] = current_value\n\na_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]\ninsertion_sort(a_list)\nprint(a_list)", "def insertion_sort(a_list):\n for index in range(1, len(a_list)):\n current_value = a_list[index]\n current_position = index\n while current_position > 0 and a_list[current_position - 1\n ] > current_value:\n a_list[current_position] = a_list[current_position - 1]\n current_position = current_position - 1\n a_list[current_position] = current_value\n\n\na_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]\ninsertion_sort(a_list)\nprint(a_list)\n", "def insertion_sort(a_list):\n for index in range(1, len(a_list)):\n current_value = a_list[index]\n current_position = index\n while current_position > 0 and a_list[current_position - 1\n ] > current_value:\n a_list[current_position] = a_list[current_position - 1]\n current_position = current_position - 1\n a_list[current_position] = current_value\n\n\n<assignment token>\ninsertion_sort(a_list)\nprint(a_list)\n", "def insertion_sort(a_list):\n for index in range(1, len(a_list)):\n current_value = a_list[index]\n current_position = index\n while current_position > 0 and a_list[current_position - 1\n ] > current_value:\n a_list[current_position] = a_list[current_position - 1]\n current_position = current_position - 1\n a_list[current_position] = current_value\n\n\n<assignment token>\n<code token>\n", "<function token>\n<assignment token>\n<code token>\n" ]
false
98,920
b1855fdc05db27d58673921222ea84692ce102bc
#!/usr/bin/python # -*- coding: utf-8 -*- import numpy as np import imutils #PyQT5 Libraries from PyQt5.QtGui import * from PyQt5.QtWidgets import * from PyQt5.QtCore import * from PyQt5.QtMultimedia import * from PyQt5.QtMultimediaWidgets import * #OpenCV Libraries import cv2 class VideoWidgetSurface(QAbstractVideoSurface): def __init__(self, widget, parent=None): super(VideoWidgetSurface, self).__init__(parent) self.widget = widget self.imageFormat = QImage.Format_Invalid global frameCounter frameCounter = 0 #Frame Counter initialize def supportedPixelFormats(self, handleType=QAbstractVideoBuffer.NoHandle): formats = [QVideoFrame.PixelFormat()] if (handleType == QAbstractVideoBuffer.NoHandle): for f in [QVideoFrame.Format_RGB32, QVideoFrame.Format_ARGB32, QVideoFrame.Format_ARGB32_Premultiplied, QVideoFrame.Format_RGB565, QVideoFrame.Format_RGB555,QVideoFrame.Format_BGR24,QVideoFrame.Format_RGB24]: formats.append(f) return formats def isFormatSupported(self, _format): imageFormat = QVideoFrame.imageFormatFromPixelFormat(_format.pixelFormat()) size = _format.frameSize() _bool = False if (imageFormat != QImage.Format_Invalid and not size.isEmpty() and _format.handleType() == QAbstractVideoBuffer.NoHandle): _bool = True return _bool def start(self, _format): imageFormat = QVideoFrame.imageFormatFromPixelFormat(_format.pixelFormat()) size = _format.frameSize() #frameCounter = 0 #Frame Counter initialize if (imageFormat != QImage.Format_Invalid and not size.isEmpty()): self.imageFormat = imageFormat self.imageSize = size self.sourceRect = _format.viewport() QAbstractVideoSurface.start(self, _format) self.widget.updateGeometry() self.updateVideoRect() return True else: return False def stop(self): self.currentFrame = QVideoFrame() self.targetRect = QRect() QAbstractVideoSurface.stop(self) self.widget.update() def present(self, frame): global frameCounter,removeBool if (self.surfaceFormat().pixelFormat() != frame.pixelFormat() or self.surfaceFormat().frameSize() != frame.size()): self.setError(QAbstractVideoSurface.IncorrectFormatError) self.stop() return False else: self.currentFrame = frame frameCounter += 1 removeBool = True #Removes the boxes on current frame self.widget.repaint(self.targetRect) return True def videoRect(self): return self.targetRect def updateVideoRect(self): size = self.surfaceFormat().sizeHint() size.scale(self.widget.size().boundedTo(size), Qt.KeepAspectRatio) self.targetRect = QRect(QPoint(0, 0), size); self.targetRect.moveCenter(self.widget.rect().center()) def paint(self, painter): if (self.currentFrame.map(QAbstractVideoBuffer.ReadOnly)): oldTransform = painter.transform() if (self.surfaceFormat().scanLineDirection() == QVideoSurfaceFormat.BottomToTop): painter.scale(1, -1); painter.translate(0, -self.widget.height()) image = QImage(self.currentFrame.bits(), self.currentFrame.width(), self.currentFrame.height(), self.currentFrame.bytesPerLine(), self.imageFormat ) painter.drawImage(self.targetRect, image, self.sourceRect) painter.setTransform(oldTransform) self.currentFrame.unmap() class VideoWidget(QWidget): def __init__(self, parent=None): global classLabels, imageBuffer super(VideoWidget, self).__init__(parent) self.setAutoFillBackground(False) self.setAttribute(Qt.WA_NoSystemBackground, True) self.setAttribute(Qt.WA_OpaquePaintEvent) palette = self.palette() palette.setColor(QPalette.Background, Qt.black) self.setPalette(palette) self.setSizePolicy(QSizePolicy.MinimumExpanding , QSizePolicy.MinimumExpanding) self.surface = VideoWidgetSurface(self) classLabels = [] highLabels = [] imageBuffer = [] def videoSurface(self): return self.surface class VideoPlayer(QWidget): def __init__(self, parent=None): super(VideoPlayer, self).__init__(parent) #initialize video player window self.mediaPlayer = QMediaPlayer(None, QMediaPlayer.VideoSurface) try: #DEFINE PLAYER-PLAYLIST #---------------------- pass except: pass self.mediaPlayer.setMedia(QMediaContent(QUrl.fromLocalFile("/home/ediamant/Documents/myProjects/OpenCV-LeftBehindObjects/5.MOV"))) self.videoWidget = VideoWidget() self.videoWidget.setFixedSize(640, 480) #player buttons self.playButton = QPushButton() self.playButton.setIcon(self.style().standardIcon(QStyle.SP_MediaPlay)) self.playButton.clicked.connect(self.play) #video slider self.positionSlider = QSlider(Qt.Horizontal) #self.positionSlider.setRange(0,x) self.positionSlider.setMinimum(0) #self.positionSlider.setMaximum() self.positionSlider.setTickInterval(1) #self.positionSlider.sliderMoved.connect(self.setPosition) self.controlLayout = QHBoxLayout() self.controlLayout.addWidget(self.playButton) self.controlLayout.addWidget(self.positionSlider) self.controlLayout.setAlignment(Qt.AlignLeft) #self.controlLayout.addStretch(1) videoLayout = QVBoxLayout() #videoLayout.addStretch(1) videoLayout.addWidget(self.videoWidget) videoLayout.addLayout(self.controlLayout) self.setLayout(videoLayout) self.mediaPlayer.setVideoOutput(self.videoWidget.videoSurface()) self.setWindowTitle("Player") self.show() def play(self): print 'play' self.time_ = self.mediaPlayer.position() self.mediaPlayer.play() ''' if self.mediaPlayer.state() == QMediaPlayer.PlayingState: self.videoPosition() self.mediaPlayer.pause() self.time_ = self.positionSlider else: pass ''' def mediaStateChanged(self, state): if state == QMediaPlayer.PlayingState: self.playButton.setIcon(self.style().standardIcon(QStyle.SP_MediaPause)) else: self.playButton.setIcon(self.style().standardIcon(QStyle.SP_MediaPlay)) if __name__ == '__main__': import sys from PyQt5.QtWidgets import QApplication app = QApplication(sys.argv) #initialize main window player = VideoPlayer() sys.exit(app.exec_())
[ "#!/usr/bin/python\n# -*- coding: utf-8 -*-\nimport numpy as np\nimport imutils\n\n#PyQT5 Libraries\nfrom PyQt5.QtGui import *\nfrom PyQt5.QtWidgets import *\nfrom PyQt5.QtCore import *\nfrom PyQt5.QtMultimedia import *\nfrom PyQt5.QtMultimediaWidgets import *\n\n#OpenCV Libraries\nimport cv2\nclass VideoWidgetSurface(QAbstractVideoSurface):\n\n def __init__(self, widget, parent=None):\n super(VideoWidgetSurface, self).__init__(parent)\n self.widget = widget\n self.imageFormat = QImage.Format_Invalid\n global frameCounter\n frameCounter = 0 #Frame Counter initialize\n\n def supportedPixelFormats(self, handleType=QAbstractVideoBuffer.NoHandle):\n formats = [QVideoFrame.PixelFormat()]\n if (handleType == QAbstractVideoBuffer.NoHandle):\n for f in [QVideoFrame.Format_RGB32, QVideoFrame.Format_ARGB32, QVideoFrame.Format_ARGB32_Premultiplied, QVideoFrame.Format_RGB565, QVideoFrame.Format_RGB555,QVideoFrame.Format_BGR24,QVideoFrame.Format_RGB24]:\n formats.append(f)\n return formats\n\n def isFormatSupported(self, _format):\n imageFormat = QVideoFrame.imageFormatFromPixelFormat(_format.pixelFormat())\n size = _format.frameSize()\n _bool = False\n if (imageFormat != QImage.Format_Invalid and not size.isEmpty() and _format.handleType() == QAbstractVideoBuffer.NoHandle):\n _bool = True\n return _bool\n\n def start(self, _format):\n imageFormat = QVideoFrame.imageFormatFromPixelFormat(_format.pixelFormat())\n size = _format.frameSize()\n #frameCounter = 0 #Frame Counter initialize\n if (imageFormat != QImage.Format_Invalid and not size.isEmpty()):\n self.imageFormat = imageFormat\n self.imageSize = size\n self.sourceRect = _format.viewport()\n QAbstractVideoSurface.start(self, _format)\n self.widget.updateGeometry()\n self.updateVideoRect()\n return True\n else:\n return False\n\n def stop(self):\n self.currentFrame = QVideoFrame()\n self.targetRect = QRect()\n QAbstractVideoSurface.stop(self)\n\n self.widget.update()\n\n def present(self, frame):\n global frameCounter,removeBool\n if (self.surfaceFormat().pixelFormat() != frame.pixelFormat() or self.surfaceFormat().frameSize() != frame.size()):\n self.setError(QAbstractVideoSurface.IncorrectFormatError)\n self.stop()\n return False\n else:\n self.currentFrame = frame\n frameCounter += 1\n removeBool = True #Removes the boxes on current frame\n self.widget.repaint(self.targetRect)\n return True\n\n def videoRect(self):\n return self.targetRect\n\n def updateVideoRect(self):\n size = self.surfaceFormat().sizeHint()\n size.scale(self.widget.size().boundedTo(size), Qt.KeepAspectRatio)\n self.targetRect = QRect(QPoint(0, 0), size);\n self.targetRect.moveCenter(self.widget.rect().center())\n\n def paint(self, painter):\n if (self.currentFrame.map(QAbstractVideoBuffer.ReadOnly)):\n oldTransform = painter.transform()\n if (self.surfaceFormat().scanLineDirection() == QVideoSurfaceFormat.BottomToTop):\n painter.scale(1, -1);\n painter.translate(0, -self.widget.height())\n\n image = QImage(self.currentFrame.bits(),\n self.currentFrame.width(),\n self.currentFrame.height(),\n self.currentFrame.bytesPerLine(),\n self.imageFormat\n )\n\n painter.drawImage(self.targetRect, image, self.sourceRect)\n painter.setTransform(oldTransform)\n\n self.currentFrame.unmap()\n\nclass VideoWidget(QWidget):\n\n def __init__(self, parent=None):\n global classLabels, imageBuffer\n super(VideoWidget, self).__init__(parent)\n self.setAutoFillBackground(False)\n self.setAttribute(Qt.WA_NoSystemBackground, True)\n self.setAttribute(Qt.WA_OpaquePaintEvent)\n palette = self.palette()\n palette.setColor(QPalette.Background, Qt.black)\n self.setPalette(palette)\n self.setSizePolicy(QSizePolicy.MinimumExpanding ,\n QSizePolicy.MinimumExpanding)\n self.surface = VideoWidgetSurface(self)\n\n classLabels = []\n highLabels = []\n imageBuffer = []\n\n def videoSurface(self):\n return self.surface\n\n\nclass VideoPlayer(QWidget):\n def __init__(self, parent=None):\n super(VideoPlayer, self).__init__(parent)\n\n #initialize video player window\n self.mediaPlayer = QMediaPlayer(None, QMediaPlayer.VideoSurface)\n try:\n #DEFINE PLAYER-PLAYLIST\n #----------------------\n pass\n except:\n pass\n self.mediaPlayer.setMedia(QMediaContent(QUrl.fromLocalFile(\"/home/ediamant/Documents/myProjects/OpenCV-LeftBehindObjects/5.MOV\")))\n self.videoWidget = VideoWidget()\n self.videoWidget.setFixedSize(640, 480)\n\n #player buttons\n self.playButton = QPushButton()\n self.playButton.setIcon(self.style().standardIcon(QStyle.SP_MediaPlay))\n self.playButton.clicked.connect(self.play)\n\n #video slider\n self.positionSlider = QSlider(Qt.Horizontal)\n #self.positionSlider.setRange(0,x)\n self.positionSlider.setMinimum(0)\n #self.positionSlider.setMaximum()\n self.positionSlider.setTickInterval(1)\n #self.positionSlider.sliderMoved.connect(self.setPosition)\n\n self.controlLayout = QHBoxLayout()\n self.controlLayout.addWidget(self.playButton)\n self.controlLayout.addWidget(self.positionSlider)\n self.controlLayout.setAlignment(Qt.AlignLeft)\n #self.controlLayout.addStretch(1)\n\n videoLayout = QVBoxLayout()\n #videoLayout.addStretch(1)\n videoLayout.addWidget(self.videoWidget)\n videoLayout.addLayout(self.controlLayout)\n self.setLayout(videoLayout)\n\n self.mediaPlayer.setVideoOutput(self.videoWidget.videoSurface())\n self.setWindowTitle(\"Player\")\n self.show()\n\n def play(self):\n print 'play'\n self.time_ = self.mediaPlayer.position()\n self.mediaPlayer.play()\n '''\n if self.mediaPlayer.state() == QMediaPlayer.PlayingState:\n self.videoPosition()\n self.mediaPlayer.pause()\n self.time_ = self.positionSlider\n\n else:\n pass\n '''\n def mediaStateChanged(self, state):\n if state == QMediaPlayer.PlayingState:\n self.playButton.setIcon(self.style().standardIcon(QStyle.SP_MediaPause))\n else:\n self.playButton.setIcon(self.style().standardIcon(QStyle.SP_MediaPlay))\n\n\nif __name__ == '__main__':\n import sys\n\n from PyQt5.QtWidgets import QApplication\n\n app = QApplication(sys.argv)\n\n #initialize main window\n player = VideoPlayer()\n\n sys.exit(app.exec_())\n" ]
true
98,921
dd2c33066800ce4b77b265de031c42fc4ced2767
#!/usr/bin/env python from flask import url_for, g from flask.ext.restful import Resource, reqparse from sqlalchemy.exc import IntegrityError from igor_rest_api.api.grouping.login import auth from igor_rest_api.api.constants import * from igor_rest_api.api.grouping.models import ( Group, PduDetails, Outlets, GroupOutlets, UserOutletsGroups, UserPdus) from igor_rest_api.api.grouping.utils import ( query_group, outlet_details, check_outlet_permission) from igor_rest_api.db import db from pudmaster import Pdu_obj """ GET /outlet_groups/<int:groupid>/control Returns the Status of Outlets belonging to the outletgrouping POST /outlet_groups/<int:groupid>/control {'action': Status } Changes the Status of outlets belonging to outletgrouping """ class Groupcontrol(Resource): decorators = [auth.login_required] def __init__(self): self.reqparse = reqparse.RequestParser() self.reqparse.add_argument('action', type=str, required=True, help='No action provided', location='json') super(Groupcontrol, self).__init__() def get(self, groupid): if g.user.username == 'root': outlets = query_group(groupid) else: role = UserOutletsGroups.query.filter_by(userid=g.user.id, outletgroupid=groupid).first() if role is None: return {'message': 'User does not have necessary permission'} else: outlets = query_group(groupid) states = [] amperages = [] for outlet in outlets: pdu = Pdu_obj(outlet[0], 161, outlet[1]) state = pdu.get_outlet_status(outlet[2], outlet[3]) amperage = pdu.get_outlet_amperage(outlet[2], outlet[3]) if state == 'Error': states.append("unable to get data") else: states.append(state) if amperage == 'Error': amperages.append('unable to get data') else: amperages.append(amperage) state_dict = {} for i in range(len(outlets)): state_dict[str(outlets[i][0])+" "+str(outlets[i][2])+" "+str(outlets[i][3])] = states[i] amperage_dict = {} for i in range(len(outlets)): amperage_dict[str(outlets[i][0])+" "+str(outlets[i][2])+" "+str(outlets[i][3])] = amperages[i] return {'Status': state_dict, 'amperages': amperage_dict} def post(self, groupid): args = self.reqparse.parse_args() status = args['action'] if g.user.username == 'root': outlets = query_group(groupid) else: role = UserOutletsGroups.query.filter_by(userid=g.user.id, outletgroupid=groupid).first() if role is None: return {'message': 'User does not have necessary permission'} else: outlets = query_group(groupid) states = [] for outlet in outlets: pdu = Pdu_obj(outlet[0], 161, outlet[1]) ret_value = pdu.change_state(outlet[2], outlet[3], status) if 'No SNMP response received' in str(ret_value): states.append("unable to connect to pdu") else: states.append("changed state") state_dict = {} for i in range(len(outlets)): state_dict[str(outlets[i][0])+" "+str(outlets[i][2])+" "+str(outlets[i][3])] = states[i] return {'Status': state_dict} """ GET /outlet/<int:outletid>/control Returns the Status of outlet POST /outlet/<int:outletid>/control {'action': status } Changes the Status of outlet """ class Outletcontrol(Resource): decorators = [auth.login_required] def __init__(self): self.reqparse = reqparse.RequestParser() self.reqparse.add_argument('action', type=str, required=True, help='No action provided', location='json') super(Outletcontrol, self).__init__() def get(self, outletid): if g.user.username == 'root': outlet = outlet_details(outletid) else: role = check_outlet_permission(g.user.id, outletid) if role is False: return {'message': 'User does not have neccesary permission'} else: outlet = outlet_details(outletid) pdu = Pdu_obj(outlet[0], 161, outlet[1]) state = pdu.get_outlet_status(outlet[2], outlet[3]) amperage = pdu.get_outlet_amperage(outlet[2], outlet[3]) states = [] if state == 'Error': states.append("unable to get data") else: states.append(state) if amperage == 'Error': amperage = 'unable to fetch data' else: amperage = amperage state_dict = {} state_dict[str(outlet[0])+" "+str(outlet[2])+" "+str(outlet[3])] = states[0] state_dict['amperage'] = amperage return {'Status': state_dict} def post(self, outletid): args = self.reqparse.parse_args() status = args['action'] if g.user.username == 'root': outlet = outlet_details(outletid) else: role = check_outlet_permission(g.user.id, outletid) if role is False: return {'message': 'User does not have neccesary permission'} else: outlet = outlet_details(outletid) pdu = Pdu_obj(outlet[0], 161, outlet[1]) states = [] ret_value = pdu.change_state(outlet[2], outlet[3], status) if 'No SNMP response received' in str(ret_value): states.append("unable to connect to pdu") else: states.append("changed state") state_dict = {} state_dict[str(outlet[0])+" "+str(outlet[2])+" "+str(outlet[3])] = states[0] return {'Status': state_dict} """ GET /pdu/<string:pduip>/control Returns the Status of Pdu POST /pdu/<string:pduip>/control {'action': status } Changes the Status of Pdu """ class Pducontrol(Resource): decorators = [auth.login_required] def __init__(self): self.reqparse = reqparse.RequestParser() self.reqparse.add_argument('action', type=str, required=True, help='No action provided', location='json') super(Pducontrol, self).__init__() def get(self, pduip): if g.user.username == 'root': pdu = PduDetails.query.filter_by(ip=pduip).first() if pdu is None: return {'Error': 'pdu doesn"t exist'} else: pdu_access_string = pdu.access_string else: pdu = PduDetails.query.filter_by(ip=pduip).first() if pdu is None: return {'Error': 'pdu doesn"t exist'} relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id).first() if relation is None: return {'message': 'User does not have neccesary permission'} pdu_access_string = pdu.access_string pdu = Pdu_obj(pduip, 161, pdu_access_string) try: status, name = pdu.complete_status() except ValueError: return {'error': 'Unable to get data'} if status == "Error": return {'error': 'Unable to get data'} amperage = pdu.get_amperage_details() status_dict = {} for i in range(len(status)): status_dict[name[i]] = status[i] amperage_dict = {} amperage_dict['tower_A'] = amperage[0] amperage_dict['tower_B'] = amperage[1] return {'status': status_dict, 'amperage': amperage_dict} def post(self, pduip): args = self.reqparse.parse_args() status = args['action'] if g.user.username == 'root': pdu = PduDetails.query.filter_by(ip=pduip).first() if pdu is None: return {'Error': 'pdu doesn"t exist'} else: pdu_access_string = pdu.access_string else: pdu = PduDetails.query.filter_by(ip=pduip).first() if pdu is None: return {'Error': 'pdu doesn"t exist'} relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id).first() if relation is None: return {'message': 'User does not have neccesary permission'} pdu_access_string = pdu.access_string pdu = Pdu_obj(pduip, 161, pdu_access_string) """ GET /pdu/<string:pduip>/<string:tower>/<int:outlet>/control Returns the Status of Pdu POST /pdu/<string:pduip>/<string:tower>/<int:outlet>/control {'action': status } Changes the Status of Pdu """ class Pduoutletcontrol(Resource): decorators = [auth.login_required] def __init__(self): self.reqparse = reqparse.RequestParser() self.reqparse.add_argument('action', type=str, required=True, help='No action provided', location='json') super(Pduoutletcontrol, self).__init__() def get(self, pduip, tower, outlet): if g.user.username == 'root': pdu = PduDetails.query.filter_by(ip=pduip).first() if pdu is None: return {'Error': 'pdu doesn"t exist'} else: pdu_access_string = pdu.access_string else: pdu = PduDetails.query.filter_by(ip=pduip).first() if pdu is None: return {'Error': 'pdu doesn"t exist'} relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id).first() if relation is None: return {'message': 'User does not have neccesary permission'} pdu_access_string = pdu.access_string pdu = Pdu_obj(pduip, 161, pdu_access_string) state = pdu.get_outlet_status(tower, outlet) amperage = pdu.get_outlet_amperage(tower, outlet) if state == 'Error': state = 'Unable to fetch data' if amperage == 'Error': amperage = 'unable to fetch amperage' return {'state': state, 'amperage': amperage} def post(self, pduip, tower, outlet): args = self.reqparse.parse_args() status = args['action'] if g.user.username == 'root': pdu = PduDetails.query.filter_by(ip=pduip).first() if pdu is None: return {'Error': 'pdu doesn"t exist'} else: pdu_access_string = pdu.access_string else: pdu = PduDetails.query.filter_by(ip=pduip).first() if pdu is None: return {'Error': 'pdu doesn"t exist'} relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id).first() if relation is None: return {'message': 'User does not have neccesary permission'} pdu_access_string = pdu.access_string pdu = Pdu_obj(pduip, 161, pdu_access_string) ret_value = pdu.change_state(tower, outlet, status) if 'No SNMP response received' in str(ret_value): return {'Error': 'unable to connect to pdu'} else: return {'Success': 'Changed state'}
[ "#!/usr/bin/env python\n\nfrom flask import url_for, g\nfrom flask.ext.restful import Resource, reqparse\nfrom sqlalchemy.exc import IntegrityError\n\nfrom igor_rest_api.api.grouping.login import auth\nfrom igor_rest_api.api.constants import *\nfrom igor_rest_api.api.grouping.models import (\n Group, PduDetails, Outlets, GroupOutlets,\n UserOutletsGroups, UserPdus)\nfrom igor_rest_api.api.grouping.utils import (\n query_group, outlet_details,\n check_outlet_permission)\nfrom igor_rest_api.db import db\nfrom pudmaster import Pdu_obj\n\n\n\"\"\"\n GET /outlet_groups/<int:groupid>/control Returns the Status of Outlets belonging to the outletgrouping\n POST /outlet_groups/<int:groupid>/control {'action': Status } Changes the Status of outlets belonging to outletgrouping\n\"\"\"\n\n\nclass Groupcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True,\n help='No action provided',\n location='json')\n super(Groupcontrol, self).__init__()\n\n def get(self, groupid):\n if g.user.username == 'root':\n outlets = query_group(groupid)\n\n else:\n role = UserOutletsGroups.query.filter_by(userid=g.user.id,\n outletgroupid=groupid).first()\n if role is None:\n return {'message': 'User does not have necessary permission'}\n else:\n outlets = query_group(groupid)\n\n states = []\n amperages = []\n for outlet in outlets:\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n state = pdu.get_outlet_status(outlet[2], outlet[3])\n amperage = pdu.get_outlet_amperage(outlet[2], outlet[3])\n\n if state == 'Error':\n states.append(\"unable to get data\")\n else:\n states.append(state)\n\n if amperage == 'Error':\n amperages.append('unable to get data')\n else:\n amperages.append(amperage)\n\n state_dict = {}\n for i in range(len(outlets)):\n state_dict[str(outlets[i][0])+\" \"+str(outlets[i][2])+\" \"+str(outlets[i][3])] = states[i]\n\n amperage_dict = {}\n for i in range(len(outlets)):\n amperage_dict[str(outlets[i][0])+\" \"+str(outlets[i][2])+\" \"+str(outlets[i][3])] = amperages[i]\n return {'Status': state_dict, 'amperages': amperage_dict}\n\n def post(self, groupid):\n args = self.reqparse.parse_args()\n\n status = args['action']\n if g.user.username == 'root':\n outlets = query_group(groupid)\n\n else:\n role = UserOutletsGroups.query.filter_by(userid=g.user.id,\n outletgroupid=groupid).first()\n if role is None:\n return {'message': 'User does not have necessary permission'}\n else:\n outlets = query_group(groupid)\n\n states = []\n for outlet in outlets:\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append(\"unable to connect to pdu\")\n else:\n states.append(\"changed state\")\n\n state_dict = {}\n for i in range(len(outlets)):\n state_dict[str(outlets[i][0])+\" \"+str(outlets[i][2])+\" \"+str(outlets[i][3])] = states[i]\n\n return {'Status': state_dict}\n\n\n\"\"\"\n GET /outlet/<int:outletid>/control Returns the Status of outlet\n POST /outlet/<int:outletid>/control {'action': status } Changes the Status of outlet\n\"\"\"\n\n\nclass Outletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True,\n help='No action provided',\n location='json')\n super(Outletcontrol, self).__init__()\n\n def get(self, outletid):\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n state = pdu.get_outlet_status(outlet[2], outlet[3])\n amperage = pdu.get_outlet_amperage(outlet[2], outlet[3])\n\n states = []\n if state == 'Error':\n states.append(\"unable to get data\")\n else:\n states.append(state)\n\n if amperage == 'Error':\n amperage = 'unable to fetch data'\n else:\n amperage = amperage\n\n state_dict = {}\n state_dict[str(outlet[0])+\" \"+str(outlet[2])+\" \"+str(outlet[3])] = states[0]\n state_dict['amperage'] = amperage\n\n return {'Status': state_dict}\n\n def post(self, outletid):\n args = self.reqparse.parse_args()\n\n status = args['action']\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n states = []\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append(\"unable to connect to pdu\")\n else:\n states.append(\"changed state\")\n state_dict = {}\n state_dict[str(outlet[0])+\" \"+str(outlet[2])+\" \"+str(outlet[3])] = states[0]\n\n return {'Status': state_dict}\n\n\"\"\"\n GET /pdu/<string:pduip>/control Returns the Status of Pdu\n POST /pdu/<string:pduip>/control {'action': status } Changes the Status of Pdu\n\"\"\"\n\n\nclass Pducontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True,\n help='No action provided',\n location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n\n if status == \"Error\":\n return {'error': 'Unable to get data'}\n\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\"\"\"\n GET /pdu/<string:pduip>/<string:tower>/<int:outlet>/control\n Returns the Status of Pdu\n POST /pdu/<string:pduip>/<string:tower>/<int:outlet>/control\n {'action': status }\n Changes the Status of Pdu\n\"\"\"\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True,\n help='No action provided',\n location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "from flask import url_for, g\nfrom flask.ext.restful import Resource, reqparse\nfrom sqlalchemy.exc import IntegrityError\nfrom igor_rest_api.api.grouping.login import auth\nfrom igor_rest_api.api.constants import *\nfrom igor_rest_api.api.grouping.models import Group, PduDetails, Outlets, GroupOutlets, UserOutletsGroups, UserPdus\nfrom igor_rest_api.api.grouping.utils import query_group, outlet_details, check_outlet_permission\nfrom igor_rest_api.db import db\nfrom pudmaster import Pdu_obj\n<docstring token>\n\n\nclass Groupcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Groupcontrol, self).__init__()\n\n def get(self, groupid):\n if g.user.username == 'root':\n outlets = query_group(groupid)\n else:\n role = UserOutletsGroups.query.filter_by(userid=g.user.id,\n outletgroupid=groupid).first()\n if role is None:\n return {'message': 'User does not have necessary permission'}\n else:\n outlets = query_group(groupid)\n states = []\n amperages = []\n for outlet in outlets:\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n state = pdu.get_outlet_status(outlet[2], outlet[3])\n amperage = pdu.get_outlet_amperage(outlet[2], outlet[3])\n if state == 'Error':\n states.append('unable to get data')\n else:\n states.append(state)\n if amperage == 'Error':\n amperages.append('unable to get data')\n else:\n amperages.append(amperage)\n state_dict = {}\n for i in range(len(outlets)):\n state_dict[str(outlets[i][0]) + ' ' + str(outlets[i][2]) + ' ' +\n str(outlets[i][3])] = states[i]\n amperage_dict = {}\n for i in range(len(outlets)):\n amperage_dict[str(outlets[i][0]) + ' ' + str(outlets[i][2]) +\n ' ' + str(outlets[i][3])] = amperages[i]\n return {'Status': state_dict, 'amperages': amperage_dict}\n\n def post(self, groupid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlets = query_group(groupid)\n else:\n role = UserOutletsGroups.query.filter_by(userid=g.user.id,\n outletgroupid=groupid).first()\n if role is None:\n return {'message': 'User does not have necessary permission'}\n else:\n outlets = query_group(groupid)\n states = []\n for outlet in outlets:\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n for i in range(len(outlets)):\n state_dict[str(outlets[i][0]) + ' ' + str(outlets[i][2]) + ' ' +\n str(outlets[i][3])] = states[i]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Outletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Outletcontrol, self).__init__()\n\n def get(self, outletid):\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n state = pdu.get_outlet_status(outlet[2], outlet[3])\n amperage = pdu.get_outlet_amperage(outlet[2], outlet[3])\n states = []\n if state == 'Error':\n states.append('unable to get data')\n else:\n states.append(state)\n if amperage == 'Error':\n amperage = 'unable to fetch data'\n else:\n amperage = amperage\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n state_dict['amperage'] = amperage\n return {'Status': state_dict}\n\n def post(self, outletid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n states = []\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Pducontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n if status == 'Error':\n return {'error': 'Unable to get data'}\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n\n\nclass Groupcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Groupcontrol, self).__init__()\n\n def get(self, groupid):\n if g.user.username == 'root':\n outlets = query_group(groupid)\n else:\n role = UserOutletsGroups.query.filter_by(userid=g.user.id,\n outletgroupid=groupid).first()\n if role is None:\n return {'message': 'User does not have necessary permission'}\n else:\n outlets = query_group(groupid)\n states = []\n amperages = []\n for outlet in outlets:\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n state = pdu.get_outlet_status(outlet[2], outlet[3])\n amperage = pdu.get_outlet_amperage(outlet[2], outlet[3])\n if state == 'Error':\n states.append('unable to get data')\n else:\n states.append(state)\n if amperage == 'Error':\n amperages.append('unable to get data')\n else:\n amperages.append(amperage)\n state_dict = {}\n for i in range(len(outlets)):\n state_dict[str(outlets[i][0]) + ' ' + str(outlets[i][2]) + ' ' +\n str(outlets[i][3])] = states[i]\n amperage_dict = {}\n for i in range(len(outlets)):\n amperage_dict[str(outlets[i][0]) + ' ' + str(outlets[i][2]) +\n ' ' + str(outlets[i][3])] = amperages[i]\n return {'Status': state_dict, 'amperages': amperage_dict}\n\n def post(self, groupid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlets = query_group(groupid)\n else:\n role = UserOutletsGroups.query.filter_by(userid=g.user.id,\n outletgroupid=groupid).first()\n if role is None:\n return {'message': 'User does not have necessary permission'}\n else:\n outlets = query_group(groupid)\n states = []\n for outlet in outlets:\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n for i in range(len(outlets)):\n state_dict[str(outlets[i][0]) + ' ' + str(outlets[i][2]) + ' ' +\n str(outlets[i][3])] = states[i]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Outletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Outletcontrol, self).__init__()\n\n def get(self, outletid):\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n state = pdu.get_outlet_status(outlet[2], outlet[3])\n amperage = pdu.get_outlet_amperage(outlet[2], outlet[3])\n states = []\n if state == 'Error':\n states.append('unable to get data')\n else:\n states.append(state)\n if amperage == 'Error':\n amperage = 'unable to fetch data'\n else:\n amperage = amperage\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n state_dict['amperage'] = amperage\n return {'Status': state_dict}\n\n def post(self, outletid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n states = []\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Pducontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n if status == 'Error':\n return {'error': 'Unable to get data'}\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n\n\nclass Groupcontrol(Resource):\n <assignment token>\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Groupcontrol, self).__init__()\n\n def get(self, groupid):\n if g.user.username == 'root':\n outlets = query_group(groupid)\n else:\n role = UserOutletsGroups.query.filter_by(userid=g.user.id,\n outletgroupid=groupid).first()\n if role is None:\n return {'message': 'User does not have necessary permission'}\n else:\n outlets = query_group(groupid)\n states = []\n amperages = []\n for outlet in outlets:\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n state = pdu.get_outlet_status(outlet[2], outlet[3])\n amperage = pdu.get_outlet_amperage(outlet[2], outlet[3])\n if state == 'Error':\n states.append('unable to get data')\n else:\n states.append(state)\n if amperage == 'Error':\n amperages.append('unable to get data')\n else:\n amperages.append(amperage)\n state_dict = {}\n for i in range(len(outlets)):\n state_dict[str(outlets[i][0]) + ' ' + str(outlets[i][2]) + ' ' +\n str(outlets[i][3])] = states[i]\n amperage_dict = {}\n for i in range(len(outlets)):\n amperage_dict[str(outlets[i][0]) + ' ' + str(outlets[i][2]) +\n ' ' + str(outlets[i][3])] = amperages[i]\n return {'Status': state_dict, 'amperages': amperage_dict}\n\n def post(self, groupid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlets = query_group(groupid)\n else:\n role = UserOutletsGroups.query.filter_by(userid=g.user.id,\n outletgroupid=groupid).first()\n if role is None:\n return {'message': 'User does not have necessary permission'}\n else:\n outlets = query_group(groupid)\n states = []\n for outlet in outlets:\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n for i in range(len(outlets)):\n state_dict[str(outlets[i][0]) + ' ' + str(outlets[i][2]) + ' ' +\n str(outlets[i][3])] = states[i]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Outletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Outletcontrol, self).__init__()\n\n def get(self, outletid):\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n state = pdu.get_outlet_status(outlet[2], outlet[3])\n amperage = pdu.get_outlet_amperage(outlet[2], outlet[3])\n states = []\n if state == 'Error':\n states.append('unable to get data')\n else:\n states.append(state)\n if amperage == 'Error':\n amperage = 'unable to fetch data'\n else:\n amperage = amperage\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n state_dict['amperage'] = amperage\n return {'Status': state_dict}\n\n def post(self, outletid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n states = []\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Pducontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n if status == 'Error':\n return {'error': 'Unable to get data'}\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n\n\nclass Groupcontrol(Resource):\n <assignment token>\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Groupcontrol, self).__init__()\n <function token>\n\n def post(self, groupid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlets = query_group(groupid)\n else:\n role = UserOutletsGroups.query.filter_by(userid=g.user.id,\n outletgroupid=groupid).first()\n if role is None:\n return {'message': 'User does not have necessary permission'}\n else:\n outlets = query_group(groupid)\n states = []\n for outlet in outlets:\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n for i in range(len(outlets)):\n state_dict[str(outlets[i][0]) + ' ' + str(outlets[i][2]) + ' ' +\n str(outlets[i][3])] = states[i]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Outletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Outletcontrol, self).__init__()\n\n def get(self, outletid):\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n state = pdu.get_outlet_status(outlet[2], outlet[3])\n amperage = pdu.get_outlet_amperage(outlet[2], outlet[3])\n states = []\n if state == 'Error':\n states.append('unable to get data')\n else:\n states.append(state)\n if amperage == 'Error':\n amperage = 'unable to fetch data'\n else:\n amperage = amperage\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n state_dict['amperage'] = amperage\n return {'Status': state_dict}\n\n def post(self, outletid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n states = []\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Pducontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n if status == 'Error':\n return {'error': 'Unable to get data'}\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n\n\nclass Groupcontrol(Resource):\n <assignment token>\n <function token>\n <function token>\n\n def post(self, groupid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlets = query_group(groupid)\n else:\n role = UserOutletsGroups.query.filter_by(userid=g.user.id,\n outletgroupid=groupid).first()\n if role is None:\n return {'message': 'User does not have necessary permission'}\n else:\n outlets = query_group(groupid)\n states = []\n for outlet in outlets:\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n for i in range(len(outlets)):\n state_dict[str(outlets[i][0]) + ' ' + str(outlets[i][2]) + ' ' +\n str(outlets[i][3])] = states[i]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Outletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Outletcontrol, self).__init__()\n\n def get(self, outletid):\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n state = pdu.get_outlet_status(outlet[2], outlet[3])\n amperage = pdu.get_outlet_amperage(outlet[2], outlet[3])\n states = []\n if state == 'Error':\n states.append('unable to get data')\n else:\n states.append(state)\n if amperage == 'Error':\n amperage = 'unable to fetch data'\n else:\n amperage = amperage\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n state_dict['amperage'] = amperage\n return {'Status': state_dict}\n\n def post(self, outletid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n states = []\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Pducontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n if status == 'Error':\n return {'error': 'Unable to get data'}\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n\n\nclass Groupcontrol(Resource):\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n\n<docstring token>\n\n\nclass Outletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Outletcontrol, self).__init__()\n\n def get(self, outletid):\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n state = pdu.get_outlet_status(outlet[2], outlet[3])\n amperage = pdu.get_outlet_amperage(outlet[2], outlet[3])\n states = []\n if state == 'Error':\n states.append('unable to get data')\n else:\n states.append(state)\n if amperage == 'Error':\n amperage = 'unable to fetch data'\n else:\n amperage = amperage\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n state_dict['amperage'] = amperage\n return {'Status': state_dict}\n\n def post(self, outletid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n states = []\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Pducontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n if status == 'Error':\n return {'error': 'Unable to get data'}\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Outletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Outletcontrol, self).__init__()\n\n def get(self, outletid):\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n state = pdu.get_outlet_status(outlet[2], outlet[3])\n amperage = pdu.get_outlet_amperage(outlet[2], outlet[3])\n states = []\n if state == 'Error':\n states.append('unable to get data')\n else:\n states.append(state)\n if amperage == 'Error':\n amperage = 'unable to fetch data'\n else:\n amperage = amperage\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n state_dict['amperage'] = amperage\n return {'Status': state_dict}\n\n def post(self, outletid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n states = []\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Pducontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n if status == 'Error':\n return {'error': 'Unable to get data'}\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Outletcontrol(Resource):\n <assignment token>\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Outletcontrol, self).__init__()\n\n def get(self, outletid):\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n state = pdu.get_outlet_status(outlet[2], outlet[3])\n amperage = pdu.get_outlet_amperage(outlet[2], outlet[3])\n states = []\n if state == 'Error':\n states.append('unable to get data')\n else:\n states.append(state)\n if amperage == 'Error':\n amperage = 'unable to fetch data'\n else:\n amperage = amperage\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n state_dict['amperage'] = amperage\n return {'Status': state_dict}\n\n def post(self, outletid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n states = []\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Pducontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n if status == 'Error':\n return {'error': 'Unable to get data'}\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Outletcontrol(Resource):\n <assignment token>\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Outletcontrol, self).__init__()\n <function token>\n\n def post(self, outletid):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n outlet = outlet_details(outletid)\n else:\n role = check_outlet_permission(g.user.id, outletid)\n if role is False:\n return {'message': 'User does not have neccesary permission'}\n else:\n outlet = outlet_details(outletid)\n pdu = Pdu_obj(outlet[0], 161, outlet[1])\n states = []\n ret_value = pdu.change_state(outlet[2], outlet[3], status)\n if 'No SNMP response received' in str(ret_value):\n states.append('unable to connect to pdu')\n else:\n states.append('changed state')\n state_dict = {}\n state_dict[str(outlet[0]) + ' ' + str(outlet[2]) + ' ' + str(outlet[3])\n ] = states[0]\n return {'Status': state_dict}\n\n\n<docstring token>\n\n\nclass Pducontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n if status == 'Error':\n return {'error': 'Unable to get data'}\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Outletcontrol(Resource):\n <assignment token>\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Outletcontrol, self).__init__()\n <function token>\n <function token>\n\n\n<docstring token>\n\n\nclass Pducontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n if status == 'Error':\n return {'error': 'Unable to get data'}\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Outletcontrol(Resource):\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n\n<docstring token>\n\n\nclass Pducontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n if status == 'Error':\n return {'error': 'Unable to get data'}\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Pducontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n if status == 'Error':\n return {'error': 'Unable to get data'}\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Pducontrol(Resource):\n <assignment token>\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n\n def get(self, pduip):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n try:\n status, name = pdu.complete_status()\n except ValueError:\n return {'error': 'Unable to get data'}\n if status == 'Error':\n return {'error': 'Unable to get data'}\n amperage = pdu.get_amperage_details()\n status_dict = {}\n for i in range(len(status)):\n status_dict[name[i]] = status[i]\n amperage_dict = {}\n amperage_dict['tower_A'] = amperage[0]\n amperage_dict['tower_B'] = amperage[1]\n return {'status': status_dict, 'amperage': amperage_dict}\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Pducontrol(Resource):\n <assignment token>\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pducontrol, self).__init__()\n <function token>\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Pducontrol(Resource):\n <assignment token>\n <function token>\n <function token>\n\n def post(self, pduip):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Pducontrol(Resource):\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n decorators = [auth.login_required]\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n <assignment token>\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n\n def post(self, pduip, tower, outlet):\n args = self.reqparse.parse_args()\n status = args['action']\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n ret_value = pdu.change_state(tower, outlet, status)\n if 'No SNMP response received' in str(ret_value):\n return {'Error': 'unable to connect to pdu'}\n else:\n return {'Success': 'Changed state'}\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n <assignment token>\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n\n def get(self, pduip, tower, outlet):\n if g.user.username == 'root':\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n else:\n pdu_access_string = pdu.access_string\n else:\n pdu = PduDetails.query.filter_by(ip=pduip).first()\n if pdu is None:\n return {'Error': 'pdu doesn\"t exist'}\n relation = UserPdus.query.filter_by(userid=g.user.id, pduid=pdu.id\n ).first()\n if relation is None:\n return {'message': 'User does not have neccesary permission'}\n pdu_access_string = pdu.access_string\n pdu = Pdu_obj(pduip, 161, pdu_access_string)\n state = pdu.get_outlet_status(tower, outlet)\n amperage = pdu.get_outlet_amperage(tower, outlet)\n if state == 'Error':\n state = 'Unable to fetch data'\n if amperage == 'Error':\n amperage = 'unable to fetch amperage'\n return {'state': state, 'amperage': amperage}\n <function token>\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n <assignment token>\n\n def __init__(self):\n self.reqparse = reqparse.RequestParser()\n self.reqparse.add_argument('action', type=str, required=True, help=\n 'No action provided', location='json')\n super(Pduoutletcontrol, self).__init__()\n <function token>\n <function token>\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n\n\nclass Pduoutletcontrol(Resource):\n <assignment token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n<docstring token>\n<class token>\n" ]
false
98,922
e8922212abf5b36eb8c446df3d355732b3fab566
num1,num2=map(int,input().split()) num3,num4=map(int,input().split()) if(num1>=num2 and num3>=num4): print("yes") else: print("no")
[ "num1,num2=map(int,input().split())\nnum3,num4=map(int,input().split())\nif(num1>=num2 and num3>=num4):\n\tprint(\"yes\")\nelse:\n\tprint(\"no\")\n", "num1, num2 = map(int, input().split())\nnum3, num4 = map(int, input().split())\nif num1 >= num2 and num3 >= num4:\n print('yes')\nelse:\n print('no')\n", "<assignment token>\nif num1 >= num2 and num3 >= num4:\n print('yes')\nelse:\n print('no')\n", "<assignment token>\n<code token>\n" ]
false
98,923
35291330ba757cd231435882cd0bacc378a6c4c3
import sys def ccat(l): if type(l)!=list: return None stl='' for i in l: stl+='%s '%i stl=stl.strip() return stl if len(sys.argv)>1: print 'begin' print '\t%s'%ccat(sys.argv[1:]) try: while True: pass except: print 'end'
[ "import sys\r\n\r\ndef ccat(l):\r\n\tif type(l)!=list: return None\r\n\tstl=''\r\n\tfor i in l:\r\n\t\tstl+='%s '%i\r\n\tstl=stl.strip()\r\n\treturn stl\r\n\r\nif len(sys.argv)>1:\r\n\tprint 'begin'\r\n\tprint '\\t%s'%ccat(sys.argv[1:])\r\n\ttry:\r\n\t\twhile True: pass\r\n\texcept: print 'end'" ]
true
98,924
6a0cdead3cc8bcb224cc6241299c89bd7f01df9e
""" YouTrack REST API YouTrack issue tracking and project management system # noqa: E501 The version of the OpenAPI document: 2021.3 Generated by: https://openapi-generator.tech """ import sys import unittest import youtrack_api from youtrack_api.model.appearance_settings import AppearanceSettings from youtrack_api.model.license import License from youtrack_api.model.locale_settings import LocaleSettings from youtrack_api.model.notification_settings import NotificationSettings from youtrack_api.model.rest_cors_settings import RestCorsSettings from youtrack_api.model.system_settings import SystemSettings globals()['AppearanceSettings'] = AppearanceSettings globals()['License'] = License globals()['LocaleSettings'] = LocaleSettings globals()['NotificationSettings'] = NotificationSettings globals()['RestCorsSettings'] = RestCorsSettings globals()['SystemSettings'] = SystemSettings from youtrack_api.model.global_settings import GlobalSettings class TestGlobalSettings(unittest.TestCase): """GlobalSettings unit test stubs""" def setUp(self): pass def tearDown(self): pass def testGlobalSettings(self): """Test GlobalSettings""" # FIXME: construct object with mandatory attributes with example values # model = GlobalSettings() # noqa: E501 pass if __name__ == '__main__': unittest.main()
[ "\"\"\"\n YouTrack REST API\n\n YouTrack issue tracking and project management system # noqa: E501\n\n The version of the OpenAPI document: 2021.3\n Generated by: https://openapi-generator.tech\n\"\"\"\n\n\nimport sys\nimport unittest\n\nimport youtrack_api\nfrom youtrack_api.model.appearance_settings import AppearanceSettings\nfrom youtrack_api.model.license import License\nfrom youtrack_api.model.locale_settings import LocaleSettings\nfrom youtrack_api.model.notification_settings import NotificationSettings\nfrom youtrack_api.model.rest_cors_settings import RestCorsSettings\nfrom youtrack_api.model.system_settings import SystemSettings\nglobals()['AppearanceSettings'] = AppearanceSettings\nglobals()['License'] = License\nglobals()['LocaleSettings'] = LocaleSettings\nglobals()['NotificationSettings'] = NotificationSettings\nglobals()['RestCorsSettings'] = RestCorsSettings\nglobals()['SystemSettings'] = SystemSettings\nfrom youtrack_api.model.global_settings import GlobalSettings\n\n\nclass TestGlobalSettings(unittest.TestCase):\n \"\"\"GlobalSettings unit test stubs\"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testGlobalSettings(self):\n \"\"\"Test GlobalSettings\"\"\"\n # FIXME: construct object with mandatory attributes with example values\n # model = GlobalSettings() # noqa: E501\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n", "<docstring token>\nimport sys\nimport unittest\nimport youtrack_api\nfrom youtrack_api.model.appearance_settings import AppearanceSettings\nfrom youtrack_api.model.license import License\nfrom youtrack_api.model.locale_settings import LocaleSettings\nfrom youtrack_api.model.notification_settings import NotificationSettings\nfrom youtrack_api.model.rest_cors_settings import RestCorsSettings\nfrom youtrack_api.model.system_settings import SystemSettings\nglobals()['AppearanceSettings'] = AppearanceSettings\nglobals()['License'] = License\nglobals()['LocaleSettings'] = LocaleSettings\nglobals()['NotificationSettings'] = NotificationSettings\nglobals()['RestCorsSettings'] = RestCorsSettings\nglobals()['SystemSettings'] = SystemSettings\nfrom youtrack_api.model.global_settings import GlobalSettings\n\n\nclass TestGlobalSettings(unittest.TestCase):\n \"\"\"GlobalSettings unit test stubs\"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testGlobalSettings(self):\n \"\"\"Test GlobalSettings\"\"\"\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n", "<docstring token>\n<import token>\nglobals()['AppearanceSettings'] = AppearanceSettings\nglobals()['License'] = License\nglobals()['LocaleSettings'] = LocaleSettings\nglobals()['NotificationSettings'] = NotificationSettings\nglobals()['RestCorsSettings'] = RestCorsSettings\nglobals()['SystemSettings'] = SystemSettings\n<import token>\n\n\nclass TestGlobalSettings(unittest.TestCase):\n \"\"\"GlobalSettings unit test stubs\"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testGlobalSettings(self):\n \"\"\"Test GlobalSettings\"\"\"\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n", "<docstring token>\n<import token>\n<assignment token>\n<import token>\n\n\nclass TestGlobalSettings(unittest.TestCase):\n \"\"\"GlobalSettings unit test stubs\"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testGlobalSettings(self):\n \"\"\"Test GlobalSettings\"\"\"\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n", "<docstring token>\n<import token>\n<assignment token>\n<import token>\n\n\nclass TestGlobalSettings(unittest.TestCase):\n \"\"\"GlobalSettings unit test stubs\"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testGlobalSettings(self):\n \"\"\"Test GlobalSettings\"\"\"\n pass\n\n\n<code token>\n", "<docstring token>\n<import token>\n<assignment token>\n<import token>\n\n\nclass TestGlobalSettings(unittest.TestCase):\n <docstring token>\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testGlobalSettings(self):\n \"\"\"Test GlobalSettings\"\"\"\n pass\n\n\n<code token>\n", "<docstring token>\n<import token>\n<assignment token>\n<import token>\n\n\nclass TestGlobalSettings(unittest.TestCase):\n <docstring token>\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n <function token>\n\n\n<code token>\n", "<docstring token>\n<import token>\n<assignment token>\n<import token>\n\n\nclass TestGlobalSettings(unittest.TestCase):\n <docstring token>\n <function token>\n\n def tearDown(self):\n pass\n <function token>\n\n\n<code token>\n", "<docstring token>\n<import token>\n<assignment token>\n<import token>\n\n\nclass TestGlobalSettings(unittest.TestCase):\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n\n<code token>\n", "<docstring token>\n<import token>\n<assignment token>\n<import token>\n<class token>\n<code token>\n" ]
false
98,925
f7c6307a57c24c15d5c431a81509cb972ed52fc4
import numpy as np import tensorflow as tf import matplotlib.pyplot as plt import pandas as pd import seaborn as sns from sklearn.datasets import load_boston from sklearn.model_selection import train_test_split data = load_boston() boston = pd.DataFrame(data.data, columns=data.feature_names) print(boston.columns) boston['MEDV'] = data.target correlation_matrix = boston.corr() sns.heatmap(data=correlation_matrix, annot=True).set_title('Correlation matrix') plt.show() # matrix is showing strong correlation between target value and RM (number of rooms) and LSTAT (% of low status population) target = 'MEDV' features = ['RM', 'LSTAT'] # plotting house median with respect to features for feature in features: ax = boston.plot.scatter(x=feature, y='MEDV', label=feature) plt.show() price = np.array(boston['MEDV'], np.float32) rooms = np.array(boston['RM'], np.float32) low_status_pop = np.array(boston['LSTAT'], np.float32) rooms_train, rooms_valid, low_status_pop_train, low_status_pop_valid = train_test_split(rooms, low_status_pop, random_state=42) price_train, price_valid = train_test_split(price, random_state=42) # setting variables intercept = tf.Variable(0.1, np.float32) slope_1 = tf.Variable(0.1, np.float32) slope_2 = tf.Variable(0.1, np.float32) # creating loss function def loss_function(intercept, slope_1, slope_2, price, low_status_pop, rooms): return tf.keras.losses.mae(price, intercept + low_status_pop * slope_1 + rooms * slope_2) opt = tf.keras.optimizers.Adam() for j in range(8000): opt.minimize(lambda: loss_function(intercept, slope_1, slope_2, price_train, low_status_pop_train, rooms_train), var_list=[intercept, slope_1, slope_2]) if j % 500 == 0: print('Loss on train set: ' + str(loss_function(intercept, slope_1, slope_2, price_train, low_status_pop_train, rooms_train).numpy()), end=' ') print('Loss on valid set: ' + str(loss_function(intercept, slope_1, slope_2, price_valid, low_status_pop_valid, rooms_valid).numpy()))
[ "import numpy as np\r\nimport tensorflow as tf\r\nimport matplotlib.pyplot as plt\r\nimport pandas as pd\r\nimport seaborn as sns\r\nfrom sklearn.datasets import load_boston\r\nfrom sklearn.model_selection import train_test_split\r\n\r\ndata = load_boston()\r\nboston = pd.DataFrame(data.data, columns=data.feature_names)\r\n\r\nprint(boston.columns)\r\nboston['MEDV'] = data.target\r\n\r\ncorrelation_matrix = boston.corr()\r\nsns.heatmap(data=correlation_matrix, annot=True).set_title('Correlation matrix')\r\nplt.show()\r\n# matrix is showing strong correlation between target value and RM (number of rooms) and LSTAT (% of low status population)\r\n\r\ntarget = 'MEDV'\r\nfeatures = ['RM', 'LSTAT']\r\n# plotting house median with respect to features\r\nfor feature in features:\r\n ax = boston.plot.scatter(x=feature, y='MEDV', label=feature)\r\n plt.show()\r\n\r\nprice = np.array(boston['MEDV'], np.float32)\r\nrooms = np.array(boston['RM'], np.float32)\r\nlow_status_pop = np.array(boston['LSTAT'], np.float32)\r\nrooms_train, rooms_valid, low_status_pop_train, low_status_pop_valid = train_test_split(rooms, low_status_pop,\r\n random_state=42)\r\nprice_train, price_valid = train_test_split(price, random_state=42)\r\n\r\n# setting variables\r\nintercept = tf.Variable(0.1, np.float32)\r\nslope_1 = tf.Variable(0.1, np.float32)\r\nslope_2 = tf.Variable(0.1, np.float32)\r\n\r\n\r\n# creating loss function\r\ndef loss_function(intercept, slope_1, slope_2, price, low_status_pop, rooms):\r\n return tf.keras.losses.mae(price, intercept + low_status_pop * slope_1 + rooms * slope_2)\r\n\r\n\r\nopt = tf.keras.optimizers.Adam()\r\nfor j in range(8000):\r\n opt.minimize(lambda: loss_function(intercept, slope_1, slope_2, price_train, low_status_pop_train, rooms_train),\r\n var_list=[intercept, slope_1, slope_2])\r\n if j % 500 == 0:\r\n print('Loss on train set: ' + str(loss_function(intercept, slope_1, slope_2, price_train,\r\n low_status_pop_train, rooms_train).numpy()), end=' ')\r\n print('Loss on valid set: ' + str(loss_function(intercept, slope_1, slope_2, price_valid,\r\n low_status_pop_valid, rooms_valid).numpy()))\r\n", "import numpy as np\nimport tensorflow as tf\nimport matplotlib.pyplot as plt\nimport pandas as pd\nimport seaborn as sns\nfrom sklearn.datasets import load_boston\nfrom sklearn.model_selection import train_test_split\ndata = load_boston()\nboston = pd.DataFrame(data.data, columns=data.feature_names)\nprint(boston.columns)\nboston['MEDV'] = data.target\ncorrelation_matrix = boston.corr()\nsns.heatmap(data=correlation_matrix, annot=True).set_title('Correlation matrix'\n )\nplt.show()\ntarget = 'MEDV'\nfeatures = ['RM', 'LSTAT']\nfor feature in features:\n ax = boston.plot.scatter(x=feature, y='MEDV', label=feature)\n plt.show()\nprice = np.array(boston['MEDV'], np.float32)\nrooms = np.array(boston['RM'], np.float32)\nlow_status_pop = np.array(boston['LSTAT'], np.float32)\nrooms_train, rooms_valid, low_status_pop_train, low_status_pop_valid = (\n train_test_split(rooms, low_status_pop, random_state=42))\nprice_train, price_valid = train_test_split(price, random_state=42)\nintercept = tf.Variable(0.1, np.float32)\nslope_1 = tf.Variable(0.1, np.float32)\nslope_2 = tf.Variable(0.1, np.float32)\n\n\ndef loss_function(intercept, slope_1, slope_2, price, low_status_pop, rooms):\n return tf.keras.losses.mae(price, intercept + low_status_pop * slope_1 +\n rooms * slope_2)\n\n\nopt = tf.keras.optimizers.Adam()\nfor j in range(8000):\n opt.minimize(lambda : loss_function(intercept, slope_1, slope_2,\n price_train, low_status_pop_train, rooms_train), var_list=[\n intercept, slope_1, slope_2])\n if j % 500 == 0:\n print('Loss on train set: ' + str(loss_function(intercept,\n slope_1, slope_2, price_train, low_status_pop_train,\n rooms_train).numpy()), end=' ')\n print('Loss on valid set: ' + str(loss_function(intercept,\n slope_1, slope_2, price_valid, low_status_pop_valid,\n rooms_valid).numpy()))\n", "<import token>\ndata = load_boston()\nboston = pd.DataFrame(data.data, columns=data.feature_names)\nprint(boston.columns)\nboston['MEDV'] = data.target\ncorrelation_matrix = boston.corr()\nsns.heatmap(data=correlation_matrix, annot=True).set_title('Correlation matrix'\n )\nplt.show()\ntarget = 'MEDV'\nfeatures = ['RM', 'LSTAT']\nfor feature in features:\n ax = boston.plot.scatter(x=feature, y='MEDV', label=feature)\n plt.show()\nprice = np.array(boston['MEDV'], np.float32)\nrooms = np.array(boston['RM'], np.float32)\nlow_status_pop = np.array(boston['LSTAT'], np.float32)\nrooms_train, rooms_valid, low_status_pop_train, low_status_pop_valid = (\n train_test_split(rooms, low_status_pop, random_state=42))\nprice_train, price_valid = train_test_split(price, random_state=42)\nintercept = tf.Variable(0.1, np.float32)\nslope_1 = tf.Variable(0.1, np.float32)\nslope_2 = tf.Variable(0.1, np.float32)\n\n\ndef loss_function(intercept, slope_1, slope_2, price, low_status_pop, rooms):\n return tf.keras.losses.mae(price, intercept + low_status_pop * slope_1 +\n rooms * slope_2)\n\n\nopt = tf.keras.optimizers.Adam()\nfor j in range(8000):\n opt.minimize(lambda : loss_function(intercept, slope_1, slope_2,\n price_train, low_status_pop_train, rooms_train), var_list=[\n intercept, slope_1, slope_2])\n if j % 500 == 0:\n print('Loss on train set: ' + str(loss_function(intercept,\n slope_1, slope_2, price_train, low_status_pop_train,\n rooms_train).numpy()), end=' ')\n print('Loss on valid set: ' + str(loss_function(intercept,\n slope_1, slope_2, price_valid, low_status_pop_valid,\n rooms_valid).numpy()))\n", "<import token>\n<assignment token>\nprint(boston.columns)\n<assignment token>\nsns.heatmap(data=correlation_matrix, annot=True).set_title('Correlation matrix'\n )\nplt.show()\n<assignment token>\nfor feature in features:\n ax = boston.plot.scatter(x=feature, y='MEDV', label=feature)\n plt.show()\n<assignment token>\n\n\ndef loss_function(intercept, slope_1, slope_2, price, low_status_pop, rooms):\n return tf.keras.losses.mae(price, intercept + low_status_pop * slope_1 +\n rooms * slope_2)\n\n\n<assignment token>\nfor j in range(8000):\n opt.minimize(lambda : loss_function(intercept, slope_1, slope_2,\n price_train, low_status_pop_train, rooms_train), var_list=[\n intercept, slope_1, slope_2])\n if j % 500 == 0:\n print('Loss on train set: ' + str(loss_function(intercept,\n slope_1, slope_2, price_train, low_status_pop_train,\n rooms_train).numpy()), end=' ')\n print('Loss on valid set: ' + str(loss_function(intercept,\n slope_1, slope_2, price_valid, low_status_pop_valid,\n rooms_valid).numpy()))\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\ndef loss_function(intercept, slope_1, slope_2, price, low_status_pop, rooms):\n return tf.keras.losses.mae(price, intercept + low_status_pop * slope_1 +\n rooms * slope_2)\n\n\n<assignment token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n<assignment token>\n<code token>\n" ]
false
98,926
c95aa6ea88fbfe0e27a19d2acdcca954661c45f0
from django.shortcuts import render, get_object_or_404 from django.utils import timezone from .models import Alojamiento, Comentario from .forms import AlojamientoForm, ComentarioForm from django.shortcuts import redirect from django.contrib import messages # Create your views here. def alojamiento_list(request): alojamientos = Alojamiento.objects.all().order_by('-fecha_de_creacion') return render(request, 'alojamientos/alojamiento_list.html', {'alojamientos': alojamientos}) def alojamiento_detail(request, pk): alojamiento = get_object_or_404(Alojamiento, pk=pk) #comentarios = Comentario.objects.filter(alojamiento_id=pk).order_by('-fecha_de_creacion') return render(request, 'alojamientos/alojamiento_detail.html', {'alojamiento': alojamiento}) def alojamiento_new(request): if request.method == "POST": form = AlojamientoForm(request.POST) if form.is_valid(): alojamiento = form.save(commit=False) alojamiento.autor = request.user alojamiento.save() return redirect('/alojamientos', pk=alojamiento.pk) else: form = AlojamientoForm() return render(request, 'alojamientos/alojamiento_add.html', {'form': form}) def añadir_comentario(request, pk): alojamiento = get_object_or_404(Alojamiento, pk=pk) if request.method == "POST": form = ComentarioForm(request.POST) if form.is_valid(): comentario = form.save(commit=False) comentario.alojamiento = alojamiento comentario.autor = request.user comentario.save() return redirect('alojamiento_detail', pk=alojamiento.pk) else: form = ComentarioForm() return render(request, 'alojamientos/añadir_comentario.html', {'form': form}) def eliminar_comentario(request, pk): comentario = get_object_or_404(Comentario, pk=pk) comentario.delete() return redirect('alojamiento_detail', pk=comentario.alojamiento.pk) def alojamiento_edit(request, pk): instancia = Alojamiento.objects.get(pk=pk) form = AlojamientoForm(instance=instancia) if request.method == "POST": form = AlojamientoForm(request.POST, instance=instancia) if form.is_valid(): instancia = form.save(commit=False) instancia.save() messages.success(request, 'Editado con exito') return render(request, "alojamientos/alojamiento_edit.html", {'form': form}) def alojamiento_delete(request, pk): instancia = Alojamiento.objects.get(pk=pk) instancia.delete() return redirect("alojamiento_list")
[ "from django.shortcuts import render, get_object_or_404\nfrom django.utils import timezone\nfrom .models import Alojamiento, Comentario\nfrom .forms import AlojamientoForm, ComentarioForm\nfrom django.shortcuts import redirect\nfrom django.contrib import messages\n\n# Create your views here.\ndef alojamiento_list(request):\n alojamientos = Alojamiento.objects.all().order_by('-fecha_de_creacion')\n return render(request, 'alojamientos/alojamiento_list.html', {'alojamientos': alojamientos})\n\ndef alojamiento_detail(request, pk):\n alojamiento = get_object_or_404(Alojamiento, pk=pk)\n #comentarios = Comentario.objects.filter(alojamiento_id=pk).order_by('-fecha_de_creacion')\n return render(request, 'alojamientos/alojamiento_detail.html', {'alojamiento': alojamiento})\n\ndef alojamiento_new(request):\n if request.method == \"POST\":\n form = AlojamientoForm(request.POST)\n if form.is_valid():\n alojamiento = form.save(commit=False)\n alojamiento.autor = request.user\n alojamiento.save()\n return redirect('/alojamientos', pk=alojamiento.pk)\n else:\n form = AlojamientoForm()\n return render(request, 'alojamientos/alojamiento_add.html', {'form': form})\n\ndef añadir_comentario(request, pk):\n alojamiento = get_object_or_404(Alojamiento, pk=pk)\n if request.method == \"POST\":\n form = ComentarioForm(request.POST)\n if form.is_valid():\n comentario = form.save(commit=False)\n comentario.alojamiento = alojamiento\n comentario.autor = request.user\n comentario.save()\n return redirect('alojamiento_detail', pk=alojamiento.pk)\n else:\n form = ComentarioForm()\n return render(request, 'alojamientos/añadir_comentario.html', {'form': form})\n\ndef eliminar_comentario(request, pk):\n comentario = get_object_or_404(Comentario, pk=pk)\n comentario.delete()\n return redirect('alojamiento_detail', pk=comentario.alojamiento.pk)\n\ndef alojamiento_edit(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n form = AlojamientoForm(instance=instancia)\n if request.method == \"POST\":\n form = AlojamientoForm(request.POST, instance=instancia)\n if form.is_valid():\n instancia = form.save(commit=False)\n instancia.save()\n messages.success(request, 'Editado con exito')\n\n return render(request, \"alojamientos/alojamiento_edit.html\", {'form': form})\n\ndef alojamiento_delete(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n instancia.delete()\n\n return redirect(\"alojamiento_list\")\n", "from django.shortcuts import render, get_object_or_404\nfrom django.utils import timezone\nfrom .models import Alojamiento, Comentario\nfrom .forms import AlojamientoForm, ComentarioForm\nfrom django.shortcuts import redirect\nfrom django.contrib import messages\n\n\ndef alojamiento_list(request):\n alojamientos = Alojamiento.objects.all().order_by('-fecha_de_creacion')\n return render(request, 'alojamientos/alojamiento_list.html', {\n 'alojamientos': alojamientos})\n\n\ndef alojamiento_detail(request, pk):\n alojamiento = get_object_or_404(Alojamiento, pk=pk)\n return render(request, 'alojamientos/alojamiento_detail.html', {\n 'alojamiento': alojamiento})\n\n\ndef alojamiento_new(request):\n if request.method == 'POST':\n form = AlojamientoForm(request.POST)\n if form.is_valid():\n alojamiento = form.save(commit=False)\n alojamiento.autor = request.user\n alojamiento.save()\n return redirect('/alojamientos', pk=alojamiento.pk)\n else:\n form = AlojamientoForm()\n return render(request, 'alojamientos/alojamiento_add.html', {'form': form})\n\n\ndef añadir_comentario(request, pk):\n alojamiento = get_object_or_404(Alojamiento, pk=pk)\n if request.method == 'POST':\n form = ComentarioForm(request.POST)\n if form.is_valid():\n comentario = form.save(commit=False)\n comentario.alojamiento = alojamiento\n comentario.autor = request.user\n comentario.save()\n return redirect('alojamiento_detail', pk=alojamiento.pk)\n else:\n form = ComentarioForm()\n return render(request, 'alojamientos/añadir_comentario.html', {'form':\n form})\n\n\ndef eliminar_comentario(request, pk):\n comentario = get_object_or_404(Comentario, pk=pk)\n comentario.delete()\n return redirect('alojamiento_detail', pk=comentario.alojamiento.pk)\n\n\ndef alojamiento_edit(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n form = AlojamientoForm(instance=instancia)\n if request.method == 'POST':\n form = AlojamientoForm(request.POST, instance=instancia)\n if form.is_valid():\n instancia = form.save(commit=False)\n instancia.save()\n messages.success(request, 'Editado con exito')\n return render(request, 'alojamientos/alojamiento_edit.html', {'form': form}\n )\n\n\ndef alojamiento_delete(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n instancia.delete()\n return redirect('alojamiento_list')\n", "<import token>\n\n\ndef alojamiento_list(request):\n alojamientos = Alojamiento.objects.all().order_by('-fecha_de_creacion')\n return render(request, 'alojamientos/alojamiento_list.html', {\n 'alojamientos': alojamientos})\n\n\ndef alojamiento_detail(request, pk):\n alojamiento = get_object_or_404(Alojamiento, pk=pk)\n return render(request, 'alojamientos/alojamiento_detail.html', {\n 'alojamiento': alojamiento})\n\n\ndef alojamiento_new(request):\n if request.method == 'POST':\n form = AlojamientoForm(request.POST)\n if form.is_valid():\n alojamiento = form.save(commit=False)\n alojamiento.autor = request.user\n alojamiento.save()\n return redirect('/alojamientos', pk=alojamiento.pk)\n else:\n form = AlojamientoForm()\n return render(request, 'alojamientos/alojamiento_add.html', {'form': form})\n\n\ndef añadir_comentario(request, pk):\n alojamiento = get_object_or_404(Alojamiento, pk=pk)\n if request.method == 'POST':\n form = ComentarioForm(request.POST)\n if form.is_valid():\n comentario = form.save(commit=False)\n comentario.alojamiento = alojamiento\n comentario.autor = request.user\n comentario.save()\n return redirect('alojamiento_detail', pk=alojamiento.pk)\n else:\n form = ComentarioForm()\n return render(request, 'alojamientos/añadir_comentario.html', {'form':\n form})\n\n\ndef eliminar_comentario(request, pk):\n comentario = get_object_or_404(Comentario, pk=pk)\n comentario.delete()\n return redirect('alojamiento_detail', pk=comentario.alojamiento.pk)\n\n\ndef alojamiento_edit(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n form = AlojamientoForm(instance=instancia)\n if request.method == 'POST':\n form = AlojamientoForm(request.POST, instance=instancia)\n if form.is_valid():\n instancia = form.save(commit=False)\n instancia.save()\n messages.success(request, 'Editado con exito')\n return render(request, 'alojamientos/alojamiento_edit.html', {'form': form}\n )\n\n\ndef alojamiento_delete(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n instancia.delete()\n return redirect('alojamiento_list')\n", "<import token>\n\n\ndef alojamiento_list(request):\n alojamientos = Alojamiento.objects.all().order_by('-fecha_de_creacion')\n return render(request, 'alojamientos/alojamiento_list.html', {\n 'alojamientos': alojamientos})\n\n\ndef alojamiento_detail(request, pk):\n alojamiento = get_object_or_404(Alojamiento, pk=pk)\n return render(request, 'alojamientos/alojamiento_detail.html', {\n 'alojamiento': alojamiento})\n\n\ndef alojamiento_new(request):\n if request.method == 'POST':\n form = AlojamientoForm(request.POST)\n if form.is_valid():\n alojamiento = form.save(commit=False)\n alojamiento.autor = request.user\n alojamiento.save()\n return redirect('/alojamientos', pk=alojamiento.pk)\n else:\n form = AlojamientoForm()\n return render(request, 'alojamientos/alojamiento_add.html', {'form': form})\n\n\n<function token>\n\n\ndef eliminar_comentario(request, pk):\n comentario = get_object_or_404(Comentario, pk=pk)\n comentario.delete()\n return redirect('alojamiento_detail', pk=comentario.alojamiento.pk)\n\n\ndef alojamiento_edit(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n form = AlojamientoForm(instance=instancia)\n if request.method == 'POST':\n form = AlojamientoForm(request.POST, instance=instancia)\n if form.is_valid():\n instancia = form.save(commit=False)\n instancia.save()\n messages.success(request, 'Editado con exito')\n return render(request, 'alojamientos/alojamiento_edit.html', {'form': form}\n )\n\n\ndef alojamiento_delete(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n instancia.delete()\n return redirect('alojamiento_list')\n", "<import token>\n<function token>\n\n\ndef alojamiento_detail(request, pk):\n alojamiento = get_object_or_404(Alojamiento, pk=pk)\n return render(request, 'alojamientos/alojamiento_detail.html', {\n 'alojamiento': alojamiento})\n\n\ndef alojamiento_new(request):\n if request.method == 'POST':\n form = AlojamientoForm(request.POST)\n if form.is_valid():\n alojamiento = form.save(commit=False)\n alojamiento.autor = request.user\n alojamiento.save()\n return redirect('/alojamientos', pk=alojamiento.pk)\n else:\n form = AlojamientoForm()\n return render(request, 'alojamientos/alojamiento_add.html', {'form': form})\n\n\n<function token>\n\n\ndef eliminar_comentario(request, pk):\n comentario = get_object_or_404(Comentario, pk=pk)\n comentario.delete()\n return redirect('alojamiento_detail', pk=comentario.alojamiento.pk)\n\n\ndef alojamiento_edit(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n form = AlojamientoForm(instance=instancia)\n if request.method == 'POST':\n form = AlojamientoForm(request.POST, instance=instancia)\n if form.is_valid():\n instancia = form.save(commit=False)\n instancia.save()\n messages.success(request, 'Editado con exito')\n return render(request, 'alojamientos/alojamiento_edit.html', {'form': form}\n )\n\n\ndef alojamiento_delete(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n instancia.delete()\n return redirect('alojamiento_list')\n", "<import token>\n<function token>\n\n\ndef alojamiento_detail(request, pk):\n alojamiento = get_object_or_404(Alojamiento, pk=pk)\n return render(request, 'alojamientos/alojamiento_detail.html', {\n 'alojamiento': alojamiento})\n\n\n<function token>\n<function token>\n\n\ndef eliminar_comentario(request, pk):\n comentario = get_object_or_404(Comentario, pk=pk)\n comentario.delete()\n return redirect('alojamiento_detail', pk=comentario.alojamiento.pk)\n\n\ndef alojamiento_edit(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n form = AlojamientoForm(instance=instancia)\n if request.method == 'POST':\n form = AlojamientoForm(request.POST, instance=instancia)\n if form.is_valid():\n instancia = form.save(commit=False)\n instancia.save()\n messages.success(request, 'Editado con exito')\n return render(request, 'alojamientos/alojamiento_edit.html', {'form': form}\n )\n\n\ndef alojamiento_delete(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n instancia.delete()\n return redirect('alojamiento_list')\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef eliminar_comentario(request, pk):\n comentario = get_object_or_404(Comentario, pk=pk)\n comentario.delete()\n return redirect('alojamiento_detail', pk=comentario.alojamiento.pk)\n\n\ndef alojamiento_edit(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n form = AlojamientoForm(instance=instancia)\n if request.method == 'POST':\n form = AlojamientoForm(request.POST, instance=instancia)\n if form.is_valid():\n instancia = form.save(commit=False)\n instancia.save()\n messages.success(request, 'Editado con exito')\n return render(request, 'alojamientos/alojamiento_edit.html', {'form': form}\n )\n\n\ndef alojamiento_delete(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n instancia.delete()\n return redirect('alojamiento_list')\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef eliminar_comentario(request, pk):\n comentario = get_object_or_404(Comentario, pk=pk)\n comentario.delete()\n return redirect('alojamiento_detail', pk=comentario.alojamiento.pk)\n\n\ndef alojamiento_edit(request, pk):\n instancia = Alojamiento.objects.get(pk=pk)\n form = AlojamientoForm(instance=instancia)\n if request.method == 'POST':\n form = AlojamientoForm(request.POST, instance=instancia)\n if form.is_valid():\n instancia = form.save(commit=False)\n instancia.save()\n messages.success(request, 'Editado con exito')\n return render(request, 'alojamientos/alojamiento_edit.html', {'form': form}\n )\n\n\n<function token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef eliminar_comentario(request, pk):\n comentario = get_object_or_404(Comentario, pk=pk)\n comentario.delete()\n return redirect('alojamiento_detail', pk=comentario.alojamiento.pk)\n\n\n<function token>\n<function token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n" ]
false
98,927
a593ffee449cae2af29b55241eacc2b5a07d8392
from __future__ import annotations from dataclasses import dataclass from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from aiosmtplib import SMTP from aiosmtplib.errors import SMTPException from app.core.logger import logger from .errors import SendEmailError @dataclass class SMTPMailer: host: str port: int tls: bool username: str | None = None password: str | None = None async def send_email( self, *, recipient: tuple[str, str | None], sender: tuple[str, str | None], subject: str, text: str | None = None, html: str | None = None, ): message = MIMEMultipart("alternative") from_email, from_name = sender to_email, to_name = recipient message["From"] = from_email if not from_name else f"{from_name} <{from_email}>" message["To"] = to_email if not from_name else f"{to_name} <{to_email}>" message["Subject"] = subject if text: message.attach(MIMEText(text, "plain", "utf-8")) if html: message.attach(MIMEText(html, "html", "utf-8")) kwargs = {"hostname": self.host, "port": self.port, "use_tls": self.tls} if self.username: kwargs["username"] = self.username if self.password: kwargs["password"] = self.password smtp_client = SMTP(**kwargs) async with smtp_client: try: response = await smtp_client.send_message(message) except SMTPException as e: raise SendEmailError(str(e)) from e logger.info(f"send email result: {response}")
[ "from __future__ import annotations\n\nfrom dataclasses import dataclass\nfrom email.mime.multipart import MIMEMultipart\nfrom email.mime.text import MIMEText\n\nfrom aiosmtplib import SMTP\nfrom aiosmtplib.errors import SMTPException\n\nfrom app.core.logger import logger\nfrom .errors import SendEmailError\n\n\n@dataclass\nclass SMTPMailer:\n host: str\n port: int\n tls: bool\n username: str | None = None\n password: str | None = None\n\n async def send_email(\n self,\n *,\n recipient: tuple[str, str | None],\n sender: tuple[str, str | None],\n subject: str,\n text: str | None = None,\n html: str | None = None,\n ):\n message = MIMEMultipart(\"alternative\")\n\n from_email, from_name = sender\n to_email, to_name = recipient\n message[\"From\"] = from_email if not from_name else f\"{from_name} <{from_email}>\"\n message[\"To\"] = to_email if not from_name else f\"{to_name} <{to_email}>\"\n message[\"Subject\"] = subject\n if text:\n message.attach(MIMEText(text, \"plain\", \"utf-8\"))\n if html:\n message.attach(MIMEText(html, \"html\", \"utf-8\"))\n\n kwargs = {\"hostname\": self.host, \"port\": self.port, \"use_tls\": self.tls}\n if self.username:\n kwargs[\"username\"] = self.username\n if self.password:\n kwargs[\"password\"] = self.password\n smtp_client = SMTP(**kwargs)\n async with smtp_client:\n try:\n response = await smtp_client.send_message(message)\n except SMTPException as e:\n raise SendEmailError(str(e)) from e\n logger.info(f\"send email result: {response}\")\n", "from __future__ import annotations\nfrom dataclasses import dataclass\nfrom email.mime.multipart import MIMEMultipart\nfrom email.mime.text import MIMEText\nfrom aiosmtplib import SMTP\nfrom aiosmtplib.errors import SMTPException\nfrom app.core.logger import logger\nfrom .errors import SendEmailError\n\n\n@dataclass\nclass SMTPMailer:\n host: str\n port: int\n tls: bool\n username: str | None = None\n password: str | None = None\n\n async def send_email(self, *, recipient: tuple[str, str | None], sender:\n tuple[str, str | None], subject: str, text: (str | None)=None, html:\n (str | None)=None):\n message = MIMEMultipart('alternative')\n from_email, from_name = sender\n to_email, to_name = recipient\n message['From'\n ] = from_email if not from_name else f'{from_name} <{from_email}>'\n message['To'\n ] = to_email if not from_name else f'{to_name} <{to_email}>'\n message['Subject'] = subject\n if text:\n message.attach(MIMEText(text, 'plain', 'utf-8'))\n if html:\n message.attach(MIMEText(html, 'html', 'utf-8'))\n kwargs = {'hostname': self.host, 'port': self.port, 'use_tls': self.tls\n }\n if self.username:\n kwargs['username'] = self.username\n if self.password:\n kwargs['password'] = self.password\n smtp_client = SMTP(**kwargs)\n async with smtp_client:\n try:\n response = await smtp_client.send_message(message)\n except SMTPException as e:\n raise SendEmailError(str(e)) from e\n logger.info(f'send email result: {response}')\n", "<import token>\n\n\n@dataclass\nclass SMTPMailer:\n host: str\n port: int\n tls: bool\n username: str | None = None\n password: str | None = None\n\n async def send_email(self, *, recipient: tuple[str, str | None], sender:\n tuple[str, str | None], subject: str, text: (str | None)=None, html:\n (str | None)=None):\n message = MIMEMultipart('alternative')\n from_email, from_name = sender\n to_email, to_name = recipient\n message['From'\n ] = from_email if not from_name else f'{from_name} <{from_email}>'\n message['To'\n ] = to_email if not from_name else f'{to_name} <{to_email}>'\n message['Subject'] = subject\n if text:\n message.attach(MIMEText(text, 'plain', 'utf-8'))\n if html:\n message.attach(MIMEText(html, 'html', 'utf-8'))\n kwargs = {'hostname': self.host, 'port': self.port, 'use_tls': self.tls\n }\n if self.username:\n kwargs['username'] = self.username\n if self.password:\n kwargs['password'] = self.password\n smtp_client = SMTP(**kwargs)\n async with smtp_client:\n try:\n response = await smtp_client.send_message(message)\n except SMTPException as e:\n raise SendEmailError(str(e)) from e\n logger.info(f'send email result: {response}')\n", "<import token>\n<class token>\n" ]
false
98,928
af27c13f9d37e204231c4e5a8c897aa0576a4b3f
class ListNode: def __init__(self, val=0, next=None): self.val = val self.next = next class Solution: def addTwoNumbers(self, l1, l2): num1='' num2='' while l1: num1+=str(l1.val) l1=l1.next while l2: num2+=str(l2.val) l2=l2.next total=int(num1[::-1])+int(num2[::-1]) point=head=ListNode(0) for x in str(total)[::-1]: point.next=ListNode(int(x)) point=point.next return head.next def printList(self,head): arr=[] while head: arr.append(str(head.val)) head=head.next return ''.join(arr) L1=ListNode(2) L2=ListNode(4) L3=ListNode(3) L1.next=L2 L2.next=L3 s=Solution() print(s.printList(L1)) LL1=ListNode(5) LL2=ListNode(6) LL3=ListNode(4) LL1.next=LL2 LL2.next=LL3 s=Solution() print(s.printList(LL1)) print(s.printList(s.addTwoNumbers(L1,LL1)))
[ "class ListNode:\n def __init__(self, val=0, next=None):\n self.val = val\n self.next = next\nclass Solution:\n def addTwoNumbers(self, l1, l2):\n num1=''\n num2=''\n while l1:\n num1+=str(l1.val)\n l1=l1.next\n while l2:\n num2+=str(l2.val)\n l2=l2.next\n total=int(num1[::-1])+int(num2[::-1])\n point=head=ListNode(0)\n for x in str(total)[::-1]:\n point.next=ListNode(int(x))\n point=point.next\n return head.next\n def printList(self,head):\n arr=[]\n while head:\n arr.append(str(head.val))\n head=head.next\n return ''.join(arr)\nL1=ListNode(2)\nL2=ListNode(4)\nL3=ListNode(3)\nL1.next=L2\nL2.next=L3\ns=Solution()\nprint(s.printList(L1))\nLL1=ListNode(5)\nLL2=ListNode(6)\nLL3=ListNode(4)\nLL1.next=LL2\nLL2.next=LL3\ns=Solution()\nprint(s.printList(LL1))\n\nprint(s.printList(s.addTwoNumbers(L1,LL1)))\n", "class ListNode:\n\n def __init__(self, val=0, next=None):\n self.val = val\n self.next = next\n\n\nclass Solution:\n\n def addTwoNumbers(self, l1, l2):\n num1 = ''\n num2 = ''\n while l1:\n num1 += str(l1.val)\n l1 = l1.next\n while l2:\n num2 += str(l2.val)\n l2 = l2.next\n total = int(num1[::-1]) + int(num2[::-1])\n point = head = ListNode(0)\n for x in str(total)[::-1]:\n point.next = ListNode(int(x))\n point = point.next\n return head.next\n\n def printList(self, head):\n arr = []\n while head:\n arr.append(str(head.val))\n head = head.next\n return ''.join(arr)\n\n\nL1 = ListNode(2)\nL2 = ListNode(4)\nL3 = ListNode(3)\nL1.next = L2\nL2.next = L3\ns = Solution()\nprint(s.printList(L1))\nLL1 = ListNode(5)\nLL2 = ListNode(6)\nLL3 = ListNode(4)\nLL1.next = LL2\nLL2.next = LL3\ns = Solution()\nprint(s.printList(LL1))\nprint(s.printList(s.addTwoNumbers(L1, LL1)))\n", "class ListNode:\n\n def __init__(self, val=0, next=None):\n self.val = val\n self.next = next\n\n\nclass Solution:\n\n def addTwoNumbers(self, l1, l2):\n num1 = ''\n num2 = ''\n while l1:\n num1 += str(l1.val)\n l1 = l1.next\n while l2:\n num2 += str(l2.val)\n l2 = l2.next\n total = int(num1[::-1]) + int(num2[::-1])\n point = head = ListNode(0)\n for x in str(total)[::-1]:\n point.next = ListNode(int(x))\n point = point.next\n return head.next\n\n def printList(self, head):\n arr = []\n while head:\n arr.append(str(head.val))\n head = head.next\n return ''.join(arr)\n\n\n<assignment token>\nprint(s.printList(L1))\n<assignment token>\nprint(s.printList(LL1))\nprint(s.printList(s.addTwoNumbers(L1, LL1)))\n", "class ListNode:\n\n def __init__(self, val=0, next=None):\n self.val = val\n self.next = next\n\n\nclass Solution:\n\n def addTwoNumbers(self, l1, l2):\n num1 = ''\n num2 = ''\n while l1:\n num1 += str(l1.val)\n l1 = l1.next\n while l2:\n num2 += str(l2.val)\n l2 = l2.next\n total = int(num1[::-1]) + int(num2[::-1])\n point = head = ListNode(0)\n for x in str(total)[::-1]:\n point.next = ListNode(int(x))\n point = point.next\n return head.next\n\n def printList(self, head):\n arr = []\n while head:\n arr.append(str(head.val))\n head = head.next\n return ''.join(arr)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "class ListNode:\n <function token>\n\n\nclass Solution:\n\n def addTwoNumbers(self, l1, l2):\n num1 = ''\n num2 = ''\n while l1:\n num1 += str(l1.val)\n l1 = l1.next\n while l2:\n num2 += str(l2.val)\n l2 = l2.next\n total = int(num1[::-1]) + int(num2[::-1])\n point = head = ListNode(0)\n for x in str(total)[::-1]:\n point.next = ListNode(int(x))\n point = point.next\n return head.next\n\n def printList(self, head):\n arr = []\n while head:\n arr.append(str(head.val))\n head = head.next\n return ''.join(arr)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<class token>\n\n\nclass Solution:\n\n def addTwoNumbers(self, l1, l2):\n num1 = ''\n num2 = ''\n while l1:\n num1 += str(l1.val)\n l1 = l1.next\n while l2:\n num2 += str(l2.val)\n l2 = l2.next\n total = int(num1[::-1]) + int(num2[::-1])\n point = head = ListNode(0)\n for x in str(total)[::-1]:\n point.next = ListNode(int(x))\n point = point.next\n return head.next\n\n def printList(self, head):\n arr = []\n while head:\n arr.append(str(head.val))\n head = head.next\n return ''.join(arr)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<class token>\n\n\nclass Solution:\n <function token>\n\n def printList(self, head):\n arr = []\n while head:\n arr.append(str(head.val))\n head = head.next\n return ''.join(arr)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<class token>\n\n\nclass Solution:\n <function token>\n <function token>\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<class token>\n<class token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
98,929
76a14d9341648e1b424283a59b5f5d14040a8665
from django.contrib.auth import get_user_model from django.urls import reverse from django.test import TestCase from rest_framework import status from rest_framework.test import APIClient from core.models import Product, ProductCategory from product.serializers import ProductCategorySerializer, ProductSerializer PRODUCT_ADD_URL = reverse('product:product_add') PRODUCTS_LIST_URL = reverse('product:product') # PRODUCT_DETAIL_URL = reverse('product:product_detail') class PrivateProductsApiTests(TestCase): """Test the authorized user products API""" def setUp(self): self.user = get_user_model().objects.create_user( '[email protected]', 'test123' ) self.client = APIClient() self.client.force_authenticate(self.user) def test_retrieve_product_categories(self): """Test retrieving products""" # test_key = ProductCategory.objects. ProductCategory.objects.create(name="test name", description="new name") test_key = ProductCategory.objects.values()[0] # print(test_key.get('id')) Product.objects.create(product_category_id=test_key.get('id'), name='Test Product Category #1', description='Test Description #1', unit_price=12, quantity=15) Product.objects.create(product_category_id=test_key.get('id'), name='Test Product Category #2', description='Test Description #1', unit_price=12, quantity=15) # product_categories = ProductCategory.objects.all().order_by('-name') # serializer = ProductCategorySerializer(product_categories, many=True) res = self.client.get(PRODUCTS_LIST_URL) products = Product.objects.all().order_by('-name') serializer = ProductSerializer(products, many=False) # print(res.data) self.assertEqual(res.status_code, status.HTTP_200_OK) # self.assertDictEqual(dict(res.data), dict(serializer.data)) def test_create_product_successful(self): """Test creating a new product category""" ProductCategory.objects.create(name="test name", description="new name") test_key = ProductCategory.objects.values()[0] # print(test_key) payload = { 'name': 'Test Tag', 'product_category_id': test_key.get('id'), 'unit_price': 100, 'quantity': 12, 'description': 'Test description' } res = self.client.post(PRODUCT_ADD_URL, payload) # print(res.data) self.assertEqual(res.status_code, status.HTTP_201_CREATED) def test_get_product_detail(self): """Test viewing a product detail""" ProductCategory.objects.create(name='Test Product Category #1', description='Test Description #1') ProductCategory.objects.create(name='Test Product Category #2', description='Test Description #1') ProductCategory.objects.create(name='Test Product Category #3', description='Test Description #1') test_key = ProductCategory.objects.values()[1].get('id') Product.objects.create(product_category_id=test_key, name='Test Product Category #1', description='Test Description #1', unit_price=12, quantity=15) pk = Product.objects.values()[0].get('id') PRODUCTS_DETAIL_URL = reverse('product:product_details', args=(pk,)) res = self.client.get(PRODUCTS_DETAIL_URL) # print(res.data) self.assertEqual(res.status_code, status.HTTP_200_OK) def test_update_product_successful(self): ProductCategory.objects.create(name='Test Product Category #1', description='Test Description #1') ProductCategory.objects.create(name='Test Product Category #2', description='Test Description #1') ProductCategory.objects.create(name='Test Product Category #3', description='Test Description #1') test_key = ProductCategory.objects.values()[1].get('id') Product.objects.create(product_category_id=test_key, name='Test Product Category #1', description='Test Description #124', unit_price=12, quantity=15) pk = Product.objects.values()[0].get('id') payload = { 'name': 'testtt12312321t', 'description': '123123111111' } PRODUCTS_EDIT_URL = reverse('product:product_edit', args=(pk,)) res = self.client.patch(PRODUCTS_EDIT_URL, payload) self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT) def test_update_product_deleted_successfully(self): ProductCategory.objects.create(name='Test Product Category #1', description='Test Description #1') ProductCategory.objects.create(name='Test Product Category #2', description='Test Description #1') ProductCategory.objects.create(name='Test Product Category #3', description='Test Description #1') test_key = ProductCategory.objects.values()[1].get('id') Product.objects.create(product_category_id=test_key, name='Test Product Category #1', description='Test Description #124', unit_price=12, quantity=15) pk = Product.objects.values()[0].get('id') PRODUCT_DELETE_URL = reverse('product:product_delete', args=(pk,)) res = self.client.delete(PRODUCT_DELETE_URL) # print(Product.objects.values()) self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT) # def test_create_tag_invalid(self): # """Test creating a new product category with invalid payload""" # payload = {'name': 123} # res = self.client.post(PRODUCT_CATEGORY_ADD_URL, payload) # self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
[ "from django.contrib.auth import get_user_model\nfrom django.urls import reverse\nfrom django.test import TestCase\n\nfrom rest_framework import status\nfrom rest_framework.test import APIClient\n\nfrom core.models import Product, ProductCategory\n\nfrom product.serializers import ProductCategorySerializer, ProductSerializer\n\nPRODUCT_ADD_URL = reverse('product:product_add')\nPRODUCTS_LIST_URL = reverse('product:product')\n# PRODUCT_DETAIL_URL = reverse('product:product_detail')\n\n\nclass PrivateProductsApiTests(TestCase):\n \"\"\"Test the authorized user products API\"\"\"\n\n def setUp(self):\n self.user = get_user_model().objects.create_user(\n '[email protected]',\n 'test123'\n )\n self.client = APIClient()\n self.client.force_authenticate(self.user)\n\n def test_retrieve_product_categories(self):\n \"\"\"Test retrieving products\"\"\"\n # test_key = ProductCategory.objects. \n ProductCategory.objects.create(name=\"test name\", description=\"new name\")\n test_key = ProductCategory.objects.values()[0]\n # print(test_key.get('id'))\n Product.objects.create(product_category_id=test_key.get('id'), name='Test Product Category #1', description='Test Description #1', unit_price=12, quantity=15)\n Product.objects.create(product_category_id=test_key.get('id'), name='Test Product Category #2', description='Test Description #1', unit_price=12, quantity=15)\n\n # product_categories = ProductCategory.objects.all().order_by('-name')\n # serializer = ProductCategorySerializer(product_categories, many=True)\n res = self.client.get(PRODUCTS_LIST_URL)\n\n products = Product.objects.all().order_by('-name')\n serializer = ProductSerializer(products, many=False)\n\n # print(res.data)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n # self.assertDictEqual(dict(res.data), dict(serializer.data))\n \n def test_create_product_successful(self):\n \"\"\"Test creating a new product category\"\"\"\n \n ProductCategory.objects.create(name=\"test name\", description=\"new name\")\n test_key = ProductCategory.objects.values()[0]\n # print(test_key)\n payload = {\n 'name': 'Test Tag',\n 'product_category_id': test_key.get('id'),\n 'unit_price': 100,\n 'quantity': 12,\n 'description': 'Test description'\n }\n \n res = self.client.post(PRODUCT_ADD_URL, payload)\n\n # print(res.data)\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n\n def test_get_product_detail(self):\n \"\"\"Test viewing a product detail\"\"\"\n ProductCategory.objects.create(name='Test Product Category #1', description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2', description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3', description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n \n Product.objects.create(product_category_id=test_key, name='Test Product Category #1', description='Test Description #1', unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n\n PRODUCTS_DETAIL_URL = reverse('product:product_details', args=(pk,))\n res = self.client.get(PRODUCTS_DETAIL_URL)\n # print(res.data)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n \n def test_update_product_successful(self):\n ProductCategory.objects.create(name='Test Product Category #1', description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2', description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3', description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n\n Product.objects.create(product_category_id=test_key, name='Test Product Category #1', description='Test Description #124', unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n\n payload = {\n 'name': 'testtt12312321t',\n 'description': '123123111111'\n }\n PRODUCTS_EDIT_URL = reverse('product:product_edit', args=(pk,))\n res = self.client.patch(PRODUCTS_EDIT_URL, payload)\n\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n\n def test_update_product_deleted_successfully(self):\n ProductCategory.objects.create(name='Test Product Category #1', description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2', description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3', description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n\n Product.objects.create(product_category_id=test_key, name='Test Product Category #1', description='Test Description #124', unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n\n PRODUCT_DELETE_URL = reverse('product:product_delete', args=(pk,))\n res = self.client.delete(PRODUCT_DELETE_URL)\n # print(Product.objects.values())\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n\n\n # def test_create_tag_invalid(self):\n # \"\"\"Test creating a new product category with invalid payload\"\"\"\n # payload = {'name': 123}\n # res = self.client.post(PRODUCT_CATEGORY_ADD_URL, payload)\n\n # self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)", "from django.contrib.auth import get_user_model\nfrom django.urls import reverse\nfrom django.test import TestCase\nfrom rest_framework import status\nfrom rest_framework.test import APIClient\nfrom core.models import Product, ProductCategory\nfrom product.serializers import ProductCategorySerializer, ProductSerializer\nPRODUCT_ADD_URL = reverse('product:product_add')\nPRODUCTS_LIST_URL = reverse('product:product')\n\n\nclass PrivateProductsApiTests(TestCase):\n \"\"\"Test the authorized user products API\"\"\"\n\n def setUp(self):\n self.user = get_user_model().objects.create_user('[email protected]',\n 'test123')\n self.client = APIClient()\n self.client.force_authenticate(self.user)\n\n def test_retrieve_product_categories(self):\n \"\"\"Test retrieving products\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #2', description='Test Description #1',\n unit_price=12, quantity=15)\n res = self.client.get(PRODUCTS_LIST_URL)\n products = Product.objects.all().order_by('-name')\n serializer = ProductSerializer(products, many=False)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n def test_create_product_successful(self):\n \"\"\"Test creating a new product category\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n payload = {'name': 'Test Tag', 'product_category_id': test_key.get(\n 'id'), 'unit_price': 100, 'quantity': 12, 'description':\n 'Test description'}\n res = self.client.post(PRODUCT_ADD_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n\n def test_get_product_detail(self):\n \"\"\"Test viewing a product detail\"\"\"\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n PRODUCTS_DETAIL_URL = reverse('product:product_details', args=(pk,))\n res = self.client.get(PRODUCTS_DETAIL_URL)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n def test_update_product_successful(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n payload = {'name': 'testtt12312321t', 'description': '123123111111'}\n PRODUCTS_EDIT_URL = reverse('product:product_edit', args=(pk,))\n res = self.client.patch(PRODUCTS_EDIT_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n\n def test_update_product_deleted_successfully(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n PRODUCT_DELETE_URL = reverse('product:product_delete', args=(pk,))\n res = self.client.delete(PRODUCT_DELETE_URL)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n", "<import token>\nPRODUCT_ADD_URL = reverse('product:product_add')\nPRODUCTS_LIST_URL = reverse('product:product')\n\n\nclass PrivateProductsApiTests(TestCase):\n \"\"\"Test the authorized user products API\"\"\"\n\n def setUp(self):\n self.user = get_user_model().objects.create_user('[email protected]',\n 'test123')\n self.client = APIClient()\n self.client.force_authenticate(self.user)\n\n def test_retrieve_product_categories(self):\n \"\"\"Test retrieving products\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #2', description='Test Description #1',\n unit_price=12, quantity=15)\n res = self.client.get(PRODUCTS_LIST_URL)\n products = Product.objects.all().order_by('-name')\n serializer = ProductSerializer(products, many=False)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n def test_create_product_successful(self):\n \"\"\"Test creating a new product category\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n payload = {'name': 'Test Tag', 'product_category_id': test_key.get(\n 'id'), 'unit_price': 100, 'quantity': 12, 'description':\n 'Test description'}\n res = self.client.post(PRODUCT_ADD_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n\n def test_get_product_detail(self):\n \"\"\"Test viewing a product detail\"\"\"\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n PRODUCTS_DETAIL_URL = reverse('product:product_details', args=(pk,))\n res = self.client.get(PRODUCTS_DETAIL_URL)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n def test_update_product_successful(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n payload = {'name': 'testtt12312321t', 'description': '123123111111'}\n PRODUCTS_EDIT_URL = reverse('product:product_edit', args=(pk,))\n res = self.client.patch(PRODUCTS_EDIT_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n\n def test_update_product_deleted_successfully(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n PRODUCT_DELETE_URL = reverse('product:product_delete', args=(pk,))\n res = self.client.delete(PRODUCT_DELETE_URL)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n", "<import token>\n<assignment token>\n\n\nclass PrivateProductsApiTests(TestCase):\n \"\"\"Test the authorized user products API\"\"\"\n\n def setUp(self):\n self.user = get_user_model().objects.create_user('[email protected]',\n 'test123')\n self.client = APIClient()\n self.client.force_authenticate(self.user)\n\n def test_retrieve_product_categories(self):\n \"\"\"Test retrieving products\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #2', description='Test Description #1',\n unit_price=12, quantity=15)\n res = self.client.get(PRODUCTS_LIST_URL)\n products = Product.objects.all().order_by('-name')\n serializer = ProductSerializer(products, many=False)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n def test_create_product_successful(self):\n \"\"\"Test creating a new product category\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n payload = {'name': 'Test Tag', 'product_category_id': test_key.get(\n 'id'), 'unit_price': 100, 'quantity': 12, 'description':\n 'Test description'}\n res = self.client.post(PRODUCT_ADD_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n\n def test_get_product_detail(self):\n \"\"\"Test viewing a product detail\"\"\"\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n PRODUCTS_DETAIL_URL = reverse('product:product_details', args=(pk,))\n res = self.client.get(PRODUCTS_DETAIL_URL)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n def test_update_product_successful(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n payload = {'name': 'testtt12312321t', 'description': '123123111111'}\n PRODUCTS_EDIT_URL = reverse('product:product_edit', args=(pk,))\n res = self.client.patch(PRODUCTS_EDIT_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n\n def test_update_product_deleted_successfully(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n PRODUCT_DELETE_URL = reverse('product:product_delete', args=(pk,))\n res = self.client.delete(PRODUCT_DELETE_URL)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n", "<import token>\n<assignment token>\n\n\nclass PrivateProductsApiTests(TestCase):\n <docstring token>\n\n def setUp(self):\n self.user = get_user_model().objects.create_user('[email protected]',\n 'test123')\n self.client = APIClient()\n self.client.force_authenticate(self.user)\n\n def test_retrieve_product_categories(self):\n \"\"\"Test retrieving products\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #2', description='Test Description #1',\n unit_price=12, quantity=15)\n res = self.client.get(PRODUCTS_LIST_URL)\n products = Product.objects.all().order_by('-name')\n serializer = ProductSerializer(products, many=False)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n def test_create_product_successful(self):\n \"\"\"Test creating a new product category\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n payload = {'name': 'Test Tag', 'product_category_id': test_key.get(\n 'id'), 'unit_price': 100, 'quantity': 12, 'description':\n 'Test description'}\n res = self.client.post(PRODUCT_ADD_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n\n def test_get_product_detail(self):\n \"\"\"Test viewing a product detail\"\"\"\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n PRODUCTS_DETAIL_URL = reverse('product:product_details', args=(pk,))\n res = self.client.get(PRODUCTS_DETAIL_URL)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n def test_update_product_successful(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n payload = {'name': 'testtt12312321t', 'description': '123123111111'}\n PRODUCTS_EDIT_URL = reverse('product:product_edit', args=(pk,))\n res = self.client.patch(PRODUCTS_EDIT_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n\n def test_update_product_deleted_successfully(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n PRODUCT_DELETE_URL = reverse('product:product_delete', args=(pk,))\n res = self.client.delete(PRODUCT_DELETE_URL)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n", "<import token>\n<assignment token>\n\n\nclass PrivateProductsApiTests(TestCase):\n <docstring token>\n <function token>\n\n def test_retrieve_product_categories(self):\n \"\"\"Test retrieving products\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #2', description='Test Description #1',\n unit_price=12, quantity=15)\n res = self.client.get(PRODUCTS_LIST_URL)\n products = Product.objects.all().order_by('-name')\n serializer = ProductSerializer(products, many=False)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n def test_create_product_successful(self):\n \"\"\"Test creating a new product category\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n payload = {'name': 'Test Tag', 'product_category_id': test_key.get(\n 'id'), 'unit_price': 100, 'quantity': 12, 'description':\n 'Test description'}\n res = self.client.post(PRODUCT_ADD_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n\n def test_get_product_detail(self):\n \"\"\"Test viewing a product detail\"\"\"\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n PRODUCTS_DETAIL_URL = reverse('product:product_details', args=(pk,))\n res = self.client.get(PRODUCTS_DETAIL_URL)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n def test_update_product_successful(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n payload = {'name': 'testtt12312321t', 'description': '123123111111'}\n PRODUCTS_EDIT_URL = reverse('product:product_edit', args=(pk,))\n res = self.client.patch(PRODUCTS_EDIT_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n\n def test_update_product_deleted_successfully(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n PRODUCT_DELETE_URL = reverse('product:product_delete', args=(pk,))\n res = self.client.delete(PRODUCT_DELETE_URL)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n", "<import token>\n<assignment token>\n\n\nclass PrivateProductsApiTests(TestCase):\n <docstring token>\n <function token>\n\n def test_retrieve_product_categories(self):\n \"\"\"Test retrieving products\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #2', description='Test Description #1',\n unit_price=12, quantity=15)\n res = self.client.get(PRODUCTS_LIST_URL)\n products = Product.objects.all().order_by('-name')\n serializer = ProductSerializer(products, many=False)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n <function token>\n\n def test_get_product_detail(self):\n \"\"\"Test viewing a product detail\"\"\"\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n PRODUCTS_DETAIL_URL = reverse('product:product_details', args=(pk,))\n res = self.client.get(PRODUCTS_DETAIL_URL)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n def test_update_product_successful(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n payload = {'name': 'testtt12312321t', 'description': '123123111111'}\n PRODUCTS_EDIT_URL = reverse('product:product_edit', args=(pk,))\n res = self.client.patch(PRODUCTS_EDIT_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n\n def test_update_product_deleted_successfully(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n PRODUCT_DELETE_URL = reverse('product:product_delete', args=(pk,))\n res = self.client.delete(PRODUCT_DELETE_URL)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n", "<import token>\n<assignment token>\n\n\nclass PrivateProductsApiTests(TestCase):\n <docstring token>\n <function token>\n\n def test_retrieve_product_categories(self):\n \"\"\"Test retrieving products\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #2', description='Test Description #1',\n unit_price=12, quantity=15)\n res = self.client.get(PRODUCTS_LIST_URL)\n products = Product.objects.all().order_by('-name')\n serializer = ProductSerializer(products, many=False)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n <function token>\n <function token>\n\n def test_update_product_successful(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n payload = {'name': 'testtt12312321t', 'description': '123123111111'}\n PRODUCTS_EDIT_URL = reverse('product:product_edit', args=(pk,))\n res = self.client.patch(PRODUCTS_EDIT_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n\n def test_update_product_deleted_successfully(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n PRODUCT_DELETE_URL = reverse('product:product_delete', args=(pk,))\n res = self.client.delete(PRODUCT_DELETE_URL)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n", "<import token>\n<assignment token>\n\n\nclass PrivateProductsApiTests(TestCase):\n <docstring token>\n <function token>\n\n def test_retrieve_product_categories(self):\n \"\"\"Test retrieving products\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #2', description='Test Description #1',\n unit_price=12, quantity=15)\n res = self.client.get(PRODUCTS_LIST_URL)\n products = Product.objects.all().order_by('-name')\n serializer = ProductSerializer(products, many=False)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n <function token>\n <function token>\n\n def test_update_product_successful(self):\n ProductCategory.objects.create(name='Test Product Category #1',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #2',\n description='Test Description #1')\n ProductCategory.objects.create(name='Test Product Category #3',\n description='Test Description #1')\n test_key = ProductCategory.objects.values()[1].get('id')\n Product.objects.create(product_category_id=test_key, name=\n 'Test Product Category #1', description='Test Description #124',\n unit_price=12, quantity=15)\n pk = Product.objects.values()[0].get('id')\n payload = {'name': 'testtt12312321t', 'description': '123123111111'}\n PRODUCTS_EDIT_URL = reverse('product:product_edit', args=(pk,))\n res = self.client.patch(PRODUCTS_EDIT_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_204_NO_CONTENT)\n <function token>\n", "<import token>\n<assignment token>\n\n\nclass PrivateProductsApiTests(TestCase):\n <docstring token>\n <function token>\n\n def test_retrieve_product_categories(self):\n \"\"\"Test retrieving products\"\"\"\n ProductCategory.objects.create(name='test name', description='new name'\n )\n test_key = ProductCategory.objects.values()[0]\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #1', description='Test Description #1',\n unit_price=12, quantity=15)\n Product.objects.create(product_category_id=test_key.get('id'), name\n ='Test Product Category #2', description='Test Description #1',\n unit_price=12, quantity=15)\n res = self.client.get(PRODUCTS_LIST_URL)\n products = Product.objects.all().order_by('-name')\n serializer = ProductSerializer(products, many=False)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<assignment token>\n\n\nclass PrivateProductsApiTests(TestCase):\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<assignment token>\n<class token>\n" ]
false
98,930
32121f7fa5d9c4dbbec9023b4a6ec1d0e6faca83
#!/usr/bin/python # -*- coding: utf-8 -*- # # Licensed under the GNU General Public License, version 3. # See the file http://www.gnu.org/licenses/gpl.txt from pisi.actionsapi import shelltools from pisi.actionsapi import pisitools from pisi.actionsapi import autotools from pisi.actionsapi import libtools from pisi.actionsapi import get WorkDir = "." def setup(): shelltools.cd("MediaInfoLib/Project/GNU/Library") #libtools.libtoolize("--automake") #autotools.aclocal() #autotools.automake("-afc") #autotools.autoconf() shelltools.system("sh ./autogen.sh") autotools.configure("--enable-shared \ --disable-static \ --with-libcurl \ --with-libmms") def build(): shelltools.cd("MediaInfoLib/Project/GNU/Library") autotools.make() def install(): shelltools.cd("MediaInfoLib/Project/GNU/Library") autotools.rawInstall("DESTDIR=%s" % get.installDIR()) #autotools.install() #pisitools.dosed("libmediainfo.pc", "^(Version:)\s+$", r"\1 %s\n" % get.srcVERSION()) #pisitools.dosed("libmediainfo.pc", "^Libs_Static.*$", "") #pisitools.dodir("/usr/lib/pkgconfig") #pisitools.insinto("/usr/lib/pkgconfig", "libmediainfo.pc") shelltools.cd("../../../") pisitools.dodoc("*.txt") #pisitools.dohtml("*.html") #for it in ["MediaInfo", "MediaInfoDLL"]: #pisitools.dodir("/usr/include/%s" % it) #pisitools.insinto("/usr/include/%s" % it, "Source/%s/*.h" % it)
[ "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n#\n# Licensed under the GNU General Public License, version 3.\n# See the file http://www.gnu.org/licenses/gpl.txt\n\nfrom pisi.actionsapi import shelltools\nfrom pisi.actionsapi import pisitools\nfrom pisi.actionsapi import autotools\nfrom pisi.actionsapi import libtools\nfrom pisi.actionsapi import get\n\nWorkDir = \".\"\n\ndef setup():\n shelltools.cd(\"MediaInfoLib/Project/GNU/Library\")\n #libtools.libtoolize(\"--automake\")\n #autotools.aclocal()\n #autotools.automake(\"-afc\")\n #autotools.autoconf()\n shelltools.system(\"sh ./autogen.sh\")\n autotools.configure(\"--enable-shared \\\n --disable-static \\\n --with-libcurl \\\n --with-libmms\")\n\ndef build():\n shelltools.cd(\"MediaInfoLib/Project/GNU/Library\")\n autotools.make()\n\ndef install():\n shelltools.cd(\"MediaInfoLib/Project/GNU/Library\")\n autotools.rawInstall(\"DESTDIR=%s\" % get.installDIR())\n #autotools.install()\n #pisitools.dosed(\"libmediainfo.pc\", \"^(Version:)\\s+$\", r\"\\1 %s\\n\" % get.srcVERSION())\n #pisitools.dosed(\"libmediainfo.pc\", \"^Libs_Static.*$\", \"\")\n #pisitools.dodir(\"/usr/lib/pkgconfig\")\n #pisitools.insinto(\"/usr/lib/pkgconfig\", \"libmediainfo.pc\")\n shelltools.cd(\"../../../\")\n pisitools.dodoc(\"*.txt\")\n #pisitools.dohtml(\"*.html\")\n #for it in [\"MediaInfo\", \"MediaInfoDLL\"]:\n #pisitools.dodir(\"/usr/include/%s\" % it)\n #pisitools.insinto(\"/usr/include/%s\" % it, \"Source/%s/*.h\" % it)\n\n\n", "from pisi.actionsapi import shelltools\nfrom pisi.actionsapi import pisitools\nfrom pisi.actionsapi import autotools\nfrom pisi.actionsapi import libtools\nfrom pisi.actionsapi import get\nWorkDir = '.'\n\n\ndef setup():\n shelltools.cd('MediaInfoLib/Project/GNU/Library')\n shelltools.system('sh ./autogen.sh')\n autotools.configure(\n '--enable-shared --disable-static --with-libcurl --with-libmms'\n )\n\n\ndef build():\n shelltools.cd('MediaInfoLib/Project/GNU/Library')\n autotools.make()\n\n\ndef install():\n shelltools.cd('MediaInfoLib/Project/GNU/Library')\n autotools.rawInstall('DESTDIR=%s' % get.installDIR())\n shelltools.cd('../../../')\n pisitools.dodoc('*.txt')\n", "<import token>\nWorkDir = '.'\n\n\ndef setup():\n shelltools.cd('MediaInfoLib/Project/GNU/Library')\n shelltools.system('sh ./autogen.sh')\n autotools.configure(\n '--enable-shared --disable-static --with-libcurl --with-libmms'\n )\n\n\ndef build():\n shelltools.cd('MediaInfoLib/Project/GNU/Library')\n autotools.make()\n\n\ndef install():\n shelltools.cd('MediaInfoLib/Project/GNU/Library')\n autotools.rawInstall('DESTDIR=%s' % get.installDIR())\n shelltools.cd('../../../')\n pisitools.dodoc('*.txt')\n", "<import token>\n<assignment token>\n\n\ndef setup():\n shelltools.cd('MediaInfoLib/Project/GNU/Library')\n shelltools.system('sh ./autogen.sh')\n autotools.configure(\n '--enable-shared --disable-static --with-libcurl --with-libmms'\n )\n\n\ndef build():\n shelltools.cd('MediaInfoLib/Project/GNU/Library')\n autotools.make()\n\n\ndef install():\n shelltools.cd('MediaInfoLib/Project/GNU/Library')\n autotools.rawInstall('DESTDIR=%s' % get.installDIR())\n shelltools.cd('../../../')\n pisitools.dodoc('*.txt')\n", "<import token>\n<assignment token>\n<function token>\n\n\ndef build():\n shelltools.cd('MediaInfoLib/Project/GNU/Library')\n autotools.make()\n\n\ndef install():\n shelltools.cd('MediaInfoLib/Project/GNU/Library')\n autotools.rawInstall('DESTDIR=%s' % get.installDIR())\n shelltools.cd('../../../')\n pisitools.dodoc('*.txt')\n", "<import token>\n<assignment token>\n<function token>\n\n\ndef build():\n shelltools.cd('MediaInfoLib/Project/GNU/Library')\n autotools.make()\n\n\n<function token>\n", "<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n" ]
false
98,931
d6c6dc7bf0d6d7c98e72458c0332cbc61fe2a55f
import tweepy import time from kafka import KafkaConsumer, KafkaProducer from datetime import datetime, timedelta import os import subprocess import socket hostname = socket.gethostname() IPAddr = socket.gethostbyname(hostname) if os.name == 'nt': #Running the zookeeper server subprocess.Popen("(cd " + os.path.dirname(os.path.realpath(__file__)) + "\kafka " + "&& bin\windows\zookeeper-server-start.bat config\zookeeper.properties)", shell=True) time.sleep(15) #Running the kafka server subprocess.Popen("(cd " + os.path.dirname(os.path.realpath(__file__)) + "\kafka " + "&& bin\windows\kafka-server-start.bat config\server.properties)", shell=True) time.sleep(15) #Creating topic subprocess.Popen("(cd " + os.path.dirname(os.path.realpath(__file__)) + "\kafka " + "&& bin\windows\kafka-topics.bat --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic tweets-lambdal)", shell=True) time.sleep(15) #Creating consumer subprocess.Popen("(cd " + os.path.dirname(os.path.realpath(__file__)) + "\kafka " + "&& bin\windows\kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic tweets-lambdal --from-beginning)", shell=True) time.sleep(15) else: #Running the zookeeper server subprocess.Popen("/kafka/bin/zookeeper-server-start.sh /kafka/config/zookeeper.properties", shell=True) time.sleep(15) #Running the kafka server subprocess.Popen("(cd " + os.path.dirname(os.path.realpath(__file__)) + "/kafka " + "&& bin/kafka-server-start.sh config/server.properties)", shell=True) time.sleep(15) #Creating topic subprocess.Popen("(cd " + os.path.dirname(os.path.realpath(__file__)) + "/kafka " + "&& bin/kafka-topics.sh --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic tweets-lambdal)", shell=True) time.sleep(15) #Creating consumer subprocess.Popen("(cd " + os.path.dirname(os.path.realpath(__file__)) + "/kafka " + "&& bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic tweets-lambdal --from-beginning)", shell=True) time.sleep(15) #Keys for twitter API authentication consumer_key = "eejYxthKBRYGPUXehkNiQZD03" consumer_secret = "uWEEPyed0EFJK4FVa3aUe9beYlr6mUW12DFMociDWc6YfzQPzj" access_token = "871016646718214145-l0s1yU6f0xOF9LP8N7nF3iW323FWKqN" access_token_secret = "mjNu6BJQ5NFy1SZWlMHjExpwBamGtBFcCTE4UtTQOAAMP" #Setting up authentication and API auth = tweepy.OAuthHandler(consumer_key, consumer_secret) auth.set_access_token(access_token, access_token_secret) api = tweepy.API(auth) #Global variables for counting words per hour words_received = 0 words_per_hour = 0 seconds_spent = 1 # Working with time (normalizing timestamps) def normalize_timestamp(time): mytime = datetime.strptime(time, "%Y-%m-%d %H:%M:%S") mytime += timedelta(hours = 4) return (mytime.strftime("%Y-%m-%d %H:%M:%S")) producer = KafkaProducer(bootstrap_servers = 'localhost:9092') topic_name = 'tweets-lambdal' #Gets the twitter data def get_twitter_data(): global words_received global words_per_hour res = api.search("Donald Trump") for i in res: record = '' ''' A number of data we choose not to run. record += str(i.user.id_str) record += ';' record += str(i.user.followers_count) record += ';' record += str(i.user.location) record += ';' record += str(i.favorite_count) record += ';' record += str(i.retweet_count) record += ';' ''' record += str(i.user.name) record += '\n' record += str(normalize_timestamp(str(i.created_at))) record += '\n' record += str(i.text) words_received += len(i.text.split()) record += '\n' producer.send(topic_name, str.encode(record)) #Setting up the consumer consumer = KafkaConsumer( bootstrap_servers='localhost:9092', auto_offset_reset='latest', group_id='test4', consumer_timeout_ms=10000) consumer.subscribe('tweets-lambdal') #Reads twitter data every second def periodic_work(interval): global words_received global words_per_hour global seconds_spent while True: get_twitter_data() for message in consumer: print(message) words_per_hour = words_received * 3600/seconds_spent seconds_spent += 1 if seconds_spent == 3600: words_received = 0 seconds_spent = 1 print("------------------------" + str(words_per_hour) + "words per hour") producer.send(topic_name, str.encode("--------------------" + str(words_per_hour) + "words per hour")) time.sleep(interval) periodic_work(1)
[ "import tweepy\nimport time\nfrom kafka import KafkaConsumer, KafkaProducer\nfrom datetime import datetime, timedelta\nimport os\nimport subprocess\nimport socket\n\nhostname = socket.gethostname()\nIPAddr = socket.gethostbyname(hostname)\n\nif os.name == 'nt':\n\t#Running the zookeeper server\n\tsubprocess.Popen(\"(cd \" + os.path.dirname(os.path.realpath(__file__)) + \"\\kafka \" + \"&& bin\\windows\\zookeeper-server-start.bat config\\zookeeper.properties)\", shell=True)\n\ttime.sleep(15)\n\t#Running the kafka server\n\tsubprocess.Popen(\"(cd \" + os.path.dirname(os.path.realpath(__file__)) + \"\\kafka \" + \"&& bin\\windows\\kafka-server-start.bat config\\server.properties)\", shell=True)\n\ttime.sleep(15)\n\t#Creating topic\n\tsubprocess.Popen(\"(cd \" + os.path.dirname(os.path.realpath(__file__)) + \"\\kafka \" + \"&& bin\\windows\\kafka-topics.bat --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic tweets-lambdal)\", shell=True)\n\ttime.sleep(15)\n\t#Creating consumer\n\tsubprocess.Popen(\"(cd \" + os.path.dirname(os.path.realpath(__file__)) + \"\\kafka \" + \"&& bin\\windows\\kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic tweets-lambdal --from-beginning)\", shell=True)\n\ttime.sleep(15)\nelse:\n\t#Running the zookeeper server\n\tsubprocess.Popen(\"/kafka/bin/zookeeper-server-start.sh /kafka/config/zookeeper.properties\", shell=True)\n\ttime.sleep(15)\n\t#Running the kafka server\n\tsubprocess.Popen(\"(cd \" + os.path.dirname(os.path.realpath(__file__)) + \"/kafka \" + \"&& bin/kafka-server-start.sh config/server.properties)\", shell=True)\n\ttime.sleep(15)\n\t#Creating topic\n\tsubprocess.Popen(\"(cd \" + os.path.dirname(os.path.realpath(__file__)) + \"/kafka \" + \"&& bin/kafka-topics.sh --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic tweets-lambdal)\", shell=True)\n\ttime.sleep(15)\n\t#Creating consumer\n\tsubprocess.Popen(\"(cd \" + os.path.dirname(os.path.realpath(__file__)) + \"/kafka \" + \"&& bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic tweets-lambdal --from-beginning)\", shell=True)\n\ttime.sleep(15)\n\n#Keys for twitter API authentication\nconsumer_key = \"eejYxthKBRYGPUXehkNiQZD03\"\nconsumer_secret = \"uWEEPyed0EFJK4FVa3aUe9beYlr6mUW12DFMociDWc6YfzQPzj\"\naccess_token = \"871016646718214145-l0s1yU6f0xOF9LP8N7nF3iW323FWKqN\"\naccess_token_secret = \"mjNu6BJQ5NFy1SZWlMHjExpwBamGtBFcCTE4UtTQOAAMP\"\n#Setting up authentication and API\nauth = tweepy.OAuthHandler(consumer_key, consumer_secret)\nauth.set_access_token(access_token, access_token_secret)\napi = tweepy.API(auth)\n#Global variables for counting words per hour\nwords_received = 0\nwords_per_hour = 0\nseconds_spent = 1\n\n# Working with time (normalizing timestamps)\ndef normalize_timestamp(time):\n\tmytime = datetime.strptime(time, \"%Y-%m-%d %H:%M:%S\")\n\tmytime += timedelta(hours = 4)\n\treturn (mytime.strftime(\"%Y-%m-%d %H:%M:%S\"))\n\nproducer = KafkaProducer(bootstrap_servers = 'localhost:9092')\ntopic_name = 'tweets-lambdal'\n\n#Gets the twitter data\ndef get_twitter_data():\n\tglobal words_received\n\tglobal words_per_hour\n\tres = api.search(\"Donald Trump\")\n\tfor i in res:\n\t\trecord = ''\n\t\t'''\n\t\tA number of data we choose not to run.\n\t\trecord += str(i.user.id_str)\n\t\trecord += ';'\n\t\trecord += str(i.user.followers_count)\n\t\trecord += ';'\n\t\trecord += str(i.user.location)\n\t\trecord += ';'\n\t\trecord += str(i.favorite_count)\n\t\trecord += ';'\n\t\trecord += str(i.retweet_count)\n\t\trecord += ';'\n\t\t'''\n\t\trecord += str(i.user.name)\n\t\trecord += '\\n'\n\t\trecord += str(normalize_timestamp(str(i.created_at)))\n\t\trecord += '\\n'\n\t\trecord += str(i.text)\n\t\twords_received += len(i.text.split())\n\t\trecord += '\\n'\n\t\tproducer.send(topic_name, str.encode(record))\n\n#Setting up the consumer\nconsumer = KafkaConsumer(\n\tbootstrap_servers='localhost:9092',\n\tauto_offset_reset='latest',\n\tgroup_id='test4',\n\tconsumer_timeout_ms=10000)\nconsumer.subscribe('tweets-lambdal')\n#Reads twitter data every second\ndef periodic_work(interval):\n\tglobal words_received\n\tglobal words_per_hour\n\tglobal seconds_spent\n\twhile True:\n\t\tget_twitter_data()\n\t\tfor message in consumer:\n\t\t\tprint(message)\n\t\twords_per_hour = words_received * 3600/seconds_spent\n\t\tseconds_spent += 1\n\t\tif seconds_spent == 3600:\n\t\t\t words_received = 0\n\t\t\t seconds_spent = 1\n\t\tprint(\"------------------------\" + str(words_per_hour) + \"words per hour\")\n\t\tproducer.send(topic_name, str.encode(\"--------------------\" + str(words_per_hour) + \"words per hour\"))\n\t\ttime.sleep(interval)\nperiodic_work(1)\n", "import tweepy\nimport time\nfrom kafka import KafkaConsumer, KafkaProducer\nfrom datetime import datetime, timedelta\nimport os\nimport subprocess\nimport socket\nhostname = socket.gethostname()\nIPAddr = socket.gethostbyname(hostname)\nif os.name == 'nt':\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '\\\\kafka ' +\n '&& bin\\\\windows\\\\zookeeper-server-start.bat config\\\\zookeeper.properties)'\n , shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '\\\\kafka ' +\n '&& bin\\\\windows\\\\kafka-server-start.bat config\\\\server.properties)',\n shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '\\\\kafka ' +\n '&& bin\\\\windows\\\\kafka-topics.bat --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic tweets-lambdal)'\n , shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '\\\\kafka ' +\n '&& bin\\\\windows\\\\kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic tweets-lambdal --from-beginning)'\n , shell=True)\n time.sleep(15)\nelse:\n subprocess.Popen(\n '/kafka/bin/zookeeper-server-start.sh /kafka/config/zookeeper.properties'\n , shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '/kafka ' +\n '&& bin/kafka-server-start.sh config/server.properties)', shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '/kafka ' +\n '&& bin/kafka-topics.sh --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic tweets-lambdal)'\n , shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '/kafka ' +\n '&& bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic tweets-lambdal --from-beginning)'\n , shell=True)\n time.sleep(15)\nconsumer_key = 'eejYxthKBRYGPUXehkNiQZD03'\nconsumer_secret = 'uWEEPyed0EFJK4FVa3aUe9beYlr6mUW12DFMociDWc6YfzQPzj'\naccess_token = '871016646718214145-l0s1yU6f0xOF9LP8N7nF3iW323FWKqN'\naccess_token_secret = 'mjNu6BJQ5NFy1SZWlMHjExpwBamGtBFcCTE4UtTQOAAMP'\nauth = tweepy.OAuthHandler(consumer_key, consumer_secret)\nauth.set_access_token(access_token, access_token_secret)\napi = tweepy.API(auth)\nwords_received = 0\nwords_per_hour = 0\nseconds_spent = 1\n\n\ndef normalize_timestamp(time):\n mytime = datetime.strptime(time, '%Y-%m-%d %H:%M:%S')\n mytime += timedelta(hours=4)\n return mytime.strftime('%Y-%m-%d %H:%M:%S')\n\n\nproducer = KafkaProducer(bootstrap_servers='localhost:9092')\ntopic_name = 'tweets-lambdal'\n\n\ndef get_twitter_data():\n global words_received\n global words_per_hour\n res = api.search('Donald Trump')\n for i in res:\n record = ''\n \"\"\"\n\t\tA number of data we choose not to run.\n\t\trecord += str(i.user.id_str)\n\t\trecord += ';'\n\t\trecord += str(i.user.followers_count)\n\t\trecord += ';'\n\t\trecord += str(i.user.location)\n\t\trecord += ';'\n\t\trecord += str(i.favorite_count)\n\t\trecord += ';'\n\t\trecord += str(i.retweet_count)\n\t\trecord += ';'\n\t\t\"\"\"\n record += str(i.user.name)\n record += '\\n'\n record += str(normalize_timestamp(str(i.created_at)))\n record += '\\n'\n record += str(i.text)\n words_received += len(i.text.split())\n record += '\\n'\n producer.send(topic_name, str.encode(record))\n\n\nconsumer = KafkaConsumer(bootstrap_servers='localhost:9092',\n auto_offset_reset='latest', group_id='test4', consumer_timeout_ms=10000)\nconsumer.subscribe('tweets-lambdal')\n\n\ndef periodic_work(interval):\n global words_received\n global words_per_hour\n global seconds_spent\n while True:\n get_twitter_data()\n for message in consumer:\n print(message)\n words_per_hour = words_received * 3600 / seconds_spent\n seconds_spent += 1\n if seconds_spent == 3600:\n words_received = 0\n seconds_spent = 1\n print('------------------------' + str(words_per_hour) +\n 'words per hour')\n producer.send(topic_name, str.encode('--------------------' + str(\n words_per_hour) + 'words per hour'))\n time.sleep(interval)\n\n\nperiodic_work(1)\n", "<import token>\nhostname = socket.gethostname()\nIPAddr = socket.gethostbyname(hostname)\nif os.name == 'nt':\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '\\\\kafka ' +\n '&& bin\\\\windows\\\\zookeeper-server-start.bat config\\\\zookeeper.properties)'\n , shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '\\\\kafka ' +\n '&& bin\\\\windows\\\\kafka-server-start.bat config\\\\server.properties)',\n shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '\\\\kafka ' +\n '&& bin\\\\windows\\\\kafka-topics.bat --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic tweets-lambdal)'\n , shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '\\\\kafka ' +\n '&& bin\\\\windows\\\\kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic tweets-lambdal --from-beginning)'\n , shell=True)\n time.sleep(15)\nelse:\n subprocess.Popen(\n '/kafka/bin/zookeeper-server-start.sh /kafka/config/zookeeper.properties'\n , shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '/kafka ' +\n '&& bin/kafka-server-start.sh config/server.properties)', shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '/kafka ' +\n '&& bin/kafka-topics.sh --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic tweets-lambdal)'\n , shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '/kafka ' +\n '&& bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic tweets-lambdal --from-beginning)'\n , shell=True)\n time.sleep(15)\nconsumer_key = 'eejYxthKBRYGPUXehkNiQZD03'\nconsumer_secret = 'uWEEPyed0EFJK4FVa3aUe9beYlr6mUW12DFMociDWc6YfzQPzj'\naccess_token = '871016646718214145-l0s1yU6f0xOF9LP8N7nF3iW323FWKqN'\naccess_token_secret = 'mjNu6BJQ5NFy1SZWlMHjExpwBamGtBFcCTE4UtTQOAAMP'\nauth = tweepy.OAuthHandler(consumer_key, consumer_secret)\nauth.set_access_token(access_token, access_token_secret)\napi = tweepy.API(auth)\nwords_received = 0\nwords_per_hour = 0\nseconds_spent = 1\n\n\ndef normalize_timestamp(time):\n mytime = datetime.strptime(time, '%Y-%m-%d %H:%M:%S')\n mytime += timedelta(hours=4)\n return mytime.strftime('%Y-%m-%d %H:%M:%S')\n\n\nproducer = KafkaProducer(bootstrap_servers='localhost:9092')\ntopic_name = 'tweets-lambdal'\n\n\ndef get_twitter_data():\n global words_received\n global words_per_hour\n res = api.search('Donald Trump')\n for i in res:\n record = ''\n \"\"\"\n\t\tA number of data we choose not to run.\n\t\trecord += str(i.user.id_str)\n\t\trecord += ';'\n\t\trecord += str(i.user.followers_count)\n\t\trecord += ';'\n\t\trecord += str(i.user.location)\n\t\trecord += ';'\n\t\trecord += str(i.favorite_count)\n\t\trecord += ';'\n\t\trecord += str(i.retweet_count)\n\t\trecord += ';'\n\t\t\"\"\"\n record += str(i.user.name)\n record += '\\n'\n record += str(normalize_timestamp(str(i.created_at)))\n record += '\\n'\n record += str(i.text)\n words_received += len(i.text.split())\n record += '\\n'\n producer.send(topic_name, str.encode(record))\n\n\nconsumer = KafkaConsumer(bootstrap_servers='localhost:9092',\n auto_offset_reset='latest', group_id='test4', consumer_timeout_ms=10000)\nconsumer.subscribe('tweets-lambdal')\n\n\ndef periodic_work(interval):\n global words_received\n global words_per_hour\n global seconds_spent\n while True:\n get_twitter_data()\n for message in consumer:\n print(message)\n words_per_hour = words_received * 3600 / seconds_spent\n seconds_spent += 1\n if seconds_spent == 3600:\n words_received = 0\n seconds_spent = 1\n print('------------------------' + str(words_per_hour) +\n 'words per hour')\n producer.send(topic_name, str.encode('--------------------' + str(\n words_per_hour) + 'words per hour'))\n time.sleep(interval)\n\n\nperiodic_work(1)\n", "<import token>\n<assignment token>\nif os.name == 'nt':\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '\\\\kafka ' +\n '&& bin\\\\windows\\\\zookeeper-server-start.bat config\\\\zookeeper.properties)'\n , shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '\\\\kafka ' +\n '&& bin\\\\windows\\\\kafka-server-start.bat config\\\\server.properties)',\n shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '\\\\kafka ' +\n '&& bin\\\\windows\\\\kafka-topics.bat --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic tweets-lambdal)'\n , shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '\\\\kafka ' +\n '&& bin\\\\windows\\\\kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic tweets-lambdal --from-beginning)'\n , shell=True)\n time.sleep(15)\nelse:\n subprocess.Popen(\n '/kafka/bin/zookeeper-server-start.sh /kafka/config/zookeeper.properties'\n , shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '/kafka ' +\n '&& bin/kafka-server-start.sh config/server.properties)', shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '/kafka ' +\n '&& bin/kafka-topics.sh --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic tweets-lambdal)'\n , shell=True)\n time.sleep(15)\n subprocess.Popen('(cd ' + os.path.dirname(os.path.realpath(__file__)) +\n '/kafka ' +\n '&& bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic tweets-lambdal --from-beginning)'\n , shell=True)\n time.sleep(15)\n<assignment token>\nauth.set_access_token(access_token, access_token_secret)\n<assignment token>\n\n\ndef normalize_timestamp(time):\n mytime = datetime.strptime(time, '%Y-%m-%d %H:%M:%S')\n mytime += timedelta(hours=4)\n return mytime.strftime('%Y-%m-%d %H:%M:%S')\n\n\n<assignment token>\n\n\ndef get_twitter_data():\n global words_received\n global words_per_hour\n res = api.search('Donald Trump')\n for i in res:\n record = ''\n \"\"\"\n\t\tA number of data we choose not to run.\n\t\trecord += str(i.user.id_str)\n\t\trecord += ';'\n\t\trecord += str(i.user.followers_count)\n\t\trecord += ';'\n\t\trecord += str(i.user.location)\n\t\trecord += ';'\n\t\trecord += str(i.favorite_count)\n\t\trecord += ';'\n\t\trecord += str(i.retweet_count)\n\t\trecord += ';'\n\t\t\"\"\"\n record += str(i.user.name)\n record += '\\n'\n record += str(normalize_timestamp(str(i.created_at)))\n record += '\\n'\n record += str(i.text)\n words_received += len(i.text.split())\n record += '\\n'\n producer.send(topic_name, str.encode(record))\n\n\n<assignment token>\nconsumer.subscribe('tweets-lambdal')\n\n\ndef periodic_work(interval):\n global words_received\n global words_per_hour\n global seconds_spent\n while True:\n get_twitter_data()\n for message in consumer:\n print(message)\n words_per_hour = words_received * 3600 / seconds_spent\n seconds_spent += 1\n if seconds_spent == 3600:\n words_received = 0\n seconds_spent = 1\n print('------------------------' + str(words_per_hour) +\n 'words per hour')\n producer.send(topic_name, str.encode('--------------------' + str(\n words_per_hour) + 'words per hour'))\n time.sleep(interval)\n\n\nperiodic_work(1)\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\ndef normalize_timestamp(time):\n mytime = datetime.strptime(time, '%Y-%m-%d %H:%M:%S')\n mytime += timedelta(hours=4)\n return mytime.strftime('%Y-%m-%d %H:%M:%S')\n\n\n<assignment token>\n\n\ndef get_twitter_data():\n global words_received\n global words_per_hour\n res = api.search('Donald Trump')\n for i in res:\n record = ''\n \"\"\"\n\t\tA number of data we choose not to run.\n\t\trecord += str(i.user.id_str)\n\t\trecord += ';'\n\t\trecord += str(i.user.followers_count)\n\t\trecord += ';'\n\t\trecord += str(i.user.location)\n\t\trecord += ';'\n\t\trecord += str(i.favorite_count)\n\t\trecord += ';'\n\t\trecord += str(i.retweet_count)\n\t\trecord += ';'\n\t\t\"\"\"\n record += str(i.user.name)\n record += '\\n'\n record += str(normalize_timestamp(str(i.created_at)))\n record += '\\n'\n record += str(i.text)\n words_received += len(i.text.split())\n record += '\\n'\n producer.send(topic_name, str.encode(record))\n\n\n<assignment token>\n<code token>\n\n\ndef periodic_work(interval):\n global words_received\n global words_per_hour\n global seconds_spent\n while True:\n get_twitter_data()\n for message in consumer:\n print(message)\n words_per_hour = words_received * 3600 / seconds_spent\n seconds_spent += 1\n if seconds_spent == 3600:\n words_received = 0\n seconds_spent = 1\n print('------------------------' + str(words_per_hour) +\n 'words per hour')\n producer.send(topic_name, str.encode('--------------------' + str(\n words_per_hour) + 'words per hour'))\n time.sleep(interval)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\ndef normalize_timestamp(time):\n mytime = datetime.strptime(time, '%Y-%m-%d %H:%M:%S')\n mytime += timedelta(hours=4)\n return mytime.strftime('%Y-%m-%d %H:%M:%S')\n\n\n<assignment token>\n\n\ndef get_twitter_data():\n global words_received\n global words_per_hour\n res = api.search('Donald Trump')\n for i in res:\n record = ''\n \"\"\"\n\t\tA number of data we choose not to run.\n\t\trecord += str(i.user.id_str)\n\t\trecord += ';'\n\t\trecord += str(i.user.followers_count)\n\t\trecord += ';'\n\t\trecord += str(i.user.location)\n\t\trecord += ';'\n\t\trecord += str(i.favorite_count)\n\t\trecord += ';'\n\t\trecord += str(i.retweet_count)\n\t\trecord += ';'\n\t\t\"\"\"\n record += str(i.user.name)\n record += '\\n'\n record += str(normalize_timestamp(str(i.created_at)))\n record += '\\n'\n record += str(i.text)\n words_received += len(i.text.split())\n record += '\\n'\n producer.send(topic_name, str.encode(record))\n\n\n<assignment token>\n<code token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\ndef normalize_timestamp(time):\n mytime = datetime.strptime(time, '%Y-%m-%d %H:%M:%S')\n mytime += timedelta(hours=4)\n return mytime.strftime('%Y-%m-%d %H:%M:%S')\n\n\n<assignment token>\n<function token>\n<assignment token>\n<code token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n<assignment token>\n<function token>\n<assignment token>\n<code token>\n<function token>\n<code token>\n" ]
false
98,932
4c48bafcbc280e6836807106e737343c891a3732
#!/usr/bin/env python # # Simple asynchronous HTTP proxy with tunnelling (CONNECT). # # GET/POST proxying based on # http://groups.google.com/group/python-tornado/msg/7bea08e7a049cf26 # # Copyright (C) 2012 Senko Rasic <[email protected]> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. import logging import os import sys import socket import struct import errno import functools from urlparse import urlparse import tornado.httpserver import tornado.ioloop import tornado.web import tornado.httpclient import tornado.httputil import fukei.upstream.local from fukei.config import Config from fukei import crypto logger = logging.getLogger('tornado_proxy') __all__ = ['ProxyHandler', 'run_proxy'] class LocalConnectionHttps(object): def __init__(self, stream, address, upstream_cls): self.stream = stream self.addr = address self.upstream_cls = upstream_cls self.stream.set_close_callback(self.on_connection_close) self.dest = None self.on_connected() def on_connected(self): logger.debug('start connect...') self.atyp = 0x03 self.raw_dest_addr = struct.pack("!B", len(self.addr[0])) + self.addr[0] self.raw_dest_port = struct.pack("!H", self.addr[1]) self.dest = self.addr self.do_connect() def on_connection_close(self): logger.debug("disconnected!") self.clean_upstream() def do_connect(self): config = Config.current() logger.debug("server : %s, %s" % (config.server, config.server_port)) logger.debug("server dest: %s, %s" % self.dest) dest = (config.server, config.server_port) self.upstream = self.upstream_cls(dest, socket.AF_INET, self.on_upstream_connect, self.on_upstream_error, self.on_upstream_data, self.on_upstream_close) def on_upstream_connect(self, _dummy): config = Config.current() self.write_request() on_finish = functools.partial(self.on_socks_data, finished=True) self.stream.read_until_close(on_finish, self.on_socks_data) self.stream.write(b'HTTP/1.0 200 Connection established\r\n\r\n') # self.stream.read_until_close(self.client_close, self.read_from_client) def write_request(self, data=None): logger.debug('wait request...') address_type = self.atyp if data is None: if self.dest: data = self.raw_dest_addr + self.raw_dest_port else: data = struct.pack("!BLH", 0x01, 0x00, 0x00) else: if self.atyp == 0x03: address_type = 0x01 self.upstream.write(struct.pack("!B", address_type) + data) def on_upstream_error(self, _dummy, no): logger.debug("upstream error: %s" % no) self.stream.close() def on_upstream_data(self, _dummy, data, finished=False): try: self.stream.write(data) logger.debug("recevied %d bytes of data from upstream." % len(data)) except IOError as e: logger.debug("cannot write: %s" % str(e)) if self.upstream: self.upstream.close() if finished: self.on_connected() def on_upstream_close(self, _dummy=None): self.stream.close() logger.debug("upstream closed.") self.clean_upstream() def clean_upstream(self): if getattr(self, "upstream", None): self.upstream.close() self.upstream = None def on_socks_data(self, data, finished=False): if not self.upstream: return if data: self.upstream.write(data) logger.debug("sent %d bytes of data to upstream." % len(data)) class ProxyHandler(tornado.web.RequestHandler): SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT'] def compute_etag(self): return None # disable tornado Etag def on_connect(self): data = self.raw_dest_addr + self.raw_dest_port self.upstream.write(struct.pack("!B", 0x03) + data) data = "%s %s %s\r\n" % (self.request.method, self.request.uri.replace(self.request.protocol+"://"+self.request.host, ""), self.request.version) data += "\r\n".join(["%s: %s" % (i, j) for i, j in self.request.headers.items()])+"\r\n\r\n" self.upstream.write(data) # print self.request.body self.upstream.write(self.request.body) self.upstream.read_until('\r\n\r\n', self.on_headers) def on_headers(self, data): lines = data.split("\r\n") # print lines[0] self.request.connection.stream.write("%s\r\n" % lines[0]) headers_data = "\r\n".join(lines[1:]) # print headers_data self._headers = tornado.httputil.HTTPHeaders() # clear tornado default header headers = tornado.httputil.HTTPHeaders.parse(headers_data) for key, value in headers.get_all(): self.request.connection.stream.write("%s: %s\r\n" % (key, value)) self.request.connection.stream.write("\r\n") self.upstream.read_until_close(self.on_upstream_close, self.on_upstream_data) self.request.finish() def on_upstream_data(self, data): try: self.request.connection.stream.write(data) logger.debug("recevied %d bytes of data from upstream." % len(data)) except IOError as e: logger.debug("cannot write: %s" % str(e)) if self.upstream: self.upstream.close() def on_upstream_close(self, _dummy=None): self.request.finish() logger.debug("upstream closed.") self.clean_upstream() def clean_upstream(self): if getattr(self, "upstream", None): self.upstream.close() self.upstream = None def on_upstream_error(self, _dummy, no): logger.debug("upstream error: %s" % no) # self.upstream.close() self.request.finish() def on_close(self): if self.upstream and self.upstream.error: self.on_upstream_error(self, self.upstream.error) else: self.on_upstream_close(self) @tornado.web.asynchronous def get(self): # print self.request.connection._request_headers logger.debug('Handle %s request to %s', self.request.method, self.request.uri) addr = self.request.host.split(':') if len(addr) == 2: host, port = addr else: host, port = self.request.host, "80" self.addr = host, int(port) self.raw_dest_addr = struct.pack("!B", len(self.addr[0])) + self.addr[0] self.raw_dest_port = struct.pack("!H", self.addr[1]) dest = (config.server, config.server_port) self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.upstream = fukei.upstream.local.CryptoIOStream(self.socket) self.upstream.set_close_callback(self.on_close) self.upstream.connect(dest, self.on_connect) @tornado.web.asynchronous def post(self): return self.get() @tornado.web.asynchronous def connect(self): logger.debug('Start CONNECT to %s', self.request.uri) host, port = self.request.uri.split(':') connection = LocalConnectionHttps(self.request.connection.stream, (host, int(port)), fukei.upstream.local.LocalUpstream) if __name__ == '__main__': config_path = os.path.join(os.path.abspath(os.path.join(os.path.dirname(__file__))), 'config', 'config.json') config = Config.current(config_path) crypto.setup_table(config.password, config.method) from fukei.utils import log_config log_config('FukeiLocal', config.debug) app = tornado.web.Application([ (r'.*', ProxyHandler), ], debug=config.debug) app.listen(config.local_port) # print ("Starting HTTP proxy on port %d" % config.local_port) tornado.ioloop.IOLoop.instance().start()
[ "#!/usr/bin/env python\n#\n# Simple asynchronous HTTP proxy with tunnelling (CONNECT).\n#\n# GET/POST proxying based on\n# http://groups.google.com/group/python-tornado/msg/7bea08e7a049cf26\n#\n# Copyright (C) 2012 Senko Rasic <[email protected]>\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n# THE SOFTWARE.\n\nimport logging\nimport os\nimport sys\nimport socket\nimport struct\nimport errno\nimport functools\nfrom urlparse import urlparse\n\nimport tornado.httpserver\nimport tornado.ioloop\nimport tornado.web\nimport tornado.httpclient\nimport tornado.httputil\n\nimport fukei.upstream.local\nfrom fukei.config import Config\nfrom fukei import crypto\n\nlogger = logging.getLogger('tornado_proxy')\n\n__all__ = ['ProxyHandler', 'run_proxy']\n\n\nclass LocalConnectionHttps(object):\n def __init__(self, stream, address, upstream_cls):\n self.stream = stream\n self.addr = address\n self.upstream_cls = upstream_cls\n self.stream.set_close_callback(self.on_connection_close)\n self.dest = None\n self.on_connected()\n\n def on_connected(self):\n logger.debug('start connect...')\n self.atyp = 0x03\n self.raw_dest_addr = struct.pack(\"!B\", len(self.addr[0])) + self.addr[0]\n self.raw_dest_port = struct.pack(\"!H\", self.addr[1])\n self.dest = self.addr\n self.do_connect()\n\n def on_connection_close(self):\n logger.debug(\"disconnected!\")\n self.clean_upstream()\n\n def do_connect(self):\n config = Config.current()\n\n logger.debug(\"server : %s, %s\" % (config.server, config.server_port))\n logger.debug(\"server dest: %s, %s\" % self.dest)\n dest = (config.server, config.server_port)\n self.upstream = self.upstream_cls(dest, socket.AF_INET,\n self.on_upstream_connect, self.on_upstream_error,\n self.on_upstream_data, self.on_upstream_close)\n\n def on_upstream_connect(self, _dummy):\n config = Config.current()\n self.write_request()\n on_finish = functools.partial(self.on_socks_data, finished=True)\n self.stream.read_until_close(on_finish, self.on_socks_data)\n self.stream.write(b'HTTP/1.0 200 Connection established\\r\\n\\r\\n')\n # self.stream.read_until_close(self.client_close, self.read_from_client)\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack(\"!BLH\", 0x01, 0x00, 0x00)\n else:\n if self.atyp == 0x03:\n address_type = 0x01\n self.upstream.write(struct.pack(\"!B\", address_type) + data)\n\n def on_upstream_error(self, _dummy, no):\n logger.debug(\"upstream error: %s\" % no)\n self.stream.close()\n\n def on_upstream_data(self, _dummy, data, finished=False):\n try:\n self.stream.write(data)\n logger.debug(\"recevied %d bytes of data from upstream.\" %\n len(data))\n except IOError as e:\n logger.debug(\"cannot write: %s\" % str(e))\n if self.upstream:\n self.upstream.close()\n if finished:\n self.on_connected()\n\n def on_upstream_close(self, _dummy=None):\n self.stream.close()\n logger.debug(\"upstream closed.\")\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, \"upstream\", None):\n self.upstream.close()\n self.upstream = None\n\n def on_socks_data(self, data, finished=False):\n if not self.upstream:\n return\n if data:\n self.upstream.write(data)\n logger.debug(\"sent %d bytes of data to upstream.\" %\n len(data))\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None # disable tornado Etag\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack(\"!B\", 0x03) + data)\n\n data = \"%s %s %s\\r\\n\" % (self.request.method, self.request.uri.replace(self.request.protocol+\"://\"+self.request.host, \"\"), self.request.version)\n data += \"\\r\\n\".join([\"%s: %s\" % (i, j) for i, j in self.request.headers.items()])+\"\\r\\n\\r\\n\"\n self.upstream.write(data)\n # print self.request.body\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split(\"\\r\\n\")\n # print lines[0]\n self.request.connection.stream.write(\"%s\\r\\n\" % lines[0])\n\n headers_data = \"\\r\\n\".join(lines[1:])\n # print headers_data\n self._headers = tornado.httputil.HTTPHeaders() # clear tornado default header\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write(\"%s: %s\\r\\n\" % (key, value))\n self.request.connection.stream.write(\"\\r\\n\")\n\n self.upstream.read_until_close(self.on_upstream_close, self.on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug(\"recevied %d bytes of data from upstream.\" %\n len(data))\n except IOError as e:\n logger.debug(\"cannot write: %s\" % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug(\"upstream closed.\")\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, \"upstream\", None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug(\"upstream error: %s\" % no)\n # self.upstream.close()\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n # print self.request.connection._request_headers\n logger.debug('Handle %s request to %s', self.request.method,\n self.request.uri)\n\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, \"80\"\n\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack(\"!B\", len(self.addr[0])) + self.addr[0]\n self.raw_dest_port = struct.pack(\"!H\", self.addr[1])\n dest = (config.server, config.server_port)\n\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\nif __name__ == '__main__':\n config_path = os.path.join(os.path.abspath(os.path.join(os.path.dirname(__file__))), 'config', 'config.json')\n config = Config.current(config_path)\n crypto.setup_table(config.password, config.method)\n\n from fukei.utils import log_config\n log_config('FukeiLocal', config.debug)\n\n app = tornado.web.Application([\n (r'.*', ProxyHandler),\n ], debug=config.debug)\n\n app.listen(config.local_port)\n # print (\"Starting HTTP proxy on port %d\" % config.local_port)\n tornado.ioloop.IOLoop.instance().start()\n", "import logging\nimport os\nimport sys\nimport socket\nimport struct\nimport errno\nimport functools\nfrom urlparse import urlparse\nimport tornado.httpserver\nimport tornado.ioloop\nimport tornado.web\nimport tornado.httpclient\nimport tornado.httputil\nimport fukei.upstream.local\nfrom fukei.config import Config\nfrom fukei import crypto\nlogger = logging.getLogger('tornado_proxy')\n__all__ = ['ProxyHandler', 'run_proxy']\n\n\nclass LocalConnectionHttps(object):\n\n def __init__(self, stream, address, upstream_cls):\n self.stream = stream\n self.addr = address\n self.upstream_cls = upstream_cls\n self.stream.set_close_callback(self.on_connection_close)\n self.dest = None\n self.on_connected()\n\n def on_connected(self):\n logger.debug('start connect...')\n self.atyp = 3\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n self.dest = self.addr\n self.do_connect()\n\n def on_connection_close(self):\n logger.debug('disconnected!')\n self.clean_upstream()\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n\n def on_upstream_connect(self, _dummy):\n config = Config.current()\n self.write_request()\n on_finish = functools.partial(self.on_socks_data, finished=True)\n self.stream.read_until_close(on_finish, self.on_socks_data)\n self.stream.write(b'HTTP/1.0 200 Connection established\\r\\n\\r\\n')\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack('!BLH', 1, 0, 0)\n elif self.atyp == 3:\n address_type = 1\n self.upstream.write(struct.pack('!B', address_type) + data)\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.stream.close()\n\n def on_upstream_data(self, _dummy, data, finished=False):\n try:\n self.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n if finished:\n self.on_connected()\n\n def on_upstream_close(self, _dummy=None):\n self.stream.close()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_socks_data(self, data, finished=False):\n if not self.upstream:\n return\n if data:\n self.upstream.write(data)\n logger.debug('sent %d bytes of data to upstream.' % len(data))\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\nif __name__ == '__main__':\n config_path = os.path.join(os.path.abspath(os.path.join(os.path.dirname\n (__file__))), 'config', 'config.json')\n config = Config.current(config_path)\n crypto.setup_table(config.password, config.method)\n from fukei.utils import log_config\n log_config('FukeiLocal', config.debug)\n app = tornado.web.Application([('.*', ProxyHandler)], debug=config.debug)\n app.listen(config.local_port)\n tornado.ioloop.IOLoop.instance().start()\n", "<import token>\nlogger = logging.getLogger('tornado_proxy')\n__all__ = ['ProxyHandler', 'run_proxy']\n\n\nclass LocalConnectionHttps(object):\n\n def __init__(self, stream, address, upstream_cls):\n self.stream = stream\n self.addr = address\n self.upstream_cls = upstream_cls\n self.stream.set_close_callback(self.on_connection_close)\n self.dest = None\n self.on_connected()\n\n def on_connected(self):\n logger.debug('start connect...')\n self.atyp = 3\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n self.dest = self.addr\n self.do_connect()\n\n def on_connection_close(self):\n logger.debug('disconnected!')\n self.clean_upstream()\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n\n def on_upstream_connect(self, _dummy):\n config = Config.current()\n self.write_request()\n on_finish = functools.partial(self.on_socks_data, finished=True)\n self.stream.read_until_close(on_finish, self.on_socks_data)\n self.stream.write(b'HTTP/1.0 200 Connection established\\r\\n\\r\\n')\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack('!BLH', 1, 0, 0)\n elif self.atyp == 3:\n address_type = 1\n self.upstream.write(struct.pack('!B', address_type) + data)\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.stream.close()\n\n def on_upstream_data(self, _dummy, data, finished=False):\n try:\n self.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n if finished:\n self.on_connected()\n\n def on_upstream_close(self, _dummy=None):\n self.stream.close()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_socks_data(self, data, finished=False):\n if not self.upstream:\n return\n if data:\n self.upstream.write(data)\n logger.debug('sent %d bytes of data to upstream.' % len(data))\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\nif __name__ == '__main__':\n config_path = os.path.join(os.path.abspath(os.path.join(os.path.dirname\n (__file__))), 'config', 'config.json')\n config = Config.current(config_path)\n crypto.setup_table(config.password, config.method)\n from fukei.utils import log_config\n log_config('FukeiLocal', config.debug)\n app = tornado.web.Application([('.*', ProxyHandler)], debug=config.debug)\n app.listen(config.local_port)\n tornado.ioloop.IOLoop.instance().start()\n", "<import token>\n<assignment token>\n\n\nclass LocalConnectionHttps(object):\n\n def __init__(self, stream, address, upstream_cls):\n self.stream = stream\n self.addr = address\n self.upstream_cls = upstream_cls\n self.stream.set_close_callback(self.on_connection_close)\n self.dest = None\n self.on_connected()\n\n def on_connected(self):\n logger.debug('start connect...')\n self.atyp = 3\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n self.dest = self.addr\n self.do_connect()\n\n def on_connection_close(self):\n logger.debug('disconnected!')\n self.clean_upstream()\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n\n def on_upstream_connect(self, _dummy):\n config = Config.current()\n self.write_request()\n on_finish = functools.partial(self.on_socks_data, finished=True)\n self.stream.read_until_close(on_finish, self.on_socks_data)\n self.stream.write(b'HTTP/1.0 200 Connection established\\r\\n\\r\\n')\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack('!BLH', 1, 0, 0)\n elif self.atyp == 3:\n address_type = 1\n self.upstream.write(struct.pack('!B', address_type) + data)\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.stream.close()\n\n def on_upstream_data(self, _dummy, data, finished=False):\n try:\n self.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n if finished:\n self.on_connected()\n\n def on_upstream_close(self, _dummy=None):\n self.stream.close()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_socks_data(self, data, finished=False):\n if not self.upstream:\n return\n if data:\n self.upstream.write(data)\n logger.debug('sent %d bytes of data to upstream.' % len(data))\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\nif __name__ == '__main__':\n config_path = os.path.join(os.path.abspath(os.path.join(os.path.dirname\n (__file__))), 'config', 'config.json')\n config = Config.current(config_path)\n crypto.setup_table(config.password, config.method)\n from fukei.utils import log_config\n log_config('FukeiLocal', config.debug)\n app = tornado.web.Application([('.*', ProxyHandler)], debug=config.debug)\n app.listen(config.local_port)\n tornado.ioloop.IOLoop.instance().start()\n", "<import token>\n<assignment token>\n\n\nclass LocalConnectionHttps(object):\n\n def __init__(self, stream, address, upstream_cls):\n self.stream = stream\n self.addr = address\n self.upstream_cls = upstream_cls\n self.stream.set_close_callback(self.on_connection_close)\n self.dest = None\n self.on_connected()\n\n def on_connected(self):\n logger.debug('start connect...')\n self.atyp = 3\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n self.dest = self.addr\n self.do_connect()\n\n def on_connection_close(self):\n logger.debug('disconnected!')\n self.clean_upstream()\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n\n def on_upstream_connect(self, _dummy):\n config = Config.current()\n self.write_request()\n on_finish = functools.partial(self.on_socks_data, finished=True)\n self.stream.read_until_close(on_finish, self.on_socks_data)\n self.stream.write(b'HTTP/1.0 200 Connection established\\r\\n\\r\\n')\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack('!BLH', 1, 0, 0)\n elif self.atyp == 3:\n address_type = 1\n self.upstream.write(struct.pack('!B', address_type) + data)\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.stream.close()\n\n def on_upstream_data(self, _dummy, data, finished=False):\n try:\n self.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n if finished:\n self.on_connected()\n\n def on_upstream_close(self, _dummy=None):\n self.stream.close()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_socks_data(self, data, finished=False):\n if not self.upstream:\n return\n if data:\n self.upstream.write(data)\n logger.debug('sent %d bytes of data to upstream.' % len(data))\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass LocalConnectionHttps(object):\n\n def __init__(self, stream, address, upstream_cls):\n self.stream = stream\n self.addr = address\n self.upstream_cls = upstream_cls\n self.stream.set_close_callback(self.on_connection_close)\n self.dest = None\n self.on_connected()\n\n def on_connected(self):\n logger.debug('start connect...')\n self.atyp = 3\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n self.dest = self.addr\n self.do_connect()\n\n def on_connection_close(self):\n logger.debug('disconnected!')\n self.clean_upstream()\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n\n def on_upstream_connect(self, _dummy):\n config = Config.current()\n self.write_request()\n on_finish = functools.partial(self.on_socks_data, finished=True)\n self.stream.read_until_close(on_finish, self.on_socks_data)\n self.stream.write(b'HTTP/1.0 200 Connection established\\r\\n\\r\\n')\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack('!BLH', 1, 0, 0)\n elif self.atyp == 3:\n address_type = 1\n self.upstream.write(struct.pack('!B', address_type) + data)\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.stream.close()\n <function token>\n\n def on_upstream_close(self, _dummy=None):\n self.stream.close()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_socks_data(self, data, finished=False):\n if not self.upstream:\n return\n if data:\n self.upstream.write(data)\n logger.debug('sent %d bytes of data to upstream.' % len(data))\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass LocalConnectionHttps(object):\n\n def __init__(self, stream, address, upstream_cls):\n self.stream = stream\n self.addr = address\n self.upstream_cls = upstream_cls\n self.stream.set_close_callback(self.on_connection_close)\n self.dest = None\n self.on_connected()\n\n def on_connected(self):\n logger.debug('start connect...')\n self.atyp = 3\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n self.dest = self.addr\n self.do_connect()\n <function token>\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n\n def on_upstream_connect(self, _dummy):\n config = Config.current()\n self.write_request()\n on_finish = functools.partial(self.on_socks_data, finished=True)\n self.stream.read_until_close(on_finish, self.on_socks_data)\n self.stream.write(b'HTTP/1.0 200 Connection established\\r\\n\\r\\n')\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack('!BLH', 1, 0, 0)\n elif self.atyp == 3:\n address_type = 1\n self.upstream.write(struct.pack('!B', address_type) + data)\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.stream.close()\n <function token>\n\n def on_upstream_close(self, _dummy=None):\n self.stream.close()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_socks_data(self, data, finished=False):\n if not self.upstream:\n return\n if data:\n self.upstream.write(data)\n logger.debug('sent %d bytes of data to upstream.' % len(data))\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass LocalConnectionHttps(object):\n\n def __init__(self, stream, address, upstream_cls):\n self.stream = stream\n self.addr = address\n self.upstream_cls = upstream_cls\n self.stream.set_close_callback(self.on_connection_close)\n self.dest = None\n self.on_connected()\n\n def on_connected(self):\n logger.debug('start connect...')\n self.atyp = 3\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n self.dest = self.addr\n self.do_connect()\n <function token>\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n\n def on_upstream_connect(self, _dummy):\n config = Config.current()\n self.write_request()\n on_finish = functools.partial(self.on_socks_data, finished=True)\n self.stream.read_until_close(on_finish, self.on_socks_data)\n self.stream.write(b'HTTP/1.0 200 Connection established\\r\\n\\r\\n')\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack('!BLH', 1, 0, 0)\n elif self.atyp == 3:\n address_type = 1\n self.upstream.write(struct.pack('!B', address_type) + data)\n <function token>\n <function token>\n\n def on_upstream_close(self, _dummy=None):\n self.stream.close()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_socks_data(self, data, finished=False):\n if not self.upstream:\n return\n if data:\n self.upstream.write(data)\n logger.debug('sent %d bytes of data to upstream.' % len(data))\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass LocalConnectionHttps(object):\n\n def __init__(self, stream, address, upstream_cls):\n self.stream = stream\n self.addr = address\n self.upstream_cls = upstream_cls\n self.stream.set_close_callback(self.on_connection_close)\n self.dest = None\n self.on_connected()\n <function token>\n <function token>\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n\n def on_upstream_connect(self, _dummy):\n config = Config.current()\n self.write_request()\n on_finish = functools.partial(self.on_socks_data, finished=True)\n self.stream.read_until_close(on_finish, self.on_socks_data)\n self.stream.write(b'HTTP/1.0 200 Connection established\\r\\n\\r\\n')\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack('!BLH', 1, 0, 0)\n elif self.atyp == 3:\n address_type = 1\n self.upstream.write(struct.pack('!B', address_type) + data)\n <function token>\n <function token>\n\n def on_upstream_close(self, _dummy=None):\n self.stream.close()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_socks_data(self, data, finished=False):\n if not self.upstream:\n return\n if data:\n self.upstream.write(data)\n logger.debug('sent %d bytes of data to upstream.' % len(data))\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass LocalConnectionHttps(object):\n\n def __init__(self, stream, address, upstream_cls):\n self.stream = stream\n self.addr = address\n self.upstream_cls = upstream_cls\n self.stream.set_close_callback(self.on_connection_close)\n self.dest = None\n self.on_connected()\n <function token>\n <function token>\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n\n def on_upstream_connect(self, _dummy):\n config = Config.current()\n self.write_request()\n on_finish = functools.partial(self.on_socks_data, finished=True)\n self.stream.read_until_close(on_finish, self.on_socks_data)\n self.stream.write(b'HTTP/1.0 200 Connection established\\r\\n\\r\\n')\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack('!BLH', 1, 0, 0)\n elif self.atyp == 3:\n address_type = 1\n self.upstream.write(struct.pack('!B', address_type) + data)\n <function token>\n <function token>\n\n def on_upstream_close(self, _dummy=None):\n self.stream.close()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n <function token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass LocalConnectionHttps(object):\n\n def __init__(self, stream, address, upstream_cls):\n self.stream = stream\n self.addr = address\n self.upstream_cls = upstream_cls\n self.stream.set_close_callback(self.on_connection_close)\n self.dest = None\n self.on_connected()\n <function token>\n <function token>\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n\n def on_upstream_connect(self, _dummy):\n config = Config.current()\n self.write_request()\n on_finish = functools.partial(self.on_socks_data, finished=True)\n self.stream.read_until_close(on_finish, self.on_socks_data)\n self.stream.write(b'HTTP/1.0 200 Connection established\\r\\n\\r\\n')\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack('!BLH', 1, 0, 0)\n elif self.atyp == 3:\n address_type = 1\n self.upstream.write(struct.pack('!B', address_type) + data)\n <function token>\n <function token>\n\n def on_upstream_close(self, _dummy=None):\n self.stream.close()\n logger.debug('upstream closed.')\n self.clean_upstream()\n <function token>\n <function token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass LocalConnectionHttps(object):\n <function token>\n <function token>\n <function token>\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n\n def on_upstream_connect(self, _dummy):\n config = Config.current()\n self.write_request()\n on_finish = functools.partial(self.on_socks_data, finished=True)\n self.stream.read_until_close(on_finish, self.on_socks_data)\n self.stream.write(b'HTTP/1.0 200 Connection established\\r\\n\\r\\n')\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack('!BLH', 1, 0, 0)\n elif self.atyp == 3:\n address_type = 1\n self.upstream.write(struct.pack('!B', address_type) + data)\n <function token>\n <function token>\n\n def on_upstream_close(self, _dummy=None):\n self.stream.close()\n logger.debug('upstream closed.')\n self.clean_upstream()\n <function token>\n <function token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass LocalConnectionHttps(object):\n <function token>\n <function token>\n <function token>\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n\n def on_upstream_connect(self, _dummy):\n config = Config.current()\n self.write_request()\n on_finish = functools.partial(self.on_socks_data, finished=True)\n self.stream.read_until_close(on_finish, self.on_socks_data)\n self.stream.write(b'HTTP/1.0 200 Connection established\\r\\n\\r\\n')\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack('!BLH', 1, 0, 0)\n elif self.atyp == 3:\n address_type = 1\n self.upstream.write(struct.pack('!B', address_type) + data)\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass LocalConnectionHttps(object):\n <function token>\n <function token>\n <function token>\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n <function token>\n\n def write_request(self, data=None):\n logger.debug('wait request...')\n address_type = self.atyp\n if data is None:\n if self.dest:\n data = self.raw_dest_addr + self.raw_dest_port\n else:\n data = struct.pack('!BLH', 1, 0, 0)\n elif self.atyp == 3:\n address_type = 1\n self.upstream.write(struct.pack('!B', address_type) + data)\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass LocalConnectionHttps(object):\n <function token>\n <function token>\n <function token>\n\n def do_connect(self):\n config = Config.current()\n logger.debug('server : %s, %s' % (config.server, config.server_port))\n logger.debug('server dest: %s, %s' % self.dest)\n dest = config.server, config.server_port\n self.upstream = self.upstream_cls(dest, socket.AF_INET, self.\n on_upstream_connect, self.on_upstream_error, self.\n on_upstream_data, self.on_upstream_close)\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass LocalConnectionHttps(object):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT']\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n <assignment token>\n\n def compute_etag(self):\n return None\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n <assignment token>\n <function token>\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n\n def on_upstream_error(self, _dummy, no):\n logger.debug('upstream error: %s' % no)\n self.request.finish()\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n <assignment token>\n <function token>\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n <function token>\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n\n @tornado.web.asynchronous\n def get(self):\n logger.debug('Handle %s request to %s', self.request.method, self.\n request.uri)\n addr = self.request.host.split(':')\n if len(addr) == 2:\n host, port = addr\n else:\n host, port = self.request.host, '80'\n self.addr = host, int(port)\n self.raw_dest_addr = struct.pack('!B', len(self.addr[0])) + self.addr[0\n ]\n self.raw_dest_port = struct.pack('!H', self.addr[1])\n dest = config.server, config.server_port\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.upstream = fukei.upstream.local.CryptoIOStream(self.socket)\n self.upstream.set_close_callback(self.on_close)\n self.upstream.connect(dest, self.on_connect)\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n <assignment token>\n <function token>\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n\n def on_upstream_close(self, _dummy=None):\n self.request.finish()\n logger.debug('upstream closed.')\n self.clean_upstream()\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n <function token>\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n <function token>\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n <assignment token>\n <function token>\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n\n def on_headers(self, data):\n lines = data.split('\\r\\n')\n self.request.connection.stream.write('%s\\r\\n' % lines[0])\n headers_data = '\\r\\n'.join(lines[1:])\n self._headers = tornado.httputil.HTTPHeaders()\n headers = tornado.httputil.HTTPHeaders.parse(headers_data)\n for key, value in headers.get_all():\n self.request.connection.stream.write('%s: %s\\r\\n' % (key, value))\n self.request.connection.stream.write('\\r\\n')\n self.upstream.read_until_close(self.on_upstream_close, self.\n on_upstream_data)\n self.request.finish()\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n <function token>\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n <function token>\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n <function token>\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n <assignment token>\n <function token>\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n <function token>\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n <function token>\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n <function token>\n\n def on_close(self):\n if self.upstream and self.upstream.error:\n self.on_upstream_error(self, self.upstream.error)\n else:\n self.on_upstream_close(self)\n <function token>\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n <assignment token>\n <function token>\n\n def on_connect(self):\n data = self.raw_dest_addr + self.raw_dest_port\n self.upstream.write(struct.pack('!B', 3) + data)\n data = '%s %s %s\\r\\n' % (self.request.method, self.request.uri.\n replace(self.request.protocol + '://' + self.request.host, ''),\n self.request.version)\n data += '\\r\\n'.join([('%s: %s' % (i, j)) for i, j in self.request.\n headers.items()]) + '\\r\\n\\r\\n'\n self.upstream.write(data)\n self.upstream.write(self.request.body)\n self.upstream.read_until('\\r\\n\\r\\n', self.on_headers)\n <function token>\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n <function token>\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n <function token>\n <function token>\n <function token>\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n <function token>\n\n def clean_upstream(self):\n if getattr(self, 'upstream', None):\n self.upstream.close()\n self.upstream = None\n <function token>\n <function token>\n <function token>\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n\n @tornado.web.asynchronous\n def connect(self):\n logger.debug('Start CONNECT to %s', self.request.uri)\n host, port = self.request.uri.split(':')\n connection = LocalConnectionHttps(self.request.connection.stream, (\n host, int(port)), fukei.upstream.local.LocalUpstream)\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n def on_upstream_data(self, data):\n try:\n self.request.connection.stream.write(data)\n logger.debug('recevied %d bytes of data from upstream.' % len(data)\n )\n except IOError as e:\n logger.debug('cannot write: %s' % str(e))\n if self.upstream:\n self.upstream.close()\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n <function token>\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n @tornado.web.asynchronous\n def post(self):\n return self.get()\n <function token>\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n\n\nclass ProxyHandler(tornado.web.RequestHandler):\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n<class token>\n<code token>\n" ]
false
98,933
a67d26ad29041eb1d58d4f745e09bf52ac2b5531
import os import numpy as np import pandas as pd from bs4 import BeautifulSoup as bs import requests from configurations import wantedRows, letterNumbers, percentageNumbers, dates from converter import Converter import pymongo #from config import password from tickers import gold, spy import logging import datetime class CompanyInserter: def __init__(self, baseurl, connString, tickers): self.baseurl = baseurl self.connection = pymongo.MongoClient(connString) self.tickers = tickers def getName(self, html): """Gets the string of the currency stock is priced in.""" soup = bs(html, "lxml") results = soup.findAll("h1", {"data-reactid" : "7"}) if len(results) != 1: return False, None name = results[0].text.split(' (')[0] return True, name def getCompanyData(self, ticker): data = requests.get(self.baseurl + "%s?p=%s" % (ticker, ticker)) exists, name = self.getName(data.text) if exists: print("Inserting:{}".format(name)) col = self.connection['stocks']['spy'] col.insert_one({ 'name' : name, 'ticker' : ticker }) if __name__ == "__main__": logging.basicConfig(filename="miner.log", filemode='a', level=logging.INFO) baseurl = "https://uk.finance.yahoo.com/quote/" connString = "mongodb+srv://improve:%[email protected]/myFirstDatabase?retryWrites=true&w=majority" % os.environ["DB_PASSWORD"] companyInserter = CompanyInserter(baseurl, connString, spy) for ticker in companyInserter.tickers: companyInserter.getCompanyData(ticker)
[ "import os\nimport numpy as np \nimport pandas as pd\nfrom bs4 import BeautifulSoup as bs\nimport requests\nfrom configurations import wantedRows, letterNumbers, percentageNumbers, dates\nfrom converter import Converter\nimport pymongo\n#from config import password\nfrom tickers import gold, spy\nimport logging\nimport datetime\n\nclass CompanyInserter:\n\n def __init__(self, baseurl, connString, tickers):\n self.baseurl = baseurl\n self.connection = pymongo.MongoClient(connString)\n self.tickers = tickers\n\n def getName(self, html):\n \"\"\"Gets the string of the currency stock is priced in.\"\"\"\n soup = bs(html, \"lxml\")\n results = soup.findAll(\"h1\", {\"data-reactid\" : \"7\"})\n if len(results) != 1:\n return False, None\n name = results[0].text.split(' (')[0]\n return True, name\n\n def getCompanyData(self, ticker):\n data = requests.get(self.baseurl + \"%s?p=%s\" % (ticker, ticker))\n exists, name = self.getName(data.text)\n if exists:\n print(\"Inserting:{}\".format(name))\n col = self.connection['stocks']['spy']\n col.insert_one({\n 'name' : name,\n 'ticker' : ticker\n })\n\nif __name__ == \"__main__\":\n logging.basicConfig(filename=\"miner.log\", filemode='a', level=logging.INFO)\n baseurl = \"https://uk.finance.yahoo.com/quote/\"\n connString = \"mongodb+srv://improve:%[email protected]/myFirstDatabase?retryWrites=true&w=majority\" % os.environ[\"DB_PASSWORD\"]\n companyInserter = CompanyInserter(baseurl, connString, spy)\n \n for ticker in companyInserter.tickers:\n companyInserter.getCompanyData(ticker)", "import os\nimport numpy as np\nimport pandas as pd\nfrom bs4 import BeautifulSoup as bs\nimport requests\nfrom configurations import wantedRows, letterNumbers, percentageNumbers, dates\nfrom converter import Converter\nimport pymongo\nfrom tickers import gold, spy\nimport logging\nimport datetime\n\n\nclass CompanyInserter:\n\n def __init__(self, baseurl, connString, tickers):\n self.baseurl = baseurl\n self.connection = pymongo.MongoClient(connString)\n self.tickers = tickers\n\n def getName(self, html):\n \"\"\"Gets the string of the currency stock is priced in.\"\"\"\n soup = bs(html, 'lxml')\n results = soup.findAll('h1', {'data-reactid': '7'})\n if len(results) != 1:\n return False, None\n name = results[0].text.split(' (')[0]\n return True, name\n\n def getCompanyData(self, ticker):\n data = requests.get(self.baseurl + '%s?p=%s' % (ticker, ticker))\n exists, name = self.getName(data.text)\n if exists:\n print('Inserting:{}'.format(name))\n col = self.connection['stocks']['spy']\n col.insert_one({'name': name, 'ticker': ticker})\n\n\nif __name__ == '__main__':\n logging.basicConfig(filename='miner.log', filemode='a', level=logging.INFO)\n baseurl = 'https://uk.finance.yahoo.com/quote/'\n connString = (\n 'mongodb+srv://improve:%[email protected]/myFirstDatabase?retryWrites=true&w=majority'\n % os.environ['DB_PASSWORD'])\n companyInserter = CompanyInserter(baseurl, connString, spy)\n for ticker in companyInserter.tickers:\n companyInserter.getCompanyData(ticker)\n", "<import token>\n\n\nclass CompanyInserter:\n\n def __init__(self, baseurl, connString, tickers):\n self.baseurl = baseurl\n self.connection = pymongo.MongoClient(connString)\n self.tickers = tickers\n\n def getName(self, html):\n \"\"\"Gets the string of the currency stock is priced in.\"\"\"\n soup = bs(html, 'lxml')\n results = soup.findAll('h1', {'data-reactid': '7'})\n if len(results) != 1:\n return False, None\n name = results[0].text.split(' (')[0]\n return True, name\n\n def getCompanyData(self, ticker):\n data = requests.get(self.baseurl + '%s?p=%s' % (ticker, ticker))\n exists, name = self.getName(data.text)\n if exists:\n print('Inserting:{}'.format(name))\n col = self.connection['stocks']['spy']\n col.insert_one({'name': name, 'ticker': ticker})\n\n\nif __name__ == '__main__':\n logging.basicConfig(filename='miner.log', filemode='a', level=logging.INFO)\n baseurl = 'https://uk.finance.yahoo.com/quote/'\n connString = (\n 'mongodb+srv://improve:%[email protected]/myFirstDatabase?retryWrites=true&w=majority'\n % os.environ['DB_PASSWORD'])\n companyInserter = CompanyInserter(baseurl, connString, spy)\n for ticker in companyInserter.tickers:\n companyInserter.getCompanyData(ticker)\n", "<import token>\n\n\nclass CompanyInserter:\n\n def __init__(self, baseurl, connString, tickers):\n self.baseurl = baseurl\n self.connection = pymongo.MongoClient(connString)\n self.tickers = tickers\n\n def getName(self, html):\n \"\"\"Gets the string of the currency stock is priced in.\"\"\"\n soup = bs(html, 'lxml')\n results = soup.findAll('h1', {'data-reactid': '7'})\n if len(results) != 1:\n return False, None\n name = results[0].text.split(' (')[0]\n return True, name\n\n def getCompanyData(self, ticker):\n data = requests.get(self.baseurl + '%s?p=%s' % (ticker, ticker))\n exists, name = self.getName(data.text)\n if exists:\n print('Inserting:{}'.format(name))\n col = self.connection['stocks']['spy']\n col.insert_one({'name': name, 'ticker': ticker})\n\n\n<code token>\n", "<import token>\n\n\nclass CompanyInserter:\n\n def __init__(self, baseurl, connString, tickers):\n self.baseurl = baseurl\n self.connection = pymongo.MongoClient(connString)\n self.tickers = tickers\n\n def getName(self, html):\n \"\"\"Gets the string of the currency stock is priced in.\"\"\"\n soup = bs(html, 'lxml')\n results = soup.findAll('h1', {'data-reactid': '7'})\n if len(results) != 1:\n return False, None\n name = results[0].text.split(' (')[0]\n return True, name\n <function token>\n\n\n<code token>\n", "<import token>\n\n\nclass CompanyInserter:\n <function token>\n\n def getName(self, html):\n \"\"\"Gets the string of the currency stock is priced in.\"\"\"\n soup = bs(html, 'lxml')\n results = soup.findAll('h1', {'data-reactid': '7'})\n if len(results) != 1:\n return False, None\n name = results[0].text.split(' (')[0]\n return True, name\n <function token>\n\n\n<code token>\n", "<import token>\n\n\nclass CompanyInserter:\n <function token>\n <function token>\n <function token>\n\n\n<code token>\n", "<import token>\n<class token>\n<code token>\n" ]
false
98,934
4b58416902fd7e24ce40ee69282de2c10ecd85ee
from flask_login import UserMixin def create_special(db): class Special(db.Model): __tablename__ = "special" id = db.Column(db.Integer, primary_key=True) timestamp = db.Column(db.DateTime) month = db.Column(db.String(32)) category = db.Column(db.String(64)) brand = db.Column(db.String(64)) product = db.Column(db.String(64)) volAmt = db.Column(db.Float) volUnit = db.Column(db.String(32)) price = db.Column(db.Float) xpack = db.Column(db.Integer) container = db.Column(db.String(64)) varietals = db.Column(db.String(64)) def __repr__(self): return f"<Special {self.brand} {self.product}>" return Special def create_user(db): class User(UserMixin, db.Model): __tablename__ = "user" id = db.Column(db.Integer, primary_key=True) email = db.Column(db.String(64), unique=True) password = db.Column(db.String(255)) name = db.Column(db.String(32)) def __repr__(self): return f"<User {self.name}>" return User def create_staff(db): class Staff(db.Model): __tablename__ = "staff" id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(32)) pos = db.Column(db.String(32)) specialties = db.Column(db.String(64)) hobbies = db.Column(db.String(64)) goals = db.Column(db.String(64)) def __repr__(self): return f"<Staff {self.name}>" return Staff
[ "from flask_login import UserMixin\n\ndef create_special(db):\n class Special(db.Model):\n __tablename__ = \"special\"\n\n id = db.Column(db.Integer, primary_key=True)\n timestamp = db.Column(db.DateTime)\n month = db.Column(db.String(32))\n category = db.Column(db.String(64))\n brand = db.Column(db.String(64))\n product = db.Column(db.String(64))\n volAmt = db.Column(db.Float)\n volUnit = db.Column(db.String(32))\n price = db.Column(db.Float)\n xpack = db.Column(db.Integer)\n container = db.Column(db.String(64))\n varietals = db.Column(db.String(64))\n\n def __repr__(self):\n return f\"<Special {self.brand} {self.product}>\"\n return Special\n\ndef create_user(db):\n class User(UserMixin, db.Model):\n __tablename__ = \"user\"\n\n id = db.Column(db.Integer, primary_key=True)\n email = db.Column(db.String(64), unique=True)\n password = db.Column(db.String(255))\n name = db.Column(db.String(32))\n\n def __repr__(self):\n return f\"<User {self.name}>\"\n return User\n\ndef create_staff(db):\n class Staff(db.Model):\n __tablename__ = \"staff\"\n\n id = db.Column(db.Integer, primary_key=True)\n name = db.Column(db.String(32))\n pos = db.Column(db.String(32))\n specialties = db.Column(db.String(64))\n hobbies = db.Column(db.String(64))\n goals = db.Column(db.String(64))\n\n def __repr__(self):\n return f\"<Staff {self.name}>\"\n return Staff", "from flask_login import UserMixin\n\n\ndef create_special(db):\n\n\n class Special(db.Model):\n __tablename__ = 'special'\n id = db.Column(db.Integer, primary_key=True)\n timestamp = db.Column(db.DateTime)\n month = db.Column(db.String(32))\n category = db.Column(db.String(64))\n brand = db.Column(db.String(64))\n product = db.Column(db.String(64))\n volAmt = db.Column(db.Float)\n volUnit = db.Column(db.String(32))\n price = db.Column(db.Float)\n xpack = db.Column(db.Integer)\n container = db.Column(db.String(64))\n varietals = db.Column(db.String(64))\n\n def __repr__(self):\n return f'<Special {self.brand} {self.product}>'\n return Special\n\n\ndef create_user(db):\n\n\n class User(UserMixin, db.Model):\n __tablename__ = 'user'\n id = db.Column(db.Integer, primary_key=True)\n email = db.Column(db.String(64), unique=True)\n password = db.Column(db.String(255))\n name = db.Column(db.String(32))\n\n def __repr__(self):\n return f'<User {self.name}>'\n return User\n\n\ndef create_staff(db):\n\n\n class Staff(db.Model):\n __tablename__ = 'staff'\n id = db.Column(db.Integer, primary_key=True)\n name = db.Column(db.String(32))\n pos = db.Column(db.String(32))\n specialties = db.Column(db.String(64))\n hobbies = db.Column(db.String(64))\n goals = db.Column(db.String(64))\n\n def __repr__(self):\n return f'<Staff {self.name}>'\n return Staff\n", "<import token>\n\n\ndef create_special(db):\n\n\n class Special(db.Model):\n __tablename__ = 'special'\n id = db.Column(db.Integer, primary_key=True)\n timestamp = db.Column(db.DateTime)\n month = db.Column(db.String(32))\n category = db.Column(db.String(64))\n brand = db.Column(db.String(64))\n product = db.Column(db.String(64))\n volAmt = db.Column(db.Float)\n volUnit = db.Column(db.String(32))\n price = db.Column(db.Float)\n xpack = db.Column(db.Integer)\n container = db.Column(db.String(64))\n varietals = db.Column(db.String(64))\n\n def __repr__(self):\n return f'<Special {self.brand} {self.product}>'\n return Special\n\n\ndef create_user(db):\n\n\n class User(UserMixin, db.Model):\n __tablename__ = 'user'\n id = db.Column(db.Integer, primary_key=True)\n email = db.Column(db.String(64), unique=True)\n password = db.Column(db.String(255))\n name = db.Column(db.String(32))\n\n def __repr__(self):\n return f'<User {self.name}>'\n return User\n\n\ndef create_staff(db):\n\n\n class Staff(db.Model):\n __tablename__ = 'staff'\n id = db.Column(db.Integer, primary_key=True)\n name = db.Column(db.String(32))\n pos = db.Column(db.String(32))\n specialties = db.Column(db.String(64))\n hobbies = db.Column(db.String(64))\n goals = db.Column(db.String(64))\n\n def __repr__(self):\n return f'<Staff {self.name}>'\n return Staff\n", "<import token>\n<function token>\n\n\ndef create_user(db):\n\n\n class User(UserMixin, db.Model):\n __tablename__ = 'user'\n id = db.Column(db.Integer, primary_key=True)\n email = db.Column(db.String(64), unique=True)\n password = db.Column(db.String(255))\n name = db.Column(db.String(32))\n\n def __repr__(self):\n return f'<User {self.name}>'\n return User\n\n\ndef create_staff(db):\n\n\n class Staff(db.Model):\n __tablename__ = 'staff'\n id = db.Column(db.Integer, primary_key=True)\n name = db.Column(db.String(32))\n pos = db.Column(db.String(32))\n specialties = db.Column(db.String(64))\n hobbies = db.Column(db.String(64))\n goals = db.Column(db.String(64))\n\n def __repr__(self):\n return f'<Staff {self.name}>'\n return Staff\n", "<import token>\n<function token>\n<function token>\n\n\ndef create_staff(db):\n\n\n class Staff(db.Model):\n __tablename__ = 'staff'\n id = db.Column(db.Integer, primary_key=True)\n name = db.Column(db.String(32))\n pos = db.Column(db.String(32))\n specialties = db.Column(db.String(64))\n hobbies = db.Column(db.String(64))\n goals = db.Column(db.String(64))\n\n def __repr__(self):\n return f'<Staff {self.name}>'\n return Staff\n", "<import token>\n<function token>\n<function token>\n<function token>\n" ]
false
98,935
ad3a343de7ec22aa53bd9c1db532c7a7191c1536
''' The thief has found himself a new place for his thievery again. There is only one entrance to this area, called the "root." Besides the root, each house has one and only one parent house. After a tour, the smart thief realized that "all houses in this place forms a binary tree". It will automatically contact the police if two directly-linked houses were broken into on the same night. Determine the maximum amount of money the thief can rob tonight without alerting the police. Example 1: Input: [3,2,3,null,3,null,1] 3 / \ 2 3 \ \ 3 1 Output: 7 Explanation: Maximum amount of money the thief can rob = 3 + 3 + 1 = 7. Example 2: Input: [3,4,5,1,3,null,1] 3 / \ 4 5 / \ \ 1 3 1 Output: 9 Explanation: Maximum amount of money the thief can rob = 4 + 5 = 9. ''' # Definition for a binary tree node. # class TreeNode: # def __init__(self, val=0, left=None, right=None): # self.val = val # self.left = left # self.right = right class Solution: def rob(self, root: TreeNode) -> int: @lru_cache(maxsize=None) def dfs(root): if not root: return 0 val = 0 if root.left: val += dfs(root.left.left) + dfs(root.left.right) if root.right: val += dfs(root.right.left) + dfs(root.right.right) val = max(root.val + val, dfs(root.left) + dfs(root.right)) return val return dfs(root) class Solution: def rob(self, root: TreeNode) -> int: def rob_sub(root): if not root: return [0, 0] left = rob_sub(root.left) right = rob_sub(root.right) res = [0, 0] res[0] = max(left) + max(right) res[1] = root.val + left[0] + right[0] return res res = rob_sub(root) return max(res)
[ "'''\nThe thief has found himself a new place for his thievery again. There is only one entrance to this area, called the \"root.\" Besides the root, each house has one and only one parent house. After a tour, the smart thief realized that \"all houses in this place forms a binary tree\". It will automatically contact the police if two directly-linked houses were broken into on the same night.\n\nDetermine the maximum amount of money the thief can rob tonight without alerting the police.\n\nExample 1:\n\nInput: [3,2,3,null,3,null,1]\n\n 3\n / \\\n 2 3\n \\ \\ \n 3 1\n\nOutput: 7 \nExplanation: Maximum amount of money the thief can rob = 3 + 3 + 1 = 7.\nExample 2:\n\nInput: [3,4,5,1,3,null,1]\n\n 3\n / \\\n 4 5\n / \\ \\ \n 1 3 1\n\nOutput: 9\nExplanation: Maximum amount of money the thief can rob = 4 + 5 = 9.\n'''\n\n# Definition for a binary tree node.\n# class TreeNode:\n# def __init__(self, val=0, left=None, right=None):\n# self.val = val\n# self.left = left\n# self.right = right\nclass Solution:\n def rob(self, root: TreeNode) -> int:\n @lru_cache(maxsize=None)\n def dfs(root):\n if not root:\n return 0\n val = 0\n if root.left:\n val += dfs(root.left.left) + dfs(root.left.right)\n if root.right:\n val += dfs(root.right.left) + dfs(root.right.right)\n val = max(root.val + val, dfs(root.left) + dfs(root.right))\n\n return val\n \n return dfs(root)\n \nclass Solution:\n def rob(self, root: TreeNode) -> int:\n def rob_sub(root):\n if not root:\n return [0, 0]\n left = rob_sub(root.left)\n right = rob_sub(root.right)\n res = [0, 0]\n res[0] = max(left) + max(right)\n res[1] = root.val + left[0] + right[0]\n \n return res\n \n res = rob_sub(root)\n \n return max(res)\n", "<docstring token>\n\n\nclass Solution:\n\n def rob(self, root: TreeNode) ->int:\n\n @lru_cache(maxsize=None)\n def dfs(root):\n if not root:\n return 0\n val = 0\n if root.left:\n val += dfs(root.left.left) + dfs(root.left.right)\n if root.right:\n val += dfs(root.right.left) + dfs(root.right.right)\n val = max(root.val + val, dfs(root.left) + dfs(root.right))\n return val\n return dfs(root)\n\n\nclass Solution:\n\n def rob(self, root: TreeNode) ->int:\n\n def rob_sub(root):\n if not root:\n return [0, 0]\n left = rob_sub(root.left)\n right = rob_sub(root.right)\n res = [0, 0]\n res[0] = max(left) + max(right)\n res[1] = root.val + left[0] + right[0]\n return res\n res = rob_sub(root)\n return max(res)\n", "<docstring token>\n\n\nclass Solution:\n <function token>\n\n\nclass Solution:\n\n def rob(self, root: TreeNode) ->int:\n\n def rob_sub(root):\n if not root:\n return [0, 0]\n left = rob_sub(root.left)\n right = rob_sub(root.right)\n res = [0, 0]\n res[0] = max(left) + max(right)\n res[1] = root.val + left[0] + right[0]\n return res\n res = rob_sub(root)\n return max(res)\n", "<docstring token>\n<class token>\n\n\nclass Solution:\n\n def rob(self, root: TreeNode) ->int:\n\n def rob_sub(root):\n if not root:\n return [0, 0]\n left = rob_sub(root.left)\n right = rob_sub(root.right)\n res = [0, 0]\n res[0] = max(left) + max(right)\n res[1] = root.val + left[0] + right[0]\n return res\n res = rob_sub(root)\n return max(res)\n", "<docstring token>\n<class token>\n\n\nclass Solution:\n <function token>\n", "<docstring token>\n<class token>\n<class token>\n" ]
false
98,936
5445d994e66ec9c5c4e9003d8869617e6fc8be25
# -*- coding: utf-8 -*- from django.db import models from noproblem.accounts.models import UserProfile # Create your models here. class Area (models.Model): name = models.CharField(max_length=100,unique=True) def __unicode__(self): return self.name class SubArea (models.Model): name = models.CharField(max_length=100) area = models.ForeignKey(Area) def __unicode__(self): return u'%s (%s)' % (self.name, self.area.name) class Problem (models.Model): category = models.ForeignKey(SubArea) title = models.CharField(max_length=200) wording = models.TextField() points = models.IntegerField() #crea_date = models.DateTimeField('fecha de creacion') created_at = models.DateTimeField(auto_now_add = True) updated_at = models.DateTimeField(auto_now = True) requirements = models.ManyToManyField('Problem',blank=True,null=True) datos = models.CharField(max_length=200) solucion = models.CharField(max_length=200) creator = models.ForeignKey(UserProfile, blank=True, null=True) def __unicode__(self): return self.title def get_children(self): return Problem.objects.filter(requirements=self).all() def get_parents(self): return Problem.objects.filter(id__in=[o.id for o in Problem.objects.all() if self in o.get_children()]) def degree_out(self): return self.get_children().count() def degree_in(self): return self.get_parents().count() def data(self): "Returns the data needed to solve a problem" from noproblem.problems.pyconnecta import probs return getattr(probs, self.datos)() def solve(self,data): "Solves a problem given the needed data" from noproblem.problems.pyconnecta import probs return getattr(probs, self.solucion)(data) def solved_by_user(self,usr): return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists() def is_next_to_solve(self,usr): unsolved_root = (not self.get_parents() and not self.solved_by_user(usr)) unsolved = any([o.solved_by_user(usr) for o in self.get_parents()]) return unsolved_root or unsolved class Solves(models.Model): user = models.ForeignKey(UserProfile) prob = models.ForeignKey(Problem) date = models.DateTimeField() time = models.TimeField() is_correct = models.BooleanField()
[ "# -*- coding: utf-8 -*-\nfrom django.db import models\nfrom noproblem.accounts.models import UserProfile\n\n# Create your models here.\nclass Area (models.Model):\n name = models.CharField(max_length=100,unique=True)\n def __unicode__(self):\n return self.name\n\nclass SubArea (models.Model):\n name = models.CharField(max_length=100)\n area = models.ForeignKey(Area)\n def __unicode__(self):\n return u'%s (%s)' % (self.name, self.area.name)\n\nclass Problem (models.Model):\n\tcategory = models.ForeignKey(SubArea)\n\ttitle = models.CharField(max_length=200)\n\twording = models.TextField()\n\tpoints = models.IntegerField()\n\t#crea_date = models.DateTimeField('fecha de creacion')\n\tcreated_at = models.DateTimeField(auto_now_add = True)\n\tupdated_at = models.DateTimeField(auto_now = True)\n\trequirements = models.ManyToManyField('Problem',blank=True,null=True)\n\tdatos = models.CharField(max_length=200)\n\tsolucion = models.CharField(max_length=200)\n\tcreator = models.ForeignKey(UserProfile, blank=True, null=True)\n\tdef __unicode__(self):\n\t\treturn self.title\n\tdef get_children(self):\n\t\treturn Problem.objects.filter(requirements=self).all()\n\tdef get_parents(self):\n\t\treturn Problem.objects.filter(id__in=[o.id for o in Problem.objects.all() if self in o.get_children()])\n\tdef degree_out(self):\n\t\treturn self.get_children().count()\n\tdef degree_in(self):\n\t\treturn self.get_parents().count()\n\tdef data(self):\n\t\t\"Returns the data needed to solve a problem\"\n\t\tfrom noproblem.problems.pyconnecta import probs\n\t\treturn getattr(probs, self.datos)()\n\tdef solve(self,data):\n\t\t\"Solves a problem given the needed data\"\n\t\tfrom noproblem.problems.pyconnecta import probs\n\t\treturn getattr(probs, self.solucion)(data)\n\tdef solved_by_user(self,usr):\n\t\treturn Solves.objects.filter(user=usr, prob=self, is_correct=1).exists()\n\tdef is_next_to_solve(self,usr):\n\t\tunsolved_root = (not self.get_parents() and not self.solved_by_user(usr))\n\t\tunsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n\t\treturn unsolved_root or unsolved\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n\n\n\n\n", "from django.db import models\nfrom noproblem.accounts.models import UserProfile\n\n\nclass Area(models.Model):\n name = models.CharField(max_length=100, unique=True)\n\n def __unicode__(self):\n return self.name\n\n\nclass SubArea(models.Model):\n name = models.CharField(max_length=100)\n area = models.ForeignKey(Area)\n\n def __unicode__(self):\n return u'%s (%s)' % (self.name, self.area.name)\n\n\nclass Problem(models.Model):\n category = models.ForeignKey(SubArea)\n title = models.CharField(max_length=200)\n wording = models.TextField()\n points = models.IntegerField()\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n requirements = models.ManyToManyField('Problem', blank=True, null=True)\n datos = models.CharField(max_length=200)\n solucion = models.CharField(max_length=200)\n creator = models.ForeignKey(UserProfile, blank=True, null=True)\n\n def __unicode__(self):\n return self.title\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n\n def get_parents(self):\n return Problem.objects.filter(id__in=[o.id for o in Problem.objects\n .all() if self in o.get_children()])\n\n def degree_out(self):\n return self.get_children().count()\n\n def degree_in(self):\n return self.get_parents().count()\n\n def data(self):\n \"\"\"Returns the data needed to solve a problem\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.datos)()\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n\n\nclass Area(models.Model):\n name = models.CharField(max_length=100, unique=True)\n\n def __unicode__(self):\n return self.name\n\n\nclass SubArea(models.Model):\n name = models.CharField(max_length=100)\n area = models.ForeignKey(Area)\n\n def __unicode__(self):\n return u'%s (%s)' % (self.name, self.area.name)\n\n\nclass Problem(models.Model):\n category = models.ForeignKey(SubArea)\n title = models.CharField(max_length=200)\n wording = models.TextField()\n points = models.IntegerField()\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n requirements = models.ManyToManyField('Problem', blank=True, null=True)\n datos = models.CharField(max_length=200)\n solucion = models.CharField(max_length=200)\n creator = models.ForeignKey(UserProfile, blank=True, null=True)\n\n def __unicode__(self):\n return self.title\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n\n def get_parents(self):\n return Problem.objects.filter(id__in=[o.id for o in Problem.objects\n .all() if self in o.get_children()])\n\n def degree_out(self):\n return self.get_children().count()\n\n def degree_in(self):\n return self.get_parents().count()\n\n def data(self):\n \"\"\"Returns the data needed to solve a problem\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.datos)()\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n\n\nclass Area(models.Model):\n <assignment token>\n\n def __unicode__(self):\n return self.name\n\n\nclass SubArea(models.Model):\n name = models.CharField(max_length=100)\n area = models.ForeignKey(Area)\n\n def __unicode__(self):\n return u'%s (%s)' % (self.name, self.area.name)\n\n\nclass Problem(models.Model):\n category = models.ForeignKey(SubArea)\n title = models.CharField(max_length=200)\n wording = models.TextField()\n points = models.IntegerField()\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n requirements = models.ManyToManyField('Problem', blank=True, null=True)\n datos = models.CharField(max_length=200)\n solucion = models.CharField(max_length=200)\n creator = models.ForeignKey(UserProfile, blank=True, null=True)\n\n def __unicode__(self):\n return self.title\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n\n def get_parents(self):\n return Problem.objects.filter(id__in=[o.id for o in Problem.objects\n .all() if self in o.get_children()])\n\n def degree_out(self):\n return self.get_children().count()\n\n def degree_in(self):\n return self.get_parents().count()\n\n def data(self):\n \"\"\"Returns the data needed to solve a problem\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.datos)()\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n\n\nclass Area(models.Model):\n <assignment token>\n <function token>\n\n\nclass SubArea(models.Model):\n name = models.CharField(max_length=100)\n area = models.ForeignKey(Area)\n\n def __unicode__(self):\n return u'%s (%s)' % (self.name, self.area.name)\n\n\nclass Problem(models.Model):\n category = models.ForeignKey(SubArea)\n title = models.CharField(max_length=200)\n wording = models.TextField()\n points = models.IntegerField()\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n requirements = models.ManyToManyField('Problem', blank=True, null=True)\n datos = models.CharField(max_length=200)\n solucion = models.CharField(max_length=200)\n creator = models.ForeignKey(UserProfile, blank=True, null=True)\n\n def __unicode__(self):\n return self.title\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n\n def get_parents(self):\n return Problem.objects.filter(id__in=[o.id for o in Problem.objects\n .all() if self in o.get_children()])\n\n def degree_out(self):\n return self.get_children().count()\n\n def degree_in(self):\n return self.get_parents().count()\n\n def data(self):\n \"\"\"Returns the data needed to solve a problem\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.datos)()\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n\n\nclass SubArea(models.Model):\n name = models.CharField(max_length=100)\n area = models.ForeignKey(Area)\n\n def __unicode__(self):\n return u'%s (%s)' % (self.name, self.area.name)\n\n\nclass Problem(models.Model):\n category = models.ForeignKey(SubArea)\n title = models.CharField(max_length=200)\n wording = models.TextField()\n points = models.IntegerField()\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n requirements = models.ManyToManyField('Problem', blank=True, null=True)\n datos = models.CharField(max_length=200)\n solucion = models.CharField(max_length=200)\n creator = models.ForeignKey(UserProfile, blank=True, null=True)\n\n def __unicode__(self):\n return self.title\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n\n def get_parents(self):\n return Problem.objects.filter(id__in=[o.id for o in Problem.objects\n .all() if self in o.get_children()])\n\n def degree_out(self):\n return self.get_children().count()\n\n def degree_in(self):\n return self.get_parents().count()\n\n def data(self):\n \"\"\"Returns the data needed to solve a problem\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.datos)()\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n\n\nclass SubArea(models.Model):\n <assignment token>\n <assignment token>\n\n def __unicode__(self):\n return u'%s (%s)' % (self.name, self.area.name)\n\n\nclass Problem(models.Model):\n category = models.ForeignKey(SubArea)\n title = models.CharField(max_length=200)\n wording = models.TextField()\n points = models.IntegerField()\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n requirements = models.ManyToManyField('Problem', blank=True, null=True)\n datos = models.CharField(max_length=200)\n solucion = models.CharField(max_length=200)\n creator = models.ForeignKey(UserProfile, blank=True, null=True)\n\n def __unicode__(self):\n return self.title\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n\n def get_parents(self):\n return Problem.objects.filter(id__in=[o.id for o in Problem.objects\n .all() if self in o.get_children()])\n\n def degree_out(self):\n return self.get_children().count()\n\n def degree_in(self):\n return self.get_parents().count()\n\n def data(self):\n \"\"\"Returns the data needed to solve a problem\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.datos)()\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n\n\nclass SubArea(models.Model):\n <assignment token>\n <assignment token>\n <function token>\n\n\nclass Problem(models.Model):\n category = models.ForeignKey(SubArea)\n title = models.CharField(max_length=200)\n wording = models.TextField()\n points = models.IntegerField()\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n requirements = models.ManyToManyField('Problem', blank=True, null=True)\n datos = models.CharField(max_length=200)\n solucion = models.CharField(max_length=200)\n creator = models.ForeignKey(UserProfile, blank=True, null=True)\n\n def __unicode__(self):\n return self.title\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n\n def get_parents(self):\n return Problem.objects.filter(id__in=[o.id for o in Problem.objects\n .all() if self in o.get_children()])\n\n def degree_out(self):\n return self.get_children().count()\n\n def degree_in(self):\n return self.get_parents().count()\n\n def data(self):\n \"\"\"Returns the data needed to solve a problem\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.datos)()\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n<class token>\n\n\nclass Problem(models.Model):\n category = models.ForeignKey(SubArea)\n title = models.CharField(max_length=200)\n wording = models.TextField()\n points = models.IntegerField()\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n requirements = models.ManyToManyField('Problem', blank=True, null=True)\n datos = models.CharField(max_length=200)\n solucion = models.CharField(max_length=200)\n creator = models.ForeignKey(UserProfile, blank=True, null=True)\n\n def __unicode__(self):\n return self.title\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n\n def get_parents(self):\n return Problem.objects.filter(id__in=[o.id for o in Problem.objects\n .all() if self in o.get_children()])\n\n def degree_out(self):\n return self.get_children().count()\n\n def degree_in(self):\n return self.get_parents().count()\n\n def data(self):\n \"\"\"Returns the data needed to solve a problem\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.datos)()\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n<class token>\n\n\nclass Problem(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __unicode__(self):\n return self.title\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n\n def get_parents(self):\n return Problem.objects.filter(id__in=[o.id for o in Problem.objects\n .all() if self in o.get_children()])\n\n def degree_out(self):\n return self.get_children().count()\n\n def degree_in(self):\n return self.get_parents().count()\n\n def data(self):\n \"\"\"Returns the data needed to solve a problem\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.datos)()\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n<class token>\n\n\nclass Problem(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __unicode__(self):\n return self.title\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n\n def get_parents(self):\n return Problem.objects.filter(id__in=[o.id for o in Problem.objects\n .all() if self in o.get_children()])\n\n def degree_out(self):\n return self.get_children().count()\n <function token>\n\n def data(self):\n \"\"\"Returns the data needed to solve a problem\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.datos)()\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n<class token>\n\n\nclass Problem(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n\n def get_parents(self):\n return Problem.objects.filter(id__in=[o.id for o in Problem.objects\n .all() if self in o.get_children()])\n\n def degree_out(self):\n return self.get_children().count()\n <function token>\n\n def data(self):\n \"\"\"Returns the data needed to solve a problem\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.datos)()\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n<class token>\n\n\nclass Problem(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n <function token>\n\n def degree_out(self):\n return self.get_children().count()\n <function token>\n\n def data(self):\n \"\"\"Returns the data needed to solve a problem\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.datos)()\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n<class token>\n\n\nclass Problem(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n <function token>\n <function token>\n <function token>\n\n def data(self):\n \"\"\"Returns the data needed to solve a problem\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.datos)()\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n<class token>\n\n\nclass Problem(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def get_children(self):\n return Problem.objects.filter(requirements=self).all()\n <function token>\n <function token>\n <function token>\n <function token>\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n<class token>\n\n\nclass Problem(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def solve(self, data):\n \"\"\"Solves a problem given the needed data\"\"\"\n from noproblem.problems.pyconnecta import probs\n return getattr(probs, self.solucion)(data)\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n<class token>\n\n\nclass Problem(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n\n def is_next_to_solve(self, usr):\n unsolved_root = not self.get_parents() and not self.solved_by_user(usr)\n unsolved = any([o.solved_by_user(usr) for o in self.get_parents()])\n return unsolved_root or unsolved\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n<class token>\n\n\nclass Problem(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def solved_by_user(self, usr):\n return Solves.objects.filter(user=usr, prob=self, is_correct=1).exists(\n )\n <function token>\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n<class token>\n\n\nclass Problem(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass Solves(models.Model):\n user = models.ForeignKey(UserProfile)\n prob = models.ForeignKey(Problem)\n date = models.DateTimeField()\n time = models.TimeField()\n is_correct = models.BooleanField()\n", "<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass Solves(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n" ]
false
98,937
49d7b4ca5071109509d101ba1d4a2243fb16dea3
import pandas from demo20200322_CollaborativeFiltering.hyj.util import mid_train_data_path src_data = '../data/u.data' df = pandas.read_csv(src_data, sep='\t', names=['user_id', 'item_id', 'rating', 'timestamp']) # print(df.head()) # train_data 二位字典表 第一层key为用户id,value为字典(key=电影id,value=电影打分) train_data = dict() for _, row in df.iterrows(): user_id = str(row['user_id']) item_id = str(row['item_id']) rating = row['rating'] if train_data.get(user_id, -1) == -1: train_data[user_id] = {item_id: rating} else: train_data[user_id][item_id] = rating with open(mid_train_data_path,encoding='utf-8',mode='w') as f: f.write(str(train_data)) print('ok')
[ "import pandas\nfrom demo20200322_CollaborativeFiltering.hyj.util import mid_train_data_path\n\nsrc_data = '../data/u.data'\n\ndf = pandas.read_csv(src_data, sep='\\t', names=['user_id', 'item_id', 'rating', 'timestamp'])\n# print(df.head())\n# train_data 二位字典表 第一层key为用户id,value为字典(key=电影id,value=电影打分)\ntrain_data = dict()\nfor _, row in df.iterrows():\n user_id = str(row['user_id'])\n item_id = str(row['item_id'])\n rating = row['rating']\n\n if train_data.get(user_id, -1) == -1:\n train_data[user_id] = {item_id: rating}\n else:\n train_data[user_id][item_id] = rating\n\nwith open(mid_train_data_path,encoding='utf-8',mode='w') as f:\n f.write(str(train_data))\n print('ok')", "import pandas\nfrom demo20200322_CollaborativeFiltering.hyj.util import mid_train_data_path\nsrc_data = '../data/u.data'\ndf = pandas.read_csv(src_data, sep='\\t', names=['user_id', 'item_id',\n 'rating', 'timestamp'])\ntrain_data = dict()\nfor _, row in df.iterrows():\n user_id = str(row['user_id'])\n item_id = str(row['item_id'])\n rating = row['rating']\n if train_data.get(user_id, -1) == -1:\n train_data[user_id] = {item_id: rating}\n else:\n train_data[user_id][item_id] = rating\nwith open(mid_train_data_path, encoding='utf-8', mode='w') as f:\n f.write(str(train_data))\n print('ok')\n", "<import token>\nsrc_data = '../data/u.data'\ndf = pandas.read_csv(src_data, sep='\\t', names=['user_id', 'item_id',\n 'rating', 'timestamp'])\ntrain_data = dict()\nfor _, row in df.iterrows():\n user_id = str(row['user_id'])\n item_id = str(row['item_id'])\n rating = row['rating']\n if train_data.get(user_id, -1) == -1:\n train_data[user_id] = {item_id: rating}\n else:\n train_data[user_id][item_id] = rating\nwith open(mid_train_data_path, encoding='utf-8', mode='w') as f:\n f.write(str(train_data))\n print('ok')\n", "<import token>\n<assignment token>\nfor _, row in df.iterrows():\n user_id = str(row['user_id'])\n item_id = str(row['item_id'])\n rating = row['rating']\n if train_data.get(user_id, -1) == -1:\n train_data[user_id] = {item_id: rating}\n else:\n train_data[user_id][item_id] = rating\nwith open(mid_train_data_path, encoding='utf-8', mode='w') as f:\n f.write(str(train_data))\n print('ok')\n", "<import token>\n<assignment token>\n<code token>\n" ]
false
98,938
48ac91b0407569c00ddb9673da755a69f12cc5f0
#!/usr/bin/env python3 #enter reference out file from mapping.py followed by the sample file to be compared to the reference. If a third argument is included, it will be printed prior to a line of a sample that has no match in the reference import sys dict = {} for line in open(sys.argv[1]): fields = line.strip().split() dict [fields[0]] = fields[1] for line in open(sys.argv[2]): if line.startswith("t_id"): continue fields_2 = line.strip().split() sample_flyID = (fields_2[8]) if sample_flyID in dict.keys(): print(dict[sample_flyID] + line.strip("\n\r") ) else: if len(sys.argv) > 3: print (sys.argv[3] + " " + line.strip("\n\r"))
[ "#!/usr/bin/env python3\n\n#enter reference out file from mapping.py followed by the sample file to be compared to the reference. If a third argument is included, it will be printed prior to a line of a sample that has no match in the reference\n\nimport sys\n\ndict = {}\n\nfor line in open(sys.argv[1]):\n fields = line.strip().split()\n dict [fields[0]] = fields[1]\n\nfor line in open(sys.argv[2]):\n if line.startswith(\"t_id\"):\n continue\n fields_2 = line.strip().split()\n sample_flyID = (fields_2[8])\n \n if sample_flyID in dict.keys():\n print(dict[sample_flyID] + line.strip(\"\\n\\r\") )\n \n else: \n if len(sys.argv) > 3:\n print (sys.argv[3] + \" \" + line.strip(\"\\n\\r\"))\n \n \n \n \n \n \n\n\n ", "import sys\ndict = {}\nfor line in open(sys.argv[1]):\n fields = line.strip().split()\n dict[fields[0]] = fields[1]\nfor line in open(sys.argv[2]):\n if line.startswith('t_id'):\n continue\n fields_2 = line.strip().split()\n sample_flyID = fields_2[8]\n if sample_flyID in dict.keys():\n print(dict[sample_flyID] + line.strip('\\n\\r'))\n elif len(sys.argv) > 3:\n print(sys.argv[3] + ' ' + line.strip('\\n\\r'))\n", "<import token>\ndict = {}\nfor line in open(sys.argv[1]):\n fields = line.strip().split()\n dict[fields[0]] = fields[1]\nfor line in open(sys.argv[2]):\n if line.startswith('t_id'):\n continue\n fields_2 = line.strip().split()\n sample_flyID = fields_2[8]\n if sample_flyID in dict.keys():\n print(dict[sample_flyID] + line.strip('\\n\\r'))\n elif len(sys.argv) > 3:\n print(sys.argv[3] + ' ' + line.strip('\\n\\r'))\n", "<import token>\n<assignment token>\nfor line in open(sys.argv[1]):\n fields = line.strip().split()\n dict[fields[0]] = fields[1]\nfor line in open(sys.argv[2]):\n if line.startswith('t_id'):\n continue\n fields_2 = line.strip().split()\n sample_flyID = fields_2[8]\n if sample_flyID in dict.keys():\n print(dict[sample_flyID] + line.strip('\\n\\r'))\n elif len(sys.argv) > 3:\n print(sys.argv[3] + ' ' + line.strip('\\n\\r'))\n", "<import token>\n<assignment token>\n<code token>\n" ]
false
98,939
abbaad63a4bd5c253689281251f84dafb55e4e85
#!/usr/bin/env python from baselines.common import set_global_seeds, tf_util as U from baselines import bench import os.path as osp import gym, logging from baselines import logger import sys import joblib import tensorflow as tf import numpy as np from mpi4py import MPI from baselines.valueiteration.utils import * from baselines.valueiteration.value_iteration_learn import * def callback(localv, globalv): if localv['iters_so_far'] % 10 != 0: return save_dict = {} variables = localv['pi'].get_variables() for i in range(len(variables)): cur_val = variables[i].eval() save_dict[variables[i].name] = cur_val joblib.dump(save_dict, logger.get_dir()+'/policy_params_'+str(localv['iters_so_far'])+'.pkl', compress=True) joblib.dump(save_dict, logger.get_dir() + '/policy_params' + '.pkl', compress=True) def train(env_id, num_timesteps, seed): from baselines.ppo1 import mlp_mirror_policy from baselines.valueiteration import pposgd_disc U.make_session(num_cpu=1).__enter__() env = gym.make(env_id) '''path = 'data/value_iter_truehopper_discrete' [Vfunc, obs_disc, act_disc, state_filter_fn, state_unfilter_fn] = joblib.load(path + '/ref_policy_funcs.pkl') env.env.use_disc_ref_policy = True env.env.disc_ref_weight = 0.01 env.env.disc_funcs = [Vfunc, obs_disc, act_disc, state_filter_fn, state_unfilter_fn]''' def policy_fn(name, ob_space, ac_space): return mlp_mirror_policy.MlpMirrorPolicy(name=name, ob_space=ob_space, ac_space=ac_space, hid_size=64, num_hid_layers=3, gmm_comp=1, mirror_loss=True, observation_permutation=np.array( [1]*2), action_permutation=np.array( [0.001]*1)) env = bench.Monitor(env, logger.get_dir() and osp.join(logger.get_dir(), "monitor.json")) gym.logger.setLevel(logging.WARN) '''s_disc = [] for i in range(11): s_disc.append([30, 0.0, -0.0]) obs_disc = bin_disc(s_disc) act_disc = bin_disc([[10, 1.01, -1.01], [10, 1.01, -1.01], [10, 1.01, -1.01]]) state_filter_fn = state_filter_hopper state_unfilter_fn = state_unfilter_hopper''' obs_disc = bin_disc([[51, 0, -0.01], [51, 0.0, -0.01], [51, 0.0, -0.01], [51, 0.0, -0.01]]) act_disc = bin_disc([[100, 1.01, -1.01]]) state_filter_fn = state_filter_cartpole state_unfilter_fn = state_unfilter_cartpole pposgd_disc.learn(env, policy_fn, max_timesteps=num_timesteps, timesteps_per_batch=int(500), clip_param=0.2, entcoeff=0.0, optim_epochs=10, optim_stepsize=3e-4, optim_batchsize=64, gamma=0.99, lam=0.95, schedule='linear', callback=callback, sym_loss_weight = 0.0, #ref_policy_params=joblib.load('data/ppo_DartCartPoleSwingUp-v11_vanilla/policy_params.pkl') #discrete_learning = [obs_disc, act_disc, state_filter_fn, state_unfilter_fn, 0.2], #init_policy_params=joblib.load('data/ppo_DartHopper-v12_vanilla/policy_params.pkl') ) env.close() def main(): import argparse parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('--env', help='environment ID', default='DartHumanWalker-v1') parser.add_argument('--seed', help='RNG seed', type=int, default=0) args = parser.parse_args() logger.reset() logger.configure('data/ppo_'+args.env+str(args.seed)+'_vf_vanilla_weak_2k') #logger.configure('data/ppo_'+args.env+str(args.seed)+'_energy05_bal_vel4smooth_mirror_up1fwd01ltl1_spinepen1yaw001_thighyawpen005_initbentelbow_velrew3_dcontrolconstraint1_strongerarm_asinput_treadmill') train(args.env, num_timesteps=int(500*4*100), seed=args.seed) if __name__ == '__main__': main()
[ "#!/usr/bin/env python\nfrom baselines.common import set_global_seeds, tf_util as U\nfrom baselines import bench\nimport os.path as osp\nimport gym, logging\nfrom baselines import logger\nimport sys\nimport joblib\nimport tensorflow as tf\nimport numpy as np\nfrom mpi4py import MPI\n\nfrom baselines.valueiteration.utils import *\nfrom baselines.valueiteration.value_iteration_learn import *\n\ndef callback(localv, globalv):\n if localv['iters_so_far'] % 10 != 0:\n return\n save_dict = {}\n variables = localv['pi'].get_variables()\n for i in range(len(variables)):\n cur_val = variables[i].eval()\n save_dict[variables[i].name] = cur_val\n joblib.dump(save_dict, logger.get_dir()+'/policy_params_'+str(localv['iters_so_far'])+'.pkl', compress=True)\n joblib.dump(save_dict, logger.get_dir() + '/policy_params' + '.pkl', compress=True)\n\n\ndef train(env_id, num_timesteps, seed):\n from baselines.ppo1 import mlp_mirror_policy\n from baselines.valueiteration import pposgd_disc\n U.make_session(num_cpu=1).__enter__()\n env = gym.make(env_id)\n\n '''path = 'data/value_iter_truehopper_discrete'\n [Vfunc, obs_disc, act_disc, state_filter_fn, state_unfilter_fn] = joblib.load(path + '/ref_policy_funcs.pkl')\n env.env.use_disc_ref_policy = True\n env.env.disc_ref_weight = 0.01\n env.env.disc_funcs = [Vfunc, obs_disc, act_disc, state_filter_fn, state_unfilter_fn]'''\n\n def policy_fn(name, ob_space, ac_space):\n return mlp_mirror_policy.MlpMirrorPolicy(name=name, ob_space=ob_space, ac_space=ac_space,\n hid_size=64, num_hid_layers=3, gmm_comp=1,\n mirror_loss=True,\n observation_permutation=np.array(\n [1]*2),\n action_permutation=np.array(\n [0.001]*1))\n env = bench.Monitor(env, logger.get_dir() and\n osp.join(logger.get_dir(), \"monitor.json\"))\n gym.logger.setLevel(logging.WARN)\n\n '''s_disc = []\n for i in range(11):\n s_disc.append([30, 0.0, -0.0])\n obs_disc = bin_disc(s_disc)\n act_disc = bin_disc([[10, 1.01, -1.01], [10, 1.01, -1.01], [10, 1.01, -1.01]])\n state_filter_fn = state_filter_hopper\n state_unfilter_fn = state_unfilter_hopper'''\n\n obs_disc = bin_disc([[51, 0, -0.01], [51, 0.0, -0.01], [51, 0.0, -0.01], [51, 0.0, -0.01]])\n act_disc = bin_disc([[100, 1.01, -1.01]])\n state_filter_fn = state_filter_cartpole\n state_unfilter_fn = state_unfilter_cartpole\n\n pposgd_disc.learn(env, policy_fn,\n max_timesteps=num_timesteps,\n timesteps_per_batch=int(500),\n clip_param=0.2, entcoeff=0.0,\n optim_epochs=10, optim_stepsize=3e-4, optim_batchsize=64,\n gamma=0.99, lam=0.95, schedule='linear',\n callback=callback,\n sym_loss_weight = 0.0,\n #ref_policy_params=joblib.load('data/ppo_DartCartPoleSwingUp-v11_vanilla/policy_params.pkl')\n #discrete_learning = [obs_disc, act_disc, state_filter_fn, state_unfilter_fn, 0.2],\n #init_policy_params=joblib.load('data/ppo_DartHopper-v12_vanilla/policy_params.pkl')\n )\n env.close()\n\n\ndef main():\n import argparse\n parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n parser.add_argument('--env', help='environment ID', default='DartHumanWalker-v1')\n parser.add_argument('--seed', help='RNG seed', type=int, default=0)\n args = parser.parse_args()\n logger.reset()\n logger.configure('data/ppo_'+args.env+str(args.seed)+'_vf_vanilla_weak_2k')\n #logger.configure('data/ppo_'+args.env+str(args.seed)+'_energy05_bal_vel4smooth_mirror_up1fwd01ltl1_spinepen1yaw001_thighyawpen005_initbentelbow_velrew3_dcontrolconstraint1_strongerarm_asinput_treadmill')\n train(args.env, num_timesteps=int(500*4*100), seed=args.seed)\n\nif __name__ == '__main__':\n main()\n", "from baselines.common import set_global_seeds, tf_util as U\nfrom baselines import bench\nimport os.path as osp\nimport gym, logging\nfrom baselines import logger\nimport sys\nimport joblib\nimport tensorflow as tf\nimport numpy as np\nfrom mpi4py import MPI\nfrom baselines.valueiteration.utils import *\nfrom baselines.valueiteration.value_iteration_learn import *\n\n\ndef callback(localv, globalv):\n if localv['iters_so_far'] % 10 != 0:\n return\n save_dict = {}\n variables = localv['pi'].get_variables()\n for i in range(len(variables)):\n cur_val = variables[i].eval()\n save_dict[variables[i].name] = cur_val\n joblib.dump(save_dict, logger.get_dir() + '/policy_params_' + str(\n localv['iters_so_far']) + '.pkl', compress=True)\n joblib.dump(save_dict, logger.get_dir() + '/policy_params' + '.pkl',\n compress=True)\n\n\ndef train(env_id, num_timesteps, seed):\n from baselines.ppo1 import mlp_mirror_policy\n from baselines.valueiteration import pposgd_disc\n U.make_session(num_cpu=1).__enter__()\n env = gym.make(env_id)\n \"\"\"path = 'data/value_iter_truehopper_discrete'\n [Vfunc, obs_disc, act_disc, state_filter_fn, state_unfilter_fn] = joblib.load(path + '/ref_policy_funcs.pkl')\n env.env.use_disc_ref_policy = True\n env.env.disc_ref_weight = 0.01\n env.env.disc_funcs = [Vfunc, obs_disc, act_disc, state_filter_fn, state_unfilter_fn]\"\"\"\n\n def policy_fn(name, ob_space, ac_space):\n return mlp_mirror_policy.MlpMirrorPolicy(name=name, ob_space=\n ob_space, ac_space=ac_space, hid_size=64, num_hid_layers=3,\n gmm_comp=1, mirror_loss=True, observation_permutation=np.array(\n [1] * 2), action_permutation=np.array([0.001] * 1))\n env = bench.Monitor(env, logger.get_dir() and osp.join(logger.get_dir(),\n 'monitor.json'))\n gym.logger.setLevel(logging.WARN)\n \"\"\"s_disc = []\n for i in range(11):\n s_disc.append([30, 0.0, -0.0])\n obs_disc = bin_disc(s_disc)\n act_disc = bin_disc([[10, 1.01, -1.01], [10, 1.01, -1.01], [10, 1.01, -1.01]])\n state_filter_fn = state_filter_hopper\n state_unfilter_fn = state_unfilter_hopper\"\"\"\n obs_disc = bin_disc([[51, 0, -0.01], [51, 0.0, -0.01], [51, 0.0, -0.01],\n [51, 0.0, -0.01]])\n act_disc = bin_disc([[100, 1.01, -1.01]])\n state_filter_fn = state_filter_cartpole\n state_unfilter_fn = state_unfilter_cartpole\n pposgd_disc.learn(env, policy_fn, max_timesteps=num_timesteps,\n timesteps_per_batch=int(500), clip_param=0.2, entcoeff=0.0,\n optim_epochs=10, optim_stepsize=0.0003, optim_batchsize=64, gamma=\n 0.99, lam=0.95, schedule='linear', callback=callback,\n sym_loss_weight=0.0)\n env.close()\n\n\ndef main():\n import argparse\n parser = argparse.ArgumentParser(formatter_class=argparse.\n ArgumentDefaultsHelpFormatter)\n parser.add_argument('--env', help='environment ID', default=\n 'DartHumanWalker-v1')\n parser.add_argument('--seed', help='RNG seed', type=int, default=0)\n args = parser.parse_args()\n logger.reset()\n logger.configure('data/ppo_' + args.env + str(args.seed) +\n '_vf_vanilla_weak_2k')\n train(args.env, num_timesteps=int(500 * 4 * 100), seed=args.seed)\n\n\nif __name__ == '__main__':\n main()\n", "<import token>\n\n\ndef callback(localv, globalv):\n if localv['iters_so_far'] % 10 != 0:\n return\n save_dict = {}\n variables = localv['pi'].get_variables()\n for i in range(len(variables)):\n cur_val = variables[i].eval()\n save_dict[variables[i].name] = cur_val\n joblib.dump(save_dict, logger.get_dir() + '/policy_params_' + str(\n localv['iters_so_far']) + '.pkl', compress=True)\n joblib.dump(save_dict, logger.get_dir() + '/policy_params' + '.pkl',\n compress=True)\n\n\ndef train(env_id, num_timesteps, seed):\n from baselines.ppo1 import mlp_mirror_policy\n from baselines.valueiteration import pposgd_disc\n U.make_session(num_cpu=1).__enter__()\n env = gym.make(env_id)\n \"\"\"path = 'data/value_iter_truehopper_discrete'\n [Vfunc, obs_disc, act_disc, state_filter_fn, state_unfilter_fn] = joblib.load(path + '/ref_policy_funcs.pkl')\n env.env.use_disc_ref_policy = True\n env.env.disc_ref_weight = 0.01\n env.env.disc_funcs = [Vfunc, obs_disc, act_disc, state_filter_fn, state_unfilter_fn]\"\"\"\n\n def policy_fn(name, ob_space, ac_space):\n return mlp_mirror_policy.MlpMirrorPolicy(name=name, ob_space=\n ob_space, ac_space=ac_space, hid_size=64, num_hid_layers=3,\n gmm_comp=1, mirror_loss=True, observation_permutation=np.array(\n [1] * 2), action_permutation=np.array([0.001] * 1))\n env = bench.Monitor(env, logger.get_dir() and osp.join(logger.get_dir(),\n 'monitor.json'))\n gym.logger.setLevel(logging.WARN)\n \"\"\"s_disc = []\n for i in range(11):\n s_disc.append([30, 0.0, -0.0])\n obs_disc = bin_disc(s_disc)\n act_disc = bin_disc([[10, 1.01, -1.01], [10, 1.01, -1.01], [10, 1.01, -1.01]])\n state_filter_fn = state_filter_hopper\n state_unfilter_fn = state_unfilter_hopper\"\"\"\n obs_disc = bin_disc([[51, 0, -0.01], [51, 0.0, -0.01], [51, 0.0, -0.01],\n [51, 0.0, -0.01]])\n act_disc = bin_disc([[100, 1.01, -1.01]])\n state_filter_fn = state_filter_cartpole\n state_unfilter_fn = state_unfilter_cartpole\n pposgd_disc.learn(env, policy_fn, max_timesteps=num_timesteps,\n timesteps_per_batch=int(500), clip_param=0.2, entcoeff=0.0,\n optim_epochs=10, optim_stepsize=0.0003, optim_batchsize=64, gamma=\n 0.99, lam=0.95, schedule='linear', callback=callback,\n sym_loss_weight=0.0)\n env.close()\n\n\ndef main():\n import argparse\n parser = argparse.ArgumentParser(formatter_class=argparse.\n ArgumentDefaultsHelpFormatter)\n parser.add_argument('--env', help='environment ID', default=\n 'DartHumanWalker-v1')\n parser.add_argument('--seed', help='RNG seed', type=int, default=0)\n args = parser.parse_args()\n logger.reset()\n logger.configure('data/ppo_' + args.env + str(args.seed) +\n '_vf_vanilla_weak_2k')\n train(args.env, num_timesteps=int(500 * 4 * 100), seed=args.seed)\n\n\nif __name__ == '__main__':\n main()\n", "<import token>\n\n\ndef callback(localv, globalv):\n if localv['iters_so_far'] % 10 != 0:\n return\n save_dict = {}\n variables = localv['pi'].get_variables()\n for i in range(len(variables)):\n cur_val = variables[i].eval()\n save_dict[variables[i].name] = cur_val\n joblib.dump(save_dict, logger.get_dir() + '/policy_params_' + str(\n localv['iters_so_far']) + '.pkl', compress=True)\n joblib.dump(save_dict, logger.get_dir() + '/policy_params' + '.pkl',\n compress=True)\n\n\ndef train(env_id, num_timesteps, seed):\n from baselines.ppo1 import mlp_mirror_policy\n from baselines.valueiteration import pposgd_disc\n U.make_session(num_cpu=1).__enter__()\n env = gym.make(env_id)\n \"\"\"path = 'data/value_iter_truehopper_discrete'\n [Vfunc, obs_disc, act_disc, state_filter_fn, state_unfilter_fn] = joblib.load(path + '/ref_policy_funcs.pkl')\n env.env.use_disc_ref_policy = True\n env.env.disc_ref_weight = 0.01\n env.env.disc_funcs = [Vfunc, obs_disc, act_disc, state_filter_fn, state_unfilter_fn]\"\"\"\n\n def policy_fn(name, ob_space, ac_space):\n return mlp_mirror_policy.MlpMirrorPolicy(name=name, ob_space=\n ob_space, ac_space=ac_space, hid_size=64, num_hid_layers=3,\n gmm_comp=1, mirror_loss=True, observation_permutation=np.array(\n [1] * 2), action_permutation=np.array([0.001] * 1))\n env = bench.Monitor(env, logger.get_dir() and osp.join(logger.get_dir(),\n 'monitor.json'))\n gym.logger.setLevel(logging.WARN)\n \"\"\"s_disc = []\n for i in range(11):\n s_disc.append([30, 0.0, -0.0])\n obs_disc = bin_disc(s_disc)\n act_disc = bin_disc([[10, 1.01, -1.01], [10, 1.01, -1.01], [10, 1.01, -1.01]])\n state_filter_fn = state_filter_hopper\n state_unfilter_fn = state_unfilter_hopper\"\"\"\n obs_disc = bin_disc([[51, 0, -0.01], [51, 0.0, -0.01], [51, 0.0, -0.01],\n [51, 0.0, -0.01]])\n act_disc = bin_disc([[100, 1.01, -1.01]])\n state_filter_fn = state_filter_cartpole\n state_unfilter_fn = state_unfilter_cartpole\n pposgd_disc.learn(env, policy_fn, max_timesteps=num_timesteps,\n timesteps_per_batch=int(500), clip_param=0.2, entcoeff=0.0,\n optim_epochs=10, optim_stepsize=0.0003, optim_batchsize=64, gamma=\n 0.99, lam=0.95, schedule='linear', callback=callback,\n sym_loss_weight=0.0)\n env.close()\n\n\ndef main():\n import argparse\n parser = argparse.ArgumentParser(formatter_class=argparse.\n ArgumentDefaultsHelpFormatter)\n parser.add_argument('--env', help='environment ID', default=\n 'DartHumanWalker-v1')\n parser.add_argument('--seed', help='RNG seed', type=int, default=0)\n args = parser.parse_args()\n logger.reset()\n logger.configure('data/ppo_' + args.env + str(args.seed) +\n '_vf_vanilla_weak_2k')\n train(args.env, num_timesteps=int(500 * 4 * 100), seed=args.seed)\n\n\n<code token>\n", "<import token>\n\n\ndef callback(localv, globalv):\n if localv['iters_so_far'] % 10 != 0:\n return\n save_dict = {}\n variables = localv['pi'].get_variables()\n for i in range(len(variables)):\n cur_val = variables[i].eval()\n save_dict[variables[i].name] = cur_val\n joblib.dump(save_dict, logger.get_dir() + '/policy_params_' + str(\n localv['iters_so_far']) + '.pkl', compress=True)\n joblib.dump(save_dict, logger.get_dir() + '/policy_params' + '.pkl',\n compress=True)\n\n\n<function token>\n\n\ndef main():\n import argparse\n parser = argparse.ArgumentParser(formatter_class=argparse.\n ArgumentDefaultsHelpFormatter)\n parser.add_argument('--env', help='environment ID', default=\n 'DartHumanWalker-v1')\n parser.add_argument('--seed', help='RNG seed', type=int, default=0)\n args = parser.parse_args()\n logger.reset()\n logger.configure('data/ppo_' + args.env + str(args.seed) +\n '_vf_vanilla_weak_2k')\n train(args.env, num_timesteps=int(500 * 4 * 100), seed=args.seed)\n\n\n<code token>\n", "<import token>\n\n\ndef callback(localv, globalv):\n if localv['iters_so_far'] % 10 != 0:\n return\n save_dict = {}\n variables = localv['pi'].get_variables()\n for i in range(len(variables)):\n cur_val = variables[i].eval()\n save_dict[variables[i].name] = cur_val\n joblib.dump(save_dict, logger.get_dir() + '/policy_params_' + str(\n localv['iters_so_far']) + '.pkl', compress=True)\n joblib.dump(save_dict, logger.get_dir() + '/policy_params' + '.pkl',\n compress=True)\n\n\n<function token>\n<function token>\n<code token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<code token>\n" ]
false
98,940
0dd8aee281655f8ab31c79d33012382c0d708f2b
import json from datetime import datetime from time import sleep import schedule as schedule import websocket from gmo.gmo import GMO from gmocoin_bot.bot import GMOCoinBot, EBotState WEBSOCKET_CALL_WAIT_TIME = 3 CHANNEL_NAME_TICKER = 'ticker' CHANNEL_NAME_TRADES = 'trades' CHANNEL_NAME_EXECUTION = 'executionEvents' CHANNEL_NAME_ORDER = 'orderEvents' CHANNEL_NAME_POSITION = 'positionEvents' class GMOWebsocketManager: _ws_list: dict[str, websocket.WebSocketApp or None] _bots: list[GMOCoinBot] def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'): self._bots = bots self._chart = chart self._api = api self._sim_flg = sim_flg self._symbol = symbol self.__token = api.get_ws_access_token() self._ws_list = { CHANNEL_NAME_TICKER: None, CHANNEL_NAME_TRADES: None, CHANNEL_NAME_EXECUTION: None, CHANNEL_NAME_ORDER: None, CHANNEL_NAME_POSITION: None, } self._connect() self.__setup_timer() def __del__(self): for channel, ws in self._ws_list.items(): if ws and ws.keep_running: if channel in [CHANNEL_NAME_TICKER, CHANNEL_NAME_TRADES]: ws.send(json.dumps({"command": "unsubscribe", "channel": channel, "symbol": self._symbol})) else: ws.send(json.dumps({"command": "unsubscribe", "channel": channel})) ws.close() sleep(WEBSOCKET_CALL_WAIT_TIME) def __setup_timer(self): # 5秒ごとにwebソケットの状態を確認 schedule.every(5).seconds.do(self._connect) # 50分ごとにトークンの延長 schedule.every(50).minutes.do(self._extend_token) def _extend_token(self): if self._api.status() != 'OPEN' or not self.__token: return self._api.extend_ws_access_token(self.__token) print("[{}] TOKEN EXTENDED".format(datetime.now())) def _connect(self): for channel, ws in self._ws_list.items(): if channel in [CHANNEL_NAME_EXECUTION, CHANNEL_NAME_ORDER, CHANNEL_NAME_POSITION] and self._sim_flg: continue if not ws or not ws.keep_running: try: self._ws_list[channel] = self.__ws_subscribe(channel) except TimeoutError or ConnectionError: if self._ws_list[channel] and not self._ws_list[channel].sock.closed(): self._ws_list[channel].close() self._ws_list[channel] = None for b in [b for b in self._bots if b.get_state() != EBotState.Running]: b.run() def __ws_subscribe(self, channel) -> websocket.WebSocketApp or None: if channel == CHANNEL_NAME_TICKER: ws = self._api.subscribe_public_ws(CHANNEL_NAME_TICKER, self._symbol, lambda _, message: self.__on_ticker(json.loads(message))) elif channel == CHANNEL_NAME_TRADES: ws = self._api.subscribe_public_ws(CHANNEL_NAME_TRADES, self._symbol, lambda _, message: self.__update_trades(json.loads(message))) elif channel == CHANNEL_NAME_EXECUTION: ws = self._api.subscribe_private_ws(self.__token, CHANNEL_NAME_EXECUTION, lambda _, message: self.__on_execution_events(json.loads(message))) elif channel == CHANNEL_NAME_ORDER: ws = self._api.subscribe_private_ws(self.__token, CHANNEL_NAME_ORDER, lambda _, message: self.__on_order_events(json.loads(message))) elif channel == CHANNEL_NAME_POSITION: ws = self._api.subscribe_private_ws(self.__token, CHANNEL_NAME_POSITION, lambda _, message: self.__on_position_events(json.loads(message))) else: return None print("[{}] Subscribe [{}]".format(datetime.now(), channel)) sleep(WEBSOCKET_CALL_WAIT_TIME) # 一秒間1回しか購読できないため return ws def __update_trades(self, trade): self._chart.update(trade) def __on_execution_events(self, data): for b in self._bots: b.on_execution_events(data) def __on_order_events(self, data): for b in self._bots: b.on_order_events(data) def __on_position_events(self, data): for b in self._bots: b.on_position_events(data) def __on_ticker(self, data): for b in self._bots: b.update_ticker(data)
[ "import json\nfrom datetime import datetime\nfrom time import sleep\n\nimport schedule as schedule\nimport websocket\n\nfrom gmo.gmo import GMO\nfrom gmocoin_bot.bot import GMOCoinBot, EBotState\n\nWEBSOCKET_CALL_WAIT_TIME = 3\nCHANNEL_NAME_TICKER = 'ticker'\nCHANNEL_NAME_TRADES = 'trades'\nCHANNEL_NAME_EXECUTION = 'executionEvents'\nCHANNEL_NAME_ORDER = 'orderEvents'\nCHANNEL_NAME_POSITION = 'positionEvents'\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n\n def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'):\n self._bots = bots\n self._chart = chart\n self._api = api\n self._sim_flg = sim_flg\n self._symbol = symbol\n self.__token = api.get_ws_access_token()\n self._ws_list = {\n CHANNEL_NAME_TICKER: None,\n CHANNEL_NAME_TRADES: None,\n CHANNEL_NAME_EXECUTION: None,\n CHANNEL_NAME_ORDER: None,\n CHANNEL_NAME_POSITION: None,\n }\n\n self._connect()\n self.__setup_timer()\n\n def __del__(self):\n for channel, ws in self._ws_list.items():\n if ws and ws.keep_running:\n if channel in [CHANNEL_NAME_TICKER, CHANNEL_NAME_TRADES]:\n ws.send(json.dumps({\"command\": \"unsubscribe\", \"channel\": channel, \"symbol\": self._symbol}))\n else:\n ws.send(json.dumps({\"command\": \"unsubscribe\", \"channel\": channel}))\n ws.close()\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n\n def __setup_timer(self):\n # 5秒ごとにwebソケットの状態を確認\n schedule.every(5).seconds.do(self._connect)\n # 50分ごとにトークンの延長\n schedule.every(50).minutes.do(self._extend_token)\n\n def _extend_token(self):\n if self._api.status() != 'OPEN' or not self.__token:\n return\n\n self._api.extend_ws_access_token(self.__token)\n print(\"[{}] TOKEN EXTENDED\".format(datetime.now()))\n\n def _connect(self):\n for channel, ws in self._ws_list.items():\n if channel in [CHANNEL_NAME_EXECUTION, CHANNEL_NAME_ORDER, CHANNEL_NAME_POSITION] and self._sim_flg:\n continue\n\n if not ws or not ws.keep_running:\n try:\n self._ws_list[channel] = self.__ws_subscribe(channel)\n except TimeoutError or ConnectionError:\n if self._ws_list[channel] and not self._ws_list[channel].sock.closed():\n self._ws_list[channel].close()\n\n self._ws_list[channel] = None\n\n for b in [b for b in self._bots if b.get_state() != EBotState.Running]:\n b.run()\n\n def __ws_subscribe(self, channel) -> websocket.WebSocketApp or None:\n if channel == CHANNEL_NAME_TICKER:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TICKER, self._symbol, lambda _, message: self.__on_ticker(json.loads(message)))\n elif channel == CHANNEL_NAME_TRADES:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TRADES, self._symbol, lambda _, message: self.__update_trades(json.loads(message)))\n elif channel == CHANNEL_NAME_EXECUTION:\n ws = self._api.subscribe_private_ws(self.__token, CHANNEL_NAME_EXECUTION, lambda _, message: self.__on_execution_events(json.loads(message)))\n elif channel == CHANNEL_NAME_ORDER:\n ws = self._api.subscribe_private_ws(self.__token, CHANNEL_NAME_ORDER, lambda _, message: self.__on_order_events(json.loads(message)))\n elif channel == CHANNEL_NAME_POSITION:\n ws = self._api.subscribe_private_ws(self.__token, CHANNEL_NAME_POSITION, lambda _, message: self.__on_position_events(json.loads(message)))\n else:\n return None\n\n print(\"[{}] Subscribe [{}]\".format(datetime.now(), channel))\n sleep(WEBSOCKET_CALL_WAIT_TIME) # 一秒間1回しか購読できないため\n return ws\n\n def __update_trades(self, trade):\n self._chart.update(trade)\n\n def __on_execution_events(self, data):\n for b in self._bots:\n b.on_execution_events(data)\n\n def __on_order_events(self, data):\n for b in self._bots:\n b.on_order_events(data)\n\n def __on_position_events(self, data):\n for b in self._bots:\n b.on_position_events(data)\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "import json\nfrom datetime import datetime\nfrom time import sleep\nimport schedule as schedule\nimport websocket\nfrom gmo.gmo import GMO\nfrom gmocoin_bot.bot import GMOCoinBot, EBotState\nWEBSOCKET_CALL_WAIT_TIME = 3\nCHANNEL_NAME_TICKER = 'ticker'\nCHANNEL_NAME_TRADES = 'trades'\nCHANNEL_NAME_EXECUTION = 'executionEvents'\nCHANNEL_NAME_ORDER = 'orderEvents'\nCHANNEL_NAME_POSITION = 'positionEvents'\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n\n def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'):\n self._bots = bots\n self._chart = chart\n self._api = api\n self._sim_flg = sim_flg\n self._symbol = symbol\n self.__token = api.get_ws_access_token()\n self._ws_list = {CHANNEL_NAME_TICKER: None, CHANNEL_NAME_TRADES:\n None, CHANNEL_NAME_EXECUTION: None, CHANNEL_NAME_ORDER: None,\n CHANNEL_NAME_POSITION: None}\n self._connect()\n self.__setup_timer()\n\n def __del__(self):\n for channel, ws in self._ws_list.items():\n if ws and ws.keep_running:\n if channel in [CHANNEL_NAME_TICKER, CHANNEL_NAME_TRADES]:\n ws.send(json.dumps({'command': 'unsubscribe', 'channel':\n channel, 'symbol': self._symbol}))\n else:\n ws.send(json.dumps({'command': 'unsubscribe', 'channel':\n channel}))\n ws.close()\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n\n def __setup_timer(self):\n schedule.every(5).seconds.do(self._connect)\n schedule.every(50).minutes.do(self._extend_token)\n\n def _extend_token(self):\n if self._api.status() != 'OPEN' or not self.__token:\n return\n self._api.extend_ws_access_token(self.__token)\n print('[{}] TOKEN EXTENDED'.format(datetime.now()))\n\n def _connect(self):\n for channel, ws in self._ws_list.items():\n if channel in [CHANNEL_NAME_EXECUTION, CHANNEL_NAME_ORDER,\n CHANNEL_NAME_POSITION] and self._sim_flg:\n continue\n if not ws or not ws.keep_running:\n try:\n self._ws_list[channel] = self.__ws_subscribe(channel)\n except (TimeoutError or ConnectionError):\n if self._ws_list[channel] and not self._ws_list[channel\n ].sock.closed():\n self._ws_list[channel].close()\n self._ws_list[channel] = None\n for b in [b for b in self._bots if b.get_state() != EBotState.Running]:\n b.run()\n\n def __ws_subscribe(self, channel) ->(websocket.WebSocketApp or None):\n if channel == CHANNEL_NAME_TICKER:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TICKER, self.\n _symbol, lambda _, message: self.__on_ticker(json.loads(\n message)))\n elif channel == CHANNEL_NAME_TRADES:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TRADES, self.\n _symbol, lambda _, message: self.__update_trades(json.loads\n (message)))\n elif channel == CHANNEL_NAME_EXECUTION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_EXECUTION, lambda _, message: self.\n __on_execution_events(json.loads(message)))\n elif channel == CHANNEL_NAME_ORDER:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_ORDER, lambda _, message: self.\n __on_order_events(json.loads(message)))\n elif channel == CHANNEL_NAME_POSITION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_POSITION, lambda _, message: self.\n __on_position_events(json.loads(message)))\n else:\n return None\n print('[{}] Subscribe [{}]'.format(datetime.now(), channel))\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n return ws\n\n def __update_trades(self, trade):\n self._chart.update(trade)\n\n def __on_execution_events(self, data):\n for b in self._bots:\n b.on_execution_events(data)\n\n def __on_order_events(self, data):\n for b in self._bots:\n b.on_order_events(data)\n\n def __on_position_events(self, data):\n for b in self._bots:\n b.on_position_events(data)\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "<import token>\nWEBSOCKET_CALL_WAIT_TIME = 3\nCHANNEL_NAME_TICKER = 'ticker'\nCHANNEL_NAME_TRADES = 'trades'\nCHANNEL_NAME_EXECUTION = 'executionEvents'\nCHANNEL_NAME_ORDER = 'orderEvents'\nCHANNEL_NAME_POSITION = 'positionEvents'\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n\n def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'):\n self._bots = bots\n self._chart = chart\n self._api = api\n self._sim_flg = sim_flg\n self._symbol = symbol\n self.__token = api.get_ws_access_token()\n self._ws_list = {CHANNEL_NAME_TICKER: None, CHANNEL_NAME_TRADES:\n None, CHANNEL_NAME_EXECUTION: None, CHANNEL_NAME_ORDER: None,\n CHANNEL_NAME_POSITION: None}\n self._connect()\n self.__setup_timer()\n\n def __del__(self):\n for channel, ws in self._ws_list.items():\n if ws and ws.keep_running:\n if channel in [CHANNEL_NAME_TICKER, CHANNEL_NAME_TRADES]:\n ws.send(json.dumps({'command': 'unsubscribe', 'channel':\n channel, 'symbol': self._symbol}))\n else:\n ws.send(json.dumps({'command': 'unsubscribe', 'channel':\n channel}))\n ws.close()\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n\n def __setup_timer(self):\n schedule.every(5).seconds.do(self._connect)\n schedule.every(50).minutes.do(self._extend_token)\n\n def _extend_token(self):\n if self._api.status() != 'OPEN' or not self.__token:\n return\n self._api.extend_ws_access_token(self.__token)\n print('[{}] TOKEN EXTENDED'.format(datetime.now()))\n\n def _connect(self):\n for channel, ws in self._ws_list.items():\n if channel in [CHANNEL_NAME_EXECUTION, CHANNEL_NAME_ORDER,\n CHANNEL_NAME_POSITION] and self._sim_flg:\n continue\n if not ws or not ws.keep_running:\n try:\n self._ws_list[channel] = self.__ws_subscribe(channel)\n except (TimeoutError or ConnectionError):\n if self._ws_list[channel] and not self._ws_list[channel\n ].sock.closed():\n self._ws_list[channel].close()\n self._ws_list[channel] = None\n for b in [b for b in self._bots if b.get_state() != EBotState.Running]:\n b.run()\n\n def __ws_subscribe(self, channel) ->(websocket.WebSocketApp or None):\n if channel == CHANNEL_NAME_TICKER:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TICKER, self.\n _symbol, lambda _, message: self.__on_ticker(json.loads(\n message)))\n elif channel == CHANNEL_NAME_TRADES:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TRADES, self.\n _symbol, lambda _, message: self.__update_trades(json.loads\n (message)))\n elif channel == CHANNEL_NAME_EXECUTION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_EXECUTION, lambda _, message: self.\n __on_execution_events(json.loads(message)))\n elif channel == CHANNEL_NAME_ORDER:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_ORDER, lambda _, message: self.\n __on_order_events(json.loads(message)))\n elif channel == CHANNEL_NAME_POSITION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_POSITION, lambda _, message: self.\n __on_position_events(json.loads(message)))\n else:\n return None\n print('[{}] Subscribe [{}]'.format(datetime.now(), channel))\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n return ws\n\n def __update_trades(self, trade):\n self._chart.update(trade)\n\n def __on_execution_events(self, data):\n for b in self._bots:\n b.on_execution_events(data)\n\n def __on_order_events(self, data):\n for b in self._bots:\n b.on_order_events(data)\n\n def __on_position_events(self, data):\n for b in self._bots:\n b.on_position_events(data)\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "<import token>\n<assignment token>\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n\n def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'):\n self._bots = bots\n self._chart = chart\n self._api = api\n self._sim_flg = sim_flg\n self._symbol = symbol\n self.__token = api.get_ws_access_token()\n self._ws_list = {CHANNEL_NAME_TICKER: None, CHANNEL_NAME_TRADES:\n None, CHANNEL_NAME_EXECUTION: None, CHANNEL_NAME_ORDER: None,\n CHANNEL_NAME_POSITION: None}\n self._connect()\n self.__setup_timer()\n\n def __del__(self):\n for channel, ws in self._ws_list.items():\n if ws and ws.keep_running:\n if channel in [CHANNEL_NAME_TICKER, CHANNEL_NAME_TRADES]:\n ws.send(json.dumps({'command': 'unsubscribe', 'channel':\n channel, 'symbol': self._symbol}))\n else:\n ws.send(json.dumps({'command': 'unsubscribe', 'channel':\n channel}))\n ws.close()\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n\n def __setup_timer(self):\n schedule.every(5).seconds.do(self._connect)\n schedule.every(50).minutes.do(self._extend_token)\n\n def _extend_token(self):\n if self._api.status() != 'OPEN' or not self.__token:\n return\n self._api.extend_ws_access_token(self.__token)\n print('[{}] TOKEN EXTENDED'.format(datetime.now()))\n\n def _connect(self):\n for channel, ws in self._ws_list.items():\n if channel in [CHANNEL_NAME_EXECUTION, CHANNEL_NAME_ORDER,\n CHANNEL_NAME_POSITION] and self._sim_flg:\n continue\n if not ws or not ws.keep_running:\n try:\n self._ws_list[channel] = self.__ws_subscribe(channel)\n except (TimeoutError or ConnectionError):\n if self._ws_list[channel] and not self._ws_list[channel\n ].sock.closed():\n self._ws_list[channel].close()\n self._ws_list[channel] = None\n for b in [b for b in self._bots if b.get_state() != EBotState.Running]:\n b.run()\n\n def __ws_subscribe(self, channel) ->(websocket.WebSocketApp or None):\n if channel == CHANNEL_NAME_TICKER:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TICKER, self.\n _symbol, lambda _, message: self.__on_ticker(json.loads(\n message)))\n elif channel == CHANNEL_NAME_TRADES:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TRADES, self.\n _symbol, lambda _, message: self.__update_trades(json.loads\n (message)))\n elif channel == CHANNEL_NAME_EXECUTION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_EXECUTION, lambda _, message: self.\n __on_execution_events(json.loads(message)))\n elif channel == CHANNEL_NAME_ORDER:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_ORDER, lambda _, message: self.\n __on_order_events(json.loads(message)))\n elif channel == CHANNEL_NAME_POSITION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_POSITION, lambda _, message: self.\n __on_position_events(json.loads(message)))\n else:\n return None\n print('[{}] Subscribe [{}]'.format(datetime.now(), channel))\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n return ws\n\n def __update_trades(self, trade):\n self._chart.update(trade)\n\n def __on_execution_events(self, data):\n for b in self._bots:\n b.on_execution_events(data)\n\n def __on_order_events(self, data):\n for b in self._bots:\n b.on_order_events(data)\n\n def __on_position_events(self, data):\n for b in self._bots:\n b.on_position_events(data)\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "<import token>\n<assignment token>\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n\n def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'):\n self._bots = bots\n self._chart = chart\n self._api = api\n self._sim_flg = sim_flg\n self._symbol = symbol\n self.__token = api.get_ws_access_token()\n self._ws_list = {CHANNEL_NAME_TICKER: None, CHANNEL_NAME_TRADES:\n None, CHANNEL_NAME_EXECUTION: None, CHANNEL_NAME_ORDER: None,\n CHANNEL_NAME_POSITION: None}\n self._connect()\n self.__setup_timer()\n <function token>\n\n def __setup_timer(self):\n schedule.every(5).seconds.do(self._connect)\n schedule.every(50).minutes.do(self._extend_token)\n\n def _extend_token(self):\n if self._api.status() != 'OPEN' or not self.__token:\n return\n self._api.extend_ws_access_token(self.__token)\n print('[{}] TOKEN EXTENDED'.format(datetime.now()))\n\n def _connect(self):\n for channel, ws in self._ws_list.items():\n if channel in [CHANNEL_NAME_EXECUTION, CHANNEL_NAME_ORDER,\n CHANNEL_NAME_POSITION] and self._sim_flg:\n continue\n if not ws or not ws.keep_running:\n try:\n self._ws_list[channel] = self.__ws_subscribe(channel)\n except (TimeoutError or ConnectionError):\n if self._ws_list[channel] and not self._ws_list[channel\n ].sock.closed():\n self._ws_list[channel].close()\n self._ws_list[channel] = None\n for b in [b for b in self._bots if b.get_state() != EBotState.Running]:\n b.run()\n\n def __ws_subscribe(self, channel) ->(websocket.WebSocketApp or None):\n if channel == CHANNEL_NAME_TICKER:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TICKER, self.\n _symbol, lambda _, message: self.__on_ticker(json.loads(\n message)))\n elif channel == CHANNEL_NAME_TRADES:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TRADES, self.\n _symbol, lambda _, message: self.__update_trades(json.loads\n (message)))\n elif channel == CHANNEL_NAME_EXECUTION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_EXECUTION, lambda _, message: self.\n __on_execution_events(json.loads(message)))\n elif channel == CHANNEL_NAME_ORDER:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_ORDER, lambda _, message: self.\n __on_order_events(json.loads(message)))\n elif channel == CHANNEL_NAME_POSITION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_POSITION, lambda _, message: self.\n __on_position_events(json.loads(message)))\n else:\n return None\n print('[{}] Subscribe [{}]'.format(datetime.now(), channel))\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n return ws\n\n def __update_trades(self, trade):\n self._chart.update(trade)\n\n def __on_execution_events(self, data):\n for b in self._bots:\n b.on_execution_events(data)\n\n def __on_order_events(self, data):\n for b in self._bots:\n b.on_order_events(data)\n\n def __on_position_events(self, data):\n for b in self._bots:\n b.on_position_events(data)\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "<import token>\n<assignment token>\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n\n def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'):\n self._bots = bots\n self._chart = chart\n self._api = api\n self._sim_flg = sim_flg\n self._symbol = symbol\n self.__token = api.get_ws_access_token()\n self._ws_list = {CHANNEL_NAME_TICKER: None, CHANNEL_NAME_TRADES:\n None, CHANNEL_NAME_EXECUTION: None, CHANNEL_NAME_ORDER: None,\n CHANNEL_NAME_POSITION: None}\n self._connect()\n self.__setup_timer()\n <function token>\n\n def __setup_timer(self):\n schedule.every(5).seconds.do(self._connect)\n schedule.every(50).minutes.do(self._extend_token)\n <function token>\n\n def _connect(self):\n for channel, ws in self._ws_list.items():\n if channel in [CHANNEL_NAME_EXECUTION, CHANNEL_NAME_ORDER,\n CHANNEL_NAME_POSITION] and self._sim_flg:\n continue\n if not ws or not ws.keep_running:\n try:\n self._ws_list[channel] = self.__ws_subscribe(channel)\n except (TimeoutError or ConnectionError):\n if self._ws_list[channel] and not self._ws_list[channel\n ].sock.closed():\n self._ws_list[channel].close()\n self._ws_list[channel] = None\n for b in [b for b in self._bots if b.get_state() != EBotState.Running]:\n b.run()\n\n def __ws_subscribe(self, channel) ->(websocket.WebSocketApp or None):\n if channel == CHANNEL_NAME_TICKER:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TICKER, self.\n _symbol, lambda _, message: self.__on_ticker(json.loads(\n message)))\n elif channel == CHANNEL_NAME_TRADES:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TRADES, self.\n _symbol, lambda _, message: self.__update_trades(json.loads\n (message)))\n elif channel == CHANNEL_NAME_EXECUTION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_EXECUTION, lambda _, message: self.\n __on_execution_events(json.loads(message)))\n elif channel == CHANNEL_NAME_ORDER:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_ORDER, lambda _, message: self.\n __on_order_events(json.loads(message)))\n elif channel == CHANNEL_NAME_POSITION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_POSITION, lambda _, message: self.\n __on_position_events(json.loads(message)))\n else:\n return None\n print('[{}] Subscribe [{}]'.format(datetime.now(), channel))\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n return ws\n\n def __update_trades(self, trade):\n self._chart.update(trade)\n\n def __on_execution_events(self, data):\n for b in self._bots:\n b.on_execution_events(data)\n\n def __on_order_events(self, data):\n for b in self._bots:\n b.on_order_events(data)\n\n def __on_position_events(self, data):\n for b in self._bots:\n b.on_position_events(data)\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "<import token>\n<assignment token>\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n\n def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'):\n self._bots = bots\n self._chart = chart\n self._api = api\n self._sim_flg = sim_flg\n self._symbol = symbol\n self.__token = api.get_ws_access_token()\n self._ws_list = {CHANNEL_NAME_TICKER: None, CHANNEL_NAME_TRADES:\n None, CHANNEL_NAME_EXECUTION: None, CHANNEL_NAME_ORDER: None,\n CHANNEL_NAME_POSITION: None}\n self._connect()\n self.__setup_timer()\n <function token>\n\n def __setup_timer(self):\n schedule.every(5).seconds.do(self._connect)\n schedule.every(50).minutes.do(self._extend_token)\n <function token>\n\n def _connect(self):\n for channel, ws in self._ws_list.items():\n if channel in [CHANNEL_NAME_EXECUTION, CHANNEL_NAME_ORDER,\n CHANNEL_NAME_POSITION] and self._sim_flg:\n continue\n if not ws or not ws.keep_running:\n try:\n self._ws_list[channel] = self.__ws_subscribe(channel)\n except (TimeoutError or ConnectionError):\n if self._ws_list[channel] and not self._ws_list[channel\n ].sock.closed():\n self._ws_list[channel].close()\n self._ws_list[channel] = None\n for b in [b for b in self._bots if b.get_state() != EBotState.Running]:\n b.run()\n\n def __ws_subscribe(self, channel) ->(websocket.WebSocketApp or None):\n if channel == CHANNEL_NAME_TICKER:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TICKER, self.\n _symbol, lambda _, message: self.__on_ticker(json.loads(\n message)))\n elif channel == CHANNEL_NAME_TRADES:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TRADES, self.\n _symbol, lambda _, message: self.__update_trades(json.loads\n (message)))\n elif channel == CHANNEL_NAME_EXECUTION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_EXECUTION, lambda _, message: self.\n __on_execution_events(json.loads(message)))\n elif channel == CHANNEL_NAME_ORDER:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_ORDER, lambda _, message: self.\n __on_order_events(json.loads(message)))\n elif channel == CHANNEL_NAME_POSITION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_POSITION, lambda _, message: self.\n __on_position_events(json.loads(message)))\n else:\n return None\n print('[{}] Subscribe [{}]'.format(datetime.now(), channel))\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n return ws\n\n def __update_trades(self, trade):\n self._chart.update(trade)\n\n def __on_execution_events(self, data):\n for b in self._bots:\n b.on_execution_events(data)\n\n def __on_order_events(self, data):\n for b in self._bots:\n b.on_order_events(data)\n <function token>\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "<import token>\n<assignment token>\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n\n def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'):\n self._bots = bots\n self._chart = chart\n self._api = api\n self._sim_flg = sim_flg\n self._symbol = symbol\n self.__token = api.get_ws_access_token()\n self._ws_list = {CHANNEL_NAME_TICKER: None, CHANNEL_NAME_TRADES:\n None, CHANNEL_NAME_EXECUTION: None, CHANNEL_NAME_ORDER: None,\n CHANNEL_NAME_POSITION: None}\n self._connect()\n self.__setup_timer()\n <function token>\n\n def __setup_timer(self):\n schedule.every(5).seconds.do(self._connect)\n schedule.every(50).minutes.do(self._extend_token)\n <function token>\n\n def _connect(self):\n for channel, ws in self._ws_list.items():\n if channel in [CHANNEL_NAME_EXECUTION, CHANNEL_NAME_ORDER,\n CHANNEL_NAME_POSITION] and self._sim_flg:\n continue\n if not ws or not ws.keep_running:\n try:\n self._ws_list[channel] = self.__ws_subscribe(channel)\n except (TimeoutError or ConnectionError):\n if self._ws_list[channel] and not self._ws_list[channel\n ].sock.closed():\n self._ws_list[channel].close()\n self._ws_list[channel] = None\n for b in [b for b in self._bots if b.get_state() != EBotState.Running]:\n b.run()\n\n def __ws_subscribe(self, channel) ->(websocket.WebSocketApp or None):\n if channel == CHANNEL_NAME_TICKER:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TICKER, self.\n _symbol, lambda _, message: self.__on_ticker(json.loads(\n message)))\n elif channel == CHANNEL_NAME_TRADES:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TRADES, self.\n _symbol, lambda _, message: self.__update_trades(json.loads\n (message)))\n elif channel == CHANNEL_NAME_EXECUTION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_EXECUTION, lambda _, message: self.\n __on_execution_events(json.loads(message)))\n elif channel == CHANNEL_NAME_ORDER:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_ORDER, lambda _, message: self.\n __on_order_events(json.loads(message)))\n elif channel == CHANNEL_NAME_POSITION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_POSITION, lambda _, message: self.\n __on_position_events(json.loads(message)))\n else:\n return None\n print('[{}] Subscribe [{}]'.format(datetime.now(), channel))\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n return ws\n\n def __update_trades(self, trade):\n self._chart.update(trade)\n\n def __on_execution_events(self, data):\n for b in self._bots:\n b.on_execution_events(data)\n <function token>\n <function token>\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "<import token>\n<assignment token>\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n\n def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'):\n self._bots = bots\n self._chart = chart\n self._api = api\n self._sim_flg = sim_flg\n self._symbol = symbol\n self.__token = api.get_ws_access_token()\n self._ws_list = {CHANNEL_NAME_TICKER: None, CHANNEL_NAME_TRADES:\n None, CHANNEL_NAME_EXECUTION: None, CHANNEL_NAME_ORDER: None,\n CHANNEL_NAME_POSITION: None}\n self._connect()\n self.__setup_timer()\n <function token>\n\n def __setup_timer(self):\n schedule.every(5).seconds.do(self._connect)\n schedule.every(50).minutes.do(self._extend_token)\n <function token>\n <function token>\n\n def __ws_subscribe(self, channel) ->(websocket.WebSocketApp or None):\n if channel == CHANNEL_NAME_TICKER:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TICKER, self.\n _symbol, lambda _, message: self.__on_ticker(json.loads(\n message)))\n elif channel == CHANNEL_NAME_TRADES:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TRADES, self.\n _symbol, lambda _, message: self.__update_trades(json.loads\n (message)))\n elif channel == CHANNEL_NAME_EXECUTION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_EXECUTION, lambda _, message: self.\n __on_execution_events(json.loads(message)))\n elif channel == CHANNEL_NAME_ORDER:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_ORDER, lambda _, message: self.\n __on_order_events(json.loads(message)))\n elif channel == CHANNEL_NAME_POSITION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_POSITION, lambda _, message: self.\n __on_position_events(json.loads(message)))\n else:\n return None\n print('[{}] Subscribe [{}]'.format(datetime.now(), channel))\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n return ws\n\n def __update_trades(self, trade):\n self._chart.update(trade)\n\n def __on_execution_events(self, data):\n for b in self._bots:\n b.on_execution_events(data)\n <function token>\n <function token>\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "<import token>\n<assignment token>\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n\n def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'):\n self._bots = bots\n self._chart = chart\n self._api = api\n self._sim_flg = sim_flg\n self._symbol = symbol\n self.__token = api.get_ws_access_token()\n self._ws_list = {CHANNEL_NAME_TICKER: None, CHANNEL_NAME_TRADES:\n None, CHANNEL_NAME_EXECUTION: None, CHANNEL_NAME_ORDER: None,\n CHANNEL_NAME_POSITION: None}\n self._connect()\n self.__setup_timer()\n <function token>\n <function token>\n <function token>\n <function token>\n\n def __ws_subscribe(self, channel) ->(websocket.WebSocketApp or None):\n if channel == CHANNEL_NAME_TICKER:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TICKER, self.\n _symbol, lambda _, message: self.__on_ticker(json.loads(\n message)))\n elif channel == CHANNEL_NAME_TRADES:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TRADES, self.\n _symbol, lambda _, message: self.__update_trades(json.loads\n (message)))\n elif channel == CHANNEL_NAME_EXECUTION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_EXECUTION, lambda _, message: self.\n __on_execution_events(json.loads(message)))\n elif channel == CHANNEL_NAME_ORDER:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_ORDER, lambda _, message: self.\n __on_order_events(json.loads(message)))\n elif channel == CHANNEL_NAME_POSITION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_POSITION, lambda _, message: self.\n __on_position_events(json.loads(message)))\n else:\n return None\n print('[{}] Subscribe [{}]'.format(datetime.now(), channel))\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n return ws\n\n def __update_trades(self, trade):\n self._chart.update(trade)\n\n def __on_execution_events(self, data):\n for b in self._bots:\n b.on_execution_events(data)\n <function token>\n <function token>\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "<import token>\n<assignment token>\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n\n def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'):\n self._bots = bots\n self._chart = chart\n self._api = api\n self._sim_flg = sim_flg\n self._symbol = symbol\n self.__token = api.get_ws_access_token()\n self._ws_list = {CHANNEL_NAME_TICKER: None, CHANNEL_NAME_TRADES:\n None, CHANNEL_NAME_EXECUTION: None, CHANNEL_NAME_ORDER: None,\n CHANNEL_NAME_POSITION: None}\n self._connect()\n self.__setup_timer()\n <function token>\n <function token>\n <function token>\n <function token>\n\n def __ws_subscribe(self, channel) ->(websocket.WebSocketApp or None):\n if channel == CHANNEL_NAME_TICKER:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TICKER, self.\n _symbol, lambda _, message: self.__on_ticker(json.loads(\n message)))\n elif channel == CHANNEL_NAME_TRADES:\n ws = self._api.subscribe_public_ws(CHANNEL_NAME_TRADES, self.\n _symbol, lambda _, message: self.__update_trades(json.loads\n (message)))\n elif channel == CHANNEL_NAME_EXECUTION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_EXECUTION, lambda _, message: self.\n __on_execution_events(json.loads(message)))\n elif channel == CHANNEL_NAME_ORDER:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_ORDER, lambda _, message: self.\n __on_order_events(json.loads(message)))\n elif channel == CHANNEL_NAME_POSITION:\n ws = self._api.subscribe_private_ws(self.__token,\n CHANNEL_NAME_POSITION, lambda _, message: self.\n __on_position_events(json.loads(message)))\n else:\n return None\n print('[{}] Subscribe [{}]'.format(datetime.now(), channel))\n sleep(WEBSOCKET_CALL_WAIT_TIME)\n return ws\n <function token>\n\n def __on_execution_events(self, data):\n for b in self._bots:\n b.on_execution_events(data)\n <function token>\n <function token>\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "<import token>\n<assignment token>\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n\n def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'):\n self._bots = bots\n self._chart = chart\n self._api = api\n self._sim_flg = sim_flg\n self._symbol = symbol\n self.__token = api.get_ws_access_token()\n self._ws_list = {CHANNEL_NAME_TICKER: None, CHANNEL_NAME_TRADES:\n None, CHANNEL_NAME_EXECUTION: None, CHANNEL_NAME_ORDER: None,\n CHANNEL_NAME_POSITION: None}\n self._connect()\n self.__setup_timer()\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def __on_execution_events(self, data):\n for b in self._bots:\n b.on_execution_events(data)\n <function token>\n <function token>\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "<import token>\n<assignment token>\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n\n def __init__(self, bots, chart, api: GMO, sim_flg=True, symbol='BTC_JPY'):\n self._bots = bots\n self._chart = chart\n self._api = api\n self._sim_flg = sim_flg\n self._symbol = symbol\n self.__token = api.get_ws_access_token()\n self._ws_list = {CHANNEL_NAME_TICKER: None, CHANNEL_NAME_TRADES:\n None, CHANNEL_NAME_EXECUTION: None, CHANNEL_NAME_ORDER: None,\n CHANNEL_NAME_POSITION: None}\n self._connect()\n self.__setup_timer()\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "<import token>\n<assignment token>\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def __on_ticker(self, data):\n for b in self._bots:\n b.update_ticker(data)\n", "<import token>\n<assignment token>\n\n\nclass GMOWebsocketManager:\n _ws_list: dict[str, websocket.WebSocketApp or None]\n _bots: list[GMOCoinBot]\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<assignment token>\n<class token>\n" ]
false
98,941
ad66b917326ffebc3324b8c47b1f422c2ee5b266
""" Django settings for kippo project. Generated by 'django-admin startproject' using Django 2.0.7. For more information on this file, see https://docs.djangoproject.com/en/2.0/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.0/ref/settings/ """ import logging import os from distutils.util import strtobool from pathlib import PurePath from django.conf.locale.en import formats as en_formats from django.conf.locale.ja import formats as ja_formats logging.getLogger("requests").setLevel(logging.WARNING) logging.getLogger("urllib3").setLevel(logging.WARNING) logging.getLogger("botocore").setLevel(logging.WARNING) logging.getLogger("boto3").setLevel(logging.WARNING) logger = logging.getLogger(__name__) # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = PurePath(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = "(asz2@@dcx1zvj0j)ym_tz!z!!i#f$z5!hh_*stl@&e$sd#jya" # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False ALLOWED_HOSTS = ["*"] # Application definition INSTALLED_APPS = [ "social_django", "reversion", "bootstrap4", "common", # must be *before* "'common.apps.KippoAdminConfig', # 'django.contrib.admin'," in order to override admin template! "common.apps.KippoAdminConfig", # 'django.contrib.admin', "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sessions", "django.contrib.messages", "django.contrib.staticfiles", "accounts", # must be listed AFTER social_django and django.contrib.auth "projects", "tasks", "octocat", ] MIDDLEWARE = [ "django.middleware.security.SecurityMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "django.middleware.csrf.CsrfViewMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", "social_django.middleware.SocialAuthExceptionMiddleware", ] ROOT_URLCONF = "kippo.urls" TEMPLATES = [ { "BACKEND": "django.template.backends.django.DjangoTemplates", "DIRS": [], "APP_DIRS": True, "OPTIONS": { "context_processors": [ "django.template.context_processors.debug", "django.template.context_processors.request", "django.contrib.auth.context_processors.auth", "django.contrib.messages.context_processors.messages", "social_django.context_processors.backends", "social_django.context_processors.login_redirect", "kippo.context_processors.global_view_additional_context", # PROVIDES settings.URL_PREFIX to context ] }, } ] WSGI_APPLICATION = "kippo.wsgi.application" # Database # https://docs.djangoproject.com/en/2.0/ref/settings/#databases DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": os.path.join(BASE_DIR, "db.sqlite3")}} # Password validation # https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"}, {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, ] # Internationalization # https://docs.djangoproject.com/en/2.0/topics/i18n/ LANGUAGE_CODE = "en-us" USE_I18N = True USE_L10N = True USE_TZ = True TIME_ZONE = "Asia/Tokyo" # ISO 8601, ex: 2008-01-02T10:30:00.000123+02:00 "c" # NOTE: Will not show timezone offset if datetime object is "naive" ja_formats.DATETIME_FORMAT = "Y-m-d H:i:s (T)" # "c" ja_formats.DATE_FORMAT = "Y-m-d" en_formats.DATETIME_FORMAT = "Y-m-d H:i:s (T)" # "c" en_formats.DATE_FORMAT = "Y-m-d" DJANGO_LOG_LEVEL = "DEBUG" LOGGING = { "version": 1, "disable_existing_loggers": True, "formatters": {"standard": {"format": "{asctime} [{levelname:5}] ({name}) {funcName}: {message}", "style": "{"}}, "handlers": {"console": {"class": "logging.StreamHandler", "formatter": "standard"}}, "loggers": { "django": {"handlers": ["console"], "level": "INFO"}, # Change to DEBUG to see db queries "projects": {"handlers": ["console"], "level": DJANGO_LOG_LEVEL, "propagate": True}, "tasks": {"handlers": ["console"], "level": DJANGO_LOG_LEVEL, "propagate": True}, "accounts": {"handlers": ["console"], "level": DJANGO_LOG_LEVEL, "propagate": True}, "octocat": {"handlers": ["console"], "level": DJANGO_LOG_LEVEL, "propagate": True}, }, } STATIC_URL = "" STATIC_ROOT = "" BOOTSTRAP4 = { "include_jquery": True, # The Bootstrap base URL "base_url": "//cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/4.1.3/", } # -- for data backup/dump DUMPDATA_S3_BUCKETNAME = "kippo-dumpdata-bucket-123xyz" # Authentication # http://docs.djangoproject.com/en/dev/ref/settings/?from=olddocs#authentication-backends AUTHENTICATION_BACKENDS = ("social_core.backends.google.GoogleOAuth2", "django.contrib.auth.backends.ModelBackend") DEFAULT_URL_PREFIX = "" URL_PREFIX = os.getenv("URL_PREFIX", DEFAULT_URL_PREFIX) # needed to support a prefix on urls (for zappa deployment) SOCIAL_AUTH_JSONFIELD_ENABLED = True SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = os.environ.get("GOOGLE_OAUTH2_KEY", None) # client ID SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = os.environ.get("GOOGLE_OAUTH2_SECRET", None) # for integration of social_auth with admin # https://python-social-auth.readthedocs.io/en/latest/configuration/django.html SOCIAL_AUTH_ADMIN_USER_SEARCH_FIELDS = ["username", "first_name", "email"] # for identification of SOCIAL_AUTH_USER # http://python-social-auth.readthedocs.io/en/latest/configuration/settings.html#user-model SOCIAL_AUTH_USER_MODEL = "accounts.KippoUser" AUTH_USER_MODEL = SOCIAL_AUTH_USER_MODEL SOCIAL_AUTH_LOGIN_REDIRECT_URL = f"{URL_PREFIX}/admin/" GITHUB_MANAGER_USERNAME = "github-manager" CLI_MANAGER_USERNAME = "cli-manager" SITE_HEADER = "Kippo (Project Goal & Milestone Manager)" SITE_TITLE = SITE_HEADER DEFAULT_KIPPOPROJECT_CATEGORY = "poc" DEFAULT_KIPPOTASK_CATEGORY = "study" DEFAULT_TASK_DISPLAY_STATE = "in-progress" DEFAULT_KIPPORPOJECT_TARGET_DATE_DAYS = 90 TEST = False # internally defined users UNASSIGNED_USER_GITHUB_LOGIN_PREFIX = "unassigned" # for managing unassigned github tasks DEFAULT_GITHUB_ISSUE_LABEL_CATEGORY_PREFIX = "category:" DEFAULT_GITHUB_ISSUE_LABEL_ESTIMATE_PREFIX = "estimate:" GITHUB_MILESTONE_CLOSE_STATE = "closed" LOGIN_REDIRECT_URL = f"{URL_PREFIX}/admin/" # defaults to /accounts/profile/# HOST_URL = os.getenv("HOST_URL", "http://127.0.0.1") WEBHOOK_ENDPOINT = "/octocat/webhook/" WEBHOOK_URL = f"{HOST_URL}{URL_PREFIX}{WEBHOOK_ENDPOINT}" DISPLAY_ADMIN_AUTH_FOR_MODELBACKEND = True DAY_WORKHOURS = 7 DEFAULT_WEBHOOK_DELETE_DAYS = "30" WEBHOOK_DELETE_DAYS = int(os.getenv("WEBHOOK_DELETE_DAYS", DEFAULT_WEBHOOK_DELETE_DAYS)) PROJECTID_MAPPING_JSON_S3URI = os.getenv("PROJECTID_MAPPING_JSON_S3URI", None) # AWS/BOTO3 Configuration BOTO3_CONNECT_TIMEOUT = 15 AWS_DEFAULT_REGION = os.getenv("AWS_DEFAULT_REGION", "ap-northeast-1") DEFAULT_S3_SERVICE_ENDPOINT = f"https://s3.{AWS_DEFAULT_REGION}.amazonaws.com" DEFAULT_SQS_SERVICE_ENDPOINT = f"https://sqs.{AWS_DEFAULT_REGION}.amazonaws.com" AWS_SERVICE_ENDPOINTS = { "s3": os.getenv("S3_SERVICE_ENDPOINT", DEFAULT_S3_SERVICE_ENDPOINT), "sqs": os.getenv("SQS_SERVICE_ENDPOINT", DEFAULT_SQS_SERVICE_ENDPOINT), } logger.info(f"AWS_SERVICE_ENDPOINTS: {AWS_SERVICE_ENDPOINTS}") DEFAULT_FALLBACK_ESTIMATE_DAYS = "3" FALLBACK_ESTIMATE_DAYS = int(os.getenv("FALLBACK_ESTIMATE_DAYS", DEFAULT_FALLBACK_ESTIMATE_DAYS)) TWO_YEARS_IN_DAYS = 365 * 2 DEFAULT_PROJECTID_MAPPING_CLOSED_IGNORED_DAYS = str(TWO_YEARS_IN_DAYS) PROJECTID_MAPPING_CLOSED_IGNORED_DAYS = int(os.getenv("PROJECTID_MAPPING_CLOSED_IGNORED_DAYS", DEFAULT_PROJECTID_MAPPING_CLOSED_IGNORED_DAYS)) DEFAULT_INCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV = "False" INCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV = bool( strtobool(os.getenv("INCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV", DEFAULT_INCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV)) ) DEFAULT_PROJECT_EFFORT_EXCEED_PERCENTAGE = "15" PROJECT_EFFORT_EXCEED_PERCENTAGE = ( int(os.getenv("PROJECT_EFFORT_EXCEED_PERCENTAGE", DEFAULT_PROJECT_EFFORT_EXCEED_PERCENTAGE)) / 100 ) # convert to percentage DEFAULT_DELETE_DAYS = "60" DELETE_DAYS = int(os.getenv("DELETE_DAYS", DEFAULT_DELETE_DAYS)) DEFAULT_OCTOCAT_APPLY_DEFAULT_LABELSET = "False" OCTOCAT_APPLY_DEFAULT_LABELSET = bool(strtobool(os.getenv("OCTOCAT_APPLY_DEFAULT_LABELSET", DEFAULT_OCTOCAT_APPLY_DEFAULT_LABELSET))) DEFAULT_OCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE = "False" OCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE = bool( strtobool(os.getenv("OCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE", DEFAULT_OCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE)) )
[ "\"\"\"\nDjango settings for kippo project.\n\nGenerated by 'django-admin startproject' using Django 2.0.7.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/2.0/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/2.0/ref/settings/\n\"\"\"\nimport logging\nimport os\nfrom distutils.util import strtobool\nfrom pathlib import PurePath\n\nfrom django.conf.locale.en import formats as en_formats\nfrom django.conf.locale.ja import formats as ja_formats\n\nlogging.getLogger(\"requests\").setLevel(logging.WARNING)\nlogging.getLogger(\"urllib3\").setLevel(logging.WARNING)\nlogging.getLogger(\"botocore\").setLevel(logging.WARNING)\nlogging.getLogger(\"boto3\").setLevel(logging.WARNING)\n\nlogger = logging.getLogger(__name__)\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\nBASE_DIR = PurePath(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))\n\n\n# Quick-start development settings - unsuitable for production\n# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = \"(asz2@@dcx1zvj0j)ym_tz!z!!i#f$z5!hh_*stl@&e$sd#jya\"\n\n# SECURITY WARNING: don't run with debug turned on in production!\nDEBUG = False\n\nALLOWED_HOSTS = [\"*\"]\n\n\n# Application definition\n\nINSTALLED_APPS = [\n \"social_django\",\n \"reversion\",\n \"bootstrap4\",\n \"common\", # must be *before* \"'common.apps.KippoAdminConfig', # 'django.contrib.admin',\" in order to override admin template!\n \"common.apps.KippoAdminConfig\", # 'django.contrib.admin',\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.sessions\",\n \"django.contrib.messages\",\n \"django.contrib.staticfiles\",\n \"accounts\", # must be listed AFTER social_django and django.contrib.auth\n \"projects\",\n \"tasks\",\n \"octocat\",\n]\n\nMIDDLEWARE = [\n \"django.middleware.security.SecurityMiddleware\",\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n \"social_django.middleware.SocialAuthExceptionMiddleware\",\n]\n\nROOT_URLCONF = \"kippo.urls\"\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [],\n \"APP_DIRS\": True,\n \"OPTIONS\": {\n \"context_processors\": [\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n \"social_django.context_processors.backends\",\n \"social_django.context_processors.login_redirect\",\n \"kippo.context_processors.global_view_additional_context\", # PROVIDES settings.URL_PREFIX to context\n ]\n },\n }\n]\n\nWSGI_APPLICATION = \"kippo.wsgi.application\"\n\n\n# Database\n# https://docs.djangoproject.com/en/2.0/ref/settings/#databases\n\nDATABASES = {\"default\": {\"ENGINE\": \"django.db.backends.sqlite3\", \"NAME\": os.path.join(BASE_DIR, \"db.sqlite3\")}}\n\n\n# Password validation\n# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\"NAME\": \"django.contrib.auth.password_validation.UserAttributeSimilarityValidator\"},\n {\"NAME\": \"django.contrib.auth.password_validation.MinimumLengthValidator\"},\n {\"NAME\": \"django.contrib.auth.password_validation.CommonPasswordValidator\"},\n {\"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\"},\n]\n\n\n# Internationalization\n# https://docs.djangoproject.com/en/2.0/topics/i18n/\n\nLANGUAGE_CODE = \"en-us\"\n\nUSE_I18N = True\n\nUSE_L10N = True\n\nUSE_TZ = True\n\nTIME_ZONE = \"Asia/Tokyo\"\n# ISO 8601, ex: 2008-01-02T10:30:00.000123+02:00 \"c\"\n# NOTE: Will not show timezone offset if datetime object is \"naive\"\nja_formats.DATETIME_FORMAT = \"Y-m-d H:i:s (T)\" # \"c\"\nja_formats.DATE_FORMAT = \"Y-m-d\"\nen_formats.DATETIME_FORMAT = \"Y-m-d H:i:s (T)\" # \"c\"\nen_formats.DATE_FORMAT = \"Y-m-d\"\n\nDJANGO_LOG_LEVEL = \"DEBUG\"\nLOGGING = {\n \"version\": 1,\n \"disable_existing_loggers\": True,\n \"formatters\": {\"standard\": {\"format\": \"{asctime} [{levelname:5}] ({name}) {funcName}: {message}\", \"style\": \"{\"}},\n \"handlers\": {\"console\": {\"class\": \"logging.StreamHandler\", \"formatter\": \"standard\"}},\n \"loggers\": {\n \"django\": {\"handlers\": [\"console\"], \"level\": \"INFO\"}, # Change to DEBUG to see db queries\n \"projects\": {\"handlers\": [\"console\"], \"level\": DJANGO_LOG_LEVEL, \"propagate\": True},\n \"tasks\": {\"handlers\": [\"console\"], \"level\": DJANGO_LOG_LEVEL, \"propagate\": True},\n \"accounts\": {\"handlers\": [\"console\"], \"level\": DJANGO_LOG_LEVEL, \"propagate\": True},\n \"octocat\": {\"handlers\": [\"console\"], \"level\": DJANGO_LOG_LEVEL, \"propagate\": True},\n },\n}\n\n\nSTATIC_URL = \"\"\nSTATIC_ROOT = \"\"\n\nBOOTSTRAP4 = {\n \"include_jquery\": True,\n # The Bootstrap base URL\n \"base_url\": \"//cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/4.1.3/\",\n}\n\n# -- for data backup/dump\nDUMPDATA_S3_BUCKETNAME = \"kippo-dumpdata-bucket-123xyz\"\n\n# Authentication\n# http://docs.djangoproject.com/en/dev/ref/settings/?from=olddocs#authentication-backends\nAUTHENTICATION_BACKENDS = (\"social_core.backends.google.GoogleOAuth2\", \"django.contrib.auth.backends.ModelBackend\")\n\nDEFAULT_URL_PREFIX = \"\"\nURL_PREFIX = os.getenv(\"URL_PREFIX\", DEFAULT_URL_PREFIX) # needed to support a prefix on urls (for zappa deployment)\n\nSOCIAL_AUTH_JSONFIELD_ENABLED = True\nSOCIAL_AUTH_GOOGLE_OAUTH2_KEY = os.environ.get(\"GOOGLE_OAUTH2_KEY\", None) # client ID\nSOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = os.environ.get(\"GOOGLE_OAUTH2_SECRET\", None)\n\n# for integration of social_auth with admin\n# https://python-social-auth.readthedocs.io/en/latest/configuration/django.html\nSOCIAL_AUTH_ADMIN_USER_SEARCH_FIELDS = [\"username\", \"first_name\", \"email\"]\n\n# for identification of SOCIAL_AUTH_USER\n# http://python-social-auth.readthedocs.io/en/latest/configuration/settings.html#user-model\nSOCIAL_AUTH_USER_MODEL = \"accounts.KippoUser\"\nAUTH_USER_MODEL = SOCIAL_AUTH_USER_MODEL\nSOCIAL_AUTH_LOGIN_REDIRECT_URL = f\"{URL_PREFIX}/admin/\"\n\nGITHUB_MANAGER_USERNAME = \"github-manager\"\nCLI_MANAGER_USERNAME = \"cli-manager\"\n\nSITE_HEADER = \"Kippo (Project Goal & Milestone Manager)\"\nSITE_TITLE = SITE_HEADER\n\nDEFAULT_KIPPOPROJECT_CATEGORY = \"poc\"\nDEFAULT_KIPPOTASK_CATEGORY = \"study\"\nDEFAULT_TASK_DISPLAY_STATE = \"in-progress\"\nDEFAULT_KIPPORPOJECT_TARGET_DATE_DAYS = 90\n\nTEST = False\n\n# internally defined users\nUNASSIGNED_USER_GITHUB_LOGIN_PREFIX = \"unassigned\" # for managing unassigned github tasks\nDEFAULT_GITHUB_ISSUE_LABEL_CATEGORY_PREFIX = \"category:\"\nDEFAULT_GITHUB_ISSUE_LABEL_ESTIMATE_PREFIX = \"estimate:\"\nGITHUB_MILESTONE_CLOSE_STATE = \"closed\"\n\nLOGIN_REDIRECT_URL = f\"{URL_PREFIX}/admin/\" # defaults to /accounts/profile/#\nHOST_URL = os.getenv(\"HOST_URL\", \"http://127.0.0.1\")\nWEBHOOK_ENDPOINT = \"/octocat/webhook/\"\nWEBHOOK_URL = f\"{HOST_URL}{URL_PREFIX}{WEBHOOK_ENDPOINT}\"\nDISPLAY_ADMIN_AUTH_FOR_MODELBACKEND = True\n\nDAY_WORKHOURS = 7\n\nDEFAULT_WEBHOOK_DELETE_DAYS = \"30\"\nWEBHOOK_DELETE_DAYS = int(os.getenv(\"WEBHOOK_DELETE_DAYS\", DEFAULT_WEBHOOK_DELETE_DAYS))\n\nPROJECTID_MAPPING_JSON_S3URI = os.getenv(\"PROJECTID_MAPPING_JSON_S3URI\", None)\n\n# AWS/BOTO3 Configuration\nBOTO3_CONNECT_TIMEOUT = 15\nAWS_DEFAULT_REGION = os.getenv(\"AWS_DEFAULT_REGION\", \"ap-northeast-1\")\n\nDEFAULT_S3_SERVICE_ENDPOINT = f\"https://s3.{AWS_DEFAULT_REGION}.amazonaws.com\"\nDEFAULT_SQS_SERVICE_ENDPOINT = f\"https://sqs.{AWS_DEFAULT_REGION}.amazonaws.com\"\n\nAWS_SERVICE_ENDPOINTS = {\n \"s3\": os.getenv(\"S3_SERVICE_ENDPOINT\", DEFAULT_S3_SERVICE_ENDPOINT),\n \"sqs\": os.getenv(\"SQS_SERVICE_ENDPOINT\", DEFAULT_SQS_SERVICE_ENDPOINT),\n}\nlogger.info(f\"AWS_SERVICE_ENDPOINTS: {AWS_SERVICE_ENDPOINTS}\")\n\nDEFAULT_FALLBACK_ESTIMATE_DAYS = \"3\"\nFALLBACK_ESTIMATE_DAYS = int(os.getenv(\"FALLBACK_ESTIMATE_DAYS\", DEFAULT_FALLBACK_ESTIMATE_DAYS))\n\nTWO_YEARS_IN_DAYS = 365 * 2\nDEFAULT_PROJECTID_MAPPING_CLOSED_IGNORED_DAYS = str(TWO_YEARS_IN_DAYS)\nPROJECTID_MAPPING_CLOSED_IGNORED_DAYS = int(os.getenv(\"PROJECTID_MAPPING_CLOSED_IGNORED_DAYS\", DEFAULT_PROJECTID_MAPPING_CLOSED_IGNORED_DAYS))\n\n\nDEFAULT_INCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV = \"False\"\nINCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV = bool(\n strtobool(os.getenv(\"INCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV\", DEFAULT_INCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV))\n)\n\nDEFAULT_PROJECT_EFFORT_EXCEED_PERCENTAGE = \"15\"\nPROJECT_EFFORT_EXCEED_PERCENTAGE = (\n int(os.getenv(\"PROJECT_EFFORT_EXCEED_PERCENTAGE\", DEFAULT_PROJECT_EFFORT_EXCEED_PERCENTAGE)) / 100\n) # convert to percentage\n\nDEFAULT_DELETE_DAYS = \"60\"\nDELETE_DAYS = int(os.getenv(\"DELETE_DAYS\", DEFAULT_DELETE_DAYS))\n\nDEFAULT_OCTOCAT_APPLY_DEFAULT_LABELSET = \"False\"\nOCTOCAT_APPLY_DEFAULT_LABELSET = bool(strtobool(os.getenv(\"OCTOCAT_APPLY_DEFAULT_LABELSET\", DEFAULT_OCTOCAT_APPLY_DEFAULT_LABELSET)))\n\nDEFAULT_OCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE = \"False\"\nOCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE = bool(\n strtobool(os.getenv(\"OCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE\", DEFAULT_OCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE))\n)\n", "<docstring token>\nimport logging\nimport os\nfrom distutils.util import strtobool\nfrom pathlib import PurePath\nfrom django.conf.locale.en import formats as en_formats\nfrom django.conf.locale.ja import formats as ja_formats\nlogging.getLogger('requests').setLevel(logging.WARNING)\nlogging.getLogger('urllib3').setLevel(logging.WARNING)\nlogging.getLogger('botocore').setLevel(logging.WARNING)\nlogging.getLogger('boto3').setLevel(logging.WARNING)\nlogger = logging.getLogger(__name__)\nBASE_DIR = PurePath(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n )\nSECRET_KEY = '(asz2@@dcx1zvj0j)ym_tz!z!!i#f$z5!hh_*stl@&e$sd#jya'\nDEBUG = False\nALLOWED_HOSTS = ['*']\nINSTALLED_APPS = ['social_django', 'reversion', 'bootstrap4', 'common',\n 'common.apps.KippoAdminConfig', 'django.contrib.auth',\n 'django.contrib.contenttypes', 'django.contrib.sessions',\n 'django.contrib.messages', 'django.contrib.staticfiles', 'accounts',\n 'projects', 'tasks', 'octocat']\nMIDDLEWARE = ['django.middleware.security.SecurityMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n 'social_django.middleware.SocialAuthExceptionMiddleware']\nROOT_URLCONF = 'kippo.urls'\nTEMPLATES = [{'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': {'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n 'social_django.context_processors.backends',\n 'social_django.context_processors.login_redirect',\n 'kippo.context_processors.global_view_additional_context']}}]\nWSGI_APPLICATION = 'kippo.wsgi.application'\nDATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': os\n .path.join(BASE_DIR, 'db.sqlite3')}}\nAUTH_PASSWORD_VALIDATORS = [{'NAME':\n 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'\n }, {'NAME':\n 'django.contrib.auth.password_validation.MinimumLengthValidator'}, {\n 'NAME':\n 'django.contrib.auth.password_validation.CommonPasswordValidator'}, {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}\n ]\nLANGUAGE_CODE = 'en-us'\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\nTIME_ZONE = 'Asia/Tokyo'\nja_formats.DATETIME_FORMAT = 'Y-m-d H:i:s (T)'\nja_formats.DATE_FORMAT = 'Y-m-d'\nen_formats.DATETIME_FORMAT = 'Y-m-d H:i:s (T)'\nen_formats.DATE_FORMAT = 'Y-m-d'\nDJANGO_LOG_LEVEL = 'DEBUG'\nLOGGING = {'version': 1, 'disable_existing_loggers': True, 'formatters': {\n 'standard': {'format':\n '{asctime} [{levelname:5}] ({name}) {funcName}: {message}', 'style':\n '{'}}, 'handlers': {'console': {'class': 'logging.StreamHandler',\n 'formatter': 'standard'}}, 'loggers': {'django': {'handlers': [\n 'console'], 'level': 'INFO'}, 'projects': {'handlers': ['console'],\n 'level': DJANGO_LOG_LEVEL, 'propagate': True}, 'tasks': {'handlers': [\n 'console'], 'level': DJANGO_LOG_LEVEL, 'propagate': True}, 'accounts':\n {'handlers': ['console'], 'level': DJANGO_LOG_LEVEL, 'propagate': True},\n 'octocat': {'handlers': ['console'], 'level': DJANGO_LOG_LEVEL,\n 'propagate': True}}}\nSTATIC_URL = ''\nSTATIC_ROOT = ''\nBOOTSTRAP4 = {'include_jquery': True, 'base_url':\n '//cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/4.1.3/'}\nDUMPDATA_S3_BUCKETNAME = 'kippo-dumpdata-bucket-123xyz'\nAUTHENTICATION_BACKENDS = ('social_core.backends.google.GoogleOAuth2',\n 'django.contrib.auth.backends.ModelBackend')\nDEFAULT_URL_PREFIX = ''\nURL_PREFIX = os.getenv('URL_PREFIX', DEFAULT_URL_PREFIX)\nSOCIAL_AUTH_JSONFIELD_ENABLED = True\nSOCIAL_AUTH_GOOGLE_OAUTH2_KEY = os.environ.get('GOOGLE_OAUTH2_KEY', None)\nSOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = os.environ.get('GOOGLE_OAUTH2_SECRET', None)\nSOCIAL_AUTH_ADMIN_USER_SEARCH_FIELDS = ['username', 'first_name', 'email']\nSOCIAL_AUTH_USER_MODEL = 'accounts.KippoUser'\nAUTH_USER_MODEL = SOCIAL_AUTH_USER_MODEL\nSOCIAL_AUTH_LOGIN_REDIRECT_URL = f'{URL_PREFIX}/admin/'\nGITHUB_MANAGER_USERNAME = 'github-manager'\nCLI_MANAGER_USERNAME = 'cli-manager'\nSITE_HEADER = 'Kippo (Project Goal & Milestone Manager)'\nSITE_TITLE = SITE_HEADER\nDEFAULT_KIPPOPROJECT_CATEGORY = 'poc'\nDEFAULT_KIPPOTASK_CATEGORY = 'study'\nDEFAULT_TASK_DISPLAY_STATE = 'in-progress'\nDEFAULT_KIPPORPOJECT_TARGET_DATE_DAYS = 90\nTEST = False\nUNASSIGNED_USER_GITHUB_LOGIN_PREFIX = 'unassigned'\nDEFAULT_GITHUB_ISSUE_LABEL_CATEGORY_PREFIX = 'category:'\nDEFAULT_GITHUB_ISSUE_LABEL_ESTIMATE_PREFIX = 'estimate:'\nGITHUB_MILESTONE_CLOSE_STATE = 'closed'\nLOGIN_REDIRECT_URL = f'{URL_PREFIX}/admin/'\nHOST_URL = os.getenv('HOST_URL', 'http://127.0.0.1')\nWEBHOOK_ENDPOINT = '/octocat/webhook/'\nWEBHOOK_URL = f'{HOST_URL}{URL_PREFIX}{WEBHOOK_ENDPOINT}'\nDISPLAY_ADMIN_AUTH_FOR_MODELBACKEND = True\nDAY_WORKHOURS = 7\nDEFAULT_WEBHOOK_DELETE_DAYS = '30'\nWEBHOOK_DELETE_DAYS = int(os.getenv('WEBHOOK_DELETE_DAYS',\n DEFAULT_WEBHOOK_DELETE_DAYS))\nPROJECTID_MAPPING_JSON_S3URI = os.getenv('PROJECTID_MAPPING_JSON_S3URI', None)\nBOTO3_CONNECT_TIMEOUT = 15\nAWS_DEFAULT_REGION = os.getenv('AWS_DEFAULT_REGION', 'ap-northeast-1')\nDEFAULT_S3_SERVICE_ENDPOINT = f'https://s3.{AWS_DEFAULT_REGION}.amazonaws.com'\nDEFAULT_SQS_SERVICE_ENDPOINT = (\n f'https://sqs.{AWS_DEFAULT_REGION}.amazonaws.com')\nAWS_SERVICE_ENDPOINTS = {'s3': os.getenv('S3_SERVICE_ENDPOINT',\n DEFAULT_S3_SERVICE_ENDPOINT), 'sqs': os.getenv('SQS_SERVICE_ENDPOINT',\n DEFAULT_SQS_SERVICE_ENDPOINT)}\nlogger.info(f'AWS_SERVICE_ENDPOINTS: {AWS_SERVICE_ENDPOINTS}')\nDEFAULT_FALLBACK_ESTIMATE_DAYS = '3'\nFALLBACK_ESTIMATE_DAYS = int(os.getenv('FALLBACK_ESTIMATE_DAYS',\n DEFAULT_FALLBACK_ESTIMATE_DAYS))\nTWO_YEARS_IN_DAYS = 365 * 2\nDEFAULT_PROJECTID_MAPPING_CLOSED_IGNORED_DAYS = str(TWO_YEARS_IN_DAYS)\nPROJECTID_MAPPING_CLOSED_IGNORED_DAYS = int(os.getenv(\n 'PROJECTID_MAPPING_CLOSED_IGNORED_DAYS',\n DEFAULT_PROJECTID_MAPPING_CLOSED_IGNORED_DAYS))\nDEFAULT_INCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV = 'False'\nINCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV = bool(strtobool(os.getenv(\n 'INCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV',\n DEFAULT_INCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV)))\nDEFAULT_PROJECT_EFFORT_EXCEED_PERCENTAGE = '15'\nPROJECT_EFFORT_EXCEED_PERCENTAGE = int(os.getenv(\n 'PROJECT_EFFORT_EXCEED_PERCENTAGE',\n DEFAULT_PROJECT_EFFORT_EXCEED_PERCENTAGE)) / 100\nDEFAULT_DELETE_DAYS = '60'\nDELETE_DAYS = int(os.getenv('DELETE_DAYS', DEFAULT_DELETE_DAYS))\nDEFAULT_OCTOCAT_APPLY_DEFAULT_LABELSET = 'False'\nOCTOCAT_APPLY_DEFAULT_LABELSET = bool(strtobool(os.getenv(\n 'OCTOCAT_APPLY_DEFAULT_LABELSET', DEFAULT_OCTOCAT_APPLY_DEFAULT_LABELSET)))\nDEFAULT_OCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE = 'False'\nOCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE = bool(strtobool(os.getenv(\n 'OCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE',\n DEFAULT_OCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE)))\n", "<docstring token>\n<import token>\nlogging.getLogger('requests').setLevel(logging.WARNING)\nlogging.getLogger('urllib3').setLevel(logging.WARNING)\nlogging.getLogger('botocore').setLevel(logging.WARNING)\nlogging.getLogger('boto3').setLevel(logging.WARNING)\nlogger = logging.getLogger(__name__)\nBASE_DIR = PurePath(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n )\nSECRET_KEY = '(asz2@@dcx1zvj0j)ym_tz!z!!i#f$z5!hh_*stl@&e$sd#jya'\nDEBUG = False\nALLOWED_HOSTS = ['*']\nINSTALLED_APPS = ['social_django', 'reversion', 'bootstrap4', 'common',\n 'common.apps.KippoAdminConfig', 'django.contrib.auth',\n 'django.contrib.contenttypes', 'django.contrib.sessions',\n 'django.contrib.messages', 'django.contrib.staticfiles', 'accounts',\n 'projects', 'tasks', 'octocat']\nMIDDLEWARE = ['django.middleware.security.SecurityMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n 'social_django.middleware.SocialAuthExceptionMiddleware']\nROOT_URLCONF = 'kippo.urls'\nTEMPLATES = [{'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': {'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n 'social_django.context_processors.backends',\n 'social_django.context_processors.login_redirect',\n 'kippo.context_processors.global_view_additional_context']}}]\nWSGI_APPLICATION = 'kippo.wsgi.application'\nDATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': os\n .path.join(BASE_DIR, 'db.sqlite3')}}\nAUTH_PASSWORD_VALIDATORS = [{'NAME':\n 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'\n }, {'NAME':\n 'django.contrib.auth.password_validation.MinimumLengthValidator'}, {\n 'NAME':\n 'django.contrib.auth.password_validation.CommonPasswordValidator'}, {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}\n ]\nLANGUAGE_CODE = 'en-us'\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\nTIME_ZONE = 'Asia/Tokyo'\nja_formats.DATETIME_FORMAT = 'Y-m-d H:i:s (T)'\nja_formats.DATE_FORMAT = 'Y-m-d'\nen_formats.DATETIME_FORMAT = 'Y-m-d H:i:s (T)'\nen_formats.DATE_FORMAT = 'Y-m-d'\nDJANGO_LOG_LEVEL = 'DEBUG'\nLOGGING = {'version': 1, 'disable_existing_loggers': True, 'formatters': {\n 'standard': {'format':\n '{asctime} [{levelname:5}] ({name}) {funcName}: {message}', 'style':\n '{'}}, 'handlers': {'console': {'class': 'logging.StreamHandler',\n 'formatter': 'standard'}}, 'loggers': {'django': {'handlers': [\n 'console'], 'level': 'INFO'}, 'projects': {'handlers': ['console'],\n 'level': DJANGO_LOG_LEVEL, 'propagate': True}, 'tasks': {'handlers': [\n 'console'], 'level': DJANGO_LOG_LEVEL, 'propagate': True}, 'accounts':\n {'handlers': ['console'], 'level': DJANGO_LOG_LEVEL, 'propagate': True},\n 'octocat': {'handlers': ['console'], 'level': DJANGO_LOG_LEVEL,\n 'propagate': True}}}\nSTATIC_URL = ''\nSTATIC_ROOT = ''\nBOOTSTRAP4 = {'include_jquery': True, 'base_url':\n '//cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/4.1.3/'}\nDUMPDATA_S3_BUCKETNAME = 'kippo-dumpdata-bucket-123xyz'\nAUTHENTICATION_BACKENDS = ('social_core.backends.google.GoogleOAuth2',\n 'django.contrib.auth.backends.ModelBackend')\nDEFAULT_URL_PREFIX = ''\nURL_PREFIX = os.getenv('URL_PREFIX', DEFAULT_URL_PREFIX)\nSOCIAL_AUTH_JSONFIELD_ENABLED = True\nSOCIAL_AUTH_GOOGLE_OAUTH2_KEY = os.environ.get('GOOGLE_OAUTH2_KEY', None)\nSOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = os.environ.get('GOOGLE_OAUTH2_SECRET', None)\nSOCIAL_AUTH_ADMIN_USER_SEARCH_FIELDS = ['username', 'first_name', 'email']\nSOCIAL_AUTH_USER_MODEL = 'accounts.KippoUser'\nAUTH_USER_MODEL = SOCIAL_AUTH_USER_MODEL\nSOCIAL_AUTH_LOGIN_REDIRECT_URL = f'{URL_PREFIX}/admin/'\nGITHUB_MANAGER_USERNAME = 'github-manager'\nCLI_MANAGER_USERNAME = 'cli-manager'\nSITE_HEADER = 'Kippo (Project Goal & Milestone Manager)'\nSITE_TITLE = SITE_HEADER\nDEFAULT_KIPPOPROJECT_CATEGORY = 'poc'\nDEFAULT_KIPPOTASK_CATEGORY = 'study'\nDEFAULT_TASK_DISPLAY_STATE = 'in-progress'\nDEFAULT_KIPPORPOJECT_TARGET_DATE_DAYS = 90\nTEST = False\nUNASSIGNED_USER_GITHUB_LOGIN_PREFIX = 'unassigned'\nDEFAULT_GITHUB_ISSUE_LABEL_CATEGORY_PREFIX = 'category:'\nDEFAULT_GITHUB_ISSUE_LABEL_ESTIMATE_PREFIX = 'estimate:'\nGITHUB_MILESTONE_CLOSE_STATE = 'closed'\nLOGIN_REDIRECT_URL = f'{URL_PREFIX}/admin/'\nHOST_URL = os.getenv('HOST_URL', 'http://127.0.0.1')\nWEBHOOK_ENDPOINT = '/octocat/webhook/'\nWEBHOOK_URL = f'{HOST_URL}{URL_PREFIX}{WEBHOOK_ENDPOINT}'\nDISPLAY_ADMIN_AUTH_FOR_MODELBACKEND = True\nDAY_WORKHOURS = 7\nDEFAULT_WEBHOOK_DELETE_DAYS = '30'\nWEBHOOK_DELETE_DAYS = int(os.getenv('WEBHOOK_DELETE_DAYS',\n DEFAULT_WEBHOOK_DELETE_DAYS))\nPROJECTID_MAPPING_JSON_S3URI = os.getenv('PROJECTID_MAPPING_JSON_S3URI', None)\nBOTO3_CONNECT_TIMEOUT = 15\nAWS_DEFAULT_REGION = os.getenv('AWS_DEFAULT_REGION', 'ap-northeast-1')\nDEFAULT_S3_SERVICE_ENDPOINT = f'https://s3.{AWS_DEFAULT_REGION}.amazonaws.com'\nDEFAULT_SQS_SERVICE_ENDPOINT = (\n f'https://sqs.{AWS_DEFAULT_REGION}.amazonaws.com')\nAWS_SERVICE_ENDPOINTS = {'s3': os.getenv('S3_SERVICE_ENDPOINT',\n DEFAULT_S3_SERVICE_ENDPOINT), 'sqs': os.getenv('SQS_SERVICE_ENDPOINT',\n DEFAULT_SQS_SERVICE_ENDPOINT)}\nlogger.info(f'AWS_SERVICE_ENDPOINTS: {AWS_SERVICE_ENDPOINTS}')\nDEFAULT_FALLBACK_ESTIMATE_DAYS = '3'\nFALLBACK_ESTIMATE_DAYS = int(os.getenv('FALLBACK_ESTIMATE_DAYS',\n DEFAULT_FALLBACK_ESTIMATE_DAYS))\nTWO_YEARS_IN_DAYS = 365 * 2\nDEFAULT_PROJECTID_MAPPING_CLOSED_IGNORED_DAYS = str(TWO_YEARS_IN_DAYS)\nPROJECTID_MAPPING_CLOSED_IGNORED_DAYS = int(os.getenv(\n 'PROJECTID_MAPPING_CLOSED_IGNORED_DAYS',\n DEFAULT_PROJECTID_MAPPING_CLOSED_IGNORED_DAYS))\nDEFAULT_INCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV = 'False'\nINCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV = bool(strtobool(os.getenv(\n 'INCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV',\n DEFAULT_INCLUDE_PERSIONALHOLIDAYS_IN_WORKEFFORT_CSV)))\nDEFAULT_PROJECT_EFFORT_EXCEED_PERCENTAGE = '15'\nPROJECT_EFFORT_EXCEED_PERCENTAGE = int(os.getenv(\n 'PROJECT_EFFORT_EXCEED_PERCENTAGE',\n DEFAULT_PROJECT_EFFORT_EXCEED_PERCENTAGE)) / 100\nDEFAULT_DELETE_DAYS = '60'\nDELETE_DAYS = int(os.getenv('DELETE_DAYS', DEFAULT_DELETE_DAYS))\nDEFAULT_OCTOCAT_APPLY_DEFAULT_LABELSET = 'False'\nOCTOCAT_APPLY_DEFAULT_LABELSET = bool(strtobool(os.getenv(\n 'OCTOCAT_APPLY_DEFAULT_LABELSET', DEFAULT_OCTOCAT_APPLY_DEFAULT_LABELSET)))\nDEFAULT_OCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE = 'False'\nOCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE = bool(strtobool(os.getenv(\n 'OCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE',\n DEFAULT_OCTOCAT_DELETE_EXISTING_LABELS_ON_UPDATE)))\n", "<docstring token>\n<import token>\nlogging.getLogger('requests').setLevel(logging.WARNING)\nlogging.getLogger('urllib3').setLevel(logging.WARNING)\nlogging.getLogger('botocore').setLevel(logging.WARNING)\nlogging.getLogger('boto3').setLevel(logging.WARNING)\n<assignment token>\nlogger.info(f'AWS_SERVICE_ENDPOINTS: {AWS_SERVICE_ENDPOINTS}')\n<assignment token>\n", "<docstring token>\n<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n" ]
false
98,942
61131da8bf862773863145b9893c6ba6529702fd
# -*- coding: utf-8 -*- """Assignment1(18-06-20).ipynb Automatically generated by Colaboratory. Original file is located at https://colab.research.google.com/drive/1BXwSEQo63xzrqmfarEj4AKMQtb-6-Ksx write a python to design simple calculator for the operator """ a=int(input("enter the value of a")) b=int(input("enter the value of b")) print("sum of two numbers is" ,a+b) print("difference of two numbers",a-b) print("product of the two numbers",a*b) print("division of two numbers is",a/b) print("modules of two numbers is",a%b) print("exponent of the number",a**b) print("floor division",a//b) """write the python for simple interest""" p=int(input("enter the value of p")) t=int(input("enter the time")) r=int(input("enter the r")) print("simple interest is",(p*t*r)/100) p='principal value' t='time' r='interest rate' """write the python program to calculate area of circle""" r="radius" r=int(input("enter the value of r")) print("area of the circle",3.14*r**2) """write the python program for to calculate area of triangle""" b='breadth' l='length' b=int(input("enter the value of b")) l=int(input("enter the value of l")) print("area of triangle is",0.5*b*1) """write the python programe to temperaturein celsius to fahrenheit""" c="celsius temperature" c=float(input("enter thevalue of c")) fahrenheit=(1.8*c+32) print("celsius to fahrenheit is",fahrenheit) """write a python program to calcualate the area of rectangle""" l="length" b="breadth" l=int(input("enter the value of l")) b=int(input("enter the value of b")) print("area of rectangle is",l*b) """write a python program to calcualate perimeter osf square""" a="side of square" a=int(input("enter the value of a")) print("perimeter of square is",4*a) """write a python program to calculate circumference of a circle""" r="radius of circle" r=int(input("enter the value of r")) print("circumferece of circle is",2*3.14*r) """write a python program to swap""" x=int(input("enter the value of x")) y=int(input("enter the value of y")) temp='x' x='y' y='temp' print("the value of x after swapping",x) print("the value of y after swapping",y)
[ "# -*- coding: utf-8 -*-\n\"\"\"Assignment1(18-06-20).ipynb\n\nAutomatically generated by Colaboratory.\n\nOriginal file is located at\n https://colab.research.google.com/drive/1BXwSEQo63xzrqmfarEj4AKMQtb-6-Ksx\n\nwrite a python to design simple calculator for the operator\n\"\"\"\n\na=int(input(\"enter the value of a\"))\nb=int(input(\"enter the value of b\"))\nprint(\"sum of two numbers is\" ,a+b)\nprint(\"difference of two numbers\",a-b)\nprint(\"product of the two numbers\",a*b)\nprint(\"division of two numbers is\",a/b)\nprint(\"modules of two numbers is\",a%b)\nprint(\"exponent of the number\",a**b)\nprint(\"floor division\",a//b)\n\n\"\"\"write the python for simple interest\"\"\"\n\np=int(input(\"enter the value of p\"))\nt=int(input(\"enter the time\"))\nr=int(input(\"enter the r\"))\nprint(\"simple interest is\",(p*t*r)/100)\np='principal value'\nt='time'\nr='interest rate'\n\n\"\"\"write the python program to calculate area of circle\"\"\"\n\nr=\"radius\"\nr=int(input(\"enter the value of r\"))\nprint(\"area of the circle\",3.14*r**2)\n\n\"\"\"write the python program for to calculate area of triangle\"\"\"\n\nb='breadth'\nl='length'\nb=int(input(\"enter the value of b\"))\nl=int(input(\"enter the value of l\"))\nprint(\"area of triangle is\",0.5*b*1)\n\n\"\"\"write the python programe to temperaturein celsius to fahrenheit\"\"\"\n\nc=\"celsius temperature\"\nc=float(input(\"enter thevalue of c\"))\nfahrenheit=(1.8*c+32)\nprint(\"celsius to fahrenheit is\",fahrenheit)\n\n\"\"\"write a python program to calcualate the area of rectangle\"\"\"\n\nl=\"length\"\nb=\"breadth\"\nl=int(input(\"enter the value of l\"))\nb=int(input(\"enter the value of b\"))\nprint(\"area of rectangle is\",l*b)\n\n\"\"\"write a python program to calcualate perimeter osf square\"\"\"\n\na=\"side of square\"\na=int(input(\"enter the value of a\"))\nprint(\"perimeter of square is\",4*a)\n\n\"\"\"write a python program to calculate circumference of a circle\"\"\"\n\nr=\"radius of circle\"\nr=int(input(\"enter the value of r\"))\nprint(\"circumferece of circle is\",2*3.14*r)\n\n\"\"\"write a python program to swap\"\"\"\n\nx=int(input(\"enter the value of x\"))\ny=int(input(\"enter the value of y\"))\ntemp='x'\nx='y'\ny='temp'\nprint(\"the value of x after swapping\",x)\nprint(\"the value of y after swapping\",y)\n\n", "<docstring token>\na = int(input('enter the value of a'))\nb = int(input('enter the value of b'))\nprint('sum of two numbers is', a + b)\nprint('difference of two numbers', a - b)\nprint('product of the two numbers', a * b)\nprint('division of two numbers is', a / b)\nprint('modules of two numbers is', a % b)\nprint('exponent of the number', a ** b)\nprint('floor division', a // b)\n<docstring token>\np = int(input('enter the value of p'))\nt = int(input('enter the time'))\nr = int(input('enter the r'))\nprint('simple interest is', p * t * r / 100)\np = 'principal value'\nt = 'time'\nr = 'interest rate'\n<docstring token>\nr = 'radius'\nr = int(input('enter the value of r'))\nprint('area of the circle', 3.14 * r ** 2)\n<docstring token>\nb = 'breadth'\nl = 'length'\nb = int(input('enter the value of b'))\nl = int(input('enter the value of l'))\nprint('area of triangle is', 0.5 * b * 1)\n<docstring token>\nc = 'celsius temperature'\nc = float(input('enter thevalue of c'))\nfahrenheit = 1.8 * c + 32\nprint('celsius to fahrenheit is', fahrenheit)\n<docstring token>\nl = 'length'\nb = 'breadth'\nl = int(input('enter the value of l'))\nb = int(input('enter the value of b'))\nprint('area of rectangle is', l * b)\n<docstring token>\na = 'side of square'\na = int(input('enter the value of a'))\nprint('perimeter of square is', 4 * a)\n<docstring token>\nr = 'radius of circle'\nr = int(input('enter the value of r'))\nprint('circumferece of circle is', 2 * 3.14 * r)\n<docstring token>\nx = int(input('enter the value of x'))\ny = int(input('enter the value of y'))\ntemp = 'x'\nx = 'y'\ny = 'temp'\nprint('the value of x after swapping', x)\nprint('the value of y after swapping', y)\n", "<docstring token>\n<assignment token>\nprint('sum of two numbers is', a + b)\nprint('difference of two numbers', a - b)\nprint('product of the two numbers', a * b)\nprint('division of two numbers is', a / b)\nprint('modules of two numbers is', a % b)\nprint('exponent of the number', a ** b)\nprint('floor division', a // b)\n<docstring token>\n<assignment token>\nprint('simple interest is', p * t * r / 100)\n<assignment token>\n<docstring token>\n<assignment token>\nprint('area of the circle', 3.14 * r ** 2)\n<docstring token>\n<assignment token>\nprint('area of triangle is', 0.5 * b * 1)\n<docstring token>\n<assignment token>\nprint('celsius to fahrenheit is', fahrenheit)\n<docstring token>\n<assignment token>\nprint('area of rectangle is', l * b)\n<docstring token>\n<assignment token>\nprint('perimeter of square is', 4 * a)\n<docstring token>\n<assignment token>\nprint('circumferece of circle is', 2 * 3.14 * r)\n<docstring token>\n<assignment token>\nprint('the value of x after swapping', x)\nprint('the value of y after swapping', y)\n", "<docstring token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<code token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<code token>\n" ]
false
98,943
58623aebeb5c1e53ab742ec93190befbc99469ca
from twisted.internet.protocol import ClientFactory, Protocol, DatagramProtocol from twisted.internet import reactor, task import logging import struct from gbot.util import split_by from gbot.models import Account import time, json class LocalUDPInfo(DatagramProtocol): node_io_addr = ('0.0.0.0', 8124) bots = [] def __init__(self): print "UDPInfo start" def send_json(self, obj): self.transport.write(json.dumps(obj), self.node_io_addr) def datagramReceived(self, data, addr): msg = json.loads(data) action = msg.get("action") print data if action == "start": for bot in self.bots: for login in [bot.logins.get(id) for id, online in bot.online.items() if online]: self.player_came(bot.name, login) def message_received(self, room, by, body): self.send_json({ "action": "message", "room": room, "by": by, "body": body }) def player_came(self, room, login): self.send_json({ "action": "player_came", "room": room, "login": login }) def player_left(self, room, login): self.send_json({ "action": "player_left", "room": room, "login": login }) #udp_info = LocalUDPInfo() #reactor.listenUDP(8125, udp_info) class GarenaRSUDPProtocol(DatagramProtocol): def __init__(self, factory): self.factory = factory self.msg_seq = int(time.time()) # because of how large unsigned int is, it is ok to do this self.msg_blob = "000000005c0000003f0000f800000040b40000000000000000000000ccff41007200690061006c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" self.msg_blob = self.msg_blob.decode('hex') print "UDP start" self.poll_messages() self.hello_everybody_lc = task.LoopingCall(self.hello_everybody).start(30, False) self.tries = {} self.resenders = {} self.received = [] def poll_messages(self): self.factory.bot.messages.get().addCallback(self.send_message) def hello_everybody(self): # print "UDP hello => all" for id in [id for id, online in self.factory.bot.online.items() if online]: self.say_hello(id) def say_hello(self, id): addr = self.factory.bot.addr(id) if addr: hello_packet = struct.pack("< I I 8x", 2, self.factory.account.id) self.transport.write(hello_packet, addr) def datagramReceived(self, data, host_port): host, port = host_port packet_type = ord(data[0]) if packet_type == 2: self.handle_hello(data) if packet_type == 15: pass # this is {HELLO REPLY} packet, we don't really need it, so -> ignore #print "UDP hello reply <= ", host_port # if packet_type == 51: self.handle_message(data) if packet_type == 57: self.invalidate_resender(data) def handle_message(self, data): data = data[1:] # print len(data) # print data.encode('hex') format = "< I I I 96x I" unpacked = struct.unpack(format, data[:112]) seq, from_id, to_id, length = unpacked msg = data[112:].decode('utf_16_le', 'ignore') # print self.factory.account.login + " => " + msg # player = self.tcp.players.get(from_id) # me = self.tcp.players.get(to_id) addr = self.factory.bot.addr(from_id) # print addr key = "%s#%s" % (from_id, seq) if addr and not key in self.received: self.received.append(key) reactor.callLater(10, lambda: self.received.remove(key)) # print "{MESSAGE #%s from %s of length %s(bytes)}" % (seq, login, length) # print "{MSG BODY => %s}" % msg reply = struct.pack("< B I I 8x", 57, seq, self.factory.account.id) self.transport.write(reply, addr) reactor.callLater(0, self.factory.bot.message_received, from_id, msg, True) # self.send_message(me, player, u"you said => " + msg) def send_message(self, player_and_msg): self.poll_messages() to_player, msg = player_and_msg addr = self.factory.bot.addr(to_player.id) if addr: self.msg_seq += 1 seq = self.msg_seq from_id = self.factory.account.id length = len(msg) * 2 header = struct.pack("< B I I I", 51, seq, from_id, to_player.id) packet = header + self.msg_blob + struct.pack("< I", length) + msg.encode('utf_16_le', 'ignore') #self.transport.write(packet, addr) self.resenders[seq] = task.LoopingCall(self.resend_message, seq, packet, addr) self.tries[seq] = 0 self.resenders[seq].start(0.4) # print "{MESSAGE to %s}" % to_player.login # print "{MSG BODY => %s}" % msg def invalidate_resender(self, data): seq = struct.unpack("<I", data[1:5])[0] # print "remote => i got #%s" % seq lc = self.resenders.get(seq) if lc: lc.stop() del self.resenders[seq] del self.tries[seq] def resend_message(self, seq, packet, addr): lc = self.resenders.get(seq) if lc: self.tries[seq] += 1 self.transport.write(packet, addr) # print "sending #%s, tries: %s" % (seq, self.tries[seq]) if self.tries[seq] > 6: lc.stop() del self.resenders[seq] del self.tries[seq] def handle_hello(self, data): id = struct.unpack("<I", data[4:8])[0] addr = self.factory.bot.addr(id) if addr: reply = struct.pack("< I I 4x I", 15, self.factory.account.id, id) self.transport.write(reply, addr) class GarenaRSProtocol(Protocol): def __init__(self): self.buffer = '' def write(self, data): self.transport.write(data) def write_hex(self, data): self.write(data.decode('hex')) def connectionMade(self): self.log = logging.getLogger("GRSP[%s]" % self.factory.account.login) self.log.info(u"connection made, sending auth packet") self.write_hex(self.factory.packet) self.log.info(u"issuing disconnect in 45 seconds if Garena did not respond with WELCOME") self.timeout_deferred = reactor.callLater(45, self.timeout) def timeout(self): self.log.error(u"Garena did not send WELCOME packet in 45 seconds, dropping connection now") self.transport.loseConnection() def dataReceived(self, data): self.buffer += data self.decodeHeader() def decodeHeader(self): if len(self.buffer) >= 5: header = struct.unpack("< I B", self.buffer[:5]) if len(self.buffer) >= header[0]+4: packet = self.buffer[5:header[0]+4] self.buffer = self.buffer[header[0]+4:] if len(self.buffer) >= 5: reactor.callLater(0, self.decodeHeader) self.decodePacket(header[1], packet) def decodePacket(self, packet_type, data): if self.factory.write_only and packet_type != 48: return getattr(self, 'handle_' + { 34: 'player_came', 35: 'player_left', 37: 'message', 44: 'userlist', 48: 'welcome' }.get(packet_type, 'non_existing'), lambda data: None)(data) def handle_non_existing(self, data): self.log.info(u"??? -> %s", data.encode('hex')) def handle_player_left(self, data): id = struct.unpack("< I", data)[0] self.factory.bot.player_left(id) def handle_player_came(self, data): format = "< I 15s 6x 1B 2x 4B 32x" unpacked = struct.unpack(format, data) id = unpacked[0] login = unicode(unpacked[1].rstrip(chr(0)), 'ascii', 'ignore') ip = "%s.%s.%s.%s" % unpacked[3:] lvl = unpacked[2] port = struct.unpack(">H", data[40:42])[0] if not Account.get_or(pk = id): self.factory.bot.player_came(id, login, ip, port, lvl) else: self.log.info(u"%s is bot's account -> do nothing", login) #if hasattr(self.factory, 'udp_protocol'): # self.factory.udp_protocol.say_hello(id) def handle_userlist(self, data): self.log.info(u"cancelling TIMEOUT") self.factory.connection = self timeout_deferred = getattr(self, 'timeout_deferred', None) if timeout_deferred and timeout_deferred.active: timeout_deferred.cancel() del self.timeout_deferred self.log.info(u"got userlist") for user_data in [ud for ud in split_by(data[8:], 64) if len(ud) == 64]: self.handle_player_came(user_data) def handle_message(self, data): id = struct.unpack("<I", data[4:8])[0] message = unicode(data[12:], 'utf_16_le', 'ignore').strip() reactor.callLater(0, self.factory.bot.message_received, id, message) def handle_welcome(self, data): self.log.info(u"got WELCOME") self.log.info(u"cancelling TIMEOUT") self.factory.connection = self timeout_deferred = getattr(self, 'timeout_deferred', None) if timeout_deferred and timeout_deferred.active: try: timeout_deferred.cancel() except: pass del self.timeout_deferred class GarenaRSFactory(ClientFactory): protocol = GarenaRSProtocol def __init__(self, bot, account, write_only = True, send_kicks = False, send_anns = True, send_pvts = True): self.bot = bot self.account = account self.write_only = write_only self.connection = None self.log = logging.getLogger("GRSF[%s:%s]" % (bot.name, account.login)) self.log.info(u"initialized") self.packet = account.packet.replace("{roomid}", struct.pack("< I", bot.room.id).encode('hex')) # deferreds if send_anns: self.bot.announces.get().addCallback(self.send_announce) if send_pvts: self.bot.privates.get().addCallback(self.send_private) if send_kicks: self.bot.kicks.get().addCallback(self.send_kick) #only now enable udp for ospl.slave #if account.port > 15000: # self.udp_protocol = GarenaRSUDPProtocol(self) # self.udp = reactor.listenUDP(account.port, self.udp_protocol, interface = '212.154.211.111') #else: # self.udp = None self.connect() def connect(self): self.log.info(u"issuing roomserver connection") reactor.connectTCP(self.bot.room.ip, 8687, self) def reconnect(self): self.log.info(u"issuing reconnect in 5 seconds") self.connection = None if not self.write_only: self.log.info(u"lost connection on reading bot, moving ip_list to stale") for id in self.bot.ip_list.keys(): reactor.callLater(0, self.bot.player_left, id) reactor.callLater(5, self.connect) def startedConnecting(self, connector): self.log.info(u"started connecting") def clientConnectionLost(self, connector, reason): self.log.error(u"connection lost, reason: %s", reason) self.reconnect() def clientConnectionFailed(self, connector, reason): self.log.error("uconnection failed, reason: %s", reason) self.reconnect() def send_kick(self, (player_id, reason)): self.kick_deferred = self.bot.kicks.get().addCallback(self.send_kick) if self.connection: self.log.debug(u"doing kick => %s @ %s", player_id, reason) format = "< I b I I I" packet = struct.pack(format, len(reason) + 13, 40, self.account.id, player_id, len(reason)) + reason.encode('ascii', 'ignore') self.connection.write(packet) # remove 15 min ban, that happens after player is kicked player_login = self.bot.logins.get(player_id, u'').encode('ascii', 'ignore') if player_login and False: self.log.debug(u"removing 15min ban => %s", player_login) packet = struct.pack("< I b I", len(player_login) + 10, 120, self.bot.room.id) + \ player_login + ("\0" * 5) self.connection.write(packet) else: self.log.error(u"kick : no connection") def send_private(self, (player_id, message)): reactor.callLater(0.55, lambda: self.bot.privates.get().addCallback(self.send_private)) if self.connection: format = "< I b I I" packet = struct.pack(format, len(message) * 2 + 9, 127, self.account.id, player_id) + message.encode('utf_16_le', 'ignore') self.connection.write(packet) else: self.log.error(u"pvt : no connection") def send_announce(self, message): reactor.callLater(1.1, lambda: self.bot.announces.get().addCallback(self.send_announce)) if self.connection: self.log.debug(u"ANN -> %s", message) format = "< I b I" packet = struct.pack(format, len(message) * 2 + 5, 48, self.bot.room.id) + message.encode('utf_16_le', 'ignore') self.connection.write(packet) else: self.log.error(u"ann : no connection")
[ "from twisted.internet.protocol import ClientFactory, Protocol, DatagramProtocol\r\nfrom twisted.internet import reactor, task\r\nimport logging\r\nimport struct\r\nfrom gbot.util import split_by\r\nfrom gbot.models import Account\r\nimport time, json\r\n\r\n\r\nclass LocalUDPInfo(DatagramProtocol):\r\n\r\n node_io_addr = ('0.0.0.0', 8124)\r\n bots = []\r\n \r\n def __init__(self):\r\n print \"UDPInfo start\"\r\n\r\n def send_json(self, obj): self.transport.write(json.dumps(obj), self.node_io_addr)\r\n\r\n def datagramReceived(self, data, addr):\r\n msg = json.loads(data)\r\n action = msg.get(\"action\")\r\n \r\n print data\r\n \r\n if action == \"start\":\r\n for bot in self.bots:\r\n for login in [bot.logins.get(id) for id, online in bot.online.items() if online]:\r\n self.player_came(bot.name, login)\r\n \r\n \r\n \r\n \r\n def message_received(self, room, by, body):\r\n self.send_json({\r\n \"action\": \"message\",\r\n \"room\": room,\r\n \"by\": by,\r\n \"body\": body\r\n })\r\n \r\n def player_came(self, room, login):\r\n self.send_json({\r\n \"action\": \"player_came\",\r\n \"room\": room,\r\n \"login\": login\r\n })\r\n \r\n def player_left(self, room, login):\r\n self.send_json({\r\n \"action\": \"player_left\",\r\n \"room\": room,\r\n \"login\": login\r\n })\r\n \r\n \r\n\r\n\r\n#udp_info = LocalUDPInfo()\r\n#reactor.listenUDP(8125, udp_info)\r\n\r\n\r\n\r\nclass GarenaRSUDPProtocol(DatagramProtocol):\r\n\r\n def __init__(self, factory):\r\n self.factory = factory\r\n self.msg_seq = int(time.time()) # because of how large unsigned int is, it is ok to do this\r\n self.msg_blob = \"000000005c0000003f0000f800000040b40000000000000000000000ccff41007200690061006c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000\"\r\n self.msg_blob = self.msg_blob.decode('hex')\r\n\r\n print \"UDP start\"\r\n\r\n self.poll_messages()\r\n self.hello_everybody_lc = task.LoopingCall(self.hello_everybody).start(30, False)\r\n\r\n self.tries = {}\r\n self.resenders = {}\r\n self.received = []\r\n\r\n\r\n def poll_messages(self):\r\n self.factory.bot.messages.get().addCallback(self.send_message)\r\n\r\n\r\n def hello_everybody(self):\r\n# print \"UDP hello => all\"\r\n\r\n for id in [id for id, online in self.factory.bot.online.items() if online]:\r\n self.say_hello(id)\r\n\r\n def say_hello(self, id):\r\n addr = self.factory.bot.addr(id)\r\n if addr:\r\n hello_packet = struct.pack(\"< I I 8x\", 2, self.factory.account.id)\r\n self.transport.write(hello_packet, addr)\r\n\r\n\r\n def datagramReceived(self, data, host_port):\r\n host, port = host_port\r\n\r\n packet_type = ord(data[0])\r\n\r\n if packet_type == 2:\r\n self.handle_hello(data)\r\n if packet_type == 15:\r\n pass\r\n # this is {HELLO REPLY} packet, we don't really need it, so -> ignore\r\n #print \"UDP hello reply <= \", host_port\r\n#\r\n if packet_type == 51:\r\n\r\n self.handle_message(data)\r\n\r\n if packet_type == 57:\r\n self.invalidate_resender(data)\r\n\r\n\r\n\r\n def handle_message(self, data):\r\n data = data[1:]\r\n\r\n\r\n# print len(data)\r\n# print data.encode('hex')\r\n\r\n format = \"< I I I 96x I\"\r\n unpacked = struct.unpack(format, data[:112])\r\n seq, from_id, to_id, length = unpacked\r\n msg = data[112:].decode('utf_16_le', 'ignore')\r\n\r\n# print self.factory.account.login + \" => \" + msg\r\n\r\n# player = self.tcp.players.get(from_id)\r\n# me = self.tcp.players.get(to_id)\r\n\r\n addr = self.factory.bot.addr(from_id)\r\n# print addr\r\n\r\n key = \"%s#%s\" % (from_id, seq)\r\n\r\n if addr and not key in self.received:\r\n self.received.append(key)\r\n reactor.callLater(10, lambda: self.received.remove(key))\r\n\r\n# print \"{MESSAGE #%s from %s of length %s(bytes)}\" % (seq, login, length)\r\n# print \"{MSG BODY => %s}\" % msg\r\n\r\n reply = struct.pack(\"< B I I 8x\", 57, seq, self.factory.account.id)\r\n self.transport.write(reply, addr)\r\n\r\n reactor.callLater(0, self.factory.bot.message_received, from_id, msg, True)\r\n\r\n# self.send_message(me, player, u\"you said => \" + msg)\r\n\r\n\r\n\r\n def send_message(self, player_and_msg):\r\n self.poll_messages()\r\n to_player, msg = player_and_msg\r\n\r\n addr = self.factory.bot.addr(to_player.id)\r\n if addr:\r\n self.msg_seq += 1\r\n\r\n seq = self.msg_seq\r\n from_id = self.factory.account.id\r\n length = len(msg) * 2\r\n\r\n header = struct.pack(\"< B I I I\", 51, seq, from_id, to_player.id)\r\n packet = header + self.msg_blob + struct.pack(\"< I\", length) + msg.encode('utf_16_le', 'ignore')\r\n\r\n #self.transport.write(packet, addr)\r\n\r\n self.resenders[seq] = task.LoopingCall(self.resend_message, seq, packet, addr)\r\n self.tries[seq] = 0\r\n self.resenders[seq].start(0.4)\r\n# print \"{MESSAGE to %s}\" % to_player.login\r\n# print \"{MSG BODY => %s}\" % msg\r\n\r\n\r\n def invalidate_resender(self, data):\r\n seq = struct.unpack(\"<I\", data[1:5])[0]\r\n# print \"remote => i got #%s\" % seq\r\n\r\n lc = self.resenders.get(seq)\r\n if lc:\r\n lc.stop()\r\n del self.resenders[seq]\r\n del self.tries[seq]\r\n\r\n\r\n\r\n\r\n def resend_message(self, seq, packet, addr):\r\n\r\n\r\n lc = self.resenders.get(seq)\r\n if lc:\r\n self.tries[seq] += 1\r\n self.transport.write(packet, addr)\r\n\r\n# print \"sending #%s, tries: %s\" % (seq, self.tries[seq])\r\n if self.tries[seq] > 6:\r\n lc.stop()\r\n del self.resenders[seq]\r\n del self.tries[seq]\r\n\r\n\r\n\r\n def handle_hello(self, data):\r\n id = struct.unpack(\"<I\", data[4:8])[0]\r\n addr = self.factory.bot.addr(id)\r\n if addr:\r\n reply = struct.pack(\"< I I 4x I\", 15, self.factory.account.id, id)\r\n self.transport.write(reply, addr)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass GarenaRSProtocol(Protocol):\r\n\r\n def __init__(self):\r\n self.buffer = ''\r\n\r\n def write(self, data): self.transport.write(data)\r\n def write_hex(self, data): self.write(data.decode('hex'))\r\n\r\n def connectionMade(self):\r\n self.log = logging.getLogger(\"GRSP[%s]\" % self.factory.account.login)\r\n self.log.info(u\"connection made, sending auth packet\")\r\n\r\n self.write_hex(self.factory.packet)\r\n \r\n self.log.info(u\"issuing disconnect in 45 seconds if Garena did not respond with WELCOME\")\r\n self.timeout_deferred = reactor.callLater(45, self.timeout)\r\n \r\n def timeout(self):\r\n self.log.error(u\"Garena did not send WELCOME packet in 45 seconds, dropping connection now\")\r\n self.transport.loseConnection()\r\n\r\n def dataReceived(self, data):\r\n self.buffer += data\r\n self.decodeHeader()\r\n\r\n def decodeHeader(self):\r\n if len(self.buffer) >= 5:\r\n header = struct.unpack(\"< I B\", self.buffer[:5])\r\n if len(self.buffer) >= header[0]+4:\r\n packet = self.buffer[5:header[0]+4]\r\n self.buffer = self.buffer[header[0]+4:]\r\n if len(self.buffer) >= 5:\r\n reactor.callLater(0, self.decodeHeader)\r\n self.decodePacket(header[1], packet)\r\n\r\n def decodePacket(self, packet_type, data):\r\n if self.factory.write_only and packet_type != 48: return\r\n\r\n getattr(self, 'handle_' + {\r\n 34: 'player_came',\r\n 35: 'player_left',\r\n 37: 'message',\r\n 44: 'userlist',\r\n 48: 'welcome'\r\n }.get(packet_type, 'non_existing'), lambda data: None)(data)\r\n\r\n\r\n def handle_non_existing(self, data):\r\n self.log.info(u\"??? -> %s\", data.encode('hex'))\r\n\r\n\r\n def handle_player_left(self, data):\r\n id = struct.unpack(\"< I\", data)[0]\r\n self.factory.bot.player_left(id)\r\n\r\n\r\n def handle_player_came(self, data):\r\n format = \"< I 15s 6x 1B 2x 4B 32x\"\r\n unpacked = struct.unpack(format, data)\r\n id = unpacked[0]\r\n login = unicode(unpacked[1].rstrip(chr(0)), 'ascii', 'ignore')\r\n ip = \"%s.%s.%s.%s\" % unpacked[3:]\r\n lvl = unpacked[2]\r\n port = struct.unpack(\">H\", data[40:42])[0]\r\n if not Account.get_or(pk = id):\r\n self.factory.bot.player_came(id, login, ip, port, lvl)\r\n else:\r\n self.log.info(u\"%s is bot's account -> do nothing\", login)\r\n\r\n #if hasattr(self.factory, 'udp_protocol'):\r\n # self.factory.udp_protocol.say_hello(id)\r\n\r\n\r\n def handle_userlist(self, data):\r\n self.log.info(u\"cancelling TIMEOUT\")\r\n self.factory.connection = self\r\n timeout_deferred = getattr(self, 'timeout_deferred', None)\r\n if timeout_deferred and timeout_deferred.active:\r\n timeout_deferred.cancel()\r\n del self.timeout_deferred\r\n\r\n\r\n self.log.info(u\"got userlist\")\r\n for user_data in [ud for ud in split_by(data[8:], 64) if len(ud) == 64]:\r\n self.handle_player_came(user_data)\r\n\r\n\r\n\r\n def handle_message(self, data):\r\n id = struct.unpack(\"<I\", data[4:8])[0]\r\n message = unicode(data[12:], 'utf_16_le', 'ignore').strip()\r\n\r\n reactor.callLater(0, self.factory.bot.message_received, id, message)\r\n\r\n\r\n def handle_welcome(self, data):\r\n self.log.info(u\"got WELCOME\")\r\n self.log.info(u\"cancelling TIMEOUT\")\r\n\r\n self.factory.connection = self\r\n timeout_deferred = getattr(self, 'timeout_deferred', None)\r\n if timeout_deferred and timeout_deferred.active:\r\n try:\r\n timeout_deferred.cancel()\r\n except:\r\n pass\r\n del self.timeout_deferred\r\n\r\n\r\n\r\n\r\nclass GarenaRSFactory(ClientFactory):\r\n protocol = GarenaRSProtocol\r\n\r\n def __init__(self, bot, account, write_only = True,\r\n send_kicks = False, send_anns = True, send_pvts = True):\r\n self.bot = bot\r\n self.account = account\r\n self.write_only = write_only\r\n self.connection = None\r\n\r\n self.log = logging.getLogger(\"GRSF[%s:%s]\" % (bot.name, account.login))\r\n self.log.info(u\"initialized\")\r\n\r\n self.packet = account.packet.replace(\"{roomid}\",\r\n struct.pack(\"< I\", bot.room.id).encode('hex'))\r\n\r\n # deferreds\r\n if send_anns: self.bot.announces.get().addCallback(self.send_announce)\r\n if send_pvts: self.bot.privates.get().addCallback(self.send_private)\r\n if send_kicks: self.bot.kicks.get().addCallback(self.send_kick)\r\n\r\n #only now enable udp for ospl.slave\r\n #if account.port > 15000:\r\n # self.udp_protocol = GarenaRSUDPProtocol(self)\r\n # self.udp = reactor.listenUDP(account.port, self.udp_protocol, interface = '212.154.211.111')\r\n #else:\r\n # self.udp = None\r\n\r\n\r\n self.connect()\r\n\r\n\r\n def connect(self):\r\n self.log.info(u\"issuing roomserver connection\")\r\n reactor.connectTCP(self.bot.room.ip, 8687, self)\r\n\r\n def reconnect(self):\r\n self.log.info(u\"issuing reconnect in 5 seconds\")\r\n self.connection = None\r\n if not self.write_only:\r\n self.log.info(u\"lost connection on reading bot, moving ip_list to stale\")\r\n for id in self.bot.ip_list.keys():\r\n reactor.callLater(0, self.bot.player_left, id)\r\n\r\n reactor.callLater(5, self.connect)\r\n \r\n\r\n\r\n def startedConnecting(self, connector):\r\n self.log.info(u\"started connecting\")\r\n\r\n\r\n def clientConnectionLost(self, connector, reason):\r\n self.log.error(u\"connection lost, reason: %s\", reason)\r\n self.reconnect()\r\n\r\n def clientConnectionFailed(self, connector, reason):\r\n self.log.error(\"uconnection failed, reason: %s\", reason)\r\n self.reconnect()\r\n\r\n\r\n\r\n\r\n def send_kick(self, (player_id, reason)):\r\n self.kick_deferred = self.bot.kicks.get().addCallback(self.send_kick)\r\n if self.connection:\r\n self.log.debug(u\"doing kick => %s @ %s\", player_id, reason)\r\n format = \"< I b I I I\"\r\n packet = struct.pack(format, len(reason) + 13, 40, self.account.id,\r\n player_id, len(reason)) + reason.encode('ascii', 'ignore')\r\n self.connection.write(packet)\r\n\r\n # remove 15 min ban, that happens after player is kicked\r\n player_login = self.bot.logins.get(player_id, u'').encode('ascii', 'ignore')\r\n if player_login and False:\r\n self.log.debug(u\"removing 15min ban => %s\", player_login)\r\n\r\n packet = struct.pack(\"< I b I\", len(player_login) + 10, 120, self.bot.room.id) + \\\r\n player_login + (\"\\0\" * 5)\r\n self.connection.write(packet) \r\n\r\n else:\r\n self.log.error(u\"kick : no connection\")\r\n\r\n\r\n\r\n def send_private(self, (player_id, message)):\r\n reactor.callLater(0.55, lambda: self.bot.privates.get().addCallback(self.send_private))\r\n if self.connection:\r\n format = \"< I b I I\"\r\n packet = struct.pack(format, len(message) * 2 + 9, 127,\r\n self.account.id,\r\n player_id) + message.encode('utf_16_le', 'ignore')\r\n self.connection.write(packet)\r\n else:\r\n self.log.error(u\"pvt : no connection\")\r\n\r\n\r\n def send_announce(self, message):\r\n reactor.callLater(1.1, lambda: self.bot.announces.get().addCallback(self.send_announce))\r\n if self.connection:\r\n self.log.debug(u\"ANN -> %s\", message)\r\n format = \"< I b I\"\r\n packet = struct.pack(format, len(message) * 2 + 5, 48,\r\n self.bot.room.id) + message.encode('utf_16_le', 'ignore')\r\n self.connection.write(packet)\r\n else:\r\n self.log.error(u\"ann : no connection\")\r\n\r\n\r\n\r\n\r\n\r\n\r\n" ]
true
98,944
513554cde77c21a26c6e3e2169e053c8dffcc1b4
import io import os import sys import unittest import tempfile import orjson from yote.experiment import Experiment def capture_output(experiment): experiment.ch.stream = io.StringIO() def get_output(experiment): experiment.ch.stream.seek(0) return experiment.ch.stream.readlines() class TestExperiment(unittest.TestCase): def test_experiment_folder_created(self): with tempfile.TemporaryDirectory() as td: experiment = Experiment(data_path=td) self.assertTrue(os.path.isdir(os.path.join(td, experiment._id))) def test_experiment_meta_saves(self): with tempfile.TemporaryDirectory() as td: meta = {"name": "HAHA", "data": 1} experiment = Experiment(meta=meta, data_path=td) with open(os.path.join(td, experiment._id, "meta.json"), "r") as f: saved_meta = orjson.loads(f.read()) self.assertEqual(saved_meta, meta) def test_lines_get_printed(self): with tempfile.TemporaryDirectory() as td: experiment = Experiment(data_path=td) capture_output(experiment) lines = [{"name": "haha", "var": i} for i in range(10)] [experiment.emit(line) for line in lines] read_lines = [orjson.loads(x) for x in get_output(experiment)] self.assertEqual(read_lines, lines) def test_lines_get_written(self): with tempfile.TemporaryDirectory() as td: experiment = Experiment(data_path=td) capture_output(experiment) lines = [{"name": "haha", "var": i} for i in range(10)] [experiment.emit(line) for line in lines] with open(os.path.join(td, experiment._id, "metrics.log"), "r") as f: read_lines = [orjson.loads(line.strip()) for line in f.readlines()] self.assertEqual(read_lines, lines) def test_lines_print_every_n(self): with tempfile.TemporaryDirectory() as td: experiment = Experiment(data_path=td, print_every=5) capture_output(experiment) lines = [{"name": "haha", "var": i} for i in range(10)] [experiment.emit(line) for line in lines] read_lines = [orjson.loads(x) for x in get_output(experiment)] self.assertEqual([lines[0], lines[5]], read_lines) def test_verbose_suppress(self): with tempfile.TemporaryDirectory() as td: experiment = Experiment(data_path=td, verbose=False) capture_output(experiment) lines = [{"name": "haha", "var": i} for i in range(10)] [experiment.emit(line) for line in lines] read_lines = [orjson.loads(x) for x in get_output(experiment)] self.assertEqual(read_lines, []) def test_experiment_from_id(self): with tempfile.TemporaryDirectory() as td: meta = {"asdf": "test"} experiment = Experiment(data_path=td, verbose=False, meta=meta) _id = experiment._id experiment = Experiment.from_id(_id, data_path=td) self.assertEqual(experiment.meta, meta)
[ "import io\nimport os\nimport sys\nimport unittest\nimport tempfile\n\nimport orjson\n\nfrom yote.experiment import Experiment\n\n\ndef capture_output(experiment):\n experiment.ch.stream = io.StringIO()\n\n\ndef get_output(experiment):\n experiment.ch.stream.seek(0)\n return experiment.ch.stream.readlines()\n\n\nclass TestExperiment(unittest.TestCase):\n def test_experiment_folder_created(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n self.assertTrue(os.path.isdir(os.path.join(td, experiment._id)))\n\n def test_experiment_meta_saves(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {\"name\": \"HAHA\", \"data\": 1}\n experiment = Experiment(meta=meta, data_path=td)\n with open(os.path.join(td, experiment._id, \"meta.json\"), \"r\") as f:\n saved_meta = orjson.loads(f.read())\n self.assertEqual(saved_meta, meta)\n\n def test_lines_get_printed(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{\"name\": \"haha\", \"var\": i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, lines)\n\n def test_lines_get_written(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{\"name\": \"haha\", \"var\": i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n with open(os.path.join(td, experiment._id, \"metrics.log\"), \"r\") as f:\n read_lines = [orjson.loads(line.strip()) for line in f.readlines()]\n self.assertEqual(read_lines, lines)\n\n def test_lines_print_every_n(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, print_every=5)\n capture_output(experiment)\n lines = [{\"name\": \"haha\", \"var\": i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual([lines[0], lines[5]], read_lines)\n\n def test_verbose_suppress(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, verbose=False)\n capture_output(experiment)\n lines = [{\"name\": \"haha\", \"var\": i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, [])\n\n def test_experiment_from_id(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {\"asdf\": \"test\"}\n experiment = Experiment(data_path=td, verbose=False, meta=meta)\n _id = experiment._id\n experiment = Experiment.from_id(_id, data_path=td)\n self.assertEqual(experiment.meta, meta)\n", "import io\nimport os\nimport sys\nimport unittest\nimport tempfile\nimport orjson\nfrom yote.experiment import Experiment\n\n\ndef capture_output(experiment):\n experiment.ch.stream = io.StringIO()\n\n\ndef get_output(experiment):\n experiment.ch.stream.seek(0)\n return experiment.ch.stream.readlines()\n\n\nclass TestExperiment(unittest.TestCase):\n\n def test_experiment_folder_created(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n self.assertTrue(os.path.isdir(os.path.join(td, experiment._id)))\n\n def test_experiment_meta_saves(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'name': 'HAHA', 'data': 1}\n experiment = Experiment(meta=meta, data_path=td)\n with open(os.path.join(td, experiment._id, 'meta.json'), 'r') as f:\n saved_meta = orjson.loads(f.read())\n self.assertEqual(saved_meta, meta)\n\n def test_lines_get_printed(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, lines)\n\n def test_lines_get_written(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n with open(os.path.join(td, experiment._id, 'metrics.log'), 'r'\n ) as f:\n read_lines = [orjson.loads(line.strip()) for line in f.\n readlines()]\n self.assertEqual(read_lines, lines)\n\n def test_lines_print_every_n(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, print_every=5)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual([lines[0], lines[5]], read_lines)\n\n def test_verbose_suppress(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, verbose=False)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, [])\n\n def test_experiment_from_id(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'asdf': 'test'}\n experiment = Experiment(data_path=td, verbose=False, meta=meta)\n _id = experiment._id\n experiment = Experiment.from_id(_id, data_path=td)\n self.assertEqual(experiment.meta, meta)\n", "<import token>\n\n\ndef capture_output(experiment):\n experiment.ch.stream = io.StringIO()\n\n\ndef get_output(experiment):\n experiment.ch.stream.seek(0)\n return experiment.ch.stream.readlines()\n\n\nclass TestExperiment(unittest.TestCase):\n\n def test_experiment_folder_created(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n self.assertTrue(os.path.isdir(os.path.join(td, experiment._id)))\n\n def test_experiment_meta_saves(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'name': 'HAHA', 'data': 1}\n experiment = Experiment(meta=meta, data_path=td)\n with open(os.path.join(td, experiment._id, 'meta.json'), 'r') as f:\n saved_meta = orjson.loads(f.read())\n self.assertEqual(saved_meta, meta)\n\n def test_lines_get_printed(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, lines)\n\n def test_lines_get_written(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n with open(os.path.join(td, experiment._id, 'metrics.log'), 'r'\n ) as f:\n read_lines = [orjson.loads(line.strip()) for line in f.\n readlines()]\n self.assertEqual(read_lines, lines)\n\n def test_lines_print_every_n(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, print_every=5)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual([lines[0], lines[5]], read_lines)\n\n def test_verbose_suppress(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, verbose=False)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, [])\n\n def test_experiment_from_id(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'asdf': 'test'}\n experiment = Experiment(data_path=td, verbose=False, meta=meta)\n _id = experiment._id\n experiment = Experiment.from_id(_id, data_path=td)\n self.assertEqual(experiment.meta, meta)\n", "<import token>\n\n\ndef capture_output(experiment):\n experiment.ch.stream = io.StringIO()\n\n\n<function token>\n\n\nclass TestExperiment(unittest.TestCase):\n\n def test_experiment_folder_created(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n self.assertTrue(os.path.isdir(os.path.join(td, experiment._id)))\n\n def test_experiment_meta_saves(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'name': 'HAHA', 'data': 1}\n experiment = Experiment(meta=meta, data_path=td)\n with open(os.path.join(td, experiment._id, 'meta.json'), 'r') as f:\n saved_meta = orjson.loads(f.read())\n self.assertEqual(saved_meta, meta)\n\n def test_lines_get_printed(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, lines)\n\n def test_lines_get_written(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n with open(os.path.join(td, experiment._id, 'metrics.log'), 'r'\n ) as f:\n read_lines = [orjson.loads(line.strip()) for line in f.\n readlines()]\n self.assertEqual(read_lines, lines)\n\n def test_lines_print_every_n(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, print_every=5)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual([lines[0], lines[5]], read_lines)\n\n def test_verbose_suppress(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, verbose=False)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, [])\n\n def test_experiment_from_id(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'asdf': 'test'}\n experiment = Experiment(data_path=td, verbose=False, meta=meta)\n _id = experiment._id\n experiment = Experiment.from_id(_id, data_path=td)\n self.assertEqual(experiment.meta, meta)\n", "<import token>\n<function token>\n<function token>\n\n\nclass TestExperiment(unittest.TestCase):\n\n def test_experiment_folder_created(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n self.assertTrue(os.path.isdir(os.path.join(td, experiment._id)))\n\n def test_experiment_meta_saves(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'name': 'HAHA', 'data': 1}\n experiment = Experiment(meta=meta, data_path=td)\n with open(os.path.join(td, experiment._id, 'meta.json'), 'r') as f:\n saved_meta = orjson.loads(f.read())\n self.assertEqual(saved_meta, meta)\n\n def test_lines_get_printed(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, lines)\n\n def test_lines_get_written(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n with open(os.path.join(td, experiment._id, 'metrics.log'), 'r'\n ) as f:\n read_lines = [orjson.loads(line.strip()) for line in f.\n readlines()]\n self.assertEqual(read_lines, lines)\n\n def test_lines_print_every_n(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, print_every=5)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual([lines[0], lines[5]], read_lines)\n\n def test_verbose_suppress(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, verbose=False)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, [])\n\n def test_experiment_from_id(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'asdf': 'test'}\n experiment = Experiment(data_path=td, verbose=False, meta=meta)\n _id = experiment._id\n experiment = Experiment.from_id(_id, data_path=td)\n self.assertEqual(experiment.meta, meta)\n", "<import token>\n<function token>\n<function token>\n\n\nclass TestExperiment(unittest.TestCase):\n <function token>\n\n def test_experiment_meta_saves(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'name': 'HAHA', 'data': 1}\n experiment = Experiment(meta=meta, data_path=td)\n with open(os.path.join(td, experiment._id, 'meta.json'), 'r') as f:\n saved_meta = orjson.loads(f.read())\n self.assertEqual(saved_meta, meta)\n\n def test_lines_get_printed(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, lines)\n\n def test_lines_get_written(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n with open(os.path.join(td, experiment._id, 'metrics.log'), 'r'\n ) as f:\n read_lines = [orjson.loads(line.strip()) for line in f.\n readlines()]\n self.assertEqual(read_lines, lines)\n\n def test_lines_print_every_n(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, print_every=5)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual([lines[0], lines[5]], read_lines)\n\n def test_verbose_suppress(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, verbose=False)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, [])\n\n def test_experiment_from_id(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'asdf': 'test'}\n experiment = Experiment(data_path=td, verbose=False, meta=meta)\n _id = experiment._id\n experiment = Experiment.from_id(_id, data_path=td)\n self.assertEqual(experiment.meta, meta)\n", "<import token>\n<function token>\n<function token>\n\n\nclass TestExperiment(unittest.TestCase):\n <function token>\n\n def test_experiment_meta_saves(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'name': 'HAHA', 'data': 1}\n experiment = Experiment(meta=meta, data_path=td)\n with open(os.path.join(td, experiment._id, 'meta.json'), 'r') as f:\n saved_meta = orjson.loads(f.read())\n self.assertEqual(saved_meta, meta)\n\n def test_lines_get_printed(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, lines)\n\n def test_lines_get_written(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n with open(os.path.join(td, experiment._id, 'metrics.log'), 'r'\n ) as f:\n read_lines = [orjson.loads(line.strip()) for line in f.\n readlines()]\n self.assertEqual(read_lines, lines)\n <function token>\n\n def test_verbose_suppress(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, verbose=False)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, [])\n\n def test_experiment_from_id(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'asdf': 'test'}\n experiment = Experiment(data_path=td, verbose=False, meta=meta)\n _id = experiment._id\n experiment = Experiment.from_id(_id, data_path=td)\n self.assertEqual(experiment.meta, meta)\n", "<import token>\n<function token>\n<function token>\n\n\nclass TestExperiment(unittest.TestCase):\n <function token>\n\n def test_experiment_meta_saves(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'name': 'HAHA', 'data': 1}\n experiment = Experiment(meta=meta, data_path=td)\n with open(os.path.join(td, experiment._id, 'meta.json'), 'r') as f:\n saved_meta = orjson.loads(f.read())\n self.assertEqual(saved_meta, meta)\n\n def test_lines_get_printed(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, lines)\n <function token>\n <function token>\n\n def test_verbose_suppress(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, verbose=False)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, [])\n\n def test_experiment_from_id(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'asdf': 'test'}\n experiment = Experiment(data_path=td, verbose=False, meta=meta)\n _id = experiment._id\n experiment = Experiment.from_id(_id, data_path=td)\n self.assertEqual(experiment.meta, meta)\n", "<import token>\n<function token>\n<function token>\n\n\nclass TestExperiment(unittest.TestCase):\n <function token>\n <function token>\n\n def test_lines_get_printed(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, lines)\n <function token>\n <function token>\n\n def test_verbose_suppress(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td, verbose=False)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, [])\n\n def test_experiment_from_id(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'asdf': 'test'}\n experiment = Experiment(data_path=td, verbose=False, meta=meta)\n _id = experiment._id\n experiment = Experiment.from_id(_id, data_path=td)\n self.assertEqual(experiment.meta, meta)\n", "<import token>\n<function token>\n<function token>\n\n\nclass TestExperiment(unittest.TestCase):\n <function token>\n <function token>\n\n def test_lines_get_printed(self):\n with tempfile.TemporaryDirectory() as td:\n experiment = Experiment(data_path=td)\n capture_output(experiment)\n lines = [{'name': 'haha', 'var': i} for i in range(10)]\n [experiment.emit(line) for line in lines]\n read_lines = [orjson.loads(x) for x in get_output(experiment)]\n self.assertEqual(read_lines, lines)\n <function token>\n <function token>\n <function token>\n\n def test_experiment_from_id(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'asdf': 'test'}\n experiment = Experiment(data_path=td, verbose=False, meta=meta)\n _id = experiment._id\n experiment = Experiment.from_id(_id, data_path=td)\n self.assertEqual(experiment.meta, meta)\n", "<import token>\n<function token>\n<function token>\n\n\nclass TestExperiment(unittest.TestCase):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def test_experiment_from_id(self):\n with tempfile.TemporaryDirectory() as td:\n meta = {'asdf': 'test'}\n experiment = Experiment(data_path=td, verbose=False, meta=meta)\n _id = experiment._id\n experiment = Experiment.from_id(_id, data_path=td)\n self.assertEqual(experiment.meta, meta)\n", "<import token>\n<function token>\n<function token>\n\n\nclass TestExperiment(unittest.TestCase):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<function token>\n<function token>\n<class token>\n" ]
false
98,945
ec830a78cc08ab8940df7ddd0009d805d4704195
import numpy as np import time from sklearn import svm from sklearn.model_selection import train_test_split, GridSearchCV from sklearn.metrics import classification_report, accuracy_score from sklearn.model_selection import cross_val_score, StratifiedKFold from sklearn.metrics import confusion_matrix from Esme.helper.format import precision_format from Esme.dgms.format import dgm2diag from Esme.ml.rf import rfclf class classifier(): def __init__(self, x, y, method='svm', n_cv=5, **kwargs): """ classify. All hyperparameters are taken care of :param x: feature of shape (n_data, dim) :param y: label (n_data,) :param method: svm, rf :param kwargs: pass precomputed kernel as 'kernel' """ self.x = x self.y = y self.rf_stat = None self.summary = {} self.n_cv = n_cv self.method = method if 'kernel' in kwargs.keys(): self.kernel = kwargs['kernel'] self.print_flag = kwargs.get('print_flag', 'off') self.stat = {'train': None, 'test': None} # self.direct = kwargs[''] # TODO more generic here def rf(self): if self.method !='bl0' and self.method!='bl1': s1, s2, t = rfclf(self.x, self.y, m_f=40, multi_cv_flag=False) self.rf_stat = {'s1': s1, 's2': s2, 'time': t} else: self.rf_stat = {'s1': -1, 's2': -1, 'time': -1} # for baseline, we only have 5 dim feature, but max_feature is 40 def svm(self, n_splits = 10): # linear/Gaussian kernel self.stat['train'] = train_svm(self.x, self.y, print_flag=self.print_flag) # hyper-parameter seach eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat['train'], self.x, self.y, print_flag='off', n_splits = n_splits, n_cv=self.n_cv) self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv} def svm_kernel_(self, n_splits = 10): # precomputed kernel self.stat['train'] = train_svm(self.x, self.y, kernel_flag=True, kernel=self.kernel, print_flag=self.print_flag) eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat['train'], self.x, self.y, print_flag='off', kernel=self.kernel, n_splits=n_splits, n_cv = self.n_cv) self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv} def clf_summary(self, print_flag = False): if print_flag: if self.svm_train_stat is None: print('have not train svm yet') else: print ('svm train result: %s' %self.svm_train_stat) print ('svm eval result: %s' % self.svm_eval_stat) if self.rf_stat is None: print('have not train random forest yet') else: print ('rf eval result: %s' % self.rf_stat) self.summary['svm_train'] = self.svm_train_stat self.summary['svm_eval'] = self.svm_eval_stat self.summary['rf_test'] = self.rf_stat return self.summary # def save_xy(self, x, y): # np.save(self.direct + self.suffix + 'kernel', kernel) def dgms2swdgm(dgms): swdgms=[] for dgm in dgms: diag = dgm2diag(dgm) swdgms += [np.array(diag)] return swdgms def train_svm(x, y, random_state=2, print_flag='off', nonlinear_flag = True, kernel_flag=False, kernel=np.zeros((1, 1))): """ :param x: feature :param y: label :param random_state: random seed for 10 cv :param print_flag: 'on'/'off' for debug :param nonlinear_flag: linear :param kernel_flag: True if use precomputed kernel :param kernel: precomputed kernel. No need to pass if use gaussian/linear kernel :return: best parameters """ print('print_flag is', print_flag) assert print_flag in ['on', 'off'] tuned_params = [{'kernel': ['linear'], 'C': [0.01, 1, 10, 100, 1000]}] if nonlinear_flag: tuned_params += [{'kernel': ['rbf'], 'gamma': [0.01, 0.1, 1, 10, 100], 'C': [0.1, 1, 10, 100,1000]}] for score in ['accuracy']: x_train, x_test, y_train, y_test, indices_train, indices_test = train_test_split(x, y, range(len(y)), test_size=0.1, random_state=random_state) if not kernel_flag: # not precomputed kernel clf = GridSearchCV(svm.SVC(), tuned_params, cv=10, scoring='%s' % score, n_jobs=-1, verbose=0) clf.fit(x_train, y_train) else: clf = GridSearchCV(svm.SVC(kernel='precomputed'), [{'C': [0.01, 0.1, 1, 10, 100, 1000]}], cv=10, scoring='%s' % score, n_jobs=-1, verbose=0) kernel_train = kernel[np.ix_(indices_train, indices_train)] clf.fit(kernel_train, y_train) assert np.array_equal(kernel[np.ix_(indices_train, indices_train)], kernel_train) == True kernel_test = kernel[np.ix_(indices_test, indices_train)] means = clf.cv_results_['mean_test_score'] stds = clf.cv_results_['std_test_score'] if print_flag == 'on': for mean, std, params in zip(means, stds, clf.cv_results_['params']): print("%0.3f (+/-%0.03f) for %r" % (mean, std * 2, params)) print("Detailed classification report:\n") print("The model is trained on the full development set.") print("The scores are computed on the full evaluation set.\n") if kernel_flag == False: y_true, y_pred = y_test, clf.predict(x_test) else: y_true, y_pred = y_test, clf.predict(kernel_test) print('Able to execute kernel grid search') print(accuracy_score(y_true, y_pred)) print(classification_report(y_true, y_pred)) print(confusion_matrix(y_true, y_pred)) if 'kernel' not in clf.best_params_: clf.best_params_['kernel'] = 'precomputed' return {'param': clf.best_params_, 'score': round(clf.best_score_ * 1000)/10.0} def evaluate_tda_kernel(tda_kernel, Y, best_result_so_far, print_flag='off'): """ TODO: figure this out :param tda_kernel: :param Y: :param best_result_so_far: :param print_flag: :return: """ t1 = time.time() n = np.shape(tda_kernel)[0] grid_search_re = train_svm(np.zeros((n, 23)), Y, print_flag=print_flag, kernel=tda_kernel, kernel_flag=True, nonlinear_flag=False) # X is dummy here if grid_search_re['score'] < best_result_so_far[0]-4: print('Saved one unnecessary evaluation of bad kernel') return (0,0,{},0) cv_score = [] for seed in range(5): clf = svm.SVC(kernel='precomputed', C=grid_search_re['param']['C']) k_fold = StratifiedKFold(n_splits=10, shuffle=True, random_state=seed) scores = cross_val_score(clf, tda_kernel, Y, cv=k_fold, scoring='accuracy', n_jobs=-1) cv_score.append(scores.mean()) cv_score = np.array(cv_score) t2 = time.time() svm_time = precision_format(t2 - t1, 1) return (precision_format(100 * cv_score.mean(), 1), precision_format(100 * cv_score.std(), 1), grid_search_re, svm_time) def evaluate_best_estimator(grid_search_re, x, y, print_flag='off', kernel=None, n_splits = 10, n_cv=5): """ :param grid_search_re: grid search result(dict) :param x: feat :param y: label :param print_flag: on/off :param kernel: :param n_splits: :param n_cv: number of cv(5/10) for evaluation :return: """ if print_flag=='on': print('Start evaluating the best estimator') param = grid_search_re['param'] assert param['kernel'] in ['linear', 'rbf', 'precomputed'] assert isinstance(param, dict) # set up clf if len(param) == 3: clf = svm.SVC(kernel='rbf', C=param['C'], gamma = param['gamma']) elif (len(param) == 2) and (param['kernel'] == 'linear'): clf = svm.SVC(kernel='linear', C = param['C']) else: clf = svm.SVC(kernel='precomputed', C=param['C']) # evaluation t0 = time.time() cv_score, n_cv = [], n_cv for i in range(n_cv): k_fold = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=i) if param['kernel']!= 'precomputed': scores = cross_val_score(clf, x, y, cv=k_fold, scoring='accuracy', n_jobs=-1) else: scores = cross_val_score(clf, kernel, y, cv=k_fold, scoring='accuracy', n_jobs=-1) if print_flag == 'on': print(scores) cv_score.append(scores.mean()) cv_score = np.array(cv_score) if print_flag=='on': print(cv_score) print('Evaluation takes %0.3f. \n' 'After averageing %0.1f cross validations, the mean accuracy is %0.3f, the std is %0.5f' %(time.time()-t0, n_cv, cv_score.mean(), cv_score.std())) return cv_score.mean(), cv_score.std(), n_cv if __name__=='__main__': import sklearn.datasets as datasets iris = datasets.load_iris() # Take the first two features. We could avoid this by using a two-dim dataset x = iris.data[:, :2] y = iris.target clf = classifier(x, y, method='svm', n_cv=1) clf.svm() # train stat: {'param': {'C': 0.1, 'gamma': 1, 'kernel': 'rbf'}, 'score': 80.0} # test stat: {'mean': 0.8226666666666667, 'std': 0.007999999999999896} print(clf.stat)
[ "import numpy as np\nimport time\nfrom sklearn import svm\nfrom sklearn.model_selection import train_test_split, GridSearchCV\nfrom sklearn.metrics import classification_report, accuracy_score\nfrom sklearn.model_selection import cross_val_score, StratifiedKFold\nfrom sklearn.metrics import confusion_matrix\n\nfrom Esme.helper.format import precision_format\nfrom Esme.dgms.format import dgm2diag\nfrom Esme.ml.rf import rfclf\n\nclass classifier():\n def __init__(self, x, y, method='svm', n_cv=5, **kwargs):\n \"\"\"\n classify.\n All hyperparameters are taken care of\n :param x: feature of shape (n_data, dim)\n :param y: label (n_data,)\n :param method: svm, rf\n :param kwargs: pass precomputed kernel as 'kernel'\n \"\"\"\n self.x = x\n self.y = y\n self.rf_stat = None\n self.summary = {}\n self.n_cv = n_cv\n self.method = method\n if 'kernel' in kwargs.keys(): self.kernel = kwargs['kernel']\n\n self.print_flag = kwargs.get('print_flag', 'off')\n self.stat = {'train': None, 'test': None}\n # self.direct = kwargs[''] # TODO more generic here\n\n def rf(self):\n if self.method !='bl0' and self.method!='bl1':\n s1, s2, t = rfclf(self.x, self.y, m_f=40, multi_cv_flag=False)\n self.rf_stat = {'s1': s1, 's2': s2, 'time': t}\n else:\n self.rf_stat = {'s1': -1, 's2': -1, 'time': -1} # for baseline, we only have 5 dim feature, but max_feature is 40\n\n def svm(self, n_splits = 10):\n # linear/Gaussian kernel\n\n self.stat['train'] = train_svm(self.x, self.y, print_flag=self.print_flag) # hyper-parameter seach\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat['train'], self.x, self.y, print_flag='off', n_splits = n_splits, n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def svm_kernel_(self, n_splits = 10):\n # precomputed kernel\n\n self.stat['train'] = train_svm(self.x, self.y, kernel_flag=True, kernel=self.kernel, print_flag=self.print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat['train'], self.x, self.y, print_flag='off', kernel=self.kernel, n_splits=n_splits, n_cv = self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def clf_summary(self, print_flag = False):\n if print_flag:\n if self.svm_train_stat is None:\n print('have not train svm yet')\n else:\n print ('svm train result: %s' %self.svm_train_stat)\n print ('svm eval result: %s' % self.svm_eval_stat)\n\n if self.rf_stat is None:\n print('have not train random forest yet')\n else:\n print ('rf eval result: %s' % self.rf_stat)\n\n self.summary['svm_train'] = self.svm_train_stat\n self.summary['svm_eval'] = self.svm_eval_stat\n self.summary['rf_test'] = self.rf_stat\n return self.summary\n\n # def save_xy(self, x, y):\n # np.save(self.direct + self.suffix + 'kernel', kernel)\n\ndef dgms2swdgm(dgms):\n swdgms=[]\n for dgm in dgms:\n diag = dgm2diag(dgm)\n swdgms += [np.array(diag)]\n return swdgms\n\ndef train_svm(x, y, random_state=2, print_flag='off', nonlinear_flag = True, kernel_flag=False, kernel=np.zeros((1, 1))):\n \"\"\"\n :param x: feature\n :param y: label\n :param random_state: random seed for 10 cv\n :param print_flag: 'on'/'off' for debug\n :param nonlinear_flag: linear\n :param kernel_flag: True if use precomputed kernel\n :param kernel: precomputed kernel. No need to pass if use gaussian/linear kernel\n :return: best parameters\n \"\"\"\n print('print_flag is', print_flag)\n assert print_flag in ['on', 'off']\n tuned_params = [{'kernel': ['linear'], 'C': [0.01, 1, 10, 100, 1000]}]\n if nonlinear_flag:\n tuned_params += [{'kernel': ['rbf'], 'gamma': [0.01, 0.1, 1, 10, 100], 'C': [0.1, 1, 10, 100,1000]}]\n\n for score in ['accuracy']:\n x_train, x_test, y_train, y_test, indices_train, indices_test = train_test_split(x, y, range(len(y)), test_size=0.1, random_state=random_state)\n\n if not kernel_flag: # not precomputed kernel\n clf = GridSearchCV(svm.SVC(), tuned_params, cv=10, scoring='%s' % score, n_jobs=-1, verbose=0)\n clf.fit(x_train, y_train)\n else:\n clf = GridSearchCV(svm.SVC(kernel='precomputed'),\n [{'C': [0.01, 0.1, 1, 10, 100, 1000]}],\n cv=10, scoring='%s' % score, n_jobs=-1, verbose=0)\n kernel_train = kernel[np.ix_(indices_train, indices_train)]\n clf.fit(kernel_train, y_train)\n assert np.array_equal(kernel[np.ix_(indices_train, indices_train)], kernel_train) == True\n kernel_test = kernel[np.ix_(indices_test, indices_train)]\n\n means = clf.cv_results_['mean_test_score']\n stds = clf.cv_results_['std_test_score']\n\n if print_flag == 'on':\n for mean, std, params in zip(means, stds, clf.cv_results_['params']):\n print(\"%0.3f (+/-%0.03f) for %r\" % (mean, std * 2, params))\n print(\"Detailed classification report:\\n\")\n print(\"The model is trained on the full development set.\")\n print(\"The scores are computed on the full evaluation set.\\n\")\n\n if kernel_flag == False:\n y_true, y_pred = y_test, clf.predict(x_test)\n else:\n y_true, y_pred = y_test, clf.predict(kernel_test)\n print('Able to execute kernel grid search')\n print(accuracy_score(y_true, y_pred))\n print(classification_report(y_true, y_pred))\n print(confusion_matrix(y_true, y_pred))\n\n if 'kernel' not in clf.best_params_:\n clf.best_params_['kernel'] = 'precomputed'\n return {'param': clf.best_params_, 'score': round(clf.best_score_ * 1000)/10.0}\n\ndef evaluate_tda_kernel(tda_kernel, Y, best_result_so_far, print_flag='off'):\n \"\"\"\n TODO: figure this out\n :param tda_kernel:\n :param Y:\n :param best_result_so_far:\n :param print_flag:\n :return:\n \"\"\"\n\n t1 = time.time()\n n = np.shape(tda_kernel)[0]\n grid_search_re = train_svm(np.zeros((n, 23)), Y, print_flag=print_flag, kernel=tda_kernel,\n kernel_flag=True, nonlinear_flag=False) # X is dummy here\n if grid_search_re['score'] < best_result_so_far[0]-4:\n print('Saved one unnecessary evaluation of bad kernel')\n return (0,0,{},0)\n\n cv_score = []\n for seed in range(5):\n clf = svm.SVC(kernel='precomputed', C=grid_search_re['param']['C'])\n k_fold = StratifiedKFold(n_splits=10, shuffle=True, random_state=seed)\n scores = cross_val_score(clf, tda_kernel, Y, cv=k_fold, scoring='accuracy', n_jobs=-1)\n cv_score.append(scores.mean())\n\n cv_score = np.array(cv_score)\n t2 = time.time()\n svm_time = precision_format(t2 - t1, 1)\n return (precision_format(100 * cv_score.mean(), 1),\n precision_format(100 * cv_score.std(), 1),\n grid_search_re, svm_time)\n\ndef evaluate_best_estimator(grid_search_re, x, y, print_flag='off', kernel=None, n_splits = 10, n_cv=5):\n \"\"\"\n :param grid_search_re: grid search result(dict)\n :param x: feat\n :param y: label\n :param print_flag: on/off\n :param kernel:\n :param n_splits:\n :param n_cv: number of cv(5/10) for evaluation\n :return:\n \"\"\"\n if print_flag=='on': print('Start evaluating the best estimator')\n param = grid_search_re['param']\n assert param['kernel'] in ['linear', 'rbf', 'precomputed']\n assert isinstance(param, dict)\n\n # set up clf\n if len(param) == 3:\n clf = svm.SVC(kernel='rbf', C=param['C'], gamma = param['gamma'])\n elif (len(param) == 2) and (param['kernel'] == 'linear'):\n clf = svm.SVC(kernel='linear', C = param['C'])\n else:\n clf = svm.SVC(kernel='precomputed', C=param['C'])\n\n # evaluation\n t0 = time.time()\n cv_score, n_cv = [], n_cv\n for i in range(n_cv):\n k_fold = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=i)\n if param['kernel']!= 'precomputed':\n scores = cross_val_score(clf, x, y, cv=k_fold, scoring='accuracy', n_jobs=-1)\n else:\n scores = cross_val_score(clf, kernel, y, cv=k_fold, scoring='accuracy', n_jobs=-1)\n\n if print_flag == 'on': print(scores)\n cv_score.append(scores.mean())\n cv_score = np.array(cv_score)\n\n if print_flag=='on':\n print(cv_score)\n\n print('Evaluation takes %0.3f. \\n'\n 'After averageing %0.1f cross validations, the mean accuracy is %0.3f, the std is %0.5f'\n %(time.time()-t0, n_cv, cv_score.mean(), cv_score.std()))\n return cv_score.mean(), cv_score.std(), n_cv\n\nif __name__=='__main__':\n import sklearn.datasets as datasets\n iris = datasets.load_iris()\n # Take the first two features. We could avoid this by using a two-dim dataset\n x = iris.data[:, :2]\n y = iris.target\n clf = classifier(x, y, method='svm', n_cv=1)\n clf.svm()\n\n # train stat: {'param': {'C': 0.1, 'gamma': 1, 'kernel': 'rbf'}, 'score': 80.0}\n # test stat: {'mean': 0.8226666666666667, 'std': 0.007999999999999896}\n print(clf.stat)", "import numpy as np\nimport time\nfrom sklearn import svm\nfrom sklearn.model_selection import train_test_split, GridSearchCV\nfrom sklearn.metrics import classification_report, accuracy_score\nfrom sklearn.model_selection import cross_val_score, StratifiedKFold\nfrom sklearn.metrics import confusion_matrix\nfrom Esme.helper.format import precision_format\nfrom Esme.dgms.format import dgm2diag\nfrom Esme.ml.rf import rfclf\n\n\nclass classifier:\n\n def __init__(self, x, y, method='svm', n_cv=5, **kwargs):\n \"\"\"\n classify.\n All hyperparameters are taken care of\n :param x: feature of shape (n_data, dim)\n :param y: label (n_data,)\n :param method: svm, rf\n :param kwargs: pass precomputed kernel as 'kernel'\n \"\"\"\n self.x = x\n self.y = y\n self.rf_stat = None\n self.summary = {}\n self.n_cv = n_cv\n self.method = method\n if 'kernel' in kwargs.keys():\n self.kernel = kwargs['kernel']\n self.print_flag = kwargs.get('print_flag', 'off')\n self.stat = {'train': None, 'test': None}\n\n def rf(self):\n if self.method != 'bl0' and self.method != 'bl1':\n s1, s2, t = rfclf(self.x, self.y, m_f=40, multi_cv_flag=False)\n self.rf_stat = {'s1': s1, 's2': s2, 'time': t}\n else:\n self.rf_stat = {'s1': -1, 's2': -1, 'time': -1}\n\n def svm(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, print_flag=self.\n print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', n_splits=n_splits,\n n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def svm_kernel_(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, kernel_flag=True,\n kernel=self.kernel, print_flag=self.print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', kernel=self.kernel,\n n_splits=n_splits, n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def clf_summary(self, print_flag=False):\n if print_flag:\n if self.svm_train_stat is None:\n print('have not train svm yet')\n else:\n print('svm train result: %s' % self.svm_train_stat)\n print('svm eval result: %s' % self.svm_eval_stat)\n if self.rf_stat is None:\n print('have not train random forest yet')\n else:\n print('rf eval result: %s' % self.rf_stat)\n self.summary['svm_train'] = self.svm_train_stat\n self.summary['svm_eval'] = self.svm_eval_stat\n self.summary['rf_test'] = self.rf_stat\n return self.summary\n\n\ndef dgms2swdgm(dgms):\n swdgms = []\n for dgm in dgms:\n diag = dgm2diag(dgm)\n swdgms += [np.array(diag)]\n return swdgms\n\n\ndef train_svm(x, y, random_state=2, print_flag='off', nonlinear_flag=True,\n kernel_flag=False, kernel=np.zeros((1, 1))):\n \"\"\"\n :param x: feature\n :param y: label\n :param random_state: random seed for 10 cv\n :param print_flag: 'on'/'off' for debug\n :param nonlinear_flag: linear\n :param kernel_flag: True if use precomputed kernel\n :param kernel: precomputed kernel. No need to pass if use gaussian/linear kernel\n :return: best parameters\n \"\"\"\n print('print_flag is', print_flag)\n assert print_flag in ['on', 'off']\n tuned_params = [{'kernel': ['linear'], 'C': [0.01, 1, 10, 100, 1000]}]\n if nonlinear_flag:\n tuned_params += [{'kernel': ['rbf'], 'gamma': [0.01, 0.1, 1, 10, \n 100], 'C': [0.1, 1, 10, 100, 1000]}]\n for score in ['accuracy']:\n x_train, x_test, y_train, y_test, indices_train, indices_test = (\n train_test_split(x, y, range(len(y)), test_size=0.1,\n random_state=random_state))\n if not kernel_flag:\n clf = GridSearchCV(svm.SVC(), tuned_params, cv=10, scoring='%s' %\n score, n_jobs=-1, verbose=0)\n clf.fit(x_train, y_train)\n else:\n clf = GridSearchCV(svm.SVC(kernel='precomputed'), [{'C': [0.01,\n 0.1, 1, 10, 100, 1000]}], cv=10, scoring='%s' % score,\n n_jobs=-1, verbose=0)\n kernel_train = kernel[np.ix_(indices_train, indices_train)]\n clf.fit(kernel_train, y_train)\n assert np.array_equal(kernel[np.ix_(indices_train,\n indices_train)], kernel_train) == True\n kernel_test = kernel[np.ix_(indices_test, indices_train)]\n means = clf.cv_results_['mean_test_score']\n stds = clf.cv_results_['std_test_score']\n if print_flag == 'on':\n for mean, std, params in zip(means, stds, clf.cv_results_['params']\n ):\n print('%0.3f (+/-%0.03f) for %r' % (mean, std * 2, params))\n print('Detailed classification report:\\n')\n print('The model is trained on the full development set.')\n print('The scores are computed on the full evaluation set.\\n')\n if kernel_flag == False:\n y_true, y_pred = y_test, clf.predict(x_test)\n else:\n y_true, y_pred = y_test, clf.predict(kernel_test)\n print('Able to execute kernel grid search')\n print(accuracy_score(y_true, y_pred))\n print(classification_report(y_true, y_pred))\n print(confusion_matrix(y_true, y_pred))\n if 'kernel' not in clf.best_params_:\n clf.best_params_['kernel'] = 'precomputed'\n return {'param': clf.best_params_, 'score': round(clf.best_score_ *\n 1000) / 10.0}\n\n\ndef evaluate_tda_kernel(tda_kernel, Y, best_result_so_far, print_flag='off'):\n \"\"\"\n TODO: figure this out\n :param tda_kernel:\n :param Y:\n :param best_result_so_far:\n :param print_flag:\n :return:\n \"\"\"\n t1 = time.time()\n n = np.shape(tda_kernel)[0]\n grid_search_re = train_svm(np.zeros((n, 23)), Y, print_flag=print_flag,\n kernel=tda_kernel, kernel_flag=True, nonlinear_flag=False)\n if grid_search_re['score'] < best_result_so_far[0] - 4:\n print('Saved one unnecessary evaluation of bad kernel')\n return 0, 0, {}, 0\n cv_score = []\n for seed in range(5):\n clf = svm.SVC(kernel='precomputed', C=grid_search_re['param']['C'])\n k_fold = StratifiedKFold(n_splits=10, shuffle=True, random_state=seed)\n scores = cross_val_score(clf, tda_kernel, Y, cv=k_fold, scoring=\n 'accuracy', n_jobs=-1)\n cv_score.append(scores.mean())\n cv_score = np.array(cv_score)\n t2 = time.time()\n svm_time = precision_format(t2 - t1, 1)\n return precision_format(100 * cv_score.mean(), 1), precision_format(100 *\n cv_score.std(), 1), grid_search_re, svm_time\n\n\ndef evaluate_best_estimator(grid_search_re, x, y, print_flag='off', kernel=\n None, n_splits=10, n_cv=5):\n \"\"\"\n :param grid_search_re: grid search result(dict)\n :param x: feat\n :param y: label\n :param print_flag: on/off\n :param kernel:\n :param n_splits:\n :param n_cv: number of cv(5/10) for evaluation\n :return:\n \"\"\"\n if print_flag == 'on':\n print('Start evaluating the best estimator')\n param = grid_search_re['param']\n assert param['kernel'] in ['linear', 'rbf', 'precomputed']\n assert isinstance(param, dict)\n if len(param) == 3:\n clf = svm.SVC(kernel='rbf', C=param['C'], gamma=param['gamma'])\n elif len(param) == 2 and param['kernel'] == 'linear':\n clf = svm.SVC(kernel='linear', C=param['C'])\n else:\n clf = svm.SVC(kernel='precomputed', C=param['C'])\n t0 = time.time()\n cv_score, n_cv = [], n_cv\n for i in range(n_cv):\n k_fold = StratifiedKFold(n_splits=n_splits, shuffle=True,\n random_state=i)\n if param['kernel'] != 'precomputed':\n scores = cross_val_score(clf, x, y, cv=k_fold, scoring=\n 'accuracy', n_jobs=-1)\n else:\n scores = cross_val_score(clf, kernel, y, cv=k_fold, scoring=\n 'accuracy', n_jobs=-1)\n if print_flag == 'on':\n print(scores)\n cv_score.append(scores.mean())\n cv_score = np.array(cv_score)\n if print_flag == 'on':\n print(cv_score)\n print(\n \"\"\"Evaluation takes %0.3f. \nAfter averageing %0.1f cross validations, the mean accuracy is %0.3f, the std is %0.5f\"\"\"\n % (time.time() - t0, n_cv, cv_score.mean(), cv_score.std()))\n return cv_score.mean(), cv_score.std(), n_cv\n\n\nif __name__ == '__main__':\n import sklearn.datasets as datasets\n iris = datasets.load_iris()\n x = iris.data[:, :2]\n y = iris.target\n clf = classifier(x, y, method='svm', n_cv=1)\n clf.svm()\n print(clf.stat)\n", "<import token>\n\n\nclass classifier:\n\n def __init__(self, x, y, method='svm', n_cv=5, **kwargs):\n \"\"\"\n classify.\n All hyperparameters are taken care of\n :param x: feature of shape (n_data, dim)\n :param y: label (n_data,)\n :param method: svm, rf\n :param kwargs: pass precomputed kernel as 'kernel'\n \"\"\"\n self.x = x\n self.y = y\n self.rf_stat = None\n self.summary = {}\n self.n_cv = n_cv\n self.method = method\n if 'kernel' in kwargs.keys():\n self.kernel = kwargs['kernel']\n self.print_flag = kwargs.get('print_flag', 'off')\n self.stat = {'train': None, 'test': None}\n\n def rf(self):\n if self.method != 'bl0' and self.method != 'bl1':\n s1, s2, t = rfclf(self.x, self.y, m_f=40, multi_cv_flag=False)\n self.rf_stat = {'s1': s1, 's2': s2, 'time': t}\n else:\n self.rf_stat = {'s1': -1, 's2': -1, 'time': -1}\n\n def svm(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, print_flag=self.\n print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', n_splits=n_splits,\n n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def svm_kernel_(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, kernel_flag=True,\n kernel=self.kernel, print_flag=self.print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', kernel=self.kernel,\n n_splits=n_splits, n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def clf_summary(self, print_flag=False):\n if print_flag:\n if self.svm_train_stat is None:\n print('have not train svm yet')\n else:\n print('svm train result: %s' % self.svm_train_stat)\n print('svm eval result: %s' % self.svm_eval_stat)\n if self.rf_stat is None:\n print('have not train random forest yet')\n else:\n print('rf eval result: %s' % self.rf_stat)\n self.summary['svm_train'] = self.svm_train_stat\n self.summary['svm_eval'] = self.svm_eval_stat\n self.summary['rf_test'] = self.rf_stat\n return self.summary\n\n\ndef dgms2swdgm(dgms):\n swdgms = []\n for dgm in dgms:\n diag = dgm2diag(dgm)\n swdgms += [np.array(diag)]\n return swdgms\n\n\ndef train_svm(x, y, random_state=2, print_flag='off', nonlinear_flag=True,\n kernel_flag=False, kernel=np.zeros((1, 1))):\n \"\"\"\n :param x: feature\n :param y: label\n :param random_state: random seed for 10 cv\n :param print_flag: 'on'/'off' for debug\n :param nonlinear_flag: linear\n :param kernel_flag: True if use precomputed kernel\n :param kernel: precomputed kernel. No need to pass if use gaussian/linear kernel\n :return: best parameters\n \"\"\"\n print('print_flag is', print_flag)\n assert print_flag in ['on', 'off']\n tuned_params = [{'kernel': ['linear'], 'C': [0.01, 1, 10, 100, 1000]}]\n if nonlinear_flag:\n tuned_params += [{'kernel': ['rbf'], 'gamma': [0.01, 0.1, 1, 10, \n 100], 'C': [0.1, 1, 10, 100, 1000]}]\n for score in ['accuracy']:\n x_train, x_test, y_train, y_test, indices_train, indices_test = (\n train_test_split(x, y, range(len(y)), test_size=0.1,\n random_state=random_state))\n if not kernel_flag:\n clf = GridSearchCV(svm.SVC(), tuned_params, cv=10, scoring='%s' %\n score, n_jobs=-1, verbose=0)\n clf.fit(x_train, y_train)\n else:\n clf = GridSearchCV(svm.SVC(kernel='precomputed'), [{'C': [0.01,\n 0.1, 1, 10, 100, 1000]}], cv=10, scoring='%s' % score,\n n_jobs=-1, verbose=0)\n kernel_train = kernel[np.ix_(indices_train, indices_train)]\n clf.fit(kernel_train, y_train)\n assert np.array_equal(kernel[np.ix_(indices_train,\n indices_train)], kernel_train) == True\n kernel_test = kernel[np.ix_(indices_test, indices_train)]\n means = clf.cv_results_['mean_test_score']\n stds = clf.cv_results_['std_test_score']\n if print_flag == 'on':\n for mean, std, params in zip(means, stds, clf.cv_results_['params']\n ):\n print('%0.3f (+/-%0.03f) for %r' % (mean, std * 2, params))\n print('Detailed classification report:\\n')\n print('The model is trained on the full development set.')\n print('The scores are computed on the full evaluation set.\\n')\n if kernel_flag == False:\n y_true, y_pred = y_test, clf.predict(x_test)\n else:\n y_true, y_pred = y_test, clf.predict(kernel_test)\n print('Able to execute kernel grid search')\n print(accuracy_score(y_true, y_pred))\n print(classification_report(y_true, y_pred))\n print(confusion_matrix(y_true, y_pred))\n if 'kernel' not in clf.best_params_:\n clf.best_params_['kernel'] = 'precomputed'\n return {'param': clf.best_params_, 'score': round(clf.best_score_ *\n 1000) / 10.0}\n\n\ndef evaluate_tda_kernel(tda_kernel, Y, best_result_so_far, print_flag='off'):\n \"\"\"\n TODO: figure this out\n :param tda_kernel:\n :param Y:\n :param best_result_so_far:\n :param print_flag:\n :return:\n \"\"\"\n t1 = time.time()\n n = np.shape(tda_kernel)[0]\n grid_search_re = train_svm(np.zeros((n, 23)), Y, print_flag=print_flag,\n kernel=tda_kernel, kernel_flag=True, nonlinear_flag=False)\n if grid_search_re['score'] < best_result_so_far[0] - 4:\n print('Saved one unnecessary evaluation of bad kernel')\n return 0, 0, {}, 0\n cv_score = []\n for seed in range(5):\n clf = svm.SVC(kernel='precomputed', C=grid_search_re['param']['C'])\n k_fold = StratifiedKFold(n_splits=10, shuffle=True, random_state=seed)\n scores = cross_val_score(clf, tda_kernel, Y, cv=k_fold, scoring=\n 'accuracy', n_jobs=-1)\n cv_score.append(scores.mean())\n cv_score = np.array(cv_score)\n t2 = time.time()\n svm_time = precision_format(t2 - t1, 1)\n return precision_format(100 * cv_score.mean(), 1), precision_format(100 *\n cv_score.std(), 1), grid_search_re, svm_time\n\n\ndef evaluate_best_estimator(grid_search_re, x, y, print_flag='off', kernel=\n None, n_splits=10, n_cv=5):\n \"\"\"\n :param grid_search_re: grid search result(dict)\n :param x: feat\n :param y: label\n :param print_flag: on/off\n :param kernel:\n :param n_splits:\n :param n_cv: number of cv(5/10) for evaluation\n :return:\n \"\"\"\n if print_flag == 'on':\n print('Start evaluating the best estimator')\n param = grid_search_re['param']\n assert param['kernel'] in ['linear', 'rbf', 'precomputed']\n assert isinstance(param, dict)\n if len(param) == 3:\n clf = svm.SVC(kernel='rbf', C=param['C'], gamma=param['gamma'])\n elif len(param) == 2 and param['kernel'] == 'linear':\n clf = svm.SVC(kernel='linear', C=param['C'])\n else:\n clf = svm.SVC(kernel='precomputed', C=param['C'])\n t0 = time.time()\n cv_score, n_cv = [], n_cv\n for i in range(n_cv):\n k_fold = StratifiedKFold(n_splits=n_splits, shuffle=True,\n random_state=i)\n if param['kernel'] != 'precomputed':\n scores = cross_val_score(clf, x, y, cv=k_fold, scoring=\n 'accuracy', n_jobs=-1)\n else:\n scores = cross_val_score(clf, kernel, y, cv=k_fold, scoring=\n 'accuracy', n_jobs=-1)\n if print_flag == 'on':\n print(scores)\n cv_score.append(scores.mean())\n cv_score = np.array(cv_score)\n if print_flag == 'on':\n print(cv_score)\n print(\n \"\"\"Evaluation takes %0.3f. \nAfter averageing %0.1f cross validations, the mean accuracy is %0.3f, the std is %0.5f\"\"\"\n % (time.time() - t0, n_cv, cv_score.mean(), cv_score.std()))\n return cv_score.mean(), cv_score.std(), n_cv\n\n\nif __name__ == '__main__':\n import sklearn.datasets as datasets\n iris = datasets.load_iris()\n x = iris.data[:, :2]\n y = iris.target\n clf = classifier(x, y, method='svm', n_cv=1)\n clf.svm()\n print(clf.stat)\n", "<import token>\n\n\nclass classifier:\n\n def __init__(self, x, y, method='svm', n_cv=5, **kwargs):\n \"\"\"\n classify.\n All hyperparameters are taken care of\n :param x: feature of shape (n_data, dim)\n :param y: label (n_data,)\n :param method: svm, rf\n :param kwargs: pass precomputed kernel as 'kernel'\n \"\"\"\n self.x = x\n self.y = y\n self.rf_stat = None\n self.summary = {}\n self.n_cv = n_cv\n self.method = method\n if 'kernel' in kwargs.keys():\n self.kernel = kwargs['kernel']\n self.print_flag = kwargs.get('print_flag', 'off')\n self.stat = {'train': None, 'test': None}\n\n def rf(self):\n if self.method != 'bl0' and self.method != 'bl1':\n s1, s2, t = rfclf(self.x, self.y, m_f=40, multi_cv_flag=False)\n self.rf_stat = {'s1': s1, 's2': s2, 'time': t}\n else:\n self.rf_stat = {'s1': -1, 's2': -1, 'time': -1}\n\n def svm(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, print_flag=self.\n print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', n_splits=n_splits,\n n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def svm_kernel_(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, kernel_flag=True,\n kernel=self.kernel, print_flag=self.print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', kernel=self.kernel,\n n_splits=n_splits, n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def clf_summary(self, print_flag=False):\n if print_flag:\n if self.svm_train_stat is None:\n print('have not train svm yet')\n else:\n print('svm train result: %s' % self.svm_train_stat)\n print('svm eval result: %s' % self.svm_eval_stat)\n if self.rf_stat is None:\n print('have not train random forest yet')\n else:\n print('rf eval result: %s' % self.rf_stat)\n self.summary['svm_train'] = self.svm_train_stat\n self.summary['svm_eval'] = self.svm_eval_stat\n self.summary['rf_test'] = self.rf_stat\n return self.summary\n\n\ndef dgms2swdgm(dgms):\n swdgms = []\n for dgm in dgms:\n diag = dgm2diag(dgm)\n swdgms += [np.array(diag)]\n return swdgms\n\n\ndef train_svm(x, y, random_state=2, print_flag='off', nonlinear_flag=True,\n kernel_flag=False, kernel=np.zeros((1, 1))):\n \"\"\"\n :param x: feature\n :param y: label\n :param random_state: random seed for 10 cv\n :param print_flag: 'on'/'off' for debug\n :param nonlinear_flag: linear\n :param kernel_flag: True if use precomputed kernel\n :param kernel: precomputed kernel. No need to pass if use gaussian/linear kernel\n :return: best parameters\n \"\"\"\n print('print_flag is', print_flag)\n assert print_flag in ['on', 'off']\n tuned_params = [{'kernel': ['linear'], 'C': [0.01, 1, 10, 100, 1000]}]\n if nonlinear_flag:\n tuned_params += [{'kernel': ['rbf'], 'gamma': [0.01, 0.1, 1, 10, \n 100], 'C': [0.1, 1, 10, 100, 1000]}]\n for score in ['accuracy']:\n x_train, x_test, y_train, y_test, indices_train, indices_test = (\n train_test_split(x, y, range(len(y)), test_size=0.1,\n random_state=random_state))\n if not kernel_flag:\n clf = GridSearchCV(svm.SVC(), tuned_params, cv=10, scoring='%s' %\n score, n_jobs=-1, verbose=0)\n clf.fit(x_train, y_train)\n else:\n clf = GridSearchCV(svm.SVC(kernel='precomputed'), [{'C': [0.01,\n 0.1, 1, 10, 100, 1000]}], cv=10, scoring='%s' % score,\n n_jobs=-1, verbose=0)\n kernel_train = kernel[np.ix_(indices_train, indices_train)]\n clf.fit(kernel_train, y_train)\n assert np.array_equal(kernel[np.ix_(indices_train,\n indices_train)], kernel_train) == True\n kernel_test = kernel[np.ix_(indices_test, indices_train)]\n means = clf.cv_results_['mean_test_score']\n stds = clf.cv_results_['std_test_score']\n if print_flag == 'on':\n for mean, std, params in zip(means, stds, clf.cv_results_['params']\n ):\n print('%0.3f (+/-%0.03f) for %r' % (mean, std * 2, params))\n print('Detailed classification report:\\n')\n print('The model is trained on the full development set.')\n print('The scores are computed on the full evaluation set.\\n')\n if kernel_flag == False:\n y_true, y_pred = y_test, clf.predict(x_test)\n else:\n y_true, y_pred = y_test, clf.predict(kernel_test)\n print('Able to execute kernel grid search')\n print(accuracy_score(y_true, y_pred))\n print(classification_report(y_true, y_pred))\n print(confusion_matrix(y_true, y_pred))\n if 'kernel' not in clf.best_params_:\n clf.best_params_['kernel'] = 'precomputed'\n return {'param': clf.best_params_, 'score': round(clf.best_score_ *\n 1000) / 10.0}\n\n\ndef evaluate_tda_kernel(tda_kernel, Y, best_result_so_far, print_flag='off'):\n \"\"\"\n TODO: figure this out\n :param tda_kernel:\n :param Y:\n :param best_result_so_far:\n :param print_flag:\n :return:\n \"\"\"\n t1 = time.time()\n n = np.shape(tda_kernel)[0]\n grid_search_re = train_svm(np.zeros((n, 23)), Y, print_flag=print_flag,\n kernel=tda_kernel, kernel_flag=True, nonlinear_flag=False)\n if grid_search_re['score'] < best_result_so_far[0] - 4:\n print('Saved one unnecessary evaluation of bad kernel')\n return 0, 0, {}, 0\n cv_score = []\n for seed in range(5):\n clf = svm.SVC(kernel='precomputed', C=grid_search_re['param']['C'])\n k_fold = StratifiedKFold(n_splits=10, shuffle=True, random_state=seed)\n scores = cross_val_score(clf, tda_kernel, Y, cv=k_fold, scoring=\n 'accuracy', n_jobs=-1)\n cv_score.append(scores.mean())\n cv_score = np.array(cv_score)\n t2 = time.time()\n svm_time = precision_format(t2 - t1, 1)\n return precision_format(100 * cv_score.mean(), 1), precision_format(100 *\n cv_score.std(), 1), grid_search_re, svm_time\n\n\ndef evaluate_best_estimator(grid_search_re, x, y, print_flag='off', kernel=\n None, n_splits=10, n_cv=5):\n \"\"\"\n :param grid_search_re: grid search result(dict)\n :param x: feat\n :param y: label\n :param print_flag: on/off\n :param kernel:\n :param n_splits:\n :param n_cv: number of cv(5/10) for evaluation\n :return:\n \"\"\"\n if print_flag == 'on':\n print('Start evaluating the best estimator')\n param = grid_search_re['param']\n assert param['kernel'] in ['linear', 'rbf', 'precomputed']\n assert isinstance(param, dict)\n if len(param) == 3:\n clf = svm.SVC(kernel='rbf', C=param['C'], gamma=param['gamma'])\n elif len(param) == 2 and param['kernel'] == 'linear':\n clf = svm.SVC(kernel='linear', C=param['C'])\n else:\n clf = svm.SVC(kernel='precomputed', C=param['C'])\n t0 = time.time()\n cv_score, n_cv = [], n_cv\n for i in range(n_cv):\n k_fold = StratifiedKFold(n_splits=n_splits, shuffle=True,\n random_state=i)\n if param['kernel'] != 'precomputed':\n scores = cross_val_score(clf, x, y, cv=k_fold, scoring=\n 'accuracy', n_jobs=-1)\n else:\n scores = cross_val_score(clf, kernel, y, cv=k_fold, scoring=\n 'accuracy', n_jobs=-1)\n if print_flag == 'on':\n print(scores)\n cv_score.append(scores.mean())\n cv_score = np.array(cv_score)\n if print_flag == 'on':\n print(cv_score)\n print(\n \"\"\"Evaluation takes %0.3f. \nAfter averageing %0.1f cross validations, the mean accuracy is %0.3f, the std is %0.5f\"\"\"\n % (time.time() - t0, n_cv, cv_score.mean(), cv_score.std()))\n return cv_score.mean(), cv_score.std(), n_cv\n\n\n<code token>\n", "<import token>\n\n\nclass classifier:\n\n def __init__(self, x, y, method='svm', n_cv=5, **kwargs):\n \"\"\"\n classify.\n All hyperparameters are taken care of\n :param x: feature of shape (n_data, dim)\n :param y: label (n_data,)\n :param method: svm, rf\n :param kwargs: pass precomputed kernel as 'kernel'\n \"\"\"\n self.x = x\n self.y = y\n self.rf_stat = None\n self.summary = {}\n self.n_cv = n_cv\n self.method = method\n if 'kernel' in kwargs.keys():\n self.kernel = kwargs['kernel']\n self.print_flag = kwargs.get('print_flag', 'off')\n self.stat = {'train': None, 'test': None}\n\n def rf(self):\n if self.method != 'bl0' and self.method != 'bl1':\n s1, s2, t = rfclf(self.x, self.y, m_f=40, multi_cv_flag=False)\n self.rf_stat = {'s1': s1, 's2': s2, 'time': t}\n else:\n self.rf_stat = {'s1': -1, 's2': -1, 'time': -1}\n\n def svm(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, print_flag=self.\n print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', n_splits=n_splits,\n n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def svm_kernel_(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, kernel_flag=True,\n kernel=self.kernel, print_flag=self.print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', kernel=self.kernel,\n n_splits=n_splits, n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def clf_summary(self, print_flag=False):\n if print_flag:\n if self.svm_train_stat is None:\n print('have not train svm yet')\n else:\n print('svm train result: %s' % self.svm_train_stat)\n print('svm eval result: %s' % self.svm_eval_stat)\n if self.rf_stat is None:\n print('have not train random forest yet')\n else:\n print('rf eval result: %s' % self.rf_stat)\n self.summary['svm_train'] = self.svm_train_stat\n self.summary['svm_eval'] = self.svm_eval_stat\n self.summary['rf_test'] = self.rf_stat\n return self.summary\n\n\ndef dgms2swdgm(dgms):\n swdgms = []\n for dgm in dgms:\n diag = dgm2diag(dgm)\n swdgms += [np.array(diag)]\n return swdgms\n\n\ndef train_svm(x, y, random_state=2, print_flag='off', nonlinear_flag=True,\n kernel_flag=False, kernel=np.zeros((1, 1))):\n \"\"\"\n :param x: feature\n :param y: label\n :param random_state: random seed for 10 cv\n :param print_flag: 'on'/'off' for debug\n :param nonlinear_flag: linear\n :param kernel_flag: True if use precomputed kernel\n :param kernel: precomputed kernel. No need to pass if use gaussian/linear kernel\n :return: best parameters\n \"\"\"\n print('print_flag is', print_flag)\n assert print_flag in ['on', 'off']\n tuned_params = [{'kernel': ['linear'], 'C': [0.01, 1, 10, 100, 1000]}]\n if nonlinear_flag:\n tuned_params += [{'kernel': ['rbf'], 'gamma': [0.01, 0.1, 1, 10, \n 100], 'C': [0.1, 1, 10, 100, 1000]}]\n for score in ['accuracy']:\n x_train, x_test, y_train, y_test, indices_train, indices_test = (\n train_test_split(x, y, range(len(y)), test_size=0.1,\n random_state=random_state))\n if not kernel_flag:\n clf = GridSearchCV(svm.SVC(), tuned_params, cv=10, scoring='%s' %\n score, n_jobs=-1, verbose=0)\n clf.fit(x_train, y_train)\n else:\n clf = GridSearchCV(svm.SVC(kernel='precomputed'), [{'C': [0.01,\n 0.1, 1, 10, 100, 1000]}], cv=10, scoring='%s' % score,\n n_jobs=-1, verbose=0)\n kernel_train = kernel[np.ix_(indices_train, indices_train)]\n clf.fit(kernel_train, y_train)\n assert np.array_equal(kernel[np.ix_(indices_train,\n indices_train)], kernel_train) == True\n kernel_test = kernel[np.ix_(indices_test, indices_train)]\n means = clf.cv_results_['mean_test_score']\n stds = clf.cv_results_['std_test_score']\n if print_flag == 'on':\n for mean, std, params in zip(means, stds, clf.cv_results_['params']\n ):\n print('%0.3f (+/-%0.03f) for %r' % (mean, std * 2, params))\n print('Detailed classification report:\\n')\n print('The model is trained on the full development set.')\n print('The scores are computed on the full evaluation set.\\n')\n if kernel_flag == False:\n y_true, y_pred = y_test, clf.predict(x_test)\n else:\n y_true, y_pred = y_test, clf.predict(kernel_test)\n print('Able to execute kernel grid search')\n print(accuracy_score(y_true, y_pred))\n print(classification_report(y_true, y_pred))\n print(confusion_matrix(y_true, y_pred))\n if 'kernel' not in clf.best_params_:\n clf.best_params_['kernel'] = 'precomputed'\n return {'param': clf.best_params_, 'score': round(clf.best_score_ *\n 1000) / 10.0}\n\n\ndef evaluate_tda_kernel(tda_kernel, Y, best_result_so_far, print_flag='off'):\n \"\"\"\n TODO: figure this out\n :param tda_kernel:\n :param Y:\n :param best_result_so_far:\n :param print_flag:\n :return:\n \"\"\"\n t1 = time.time()\n n = np.shape(tda_kernel)[0]\n grid_search_re = train_svm(np.zeros((n, 23)), Y, print_flag=print_flag,\n kernel=tda_kernel, kernel_flag=True, nonlinear_flag=False)\n if grid_search_re['score'] < best_result_so_far[0] - 4:\n print('Saved one unnecessary evaluation of bad kernel')\n return 0, 0, {}, 0\n cv_score = []\n for seed in range(5):\n clf = svm.SVC(kernel='precomputed', C=grid_search_re['param']['C'])\n k_fold = StratifiedKFold(n_splits=10, shuffle=True, random_state=seed)\n scores = cross_val_score(clf, tda_kernel, Y, cv=k_fold, scoring=\n 'accuracy', n_jobs=-1)\n cv_score.append(scores.mean())\n cv_score = np.array(cv_score)\n t2 = time.time()\n svm_time = precision_format(t2 - t1, 1)\n return precision_format(100 * cv_score.mean(), 1), precision_format(100 *\n cv_score.std(), 1), grid_search_re, svm_time\n\n\n<function token>\n<code token>\n", "<import token>\n\n\nclass classifier:\n\n def __init__(self, x, y, method='svm', n_cv=5, **kwargs):\n \"\"\"\n classify.\n All hyperparameters are taken care of\n :param x: feature of shape (n_data, dim)\n :param y: label (n_data,)\n :param method: svm, rf\n :param kwargs: pass precomputed kernel as 'kernel'\n \"\"\"\n self.x = x\n self.y = y\n self.rf_stat = None\n self.summary = {}\n self.n_cv = n_cv\n self.method = method\n if 'kernel' in kwargs.keys():\n self.kernel = kwargs['kernel']\n self.print_flag = kwargs.get('print_flag', 'off')\n self.stat = {'train': None, 'test': None}\n\n def rf(self):\n if self.method != 'bl0' and self.method != 'bl1':\n s1, s2, t = rfclf(self.x, self.y, m_f=40, multi_cv_flag=False)\n self.rf_stat = {'s1': s1, 's2': s2, 'time': t}\n else:\n self.rf_stat = {'s1': -1, 's2': -1, 'time': -1}\n\n def svm(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, print_flag=self.\n print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', n_splits=n_splits,\n n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def svm_kernel_(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, kernel_flag=True,\n kernel=self.kernel, print_flag=self.print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', kernel=self.kernel,\n n_splits=n_splits, n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def clf_summary(self, print_flag=False):\n if print_flag:\n if self.svm_train_stat is None:\n print('have not train svm yet')\n else:\n print('svm train result: %s' % self.svm_train_stat)\n print('svm eval result: %s' % self.svm_eval_stat)\n if self.rf_stat is None:\n print('have not train random forest yet')\n else:\n print('rf eval result: %s' % self.rf_stat)\n self.summary['svm_train'] = self.svm_train_stat\n self.summary['svm_eval'] = self.svm_eval_stat\n self.summary['rf_test'] = self.rf_stat\n return self.summary\n\n\ndef dgms2swdgm(dgms):\n swdgms = []\n for dgm in dgms:\n diag = dgm2diag(dgm)\n swdgms += [np.array(diag)]\n return swdgms\n\n\n<function token>\n\n\ndef evaluate_tda_kernel(tda_kernel, Y, best_result_so_far, print_flag='off'):\n \"\"\"\n TODO: figure this out\n :param tda_kernel:\n :param Y:\n :param best_result_so_far:\n :param print_flag:\n :return:\n \"\"\"\n t1 = time.time()\n n = np.shape(tda_kernel)[0]\n grid_search_re = train_svm(np.zeros((n, 23)), Y, print_flag=print_flag,\n kernel=tda_kernel, kernel_flag=True, nonlinear_flag=False)\n if grid_search_re['score'] < best_result_so_far[0] - 4:\n print('Saved one unnecessary evaluation of bad kernel')\n return 0, 0, {}, 0\n cv_score = []\n for seed in range(5):\n clf = svm.SVC(kernel='precomputed', C=grid_search_re['param']['C'])\n k_fold = StratifiedKFold(n_splits=10, shuffle=True, random_state=seed)\n scores = cross_val_score(clf, tda_kernel, Y, cv=k_fold, scoring=\n 'accuracy', n_jobs=-1)\n cv_score.append(scores.mean())\n cv_score = np.array(cv_score)\n t2 = time.time()\n svm_time = precision_format(t2 - t1, 1)\n return precision_format(100 * cv_score.mean(), 1), precision_format(100 *\n cv_score.std(), 1), grid_search_re, svm_time\n\n\n<function token>\n<code token>\n", "<import token>\n\n\nclass classifier:\n\n def __init__(self, x, y, method='svm', n_cv=5, **kwargs):\n \"\"\"\n classify.\n All hyperparameters are taken care of\n :param x: feature of shape (n_data, dim)\n :param y: label (n_data,)\n :param method: svm, rf\n :param kwargs: pass precomputed kernel as 'kernel'\n \"\"\"\n self.x = x\n self.y = y\n self.rf_stat = None\n self.summary = {}\n self.n_cv = n_cv\n self.method = method\n if 'kernel' in kwargs.keys():\n self.kernel = kwargs['kernel']\n self.print_flag = kwargs.get('print_flag', 'off')\n self.stat = {'train': None, 'test': None}\n\n def rf(self):\n if self.method != 'bl0' and self.method != 'bl1':\n s1, s2, t = rfclf(self.x, self.y, m_f=40, multi_cv_flag=False)\n self.rf_stat = {'s1': s1, 's2': s2, 'time': t}\n else:\n self.rf_stat = {'s1': -1, 's2': -1, 'time': -1}\n\n def svm(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, print_flag=self.\n print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', n_splits=n_splits,\n n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def svm_kernel_(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, kernel_flag=True,\n kernel=self.kernel, print_flag=self.print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', kernel=self.kernel,\n n_splits=n_splits, n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def clf_summary(self, print_flag=False):\n if print_flag:\n if self.svm_train_stat is None:\n print('have not train svm yet')\n else:\n print('svm train result: %s' % self.svm_train_stat)\n print('svm eval result: %s' % self.svm_eval_stat)\n if self.rf_stat is None:\n print('have not train random forest yet')\n else:\n print('rf eval result: %s' % self.rf_stat)\n self.summary['svm_train'] = self.svm_train_stat\n self.summary['svm_eval'] = self.svm_eval_stat\n self.summary['rf_test'] = self.rf_stat\n return self.summary\n\n\ndef dgms2swdgm(dgms):\n swdgms = []\n for dgm in dgms:\n diag = dgm2diag(dgm)\n swdgms += [np.array(diag)]\n return swdgms\n\n\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n\n\nclass classifier:\n\n def __init__(self, x, y, method='svm', n_cv=5, **kwargs):\n \"\"\"\n classify.\n All hyperparameters are taken care of\n :param x: feature of shape (n_data, dim)\n :param y: label (n_data,)\n :param method: svm, rf\n :param kwargs: pass precomputed kernel as 'kernel'\n \"\"\"\n self.x = x\n self.y = y\n self.rf_stat = None\n self.summary = {}\n self.n_cv = n_cv\n self.method = method\n if 'kernel' in kwargs.keys():\n self.kernel = kwargs['kernel']\n self.print_flag = kwargs.get('print_flag', 'off')\n self.stat = {'train': None, 'test': None}\n\n def rf(self):\n if self.method != 'bl0' and self.method != 'bl1':\n s1, s2, t = rfclf(self.x, self.y, m_f=40, multi_cv_flag=False)\n self.rf_stat = {'s1': s1, 's2': s2, 'time': t}\n else:\n self.rf_stat = {'s1': -1, 's2': -1, 'time': -1}\n\n def svm(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, print_flag=self.\n print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', n_splits=n_splits,\n n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def svm_kernel_(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, kernel_flag=True,\n kernel=self.kernel, print_flag=self.print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', kernel=self.kernel,\n n_splits=n_splits, n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n\n def clf_summary(self, print_flag=False):\n if print_flag:\n if self.svm_train_stat is None:\n print('have not train svm yet')\n else:\n print('svm train result: %s' % self.svm_train_stat)\n print('svm eval result: %s' % self.svm_eval_stat)\n if self.rf_stat is None:\n print('have not train random forest yet')\n else:\n print('rf eval result: %s' % self.rf_stat)\n self.summary['svm_train'] = self.svm_train_stat\n self.summary['svm_eval'] = self.svm_eval_stat\n self.summary['rf_test'] = self.rf_stat\n return self.summary\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n\n\nclass classifier:\n\n def __init__(self, x, y, method='svm', n_cv=5, **kwargs):\n \"\"\"\n classify.\n All hyperparameters are taken care of\n :param x: feature of shape (n_data, dim)\n :param y: label (n_data,)\n :param method: svm, rf\n :param kwargs: pass precomputed kernel as 'kernel'\n \"\"\"\n self.x = x\n self.y = y\n self.rf_stat = None\n self.summary = {}\n self.n_cv = n_cv\n self.method = method\n if 'kernel' in kwargs.keys():\n self.kernel = kwargs['kernel']\n self.print_flag = kwargs.get('print_flag', 'off')\n self.stat = {'train': None, 'test': None}\n\n def rf(self):\n if self.method != 'bl0' and self.method != 'bl1':\n s1, s2, t = rfclf(self.x, self.y, m_f=40, multi_cv_flag=False)\n self.rf_stat = {'s1': s1, 's2': s2, 'time': t}\n else:\n self.rf_stat = {'s1': -1, 's2': -1, 'time': -1}\n\n def svm(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, print_flag=self.\n print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', n_splits=n_splits,\n n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n <function token>\n\n def clf_summary(self, print_flag=False):\n if print_flag:\n if self.svm_train_stat is None:\n print('have not train svm yet')\n else:\n print('svm train result: %s' % self.svm_train_stat)\n print('svm eval result: %s' % self.svm_eval_stat)\n if self.rf_stat is None:\n print('have not train random forest yet')\n else:\n print('rf eval result: %s' % self.rf_stat)\n self.summary['svm_train'] = self.svm_train_stat\n self.summary['svm_eval'] = self.svm_eval_stat\n self.summary['rf_test'] = self.rf_stat\n return self.summary\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n\n\nclass classifier:\n <function token>\n\n def rf(self):\n if self.method != 'bl0' and self.method != 'bl1':\n s1, s2, t = rfclf(self.x, self.y, m_f=40, multi_cv_flag=False)\n self.rf_stat = {'s1': s1, 's2': s2, 'time': t}\n else:\n self.rf_stat = {'s1': -1, 's2': -1, 'time': -1}\n\n def svm(self, n_splits=10):\n self.stat['train'] = train_svm(self.x, self.y, print_flag=self.\n print_flag)\n eval_mean, eval_std, n_cv = evaluate_best_estimator(self.stat[\n 'train'], self.x, self.y, print_flag='off', n_splits=n_splits,\n n_cv=self.n_cv)\n self.stat['test'] = {'mean': eval_mean, 'std': eval_std, 'n_cv': n_cv}\n <function token>\n\n def clf_summary(self, print_flag=False):\n if print_flag:\n if self.svm_train_stat is None:\n print('have not train svm yet')\n else:\n print('svm train result: %s' % self.svm_train_stat)\n print('svm eval result: %s' % self.svm_eval_stat)\n if self.rf_stat is None:\n print('have not train random forest yet')\n else:\n print('rf eval result: %s' % self.rf_stat)\n self.summary['svm_train'] = self.svm_train_stat\n self.summary['svm_eval'] = self.svm_eval_stat\n self.summary['rf_test'] = self.rf_stat\n return self.summary\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n\n\nclass classifier:\n <function token>\n\n def rf(self):\n if self.method != 'bl0' and self.method != 'bl1':\n s1, s2, t = rfclf(self.x, self.y, m_f=40, multi_cv_flag=False)\n self.rf_stat = {'s1': s1, 's2': s2, 'time': t}\n else:\n self.rf_stat = {'s1': -1, 's2': -1, 'time': -1}\n <function token>\n <function token>\n\n def clf_summary(self, print_flag=False):\n if print_flag:\n if self.svm_train_stat is None:\n print('have not train svm yet')\n else:\n print('svm train result: %s' % self.svm_train_stat)\n print('svm eval result: %s' % self.svm_eval_stat)\n if self.rf_stat is None:\n print('have not train random forest yet')\n else:\n print('rf eval result: %s' % self.rf_stat)\n self.summary['svm_train'] = self.svm_train_stat\n self.summary['svm_eval'] = self.svm_eval_stat\n self.summary['rf_test'] = self.rf_stat\n return self.summary\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n\n\nclass classifier:\n <function token>\n\n def rf(self):\n if self.method != 'bl0' and self.method != 'bl1':\n s1, s2, t = rfclf(self.x, self.y, m_f=40, multi_cv_flag=False)\n self.rf_stat = {'s1': s1, 's2': s2, 'time': t}\n else:\n self.rf_stat = {'s1': -1, 's2': -1, 'time': -1}\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n\n\nclass classifier:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n" ]
false
98,946
7a1246d47e5444a4073a4a37220c12cbeaa6883b
import sys _DECREASE_FACTOR = 4 _DECREASE_VAL = 0.5 _INCREASE_VAL = 2 class Queue: def __init__(self, size=20): self.capacity = size self.arr = [0] * size self.last = 0 self.first = 0 self.size = 0 def resize(self, n_times): old_capacity = self.capacity self.capacity = int(self.capacity * n_times) new_arr = [0] * self.capacity for i in range(self.size): new_arr[i] = self.arr[(self.first + i) % old_capacity] self.arr = new_arr self.first = 0 self.last = self.size def push(self, data): self.arr[self.last] = data self.last = (self.last + 1) % self.capacity self.size += 1 if self.last == self.first: self.resize(n_times=_INCREASE_VAL) def pop(self): pop_val = self.arr[self.first] self.first = (self.first + 1) % self.capacity self.size -= 1 if self.size < self.capacity / _DECREASE_FACTOR: self.resize(n_times=_DECREASE_VAL) return pop_val queue = Queue(2) _INPUT_LINES = sys.stdin.read().splitlines() for line in _INPUT_LINES[1:]: if line[0] == '+': queue.push(int(line[2:])) elif line[0] == '-': print(queue.pop()) else: raise ValueError('unknown operation') # # 4 # + 1 # + 10 # - # -
[ "import sys\n\n_DECREASE_FACTOR = 4\n_DECREASE_VAL = 0.5\n_INCREASE_VAL = 2\n\n\nclass Queue:\n def __init__(self, size=20):\n self.capacity = size\n self.arr = [0] * size\n self.last = 0\n self.first = 0\n self.size = 0\n\n def resize(self, n_times):\n old_capacity = self.capacity\n self.capacity = int(self.capacity * n_times)\n new_arr = [0] * self.capacity\n for i in range(self.size):\n new_arr[i] = self.arr[(self.first + i) % old_capacity]\n self.arr = new_arr\n self.first = 0\n self.last = self.size\n\n def push(self, data):\n self.arr[self.last] = data\n self.last = (self.last + 1) % self.capacity\n self.size += 1\n\n if self.last == self.first:\n self.resize(n_times=_INCREASE_VAL)\n\n def pop(self):\n pop_val = self.arr[self.first]\n self.first = (self.first + 1) % self.capacity\n self.size -= 1\n\n if self.size < self.capacity / _DECREASE_FACTOR:\n self.resize(n_times=_DECREASE_VAL)\n\n return pop_val\n\n\nqueue = Queue(2)\n_INPUT_LINES = sys.stdin.read().splitlines()\nfor line in _INPUT_LINES[1:]:\n if line[0] == '+':\n queue.push(int(line[2:]))\n elif line[0] == '-':\n print(queue.pop())\n else:\n raise ValueError('unknown operation')\n\n#\n# 4\n# + 1\n# + 10\n# -\n# -\n", "import sys\n_DECREASE_FACTOR = 4\n_DECREASE_VAL = 0.5\n_INCREASE_VAL = 2\n\n\nclass Queue:\n\n def __init__(self, size=20):\n self.capacity = size\n self.arr = [0] * size\n self.last = 0\n self.first = 0\n self.size = 0\n\n def resize(self, n_times):\n old_capacity = self.capacity\n self.capacity = int(self.capacity * n_times)\n new_arr = [0] * self.capacity\n for i in range(self.size):\n new_arr[i] = self.arr[(self.first + i) % old_capacity]\n self.arr = new_arr\n self.first = 0\n self.last = self.size\n\n def push(self, data):\n self.arr[self.last] = data\n self.last = (self.last + 1) % self.capacity\n self.size += 1\n if self.last == self.first:\n self.resize(n_times=_INCREASE_VAL)\n\n def pop(self):\n pop_val = self.arr[self.first]\n self.first = (self.first + 1) % self.capacity\n self.size -= 1\n if self.size < self.capacity / _DECREASE_FACTOR:\n self.resize(n_times=_DECREASE_VAL)\n return pop_val\n\n\nqueue = Queue(2)\n_INPUT_LINES = sys.stdin.read().splitlines()\nfor line in _INPUT_LINES[1:]:\n if line[0] == '+':\n queue.push(int(line[2:]))\n elif line[0] == '-':\n print(queue.pop())\n else:\n raise ValueError('unknown operation')\n", "<import token>\n_DECREASE_FACTOR = 4\n_DECREASE_VAL = 0.5\n_INCREASE_VAL = 2\n\n\nclass Queue:\n\n def __init__(self, size=20):\n self.capacity = size\n self.arr = [0] * size\n self.last = 0\n self.first = 0\n self.size = 0\n\n def resize(self, n_times):\n old_capacity = self.capacity\n self.capacity = int(self.capacity * n_times)\n new_arr = [0] * self.capacity\n for i in range(self.size):\n new_arr[i] = self.arr[(self.first + i) % old_capacity]\n self.arr = new_arr\n self.first = 0\n self.last = self.size\n\n def push(self, data):\n self.arr[self.last] = data\n self.last = (self.last + 1) % self.capacity\n self.size += 1\n if self.last == self.first:\n self.resize(n_times=_INCREASE_VAL)\n\n def pop(self):\n pop_val = self.arr[self.first]\n self.first = (self.first + 1) % self.capacity\n self.size -= 1\n if self.size < self.capacity / _DECREASE_FACTOR:\n self.resize(n_times=_DECREASE_VAL)\n return pop_val\n\n\nqueue = Queue(2)\n_INPUT_LINES = sys.stdin.read().splitlines()\nfor line in _INPUT_LINES[1:]:\n if line[0] == '+':\n queue.push(int(line[2:]))\n elif line[0] == '-':\n print(queue.pop())\n else:\n raise ValueError('unknown operation')\n", "<import token>\n<assignment token>\n\n\nclass Queue:\n\n def __init__(self, size=20):\n self.capacity = size\n self.arr = [0] * size\n self.last = 0\n self.first = 0\n self.size = 0\n\n def resize(self, n_times):\n old_capacity = self.capacity\n self.capacity = int(self.capacity * n_times)\n new_arr = [0] * self.capacity\n for i in range(self.size):\n new_arr[i] = self.arr[(self.first + i) % old_capacity]\n self.arr = new_arr\n self.first = 0\n self.last = self.size\n\n def push(self, data):\n self.arr[self.last] = data\n self.last = (self.last + 1) % self.capacity\n self.size += 1\n if self.last == self.first:\n self.resize(n_times=_INCREASE_VAL)\n\n def pop(self):\n pop_val = self.arr[self.first]\n self.first = (self.first + 1) % self.capacity\n self.size -= 1\n if self.size < self.capacity / _DECREASE_FACTOR:\n self.resize(n_times=_DECREASE_VAL)\n return pop_val\n\n\n<assignment token>\nfor line in _INPUT_LINES[1:]:\n if line[0] == '+':\n queue.push(int(line[2:]))\n elif line[0] == '-':\n print(queue.pop())\n else:\n raise ValueError('unknown operation')\n", "<import token>\n<assignment token>\n\n\nclass Queue:\n\n def __init__(self, size=20):\n self.capacity = size\n self.arr = [0] * size\n self.last = 0\n self.first = 0\n self.size = 0\n\n def resize(self, n_times):\n old_capacity = self.capacity\n self.capacity = int(self.capacity * n_times)\n new_arr = [0] * self.capacity\n for i in range(self.size):\n new_arr[i] = self.arr[(self.first + i) % old_capacity]\n self.arr = new_arr\n self.first = 0\n self.last = self.size\n\n def push(self, data):\n self.arr[self.last] = data\n self.last = (self.last + 1) % self.capacity\n self.size += 1\n if self.last == self.first:\n self.resize(n_times=_INCREASE_VAL)\n\n def pop(self):\n pop_val = self.arr[self.first]\n self.first = (self.first + 1) % self.capacity\n self.size -= 1\n if self.size < self.capacity / _DECREASE_FACTOR:\n self.resize(n_times=_DECREASE_VAL)\n return pop_val\n\n\n<assignment token>\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass Queue:\n\n def __init__(self, size=20):\n self.capacity = size\n self.arr = [0] * size\n self.last = 0\n self.first = 0\n self.size = 0\n\n def resize(self, n_times):\n old_capacity = self.capacity\n self.capacity = int(self.capacity * n_times)\n new_arr = [0] * self.capacity\n for i in range(self.size):\n new_arr[i] = self.arr[(self.first + i) % old_capacity]\n self.arr = new_arr\n self.first = 0\n self.last = self.size\n\n def push(self, data):\n self.arr[self.last] = data\n self.last = (self.last + 1) % self.capacity\n self.size += 1\n if self.last == self.first:\n self.resize(n_times=_INCREASE_VAL)\n <function token>\n\n\n<assignment token>\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass Queue:\n\n def __init__(self, size=20):\n self.capacity = size\n self.arr = [0] * size\n self.last = 0\n self.first = 0\n self.size = 0\n\n def resize(self, n_times):\n old_capacity = self.capacity\n self.capacity = int(self.capacity * n_times)\n new_arr = [0] * self.capacity\n for i in range(self.size):\n new_arr[i] = self.arr[(self.first + i) % old_capacity]\n self.arr = new_arr\n self.first = 0\n self.last = self.size\n <function token>\n <function token>\n\n\n<assignment token>\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass Queue:\n <function token>\n\n def resize(self, n_times):\n old_capacity = self.capacity\n self.capacity = int(self.capacity * n_times)\n new_arr = [0] * self.capacity\n for i in range(self.size):\n new_arr[i] = self.arr[(self.first + i) % old_capacity]\n self.arr = new_arr\n self.first = 0\n self.last = self.size\n <function token>\n <function token>\n\n\n<assignment token>\n<code token>\n", "<import token>\n<assignment token>\n\n\nclass Queue:\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<assignment token>\n<code token>\n", "<import token>\n<assignment token>\n<class token>\n<assignment token>\n<code token>\n" ]
false
98,947
7e77b2a4cab14a3b3ba08ab49af5a054f77f4672
# Generated by Django 3.0.7 on 2020-10-17 18:14 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('content_collection', '0009_picturesforcarousel_required_group_to_view'), ] operations = [ migrations.AlterModelOptions( name='picturesforcarousel', options={'ordering': ('-pk', 'caption')}, ), migrations.RemoveField( model_name='picturesforcarousel', name='required_group_to_view', ), ]
[ "# Generated by Django 3.0.7 on 2020-10-17 18:14\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('content_collection', '0009_picturesforcarousel_required_group_to_view'),\n ]\n\n operations = [\n migrations.AlterModelOptions(\n name='picturesforcarousel',\n options={'ordering': ('-pk', 'caption')},\n ),\n migrations.RemoveField(\n model_name='picturesforcarousel',\n name='required_group_to_view',\n ),\n ]\n", "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('content_collection',\n '0009_picturesforcarousel_required_group_to_view')]\n operations = [migrations.AlterModelOptions(name='picturesforcarousel',\n options={'ordering': ('-pk', 'caption')}), migrations.RemoveField(\n model_name='picturesforcarousel', name='required_group_to_view')]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('content_collection',\n '0009_picturesforcarousel_required_group_to_view')]\n operations = [migrations.AlterModelOptions(name='picturesforcarousel',\n options={'ordering': ('-pk', 'caption')}), migrations.RemoveField(\n model_name='picturesforcarousel', name='required_group_to_view')]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n" ]
false
98,948
7004097a54e1e2bb2fd89e1ddfd627e98a917758
import logging import pyspark import argparse import pandas as pd import datetime as dt from google.cloud import storage logging.info("Initializing spark context.") sc = pyspark.SparkContext() parser = argparse.ArgumentParser() parser.add_argument('--project_id', help='Project to use.', type=str) parser.add_argument('--input_file_glob', help='A local or google storage file glob specifying what files should be read.', type=str, default='gs://letter_statistics_calculations/letter_files/inputs/*') parser.add_argument('--output_path', help='Where to store the statistics.', type=str, default='/aggregate_result.csv') known_args, _ = parser.parse_known_args() project_id = known_args.project_id input_file_glob = known_args.input_file_glob output_path = known_args.output_path logging.info("Project: {}".format(project_id)) logging.info("Input file glob: {}".format(input_file_glob)) logging.info("Output path: {}".format(output_path)) def time_function(func): """ Simple wrapper that times the duration of a function. Args: func (Callable): Returns: Callable """ def wrapper(*args, **kwargs): start = dt.datetime.utcnow() result = func(*args, **kwargs) end = dt.datetime.utcnow() logging.info("Function %s took: %s", func.__name__, (end - start)) return result return wrapper @time_function def calculate_letter_statistics(file_glob): """Reads data from cloud storage and calculates the sum, sum of squares, max, min and count. Note that dataproc clusters automatically have a google storage connector. This means file_glob can be a path starting with gs and dataproc will understand it should look at cloud storage. For local development, you either have to install the cloud storage connector, or simply have some data in a local directory. Args: file_glob (str): Returns: list """ lines = sc.textFile(file_glob, minPartitions=8) statistics = (lines.map(lambda record: record.split(',')) .mapValues(lambda x: float(x)) .mapValues(lambda value: (value, value**2, value, value, 1)) .reduceByKey(lambda x, y: (x[0]+y[0], x[1]+y[1], max(x[2],y[2]), min(x[3],y[3]), x[4]+y[4])) ) result = statistics.collect() return result @time_function def result_to_dataframe(data): """Converts data to a pandas dataframe. Args: data (list): Returns: pd.DataFrame """ letters, statistics = zip(*data) dataframe = pd.DataFrame(data=list(statistics), index=letters, columns=['SUM', 'SUM_OF_SQUARES', 'MAX', 'MIN', 'COUNT']).sort_index() dataframe['MEAN'] = dataframe['SUM'] / dataframe['COUNT'] dataframe['VARIANCE'] = dataframe['SUM_OF_SQUARES'] / dataframe['COUNT'] - dataframe['MEAN']**2 dataframe['STANDARD_DEVIATION'] = dataframe['VARIANCE']**0.5 logging.info("Total datapoints read: {}.".format(dataframe['COUNT'].sum())) return dataframe @time_function def store_result(dataframe, filepath): """Stores the dataframe. Either in a local path, or on cloud storage. Args: dataframe (pd.DataFrame): filepath (str): Returns: None """ if not filepath.startswith('gs://'): logging.info("Storing result locally in: {}".format(filepath)) dataframe.to_csv(filepath, index=True) else: logging.info("Storing result in cloud storage in path: {}".format(filepath)) # Content to write content = dataframe.to_csv(index=True) # Client to write with cs = storage.Client(project_id) bucket_name, blob_path = filepath[5:].split('/', 1) bucket = cs.bucket(bucket_name) blob = bucket.blob(blob_path) blob.upload_from_string(content) def main(): result = calculate_letter_statistics(input_file_glob) df = result_to_dataframe(result) store_result(df, output_path) logging.info("Finished job.") if __name__ == '__main__': logging.basicConfig(level=logging.INFO) main()
[ "import logging\nimport pyspark\nimport argparse\nimport pandas as pd\nimport datetime as dt\nfrom google.cloud import storage\n\n\nlogging.info(\"Initializing spark context.\")\nsc = pyspark.SparkContext()\n\n\nparser = argparse.ArgumentParser()\nparser.add_argument('--project_id',\n help='Project to use.',\n type=str)\nparser.add_argument('--input_file_glob',\n help='A local or google storage file glob specifying what files should be read.',\n type=str,\n default='gs://letter_statistics_calculations/letter_files/inputs/*')\nparser.add_argument('--output_path',\n help='Where to store the statistics.',\n type=str,\n default='/aggregate_result.csv')\nknown_args, _ = parser.parse_known_args()\nproject_id = known_args.project_id\ninput_file_glob = known_args.input_file_glob\noutput_path = known_args.output_path\nlogging.info(\"Project: {}\".format(project_id))\nlogging.info(\"Input file glob: {}\".format(input_file_glob))\nlogging.info(\"Output path: {}\".format(output_path))\n\n\ndef time_function(func):\n \"\"\" Simple wrapper that times the duration of a function.\n\n Args:\n func (Callable):\n\n Returns:\n Callable\n \"\"\"\n def wrapper(*args, **kwargs):\n start = dt.datetime.utcnow()\n result = func(*args, **kwargs)\n end = dt.datetime.utcnow()\n logging.info(\"Function %s took: %s\", func.__name__, (end - start))\n return result\n return wrapper\n\n\n@time_function\ndef calculate_letter_statistics(file_glob):\n \"\"\"Reads data from cloud storage and calculates the sum, sum of\n squares, max, min and count.\n Note that dataproc clusters automatically have a google storage\n connector. This means file_glob can be a path starting with gs\n and dataproc will understand it should look at cloud storage. For\n local development, you either have to install the cloud storage\n connector, or simply have some data in a local directory.\n\n Args:\n file_glob (str):\n\n Returns:\n list\n \"\"\"\n lines = sc.textFile(file_glob, minPartitions=8)\n statistics = (lines.map(lambda record: record.split(','))\n .mapValues(lambda x: float(x))\n .mapValues(lambda value: (value, value**2, value, value, 1))\n .reduceByKey(lambda x, y: (x[0]+y[0], x[1]+y[1], max(x[2],y[2]), min(x[3],y[3]), x[4]+y[4]))\n )\n result = statistics.collect()\n return result\n\n\n@time_function\ndef result_to_dataframe(data):\n \"\"\"Converts data to a pandas dataframe.\n\n Args:\n data (list):\n\n Returns:\n pd.DataFrame\n \"\"\"\n letters, statistics = zip(*data)\n dataframe = pd.DataFrame(data=list(statistics), index=letters, columns=['SUM', 'SUM_OF_SQUARES', 'MAX', 'MIN', 'COUNT']).sort_index()\n dataframe['MEAN'] = dataframe['SUM'] / dataframe['COUNT']\n dataframe['VARIANCE'] = dataframe['SUM_OF_SQUARES'] / dataframe['COUNT'] - dataframe['MEAN']**2\n dataframe['STANDARD_DEVIATION'] = dataframe['VARIANCE']**0.5\n logging.info(\"Total datapoints read: {}.\".format(dataframe['COUNT'].sum()))\n return dataframe\n\n\n@time_function\ndef store_result(dataframe, filepath):\n \"\"\"Stores the dataframe. Either in a local path, or on cloud\n storage.\n\n Args:\n dataframe (pd.DataFrame):\n filepath (str):\n\n Returns:\n None\n \"\"\"\n if not filepath.startswith('gs://'):\n logging.info(\"Storing result locally in: {}\".format(filepath))\n dataframe.to_csv(filepath, index=True)\n else:\n logging.info(\"Storing result in cloud storage in path: {}\".format(filepath))\n # Content to write\n content = dataframe.to_csv(index=True)\n # Client to write with\n cs = storage.Client(project_id)\n bucket_name, blob_path = filepath[5:].split('/', 1)\n bucket = cs.bucket(bucket_name)\n blob = bucket.blob(blob_path)\n blob.upload_from_string(content)\n\n\ndef main():\n result = calculate_letter_statistics(input_file_glob)\n df = result_to_dataframe(result)\n store_result(df, output_path)\n logging.info(\"Finished job.\")\n\n\nif __name__ == '__main__':\n logging.basicConfig(level=logging.INFO)\n main()\n", "import logging\nimport pyspark\nimport argparse\nimport pandas as pd\nimport datetime as dt\nfrom google.cloud import storage\nlogging.info('Initializing spark context.')\nsc = pyspark.SparkContext()\nparser = argparse.ArgumentParser()\nparser.add_argument('--project_id', help='Project to use.', type=str)\nparser.add_argument('--input_file_glob', help=\n 'A local or google storage file glob specifying what files should be read.'\n , type=str, default=\n 'gs://letter_statistics_calculations/letter_files/inputs/*')\nparser.add_argument('--output_path', help='Where to store the statistics.',\n type=str, default='/aggregate_result.csv')\nknown_args, _ = parser.parse_known_args()\nproject_id = known_args.project_id\ninput_file_glob = known_args.input_file_glob\noutput_path = known_args.output_path\nlogging.info('Project: {}'.format(project_id))\nlogging.info('Input file glob: {}'.format(input_file_glob))\nlogging.info('Output path: {}'.format(output_path))\n\n\ndef time_function(func):\n \"\"\" Simple wrapper that times the duration of a function.\n\n Args:\n func (Callable):\n\n Returns:\n Callable\n \"\"\"\n\n def wrapper(*args, **kwargs):\n start = dt.datetime.utcnow()\n result = func(*args, **kwargs)\n end = dt.datetime.utcnow()\n logging.info('Function %s took: %s', func.__name__, end - start)\n return result\n return wrapper\n\n\n@time_function\ndef calculate_letter_statistics(file_glob):\n \"\"\"Reads data from cloud storage and calculates the sum, sum of\n squares, max, min and count.\n Note that dataproc clusters automatically have a google storage\n connector. This means file_glob can be a path starting with gs\n and dataproc will understand it should look at cloud storage. For\n local development, you either have to install the cloud storage\n connector, or simply have some data in a local directory.\n\n Args:\n file_glob (str):\n\n Returns:\n list\n \"\"\"\n lines = sc.textFile(file_glob, minPartitions=8)\n statistics = lines.map(lambda record: record.split(',')).mapValues(lambda\n x: float(x)).mapValues(lambda value: (value, value ** 2, value,\n value, 1)).reduceByKey(lambda x, y: (x[0] + y[0], x[1] + y[1], max(\n x[2], y[2]), min(x[3], y[3]), x[4] + y[4]))\n result = statistics.collect()\n return result\n\n\n@time_function\ndef result_to_dataframe(data):\n \"\"\"Converts data to a pandas dataframe.\n\n Args:\n data (list):\n\n Returns:\n pd.DataFrame\n \"\"\"\n letters, statistics = zip(*data)\n dataframe = pd.DataFrame(data=list(statistics), index=letters, columns=\n ['SUM', 'SUM_OF_SQUARES', 'MAX', 'MIN', 'COUNT']).sort_index()\n dataframe['MEAN'] = dataframe['SUM'] / dataframe['COUNT']\n dataframe['VARIANCE'] = dataframe['SUM_OF_SQUARES'] / dataframe['COUNT'\n ] - dataframe['MEAN'] ** 2\n dataframe['STANDARD_DEVIATION'] = dataframe['VARIANCE'] ** 0.5\n logging.info('Total datapoints read: {}.'.format(dataframe['COUNT'].sum()))\n return dataframe\n\n\n@time_function\ndef store_result(dataframe, filepath):\n \"\"\"Stores the dataframe. Either in a local path, or on cloud\n storage.\n\n Args:\n dataframe (pd.DataFrame):\n filepath (str):\n\n Returns:\n None\n \"\"\"\n if not filepath.startswith('gs://'):\n logging.info('Storing result locally in: {}'.format(filepath))\n dataframe.to_csv(filepath, index=True)\n else:\n logging.info('Storing result in cloud storage in path: {}'.format(\n filepath))\n content = dataframe.to_csv(index=True)\n cs = storage.Client(project_id)\n bucket_name, blob_path = filepath[5:].split('/', 1)\n bucket = cs.bucket(bucket_name)\n blob = bucket.blob(blob_path)\n blob.upload_from_string(content)\n\n\ndef main():\n result = calculate_letter_statistics(input_file_glob)\n df = result_to_dataframe(result)\n store_result(df, output_path)\n logging.info('Finished job.')\n\n\nif __name__ == '__main__':\n logging.basicConfig(level=logging.INFO)\n main()\n", "<import token>\nlogging.info('Initializing spark context.')\nsc = pyspark.SparkContext()\nparser = argparse.ArgumentParser()\nparser.add_argument('--project_id', help='Project to use.', type=str)\nparser.add_argument('--input_file_glob', help=\n 'A local or google storage file glob specifying what files should be read.'\n , type=str, default=\n 'gs://letter_statistics_calculations/letter_files/inputs/*')\nparser.add_argument('--output_path', help='Where to store the statistics.',\n type=str, default='/aggregate_result.csv')\nknown_args, _ = parser.parse_known_args()\nproject_id = known_args.project_id\ninput_file_glob = known_args.input_file_glob\noutput_path = known_args.output_path\nlogging.info('Project: {}'.format(project_id))\nlogging.info('Input file glob: {}'.format(input_file_glob))\nlogging.info('Output path: {}'.format(output_path))\n\n\ndef time_function(func):\n \"\"\" Simple wrapper that times the duration of a function.\n\n Args:\n func (Callable):\n\n Returns:\n Callable\n \"\"\"\n\n def wrapper(*args, **kwargs):\n start = dt.datetime.utcnow()\n result = func(*args, **kwargs)\n end = dt.datetime.utcnow()\n logging.info('Function %s took: %s', func.__name__, end - start)\n return result\n return wrapper\n\n\n@time_function\ndef calculate_letter_statistics(file_glob):\n \"\"\"Reads data from cloud storage and calculates the sum, sum of\n squares, max, min and count.\n Note that dataproc clusters automatically have a google storage\n connector. This means file_glob can be a path starting with gs\n and dataproc will understand it should look at cloud storage. For\n local development, you either have to install the cloud storage\n connector, or simply have some data in a local directory.\n\n Args:\n file_glob (str):\n\n Returns:\n list\n \"\"\"\n lines = sc.textFile(file_glob, minPartitions=8)\n statistics = lines.map(lambda record: record.split(',')).mapValues(lambda\n x: float(x)).mapValues(lambda value: (value, value ** 2, value,\n value, 1)).reduceByKey(lambda x, y: (x[0] + y[0], x[1] + y[1], max(\n x[2], y[2]), min(x[3], y[3]), x[4] + y[4]))\n result = statistics.collect()\n return result\n\n\n@time_function\ndef result_to_dataframe(data):\n \"\"\"Converts data to a pandas dataframe.\n\n Args:\n data (list):\n\n Returns:\n pd.DataFrame\n \"\"\"\n letters, statistics = zip(*data)\n dataframe = pd.DataFrame(data=list(statistics), index=letters, columns=\n ['SUM', 'SUM_OF_SQUARES', 'MAX', 'MIN', 'COUNT']).sort_index()\n dataframe['MEAN'] = dataframe['SUM'] / dataframe['COUNT']\n dataframe['VARIANCE'] = dataframe['SUM_OF_SQUARES'] / dataframe['COUNT'\n ] - dataframe['MEAN'] ** 2\n dataframe['STANDARD_DEVIATION'] = dataframe['VARIANCE'] ** 0.5\n logging.info('Total datapoints read: {}.'.format(dataframe['COUNT'].sum()))\n return dataframe\n\n\n@time_function\ndef store_result(dataframe, filepath):\n \"\"\"Stores the dataframe. Either in a local path, or on cloud\n storage.\n\n Args:\n dataframe (pd.DataFrame):\n filepath (str):\n\n Returns:\n None\n \"\"\"\n if not filepath.startswith('gs://'):\n logging.info('Storing result locally in: {}'.format(filepath))\n dataframe.to_csv(filepath, index=True)\n else:\n logging.info('Storing result in cloud storage in path: {}'.format(\n filepath))\n content = dataframe.to_csv(index=True)\n cs = storage.Client(project_id)\n bucket_name, blob_path = filepath[5:].split('/', 1)\n bucket = cs.bucket(bucket_name)\n blob = bucket.blob(blob_path)\n blob.upload_from_string(content)\n\n\ndef main():\n result = calculate_letter_statistics(input_file_glob)\n df = result_to_dataframe(result)\n store_result(df, output_path)\n logging.info('Finished job.')\n\n\nif __name__ == '__main__':\n logging.basicConfig(level=logging.INFO)\n main()\n", "<import token>\nlogging.info('Initializing spark context.')\n<assignment token>\nparser.add_argument('--project_id', help='Project to use.', type=str)\nparser.add_argument('--input_file_glob', help=\n 'A local or google storage file glob specifying what files should be read.'\n , type=str, default=\n 'gs://letter_statistics_calculations/letter_files/inputs/*')\nparser.add_argument('--output_path', help='Where to store the statistics.',\n type=str, default='/aggregate_result.csv')\n<assignment token>\nlogging.info('Project: {}'.format(project_id))\nlogging.info('Input file glob: {}'.format(input_file_glob))\nlogging.info('Output path: {}'.format(output_path))\n\n\ndef time_function(func):\n \"\"\" Simple wrapper that times the duration of a function.\n\n Args:\n func (Callable):\n\n Returns:\n Callable\n \"\"\"\n\n def wrapper(*args, **kwargs):\n start = dt.datetime.utcnow()\n result = func(*args, **kwargs)\n end = dt.datetime.utcnow()\n logging.info('Function %s took: %s', func.__name__, end - start)\n return result\n return wrapper\n\n\n@time_function\ndef calculate_letter_statistics(file_glob):\n \"\"\"Reads data from cloud storage and calculates the sum, sum of\n squares, max, min and count.\n Note that dataproc clusters automatically have a google storage\n connector. This means file_glob can be a path starting with gs\n and dataproc will understand it should look at cloud storage. For\n local development, you either have to install the cloud storage\n connector, or simply have some data in a local directory.\n\n Args:\n file_glob (str):\n\n Returns:\n list\n \"\"\"\n lines = sc.textFile(file_glob, minPartitions=8)\n statistics = lines.map(lambda record: record.split(',')).mapValues(lambda\n x: float(x)).mapValues(lambda value: (value, value ** 2, value,\n value, 1)).reduceByKey(lambda x, y: (x[0] + y[0], x[1] + y[1], max(\n x[2], y[2]), min(x[3], y[3]), x[4] + y[4]))\n result = statistics.collect()\n return result\n\n\n@time_function\ndef result_to_dataframe(data):\n \"\"\"Converts data to a pandas dataframe.\n\n Args:\n data (list):\n\n Returns:\n pd.DataFrame\n \"\"\"\n letters, statistics = zip(*data)\n dataframe = pd.DataFrame(data=list(statistics), index=letters, columns=\n ['SUM', 'SUM_OF_SQUARES', 'MAX', 'MIN', 'COUNT']).sort_index()\n dataframe['MEAN'] = dataframe['SUM'] / dataframe['COUNT']\n dataframe['VARIANCE'] = dataframe['SUM_OF_SQUARES'] / dataframe['COUNT'\n ] - dataframe['MEAN'] ** 2\n dataframe['STANDARD_DEVIATION'] = dataframe['VARIANCE'] ** 0.5\n logging.info('Total datapoints read: {}.'.format(dataframe['COUNT'].sum()))\n return dataframe\n\n\n@time_function\ndef store_result(dataframe, filepath):\n \"\"\"Stores the dataframe. Either in a local path, or on cloud\n storage.\n\n Args:\n dataframe (pd.DataFrame):\n filepath (str):\n\n Returns:\n None\n \"\"\"\n if not filepath.startswith('gs://'):\n logging.info('Storing result locally in: {}'.format(filepath))\n dataframe.to_csv(filepath, index=True)\n else:\n logging.info('Storing result in cloud storage in path: {}'.format(\n filepath))\n content = dataframe.to_csv(index=True)\n cs = storage.Client(project_id)\n bucket_name, blob_path = filepath[5:].split('/', 1)\n bucket = cs.bucket(bucket_name)\n blob = bucket.blob(blob_path)\n blob.upload_from_string(content)\n\n\ndef main():\n result = calculate_letter_statistics(input_file_glob)\n df = result_to_dataframe(result)\n store_result(df, output_path)\n logging.info('Finished job.')\n\n\nif __name__ == '__main__':\n logging.basicConfig(level=logging.INFO)\n main()\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n\n\ndef time_function(func):\n \"\"\" Simple wrapper that times the duration of a function.\n\n Args:\n func (Callable):\n\n Returns:\n Callable\n \"\"\"\n\n def wrapper(*args, **kwargs):\n start = dt.datetime.utcnow()\n result = func(*args, **kwargs)\n end = dt.datetime.utcnow()\n logging.info('Function %s took: %s', func.__name__, end - start)\n return result\n return wrapper\n\n\n@time_function\ndef calculate_letter_statistics(file_glob):\n \"\"\"Reads data from cloud storage and calculates the sum, sum of\n squares, max, min and count.\n Note that dataproc clusters automatically have a google storage\n connector. This means file_glob can be a path starting with gs\n and dataproc will understand it should look at cloud storage. For\n local development, you either have to install the cloud storage\n connector, or simply have some data in a local directory.\n\n Args:\n file_glob (str):\n\n Returns:\n list\n \"\"\"\n lines = sc.textFile(file_glob, minPartitions=8)\n statistics = lines.map(lambda record: record.split(',')).mapValues(lambda\n x: float(x)).mapValues(lambda value: (value, value ** 2, value,\n value, 1)).reduceByKey(lambda x, y: (x[0] + y[0], x[1] + y[1], max(\n x[2], y[2]), min(x[3], y[3]), x[4] + y[4]))\n result = statistics.collect()\n return result\n\n\n@time_function\ndef result_to_dataframe(data):\n \"\"\"Converts data to a pandas dataframe.\n\n Args:\n data (list):\n\n Returns:\n pd.DataFrame\n \"\"\"\n letters, statistics = zip(*data)\n dataframe = pd.DataFrame(data=list(statistics), index=letters, columns=\n ['SUM', 'SUM_OF_SQUARES', 'MAX', 'MIN', 'COUNT']).sort_index()\n dataframe['MEAN'] = dataframe['SUM'] / dataframe['COUNT']\n dataframe['VARIANCE'] = dataframe['SUM_OF_SQUARES'] / dataframe['COUNT'\n ] - dataframe['MEAN'] ** 2\n dataframe['STANDARD_DEVIATION'] = dataframe['VARIANCE'] ** 0.5\n logging.info('Total datapoints read: {}.'.format(dataframe['COUNT'].sum()))\n return dataframe\n\n\n@time_function\ndef store_result(dataframe, filepath):\n \"\"\"Stores the dataframe. Either in a local path, or on cloud\n storage.\n\n Args:\n dataframe (pd.DataFrame):\n filepath (str):\n\n Returns:\n None\n \"\"\"\n if not filepath.startswith('gs://'):\n logging.info('Storing result locally in: {}'.format(filepath))\n dataframe.to_csv(filepath, index=True)\n else:\n logging.info('Storing result in cloud storage in path: {}'.format(\n filepath))\n content = dataframe.to_csv(index=True)\n cs = storage.Client(project_id)\n bucket_name, blob_path = filepath[5:].split('/', 1)\n bucket = cs.bucket(bucket_name)\n blob = bucket.blob(blob_path)\n blob.upload_from_string(content)\n\n\ndef main():\n result = calculate_letter_statistics(input_file_glob)\n df = result_to_dataframe(result)\n store_result(df, output_path)\n logging.info('Finished job.')\n\n\n<code token>\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n\n\ndef time_function(func):\n \"\"\" Simple wrapper that times the duration of a function.\n\n Args:\n func (Callable):\n\n Returns:\n Callable\n \"\"\"\n\n def wrapper(*args, **kwargs):\n start = dt.datetime.utcnow()\n result = func(*args, **kwargs)\n end = dt.datetime.utcnow()\n logging.info('Function %s took: %s', func.__name__, end - start)\n return result\n return wrapper\n\n\n@time_function\ndef calculate_letter_statistics(file_glob):\n \"\"\"Reads data from cloud storage and calculates the sum, sum of\n squares, max, min and count.\n Note that dataproc clusters automatically have a google storage\n connector. This means file_glob can be a path starting with gs\n and dataproc will understand it should look at cloud storage. For\n local development, you either have to install the cloud storage\n connector, or simply have some data in a local directory.\n\n Args:\n file_glob (str):\n\n Returns:\n list\n \"\"\"\n lines = sc.textFile(file_glob, minPartitions=8)\n statistics = lines.map(lambda record: record.split(',')).mapValues(lambda\n x: float(x)).mapValues(lambda value: (value, value ** 2, value,\n value, 1)).reduceByKey(lambda x, y: (x[0] + y[0], x[1] + y[1], max(\n x[2], y[2]), min(x[3], y[3]), x[4] + y[4]))\n result = statistics.collect()\n return result\n\n\n@time_function\ndef result_to_dataframe(data):\n \"\"\"Converts data to a pandas dataframe.\n\n Args:\n data (list):\n\n Returns:\n pd.DataFrame\n \"\"\"\n letters, statistics = zip(*data)\n dataframe = pd.DataFrame(data=list(statistics), index=letters, columns=\n ['SUM', 'SUM_OF_SQUARES', 'MAX', 'MIN', 'COUNT']).sort_index()\n dataframe['MEAN'] = dataframe['SUM'] / dataframe['COUNT']\n dataframe['VARIANCE'] = dataframe['SUM_OF_SQUARES'] / dataframe['COUNT'\n ] - dataframe['MEAN'] ** 2\n dataframe['STANDARD_DEVIATION'] = dataframe['VARIANCE'] ** 0.5\n logging.info('Total datapoints read: {}.'.format(dataframe['COUNT'].sum()))\n return dataframe\n\n\n@time_function\ndef store_result(dataframe, filepath):\n \"\"\"Stores the dataframe. Either in a local path, or on cloud\n storage.\n\n Args:\n dataframe (pd.DataFrame):\n filepath (str):\n\n Returns:\n None\n \"\"\"\n if not filepath.startswith('gs://'):\n logging.info('Storing result locally in: {}'.format(filepath))\n dataframe.to_csv(filepath, index=True)\n else:\n logging.info('Storing result in cloud storage in path: {}'.format(\n filepath))\n content = dataframe.to_csv(index=True)\n cs = storage.Client(project_id)\n bucket_name, blob_path = filepath[5:].split('/', 1)\n bucket = cs.bucket(bucket_name)\n blob = bucket.blob(blob_path)\n blob.upload_from_string(content)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n\n\ndef time_function(func):\n \"\"\" Simple wrapper that times the duration of a function.\n\n Args:\n func (Callable):\n\n Returns:\n Callable\n \"\"\"\n\n def wrapper(*args, **kwargs):\n start = dt.datetime.utcnow()\n result = func(*args, **kwargs)\n end = dt.datetime.utcnow()\n logging.info('Function %s took: %s', func.__name__, end - start)\n return result\n return wrapper\n\n\n<function token>\n\n\n@time_function\ndef result_to_dataframe(data):\n \"\"\"Converts data to a pandas dataframe.\n\n Args:\n data (list):\n\n Returns:\n pd.DataFrame\n \"\"\"\n letters, statistics = zip(*data)\n dataframe = pd.DataFrame(data=list(statistics), index=letters, columns=\n ['SUM', 'SUM_OF_SQUARES', 'MAX', 'MIN', 'COUNT']).sort_index()\n dataframe['MEAN'] = dataframe['SUM'] / dataframe['COUNT']\n dataframe['VARIANCE'] = dataframe['SUM_OF_SQUARES'] / dataframe['COUNT'\n ] - dataframe['MEAN'] ** 2\n dataframe['STANDARD_DEVIATION'] = dataframe['VARIANCE'] ** 0.5\n logging.info('Total datapoints read: {}.'.format(dataframe['COUNT'].sum()))\n return dataframe\n\n\n@time_function\ndef store_result(dataframe, filepath):\n \"\"\"Stores the dataframe. Either in a local path, or on cloud\n storage.\n\n Args:\n dataframe (pd.DataFrame):\n filepath (str):\n\n Returns:\n None\n \"\"\"\n if not filepath.startswith('gs://'):\n logging.info('Storing result locally in: {}'.format(filepath))\n dataframe.to_csv(filepath, index=True)\n else:\n logging.info('Storing result in cloud storage in path: {}'.format(\n filepath))\n content = dataframe.to_csv(index=True)\n cs = storage.Client(project_id)\n bucket_name, blob_path = filepath[5:].split('/', 1)\n bucket = cs.bucket(bucket_name)\n blob = bucket.blob(blob_path)\n blob.upload_from_string(content)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<function token>\n<function token>\n\n\n@time_function\ndef result_to_dataframe(data):\n \"\"\"Converts data to a pandas dataframe.\n\n Args:\n data (list):\n\n Returns:\n pd.DataFrame\n \"\"\"\n letters, statistics = zip(*data)\n dataframe = pd.DataFrame(data=list(statistics), index=letters, columns=\n ['SUM', 'SUM_OF_SQUARES', 'MAX', 'MIN', 'COUNT']).sort_index()\n dataframe['MEAN'] = dataframe['SUM'] / dataframe['COUNT']\n dataframe['VARIANCE'] = dataframe['SUM_OF_SQUARES'] / dataframe['COUNT'\n ] - dataframe['MEAN'] ** 2\n dataframe['STANDARD_DEVIATION'] = dataframe['VARIANCE'] ** 0.5\n logging.info('Total datapoints read: {}.'.format(dataframe['COUNT'].sum()))\n return dataframe\n\n\n@time_function\ndef store_result(dataframe, filepath):\n \"\"\"Stores the dataframe. Either in a local path, or on cloud\n storage.\n\n Args:\n dataframe (pd.DataFrame):\n filepath (str):\n\n Returns:\n None\n \"\"\"\n if not filepath.startswith('gs://'):\n logging.info('Storing result locally in: {}'.format(filepath))\n dataframe.to_csv(filepath, index=True)\n else:\n logging.info('Storing result in cloud storage in path: {}'.format(\n filepath))\n content = dataframe.to_csv(index=True)\n cs = storage.Client(project_id)\n bucket_name, blob_path = filepath[5:].split('/', 1)\n bucket = cs.bucket(bucket_name)\n blob = bucket.blob(blob_path)\n blob.upload_from_string(content)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<function token>\n<function token>\n<function token>\n\n\n@time_function\ndef store_result(dataframe, filepath):\n \"\"\"Stores the dataframe. Either in a local path, or on cloud\n storage.\n\n Args:\n dataframe (pd.DataFrame):\n filepath (str):\n\n Returns:\n None\n \"\"\"\n if not filepath.startswith('gs://'):\n logging.info('Storing result locally in: {}'.format(filepath))\n dataframe.to_csv(filepath, index=True)\n else:\n logging.info('Storing result in cloud storage in path: {}'.format(\n filepath))\n content = dataframe.to_csv(index=True)\n cs = storage.Client(project_id)\n bucket_name, blob_path = filepath[5:].split('/', 1)\n bucket = cs.bucket(bucket_name)\n blob = bucket.blob(blob_path)\n blob.upload_from_string(content)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n" ]
false
98,949
d04a87a6fd4a427f974e2cb87ebd103d06319277
from linkscrubber.cmd import redirects import mock def test_replace(): responses = iter([ ({ 'href': 'http://example.com/blah', 'description': 'example link', 'extended': 'extended', 'tags': ['tag1', 'tag2'], 'time_parsed': (2013, 3, 31, 9, 9, 9)}, 'http://newlink.com/blah'), None, ]) q = mock.Mock() q.get = lambda *x, **k: next(responses) client = mock.Mock() redirects._update_worker(client, q, False) client.add.assert_called_with( url='http://newlink.com/blah', description='example link', extended='extended', tags=['tag1', 'tag2'], date=(2013, 3, 31), ) client.delete.assert_called_with( 'http://example.com/blah', ) def test_add_only(): responses = iter([ ({ 'href': 'http://example.com/blah', 'description': 'example link', 'extended': 'extended', 'tags': ['tag1', 'tag2'], 'time_parsed': (2013, 3, 31, 9, 9, 9)}, 'http://newlink.com/blah'), None, ]) q = mock.Mock() q.get = lambda *x, **k: next(responses) client = mock.Mock() client.delete.side_effect = AssertionError('should not delete') redirects._update_worker(client, q, True) client.add.assert_called_with( url='http://newlink.com/blah', description='example link', extended='extended', tags=['tag1', 'tag2'], date=(2013, 3, 31), )
[ "from linkscrubber.cmd import redirects\n\nimport mock\n\n\ndef test_replace():\n responses = iter([\n ({\n 'href': 'http://example.com/blah',\n 'description': 'example link',\n 'extended': 'extended',\n 'tags': ['tag1', 'tag2'],\n 'time_parsed': (2013, 3, 31, 9, 9, 9)},\n 'http://newlink.com/blah'),\n None,\n ])\n q = mock.Mock()\n q.get = lambda *x, **k: next(responses)\n\n client = mock.Mock()\n\n redirects._update_worker(client, q, False)\n\n client.add.assert_called_with(\n url='http://newlink.com/blah',\n description='example link',\n extended='extended',\n tags=['tag1', 'tag2'],\n date=(2013, 3, 31),\n )\n client.delete.assert_called_with(\n 'http://example.com/blah',\n )\n\n\ndef test_add_only():\n responses = iter([\n ({\n 'href': 'http://example.com/blah',\n 'description': 'example link',\n 'extended': 'extended',\n 'tags': ['tag1', 'tag2'],\n 'time_parsed': (2013, 3, 31, 9, 9, 9)},\n 'http://newlink.com/blah'),\n None,\n ])\n q = mock.Mock()\n q.get = lambda *x, **k: next(responses)\n\n client = mock.Mock()\n client.delete.side_effect = AssertionError('should not delete')\n\n redirects._update_worker(client, q, True)\n\n client.add.assert_called_with(\n url='http://newlink.com/blah',\n description='example link',\n extended='extended',\n tags=['tag1', 'tag2'],\n date=(2013, 3, 31),\n )\n", "from linkscrubber.cmd import redirects\nimport mock\n\n\ndef test_replace():\n responses = iter([({'href': 'http://example.com/blah', 'description':\n 'example link', 'extended': 'extended', 'tags': ['tag1', 'tag2'],\n 'time_parsed': (2013, 3, 31, 9, 9, 9)}, 'http://newlink.com/blah'),\n None])\n q = mock.Mock()\n q.get = lambda *x, **k: next(responses)\n client = mock.Mock()\n redirects._update_worker(client, q, False)\n client.add.assert_called_with(url='http://newlink.com/blah',\n description='example link', extended='extended', tags=['tag1',\n 'tag2'], date=(2013, 3, 31))\n client.delete.assert_called_with('http://example.com/blah')\n\n\ndef test_add_only():\n responses = iter([({'href': 'http://example.com/blah', 'description':\n 'example link', 'extended': 'extended', 'tags': ['tag1', 'tag2'],\n 'time_parsed': (2013, 3, 31, 9, 9, 9)}, 'http://newlink.com/blah'),\n None])\n q = mock.Mock()\n q.get = lambda *x, **k: next(responses)\n client = mock.Mock()\n client.delete.side_effect = AssertionError('should not delete')\n redirects._update_worker(client, q, True)\n client.add.assert_called_with(url='http://newlink.com/blah',\n description='example link', extended='extended', tags=['tag1',\n 'tag2'], date=(2013, 3, 31))\n", "<import token>\n\n\ndef test_replace():\n responses = iter([({'href': 'http://example.com/blah', 'description':\n 'example link', 'extended': 'extended', 'tags': ['tag1', 'tag2'],\n 'time_parsed': (2013, 3, 31, 9, 9, 9)}, 'http://newlink.com/blah'),\n None])\n q = mock.Mock()\n q.get = lambda *x, **k: next(responses)\n client = mock.Mock()\n redirects._update_worker(client, q, False)\n client.add.assert_called_with(url='http://newlink.com/blah',\n description='example link', extended='extended', tags=['tag1',\n 'tag2'], date=(2013, 3, 31))\n client.delete.assert_called_with('http://example.com/blah')\n\n\ndef test_add_only():\n responses = iter([({'href': 'http://example.com/blah', 'description':\n 'example link', 'extended': 'extended', 'tags': ['tag1', 'tag2'],\n 'time_parsed': (2013, 3, 31, 9, 9, 9)}, 'http://newlink.com/blah'),\n None])\n q = mock.Mock()\n q.get = lambda *x, **k: next(responses)\n client = mock.Mock()\n client.delete.side_effect = AssertionError('should not delete')\n redirects._update_worker(client, q, True)\n client.add.assert_called_with(url='http://newlink.com/blah',\n description='example link', extended='extended', tags=['tag1',\n 'tag2'], date=(2013, 3, 31))\n", "<import token>\n\n\ndef test_replace():\n responses = iter([({'href': 'http://example.com/blah', 'description':\n 'example link', 'extended': 'extended', 'tags': ['tag1', 'tag2'],\n 'time_parsed': (2013, 3, 31, 9, 9, 9)}, 'http://newlink.com/blah'),\n None])\n q = mock.Mock()\n q.get = lambda *x, **k: next(responses)\n client = mock.Mock()\n redirects._update_worker(client, q, False)\n client.add.assert_called_with(url='http://newlink.com/blah',\n description='example link', extended='extended', tags=['tag1',\n 'tag2'], date=(2013, 3, 31))\n client.delete.assert_called_with('http://example.com/blah')\n\n\n<function token>\n", "<import token>\n<function token>\n<function token>\n" ]
false
98,950
e56fd6340cf82d8dea24c76070afbd92353b3597
# -*- coding: utf-8 -*- # Generated by Django 1.11.11 on 2018-08-08 16:12 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('api', '0001_initial'), ] operations = [ migrations.AlterField( model_name='coursechapter', name='course', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Course'), ), migrations.AlterField( model_name='coursesection', name='chapter', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.CourseChapter'), ), ]
[ "# -*- coding: utf-8 -*-\r\n# Generated by Django 1.11.11 on 2018-08-08 16:12\r\nfrom __future__ import unicode_literals\r\n\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('api', '0001_initial'),\r\n ]\r\n\r\n operations = [\r\n migrations.AlterField(\r\n model_name='coursechapter',\r\n name='course',\r\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Course'),\r\n ),\r\n migrations.AlterField(\r\n model_name='coursesection',\r\n name='chapter',\r\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.CourseChapter'),\r\n ),\r\n ]\r\n", "from __future__ import unicode_literals\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('api', '0001_initial')]\n operations = [migrations.AlterField(model_name='coursechapter', name=\n 'course', field=models.ForeignKey(on_delete=django.db.models.\n deletion.CASCADE, to='api.Course')), migrations.AlterField(\n model_name='coursesection', name='chapter', field=models.ForeignKey\n (on_delete=django.db.models.deletion.CASCADE, to='api.CourseChapter'))]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('api', '0001_initial')]\n operations = [migrations.AlterField(model_name='coursechapter', name=\n 'course', field=models.ForeignKey(on_delete=django.db.models.\n deletion.CASCADE, to='api.Course')), migrations.AlterField(\n model_name='coursesection', name='chapter', field=models.ForeignKey\n (on_delete=django.db.models.deletion.CASCADE, to='api.CourseChapter'))]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n" ]
false
98,951
58a0df99f76965d4bf25f1a82db9752646cac57a
#1. print("Hello World") #2. print("Mary's cosmetics") #3 print('신씨가 소리질렀다. "도둑이야".') #4 print("C:\Windows") #5 print("안녕하세요. \n만나서\t\t반갑습니다.") #6. print("오늘은","일요일") #7 print("naver",'kakao','sk','samsung',sep=';') #8 print("naver",'kakao','sk','samsung',sep='/') #9 print("first",end="");print("second") #10 string = "dk2jd923i1jdk2jd93jfd92jd918943jfd8923" print(len(string)) #11 a='3';b='4';print(a+b) #12 s='hello';t='python';print(s+"!",t) #13 print("hi"*3) #14 print('-'*80) #15 t1='python';t2='java';print((t1+" "+t2+' ')*4) #16 print(20000*10) #17 2+2*3 #18 a=128;print(type(a));a='132';print(type(a)) #19 num_str = '720';print(int(num_str)) #20 num=100 ; print(str(num)) #21 lang = 'python';print(lang[0],lang[2]) #22 license_plate = "24가 2210" ; print(license_plate[-4:]) #23 string='홀짝홀짝홀짝' ; print(string[::2]) print(''.join([x for x in string if x == "홀" ])) #24 string = "PYTHON" ; print(string[::-1]) #25 phone_number = '010-1111-2222' ; print(phone_number.replace('-',' ')) #26 print(phone_number.replace('-','')) #27 url = 'http://sharebook.kr' ; print(url[-2:]) #28 lang='python' ; lang[0] = 'P' ; print(lang) #29 string = 'abcdfe2a354a32a' ; print(string.replace('a',"A")) #30 string = 'abcd' ; string.replace('b','B') ; print(string) #41 movie_rank = ['닥터스트레인지','스플릿','럭키'] #42 movie_rank.append('배트맨') ; movie_rank #43 movie_rank.insert(1,"슈퍼맨") ; movie_rank #44 movie_rank.remove('럭키') ; movie_rank #45 del movie_rank[2:] ; movie_rank #46 lang1 = ['c','c++','java'] ; lang2=['python','go','c#'] ; print(lang1+lang2) #47 nums = [1,2,3,4,5,6,7] ; print('max:', max(nums)) ; print('min:',min(nums)) #48 nums = [1,2,3,4,5] ; print(sum(nums)) #49 cook=['피자','김밥','등등해서','김치전'];print(len(cook)) #50 nums=[1,2,3,4,5] ; print(sum(nums)/len(nums)) #51 price = ['20180728',100,130,140,150,160,170];print(price[1:7]) #52 nums=[1,2,3,4,5,6,7,8,9,10] ; print(nums[::2]) #53 nums=[i for i in range(1,11)] ; print(nums[1::2]) #54 nums=[i for i in range(1,6)] ; print(nums[::-1]) #55 interest=['삼성전자','LG전자','Naver'];print(interest[0],interest[2]) #56 interest=['삼성전자','LG전자','Naver','SK하이닉스','미래에셋대우'] print(" ".join(interest)) #57 interest=['삼성전자','LG전자','Naver','SK하이닉스','미래에셋대우'] print("/".join(interest)) #58 interest=['삼성전자','LG전자','Naver','SK하이닉스','미래에셋대우'] print("\n".join(interest)) #59 string='삼성전자/LG전자/Naver' ; interest=string.split('/') ; print(interest) #60 string="삼성전자/LG전자/Naver/SK하이닉스/미래에셋대우" interest=string.split('/') print(interest) #61 interest_0 = ['삼성전자','LG전자','SK Hynix'] interest_1 = interest_0 interest_1[0] = "Naver" print(interest_0) ; print(interest_1) #62 interest_0 = ['삼성전자','LG전자','SK Hynix'] interest_1 = interest_0[:2] interest_1[0] = "Naver" print(interest_0) ; print(interest_1) #63 my_variable=() #64 t = (1, 2, 3) ; t[0] = 'a' #65 tup = (1,) #66 t=1,2,3,4 ; type(t) #67 t=('a','b','c') ; t=(t[0].upper(),) + t[1:3] ; print(t) #68 interest = ('삼성전자','LG전자','SK Hynix') ; list(interest) #69 interest = ['삼성전자','LG전자','SK Hynix'] ; tuple(interest) #70 my_tuple = (1, 2, 3) ; a, b, c = my_tuple ; print(a + b + c) #71 a,b,*c = (0,1,2,3,4,5) ; a ; b ;c #72 scores = [8.8, 8.9, 8.7, 9.2, 9.3, 9.7, 9.9, 9.5, 7.8, 9.4] _,_,*valid_score=scores #이하 90번까지는 했으므로 생략. #92 print(3==5) #93 print(3<5) #94 x=4 ; print(1<x<5) #95 print((3==3) and (4!=3)) #96 print(3 >= 4) #print(3 => 4) #97 if 4<3 : print("Hello World") #98 if 4 < 3: print("Hello World.") else: print("Hi, there.") #99 if True : print ("1") print ("2") else : print("3") print("4") #100 if True : if False: print("1") print("2") else: print("3") else : print("4") print("5") #101 hi = input() print(hi*2) #이렇게 코딩하면 받고 출력까지 한번에수행. #102 x = input("숫자를 입력하세요: ") print(eval(x)+10) #103 x = int(input("숫자를 입력하세요: ")) if x % 2 == 0 : print("짝수") else: print("홀수") #104 x = int(input("입력값: ")) if 0< x <= 235 : print(x+20) elif x > 235: print(255) x = int(input("입력값: ")) print( "출력값: ",min( x+20,255) ) #105 x = int(input("입력값: ")) print( "출력값: ",min( x-20,0) ) #106 x=input("현재시간 : ") if x.split(':')[1] != '00' : print("정각이 아닙니다.") else: print("정각 입니다.") #107 fruit = ['사과','포도','홍시'] x = input("좋아하는 과일은? ") if x in fruit : print("정답입니다.") else: print("오답입니다.") #108 warn_investment_list=['Microsoft','Google','Naver','Kakao','SAMSUNG','LG'] warn_investment_list x=input("투자 종목을 입력해주세요: ") if x.lower() in [y.lower() for y in warn_investment_list] : print("투자 경고 종목입니다.") else: print("투자 경고 종목이 아닙니다.") #109 fruit = {"봄":'딸기','여름':'토마토','가을':'사과'} x=input("제가 좋아하는 계절은: ") if x in fruit.keys(): print("정답입니다.") else: print("오답입니다.") #110 fruit = {"봄":'딸기','여름':'토마토','가을':'사과'} x=input("좋아하는 과일은?: ") if x in fruit.values(): print("정답입니다.") else: print("오답입니다.") #111 x=input("문자를 입력하시오: ") if x.islower() : print(True) else: print(False) #112 score = int(input("score:")) if score > 80 : print("grade is A") elif score > 60 : print("grade is B") elif score > 40 : print("grade is C") elif score > 20 : print("grade is D") else : print("grade is E") #113 x,y = input('입력 :').split(' ') if y == '달러' : print(int(x)*1167,'원') elif y == '엔' : print(int(x)*1.096,'원') elif y == '유로' : print(int(x)*1268,'원') elif y == '위안' : print(int(x)*171,'원') #114 x = int(input('input number1: ')) y = int(input('input number2: ')) z = int(input('input number3: ')) print(max(x,y,z)) #115 x = input('휴대전화 번호 입력: ').split('-') y = {'011':'SKT' , '016':'KT' , '019':'LGU', '010':'알수없음'} print('당신은 {} 사용자입니다.'.format(y[x[0]])) #116 x = input('우편번호: ')[2] y = { '강북구' : ['0','1','2'] , '도봉구':['3','4','5'] ,'노원구' : ['6','7','8','9'] } def reverse(x,y): for a in y: if x in y[a] : return(a) raise ValueError("숫자 입력하세요.") # 리스트 내의 리스트 풀기 list_of_lists = sum( y.values() ,[]) #118은 비슷하므로 생략 #119 <- numpy를 배우면 쉽게 요소별곱을 시행할 수 있음. import numpy as np x = input('주민등록번호: ').split('-') num = list("".join(x)) num2=list(map(int, num)) a1 = np.array(num2)[:-1] a2 = np.array([i for i in range(2,10)] + [j for j in range(2,6)]) first = sum(a1*a2) % 11 ; second = 11-first if second == int(num[-1]) : print("유효한 주민등록번호입니다.") else: print("유효하지 않은 주민등록번호입니다.") #120 생략 #121 for i in ['가','나','다','라'] : print(i) #122 for 변수 in ["사과", "귤", "수박"]: print(변수) #123 for 변수 in ["사과", "귤", "수박"]: print(변수) print("--") #124 for 변수 in ["사과", "귤", "수박"]: print(변수) print("--") #125 menu = ["김밥", "라면", "튀김"] for i in menu: print('오늘의 메뉴:', i) #126 생략 #127 pets = ['dog', 'cat', 'parrot', 'squirrel', 'goldfish'] for i in pets: print(i,len(i)) #128 prices = [100, 200, 300] for i in prices : print(i + 10) #129 prices = ["129,300", "1,000", "2,300"] for i in prices : print(int(i.replace(",",""))) #130 menu = ["면라", "밥김", "김튀"] for i in menu : print(i[::-1]) #131 my_list = ["가", "나", "다", "라"] for i in my_list[1:]: print(i) #132 my_list = [1, 2, 3, 4, 5, 6] for i in my_list[::2]: print(i) #133 my_list = [1, 2, 3, 4, 5, 6] for i in my_list[1::2]: print(i) #134 my_list = ["가", "나", "다", "라"] for i in my_list[::-1]: print(i) #135 my_list = [3, -20, -3, 44] for i in my_list : if i < 0: print(i) #136 my_list = [3, 100, 23, 44] for i in my_list: if i % 3 == 0 : print(i) #137 my_list = ["I", "study", "python", "language", "!"] for i in my_list: if len(i) >= 3 : print(i) #138 my_list = [3, 1, 7, 10, 5, 6] for i in my_list : if 5 <i < 10 : print(i) for i in my_list : if i>5 and i < 10 : print(i) #139 my_list = [13, 21, 12, 14, 30, 18] for i in my_list: if i > 10 and i < 20 and i % 3 == 0 : print(i) #140 my_list = [3, 1, 7, 12, 5, 16] for i in my_list: if i % 3 ==0 or i % 4 == 0 : print(i) #141 my_list = ["A", "b", "c", "D"] for i in my_list: if i.isupper() : print(i) #142 my_list = ["A", "b", "c", "D"] for i in my_list: if i.islower() : print(i) #143 my_list = ["A", "b", "c", "D"] for i in my_list: if i.isupper(): print(i.lower(),end='') else : print(i.upper(),end='') #144 file_list = ['hello.py', 'ex01.py', 'ch02.py', 'intro.hwp'] for i in file_list: print(i.split('.')[0]) #145 filenames = ['intra.h', 'intra.c', 'define.h', 'run.py'] for i in filenames: if i.split('.')[1] == 'h' : print(i) #146 filenames = ['intra.h', 'intra.c', 'define.h', 'run.py'] for i in filenames: if i.split('.')[1] == 'h' or i.split('.')[1] == 'c': print(i) #147 my_list = [3, -20, -3, 44] new_list=[] for i in my_list: if i >0 : new_list.append(i) print(new_list) #148 my_list = ['A', "b", "c", "D"] upper_list=[] for i in my_list : if i.isupper(): upper_list.append(i) print(upper_list) #149 my_list = [3, 4, 4, 5, 6, 6] sole_list=[] for i in my_list: if i not in sole_list: sole_list.append(i) print(sole_list) #150 my_list = [3, 4, 5] a=0 for i in my_list: a += i print(a)
[ "#1.\r\nprint(\"Hello World\")\r\n#2.\r\nprint(\"Mary's cosmetics\")\r\n#3\r\nprint('신씨가 소리질렀다. \"도둑이야\".')\r\n#4\r\nprint(\"C:\\Windows\")\r\n#5\r\nprint(\"안녕하세요. \\n만나서\\t\\t반갑습니다.\")\r\n#6.\r\nprint(\"오늘은\",\"일요일\")\r\n#7\r\nprint(\"naver\",'kakao','sk','samsung',sep=';')\r\n#8\r\nprint(\"naver\",'kakao','sk','samsung',sep='/')\r\n#9\r\nprint(\"first\",end=\"\");print(\"second\")\r\n#10\r\nstring = \"dk2jd923i1jdk2jd93jfd92jd918943jfd8923\"\r\nprint(len(string))\r\n#11\r\na='3';b='4';print(a+b)\r\n#12\r\ns='hello';t='python';print(s+\"!\",t)\r\n#13\r\nprint(\"hi\"*3)\r\n#14\r\nprint('-'*80)\r\n#15\r\nt1='python';t2='java';print((t1+\" \"+t2+' ')*4)\r\n#16\r\nprint(20000*10)\r\n#17\r\n2+2*3\r\n#18\r\na=128;print(type(a));a='132';print(type(a))\r\n#19\r\nnum_str = '720';print(int(num_str))\r\n#20\r\nnum=100 ; print(str(num))\r\n#21\r\nlang = 'python';print(lang[0],lang[2])\r\n#22\r\nlicense_plate = \"24가 2210\" ; print(license_plate[-4:])\r\n#23\r\nstring='홀짝홀짝홀짝' ; print(string[::2])\r\nprint(''.join([x for x in string if x == \"홀\" ]))\r\n#24\r\nstring = \"PYTHON\" ; print(string[::-1])\r\n#25\r\nphone_number = '010-1111-2222' ; print(phone_number.replace('-',' '))\r\n#26\r\nprint(phone_number.replace('-',''))\r\n#27\r\nurl = 'http://sharebook.kr' ; print(url[-2:])\r\n#28\r\nlang='python' ; lang[0] = 'P' ; print(lang)\r\n#29\r\nstring = 'abcdfe2a354a32a' ; print(string.replace('a',\"A\"))\r\n#30\r\nstring = 'abcd' ; string.replace('b','B') ; print(string)\r\n#41\r\nmovie_rank = ['닥터스트레인지','스플릿','럭키']\r\n#42\r\nmovie_rank.append('배트맨') ; movie_rank\r\n#43\r\nmovie_rank.insert(1,\"슈퍼맨\") ; movie_rank\r\n#44\r\nmovie_rank.remove('럭키') ; movie_rank\r\n#45\r\ndel movie_rank[2:] ; movie_rank\r\n#46\r\nlang1 = ['c','c++','java'] ; lang2=['python','go','c#'] ; print(lang1+lang2)\r\n#47\r\nnums = [1,2,3,4,5,6,7] ; print('max:', max(nums)) ; print('min:',min(nums))\r\n#48\r\nnums = [1,2,3,4,5] ; print(sum(nums))\r\n#49\r\ncook=['피자','김밥','등등해서','김치전'];print(len(cook))\r\n#50\r\nnums=[1,2,3,4,5] ; print(sum(nums)/len(nums))\r\n#51\r\nprice = ['20180728',100,130,140,150,160,170];print(price[1:7])\r\n#52\r\nnums=[1,2,3,4,5,6,7,8,9,10] ; print(nums[::2])\r\n#53\r\nnums=[i for i in range(1,11)] ; print(nums[1::2])\r\n#54\r\nnums=[i for i in range(1,6)] ; print(nums[::-1])\r\n#55\r\ninterest=['삼성전자','LG전자','Naver'];print(interest[0],interest[2])\r\n#56\r\ninterest=['삼성전자','LG전자','Naver','SK하이닉스','미래에셋대우']\r\nprint(\" \".join(interest))\r\n#57\r\ninterest=['삼성전자','LG전자','Naver','SK하이닉스','미래에셋대우']\r\nprint(\"/\".join(interest))\r\n#58\r\ninterest=['삼성전자','LG전자','Naver','SK하이닉스','미래에셋대우']\r\nprint(\"\\n\".join(interest))\r\n#59\r\nstring='삼성전자/LG전자/Naver' ; interest=string.split('/') ; print(interest)\r\n#60\r\nstring=\"삼성전자/LG전자/Naver/SK하이닉스/미래에셋대우\"\r\ninterest=string.split('/')\r\nprint(interest)\r\n#61\r\ninterest_0 = ['삼성전자','LG전자','SK Hynix']\r\ninterest_1 = interest_0\r\ninterest_1[0] = \"Naver\"\r\nprint(interest_0) ; print(interest_1)\r\n#62\r\ninterest_0 = ['삼성전자','LG전자','SK Hynix']\r\ninterest_1 = interest_0[:2]\r\ninterest_1[0] = \"Naver\"\r\nprint(interest_0) ; print(interest_1)\r\n#63\r\nmy_variable=()\r\n#64\r\nt = (1, 2, 3) ; t[0] = 'a'\r\n#65\r\ntup = (1,)\r\n#66\r\nt=1,2,3,4 ; type(t)\r\n#67\r\nt=('a','b','c') ; t=(t[0].upper(),) + t[1:3] ; print(t)\r\n#68\r\ninterest = ('삼성전자','LG전자','SK Hynix') ; list(interest)\r\n#69\r\ninterest = ['삼성전자','LG전자','SK Hynix'] ; tuple(interest)\r\n#70\r\nmy_tuple = (1, 2, 3) ; a, b, c = my_tuple ; print(a + b + c)\r\n#71\r\na,b,*c = (0,1,2,3,4,5) ; a ; b ;c\r\n#72\r\nscores = [8.8, 8.9, 8.7, 9.2, 9.3, 9.7, 9.9, 9.5, 7.8, 9.4]\r\n_,_,*valid_score=scores\r\n#이하 90번까지는 했으므로 생략.\r\n#92\r\nprint(3==5)\r\n#93\r\nprint(3<5)\r\n#94\r\nx=4 ; print(1<x<5)\r\n#95\r\nprint((3==3) and (4!=3))\r\n#96\r\nprint(3 >= 4) #print(3 => 4)\r\n#97\r\nif 4<3 :\r\n print(\"Hello World\")\r\n#98\r\nif 4 < 3:\r\n print(\"Hello World.\")\r\nelse:\r\n print(\"Hi, there.\")\r\n#99\r\nif True :\r\n print (\"1\")\r\n print (\"2\")\r\nelse :\r\n print(\"3\")\r\nprint(\"4\")\r\n#100\r\nif True :\r\n if False:\r\n print(\"1\")\r\n print(\"2\")\r\n else:\r\n print(\"3\")\r\nelse :\r\n print(\"4\")\r\nprint(\"5\")\r\n#101\r\nhi = input()\r\nprint(hi*2) #이렇게 코딩하면 받고 출력까지 한번에수행.\r\n#102\r\nx = input(\"숫자를 입력하세요: \")\r\nprint(eval(x)+10)\r\n#103\r\nx = int(input(\"숫자를 입력하세요: \"))\r\nif x % 2 == 0 :\r\n print(\"짝수\")\r\nelse:\r\n print(\"홀수\")\r\n#104\r\nx = int(input(\"입력값: \"))\r\nif 0< x <= 235 :\r\n print(x+20)\r\nelif x > 235:\r\n print(255)\r\nx = int(input(\"입력값: \"))\r\nprint( \"출력값: \",min( x+20,255) )\r\n#105\r\nx = int(input(\"입력값: \"))\r\nprint( \"출력값: \",min( x-20,0) )\r\n#106\r\nx=input(\"현재시간 : \")\r\nif x.split(':')[1] != '00' :\r\n print(\"정각이 아닙니다.\")\r\nelse:\r\n print(\"정각 입니다.\")\r\n#107\r\nfruit = ['사과','포도','홍시']\r\nx = input(\"좋아하는 과일은? \")\r\nif x in fruit :\r\n print(\"정답입니다.\")\r\nelse:\r\n print(\"오답입니다.\")\r\n#108\r\nwarn_investment_list=['Microsoft','Google','Naver','Kakao','SAMSUNG','LG']\r\nwarn_investment_list\r\nx=input(\"투자 종목을 입력해주세요: \")\r\nif x.lower() in [y.lower() for y in warn_investment_list] :\r\n print(\"투자 경고 종목입니다.\")\r\nelse:\r\n print(\"투자 경고 종목이 아닙니다.\")\r\n#109\r\nfruit = {\"봄\":'딸기','여름':'토마토','가을':'사과'}\r\nx=input(\"제가 좋아하는 계절은: \")\r\nif x in fruit.keys():\r\n print(\"정답입니다.\")\r\nelse:\r\n print(\"오답입니다.\")\r\n#110\r\nfruit = {\"봄\":'딸기','여름':'토마토','가을':'사과'}\r\nx=input(\"좋아하는 과일은?: \")\r\nif x in fruit.values():\r\n print(\"정답입니다.\")\r\nelse:\r\n print(\"오답입니다.\")\r\n#111\r\nx=input(\"문자를 입력하시오: \")\r\nif x.islower() :\r\n print(True)\r\nelse:\r\n print(False)\r\n#112\r\nscore = int(input(\"score:\"))\r\nif score > 80 :\r\n print(\"grade is A\")\r\nelif score > 60 :\r\n print(\"grade is B\")\r\nelif score > 40 :\r\n print(\"grade is C\")\r\nelif score > 20 :\r\n print(\"grade is D\")\r\nelse :\r\n print(\"grade is E\")\r\n#113\r\nx,y = input('입력 :').split(' ')\r\nif y == '달러' :\r\n print(int(x)*1167,'원')\r\nelif y == '엔' :\r\n print(int(x)*1.096,'원')\r\nelif y == '유로' :\r\n print(int(x)*1268,'원')\r\nelif y == '위안' :\r\n print(int(x)*171,'원')\r\n#114\r\nx = int(input('input number1: '))\r\ny = int(input('input number2: '))\r\nz = int(input('input number3: '))\r\nprint(max(x,y,z))\r\n#115\r\nx = input('휴대전화 번호 입력: ').split('-')\r\ny = {'011':'SKT' , '016':'KT' , '019':'LGU', '010':'알수없음'}\r\nprint('당신은 {} 사용자입니다.'.format(y[x[0]]))\r\n#116\r\nx = input('우편번호: ')[2]\r\ny = { '강북구' : ['0','1','2'] , '도봉구':['3','4','5'] ,'노원구' : ['6','7','8','9'] }\r\ndef reverse(x,y):\r\n for a in y:\r\n if x in y[a] :\r\n return(a)\r\n raise ValueError(\"숫자 입력하세요.\")\r\n# 리스트 내의 리스트 풀기 list_of_lists = sum( y.values() ,[])\r\n#118은 비슷하므로 생략\r\n#119 <- numpy를 배우면 쉽게 요소별곱을 시행할 수 있음.\r\nimport numpy as np\r\nx = input('주민등록번호: ').split('-')\r\nnum = list(\"\".join(x))\r\nnum2=list(map(int, num))\r\na1 = np.array(num2)[:-1]\r\na2 = np.array([i for i in range(2,10)] + [j for j in range(2,6)])\r\nfirst = sum(a1*a2) % 11 ; second = 11-first\r\nif second == int(num[-1]) :\r\n print(\"유효한 주민등록번호입니다.\")\r\nelse:\r\n print(\"유효하지 않은 주민등록번호입니다.\")\r\n#120 생략\r\n#121\r\nfor i in ['가','나','다','라'] :\r\n print(i)\r\n#122\r\nfor 변수 in [\"사과\", \"귤\", \"수박\"]:\r\n print(변수)\r\n#123\r\nfor 변수 in [\"사과\", \"귤\", \"수박\"]:\r\n print(변수)\r\n print(\"--\")\r\n#124\r\nfor 변수 in [\"사과\", \"귤\", \"수박\"]:\r\n print(변수)\r\nprint(\"--\")\r\n#125\r\nmenu = [\"김밥\", \"라면\", \"튀김\"]\r\nfor i in menu:\r\n print('오늘의 메뉴:', i)\r\n#126 생략\r\n#127\r\npets = ['dog', 'cat', 'parrot', 'squirrel', 'goldfish']\r\nfor i in pets:\r\n print(i,len(i))\r\n#128\r\nprices = [100, 200, 300]\r\nfor i in prices :\r\n print(i + 10)\r\n#129\r\nprices = [\"129,300\", \"1,000\", \"2,300\"]\r\nfor i in prices :\r\n print(int(i.replace(\",\",\"\")))\r\n#130\r\nmenu = [\"면라\", \"밥김\", \"김튀\"]\r\nfor i in menu :\r\n print(i[::-1])\r\n#131\r\nmy_list = [\"가\", \"나\", \"다\", \"라\"]\r\nfor i in my_list[1:]:\r\n print(i)\r\n#132\r\nmy_list = [1, 2, 3, 4, 5, 6]\r\nfor i in my_list[::2]:\r\n print(i)\r\n#133\r\nmy_list = [1, 2, 3, 4, 5, 6]\r\nfor i in my_list[1::2]:\r\n print(i)\r\n#134\r\nmy_list = [\"가\", \"나\", \"다\", \"라\"]\r\nfor i in my_list[::-1]:\r\n print(i)\r\n#135\r\nmy_list = [3, -20, -3, 44]\r\nfor i in my_list :\r\n if i < 0:\r\n print(i)\r\n#136\r\nmy_list = [3, 100, 23, 44]\r\nfor i in my_list:\r\n if i % 3 == 0 :\r\n print(i)\r\n#137\r\nmy_list = [\"I\", \"study\", \"python\", \"language\", \"!\"]\r\nfor i in my_list:\r\n if len(i) >= 3 :\r\n print(i)\r\n#138\r\nmy_list = [3, 1, 7, 10, 5, 6]\r\nfor i in my_list :\r\n if 5 <i < 10 :\r\n print(i)\r\nfor i in my_list :\r\n if i>5 and i < 10 :\r\n print(i)\r\n#139\r\nmy_list = [13, 21, 12, 14, 30, 18]\r\nfor i in my_list:\r\n if i > 10 and i < 20 and i % 3 == 0 :\r\n print(i)\r\n#140\r\nmy_list = [3, 1, 7, 12, 5, 16]\r\nfor i in my_list:\r\n if i % 3 ==0 or i % 4 == 0 :\r\n print(i)\r\n#141\r\nmy_list = [\"A\", \"b\", \"c\", \"D\"]\r\nfor i in my_list:\r\n if i.isupper() :\r\n print(i)\r\n#142\r\nmy_list = [\"A\", \"b\", \"c\", \"D\"]\r\nfor i in my_list:\r\n if i.islower() :\r\n print(i)\r\n#143\r\nmy_list = [\"A\", \"b\", \"c\", \"D\"]\r\nfor i in my_list:\r\n if i.isupper():\r\n print(i.lower(),end='')\r\n else :\r\n print(i.upper(),end='')\r\n#144\r\nfile_list = ['hello.py', 'ex01.py', 'ch02.py', 'intro.hwp']\r\nfor i in file_list:\r\n print(i.split('.')[0])\r\n#145\r\nfilenames = ['intra.h', 'intra.c', 'define.h', 'run.py']\r\nfor i in filenames:\r\n if i.split('.')[1] == 'h' :\r\n print(i)\r\n#146\r\nfilenames = ['intra.h', 'intra.c', 'define.h', 'run.py']\r\nfor i in filenames:\r\n if i.split('.')[1] == 'h' or i.split('.')[1] == 'c':\r\n print(i)\r\n#147\r\nmy_list = [3, -20, -3, 44]\r\nnew_list=[]\r\nfor i in my_list:\r\n if i >0 :\r\n new_list.append(i)\r\nprint(new_list)\r\n#148\r\nmy_list = ['A', \"b\", \"c\", \"D\"]\r\nupper_list=[]\r\nfor i in my_list :\r\n if i.isupper():\r\n upper_list.append(i)\r\nprint(upper_list)\r\n#149\r\nmy_list = [3, 4, 4, 5, 6, 6]\r\nsole_list=[]\r\nfor i in my_list:\r\n if i not in sole_list:\r\n sole_list.append(i)\r\nprint(sole_list)\r\n#150\r\nmy_list = [3, 4, 5]\r\na=0\r\nfor i in my_list:\r\n a += i\r\nprint(a)", "print('Hello World')\nprint(\"Mary's cosmetics\")\nprint('신씨가 소리질렀다. \"도둑이야\".')\nprint('C:\\\\Windows')\nprint(\"\"\"안녕하세요. \n만나서\t\t반갑습니다.\"\"\")\nprint('오늘은', '일요일')\nprint('naver', 'kakao', 'sk', 'samsung', sep=';')\nprint('naver', 'kakao', 'sk', 'samsung', sep='/')\nprint('first', end='')\nprint('second')\nstring = 'dk2jd923i1jdk2jd93jfd92jd918943jfd8923'\nprint(len(string))\na = '3'\nb = '4'\nprint(a + b)\ns = 'hello'\nt = 'python'\nprint(s + '!', t)\nprint('hi' * 3)\nprint('-' * 80)\nt1 = 'python'\nt2 = 'java'\nprint((t1 + ' ' + t2 + ' ') * 4)\nprint(20000 * 10)\n2 + 2 * 3\na = 128\nprint(type(a))\na = '132'\nprint(type(a))\nnum_str = '720'\nprint(int(num_str))\nnum = 100\nprint(str(num))\nlang = 'python'\nprint(lang[0], lang[2])\nlicense_plate = '24가 2210'\nprint(license_plate[-4:])\nstring = '홀짝홀짝홀짝'\nprint(string[::2])\nprint(''.join([x for x in string if x == '홀']))\nstring = 'PYTHON'\nprint(string[::-1])\nphone_number = '010-1111-2222'\nprint(phone_number.replace('-', ' '))\nprint(phone_number.replace('-', ''))\nurl = 'http://sharebook.kr'\nprint(url[-2:])\nlang = 'python'\nlang[0] = 'P'\nprint(lang)\nstring = 'abcdfe2a354a32a'\nprint(string.replace('a', 'A'))\nstring = 'abcd'\nstring.replace('b', 'B')\nprint(string)\nmovie_rank = ['닥터스트레인지', '스플릿', '럭키']\nmovie_rank.append('배트맨')\nmovie_rank\nmovie_rank.insert(1, '슈퍼맨')\nmovie_rank\nmovie_rank.remove('럭키')\nmovie_rank\ndel movie_rank[2:]\nmovie_rank\nlang1 = ['c', 'c++', 'java']\nlang2 = ['python', 'go', 'c#']\nprint(lang1 + lang2)\nnums = [1, 2, 3, 4, 5, 6, 7]\nprint('max:', max(nums))\nprint('min:', min(nums))\nnums = [1, 2, 3, 4, 5]\nprint(sum(nums))\ncook = ['피자', '김밥', '등등해서', '김치전']\nprint(len(cook))\nnums = [1, 2, 3, 4, 5]\nprint(sum(nums) / len(nums))\nprice = ['20180728', 100, 130, 140, 150, 160, 170]\nprint(price[1:7])\nnums = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\nprint(nums[::2])\nnums = [i for i in range(1, 11)]\nprint(nums[1::2])\nnums = [i for i in range(1, 6)]\nprint(nums[::-1])\ninterest = ['삼성전자', 'LG전자', 'Naver']\nprint(interest[0], interest[2])\ninterest = ['삼성전자', 'LG전자', 'Naver', 'SK하이닉스', '미래에셋대우']\nprint(' '.join(interest))\ninterest = ['삼성전자', 'LG전자', 'Naver', 'SK하이닉스', '미래에셋대우']\nprint('/'.join(interest))\ninterest = ['삼성전자', 'LG전자', 'Naver', 'SK하이닉스', '미래에셋대우']\nprint('\\n'.join(interest))\nstring = '삼성전자/LG전자/Naver'\ninterest = string.split('/')\nprint(interest)\nstring = '삼성전자/LG전자/Naver/SK하이닉스/미래에셋대우'\ninterest = string.split('/')\nprint(interest)\ninterest_0 = ['삼성전자', 'LG전자', 'SK Hynix']\ninterest_1 = interest_0\ninterest_1[0] = 'Naver'\nprint(interest_0)\nprint(interest_1)\ninterest_0 = ['삼성전자', 'LG전자', 'SK Hynix']\ninterest_1 = interest_0[:2]\ninterest_1[0] = 'Naver'\nprint(interest_0)\nprint(interest_1)\nmy_variable = ()\nt = 1, 2, 3\nt[0] = 'a'\ntup = 1,\nt = 1, 2, 3, 4\ntype(t)\nt = 'a', 'b', 'c'\nt = (t[0].upper(),) + t[1:3]\nprint(t)\ninterest = '삼성전자', 'LG전자', 'SK Hynix'\nlist(interest)\ninterest = ['삼성전자', 'LG전자', 'SK Hynix']\ntuple(interest)\nmy_tuple = 1, 2, 3\na, b, c = my_tuple\nprint(a + b + c)\na, b, *c = 0, 1, 2, 3, 4, 5\na\nb\nc\nscores = [8.8, 8.9, 8.7, 9.2, 9.3, 9.7, 9.9, 9.5, 7.8, 9.4]\n_, _, *valid_score = scores\nprint(3 == 5)\nprint(3 < 5)\nx = 4\nprint(1 < x < 5)\nprint(3 == 3 and 4 != 3)\nprint(3 >= 4)\nif 4 < 3:\n print('Hello World')\nif 4 < 3:\n print('Hello World.')\nelse:\n print('Hi, there.')\nif True:\n print('1')\n print('2')\nelse:\n print('3')\nprint('4')\nif True:\n if False:\n print('1')\n print('2')\n else:\n print('3')\nelse:\n print('4')\nprint('5')\nhi = input()\nprint(hi * 2)\nx = input('숫자를 입력하세요: ')\nprint(eval(x) + 10)\nx = int(input('숫자를 입력하세요: '))\nif x % 2 == 0:\n print('짝수')\nelse:\n print('홀수')\nx = int(input('입력값: '))\nif 0 < x <= 235:\n print(x + 20)\nelif x > 235:\n print(255)\nx = int(input('입력값: '))\nprint('출력값: ', min(x + 20, 255))\nx = int(input('입력값: '))\nprint('출력값: ', min(x - 20, 0))\nx = input('현재시간 : ')\nif x.split(':')[1] != '00':\n print('정각이 아닙니다.')\nelse:\n print('정각 입니다.')\nfruit = ['사과', '포도', '홍시']\nx = input('좋아하는 과일은? ')\nif x in fruit:\n print('정답입니다.')\nelse:\n print('오답입니다.')\nwarn_investment_list = ['Microsoft', 'Google', 'Naver', 'Kakao', 'SAMSUNG',\n 'LG']\nwarn_investment_list\nx = input('투자 종목을 입력해주세요: ')\nif x.lower() in [y.lower() for y in warn_investment_list]:\n print('투자 경고 종목입니다.')\nelse:\n print('투자 경고 종목이 아닙니다.')\nfruit = {'봄': '딸기', '여름': '토마토', '가을': '사과'}\nx = input('제가 좋아하는 계절은: ')\nif x in fruit.keys():\n print('정답입니다.')\nelse:\n print('오답입니다.')\nfruit = {'봄': '딸기', '여름': '토마토', '가을': '사과'}\nx = input('좋아하는 과일은?: ')\nif x in fruit.values():\n print('정답입니다.')\nelse:\n print('오답입니다.')\nx = input('문자를 입력하시오: ')\nif x.islower():\n print(True)\nelse:\n print(False)\nscore = int(input('score:'))\nif score > 80:\n print('grade is A')\nelif score > 60:\n print('grade is B')\nelif score > 40:\n print('grade is C')\nelif score > 20:\n print('grade is D')\nelse:\n print('grade is E')\nx, y = input('입력 :').split(' ')\nif y == '달러':\n print(int(x) * 1167, '원')\nelif y == '엔':\n print(int(x) * 1.096, '원')\nelif y == '유로':\n print(int(x) * 1268, '원')\nelif y == '위안':\n print(int(x) * 171, '원')\nx = int(input('input number1: '))\ny = int(input('input number2: '))\nz = int(input('input number3: '))\nprint(max(x, y, z))\nx = input('휴대전화 번호 입력: ').split('-')\ny = {'011': 'SKT', '016': 'KT', '019': 'LGU', '010': '알수없음'}\nprint('당신은 {} 사용자입니다.'.format(y[x[0]]))\nx = input('우편번호: ')[2]\ny = {'강북구': ['0', '1', '2'], '도봉구': ['3', '4', '5'], '노원구': ['6', '7', '8',\n '9']}\n\n\ndef reverse(x, y):\n for a in y:\n if x in y[a]:\n return a\n raise ValueError('숫자 입력하세요.')\n\n\nimport numpy as np\nx = input('주민등록번호: ').split('-')\nnum = list(''.join(x))\nnum2 = list(map(int, num))\na1 = np.array(num2)[:-1]\na2 = np.array([i for i in range(2, 10)] + [j for j in range(2, 6)])\nfirst = sum(a1 * a2) % 11\nsecond = 11 - first\nif second == int(num[-1]):\n print('유효한 주민등록번호입니다.')\nelse:\n print('유효하지 않은 주민등록번호입니다.')\nfor i in ['가', '나', '다', '라']:\n print(i)\nfor 변수 in ['사과', '귤', '수박']:\n print(변수)\nfor 변수 in ['사과', '귤', '수박']:\n print(변수)\n print('--')\nfor 변수 in ['사과', '귤', '수박']:\n print(변수)\nprint('--')\nmenu = ['김밥', '라면', '튀김']\nfor i in menu:\n print('오늘의 메뉴:', i)\npets = ['dog', 'cat', 'parrot', 'squirrel', 'goldfish']\nfor i in pets:\n print(i, len(i))\nprices = [100, 200, 300]\nfor i in prices:\n print(i + 10)\nprices = ['129,300', '1,000', '2,300']\nfor i in prices:\n print(int(i.replace(',', '')))\nmenu = ['면라', '밥김', '김튀']\nfor i in menu:\n print(i[::-1])\nmy_list = ['가', '나', '다', '라']\nfor i in my_list[1:]:\n print(i)\nmy_list = [1, 2, 3, 4, 5, 6]\nfor i in my_list[::2]:\n print(i)\nmy_list = [1, 2, 3, 4, 5, 6]\nfor i in my_list[1::2]:\n print(i)\nmy_list = ['가', '나', '다', '라']\nfor i in my_list[::-1]:\n print(i)\nmy_list = [3, -20, -3, 44]\nfor i in my_list:\n if i < 0:\n print(i)\nmy_list = [3, 100, 23, 44]\nfor i in my_list:\n if i % 3 == 0:\n print(i)\nmy_list = ['I', 'study', 'python', 'language', '!']\nfor i in my_list:\n if len(i) >= 3:\n print(i)\nmy_list = [3, 1, 7, 10, 5, 6]\nfor i in my_list:\n if 5 < i < 10:\n print(i)\nfor i in my_list:\n if i > 5 and i < 10:\n print(i)\nmy_list = [13, 21, 12, 14, 30, 18]\nfor i in my_list:\n if i > 10 and i < 20 and i % 3 == 0:\n print(i)\nmy_list = [3, 1, 7, 12, 5, 16]\nfor i in my_list:\n if i % 3 == 0 or i % 4 == 0:\n print(i)\nmy_list = ['A', 'b', 'c', 'D']\nfor i in my_list:\n if i.isupper():\n print(i)\nmy_list = ['A', 'b', 'c', 'D']\nfor i in my_list:\n if i.islower():\n print(i)\nmy_list = ['A', 'b', 'c', 'D']\nfor i in my_list:\n if i.isupper():\n print(i.lower(), end='')\n else:\n print(i.upper(), end='')\nfile_list = ['hello.py', 'ex01.py', 'ch02.py', 'intro.hwp']\nfor i in file_list:\n print(i.split('.')[0])\nfilenames = ['intra.h', 'intra.c', 'define.h', 'run.py']\nfor i in filenames:\n if i.split('.')[1] == 'h':\n print(i)\nfilenames = ['intra.h', 'intra.c', 'define.h', 'run.py']\nfor i in filenames:\n if i.split('.')[1] == 'h' or i.split('.')[1] == 'c':\n print(i)\nmy_list = [3, -20, -3, 44]\nnew_list = []\nfor i in my_list:\n if i > 0:\n new_list.append(i)\nprint(new_list)\nmy_list = ['A', 'b', 'c', 'D']\nupper_list = []\nfor i in my_list:\n if i.isupper():\n upper_list.append(i)\nprint(upper_list)\nmy_list = [3, 4, 4, 5, 6, 6]\nsole_list = []\nfor i in my_list:\n if i not in sole_list:\n sole_list.append(i)\nprint(sole_list)\nmy_list = [3, 4, 5]\na = 0\nfor i in my_list:\n a += i\nprint(a)\n", "print('Hello World')\nprint(\"Mary's cosmetics\")\nprint('신씨가 소리질렀다. \"도둑이야\".')\nprint('C:\\\\Windows')\nprint(\"\"\"안녕하세요. \n만나서\t\t반갑습니다.\"\"\")\nprint('오늘은', '일요일')\nprint('naver', 'kakao', 'sk', 'samsung', sep=';')\nprint('naver', 'kakao', 'sk', 'samsung', sep='/')\nprint('first', end='')\nprint('second')\nstring = 'dk2jd923i1jdk2jd93jfd92jd918943jfd8923'\nprint(len(string))\na = '3'\nb = '4'\nprint(a + b)\ns = 'hello'\nt = 'python'\nprint(s + '!', t)\nprint('hi' * 3)\nprint('-' * 80)\nt1 = 'python'\nt2 = 'java'\nprint((t1 + ' ' + t2 + ' ') * 4)\nprint(20000 * 10)\n2 + 2 * 3\na = 128\nprint(type(a))\na = '132'\nprint(type(a))\nnum_str = '720'\nprint(int(num_str))\nnum = 100\nprint(str(num))\nlang = 'python'\nprint(lang[0], lang[2])\nlicense_plate = '24가 2210'\nprint(license_plate[-4:])\nstring = '홀짝홀짝홀짝'\nprint(string[::2])\nprint(''.join([x for x in string if x == '홀']))\nstring = 'PYTHON'\nprint(string[::-1])\nphone_number = '010-1111-2222'\nprint(phone_number.replace('-', ' '))\nprint(phone_number.replace('-', ''))\nurl = 'http://sharebook.kr'\nprint(url[-2:])\nlang = 'python'\nlang[0] = 'P'\nprint(lang)\nstring = 'abcdfe2a354a32a'\nprint(string.replace('a', 'A'))\nstring = 'abcd'\nstring.replace('b', 'B')\nprint(string)\nmovie_rank = ['닥터스트레인지', '스플릿', '럭키']\nmovie_rank.append('배트맨')\nmovie_rank\nmovie_rank.insert(1, '슈퍼맨')\nmovie_rank\nmovie_rank.remove('럭키')\nmovie_rank\ndel movie_rank[2:]\nmovie_rank\nlang1 = ['c', 'c++', 'java']\nlang2 = ['python', 'go', 'c#']\nprint(lang1 + lang2)\nnums = [1, 2, 3, 4, 5, 6, 7]\nprint('max:', max(nums))\nprint('min:', min(nums))\nnums = [1, 2, 3, 4, 5]\nprint(sum(nums))\ncook = ['피자', '김밥', '등등해서', '김치전']\nprint(len(cook))\nnums = [1, 2, 3, 4, 5]\nprint(sum(nums) / len(nums))\nprice = ['20180728', 100, 130, 140, 150, 160, 170]\nprint(price[1:7])\nnums = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\nprint(nums[::2])\nnums = [i for i in range(1, 11)]\nprint(nums[1::2])\nnums = [i for i in range(1, 6)]\nprint(nums[::-1])\ninterest = ['삼성전자', 'LG전자', 'Naver']\nprint(interest[0], interest[2])\ninterest = ['삼성전자', 'LG전자', 'Naver', 'SK하이닉스', '미래에셋대우']\nprint(' '.join(interest))\ninterest = ['삼성전자', 'LG전자', 'Naver', 'SK하이닉스', '미래에셋대우']\nprint('/'.join(interest))\ninterest = ['삼성전자', 'LG전자', 'Naver', 'SK하이닉스', '미래에셋대우']\nprint('\\n'.join(interest))\nstring = '삼성전자/LG전자/Naver'\ninterest = string.split('/')\nprint(interest)\nstring = '삼성전자/LG전자/Naver/SK하이닉스/미래에셋대우'\ninterest = string.split('/')\nprint(interest)\ninterest_0 = ['삼성전자', 'LG전자', 'SK Hynix']\ninterest_1 = interest_0\ninterest_1[0] = 'Naver'\nprint(interest_0)\nprint(interest_1)\ninterest_0 = ['삼성전자', 'LG전자', 'SK Hynix']\ninterest_1 = interest_0[:2]\ninterest_1[0] = 'Naver'\nprint(interest_0)\nprint(interest_1)\nmy_variable = ()\nt = 1, 2, 3\nt[0] = 'a'\ntup = 1,\nt = 1, 2, 3, 4\ntype(t)\nt = 'a', 'b', 'c'\nt = (t[0].upper(),) + t[1:3]\nprint(t)\ninterest = '삼성전자', 'LG전자', 'SK Hynix'\nlist(interest)\ninterest = ['삼성전자', 'LG전자', 'SK Hynix']\ntuple(interest)\nmy_tuple = 1, 2, 3\na, b, c = my_tuple\nprint(a + b + c)\na, b, *c = 0, 1, 2, 3, 4, 5\na\nb\nc\nscores = [8.8, 8.9, 8.7, 9.2, 9.3, 9.7, 9.9, 9.5, 7.8, 9.4]\n_, _, *valid_score = scores\nprint(3 == 5)\nprint(3 < 5)\nx = 4\nprint(1 < x < 5)\nprint(3 == 3 and 4 != 3)\nprint(3 >= 4)\nif 4 < 3:\n print('Hello World')\nif 4 < 3:\n print('Hello World.')\nelse:\n print('Hi, there.')\nif True:\n print('1')\n print('2')\nelse:\n print('3')\nprint('4')\nif True:\n if False:\n print('1')\n print('2')\n else:\n print('3')\nelse:\n print('4')\nprint('5')\nhi = input()\nprint(hi * 2)\nx = input('숫자를 입력하세요: ')\nprint(eval(x) + 10)\nx = int(input('숫자를 입력하세요: '))\nif x % 2 == 0:\n print('짝수')\nelse:\n print('홀수')\nx = int(input('입력값: '))\nif 0 < x <= 235:\n print(x + 20)\nelif x > 235:\n print(255)\nx = int(input('입력값: '))\nprint('출력값: ', min(x + 20, 255))\nx = int(input('입력값: '))\nprint('출력값: ', min(x - 20, 0))\nx = input('현재시간 : ')\nif x.split(':')[1] != '00':\n print('정각이 아닙니다.')\nelse:\n print('정각 입니다.')\nfruit = ['사과', '포도', '홍시']\nx = input('좋아하는 과일은? ')\nif x in fruit:\n print('정답입니다.')\nelse:\n print('오답입니다.')\nwarn_investment_list = ['Microsoft', 'Google', 'Naver', 'Kakao', 'SAMSUNG',\n 'LG']\nwarn_investment_list\nx = input('투자 종목을 입력해주세요: ')\nif x.lower() in [y.lower() for y in warn_investment_list]:\n print('투자 경고 종목입니다.')\nelse:\n print('투자 경고 종목이 아닙니다.')\nfruit = {'봄': '딸기', '여름': '토마토', '가을': '사과'}\nx = input('제가 좋아하는 계절은: ')\nif x in fruit.keys():\n print('정답입니다.')\nelse:\n print('오답입니다.')\nfruit = {'봄': '딸기', '여름': '토마토', '가을': '사과'}\nx = input('좋아하는 과일은?: ')\nif x in fruit.values():\n print('정답입니다.')\nelse:\n print('오답입니다.')\nx = input('문자를 입력하시오: ')\nif x.islower():\n print(True)\nelse:\n print(False)\nscore = int(input('score:'))\nif score > 80:\n print('grade is A')\nelif score > 60:\n print('grade is B')\nelif score > 40:\n print('grade is C')\nelif score > 20:\n print('grade is D')\nelse:\n print('grade is E')\nx, y = input('입력 :').split(' ')\nif y == '달러':\n print(int(x) * 1167, '원')\nelif y == '엔':\n print(int(x) * 1.096, '원')\nelif y == '유로':\n print(int(x) * 1268, '원')\nelif y == '위안':\n print(int(x) * 171, '원')\nx = int(input('input number1: '))\ny = int(input('input number2: '))\nz = int(input('input number3: '))\nprint(max(x, y, z))\nx = input('휴대전화 번호 입력: ').split('-')\ny = {'011': 'SKT', '016': 'KT', '019': 'LGU', '010': '알수없음'}\nprint('당신은 {} 사용자입니다.'.format(y[x[0]]))\nx = input('우편번호: ')[2]\ny = {'강북구': ['0', '1', '2'], '도봉구': ['3', '4', '5'], '노원구': ['6', '7', '8',\n '9']}\n\n\ndef reverse(x, y):\n for a in y:\n if x in y[a]:\n return a\n raise ValueError('숫자 입력하세요.')\n\n\n<import token>\nx = input('주민등록번호: ').split('-')\nnum = list(''.join(x))\nnum2 = list(map(int, num))\na1 = np.array(num2)[:-1]\na2 = np.array([i for i in range(2, 10)] + [j for j in range(2, 6)])\nfirst = sum(a1 * a2) % 11\nsecond = 11 - first\nif second == int(num[-1]):\n print('유효한 주민등록번호입니다.')\nelse:\n print('유효하지 않은 주민등록번호입니다.')\nfor i in ['가', '나', '다', '라']:\n print(i)\nfor 변수 in ['사과', '귤', '수박']:\n print(변수)\nfor 변수 in ['사과', '귤', '수박']:\n print(변수)\n print('--')\nfor 변수 in ['사과', '귤', '수박']:\n print(변수)\nprint('--')\nmenu = ['김밥', '라면', '튀김']\nfor i in menu:\n print('오늘의 메뉴:', i)\npets = ['dog', 'cat', 'parrot', 'squirrel', 'goldfish']\nfor i in pets:\n print(i, len(i))\nprices = [100, 200, 300]\nfor i in prices:\n print(i + 10)\nprices = ['129,300', '1,000', '2,300']\nfor i in prices:\n print(int(i.replace(',', '')))\nmenu = ['면라', '밥김', '김튀']\nfor i in menu:\n print(i[::-1])\nmy_list = ['가', '나', '다', '라']\nfor i in my_list[1:]:\n print(i)\nmy_list = [1, 2, 3, 4, 5, 6]\nfor i in my_list[::2]:\n print(i)\nmy_list = [1, 2, 3, 4, 5, 6]\nfor i in my_list[1::2]:\n print(i)\nmy_list = ['가', '나', '다', '라']\nfor i in my_list[::-1]:\n print(i)\nmy_list = [3, -20, -3, 44]\nfor i in my_list:\n if i < 0:\n print(i)\nmy_list = [3, 100, 23, 44]\nfor i in my_list:\n if i % 3 == 0:\n print(i)\nmy_list = ['I', 'study', 'python', 'language', '!']\nfor i in my_list:\n if len(i) >= 3:\n print(i)\nmy_list = [3, 1, 7, 10, 5, 6]\nfor i in my_list:\n if 5 < i < 10:\n print(i)\nfor i in my_list:\n if i > 5 and i < 10:\n print(i)\nmy_list = [13, 21, 12, 14, 30, 18]\nfor i in my_list:\n if i > 10 and i < 20 and i % 3 == 0:\n print(i)\nmy_list = [3, 1, 7, 12, 5, 16]\nfor i in my_list:\n if i % 3 == 0 or i % 4 == 0:\n print(i)\nmy_list = ['A', 'b', 'c', 'D']\nfor i in my_list:\n if i.isupper():\n print(i)\nmy_list = ['A', 'b', 'c', 'D']\nfor i in my_list:\n if i.islower():\n print(i)\nmy_list = ['A', 'b', 'c', 'D']\nfor i in my_list:\n if i.isupper():\n print(i.lower(), end='')\n else:\n print(i.upper(), end='')\nfile_list = ['hello.py', 'ex01.py', 'ch02.py', 'intro.hwp']\nfor i in file_list:\n print(i.split('.')[0])\nfilenames = ['intra.h', 'intra.c', 'define.h', 'run.py']\nfor i in filenames:\n if i.split('.')[1] == 'h':\n print(i)\nfilenames = ['intra.h', 'intra.c', 'define.h', 'run.py']\nfor i in filenames:\n if i.split('.')[1] == 'h' or i.split('.')[1] == 'c':\n print(i)\nmy_list = [3, -20, -3, 44]\nnew_list = []\nfor i in my_list:\n if i > 0:\n new_list.append(i)\nprint(new_list)\nmy_list = ['A', 'b', 'c', 'D']\nupper_list = []\nfor i in my_list:\n if i.isupper():\n upper_list.append(i)\nprint(upper_list)\nmy_list = [3, 4, 4, 5, 6, 6]\nsole_list = []\nfor i in my_list:\n if i not in sole_list:\n sole_list.append(i)\nprint(sole_list)\nmy_list = [3, 4, 5]\na = 0\nfor i in my_list:\n a += i\nprint(a)\n", "print('Hello World')\nprint(\"Mary's cosmetics\")\nprint('신씨가 소리질렀다. \"도둑이야\".')\nprint('C:\\\\Windows')\nprint(\"\"\"안녕하세요. \n만나서\t\t반갑습니다.\"\"\")\nprint('오늘은', '일요일')\nprint('naver', 'kakao', 'sk', 'samsung', sep=';')\nprint('naver', 'kakao', 'sk', 'samsung', sep='/')\nprint('first', end='')\nprint('second')\n<assignment token>\nprint(len(string))\n<assignment token>\nprint(a + b)\n<assignment token>\nprint(s + '!', t)\nprint('hi' * 3)\nprint('-' * 80)\n<assignment token>\nprint((t1 + ' ' + t2 + ' ') * 4)\nprint(20000 * 10)\n2 + 2 * 3\n<assignment token>\nprint(type(a))\n<assignment token>\nprint(type(a))\n<assignment token>\nprint(int(num_str))\n<assignment token>\nprint(str(num))\n<assignment token>\nprint(lang[0], lang[2])\n<assignment token>\nprint(license_plate[-4:])\n<assignment token>\nprint(string[::2])\nprint(''.join([x for x in string if x == '홀']))\n<assignment token>\nprint(string[::-1])\n<assignment token>\nprint(phone_number.replace('-', ' '))\nprint(phone_number.replace('-', ''))\n<assignment token>\nprint(url[-2:])\n<assignment token>\nprint(lang)\n<assignment token>\nprint(string.replace('a', 'A'))\n<assignment token>\nstring.replace('b', 'B')\nprint(string)\n<assignment token>\nmovie_rank.append('배트맨')\nmovie_rank\nmovie_rank.insert(1, '슈퍼맨')\nmovie_rank\nmovie_rank.remove('럭키')\nmovie_rank\ndel movie_rank[2:]\nmovie_rank\n<assignment token>\nprint(lang1 + lang2)\n<assignment token>\nprint('max:', max(nums))\nprint('min:', min(nums))\n<assignment token>\nprint(sum(nums))\n<assignment token>\nprint(len(cook))\n<assignment token>\nprint(sum(nums) / len(nums))\n<assignment token>\nprint(price[1:7])\n<assignment token>\nprint(nums[::2])\n<assignment token>\nprint(nums[1::2])\n<assignment token>\nprint(nums[::-1])\n<assignment token>\nprint(interest[0], interest[2])\n<assignment token>\nprint(' '.join(interest))\n<assignment token>\nprint('/'.join(interest))\n<assignment token>\nprint('\\n'.join(interest))\n<assignment token>\nprint(interest)\n<assignment token>\nprint(interest)\n<assignment token>\nprint(interest_0)\nprint(interest_1)\n<assignment token>\nprint(interest_0)\nprint(interest_1)\n<assignment token>\ntype(t)\n<assignment token>\nprint(t)\n<assignment token>\nlist(interest)\n<assignment token>\ntuple(interest)\n<assignment token>\nprint(a + b + c)\n<assignment token>\na\nb\nc\n<assignment token>\nprint(3 == 5)\nprint(3 < 5)\n<assignment token>\nprint(1 < x < 5)\nprint(3 == 3 and 4 != 3)\nprint(3 >= 4)\nif 4 < 3:\n print('Hello World')\nif 4 < 3:\n print('Hello World.')\nelse:\n print('Hi, there.')\nif True:\n print('1')\n print('2')\nelse:\n print('3')\nprint('4')\nif True:\n if False:\n print('1')\n print('2')\n else:\n print('3')\nelse:\n print('4')\nprint('5')\n<assignment token>\nprint(hi * 2)\n<assignment token>\nprint(eval(x) + 10)\n<assignment token>\nif x % 2 == 0:\n print('짝수')\nelse:\n print('홀수')\n<assignment token>\nif 0 < x <= 235:\n print(x + 20)\nelif x > 235:\n print(255)\n<assignment token>\nprint('출력값: ', min(x + 20, 255))\n<assignment token>\nprint('출력값: ', min(x - 20, 0))\n<assignment token>\nif x.split(':')[1] != '00':\n print('정각이 아닙니다.')\nelse:\n print('정각 입니다.')\n<assignment token>\nif x in fruit:\n print('정답입니다.')\nelse:\n print('오답입니다.')\n<assignment token>\nwarn_investment_list\n<assignment token>\nif x.lower() in [y.lower() for y in warn_investment_list]:\n print('투자 경고 종목입니다.')\nelse:\n print('투자 경고 종목이 아닙니다.')\n<assignment token>\nif x in fruit.keys():\n print('정답입니다.')\nelse:\n print('오답입니다.')\n<assignment token>\nif x in fruit.values():\n print('정답입니다.')\nelse:\n print('오답입니다.')\n<assignment token>\nif x.islower():\n print(True)\nelse:\n print(False)\n<assignment token>\nif score > 80:\n print('grade is A')\nelif score > 60:\n print('grade is B')\nelif score > 40:\n print('grade is C')\nelif score > 20:\n print('grade is D')\nelse:\n print('grade is E')\n<assignment token>\nif y == '달러':\n print(int(x) * 1167, '원')\nelif y == '엔':\n print(int(x) * 1.096, '원')\nelif y == '유로':\n print(int(x) * 1268, '원')\nelif y == '위안':\n print(int(x) * 171, '원')\n<assignment token>\nprint(max(x, y, z))\n<assignment token>\nprint('당신은 {} 사용자입니다.'.format(y[x[0]]))\n<assignment token>\n\n\ndef reverse(x, y):\n for a in y:\n if x in y[a]:\n return a\n raise ValueError('숫자 입력하세요.')\n\n\n<import token>\n<assignment token>\nif second == int(num[-1]):\n print('유효한 주민등록번호입니다.')\nelse:\n print('유효하지 않은 주민등록번호입니다.')\nfor i in ['가', '나', '다', '라']:\n print(i)\nfor 변수 in ['사과', '귤', '수박']:\n print(변수)\nfor 변수 in ['사과', '귤', '수박']:\n print(변수)\n print('--')\nfor 변수 in ['사과', '귤', '수박']:\n print(변수)\nprint('--')\n<assignment token>\nfor i in menu:\n print('오늘의 메뉴:', i)\n<assignment token>\nfor i in pets:\n print(i, len(i))\n<assignment token>\nfor i in prices:\n print(i + 10)\n<assignment token>\nfor i in prices:\n print(int(i.replace(',', '')))\n<assignment token>\nfor i in menu:\n print(i[::-1])\n<assignment token>\nfor i in my_list[1:]:\n print(i)\n<assignment token>\nfor i in my_list[::2]:\n print(i)\n<assignment token>\nfor i in my_list[1::2]:\n print(i)\n<assignment token>\nfor i in my_list[::-1]:\n print(i)\n<assignment token>\nfor i in my_list:\n if i < 0:\n print(i)\n<assignment token>\nfor i in my_list:\n if i % 3 == 0:\n print(i)\n<assignment token>\nfor i in my_list:\n if len(i) >= 3:\n print(i)\n<assignment token>\nfor i in my_list:\n if 5 < i < 10:\n print(i)\nfor i in my_list:\n if i > 5 and i < 10:\n print(i)\n<assignment token>\nfor i in my_list:\n if i > 10 and i < 20 and i % 3 == 0:\n print(i)\n<assignment token>\nfor i in my_list:\n if i % 3 == 0 or i % 4 == 0:\n print(i)\n<assignment token>\nfor i in my_list:\n if i.isupper():\n print(i)\n<assignment token>\nfor i in my_list:\n if i.islower():\n print(i)\n<assignment token>\nfor i in my_list:\n if i.isupper():\n print(i.lower(), end='')\n else:\n print(i.upper(), end='')\n<assignment token>\nfor i in file_list:\n print(i.split('.')[0])\n<assignment token>\nfor i in filenames:\n if i.split('.')[1] == 'h':\n print(i)\n<assignment token>\nfor i in filenames:\n if i.split('.')[1] == 'h' or i.split('.')[1] == 'c':\n print(i)\n<assignment token>\nfor i in my_list:\n if i > 0:\n new_list.append(i)\nprint(new_list)\n<assignment token>\nfor i in my_list:\n if i.isupper():\n upper_list.append(i)\nprint(upper_list)\n<assignment token>\nfor i in my_list:\n if i not in sole_list:\n sole_list.append(i)\nprint(sole_list)\n<assignment token>\nfor i in my_list:\n a += i\nprint(a)\n", "<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\ndef reverse(x, y):\n for a in y:\n if x in y[a]:\n return a\n raise ValueError('숫자 입력하세요.')\n\n\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
98,952
c21c341cb7eabbda0fd7f2899a5c8b6e0bf5d93b
r"""@author Sebastien E. Bourban """ from __future__ import print_function # _____ ___________________________________________________ # ____/ Imports /__________________________________________________/ # # ~~> dependencies towards other modules from compilation.parser_fortran import clean_quotes from data_manip.extraction.parser_lqd import LQD from data_manip.extraction.parser_kenue import InS from data_manip.conversion import convert_utm as utm from data_manip.formats.selafins import Selafins from data_manip.formats.selafin import Selafin from utils.files import move_file from utils.exceptions import TelemacException from pretel.meshes import tessellate_poly from pretel.scan_selafin import ScanSelafin from pretel.alter_selafin import AlterSelafin from pretel.chop_selafin import ChopSelafin from pretel.crunch_selafin import CrunchSelafin from pretel.scan_spectral import ScanSpectral from pretel.sub_selafin import SubSelafin from pretel.calcs_selafin import CalcsSelafin from pretel.transf_selafin import TransfSelafin # ~~> dependencies towards standard python from os import path import numpy as np def scan(options): """ Scan of a file """ slf_files = options.input_files for slf_file in slf_files: slf_file = path.realpath(slf_file) if not path.exists(slf_file): raise TelemacException(\ '\nCould not find the file named: {}'.format(slf_file)) print('\n\nScanning ' + path.basename(slf_file) + ' within ' + \ path.dirname(slf_file) + '\n'+'~'*72+'\n') vrs = options.xvars if options.xvars != None: vrs = clean_quotes(options.xvars.replace('_', ' ')) times = (int(options.tfrom), int(options.tstep), int(options.tstop)) slf = ScanSelafin(slf_file, times=times, vrs=vrs) slf.print_header() if options.core: slf.print_core() else: slf.print_time_summary() def spec(options): """ Spectral file """ slf_files = options.input_files for slf_file in slf_files: slf_file = path.realpath(slf_file) if not path.exists(slf_file): raise TelemacException(\ '\nCould not find the file named: {}'.format(slf_file)) print('\n\nScanning ' + path.basename(slf_file) + ' within ' + \ path.dirname(slf_file) + '\n'+'~'*72+'\n') vrs = options.xvars if options.xvars != None: vrs = clean_quotes(options.xvars.replace('_', ' ')) times = (int(options.tfrom), int(options.tstep), int(options.tstop)) slf = ScanSpectral(slf_file, times=times, vrs=vrs) slf.print_header() if options.core: slf.print_core(int(options.accuracy)) else: slf.print_time_summary() def chop(options): """ Chopping of a file """ root_file = None if not options.freplace: if not options.parallel: if len(options.args) != 2: raise TelemacException(\ '\nThe code "chop" (without --replace) ' 'here requires 2 file names\n') slf_files = [options.args[0]] out_file = options.args[1] else: if len(options.args) != 3: raise TelemacException(\ '\nThe code "chop" (without --replace) ' 'here requires 2 file names and ' '1 file root name for the partition\n') slf_files = [options.args[0]] root_file = options.args[1] out_file = options.args[2] else: slf_files = options.args out_file = "chop-tmp.slf" for slf_file in slf_files: slf_file = path.realpath(slf_file) if not path.exists(slf_file): raise TelemacException(\ '\nCould not find the file named: {}'.format(slf_file)) print('\n\nChoping ' + path.basename(slf_file) + ' within ' + \ path.dirname(slf_file) + '\n'+'~'*72+'\n') vrs = options.xvars if options.xvars != None: vrs = clean_quotes(options.xvars.replace('_', ' ')) times = (int(options.tfrom), int(options.tstep), int(options.tstop)) slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file) if options.eswitch: slf.alter_endian() if options.fswitch: slf.alter_float() slf.put_content(out_file) if options.freplace: move_file(out_file, slf_file) def alter(options): """ Modifications in the file """ root_file = None if not options.freplace: if not options.parallel: if len(options.args) != 2: raise TelemacException(\ '\nThe code "alter" (without --replace) ' 'requires 2 file names\n') slf_files = [options.args[0]] out_file = options.args[1] else: if len(options.args) != 3: raise TelemacException(\ '\nThe code "alter" (without --replace) ' 'here requires 2 file names and ' '1 file root name for the partition\n') slf_files = [options.args[0]] root_file = options.args[1] out_file = options.args[2] else: slf_files = options.args out_file = "chop-tmp.slf" for slf_file in slf_files: slf_file = path.realpath(slf_file) if not path.exists(slf_file): raise TelemacException(\ '\nCould not find the file named: {}'.format(slf_file)) print('\n\nAltering ' + path.basename(slf_file) + ' within ' + \ path.dirname(slf_file) + '\n'+'~'*72+'\n') vrs = options.xvars if options.xvars != None: vrs = clean_quotes(options.xvars.replace('_', ' ')) times = (int(options.tfrom), int(options.tstep), int(options.tstop)) slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file) if options.atitle != None: slf.alter_title(options.atitle) if options.areset: slf.alter_times(p_t=-slf.slf.tags['times'][0]) if options.adate != None: slf.alter_datetime(date=options.adate.split('-')) if options.atime != None: slf.alter_datetime(time=options.atime.split(':')) if options.aswitch: slf.switch_vars() if options.eswitch: slf.alter_endian() if options.fswitch: slf.alter_float() if options.aname != None: slf.alter_vars(options.aname) slf.alter_times(m_t=float(options.atm), p_t=float(options.atp)) slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=float(options.aym), p_y=float(options.ayp)) if options.azname != None: slf.alter_values(options.azname, m_z=float(options.azm), p_z=float(options.azp)) if options.sph2ll != None: radius = 6371000. long0, lat0 = options.sph2ll.split(":") long0 = np.deg2rad(float(long0)) lat0 = np.deg2rad(float(lat0)) const = np.tan(lat0/2. + np.pi/4.) slf.slf.meshx = np.rad2deg(slf.slf.meshx/radius + long0) expo = np.exp(slf.slf.meshy/radius) slf.slf.meshy = np.rad2deg(2.*np.arctan(const*expo) - np.pi/2.) if options.ll2sph != None: radius = 6371000. long0, lat0 = options.ll2sph.split(":") long0 = np.deg2rad(float(long0)) lat0 = np.deg2rad(float(lat0)) slf.slf.meshx = radius * (np.deg2rad(slf.slf.meshx) - long0) slf.slf.meshy = radius * \ (np.log(np.tan(np.deg2rad(slf.slf.meshy)/2. + np.pi/4.)) \ - np.log(np.tan(lat0/2. + np.pi/4.))) if options.ll2utm != None: zone = int(options.ll2utm) slf.slf.meshx, slf.slf.meshy, zone = \ utm.from_lat_long(slf.slf.meshx, slf.slf.meshy, zone) if options.utm2ll != None: zone = int(options.utm2ll) slf.slf.meshx, slf.slf.meshy = \ utm.to_lat_long(slf.slf.meshx, slf.slf.meshy, zone) slf.put_content(out_file) if options.freplace: move_file(out_file, slf_file) def merge(options): """ Merging two selafin files """ root_file = None if not options.parallel: if len(options.args) < 3: raise TelemacException(\ '\nThe code "merge" requires ' 'at leat 2 file names, aside ' 'from the options\n') slf_files = options.args[0:len(options.args)-1] out_file = options.args[len(options.args)-1] slfs = Selafins() print('\n\nMerging into ' + path.basename(out_file) + ' within ' + \ path.dirname(out_file) + '\n'+'~'*72+'\n') for slf_file in slf_files: slf_file = path.realpath(slf_file) if not path.exists(slf_file): raise TelemacException(\ '\nCould not find ' 'the file named: {}'.format(slf_file)) slfs.add(slf_file) slfs.put_content(out_file) else: if len(options.args) != 3: raise TelemacException(\ '\nThe code "merge" here requires ' '2 file names and ' '1 file root name for the partition\n') slf_file = options.args[0] root_file = options.args[1] out_file = options.args[2] print('\n\nMerging into ' + path.basename(out_file) + ' within ' \ + path.dirname(out_file) + '\n'+'~'*72+'\n') slf_file = path.realpath(slf_file) if not path.exists(slf_file): raise TelemacException(\ '\nCould not find ' 'the file named: {}'.format(slf_file)) vrs = options.xvars if options.xvars != None: vrs = clean_quotes(options.xvars.replace('_', ' ')) times = (int(options.tfrom), int(options.tstep), int(options.tstop)) slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file) if options.atitle != None: slf.alter_title(options.atitle) if options.areset: slf.alter_times(p_t=-slf.slf.tags['times'][0]) if options.adate != None: slf.alter_datetime(date=options.adate.split('-')) if options.atime != None: slf.alter_datetime(time=options.atime.split(':')) if options.aswitch: slf.switch_vars() if options.eswitch: slf.alter_endian() if options.fswitch: slf.alter_float() if options.aname != None: slf.alter_vars(options.aname) slf.alter_times(m_t=float(options.atm), p_t=float(options.atp)) slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=float(options.aym), p_y=float(options.ayp)) if options.azname != None: slf.alter_values(options.azname, m_z=float(options.azm), p_z=float(options.azp)) slf.put_content(out_file) def diff(options): """ diff between two serafin files """ if len(options.args) < 2: raise TelemacException(\ '\nThe code "diff" uses a minimum of ' '3 argumensts, aside from the options\n') slf_files = options.args[0:len(options.args)-1] out_file = options.args[len(options.args)-1] slfs = Selafins() print('\n\nDifferences into {}\n{}\n'.format(path.basename(out_file), '~'*72)) for slf_file in slf_files: slf_file = path.realpath(slf_file) if not path.exists(slf_file): raise TelemacException(\ '\nCould not find ' 'the file named: {}'.format(slf_file)) slfs.add(slf_file) slfs.put_content(out_file) def sample(options): """ Set liquid boundary file from a selafin file """ root_file = None if not options.parallel: if len(options.args) < 3: raise TelemacException(\ '\nThe code "sample" requires ' 'at least 2 file names and ' 'one series of node numbers\n') slf_file = options.args[0] out_file = options.args[1] nod_list = [] for nod in options.args[2].split(" "): nod_list.append(int(nod)) else: if len(options.args) != 4: raise TelemacException(\ '\nThe code "sample" here ' 'requires 2 file names, ' '1 file root name for the partition and ' '1 series of node numbers\n') slf_file = options.args[0] root_file = options.args[1] out_file = options.args[2] nod_list = [] for nod in options.args[3].split(" "): nod_list.append(int(nod)) slf_file = path.realpath(slf_file) if not path.exists(slf_file): raise TelemacException(\ '\nCould not find the file named: ' '{}'.format(slf_file)) print('\n\nSample ' + path.basename(slf_file) + ' within ' + \ path.dirname(slf_file) + '\n'+'~'*72+'\n') vrs = options.xvars if options.xvars != None: vrs = clean_quotes(options.xvars.replace('_', ' ')) times = (int(options.tfrom), int(options.tstep), int(options.tstop)) slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file) lqd = LQD(vrs=[zip(slf.slf.varnames, slf.slf.varunits), nod_list], date=slf.slf.datetime, times=slf.slf.tags['times'], series=slf.get_series(nod_list)) lqd.put_content(out_file) def subdivide(options): """ Subdivide a mesh """ if not options.freplace: if len(options.args) != 2: raise TelemacException(\ '\nThe code "subdivide" ' '(without --replace) here ' 'requires 2 file names\n') slf_file = options.args[0] out_file = options.args[1] else: if len(options.args) != 1: raise TelemacException(\ '\nThe code "subdivide" (with --replace) ' 'here requires 1 file name at a time\n') slf_file = options.args[0] out_file = "subdivide-tmp.slf" slf_file = path.realpath(slf_file) if not path.exists(slf_file): raise TelemacException(\ '\nCould not find' ' the file named: {}'.format(slf_file)) print('\n\nSubdividing ' + path.basename(slf_file) + ' within ' + \ path.dirname(slf_file) + '\n'+'~'*72+'\n') slf = SubSelafin(slf_file) slf.put_content(out_file) if options.freplace: move_file(out_file, slf_file) def tesselate(options): """ Generate a mesh from a polygon """ if not options.freplace: if len(options.args) != 2: raise TelemacException(\ '\nThe code "tessellate" here ' 'requires one i2s/i3s file and ' 'one output slf file\n') i3s_file = options.args[0] out_file = options.args[1] else: if len(options.args) != 1: raise TelemacException(\ '\nThe code "tessellate" here ' 'requires one i2s/i3s file\n') i3s_file = options.args[0] head, _ = path.splitext(i3s_file) out_file = head+'.slf' i3s_file = path.realpath(i3s_file) if not path.exists(i3s_file): raise TelemacException(\ '\nCould not find ' 'the file named: {}'.format(i3s_file)) print('\n\nTessellating ' + path.basename(i3s_file) + ' within ' + \ path.dirname(i3s_file) + '\n'+'~'*72+'\n') i2s = InS(i3s_file) ikle2, ipob2, meshx, meshy = tessellate_poly(i2s, debug=True) print('\n\nWriting down the Selafin file ' + \ path.basename(out_file) + '\n'+'~'*72+'\n') slf = Selafin('') slf.title = '' slf.nplan = 1 slf.ndp2 = 3 slf.ndp3 = 3 slf.nbv1 = 1 slf.nvar = 1 slf.varindex = 1 slf.varnames = ['BOTTOM '] slf.varunits = ['M '] slf.ikle2 = ikle2 slf.ikle3 = slf.ikle2 slf.meshx = meshx slf.meshy = meshy slf.npoin2 = i2s.npoin slf.npoin3 = slf.npoin2 slf.nelem2 = len(slf.ikle2)/slf.ndp3 slf.nelem3 = slf.nelem2 slf.iparam = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0] slf.ipob2 = ipob2 slf.ipob3 = slf.ipob2 slf.fole = {'hook':open(out_file, 'wb'), 'endian':">", 'float':('f', 4), 'name':out_file} slf.tags['times'] = [1] if options.sph2ll != None: radius = 6371000. long0, lat0 = options.sph2ll.split(":") long0 = np.deg2rad(float(long0)) lat0 = np.deg2rad(float(lat0)) const = np.tan(lat0/2. + np.pi/4.) slf.meshx = np.rad2deg(slf.meshx/radius + long0) slf.meshy = np.rad2deg(2.*np.arctan(const*np.exp(slf.meshy/radius)) \ - np.pi/2.) if options.ll2sph != None: radius = 6371000. long0, lat0 = options.ll2sph.split(":") long0 = np.deg2rad(float(long0)) lat0 = np.deg2rad(float(lat0)) slf.meshx = radius * (np.deg2rad(slf.meshx) - long0) slf.meshy = radius * \ (np.log(np.tan(np.deg2rad(slf.meshy)/2. + np.pi/4.)) \ - np.log(np.tan(lat0/2. + np.pi/4.))) if options.ll2utm != None: zone = int(options.ll2utm) slf.meshx, slf.meshy, zone = utm.from_lat_long(slf.meshx, slf.meshy, zone) if options.utm2ll != None: zone = int(options.utm2ll) slf.meshx, slf.meshy = utm.to_lat_long(slf.meshx, slf.meshy, zone) slf.append_header_slf() slf.append_core_time_slf(0) slf.append_core_vars_slf([np.zeros(slf.npoin2)]) slf.fole['hook'].close() def calcs(options, code_name): """ Doing calcs, crunh, transf """ root_file = None if not options.parallel: if len(options.args) < 2: raise TelemacException(\ '\nThe code "calcs" requires 2 file names\n') slf_file = options.args[0] out_file = options.args[1] else: if len(options.args) != 3: raise TelemacException(\ '\nThe code "calcs" requires ' '2 file names and 1 root file name ' 'for parallel inputs\n') slf_file = options.args[0] root_file = options.args[1] out_file = options.args[2] slf_file = path.realpath(slf_file) if not path.exists(slf_file): raise TelemacException(\ '\nCould not find the file named: {}'.format(slf_file)) print('\n\nCalculations for ' + path.basename(slf_file) + ' within ' + \ path.dirname(slf_file) + '\n'+'~'*72+'\n') vrs = options.xvars calc_list = [] if options.xvars != None: vrs = clean_quotes(options.xvars.replace('_', ' ')) calc_list = vrs.split(':') if code_name == 'calcs': times = (int(options.tfrom), int(options.tstep), int(options.tstop)) slf = CalcsSelafin(slf_file, times=times, root=root_file) print(' ~> Assembling the following variables together '\ 'into the file:') for calc in calc_list: if calc.upper() in "WATER DEPTH": print(' +> WATER DEPTH') slf.calc_water_depth() if calc.upper() in "KINETIC ENERGY": print(' +> KINETIC ENERGY') slf.calc_kinetic_energy() elif code_name == 'transf': times = (float(options.tfrom), float(options.tstep), float(options.tstop)) slf = TransfSelafin(slf_file, times=times, root=root_file) print(' ~> Computing an animation for the following variable(s):') for calc in calc_list: if calc.upper() in "WAVE SURFACE": print(' +> WAVE SURFACE') slf.calc_free_surface_from_artemis() elif code_name == 'crunch': times = (int(options.tfrom), int(options.tstep), int(options.tstop)) slf = CrunchSelafin(slf_file, times=times, root=root_file) print(' ~> Assembling the following variables into the file:') for calc in calc_list: if calc.upper() in "SURFACE RANGE": print(' +> SURFACE RANGE') slf.calc_surface_range() if calc.upper() in "MAXIMUM SPEED": print(' +> MAXIMUM SPEED') slf.calc_maximum_speed() if calc.upper() in "TIME OF PEAK": print(' +> TIME OF PEAK') slf.calc_peak_time_modulo_m2() if calc.upper() in "RESIDUAL U": print(' +> RESIDUAL U') slf.calc_residual_velocity() slf.alter_times(m_t=float(options.atm), p_t=float(options.atp)) slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=float(options.aym), p_y=float(options.ayp)) if options.azname != None: slf.alter_values(options.azname, m_z=float(options.azm), p_z=float(options.azp)) if options.eswitch: slf.alter_endian() if options.fswitch: slf.alter_float() slf.put_content(out_file)
[ "r\"\"\"@author Sebastien E. Bourban\n\n\"\"\"\nfrom __future__ import print_function\n# _____ ___________________________________________________\n# ____/ Imports /__________________________________________________/\n#\n# ~~> dependencies towards other modules\n\nfrom compilation.parser_fortran import clean_quotes\nfrom data_manip.extraction.parser_lqd import LQD\nfrom data_manip.extraction.parser_kenue import InS\nfrom data_manip.conversion import convert_utm as utm\nfrom data_manip.formats.selafins import Selafins\nfrom data_manip.formats.selafin import Selafin\nfrom utils.files import move_file\nfrom utils.exceptions import TelemacException\n\nfrom pretel.meshes import tessellate_poly\nfrom pretel.scan_selafin import ScanSelafin\nfrom pretel.alter_selafin import AlterSelafin\nfrom pretel.chop_selafin import ChopSelafin\nfrom pretel.crunch_selafin import CrunchSelafin\nfrom pretel.scan_spectral import ScanSpectral\nfrom pretel.sub_selafin import SubSelafin\nfrom pretel.calcs_selafin import CalcsSelafin\nfrom pretel.transf_selafin import TransfSelafin\n# ~~> dependencies towards standard python\nfrom os import path\nimport numpy as np\n\ndef scan(options):\n \"\"\"\n Scan of a file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException(\\\n '\\nCould not find the file named: {}'.format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + \\\n path.dirname(slf_file) + '\\n'+'~'*72+'\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = (int(options.tfrom), int(options.tstep), int(options.tstop))\n slf = ScanSelafin(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core()\n else:\n slf.print_time_summary()\n\n\ndef spec(options):\n \"\"\"\n Spectral file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException(\\\n '\\nCould not find the file named: {}'.format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + \\\n path.dirname(slf_file) + '\\n'+'~'*72+'\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = (int(options.tfrom), int(options.tstep), int(options.tstop))\n slf = ScanSpectral(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core(int(options.accuracy))\n else:\n slf.print_time_summary()\n\n\ndef chop(options):\n \"\"\"\n Chopping of a file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\\\n '\\nThe code \"chop\" (without --replace) '\n 'here requires 2 file names\\n')\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\\\n '\\nThe code \"chop\" (without --replace) '\n 'here requires 2 file names and '\n '1 file root name for the partition\\n')\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = \"chop-tmp.slf\"\n\n for slf_file in slf_files:\n\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException(\\\n '\\nCould not find the file named: {}'.format(slf_file))\n print('\\n\\nChoping ' + path.basename(slf_file) + ' within ' + \\\n path.dirname(slf_file) + '\\n'+'~'*72+'\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = (int(options.tfrom), int(options.tstep), int(options.tstop))\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n\n slf.put_content(out_file)\n\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef alter(options):\n \"\"\"\n Modifications in the file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\\\n '\\nThe code \"alter\" (without --replace) '\n 'requires 2 file names\\n')\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\\\n '\\nThe code \"alter\" (without --replace) '\n 'here requires 2 file names and '\n '1 file root name for the partition\\n')\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = \"chop-tmp.slf\"\n\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException(\\\n '\\nCould not find the file named: {}'.format(slf_file))\n print('\\n\\nAltering ' + path.basename(slf_file) + ' within ' + \\\n path.dirname(slf_file) + '\\n'+'~'*72+'\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = (int(options.tfrom), int(options.tstep), int(options.tstop))\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp),\n m_y=float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname,\n m_z=float(options.azm), p_z=float(options.azp))\n if options.sph2ll != None:\n radius = 6371000.\n long0, lat0 = options.sph2ll.split(\":\")\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0/2. + np.pi/4.)\n slf.slf.meshx = np.rad2deg(slf.slf.meshx/radius + long0)\n expo = np.exp(slf.slf.meshy/radius)\n slf.slf.meshy = np.rad2deg(2.*np.arctan(const*expo) - np.pi/2.)\n if options.ll2sph != None:\n radius = 6371000.\n long0, lat0 = options.ll2sph.split(\":\")\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.slf.meshx = radius * (np.deg2rad(slf.slf.meshx) - long0)\n slf.slf.meshy = radius * \\\n (np.log(np.tan(np.deg2rad(slf.slf.meshy)/2. + np.pi/4.)) \\\n - np.log(np.tan(lat0/2. + np.pi/4.)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.slf.meshx, slf.slf.meshy, zone = \\\n utm.from_lat_long(slf.slf.meshx, slf.slf.meshy, zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.slf.meshx, slf.slf.meshy = \\\n utm.to_lat_long(slf.slf.meshx, slf.slf.meshy, zone)\n\n slf.put_content(out_file)\n\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef merge(options):\n \"\"\"\n Merging two selafin files\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\\\n '\\nThe code \"merge\" requires '\n 'at leat 2 file names, aside '\n 'from the options\\n')\n slf_files = options.args[0:len(options.args)-1]\n out_file = options.args[len(options.args)-1]\n\n slfs = Selafins()\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' + \\\n path.dirname(out_file) + '\\n'+'~'*72+'\\n')\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException(\\\n '\\nCould not find '\n 'the file named: {}'.format(slf_file))\n slfs.add(slf_file)\n\n slfs.put_content(out_file)\n\n else:\n if len(options.args) != 3:\n raise TelemacException(\\\n '\\nThe code \"merge\" here requires '\n '2 file names and '\n '1 file root name for the partition\\n')\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' \\\n + path.dirname(out_file) + '\\n'+'~'*72+'\\n')\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException(\\\n '\\nCould not find '\n 'the file named: {}'.format(slf_file))\n\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = (int(options.tfrom), int(options.tstep), int(options.tstop))\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp),\n m_y=float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname,\n m_z=float(options.azm), p_z=float(options.azp))\n\n slf.put_content(out_file)\n\ndef diff(options):\n \"\"\"\n diff between two serafin files\n \"\"\"\n if len(options.args) < 2:\n raise TelemacException(\\\n '\\nThe code \"diff\" uses a minimum of '\n '3 argumensts, aside from the options\\n')\n slf_files = options.args[0:len(options.args)-1]\n out_file = options.args[len(options.args)-1]\n\n slfs = Selafins()\n print('\\n\\nDifferences into {}\\n{}\\n'.format(path.basename(out_file),\n '~'*72))\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException(\\\n '\\nCould not find '\n 'the file named: {}'.format(slf_file))\n slfs.add(slf_file)\n slfs.put_content(out_file)\n\n\ndef sample(options):\n \"\"\"\n Set liquid boundary file from a selafin file\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\\\n '\\nThe code \"sample\" requires '\n 'at least 2 file names and '\n 'one series of node numbers\\n')\n slf_file = options.args[0]\n out_file = options.args[1]\n nod_list = []\n for nod in options.args[2].split(\" \"):\n nod_list.append(int(nod))\n else:\n if len(options.args) != 4:\n raise TelemacException(\\\n '\\nThe code \"sample\" here '\n 'requires 2 file names, '\n '1 file root name for the partition and '\n '1 series of node numbers\\n')\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n nod_list = []\n for nod in options.args[3].split(\" \"):\n nod_list.append(int(nod))\n\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException(\\\n '\\nCould not find the file named: '\n '{}'.format(slf_file))\n print('\\n\\nSample ' + path.basename(slf_file) + ' within ' + \\\n path.dirname(slf_file) + '\\n'+'~'*72+'\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = (int(options.tfrom), int(options.tstep), int(options.tstop))\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n\n lqd = LQD(vrs=[zip(slf.slf.varnames, slf.slf.varunits), nod_list],\n date=slf.slf.datetime, times=slf.slf.tags['times'],\n series=slf.get_series(nod_list))\n lqd.put_content(out_file)\n\ndef subdivide(options):\n \"\"\"\n Subdivide a mesh\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\\\n '\\nThe code \"subdivide\" '\n '(without --replace) here '\n 'requires 2 file names\\n')\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\\\n '\\nThe code \"subdivide\" (with --replace) '\n 'here requires 1 file name at a time\\n')\n slf_file = options.args[0]\n out_file = \"subdivide-tmp.slf\"\n\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException(\\\n '\\nCould not find'\n ' the file named: {}'.format(slf_file))\n print('\\n\\nSubdividing ' + path.basename(slf_file) + ' within ' + \\\n path.dirname(slf_file) + '\\n'+'~'*72+'\\n')\n slf = SubSelafin(slf_file)\n slf.put_content(out_file)\n\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef tesselate(options):\n \"\"\"\n Generate a mesh from a polygon\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\\\n '\\nThe code \"tessellate\" here '\n 'requires one i2s/i3s file and '\n 'one output slf file\\n')\n i3s_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\\\n '\\nThe code \"tessellate\" here '\n 'requires one i2s/i3s file\\n')\n i3s_file = options.args[0]\n head, _ = path.splitext(i3s_file)\n out_file = head+'.slf'\n\n i3s_file = path.realpath(i3s_file)\n if not path.exists(i3s_file):\n raise TelemacException(\\\n '\\nCould not find '\n 'the file named: {}'.format(i3s_file))\n\n print('\\n\\nTessellating ' + path.basename(i3s_file) + ' within ' + \\\n path.dirname(i3s_file) + '\\n'+'~'*72+'\\n')\n i2s = InS(i3s_file)\n ikle2, ipob2, meshx, meshy = tessellate_poly(i2s, debug=True)\n\n print('\\n\\nWriting down the Selafin file ' + \\\n path.basename(out_file) + '\\n'+'~'*72+'\\n')\n slf = Selafin('')\n slf.title = ''\n slf.nplan = 1\n slf.ndp2 = 3\n slf.ndp3 = 3\n slf.nbv1 = 1\n slf.nvar = 1\n slf.varindex = 1\n slf.varnames = ['BOTTOM ']\n slf.varunits = ['M ']\n slf.ikle2 = ikle2\n slf.ikle3 = slf.ikle2\n slf.meshx = meshx\n slf.meshy = meshy\n slf.npoin2 = i2s.npoin\n slf.npoin3 = slf.npoin2\n slf.nelem2 = len(slf.ikle2)/slf.ndp3\n slf.nelem3 = slf.nelem2\n slf.iparam = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]\n slf.ipob2 = ipob2\n slf.ipob3 = slf.ipob2\n slf.fole = {'hook':open(out_file, 'wb'), 'endian':\">\",\n 'float':('f', 4), 'name':out_file}\n slf.tags['times'] = [1]\n if options.sph2ll != None:\n radius = 6371000.\n long0, lat0 = options.sph2ll.split(\":\")\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0/2. + np.pi/4.)\n slf.meshx = np.rad2deg(slf.meshx/radius + long0)\n slf.meshy = np.rad2deg(2.*np.arctan(const*np.exp(slf.meshy/radius)) \\\n - np.pi/2.)\n if options.ll2sph != None:\n radius = 6371000.\n long0, lat0 = options.ll2sph.split(\":\")\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.meshx = radius * (np.deg2rad(slf.meshx) - long0)\n slf.meshy = radius * \\\n (np.log(np.tan(np.deg2rad(slf.meshy)/2. + np.pi/4.)) \\\n - np.log(np.tan(lat0/2. + np.pi/4.)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.meshx, slf.meshy, zone = utm.from_lat_long(slf.meshx, slf.meshy,\n zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.meshx, slf.meshy = utm.to_lat_long(slf.meshx, slf.meshy, zone)\n slf.append_header_slf()\n slf.append_core_time_slf(0)\n slf.append_core_vars_slf([np.zeros(slf.npoin2)])\n slf.fole['hook'].close()\n\ndef calcs(options, code_name):\n \"\"\"\n Doing calcs, crunh, transf\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 2:\n raise TelemacException(\\\n '\\nThe code \"calcs\" requires 2 file names\\n')\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\\\n '\\nThe code \"calcs\" requires '\n '2 file names and 1 root file name '\n 'for parallel inputs\\n')\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException(\\\n '\\nCould not find the file named: {}'.format(slf_file))\n print('\\n\\nCalculations for ' + path.basename(slf_file) + ' within ' + \\\n path.dirname(slf_file) + '\\n'+'~'*72+'\\n')\n vrs = options.xvars\n calc_list = []\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n calc_list = vrs.split(':')\n if code_name == 'calcs':\n times = (int(options.tfrom), int(options.tstep), int(options.tstop))\n slf = CalcsSelafin(slf_file, times=times, root=root_file)\n print(' ~> Assembling the following variables together '\\\n 'into the file:')\n for calc in calc_list:\n if calc.upper() in \"WATER DEPTH\":\n print(' +> WATER DEPTH')\n slf.calc_water_depth()\n if calc.upper() in \"KINETIC ENERGY\":\n print(' +> KINETIC ENERGY')\n slf.calc_kinetic_energy()\n elif code_name == 'transf':\n times = (float(options.tfrom), float(options.tstep),\n float(options.tstop))\n slf = TransfSelafin(slf_file, times=times, root=root_file)\n print(' ~> Computing an animation for the following variable(s):')\n for calc in calc_list:\n if calc.upper() in \"WAVE SURFACE\":\n print(' +> WAVE SURFACE')\n slf.calc_free_surface_from_artemis()\n elif code_name == 'crunch':\n times = (int(options.tfrom), int(options.tstep), int(options.tstop))\n slf = CrunchSelafin(slf_file, times=times, root=root_file)\n print(' ~> Assembling the following variables into the file:')\n for calc in calc_list:\n if calc.upper() in \"SURFACE RANGE\":\n print(' +> SURFACE RANGE')\n slf.calc_surface_range()\n if calc.upper() in \"MAXIMUM SPEED\":\n print(' +> MAXIMUM SPEED')\n slf.calc_maximum_speed()\n if calc.upper() in \"TIME OF PEAK\":\n print(' +> TIME OF PEAK')\n slf.calc_peak_time_modulo_m2()\n if calc.upper() in \"RESIDUAL U\":\n print(' +> RESIDUAL U')\n slf.calc_residual_velocity()\n\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp),\n m_y=float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname,\n m_z=float(options.azm), p_z=float(options.azp))\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n\n slf.put_content(out_file)\n", "<docstring token>\nfrom __future__ import print_function\nfrom compilation.parser_fortran import clean_quotes\nfrom data_manip.extraction.parser_lqd import LQD\nfrom data_manip.extraction.parser_kenue import InS\nfrom data_manip.conversion import convert_utm as utm\nfrom data_manip.formats.selafins import Selafins\nfrom data_manip.formats.selafin import Selafin\nfrom utils.files import move_file\nfrom utils.exceptions import TelemacException\nfrom pretel.meshes import tessellate_poly\nfrom pretel.scan_selafin import ScanSelafin\nfrom pretel.alter_selafin import AlterSelafin\nfrom pretel.chop_selafin import ChopSelafin\nfrom pretel.crunch_selafin import CrunchSelafin\nfrom pretel.scan_spectral import ScanSpectral\nfrom pretel.sub_selafin import SubSelafin\nfrom pretel.calcs_selafin import CalcsSelafin\nfrom pretel.transf_selafin import TransfSelafin\nfrom os import path\nimport numpy as np\n\n\ndef scan(options):\n \"\"\"\n Scan of a file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSelafin(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core()\n else:\n slf.print_time_summary()\n\n\ndef spec(options):\n \"\"\"\n Spectral file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSpectral(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core(int(options.accuracy))\n else:\n slf.print_time_summary()\n\n\ndef chop(options):\n \"\"\"\n Chopping of a file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"chop\" (without --replace) here requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"chop\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nChoping ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef alter(options):\n \"\"\"\n Modifications in the file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nAltering ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.slf.meshx = np.rad2deg(slf.slf.meshx / radius + long0)\n expo = np.exp(slf.slf.meshy / radius)\n slf.slf.meshy = np.rad2deg(2.0 * np.arctan(const * expo) - np.\n pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.slf.meshx = radius * (np.deg2rad(slf.slf.meshx) - long0)\n slf.slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.slf.\n meshy) / 2.0 + np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + \n np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.slf.meshx, slf.slf.meshy, zone = utm.from_lat_long(slf.slf.\n meshx, slf.slf.meshy, zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.slf.meshx, slf.slf.meshy = utm.to_lat_long(slf.slf.meshx,\n slf.slf.meshy, zone)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef merge(options):\n \"\"\"\n Merging two selafin files\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"merge\" requires at leat 2 file names, aside from the options\n\"\"\"\n )\n slf_files = options.args[0:len(options.args) - 1]\n out_file = options.args[len(options.args) - 1]\n slfs = Selafins()\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' +\n path.dirname(out_file) + '\\n' + '~' * 72 + '\\n')\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'\n .format(slf_file))\n slfs.add(slf_file)\n slfs.put_content(out_file)\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"merge\" here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' +\n path.dirname(out_file) + '\\n' + '~' * 72 + '\\n')\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n slf.put_content(out_file)\n\n\ndef diff(options):\n \"\"\"\n diff between two serafin files\n \"\"\"\n if len(options.args) < 2:\n raise TelemacException(\n \"\"\"\nThe code \"diff\" uses a minimum of 3 argumensts, aside from the options\n\"\"\"\n )\n slf_files = options.args[0:len(options.args) - 1]\n out_file = options.args[len(options.args) - 1]\n slfs = Selafins()\n print('\\n\\nDifferences into {}\\n{}\\n'.format(path.basename(out_file), \n '~' * 72))\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n slfs.add(slf_file)\n slfs.put_content(out_file)\n\n\ndef sample(options):\n \"\"\"\n Set liquid boundary file from a selafin file\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" requires at least 2 file names and one series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n nod_list = []\n for nod in options.args[2].split(' '):\n nod_list.append(int(nod))\n else:\n if len(options.args) != 4:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" here requires 2 file names, 1 file root name for the partition and 1 series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n nod_list = []\n for nod in options.args[3].split(' '):\n nod_list.append(int(nod))\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSample ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n lqd = LQD(vrs=[zip(slf.slf.varnames, slf.slf.varunits), nod_list], date\n =slf.slf.datetime, times=slf.slf.tags['times'], series=slf.\n get_series(nod_list))\n lqd.put_content(out_file)\n\n\ndef subdivide(options):\n \"\"\"\n Subdivide a mesh\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"subdivide\" (without --replace) here requires 2 file names\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\n \"\"\"\nThe code \"subdivide\" (with --replace) here requires 1 file name at a time\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = 'subdivide-tmp.slf'\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSubdividing ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n slf = SubSelafin(slf_file)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef tesselate(options):\n \"\"\"\n Generate a mesh from a polygon\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"tessellate\" here requires one i2s/i3s file and one output slf file\n\"\"\"\n )\n i3s_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\n '\\nThe code \"tessellate\" here requires one i2s/i3s file\\n')\n i3s_file = options.args[0]\n head, _ = path.splitext(i3s_file)\n out_file = head + '.slf'\n i3s_file = path.realpath(i3s_file)\n if not path.exists(i3s_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (i3s_file))\n print('\\n\\nTessellating ' + path.basename(i3s_file) + ' within ' + path\n .dirname(i3s_file) + '\\n' + '~' * 72 + '\\n')\n i2s = InS(i3s_file)\n ikle2, ipob2, meshx, meshy = tessellate_poly(i2s, debug=True)\n print('\\n\\nWriting down the Selafin file ' + path.basename(out_file) +\n '\\n' + '~' * 72 + '\\n')\n slf = Selafin('')\n slf.title = ''\n slf.nplan = 1\n slf.ndp2 = 3\n slf.ndp3 = 3\n slf.nbv1 = 1\n slf.nvar = 1\n slf.varindex = 1\n slf.varnames = ['BOTTOM ']\n slf.varunits = ['M ']\n slf.ikle2 = ikle2\n slf.ikle3 = slf.ikle2\n slf.meshx = meshx\n slf.meshy = meshy\n slf.npoin2 = i2s.npoin\n slf.npoin3 = slf.npoin2\n slf.nelem2 = len(slf.ikle2) / slf.ndp3\n slf.nelem3 = slf.nelem2\n slf.iparam = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]\n slf.ipob2 = ipob2\n slf.ipob3 = slf.ipob2\n slf.fole = {'hook': open(out_file, 'wb'), 'endian': '>', 'float': ('f',\n 4), 'name': out_file}\n slf.tags['times'] = [1]\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.meshx = np.rad2deg(slf.meshx / radius + long0)\n slf.meshy = np.rad2deg(2.0 * np.arctan(const * np.exp(slf.meshy /\n radius)) - np.pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.meshx = radius * (np.deg2rad(slf.meshx) - long0)\n slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.meshy) / 2.0 + \n np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.meshx, slf.meshy, zone = utm.from_lat_long(slf.meshx, slf.meshy,\n zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.meshx, slf.meshy = utm.to_lat_long(slf.meshx, slf.meshy, zone)\n slf.append_header_slf()\n slf.append_core_time_slf(0)\n slf.append_core_vars_slf([np.zeros(slf.npoin2)])\n slf.fole['hook'].close()\n\n\ndef calcs(options, code_name):\n \"\"\"\n Doing calcs, crunh, transf\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 2:\n raise TelemacException('\\nThe code \"calcs\" requires 2 file names\\n'\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"calcs\" requires 2 file names and 1 root file name for parallel inputs\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nCalculations for ' + path.basename(slf_file) + ' within ' +\n path.dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n calc_list = []\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n calc_list = vrs.split(':')\n if code_name == 'calcs':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CalcsSelafin(slf_file, times=times, root=root_file)\n print(\n ' ~> Assembling the following variables together into the file:')\n for calc in calc_list:\n if calc.upper() in 'WATER DEPTH':\n print(' +> WATER DEPTH')\n slf.calc_water_depth()\n if calc.upper() in 'KINETIC ENERGY':\n print(' +> KINETIC ENERGY')\n slf.calc_kinetic_energy()\n elif code_name == 'transf':\n times = float(options.tfrom), float(options.tstep), float(options.tstop\n )\n slf = TransfSelafin(slf_file, times=times, root=root_file)\n print(' ~> Computing an animation for the following variable(s):')\n for calc in calc_list:\n if calc.upper() in 'WAVE SURFACE':\n print(' +> WAVE SURFACE')\n slf.calc_free_surface_from_artemis()\n elif code_name == 'crunch':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CrunchSelafin(slf_file, times=times, root=root_file)\n print(' ~> Assembling the following variables into the file:')\n for calc in calc_list:\n if calc.upper() in 'SURFACE RANGE':\n print(' +> SURFACE RANGE')\n slf.calc_surface_range()\n if calc.upper() in 'MAXIMUM SPEED':\n print(' +> MAXIMUM SPEED')\n slf.calc_maximum_speed()\n if calc.upper() in 'TIME OF PEAK':\n print(' +> TIME OF PEAK')\n slf.calc_peak_time_modulo_m2()\n if calc.upper() in 'RESIDUAL U':\n print(' +> RESIDUAL U')\n slf.calc_residual_velocity()\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=float(\n options.azp))\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n slf.put_content(out_file)\n", "<docstring token>\n<import token>\n\n\ndef scan(options):\n \"\"\"\n Scan of a file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSelafin(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core()\n else:\n slf.print_time_summary()\n\n\ndef spec(options):\n \"\"\"\n Spectral file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSpectral(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core(int(options.accuracy))\n else:\n slf.print_time_summary()\n\n\ndef chop(options):\n \"\"\"\n Chopping of a file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"chop\" (without --replace) here requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"chop\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nChoping ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef alter(options):\n \"\"\"\n Modifications in the file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nAltering ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.slf.meshx = np.rad2deg(slf.slf.meshx / radius + long0)\n expo = np.exp(slf.slf.meshy / radius)\n slf.slf.meshy = np.rad2deg(2.0 * np.arctan(const * expo) - np.\n pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.slf.meshx = radius * (np.deg2rad(slf.slf.meshx) - long0)\n slf.slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.slf.\n meshy) / 2.0 + np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + \n np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.slf.meshx, slf.slf.meshy, zone = utm.from_lat_long(slf.slf.\n meshx, slf.slf.meshy, zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.slf.meshx, slf.slf.meshy = utm.to_lat_long(slf.slf.meshx,\n slf.slf.meshy, zone)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef merge(options):\n \"\"\"\n Merging two selafin files\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"merge\" requires at leat 2 file names, aside from the options\n\"\"\"\n )\n slf_files = options.args[0:len(options.args) - 1]\n out_file = options.args[len(options.args) - 1]\n slfs = Selafins()\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' +\n path.dirname(out_file) + '\\n' + '~' * 72 + '\\n')\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'\n .format(slf_file))\n slfs.add(slf_file)\n slfs.put_content(out_file)\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"merge\" here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' +\n path.dirname(out_file) + '\\n' + '~' * 72 + '\\n')\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n slf.put_content(out_file)\n\n\ndef diff(options):\n \"\"\"\n diff between two serafin files\n \"\"\"\n if len(options.args) < 2:\n raise TelemacException(\n \"\"\"\nThe code \"diff\" uses a minimum of 3 argumensts, aside from the options\n\"\"\"\n )\n slf_files = options.args[0:len(options.args) - 1]\n out_file = options.args[len(options.args) - 1]\n slfs = Selafins()\n print('\\n\\nDifferences into {}\\n{}\\n'.format(path.basename(out_file), \n '~' * 72))\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n slfs.add(slf_file)\n slfs.put_content(out_file)\n\n\ndef sample(options):\n \"\"\"\n Set liquid boundary file from a selafin file\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" requires at least 2 file names and one series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n nod_list = []\n for nod in options.args[2].split(' '):\n nod_list.append(int(nod))\n else:\n if len(options.args) != 4:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" here requires 2 file names, 1 file root name for the partition and 1 series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n nod_list = []\n for nod in options.args[3].split(' '):\n nod_list.append(int(nod))\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSample ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n lqd = LQD(vrs=[zip(slf.slf.varnames, slf.slf.varunits), nod_list], date\n =slf.slf.datetime, times=slf.slf.tags['times'], series=slf.\n get_series(nod_list))\n lqd.put_content(out_file)\n\n\ndef subdivide(options):\n \"\"\"\n Subdivide a mesh\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"subdivide\" (without --replace) here requires 2 file names\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\n \"\"\"\nThe code \"subdivide\" (with --replace) here requires 1 file name at a time\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = 'subdivide-tmp.slf'\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSubdividing ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n slf = SubSelafin(slf_file)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef tesselate(options):\n \"\"\"\n Generate a mesh from a polygon\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"tessellate\" here requires one i2s/i3s file and one output slf file\n\"\"\"\n )\n i3s_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\n '\\nThe code \"tessellate\" here requires one i2s/i3s file\\n')\n i3s_file = options.args[0]\n head, _ = path.splitext(i3s_file)\n out_file = head + '.slf'\n i3s_file = path.realpath(i3s_file)\n if not path.exists(i3s_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (i3s_file))\n print('\\n\\nTessellating ' + path.basename(i3s_file) + ' within ' + path\n .dirname(i3s_file) + '\\n' + '~' * 72 + '\\n')\n i2s = InS(i3s_file)\n ikle2, ipob2, meshx, meshy = tessellate_poly(i2s, debug=True)\n print('\\n\\nWriting down the Selafin file ' + path.basename(out_file) +\n '\\n' + '~' * 72 + '\\n')\n slf = Selafin('')\n slf.title = ''\n slf.nplan = 1\n slf.ndp2 = 3\n slf.ndp3 = 3\n slf.nbv1 = 1\n slf.nvar = 1\n slf.varindex = 1\n slf.varnames = ['BOTTOM ']\n slf.varunits = ['M ']\n slf.ikle2 = ikle2\n slf.ikle3 = slf.ikle2\n slf.meshx = meshx\n slf.meshy = meshy\n slf.npoin2 = i2s.npoin\n slf.npoin3 = slf.npoin2\n slf.nelem2 = len(slf.ikle2) / slf.ndp3\n slf.nelem3 = slf.nelem2\n slf.iparam = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]\n slf.ipob2 = ipob2\n slf.ipob3 = slf.ipob2\n slf.fole = {'hook': open(out_file, 'wb'), 'endian': '>', 'float': ('f',\n 4), 'name': out_file}\n slf.tags['times'] = [1]\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.meshx = np.rad2deg(slf.meshx / radius + long0)\n slf.meshy = np.rad2deg(2.0 * np.arctan(const * np.exp(slf.meshy /\n radius)) - np.pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.meshx = radius * (np.deg2rad(slf.meshx) - long0)\n slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.meshy) / 2.0 + \n np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.meshx, slf.meshy, zone = utm.from_lat_long(slf.meshx, slf.meshy,\n zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.meshx, slf.meshy = utm.to_lat_long(slf.meshx, slf.meshy, zone)\n slf.append_header_slf()\n slf.append_core_time_slf(0)\n slf.append_core_vars_slf([np.zeros(slf.npoin2)])\n slf.fole['hook'].close()\n\n\ndef calcs(options, code_name):\n \"\"\"\n Doing calcs, crunh, transf\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 2:\n raise TelemacException('\\nThe code \"calcs\" requires 2 file names\\n'\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"calcs\" requires 2 file names and 1 root file name for parallel inputs\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nCalculations for ' + path.basename(slf_file) + ' within ' +\n path.dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n calc_list = []\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n calc_list = vrs.split(':')\n if code_name == 'calcs':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CalcsSelafin(slf_file, times=times, root=root_file)\n print(\n ' ~> Assembling the following variables together into the file:')\n for calc in calc_list:\n if calc.upper() in 'WATER DEPTH':\n print(' +> WATER DEPTH')\n slf.calc_water_depth()\n if calc.upper() in 'KINETIC ENERGY':\n print(' +> KINETIC ENERGY')\n slf.calc_kinetic_energy()\n elif code_name == 'transf':\n times = float(options.tfrom), float(options.tstep), float(options.tstop\n )\n slf = TransfSelafin(slf_file, times=times, root=root_file)\n print(' ~> Computing an animation for the following variable(s):')\n for calc in calc_list:\n if calc.upper() in 'WAVE SURFACE':\n print(' +> WAVE SURFACE')\n slf.calc_free_surface_from_artemis()\n elif code_name == 'crunch':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CrunchSelafin(slf_file, times=times, root=root_file)\n print(' ~> Assembling the following variables into the file:')\n for calc in calc_list:\n if calc.upper() in 'SURFACE RANGE':\n print(' +> SURFACE RANGE')\n slf.calc_surface_range()\n if calc.upper() in 'MAXIMUM SPEED':\n print(' +> MAXIMUM SPEED')\n slf.calc_maximum_speed()\n if calc.upper() in 'TIME OF PEAK':\n print(' +> TIME OF PEAK')\n slf.calc_peak_time_modulo_m2()\n if calc.upper() in 'RESIDUAL U':\n print(' +> RESIDUAL U')\n slf.calc_residual_velocity()\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=float(\n options.azp))\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n slf.put_content(out_file)\n", "<docstring token>\n<import token>\n\n\ndef scan(options):\n \"\"\"\n Scan of a file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSelafin(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core()\n else:\n slf.print_time_summary()\n\n\ndef spec(options):\n \"\"\"\n Spectral file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSpectral(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core(int(options.accuracy))\n else:\n slf.print_time_summary()\n\n\ndef chop(options):\n \"\"\"\n Chopping of a file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"chop\" (without --replace) here requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"chop\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nChoping ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef alter(options):\n \"\"\"\n Modifications in the file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nAltering ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.slf.meshx = np.rad2deg(slf.slf.meshx / radius + long0)\n expo = np.exp(slf.slf.meshy / radius)\n slf.slf.meshy = np.rad2deg(2.0 * np.arctan(const * expo) - np.\n pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.slf.meshx = radius * (np.deg2rad(slf.slf.meshx) - long0)\n slf.slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.slf.\n meshy) / 2.0 + np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + \n np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.slf.meshx, slf.slf.meshy, zone = utm.from_lat_long(slf.slf.\n meshx, slf.slf.meshy, zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.slf.meshx, slf.slf.meshy = utm.to_lat_long(slf.slf.meshx,\n slf.slf.meshy, zone)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef merge(options):\n \"\"\"\n Merging two selafin files\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"merge\" requires at leat 2 file names, aside from the options\n\"\"\"\n )\n slf_files = options.args[0:len(options.args) - 1]\n out_file = options.args[len(options.args) - 1]\n slfs = Selafins()\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' +\n path.dirname(out_file) + '\\n' + '~' * 72 + '\\n')\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'\n .format(slf_file))\n slfs.add(slf_file)\n slfs.put_content(out_file)\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"merge\" here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' +\n path.dirname(out_file) + '\\n' + '~' * 72 + '\\n')\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n slf.put_content(out_file)\n\n\n<function token>\n\n\ndef sample(options):\n \"\"\"\n Set liquid boundary file from a selafin file\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" requires at least 2 file names and one series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n nod_list = []\n for nod in options.args[2].split(' '):\n nod_list.append(int(nod))\n else:\n if len(options.args) != 4:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" here requires 2 file names, 1 file root name for the partition and 1 series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n nod_list = []\n for nod in options.args[3].split(' '):\n nod_list.append(int(nod))\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSample ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n lqd = LQD(vrs=[zip(slf.slf.varnames, slf.slf.varunits), nod_list], date\n =slf.slf.datetime, times=slf.slf.tags['times'], series=slf.\n get_series(nod_list))\n lqd.put_content(out_file)\n\n\ndef subdivide(options):\n \"\"\"\n Subdivide a mesh\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"subdivide\" (without --replace) here requires 2 file names\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\n \"\"\"\nThe code \"subdivide\" (with --replace) here requires 1 file name at a time\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = 'subdivide-tmp.slf'\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSubdividing ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n slf = SubSelafin(slf_file)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef tesselate(options):\n \"\"\"\n Generate a mesh from a polygon\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"tessellate\" here requires one i2s/i3s file and one output slf file\n\"\"\"\n )\n i3s_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\n '\\nThe code \"tessellate\" here requires one i2s/i3s file\\n')\n i3s_file = options.args[0]\n head, _ = path.splitext(i3s_file)\n out_file = head + '.slf'\n i3s_file = path.realpath(i3s_file)\n if not path.exists(i3s_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (i3s_file))\n print('\\n\\nTessellating ' + path.basename(i3s_file) + ' within ' + path\n .dirname(i3s_file) + '\\n' + '~' * 72 + '\\n')\n i2s = InS(i3s_file)\n ikle2, ipob2, meshx, meshy = tessellate_poly(i2s, debug=True)\n print('\\n\\nWriting down the Selafin file ' + path.basename(out_file) +\n '\\n' + '~' * 72 + '\\n')\n slf = Selafin('')\n slf.title = ''\n slf.nplan = 1\n slf.ndp2 = 3\n slf.ndp3 = 3\n slf.nbv1 = 1\n slf.nvar = 1\n slf.varindex = 1\n slf.varnames = ['BOTTOM ']\n slf.varunits = ['M ']\n slf.ikle2 = ikle2\n slf.ikle3 = slf.ikle2\n slf.meshx = meshx\n slf.meshy = meshy\n slf.npoin2 = i2s.npoin\n slf.npoin3 = slf.npoin2\n slf.nelem2 = len(slf.ikle2) / slf.ndp3\n slf.nelem3 = slf.nelem2\n slf.iparam = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]\n slf.ipob2 = ipob2\n slf.ipob3 = slf.ipob2\n slf.fole = {'hook': open(out_file, 'wb'), 'endian': '>', 'float': ('f',\n 4), 'name': out_file}\n slf.tags['times'] = [1]\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.meshx = np.rad2deg(slf.meshx / radius + long0)\n slf.meshy = np.rad2deg(2.0 * np.arctan(const * np.exp(slf.meshy /\n radius)) - np.pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.meshx = radius * (np.deg2rad(slf.meshx) - long0)\n slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.meshy) / 2.0 + \n np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.meshx, slf.meshy, zone = utm.from_lat_long(slf.meshx, slf.meshy,\n zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.meshx, slf.meshy = utm.to_lat_long(slf.meshx, slf.meshy, zone)\n slf.append_header_slf()\n slf.append_core_time_slf(0)\n slf.append_core_vars_slf([np.zeros(slf.npoin2)])\n slf.fole['hook'].close()\n\n\ndef calcs(options, code_name):\n \"\"\"\n Doing calcs, crunh, transf\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 2:\n raise TelemacException('\\nThe code \"calcs\" requires 2 file names\\n'\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"calcs\" requires 2 file names and 1 root file name for parallel inputs\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nCalculations for ' + path.basename(slf_file) + ' within ' +\n path.dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n calc_list = []\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n calc_list = vrs.split(':')\n if code_name == 'calcs':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CalcsSelafin(slf_file, times=times, root=root_file)\n print(\n ' ~> Assembling the following variables together into the file:')\n for calc in calc_list:\n if calc.upper() in 'WATER DEPTH':\n print(' +> WATER DEPTH')\n slf.calc_water_depth()\n if calc.upper() in 'KINETIC ENERGY':\n print(' +> KINETIC ENERGY')\n slf.calc_kinetic_energy()\n elif code_name == 'transf':\n times = float(options.tfrom), float(options.tstep), float(options.tstop\n )\n slf = TransfSelafin(slf_file, times=times, root=root_file)\n print(' ~> Computing an animation for the following variable(s):')\n for calc in calc_list:\n if calc.upper() in 'WAVE SURFACE':\n print(' +> WAVE SURFACE')\n slf.calc_free_surface_from_artemis()\n elif code_name == 'crunch':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CrunchSelafin(slf_file, times=times, root=root_file)\n print(' ~> Assembling the following variables into the file:')\n for calc in calc_list:\n if calc.upper() in 'SURFACE RANGE':\n print(' +> SURFACE RANGE')\n slf.calc_surface_range()\n if calc.upper() in 'MAXIMUM SPEED':\n print(' +> MAXIMUM SPEED')\n slf.calc_maximum_speed()\n if calc.upper() in 'TIME OF PEAK':\n print(' +> TIME OF PEAK')\n slf.calc_peak_time_modulo_m2()\n if calc.upper() in 'RESIDUAL U':\n print(' +> RESIDUAL U')\n slf.calc_residual_velocity()\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=float(\n options.azp))\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n slf.put_content(out_file)\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef spec(options):\n \"\"\"\n Spectral file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSpectral(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core(int(options.accuracy))\n else:\n slf.print_time_summary()\n\n\ndef chop(options):\n \"\"\"\n Chopping of a file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"chop\" (without --replace) here requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"chop\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nChoping ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef alter(options):\n \"\"\"\n Modifications in the file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nAltering ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.slf.meshx = np.rad2deg(slf.slf.meshx / radius + long0)\n expo = np.exp(slf.slf.meshy / radius)\n slf.slf.meshy = np.rad2deg(2.0 * np.arctan(const * expo) - np.\n pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.slf.meshx = radius * (np.deg2rad(slf.slf.meshx) - long0)\n slf.slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.slf.\n meshy) / 2.0 + np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + \n np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.slf.meshx, slf.slf.meshy, zone = utm.from_lat_long(slf.slf.\n meshx, slf.slf.meshy, zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.slf.meshx, slf.slf.meshy = utm.to_lat_long(slf.slf.meshx,\n slf.slf.meshy, zone)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef merge(options):\n \"\"\"\n Merging two selafin files\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"merge\" requires at leat 2 file names, aside from the options\n\"\"\"\n )\n slf_files = options.args[0:len(options.args) - 1]\n out_file = options.args[len(options.args) - 1]\n slfs = Selafins()\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' +\n path.dirname(out_file) + '\\n' + '~' * 72 + '\\n')\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'\n .format(slf_file))\n slfs.add(slf_file)\n slfs.put_content(out_file)\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"merge\" here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' +\n path.dirname(out_file) + '\\n' + '~' * 72 + '\\n')\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n slf.put_content(out_file)\n\n\n<function token>\n\n\ndef sample(options):\n \"\"\"\n Set liquid boundary file from a selafin file\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" requires at least 2 file names and one series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n nod_list = []\n for nod in options.args[2].split(' '):\n nod_list.append(int(nod))\n else:\n if len(options.args) != 4:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" here requires 2 file names, 1 file root name for the partition and 1 series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n nod_list = []\n for nod in options.args[3].split(' '):\n nod_list.append(int(nod))\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSample ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n lqd = LQD(vrs=[zip(slf.slf.varnames, slf.slf.varunits), nod_list], date\n =slf.slf.datetime, times=slf.slf.tags['times'], series=slf.\n get_series(nod_list))\n lqd.put_content(out_file)\n\n\ndef subdivide(options):\n \"\"\"\n Subdivide a mesh\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"subdivide\" (without --replace) here requires 2 file names\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\n \"\"\"\nThe code \"subdivide\" (with --replace) here requires 1 file name at a time\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = 'subdivide-tmp.slf'\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSubdividing ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n slf = SubSelafin(slf_file)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef tesselate(options):\n \"\"\"\n Generate a mesh from a polygon\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"tessellate\" here requires one i2s/i3s file and one output slf file\n\"\"\"\n )\n i3s_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\n '\\nThe code \"tessellate\" here requires one i2s/i3s file\\n')\n i3s_file = options.args[0]\n head, _ = path.splitext(i3s_file)\n out_file = head + '.slf'\n i3s_file = path.realpath(i3s_file)\n if not path.exists(i3s_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (i3s_file))\n print('\\n\\nTessellating ' + path.basename(i3s_file) + ' within ' + path\n .dirname(i3s_file) + '\\n' + '~' * 72 + '\\n')\n i2s = InS(i3s_file)\n ikle2, ipob2, meshx, meshy = tessellate_poly(i2s, debug=True)\n print('\\n\\nWriting down the Selafin file ' + path.basename(out_file) +\n '\\n' + '~' * 72 + '\\n')\n slf = Selafin('')\n slf.title = ''\n slf.nplan = 1\n slf.ndp2 = 3\n slf.ndp3 = 3\n slf.nbv1 = 1\n slf.nvar = 1\n slf.varindex = 1\n slf.varnames = ['BOTTOM ']\n slf.varunits = ['M ']\n slf.ikle2 = ikle2\n slf.ikle3 = slf.ikle2\n slf.meshx = meshx\n slf.meshy = meshy\n slf.npoin2 = i2s.npoin\n slf.npoin3 = slf.npoin2\n slf.nelem2 = len(slf.ikle2) / slf.ndp3\n slf.nelem3 = slf.nelem2\n slf.iparam = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]\n slf.ipob2 = ipob2\n slf.ipob3 = slf.ipob2\n slf.fole = {'hook': open(out_file, 'wb'), 'endian': '>', 'float': ('f',\n 4), 'name': out_file}\n slf.tags['times'] = [1]\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.meshx = np.rad2deg(slf.meshx / radius + long0)\n slf.meshy = np.rad2deg(2.0 * np.arctan(const * np.exp(slf.meshy /\n radius)) - np.pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.meshx = radius * (np.deg2rad(slf.meshx) - long0)\n slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.meshy) / 2.0 + \n np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.meshx, slf.meshy, zone = utm.from_lat_long(slf.meshx, slf.meshy,\n zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.meshx, slf.meshy = utm.to_lat_long(slf.meshx, slf.meshy, zone)\n slf.append_header_slf()\n slf.append_core_time_slf(0)\n slf.append_core_vars_slf([np.zeros(slf.npoin2)])\n slf.fole['hook'].close()\n\n\ndef calcs(options, code_name):\n \"\"\"\n Doing calcs, crunh, transf\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 2:\n raise TelemacException('\\nThe code \"calcs\" requires 2 file names\\n'\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"calcs\" requires 2 file names and 1 root file name for parallel inputs\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nCalculations for ' + path.basename(slf_file) + ' within ' +\n path.dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n calc_list = []\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n calc_list = vrs.split(':')\n if code_name == 'calcs':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CalcsSelafin(slf_file, times=times, root=root_file)\n print(\n ' ~> Assembling the following variables together into the file:')\n for calc in calc_list:\n if calc.upper() in 'WATER DEPTH':\n print(' +> WATER DEPTH')\n slf.calc_water_depth()\n if calc.upper() in 'KINETIC ENERGY':\n print(' +> KINETIC ENERGY')\n slf.calc_kinetic_energy()\n elif code_name == 'transf':\n times = float(options.tfrom), float(options.tstep), float(options.tstop\n )\n slf = TransfSelafin(slf_file, times=times, root=root_file)\n print(' ~> Computing an animation for the following variable(s):')\n for calc in calc_list:\n if calc.upper() in 'WAVE SURFACE':\n print(' +> WAVE SURFACE')\n slf.calc_free_surface_from_artemis()\n elif code_name == 'crunch':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CrunchSelafin(slf_file, times=times, root=root_file)\n print(' ~> Assembling the following variables into the file:')\n for calc in calc_list:\n if calc.upper() in 'SURFACE RANGE':\n print(' +> SURFACE RANGE')\n slf.calc_surface_range()\n if calc.upper() in 'MAXIMUM SPEED':\n print(' +> MAXIMUM SPEED')\n slf.calc_maximum_speed()\n if calc.upper() in 'TIME OF PEAK':\n print(' +> TIME OF PEAK')\n slf.calc_peak_time_modulo_m2()\n if calc.upper() in 'RESIDUAL U':\n print(' +> RESIDUAL U')\n slf.calc_residual_velocity()\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=float(\n options.azp))\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n slf.put_content(out_file)\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef spec(options):\n \"\"\"\n Spectral file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSpectral(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core(int(options.accuracy))\n else:\n slf.print_time_summary()\n\n\n<function token>\n\n\ndef alter(options):\n \"\"\"\n Modifications in the file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nAltering ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.slf.meshx = np.rad2deg(slf.slf.meshx / radius + long0)\n expo = np.exp(slf.slf.meshy / radius)\n slf.slf.meshy = np.rad2deg(2.0 * np.arctan(const * expo) - np.\n pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.slf.meshx = radius * (np.deg2rad(slf.slf.meshx) - long0)\n slf.slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.slf.\n meshy) / 2.0 + np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + \n np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.slf.meshx, slf.slf.meshy, zone = utm.from_lat_long(slf.slf.\n meshx, slf.slf.meshy, zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.slf.meshx, slf.slf.meshy = utm.to_lat_long(slf.slf.meshx,\n slf.slf.meshy, zone)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef merge(options):\n \"\"\"\n Merging two selafin files\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"merge\" requires at leat 2 file names, aside from the options\n\"\"\"\n )\n slf_files = options.args[0:len(options.args) - 1]\n out_file = options.args[len(options.args) - 1]\n slfs = Selafins()\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' +\n path.dirname(out_file) + '\\n' + '~' * 72 + '\\n')\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'\n .format(slf_file))\n slfs.add(slf_file)\n slfs.put_content(out_file)\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"merge\" here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' +\n path.dirname(out_file) + '\\n' + '~' * 72 + '\\n')\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n slf.put_content(out_file)\n\n\n<function token>\n\n\ndef sample(options):\n \"\"\"\n Set liquid boundary file from a selafin file\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" requires at least 2 file names and one series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n nod_list = []\n for nod in options.args[2].split(' '):\n nod_list.append(int(nod))\n else:\n if len(options.args) != 4:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" here requires 2 file names, 1 file root name for the partition and 1 series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n nod_list = []\n for nod in options.args[3].split(' '):\n nod_list.append(int(nod))\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSample ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n lqd = LQD(vrs=[zip(slf.slf.varnames, slf.slf.varunits), nod_list], date\n =slf.slf.datetime, times=slf.slf.tags['times'], series=slf.\n get_series(nod_list))\n lqd.put_content(out_file)\n\n\ndef subdivide(options):\n \"\"\"\n Subdivide a mesh\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"subdivide\" (without --replace) here requires 2 file names\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\n \"\"\"\nThe code \"subdivide\" (with --replace) here requires 1 file name at a time\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = 'subdivide-tmp.slf'\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSubdividing ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n slf = SubSelafin(slf_file)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef tesselate(options):\n \"\"\"\n Generate a mesh from a polygon\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"tessellate\" here requires one i2s/i3s file and one output slf file\n\"\"\"\n )\n i3s_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\n '\\nThe code \"tessellate\" here requires one i2s/i3s file\\n')\n i3s_file = options.args[0]\n head, _ = path.splitext(i3s_file)\n out_file = head + '.slf'\n i3s_file = path.realpath(i3s_file)\n if not path.exists(i3s_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (i3s_file))\n print('\\n\\nTessellating ' + path.basename(i3s_file) + ' within ' + path\n .dirname(i3s_file) + '\\n' + '~' * 72 + '\\n')\n i2s = InS(i3s_file)\n ikle2, ipob2, meshx, meshy = tessellate_poly(i2s, debug=True)\n print('\\n\\nWriting down the Selafin file ' + path.basename(out_file) +\n '\\n' + '~' * 72 + '\\n')\n slf = Selafin('')\n slf.title = ''\n slf.nplan = 1\n slf.ndp2 = 3\n slf.ndp3 = 3\n slf.nbv1 = 1\n slf.nvar = 1\n slf.varindex = 1\n slf.varnames = ['BOTTOM ']\n slf.varunits = ['M ']\n slf.ikle2 = ikle2\n slf.ikle3 = slf.ikle2\n slf.meshx = meshx\n slf.meshy = meshy\n slf.npoin2 = i2s.npoin\n slf.npoin3 = slf.npoin2\n slf.nelem2 = len(slf.ikle2) / slf.ndp3\n slf.nelem3 = slf.nelem2\n slf.iparam = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]\n slf.ipob2 = ipob2\n slf.ipob3 = slf.ipob2\n slf.fole = {'hook': open(out_file, 'wb'), 'endian': '>', 'float': ('f',\n 4), 'name': out_file}\n slf.tags['times'] = [1]\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.meshx = np.rad2deg(slf.meshx / radius + long0)\n slf.meshy = np.rad2deg(2.0 * np.arctan(const * np.exp(slf.meshy /\n radius)) - np.pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.meshx = radius * (np.deg2rad(slf.meshx) - long0)\n slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.meshy) / 2.0 + \n np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.meshx, slf.meshy, zone = utm.from_lat_long(slf.meshx, slf.meshy,\n zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.meshx, slf.meshy = utm.to_lat_long(slf.meshx, slf.meshy, zone)\n slf.append_header_slf()\n slf.append_core_time_slf(0)\n slf.append_core_vars_slf([np.zeros(slf.npoin2)])\n slf.fole['hook'].close()\n\n\ndef calcs(options, code_name):\n \"\"\"\n Doing calcs, crunh, transf\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 2:\n raise TelemacException('\\nThe code \"calcs\" requires 2 file names\\n'\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"calcs\" requires 2 file names and 1 root file name for parallel inputs\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nCalculations for ' + path.basename(slf_file) + ' within ' +\n path.dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n calc_list = []\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n calc_list = vrs.split(':')\n if code_name == 'calcs':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CalcsSelafin(slf_file, times=times, root=root_file)\n print(\n ' ~> Assembling the following variables together into the file:')\n for calc in calc_list:\n if calc.upper() in 'WATER DEPTH':\n print(' +> WATER DEPTH')\n slf.calc_water_depth()\n if calc.upper() in 'KINETIC ENERGY':\n print(' +> KINETIC ENERGY')\n slf.calc_kinetic_energy()\n elif code_name == 'transf':\n times = float(options.tfrom), float(options.tstep), float(options.tstop\n )\n slf = TransfSelafin(slf_file, times=times, root=root_file)\n print(' ~> Computing an animation for the following variable(s):')\n for calc in calc_list:\n if calc.upper() in 'WAVE SURFACE':\n print(' +> WAVE SURFACE')\n slf.calc_free_surface_from_artemis()\n elif code_name == 'crunch':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CrunchSelafin(slf_file, times=times, root=root_file)\n print(' ~> Assembling the following variables into the file:')\n for calc in calc_list:\n if calc.upper() in 'SURFACE RANGE':\n print(' +> SURFACE RANGE')\n slf.calc_surface_range()\n if calc.upper() in 'MAXIMUM SPEED':\n print(' +> MAXIMUM SPEED')\n slf.calc_maximum_speed()\n if calc.upper() in 'TIME OF PEAK':\n print(' +> TIME OF PEAK')\n slf.calc_peak_time_modulo_m2()\n if calc.upper() in 'RESIDUAL U':\n print(' +> RESIDUAL U')\n slf.calc_residual_velocity()\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=float(\n options.azp))\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n slf.put_content(out_file)\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef spec(options):\n \"\"\"\n Spectral file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSpectral(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core(int(options.accuracy))\n else:\n slf.print_time_summary()\n\n\n<function token>\n\n\ndef alter(options):\n \"\"\"\n Modifications in the file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nAltering ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.slf.meshx = np.rad2deg(slf.slf.meshx / radius + long0)\n expo = np.exp(slf.slf.meshy / radius)\n slf.slf.meshy = np.rad2deg(2.0 * np.arctan(const * expo) - np.\n pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.slf.meshx = radius * (np.deg2rad(slf.slf.meshx) - long0)\n slf.slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.slf.\n meshy) / 2.0 + np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + \n np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.slf.meshx, slf.slf.meshy, zone = utm.from_lat_long(slf.slf.\n meshx, slf.slf.meshy, zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.slf.meshx, slf.slf.meshy = utm.to_lat_long(slf.slf.meshx,\n slf.slf.meshy, zone)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\ndef merge(options):\n \"\"\"\n Merging two selafin files\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"merge\" requires at leat 2 file names, aside from the options\n\"\"\"\n )\n slf_files = options.args[0:len(options.args) - 1]\n out_file = options.args[len(options.args) - 1]\n slfs = Selafins()\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' +\n path.dirname(out_file) + '\\n' + '~' * 72 + '\\n')\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'\n .format(slf_file))\n slfs.add(slf_file)\n slfs.put_content(out_file)\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"merge\" here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n print('\\n\\nMerging into ' + path.basename(out_file) + ' within ' +\n path.dirname(out_file) + '\\n' + '~' * 72 + '\\n')\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n slf.put_content(out_file)\n\n\n<function token>\n\n\ndef sample(options):\n \"\"\"\n Set liquid boundary file from a selafin file\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" requires at least 2 file names and one series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n nod_list = []\n for nod in options.args[2].split(' '):\n nod_list.append(int(nod))\n else:\n if len(options.args) != 4:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" here requires 2 file names, 1 file root name for the partition and 1 series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n nod_list = []\n for nod in options.args[3].split(' '):\n nod_list.append(int(nod))\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSample ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n lqd = LQD(vrs=[zip(slf.slf.varnames, slf.slf.varunits), nod_list], date\n =slf.slf.datetime, times=slf.slf.tags['times'], series=slf.\n get_series(nod_list))\n lqd.put_content(out_file)\n\n\n<function token>\n\n\ndef tesselate(options):\n \"\"\"\n Generate a mesh from a polygon\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"tessellate\" here requires one i2s/i3s file and one output slf file\n\"\"\"\n )\n i3s_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\n '\\nThe code \"tessellate\" here requires one i2s/i3s file\\n')\n i3s_file = options.args[0]\n head, _ = path.splitext(i3s_file)\n out_file = head + '.slf'\n i3s_file = path.realpath(i3s_file)\n if not path.exists(i3s_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (i3s_file))\n print('\\n\\nTessellating ' + path.basename(i3s_file) + ' within ' + path\n .dirname(i3s_file) + '\\n' + '~' * 72 + '\\n')\n i2s = InS(i3s_file)\n ikle2, ipob2, meshx, meshy = tessellate_poly(i2s, debug=True)\n print('\\n\\nWriting down the Selafin file ' + path.basename(out_file) +\n '\\n' + '~' * 72 + '\\n')\n slf = Selafin('')\n slf.title = ''\n slf.nplan = 1\n slf.ndp2 = 3\n slf.ndp3 = 3\n slf.nbv1 = 1\n slf.nvar = 1\n slf.varindex = 1\n slf.varnames = ['BOTTOM ']\n slf.varunits = ['M ']\n slf.ikle2 = ikle2\n slf.ikle3 = slf.ikle2\n slf.meshx = meshx\n slf.meshy = meshy\n slf.npoin2 = i2s.npoin\n slf.npoin3 = slf.npoin2\n slf.nelem2 = len(slf.ikle2) / slf.ndp3\n slf.nelem3 = slf.nelem2\n slf.iparam = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]\n slf.ipob2 = ipob2\n slf.ipob3 = slf.ipob2\n slf.fole = {'hook': open(out_file, 'wb'), 'endian': '>', 'float': ('f',\n 4), 'name': out_file}\n slf.tags['times'] = [1]\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.meshx = np.rad2deg(slf.meshx / radius + long0)\n slf.meshy = np.rad2deg(2.0 * np.arctan(const * np.exp(slf.meshy /\n radius)) - np.pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.meshx = radius * (np.deg2rad(slf.meshx) - long0)\n slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.meshy) / 2.0 + \n np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.meshx, slf.meshy, zone = utm.from_lat_long(slf.meshx, slf.meshy,\n zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.meshx, slf.meshy = utm.to_lat_long(slf.meshx, slf.meshy, zone)\n slf.append_header_slf()\n slf.append_core_time_slf(0)\n slf.append_core_vars_slf([np.zeros(slf.npoin2)])\n slf.fole['hook'].close()\n\n\ndef calcs(options, code_name):\n \"\"\"\n Doing calcs, crunh, transf\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 2:\n raise TelemacException('\\nThe code \"calcs\" requires 2 file names\\n'\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"calcs\" requires 2 file names and 1 root file name for parallel inputs\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nCalculations for ' + path.basename(slf_file) + ' within ' +\n path.dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n calc_list = []\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n calc_list = vrs.split(':')\n if code_name == 'calcs':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CalcsSelafin(slf_file, times=times, root=root_file)\n print(\n ' ~> Assembling the following variables together into the file:')\n for calc in calc_list:\n if calc.upper() in 'WATER DEPTH':\n print(' +> WATER DEPTH')\n slf.calc_water_depth()\n if calc.upper() in 'KINETIC ENERGY':\n print(' +> KINETIC ENERGY')\n slf.calc_kinetic_energy()\n elif code_name == 'transf':\n times = float(options.tfrom), float(options.tstep), float(options.tstop\n )\n slf = TransfSelafin(slf_file, times=times, root=root_file)\n print(' ~> Computing an animation for the following variable(s):')\n for calc in calc_list:\n if calc.upper() in 'WAVE SURFACE':\n print(' +> WAVE SURFACE')\n slf.calc_free_surface_from_artemis()\n elif code_name == 'crunch':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CrunchSelafin(slf_file, times=times, root=root_file)\n print(' ~> Assembling the following variables into the file:')\n for calc in calc_list:\n if calc.upper() in 'SURFACE RANGE':\n print(' +> SURFACE RANGE')\n slf.calc_surface_range()\n if calc.upper() in 'MAXIMUM SPEED':\n print(' +> MAXIMUM SPEED')\n slf.calc_maximum_speed()\n if calc.upper() in 'TIME OF PEAK':\n print(' +> TIME OF PEAK')\n slf.calc_peak_time_modulo_m2()\n if calc.upper() in 'RESIDUAL U':\n print(' +> RESIDUAL U')\n slf.calc_residual_velocity()\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=float(\n options.azp))\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n slf.put_content(out_file)\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef spec(options):\n \"\"\"\n Spectral file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSpectral(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core(int(options.accuracy))\n else:\n slf.print_time_summary()\n\n\n<function token>\n\n\ndef alter(options):\n \"\"\"\n Modifications in the file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nAltering ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.slf.meshx = np.rad2deg(slf.slf.meshx / radius + long0)\n expo = np.exp(slf.slf.meshy / radius)\n slf.slf.meshy = np.rad2deg(2.0 * np.arctan(const * expo) - np.\n pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.slf.meshx = radius * (np.deg2rad(slf.slf.meshx) - long0)\n slf.slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.slf.\n meshy) / 2.0 + np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + \n np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.slf.meshx, slf.slf.meshy, zone = utm.from_lat_long(slf.slf.\n meshx, slf.slf.meshy, zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.slf.meshx, slf.slf.meshy = utm.to_lat_long(slf.slf.meshx,\n slf.slf.meshy, zone)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\n<function token>\n<function token>\n\n\ndef sample(options):\n \"\"\"\n Set liquid boundary file from a selafin file\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" requires at least 2 file names and one series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n nod_list = []\n for nod in options.args[2].split(' '):\n nod_list.append(int(nod))\n else:\n if len(options.args) != 4:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" here requires 2 file names, 1 file root name for the partition and 1 series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n nod_list = []\n for nod in options.args[3].split(' '):\n nod_list.append(int(nod))\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSample ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n lqd = LQD(vrs=[zip(slf.slf.varnames, slf.slf.varunits), nod_list], date\n =slf.slf.datetime, times=slf.slf.tags['times'], series=slf.\n get_series(nod_list))\n lqd.put_content(out_file)\n\n\n<function token>\n\n\ndef tesselate(options):\n \"\"\"\n Generate a mesh from a polygon\n \"\"\"\n if not options.freplace:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"tessellate\" here requires one i2s/i3s file and one output slf file\n\"\"\"\n )\n i3s_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 1:\n raise TelemacException(\n '\\nThe code \"tessellate\" here requires one i2s/i3s file\\n')\n i3s_file = options.args[0]\n head, _ = path.splitext(i3s_file)\n out_file = head + '.slf'\n i3s_file = path.realpath(i3s_file)\n if not path.exists(i3s_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (i3s_file))\n print('\\n\\nTessellating ' + path.basename(i3s_file) + ' within ' + path\n .dirname(i3s_file) + '\\n' + '~' * 72 + '\\n')\n i2s = InS(i3s_file)\n ikle2, ipob2, meshx, meshy = tessellate_poly(i2s, debug=True)\n print('\\n\\nWriting down the Selafin file ' + path.basename(out_file) +\n '\\n' + '~' * 72 + '\\n')\n slf = Selafin('')\n slf.title = ''\n slf.nplan = 1\n slf.ndp2 = 3\n slf.ndp3 = 3\n slf.nbv1 = 1\n slf.nvar = 1\n slf.varindex = 1\n slf.varnames = ['BOTTOM ']\n slf.varunits = ['M ']\n slf.ikle2 = ikle2\n slf.ikle3 = slf.ikle2\n slf.meshx = meshx\n slf.meshy = meshy\n slf.npoin2 = i2s.npoin\n slf.npoin3 = slf.npoin2\n slf.nelem2 = len(slf.ikle2) / slf.ndp3\n slf.nelem3 = slf.nelem2\n slf.iparam = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]\n slf.ipob2 = ipob2\n slf.ipob3 = slf.ipob2\n slf.fole = {'hook': open(out_file, 'wb'), 'endian': '>', 'float': ('f',\n 4), 'name': out_file}\n slf.tags['times'] = [1]\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.meshx = np.rad2deg(slf.meshx / radius + long0)\n slf.meshy = np.rad2deg(2.0 * np.arctan(const * np.exp(slf.meshy /\n radius)) - np.pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.meshx = radius * (np.deg2rad(slf.meshx) - long0)\n slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.meshy) / 2.0 + \n np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.meshx, slf.meshy, zone = utm.from_lat_long(slf.meshx, slf.meshy,\n zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.meshx, slf.meshy = utm.to_lat_long(slf.meshx, slf.meshy, zone)\n slf.append_header_slf()\n slf.append_core_time_slf(0)\n slf.append_core_vars_slf([np.zeros(slf.npoin2)])\n slf.fole['hook'].close()\n\n\ndef calcs(options, code_name):\n \"\"\"\n Doing calcs, crunh, transf\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 2:\n raise TelemacException('\\nThe code \"calcs\" requires 2 file names\\n'\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"calcs\" requires 2 file names and 1 root file name for parallel inputs\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nCalculations for ' + path.basename(slf_file) + ' within ' +\n path.dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n calc_list = []\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n calc_list = vrs.split(':')\n if code_name == 'calcs':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CalcsSelafin(slf_file, times=times, root=root_file)\n print(\n ' ~> Assembling the following variables together into the file:')\n for calc in calc_list:\n if calc.upper() in 'WATER DEPTH':\n print(' +> WATER DEPTH')\n slf.calc_water_depth()\n if calc.upper() in 'KINETIC ENERGY':\n print(' +> KINETIC ENERGY')\n slf.calc_kinetic_energy()\n elif code_name == 'transf':\n times = float(options.tfrom), float(options.tstep), float(options.tstop\n )\n slf = TransfSelafin(slf_file, times=times, root=root_file)\n print(' ~> Computing an animation for the following variable(s):')\n for calc in calc_list:\n if calc.upper() in 'WAVE SURFACE':\n print(' +> WAVE SURFACE')\n slf.calc_free_surface_from_artemis()\n elif code_name == 'crunch':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CrunchSelafin(slf_file, times=times, root=root_file)\n print(' ~> Assembling the following variables into the file:')\n for calc in calc_list:\n if calc.upper() in 'SURFACE RANGE':\n print(' +> SURFACE RANGE')\n slf.calc_surface_range()\n if calc.upper() in 'MAXIMUM SPEED':\n print(' +> MAXIMUM SPEED')\n slf.calc_maximum_speed()\n if calc.upper() in 'TIME OF PEAK':\n print(' +> TIME OF PEAK')\n slf.calc_peak_time_modulo_m2()\n if calc.upper() in 'RESIDUAL U':\n print(' +> RESIDUAL U')\n slf.calc_residual_velocity()\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=float(\n options.azp))\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n slf.put_content(out_file)\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef spec(options):\n \"\"\"\n Spectral file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSpectral(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core(int(options.accuracy))\n else:\n slf.print_time_summary()\n\n\n<function token>\n\n\ndef alter(options):\n \"\"\"\n Modifications in the file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nAltering ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.slf.meshx = np.rad2deg(slf.slf.meshx / radius + long0)\n expo = np.exp(slf.slf.meshy / radius)\n slf.slf.meshy = np.rad2deg(2.0 * np.arctan(const * expo) - np.\n pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.slf.meshx = radius * (np.deg2rad(slf.slf.meshx) - long0)\n slf.slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.slf.\n meshy) / 2.0 + np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + \n np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.slf.meshx, slf.slf.meshy, zone = utm.from_lat_long(slf.slf.\n meshx, slf.slf.meshy, zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.slf.meshx, slf.slf.meshy = utm.to_lat_long(slf.slf.meshx,\n slf.slf.meshy, zone)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\n<function token>\n<function token>\n\n\ndef sample(options):\n \"\"\"\n Set liquid boundary file from a selafin file\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" requires at least 2 file names and one series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n nod_list = []\n for nod in options.args[2].split(' '):\n nod_list.append(int(nod))\n else:\n if len(options.args) != 4:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" here requires 2 file names, 1 file root name for the partition and 1 series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n nod_list = []\n for nod in options.args[3].split(' '):\n nod_list.append(int(nod))\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSample ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n lqd = LQD(vrs=[zip(slf.slf.varnames, slf.slf.varunits), nod_list], date\n =slf.slf.datetime, times=slf.slf.tags['times'], series=slf.\n get_series(nod_list))\n lqd.put_content(out_file)\n\n\n<function token>\n<function token>\n\n\ndef calcs(options, code_name):\n \"\"\"\n Doing calcs, crunh, transf\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 2:\n raise TelemacException('\\nThe code \"calcs\" requires 2 file names\\n'\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"calcs\" requires 2 file names and 1 root file name for parallel inputs\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nCalculations for ' + path.basename(slf_file) + ' within ' +\n path.dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n calc_list = []\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n calc_list = vrs.split(':')\n if code_name == 'calcs':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CalcsSelafin(slf_file, times=times, root=root_file)\n print(\n ' ~> Assembling the following variables together into the file:')\n for calc in calc_list:\n if calc.upper() in 'WATER DEPTH':\n print(' +> WATER DEPTH')\n slf.calc_water_depth()\n if calc.upper() in 'KINETIC ENERGY':\n print(' +> KINETIC ENERGY')\n slf.calc_kinetic_energy()\n elif code_name == 'transf':\n times = float(options.tfrom), float(options.tstep), float(options.tstop\n )\n slf = TransfSelafin(slf_file, times=times, root=root_file)\n print(' ~> Computing an animation for the following variable(s):')\n for calc in calc_list:\n if calc.upper() in 'WAVE SURFACE':\n print(' +> WAVE SURFACE')\n slf.calc_free_surface_from_artemis()\n elif code_name == 'crunch':\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = CrunchSelafin(slf_file, times=times, root=root_file)\n print(' ~> Assembling the following variables into the file:')\n for calc in calc_list:\n if calc.upper() in 'SURFACE RANGE':\n print(' +> SURFACE RANGE')\n slf.calc_surface_range()\n if calc.upper() in 'MAXIMUM SPEED':\n print(' +> MAXIMUM SPEED')\n slf.calc_maximum_speed()\n if calc.upper() in 'TIME OF PEAK':\n print(' +> TIME OF PEAK')\n slf.calc_peak_time_modulo_m2()\n if calc.upper() in 'RESIDUAL U':\n print(' +> RESIDUAL U')\n slf.calc_residual_velocity()\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=float(\n options.azp))\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n slf.put_content(out_file)\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef spec(options):\n \"\"\"\n Spectral file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSpectral(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core(int(options.accuracy))\n else:\n slf.print_time_summary()\n\n\n<function token>\n\n\ndef alter(options):\n \"\"\"\n Modifications in the file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nAltering ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.slf.meshx = np.rad2deg(slf.slf.meshx / radius + long0)\n expo = np.exp(slf.slf.meshy / radius)\n slf.slf.meshy = np.rad2deg(2.0 * np.arctan(const * expo) - np.\n pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.slf.meshx = radius * (np.deg2rad(slf.slf.meshx) - long0)\n slf.slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.slf.\n meshy) / 2.0 + np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + \n np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.slf.meshx, slf.slf.meshy, zone = utm.from_lat_long(slf.slf.\n meshx, slf.slf.meshy, zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.slf.meshx, slf.slf.meshy = utm.to_lat_long(slf.slf.meshx,\n slf.slf.meshy, zone)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\n<function token>\n<function token>\n\n\ndef sample(options):\n \"\"\"\n Set liquid boundary file from a selafin file\n \"\"\"\n root_file = None\n if not options.parallel:\n if len(options.args) < 3:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" requires at least 2 file names and one series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n out_file = options.args[1]\n nod_list = []\n for nod in options.args[2].split(' '):\n nod_list.append(int(nod))\n else:\n if len(options.args) != 4:\n raise TelemacException(\n \"\"\"\nThe code \"sample\" here requires 2 file names, 1 file root name for the partition and 1 series of node numbers\n\"\"\"\n )\n slf_file = options.args[0]\n root_file = options.args[1]\n out_file = options.args[2]\n nod_list = []\n for nod in options.args[3].split(' '):\n nod_list.append(int(nod))\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.format\n (slf_file))\n print('\\n\\nSample ' + path.basename(slf_file) + ' within ' + path.\n dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ChopSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n lqd = LQD(vrs=[zip(slf.slf.varnames, slf.slf.varunits), nod_list], date\n =slf.slf.datetime, times=slf.slf.tags['times'], series=slf.\n get_series(nod_list))\n lqd.put_content(out_file)\n\n\n<function token>\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef spec(options):\n \"\"\"\n Spectral file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSpectral(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core(int(options.accuracy))\n else:\n slf.print_time_summary()\n\n\n<function token>\n\n\ndef alter(options):\n \"\"\"\n Modifications in the file\n \"\"\"\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) requires 2 file names\n\"\"\"\n )\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\n \"\"\"\nThe code \"alter\" (without --replace) here requires 2 file names and 1 file root name for the partition\n\"\"\"\n )\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = 'chop-tmp.slf'\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nAltering ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp), m_y=\n float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname, m_z=float(options.azm), p_z=\n float(options.azp))\n if options.sph2ll != None:\n radius = 6371000.0\n long0, lat0 = options.sph2ll.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0 / 2.0 + np.pi / 4.0)\n slf.slf.meshx = np.rad2deg(slf.slf.meshx / radius + long0)\n expo = np.exp(slf.slf.meshy / radius)\n slf.slf.meshy = np.rad2deg(2.0 * np.arctan(const * expo) - np.\n pi / 2.0)\n if options.ll2sph != None:\n radius = 6371000.0\n long0, lat0 = options.ll2sph.split(':')\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.slf.meshx = radius * (np.deg2rad(slf.slf.meshx) - long0)\n slf.slf.meshy = radius * (np.log(np.tan(np.deg2rad(slf.slf.\n meshy) / 2.0 + np.pi / 4.0)) - np.log(np.tan(lat0 / 2.0 + \n np.pi / 4.0)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.slf.meshx, slf.slf.meshy, zone = utm.from_lat_long(slf.slf.\n meshx, slf.slf.meshy, zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.slf.meshx, slf.slf.meshy = utm.to_lat_long(slf.slf.meshx,\n slf.slf.meshy, zone)\n slf.put_content(out_file)\n if options.freplace:\n move_file(out_file, slf_file)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n<function token>\n\n\ndef spec(options):\n \"\"\"\n Spectral file\n \"\"\"\n slf_files = options.input_files\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException('\\nCould not find the file named: {}'.\n format(slf_file))\n print('\\n\\nScanning ' + path.basename(slf_file) + ' within ' + path\n .dirname(slf_file) + '\\n' + '~' * 72 + '\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = int(options.tfrom), int(options.tstep), int(options.tstop)\n slf = ScanSpectral(slf_file, times=times, vrs=vrs)\n slf.print_header()\n if options.core:\n slf.print_core(int(options.accuracy))\n else:\n slf.print_time_summary()\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n" ]
false
98,953
f74af5c6229620d6030b422598adff35aea91284
#!/usr/bin/python import sys import getopt import string import shutil import subprocess import os.path class Usage(Exception): def __init__(self, msg): self.msg = msg def doprint(str): subprocess.call(["echo", str]) def spcall(array): return subprocess.call(array) def finds_match(str, suffixes): for suffix in suffixes: if str.endswith(suffix): return True return False def scrape_flat(path, matches): result = [] try: files = os.listdir(path) except: return result for f in files: if finds_match(f, matches): result.append(f) return result def get_students(dir): for root, list, files in os.walk(dir): return list # # python this_file.py students_dir code_dir pa? # def main(argv=None): if argv is None: argv = sys.argv students_dir = argv[1] if students_dir == "--help" or students_dir == "-h": doprint("Creates a set of directories (here), one for each student that has the corresponding") doprint("pa# directory. Each directory will have a copy of code/pa# and all of the .cpp/.h") doprint("files found in the student's pa# top-level") doprint("> grab dir_for_student_users dir_for_code pa#") return assignment = argv[3] code_dir = os.path.join(argv[2], assignment); students = get_students(students_dir) for stud in students: stud_src_dir = os.path.join(students_dir, stud, assignment) if os.path.exists(stud_src_dir): # copy the code files first, creating the stud directory at the same time. # code stores all of its .cpp and .h files in sub dirs (e.g. src or include) shutil.copytree(code_dir, stud) # now copy just the top-level cpp/h files from the student. files = scrape_flat(stud_src_dir, [".cpp", ".h"]) for f in files: src_f = os.path.join(stud_src_dir, f) try: shutil.copyfile(src_f, os.path.join(stud, f)) except: doprint(stud + ": couldn't copy file " + src_f) else: doprint(stud + ": does not have " + assignment) if __name__ == "__main__": sys.exit(main())
[ "#!/usr/bin/python\n\nimport sys\nimport getopt\nimport string\nimport shutil\nimport subprocess\nimport os.path\n\nclass Usage(Exception):\n def __init__(self, msg):\n self.msg = msg\n\ndef doprint(str):\n subprocess.call([\"echo\", str])\n\ndef spcall(array):\n return subprocess.call(array)\n\ndef finds_match(str, suffixes):\n for suffix in suffixes:\n if str.endswith(suffix):\n return True\n return False\n\ndef scrape_flat(path, matches):\n result = []\n try:\n files = os.listdir(path)\n except:\n return result\n\n for f in files:\n if finds_match(f, matches):\n result.append(f)\n return result\n\ndef get_students(dir):\n for root, list, files in os.walk(dir):\n return list\n\n#\n# python this_file.py students_dir code_dir pa?\n# \ndef main(argv=None):\n if argv is None:\n argv = sys.argv\n\n students_dir = argv[1]\n if students_dir == \"--help\" or students_dir == \"-h\":\n doprint(\"Creates a set of directories (here), one for each student that has the corresponding\")\n doprint(\"pa# directory. Each directory will have a copy of code/pa# and all of the .cpp/.h\")\n doprint(\"files found in the student's pa# top-level\")\n doprint(\"> grab dir_for_student_users dir_for_code pa#\")\n return\n\n assignment = argv[3]\n code_dir = os.path.join(argv[2], assignment);\n\n students = get_students(students_dir)\n for stud in students:\n stud_src_dir = os.path.join(students_dir, stud, assignment)\n if os.path.exists(stud_src_dir):\n # copy the code files first, creating the stud directory at the same time.\n # code stores all of its .cpp and .h files in sub dirs (e.g. src or include)\n shutil.copytree(code_dir, stud)\n # now copy just the top-level cpp/h files from the student.\n files = scrape_flat(stud_src_dir, [\".cpp\", \".h\"])\n for f in files:\n src_f = os.path.join(stud_src_dir, f)\n try:\n shutil.copyfile(src_f, os.path.join(stud, f))\n except:\n doprint(stud + \": couldn't copy file \" + src_f)\n else:\n doprint(stud + \": does not have \" + assignment)\n\n\nif __name__ == \"__main__\":\n sys.exit(main())\n\n", "import sys\nimport getopt\nimport string\nimport shutil\nimport subprocess\nimport os.path\n\n\nclass Usage(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n\ndef doprint(str):\n subprocess.call(['echo', str])\n\n\ndef spcall(array):\n return subprocess.call(array)\n\n\ndef finds_match(str, suffixes):\n for suffix in suffixes:\n if str.endswith(suffix):\n return True\n return False\n\n\ndef scrape_flat(path, matches):\n result = []\n try:\n files = os.listdir(path)\n except:\n return result\n for f in files:\n if finds_match(f, matches):\n result.append(f)\n return result\n\n\ndef get_students(dir):\n for root, list, files in os.walk(dir):\n return list\n\n\ndef main(argv=None):\n if argv is None:\n argv = sys.argv\n students_dir = argv[1]\n if students_dir == '--help' or students_dir == '-h':\n doprint(\n 'Creates a set of directories (here), one for each student that has the corresponding'\n )\n doprint(\n 'pa# directory. Each directory will have a copy of code/pa# and all of the .cpp/.h'\n )\n doprint(\"files found in the student's pa# top-level\")\n doprint('> grab dir_for_student_users dir_for_code pa#')\n return\n assignment = argv[3]\n code_dir = os.path.join(argv[2], assignment)\n students = get_students(students_dir)\n for stud in students:\n stud_src_dir = os.path.join(students_dir, stud, assignment)\n if os.path.exists(stud_src_dir):\n shutil.copytree(code_dir, stud)\n files = scrape_flat(stud_src_dir, ['.cpp', '.h'])\n for f in files:\n src_f = os.path.join(stud_src_dir, f)\n try:\n shutil.copyfile(src_f, os.path.join(stud, f))\n except:\n doprint(stud + \": couldn't copy file \" + src_f)\n else:\n doprint(stud + ': does not have ' + assignment)\n\n\nif __name__ == '__main__':\n sys.exit(main())\n", "<import token>\n\n\nclass Usage(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n\ndef doprint(str):\n subprocess.call(['echo', str])\n\n\ndef spcall(array):\n return subprocess.call(array)\n\n\ndef finds_match(str, suffixes):\n for suffix in suffixes:\n if str.endswith(suffix):\n return True\n return False\n\n\ndef scrape_flat(path, matches):\n result = []\n try:\n files = os.listdir(path)\n except:\n return result\n for f in files:\n if finds_match(f, matches):\n result.append(f)\n return result\n\n\ndef get_students(dir):\n for root, list, files in os.walk(dir):\n return list\n\n\ndef main(argv=None):\n if argv is None:\n argv = sys.argv\n students_dir = argv[1]\n if students_dir == '--help' or students_dir == '-h':\n doprint(\n 'Creates a set of directories (here), one for each student that has the corresponding'\n )\n doprint(\n 'pa# directory. Each directory will have a copy of code/pa# and all of the .cpp/.h'\n )\n doprint(\"files found in the student's pa# top-level\")\n doprint('> grab dir_for_student_users dir_for_code pa#')\n return\n assignment = argv[3]\n code_dir = os.path.join(argv[2], assignment)\n students = get_students(students_dir)\n for stud in students:\n stud_src_dir = os.path.join(students_dir, stud, assignment)\n if os.path.exists(stud_src_dir):\n shutil.copytree(code_dir, stud)\n files = scrape_flat(stud_src_dir, ['.cpp', '.h'])\n for f in files:\n src_f = os.path.join(stud_src_dir, f)\n try:\n shutil.copyfile(src_f, os.path.join(stud, f))\n except:\n doprint(stud + \": couldn't copy file \" + src_f)\n else:\n doprint(stud + ': does not have ' + assignment)\n\n\nif __name__ == '__main__':\n sys.exit(main())\n", "<import token>\n\n\nclass Usage(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n\ndef doprint(str):\n subprocess.call(['echo', str])\n\n\ndef spcall(array):\n return subprocess.call(array)\n\n\ndef finds_match(str, suffixes):\n for suffix in suffixes:\n if str.endswith(suffix):\n return True\n return False\n\n\ndef scrape_flat(path, matches):\n result = []\n try:\n files = os.listdir(path)\n except:\n return result\n for f in files:\n if finds_match(f, matches):\n result.append(f)\n return result\n\n\ndef get_students(dir):\n for root, list, files in os.walk(dir):\n return list\n\n\ndef main(argv=None):\n if argv is None:\n argv = sys.argv\n students_dir = argv[1]\n if students_dir == '--help' or students_dir == '-h':\n doprint(\n 'Creates a set of directories (here), one for each student that has the corresponding'\n )\n doprint(\n 'pa# directory. Each directory will have a copy of code/pa# and all of the .cpp/.h'\n )\n doprint(\"files found in the student's pa# top-level\")\n doprint('> grab dir_for_student_users dir_for_code pa#')\n return\n assignment = argv[3]\n code_dir = os.path.join(argv[2], assignment)\n students = get_students(students_dir)\n for stud in students:\n stud_src_dir = os.path.join(students_dir, stud, assignment)\n if os.path.exists(stud_src_dir):\n shutil.copytree(code_dir, stud)\n files = scrape_flat(stud_src_dir, ['.cpp', '.h'])\n for f in files:\n src_f = os.path.join(stud_src_dir, f)\n try:\n shutil.copyfile(src_f, os.path.join(stud, f))\n except:\n doprint(stud + \": couldn't copy file \" + src_f)\n else:\n doprint(stud + ': does not have ' + assignment)\n\n\n<code token>\n", "<import token>\n\n\nclass Usage(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n\ndef doprint(str):\n subprocess.call(['echo', str])\n\n\ndef spcall(array):\n return subprocess.call(array)\n\n\n<function token>\n\n\ndef scrape_flat(path, matches):\n result = []\n try:\n files = os.listdir(path)\n except:\n return result\n for f in files:\n if finds_match(f, matches):\n result.append(f)\n return result\n\n\ndef get_students(dir):\n for root, list, files in os.walk(dir):\n return list\n\n\ndef main(argv=None):\n if argv is None:\n argv = sys.argv\n students_dir = argv[1]\n if students_dir == '--help' or students_dir == '-h':\n doprint(\n 'Creates a set of directories (here), one for each student that has the corresponding'\n )\n doprint(\n 'pa# directory. Each directory will have a copy of code/pa# and all of the .cpp/.h'\n )\n doprint(\"files found in the student's pa# top-level\")\n doprint('> grab dir_for_student_users dir_for_code pa#')\n return\n assignment = argv[3]\n code_dir = os.path.join(argv[2], assignment)\n students = get_students(students_dir)\n for stud in students:\n stud_src_dir = os.path.join(students_dir, stud, assignment)\n if os.path.exists(stud_src_dir):\n shutil.copytree(code_dir, stud)\n files = scrape_flat(stud_src_dir, ['.cpp', '.h'])\n for f in files:\n src_f = os.path.join(stud_src_dir, f)\n try:\n shutil.copyfile(src_f, os.path.join(stud, f))\n except:\n doprint(stud + \": couldn't copy file \" + src_f)\n else:\n doprint(stud + ': does not have ' + assignment)\n\n\n<code token>\n", "<import token>\n\n\nclass Usage(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n\n<function token>\n\n\ndef spcall(array):\n return subprocess.call(array)\n\n\n<function token>\n\n\ndef scrape_flat(path, matches):\n result = []\n try:\n files = os.listdir(path)\n except:\n return result\n for f in files:\n if finds_match(f, matches):\n result.append(f)\n return result\n\n\ndef get_students(dir):\n for root, list, files in os.walk(dir):\n return list\n\n\ndef main(argv=None):\n if argv is None:\n argv = sys.argv\n students_dir = argv[1]\n if students_dir == '--help' or students_dir == '-h':\n doprint(\n 'Creates a set of directories (here), one for each student that has the corresponding'\n )\n doprint(\n 'pa# directory. Each directory will have a copy of code/pa# and all of the .cpp/.h'\n )\n doprint(\"files found in the student's pa# top-level\")\n doprint('> grab dir_for_student_users dir_for_code pa#')\n return\n assignment = argv[3]\n code_dir = os.path.join(argv[2], assignment)\n students = get_students(students_dir)\n for stud in students:\n stud_src_dir = os.path.join(students_dir, stud, assignment)\n if os.path.exists(stud_src_dir):\n shutil.copytree(code_dir, stud)\n files = scrape_flat(stud_src_dir, ['.cpp', '.h'])\n for f in files:\n src_f = os.path.join(stud_src_dir, f)\n try:\n shutil.copyfile(src_f, os.path.join(stud, f))\n except:\n doprint(stud + \": couldn't copy file \" + src_f)\n else:\n doprint(stud + ': does not have ' + assignment)\n\n\n<code token>\n", "<import token>\n\n\nclass Usage(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n\n<function token>\n\n\ndef spcall(array):\n return subprocess.call(array)\n\n\n<function token>\n\n\ndef scrape_flat(path, matches):\n result = []\n try:\n files = os.listdir(path)\n except:\n return result\n for f in files:\n if finds_match(f, matches):\n result.append(f)\n return result\n\n\ndef get_students(dir):\n for root, list, files in os.walk(dir):\n return list\n\n\n<function token>\n<code token>\n", "<import token>\n\n\nclass Usage(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef scrape_flat(path, matches):\n result = []\n try:\n files = os.listdir(path)\n except:\n return result\n for f in files:\n if finds_match(f, matches):\n result.append(f)\n return result\n\n\ndef get_students(dir):\n for root, list, files in os.walk(dir):\n return list\n\n\n<function token>\n<code token>\n", "<import token>\n\n\nclass Usage(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef get_students(dir):\n for root, list, files in os.walk(dir):\n return list\n\n\n<function token>\n<code token>\n", "<import token>\n\n\nclass Usage(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n\n\nclass Usage(Exception):\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n", "<import token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n" ]
false
98,954
de584b18018011c64a7ae26bbb11c10ecf2c8d95
# from PIL import Image # from os import listdir # from os.path import isfile, join import numpy as np import cv2 as cv class Preprocessor(object): # OFFSET_CROP = 50 #to have a little margin while cropping, saving contours BLOB_MAX_AREA = 16.0 def binarize(self, image_path): print("binarizing image: ", image_path) image = cv.imread(image_path, 0) # read image image = self.papyrusProblem(image, image_path) image = cv.GaussianBlur(image, (3, 3), 0) # apply blur ret3, th3 = cv.threshold(image, 0, 255, cv.THRESH_BINARY + cv.THRESH_OTSU) # Otsu thresholding return th3 # function to detect papyrus and make background lighter for otsu thresholding to work better def papyrusProblem(self, image, image_path): ret, th = cv.threshold(image, 240, 255, cv.THRESH_BINARY) # check how many pixels have a value over 240 (if more than 500000, its papyrus) num_pix = np.sum(th) / 255 if num_pix > 500000: ret2, th2 = cv.threshold(image, 25, 255, cv.THRESH_BINARY) thInv = cv.bitwise_not(th2) darkPixels = np.where(thInv == 255) # make pixels that are very black lighter to reduce contrast and improve binarization image[darkPixels[0], darkPixels[1]] = image[darkPixels[0], darkPixels[1]] + 150 return image # function to remove everything around the image (the letters and auxiliary items) # works by taking the largest connected component and removing everything around it. def removeExtras(self, image): kernelclose = cv.getStructuringElement(cv.MORPH_ELLIPSE, (50, 50)) kernelerode = cv.getStructuringElement(cv.MORPH_ELLIPSE, (25, 25)) # close image, to remove letters and other small items closed = cv.morphologyEx(image, cv.MORPH_CLOSE, kernelclose) nb_components, output, stats, centroids = cv.connectedComponentsWithStats(closed, connectivity=8) # remove background from stats sizes = stats[1:, -1] nb_components = nb_components - 1 if nb_components > 0: # get maximal connected component max_size = max(sizes) # initialize mask mask = np.zeros((output.shape), dtype="uint8") # loop through connected components, until largest is found for i in range(0, nb_components): if sizes[i] >= max_size: mask[output == i + 1] = 255 xMin = stats[i + 1, cv.CC_STAT_LEFT] xMax = stats[i + 1, cv.CC_STAT_WIDTH] + xMin yMin = stats[i + 1, cv.CC_STAT_TOP] yMax = yMin + stats[i + 1, cv.CC_STAT_HEIGHT] # erode mask so that there won't be a contour around it (since we've closed it before it has become slightly larger) erodedmask = cv.erode(mask, kernelerode, iterations=1) erodedmaskI = cv.bitwise_not(erodedmask) # apply mask masked = cv.bitwise_and(image, erodedmask) masked = cv.bitwise_or(masked, erodedmaskI) # remove large stains (such as tape) noStains = self.removeStains(masked[yMin:yMax, xMin:xMax]) noStainsI = cv.bitwise_not(noStains) # apply new mask final = cv.bitwise_and(noStains, masked[yMin:yMax, xMin:xMax]) final = cv.bitwise_or(noStainsI, final) return final return image # function to remove large stains (such as the tape which is used to attech the papyrus/perkament) def removeStains(self, image): # use a maximum allowed size and a minimum allowed size (heuristically decided) MAX_SIZE = 3000 MIN_SIZE = 20 # compute connected components and size of background nb_components, output, stats, centroids = cv.connectedComponentsWithStats(cv.bitwise_not(image), connectivity=8) background = max(stats[:, cv.CC_STAT_AREA]) # initialize mask mask = np.zeros((output.shape), dtype="uint8") # loop through every connected component, if not background for i in range(0, nb_components): if stats[i, cv.CC_STAT_AREA] != background: # if it is larger than the allowed size and the bounding box is for more # than 60% filled by the connected component, remove the component if stats[i, cv.CC_STAT_AREA] > MAX_SIZE and stats[i, cv.CC_STAT_AREA] > stats[i, cv.CC_STAT_WIDTH] * stats[i, cv.CC_STAT_WIDTH] * 0.6: mask[output == i] = 255 # if it is smaller than the allowed size, discard the connected component elif stats[i, cv.CC_STAT_AREA] < MIN_SIZE: mask[output == i] = 255 # mask result and return result = cv.bitwise_and(cv.bitwise_not(image), mask) return cv.bitwise_not(result) # make letters slightly larger, to make it easier to retrieve them def enlargeLetters(self, image): kernelopen = cv.getStructuringElement(cv.MORPH_ELLIPSE, (4, 4)) opened = cv.morphologyEx(image, cv.MORPH_OPEN, kernelopen) return opened def despeckle(self, array): kerneldilate = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3)) dilated = cv.dilate(array, kerneldilate, iterations=2) contours, hierachy = cv.findContours(dilated, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE) c = min(contours, key=cv.contourArea) newcontours = [] for c in contours: area = cv.contourArea(c) if (area < self.BLOB_MAX_AREA): newcontours.append(c) stencil = np.zeros(array.shape).astype(array.dtype) cv.drawContours(stencil, newcontours, -1, (255, 255, 0), 3) cv.fillPoly(stencil, [c], (255, 255, 255)) result = cv.bitwise_or(array, stencil) return result # def removeOuterborder(self, image): # # invert black and white # image = cv.bitwise_not(image) # # imCopy = image.copy() # contours, hierachy = cv.findContours(imCopy, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE) # c = max(contours, key=cv.contourArea) # # # fill outside contour parts # stencil = np.zeros(image.shape).astype(image.dtype) # cv.fillPoly(stencil, [c], (255, 255, 255)) # result = cv.bitwise_xor(image, stencil) # # # invert white back to black and vice versa # result = cv.bitwise_not(result) # return result # def arrayToImage(self, array, name): # numpy_array = np.array(array) # image = Image.fromarray(numpy_array.astype('uint8')) # image.save(name + ".jpg")
[ "# from PIL import Image\n# from os import listdir\n# from os.path import isfile, join\nimport numpy as np\nimport cv2 as cv\n\n\nclass Preprocessor(object):\n # OFFSET_CROP = 50 #to have a little margin while cropping, saving contours\n BLOB_MAX_AREA = 16.0\n\n def binarize(self, image_path):\n print(\"binarizing image: \", image_path)\n image = cv.imread(image_path, 0) # read image\n image = self.papyrusProblem(image, image_path)\n image = cv.GaussianBlur(image, (3, 3), 0) # apply blur\n ret3, th3 = cv.threshold(image, 0, 255, cv.THRESH_BINARY + cv.THRESH_OTSU) # Otsu thresholding\n return th3\n\n # function to detect papyrus and make background lighter for otsu thresholding to work better\n def papyrusProblem(self, image, image_path):\n ret, th = cv.threshold(image, 240, 255, cv.THRESH_BINARY)\n # check how many pixels have a value over 240 (if more than 500000, its papyrus)\n num_pix = np.sum(th) / 255\n if num_pix > 500000:\n ret2, th2 = cv.threshold(image, 25, 255, cv.THRESH_BINARY)\n thInv = cv.bitwise_not(th2)\n darkPixels = np.where(thInv == 255)\n # make pixels that are very black lighter to reduce contrast and improve binarization\n image[darkPixels[0], darkPixels[1]] = image[darkPixels[0], darkPixels[1]] + 150\n return image\n\n # function to remove everything around the image (the letters and auxiliary items)\n # works by taking the largest connected component and removing everything around it.\n def removeExtras(self, image):\n\n kernelclose = cv.getStructuringElement(cv.MORPH_ELLIPSE, (50, 50))\n kernelerode = cv.getStructuringElement(cv.MORPH_ELLIPSE, (25, 25))\n # close image, to remove letters and other small items\n closed = cv.morphologyEx(image, cv.MORPH_CLOSE, kernelclose)\n nb_components, output, stats, centroids = cv.connectedComponentsWithStats(closed, connectivity=8)\n # remove background from stats\n sizes = stats[1:, -1]\n nb_components = nb_components - 1\n\n if nb_components > 0:\n # get maximal connected component\n max_size = max(sizes)\n # initialize mask\n mask = np.zeros((output.shape), dtype=\"uint8\")\n # loop through connected components, until largest is found\n for i in range(0, nb_components):\n if sizes[i] >= max_size:\n mask[output == i + 1] = 255\n xMin = stats[i + 1, cv.CC_STAT_LEFT]\n xMax = stats[i + 1, cv.CC_STAT_WIDTH] + xMin\n yMin = stats[i + 1, cv.CC_STAT_TOP]\n yMax = yMin + stats[i + 1, cv.CC_STAT_HEIGHT]\n\n # erode mask so that there won't be a contour around it (since we've closed it before it has become slightly larger)\n erodedmask = cv.erode(mask, kernelerode, iterations=1)\n erodedmaskI = cv.bitwise_not(erodedmask)\n # apply mask\n masked = cv.bitwise_and(image, erodedmask)\n masked = cv.bitwise_or(masked, erodedmaskI)\n # remove large stains (such as tape)\n noStains = self.removeStains(masked[yMin:yMax, xMin:xMax])\n noStainsI = cv.bitwise_not(noStains)\n # apply new mask\n final = cv.bitwise_and(noStains, masked[yMin:yMax, xMin:xMax])\n final = cv.bitwise_or(noStainsI, final)\n return final\n\n return image\n\n # function to remove large stains (such as the tape which is used to attech the papyrus/perkament)\n def removeStains(self, image):\n # use a maximum allowed size and a minimum allowed size (heuristically decided)\n MAX_SIZE = 3000\n MIN_SIZE = 20\n # compute connected components and size of background\n nb_components, output, stats, centroids = cv.connectedComponentsWithStats(cv.bitwise_not(image), connectivity=8)\n background = max(stats[:, cv.CC_STAT_AREA])\n # initialize mask\n mask = np.zeros((output.shape), dtype=\"uint8\")\n # loop through every connected component, if not background\n for i in range(0, nb_components):\n if stats[i, cv.CC_STAT_AREA] != background:\n # if it is larger than the allowed size and the bounding box is for more\n # than 60% filled by the connected component, remove the component\n if stats[i, cv.CC_STAT_AREA] > MAX_SIZE and stats[i, cv.CC_STAT_AREA] > stats[i, cv.CC_STAT_WIDTH] * stats[i, cv.CC_STAT_WIDTH] * 0.6:\n mask[output == i] = 255\n # if it is smaller than the allowed size, discard the connected component\n elif stats[i, cv.CC_STAT_AREA] < MIN_SIZE:\n mask[output == i] = 255\n # mask result and return\n result = cv.bitwise_and(cv.bitwise_not(image), mask)\n return cv.bitwise_not(result)\n\n # make letters slightly larger, to make it easier to retrieve them\n def enlargeLetters(self, image):\n kernelopen = cv.getStructuringElement(cv.MORPH_ELLIPSE, (4, 4))\n opened = cv.morphologyEx(image, cv.MORPH_OPEN, kernelopen)\n return opened\n\n def despeckle(self, array):\n kerneldilate = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))\n dilated = cv.dilate(array, kerneldilate, iterations=2)\n\n contours, hierachy = cv.findContours(dilated, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)\n c = min(contours, key=cv.contourArea)\n newcontours = []\n for c in contours:\n area = cv.contourArea(c)\n if (area < self.BLOB_MAX_AREA):\n newcontours.append(c)\n\n stencil = np.zeros(array.shape).astype(array.dtype)\n cv.drawContours(stencil, newcontours, -1, (255, 255, 0), 3)\n cv.fillPoly(stencil, [c], (255, 255, 255))\n result = cv.bitwise_or(array, stencil)\n return result\n\n\n# def removeOuterborder(self, image):\n# # invert black and white\n# image = cv.bitwise_not(image)\n#\n# imCopy = image.copy()\n# contours, hierachy = cv.findContours(imCopy, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)\n# c = max(contours, key=cv.contourArea)\n#\n# # fill outside contour parts\n# stencil = np.zeros(image.shape).astype(image.dtype)\n# cv.fillPoly(stencil, [c], (255, 255, 255))\n# result = cv.bitwise_xor(image, stencil)\n#\n# # invert white back to black and vice versa\n# result = cv.bitwise_not(result)\n# return result\n\n# def arrayToImage(self, array, name):\n# numpy_array = np.array(array)\n# image = Image.fromarray(numpy_array.astype('uint8'))\n# image.save(name + \".jpg\")\n", "import numpy as np\nimport cv2 as cv\n\n\nclass Preprocessor(object):\n BLOB_MAX_AREA = 16.0\n\n def binarize(self, image_path):\n print('binarizing image: ', image_path)\n image = cv.imread(image_path, 0)\n image = self.papyrusProblem(image, image_path)\n image = cv.GaussianBlur(image, (3, 3), 0)\n ret3, th3 = cv.threshold(image, 0, 255, cv.THRESH_BINARY + cv.\n THRESH_OTSU)\n return th3\n\n def papyrusProblem(self, image, image_path):\n ret, th = cv.threshold(image, 240, 255, cv.THRESH_BINARY)\n num_pix = np.sum(th) / 255\n if num_pix > 500000:\n ret2, th2 = cv.threshold(image, 25, 255, cv.THRESH_BINARY)\n thInv = cv.bitwise_not(th2)\n darkPixels = np.where(thInv == 255)\n image[darkPixels[0], darkPixels[1]] = image[darkPixels[0],\n darkPixels[1]] + 150\n return image\n\n def removeExtras(self, image):\n kernelclose = cv.getStructuringElement(cv.MORPH_ELLIPSE, (50, 50))\n kernelerode = cv.getStructuringElement(cv.MORPH_ELLIPSE, (25, 25))\n closed = cv.morphologyEx(image, cv.MORPH_CLOSE, kernelclose)\n nb_components, output, stats, centroids = (cv.\n connectedComponentsWithStats(closed, connectivity=8))\n sizes = stats[1:, -1]\n nb_components = nb_components - 1\n if nb_components > 0:\n max_size = max(sizes)\n mask = np.zeros(output.shape, dtype='uint8')\n for i in range(0, nb_components):\n if sizes[i] >= max_size:\n mask[output == i + 1] = 255\n xMin = stats[i + 1, cv.CC_STAT_LEFT]\n xMax = stats[i + 1, cv.CC_STAT_WIDTH] + xMin\n yMin = stats[i + 1, cv.CC_STAT_TOP]\n yMax = yMin + stats[i + 1, cv.CC_STAT_HEIGHT]\n erodedmask = cv.erode(mask, kernelerode, iterations=1)\n erodedmaskI = cv.bitwise_not(erodedmask)\n masked = cv.bitwise_and(image, erodedmask)\n masked = cv.bitwise_or(masked, erodedmaskI)\n noStains = self.removeStains(masked[yMin:yMax, xMin:xMax])\n noStainsI = cv.bitwise_not(noStains)\n final = cv.bitwise_and(noStains, masked[yMin:yMax, xMin:xMax])\n final = cv.bitwise_or(noStainsI, final)\n return final\n return image\n\n def removeStains(self, image):\n MAX_SIZE = 3000\n MIN_SIZE = 20\n nb_components, output, stats, centroids = (cv.\n connectedComponentsWithStats(cv.bitwise_not(image), connectivity=8)\n )\n background = max(stats[:, cv.CC_STAT_AREA])\n mask = np.zeros(output.shape, dtype='uint8')\n for i in range(0, nb_components):\n if stats[i, cv.CC_STAT_AREA] != background:\n if stats[i, cv.CC_STAT_AREA] > MAX_SIZE and stats[i, cv.\n CC_STAT_AREA] > stats[i, cv.CC_STAT_WIDTH] * stats[i,\n cv.CC_STAT_WIDTH] * 0.6:\n mask[output == i] = 255\n elif stats[i, cv.CC_STAT_AREA] < MIN_SIZE:\n mask[output == i] = 255\n result = cv.bitwise_and(cv.bitwise_not(image), mask)\n return cv.bitwise_not(result)\n\n def enlargeLetters(self, image):\n kernelopen = cv.getStructuringElement(cv.MORPH_ELLIPSE, (4, 4))\n opened = cv.morphologyEx(image, cv.MORPH_OPEN, kernelopen)\n return opened\n\n def despeckle(self, array):\n kerneldilate = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))\n dilated = cv.dilate(array, kerneldilate, iterations=2)\n contours, hierachy = cv.findContours(dilated, cv.RETR_TREE, cv.\n CHAIN_APPROX_SIMPLE)\n c = min(contours, key=cv.contourArea)\n newcontours = []\n for c in contours:\n area = cv.contourArea(c)\n if area < self.BLOB_MAX_AREA:\n newcontours.append(c)\n stencil = np.zeros(array.shape).astype(array.dtype)\n cv.drawContours(stencil, newcontours, -1, (255, 255, 0), 3)\n cv.fillPoly(stencil, [c], (255, 255, 255))\n result = cv.bitwise_or(array, stencil)\n return result\n", "<import token>\n\n\nclass Preprocessor(object):\n BLOB_MAX_AREA = 16.0\n\n def binarize(self, image_path):\n print('binarizing image: ', image_path)\n image = cv.imread(image_path, 0)\n image = self.papyrusProblem(image, image_path)\n image = cv.GaussianBlur(image, (3, 3), 0)\n ret3, th3 = cv.threshold(image, 0, 255, cv.THRESH_BINARY + cv.\n THRESH_OTSU)\n return th3\n\n def papyrusProblem(self, image, image_path):\n ret, th = cv.threshold(image, 240, 255, cv.THRESH_BINARY)\n num_pix = np.sum(th) / 255\n if num_pix > 500000:\n ret2, th2 = cv.threshold(image, 25, 255, cv.THRESH_BINARY)\n thInv = cv.bitwise_not(th2)\n darkPixels = np.where(thInv == 255)\n image[darkPixels[0], darkPixels[1]] = image[darkPixels[0],\n darkPixels[1]] + 150\n return image\n\n def removeExtras(self, image):\n kernelclose = cv.getStructuringElement(cv.MORPH_ELLIPSE, (50, 50))\n kernelerode = cv.getStructuringElement(cv.MORPH_ELLIPSE, (25, 25))\n closed = cv.morphologyEx(image, cv.MORPH_CLOSE, kernelclose)\n nb_components, output, stats, centroids = (cv.\n connectedComponentsWithStats(closed, connectivity=8))\n sizes = stats[1:, -1]\n nb_components = nb_components - 1\n if nb_components > 0:\n max_size = max(sizes)\n mask = np.zeros(output.shape, dtype='uint8')\n for i in range(0, nb_components):\n if sizes[i] >= max_size:\n mask[output == i + 1] = 255\n xMin = stats[i + 1, cv.CC_STAT_LEFT]\n xMax = stats[i + 1, cv.CC_STAT_WIDTH] + xMin\n yMin = stats[i + 1, cv.CC_STAT_TOP]\n yMax = yMin + stats[i + 1, cv.CC_STAT_HEIGHT]\n erodedmask = cv.erode(mask, kernelerode, iterations=1)\n erodedmaskI = cv.bitwise_not(erodedmask)\n masked = cv.bitwise_and(image, erodedmask)\n masked = cv.bitwise_or(masked, erodedmaskI)\n noStains = self.removeStains(masked[yMin:yMax, xMin:xMax])\n noStainsI = cv.bitwise_not(noStains)\n final = cv.bitwise_and(noStains, masked[yMin:yMax, xMin:xMax])\n final = cv.bitwise_or(noStainsI, final)\n return final\n return image\n\n def removeStains(self, image):\n MAX_SIZE = 3000\n MIN_SIZE = 20\n nb_components, output, stats, centroids = (cv.\n connectedComponentsWithStats(cv.bitwise_not(image), connectivity=8)\n )\n background = max(stats[:, cv.CC_STAT_AREA])\n mask = np.zeros(output.shape, dtype='uint8')\n for i in range(0, nb_components):\n if stats[i, cv.CC_STAT_AREA] != background:\n if stats[i, cv.CC_STAT_AREA] > MAX_SIZE and stats[i, cv.\n CC_STAT_AREA] > stats[i, cv.CC_STAT_WIDTH] * stats[i,\n cv.CC_STAT_WIDTH] * 0.6:\n mask[output == i] = 255\n elif stats[i, cv.CC_STAT_AREA] < MIN_SIZE:\n mask[output == i] = 255\n result = cv.bitwise_and(cv.bitwise_not(image), mask)\n return cv.bitwise_not(result)\n\n def enlargeLetters(self, image):\n kernelopen = cv.getStructuringElement(cv.MORPH_ELLIPSE, (4, 4))\n opened = cv.morphologyEx(image, cv.MORPH_OPEN, kernelopen)\n return opened\n\n def despeckle(self, array):\n kerneldilate = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))\n dilated = cv.dilate(array, kerneldilate, iterations=2)\n contours, hierachy = cv.findContours(dilated, cv.RETR_TREE, cv.\n CHAIN_APPROX_SIMPLE)\n c = min(contours, key=cv.contourArea)\n newcontours = []\n for c in contours:\n area = cv.contourArea(c)\n if area < self.BLOB_MAX_AREA:\n newcontours.append(c)\n stencil = np.zeros(array.shape).astype(array.dtype)\n cv.drawContours(stencil, newcontours, -1, (255, 255, 0), 3)\n cv.fillPoly(stencil, [c], (255, 255, 255))\n result = cv.bitwise_or(array, stencil)\n return result\n", "<import token>\n\n\nclass Preprocessor(object):\n <assignment token>\n\n def binarize(self, image_path):\n print('binarizing image: ', image_path)\n image = cv.imread(image_path, 0)\n image = self.papyrusProblem(image, image_path)\n image = cv.GaussianBlur(image, (3, 3), 0)\n ret3, th3 = cv.threshold(image, 0, 255, cv.THRESH_BINARY + cv.\n THRESH_OTSU)\n return th3\n\n def papyrusProblem(self, image, image_path):\n ret, th = cv.threshold(image, 240, 255, cv.THRESH_BINARY)\n num_pix = np.sum(th) / 255\n if num_pix > 500000:\n ret2, th2 = cv.threshold(image, 25, 255, cv.THRESH_BINARY)\n thInv = cv.bitwise_not(th2)\n darkPixels = np.where(thInv == 255)\n image[darkPixels[0], darkPixels[1]] = image[darkPixels[0],\n darkPixels[1]] + 150\n return image\n\n def removeExtras(self, image):\n kernelclose = cv.getStructuringElement(cv.MORPH_ELLIPSE, (50, 50))\n kernelerode = cv.getStructuringElement(cv.MORPH_ELLIPSE, (25, 25))\n closed = cv.morphologyEx(image, cv.MORPH_CLOSE, kernelclose)\n nb_components, output, stats, centroids = (cv.\n connectedComponentsWithStats(closed, connectivity=8))\n sizes = stats[1:, -1]\n nb_components = nb_components - 1\n if nb_components > 0:\n max_size = max(sizes)\n mask = np.zeros(output.shape, dtype='uint8')\n for i in range(0, nb_components):\n if sizes[i] >= max_size:\n mask[output == i + 1] = 255\n xMin = stats[i + 1, cv.CC_STAT_LEFT]\n xMax = stats[i + 1, cv.CC_STAT_WIDTH] + xMin\n yMin = stats[i + 1, cv.CC_STAT_TOP]\n yMax = yMin + stats[i + 1, cv.CC_STAT_HEIGHT]\n erodedmask = cv.erode(mask, kernelerode, iterations=1)\n erodedmaskI = cv.bitwise_not(erodedmask)\n masked = cv.bitwise_and(image, erodedmask)\n masked = cv.bitwise_or(masked, erodedmaskI)\n noStains = self.removeStains(masked[yMin:yMax, xMin:xMax])\n noStainsI = cv.bitwise_not(noStains)\n final = cv.bitwise_and(noStains, masked[yMin:yMax, xMin:xMax])\n final = cv.bitwise_or(noStainsI, final)\n return final\n return image\n\n def removeStains(self, image):\n MAX_SIZE = 3000\n MIN_SIZE = 20\n nb_components, output, stats, centroids = (cv.\n connectedComponentsWithStats(cv.bitwise_not(image), connectivity=8)\n )\n background = max(stats[:, cv.CC_STAT_AREA])\n mask = np.zeros(output.shape, dtype='uint8')\n for i in range(0, nb_components):\n if stats[i, cv.CC_STAT_AREA] != background:\n if stats[i, cv.CC_STAT_AREA] > MAX_SIZE and stats[i, cv.\n CC_STAT_AREA] > stats[i, cv.CC_STAT_WIDTH] * stats[i,\n cv.CC_STAT_WIDTH] * 0.6:\n mask[output == i] = 255\n elif stats[i, cv.CC_STAT_AREA] < MIN_SIZE:\n mask[output == i] = 255\n result = cv.bitwise_and(cv.bitwise_not(image), mask)\n return cv.bitwise_not(result)\n\n def enlargeLetters(self, image):\n kernelopen = cv.getStructuringElement(cv.MORPH_ELLIPSE, (4, 4))\n opened = cv.morphologyEx(image, cv.MORPH_OPEN, kernelopen)\n return opened\n\n def despeckle(self, array):\n kerneldilate = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))\n dilated = cv.dilate(array, kerneldilate, iterations=2)\n contours, hierachy = cv.findContours(dilated, cv.RETR_TREE, cv.\n CHAIN_APPROX_SIMPLE)\n c = min(contours, key=cv.contourArea)\n newcontours = []\n for c in contours:\n area = cv.contourArea(c)\n if area < self.BLOB_MAX_AREA:\n newcontours.append(c)\n stencil = np.zeros(array.shape).astype(array.dtype)\n cv.drawContours(stencil, newcontours, -1, (255, 255, 0), 3)\n cv.fillPoly(stencil, [c], (255, 255, 255))\n result = cv.bitwise_or(array, stencil)\n return result\n", "<import token>\n\n\nclass Preprocessor(object):\n <assignment token>\n <function token>\n\n def papyrusProblem(self, image, image_path):\n ret, th = cv.threshold(image, 240, 255, cv.THRESH_BINARY)\n num_pix = np.sum(th) / 255\n if num_pix > 500000:\n ret2, th2 = cv.threshold(image, 25, 255, cv.THRESH_BINARY)\n thInv = cv.bitwise_not(th2)\n darkPixels = np.where(thInv == 255)\n image[darkPixels[0], darkPixels[1]] = image[darkPixels[0],\n darkPixels[1]] + 150\n return image\n\n def removeExtras(self, image):\n kernelclose = cv.getStructuringElement(cv.MORPH_ELLIPSE, (50, 50))\n kernelerode = cv.getStructuringElement(cv.MORPH_ELLIPSE, (25, 25))\n closed = cv.morphologyEx(image, cv.MORPH_CLOSE, kernelclose)\n nb_components, output, stats, centroids = (cv.\n connectedComponentsWithStats(closed, connectivity=8))\n sizes = stats[1:, -1]\n nb_components = nb_components - 1\n if nb_components > 0:\n max_size = max(sizes)\n mask = np.zeros(output.shape, dtype='uint8')\n for i in range(0, nb_components):\n if sizes[i] >= max_size:\n mask[output == i + 1] = 255\n xMin = stats[i + 1, cv.CC_STAT_LEFT]\n xMax = stats[i + 1, cv.CC_STAT_WIDTH] + xMin\n yMin = stats[i + 1, cv.CC_STAT_TOP]\n yMax = yMin + stats[i + 1, cv.CC_STAT_HEIGHT]\n erodedmask = cv.erode(mask, kernelerode, iterations=1)\n erodedmaskI = cv.bitwise_not(erodedmask)\n masked = cv.bitwise_and(image, erodedmask)\n masked = cv.bitwise_or(masked, erodedmaskI)\n noStains = self.removeStains(masked[yMin:yMax, xMin:xMax])\n noStainsI = cv.bitwise_not(noStains)\n final = cv.bitwise_and(noStains, masked[yMin:yMax, xMin:xMax])\n final = cv.bitwise_or(noStainsI, final)\n return final\n return image\n\n def removeStains(self, image):\n MAX_SIZE = 3000\n MIN_SIZE = 20\n nb_components, output, stats, centroids = (cv.\n connectedComponentsWithStats(cv.bitwise_not(image), connectivity=8)\n )\n background = max(stats[:, cv.CC_STAT_AREA])\n mask = np.zeros(output.shape, dtype='uint8')\n for i in range(0, nb_components):\n if stats[i, cv.CC_STAT_AREA] != background:\n if stats[i, cv.CC_STAT_AREA] > MAX_SIZE and stats[i, cv.\n CC_STAT_AREA] > stats[i, cv.CC_STAT_WIDTH] * stats[i,\n cv.CC_STAT_WIDTH] * 0.6:\n mask[output == i] = 255\n elif stats[i, cv.CC_STAT_AREA] < MIN_SIZE:\n mask[output == i] = 255\n result = cv.bitwise_and(cv.bitwise_not(image), mask)\n return cv.bitwise_not(result)\n\n def enlargeLetters(self, image):\n kernelopen = cv.getStructuringElement(cv.MORPH_ELLIPSE, (4, 4))\n opened = cv.morphologyEx(image, cv.MORPH_OPEN, kernelopen)\n return opened\n\n def despeckle(self, array):\n kerneldilate = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))\n dilated = cv.dilate(array, kerneldilate, iterations=2)\n contours, hierachy = cv.findContours(dilated, cv.RETR_TREE, cv.\n CHAIN_APPROX_SIMPLE)\n c = min(contours, key=cv.contourArea)\n newcontours = []\n for c in contours:\n area = cv.contourArea(c)\n if area < self.BLOB_MAX_AREA:\n newcontours.append(c)\n stencil = np.zeros(array.shape).astype(array.dtype)\n cv.drawContours(stencil, newcontours, -1, (255, 255, 0), 3)\n cv.fillPoly(stencil, [c], (255, 255, 255))\n result = cv.bitwise_or(array, stencil)\n return result\n", "<import token>\n\n\nclass Preprocessor(object):\n <assignment token>\n <function token>\n\n def papyrusProblem(self, image, image_path):\n ret, th = cv.threshold(image, 240, 255, cv.THRESH_BINARY)\n num_pix = np.sum(th) / 255\n if num_pix > 500000:\n ret2, th2 = cv.threshold(image, 25, 255, cv.THRESH_BINARY)\n thInv = cv.bitwise_not(th2)\n darkPixels = np.where(thInv == 255)\n image[darkPixels[0], darkPixels[1]] = image[darkPixels[0],\n darkPixels[1]] + 150\n return image\n <function token>\n\n def removeStains(self, image):\n MAX_SIZE = 3000\n MIN_SIZE = 20\n nb_components, output, stats, centroids = (cv.\n connectedComponentsWithStats(cv.bitwise_not(image), connectivity=8)\n )\n background = max(stats[:, cv.CC_STAT_AREA])\n mask = np.zeros(output.shape, dtype='uint8')\n for i in range(0, nb_components):\n if stats[i, cv.CC_STAT_AREA] != background:\n if stats[i, cv.CC_STAT_AREA] > MAX_SIZE and stats[i, cv.\n CC_STAT_AREA] > stats[i, cv.CC_STAT_WIDTH] * stats[i,\n cv.CC_STAT_WIDTH] * 0.6:\n mask[output == i] = 255\n elif stats[i, cv.CC_STAT_AREA] < MIN_SIZE:\n mask[output == i] = 255\n result = cv.bitwise_and(cv.bitwise_not(image), mask)\n return cv.bitwise_not(result)\n\n def enlargeLetters(self, image):\n kernelopen = cv.getStructuringElement(cv.MORPH_ELLIPSE, (4, 4))\n opened = cv.morphologyEx(image, cv.MORPH_OPEN, kernelopen)\n return opened\n\n def despeckle(self, array):\n kerneldilate = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))\n dilated = cv.dilate(array, kerneldilate, iterations=2)\n contours, hierachy = cv.findContours(dilated, cv.RETR_TREE, cv.\n CHAIN_APPROX_SIMPLE)\n c = min(contours, key=cv.contourArea)\n newcontours = []\n for c in contours:\n area = cv.contourArea(c)\n if area < self.BLOB_MAX_AREA:\n newcontours.append(c)\n stencil = np.zeros(array.shape).astype(array.dtype)\n cv.drawContours(stencil, newcontours, -1, (255, 255, 0), 3)\n cv.fillPoly(stencil, [c], (255, 255, 255))\n result = cv.bitwise_or(array, stencil)\n return result\n", "<import token>\n\n\nclass Preprocessor(object):\n <assignment token>\n <function token>\n\n def papyrusProblem(self, image, image_path):\n ret, th = cv.threshold(image, 240, 255, cv.THRESH_BINARY)\n num_pix = np.sum(th) / 255\n if num_pix > 500000:\n ret2, th2 = cv.threshold(image, 25, 255, cv.THRESH_BINARY)\n thInv = cv.bitwise_not(th2)\n darkPixels = np.where(thInv == 255)\n image[darkPixels[0], darkPixels[1]] = image[darkPixels[0],\n darkPixels[1]] + 150\n return image\n <function token>\n\n def removeStains(self, image):\n MAX_SIZE = 3000\n MIN_SIZE = 20\n nb_components, output, stats, centroids = (cv.\n connectedComponentsWithStats(cv.bitwise_not(image), connectivity=8)\n )\n background = max(stats[:, cv.CC_STAT_AREA])\n mask = np.zeros(output.shape, dtype='uint8')\n for i in range(0, nb_components):\n if stats[i, cv.CC_STAT_AREA] != background:\n if stats[i, cv.CC_STAT_AREA] > MAX_SIZE and stats[i, cv.\n CC_STAT_AREA] > stats[i, cv.CC_STAT_WIDTH] * stats[i,\n cv.CC_STAT_WIDTH] * 0.6:\n mask[output == i] = 255\n elif stats[i, cv.CC_STAT_AREA] < MIN_SIZE:\n mask[output == i] = 255\n result = cv.bitwise_and(cv.bitwise_not(image), mask)\n return cv.bitwise_not(result)\n <function token>\n\n def despeckle(self, array):\n kerneldilate = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))\n dilated = cv.dilate(array, kerneldilate, iterations=2)\n contours, hierachy = cv.findContours(dilated, cv.RETR_TREE, cv.\n CHAIN_APPROX_SIMPLE)\n c = min(contours, key=cv.contourArea)\n newcontours = []\n for c in contours:\n area = cv.contourArea(c)\n if area < self.BLOB_MAX_AREA:\n newcontours.append(c)\n stencil = np.zeros(array.shape).astype(array.dtype)\n cv.drawContours(stencil, newcontours, -1, (255, 255, 0), 3)\n cv.fillPoly(stencil, [c], (255, 255, 255))\n result = cv.bitwise_or(array, stencil)\n return result\n", "<import token>\n\n\nclass Preprocessor(object):\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n def removeStains(self, image):\n MAX_SIZE = 3000\n MIN_SIZE = 20\n nb_components, output, stats, centroids = (cv.\n connectedComponentsWithStats(cv.bitwise_not(image), connectivity=8)\n )\n background = max(stats[:, cv.CC_STAT_AREA])\n mask = np.zeros(output.shape, dtype='uint8')\n for i in range(0, nb_components):\n if stats[i, cv.CC_STAT_AREA] != background:\n if stats[i, cv.CC_STAT_AREA] > MAX_SIZE and stats[i, cv.\n CC_STAT_AREA] > stats[i, cv.CC_STAT_WIDTH] * stats[i,\n cv.CC_STAT_WIDTH] * 0.6:\n mask[output == i] = 255\n elif stats[i, cv.CC_STAT_AREA] < MIN_SIZE:\n mask[output == i] = 255\n result = cv.bitwise_and(cv.bitwise_not(image), mask)\n return cv.bitwise_not(result)\n <function token>\n\n def despeckle(self, array):\n kerneldilate = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))\n dilated = cv.dilate(array, kerneldilate, iterations=2)\n contours, hierachy = cv.findContours(dilated, cv.RETR_TREE, cv.\n CHAIN_APPROX_SIMPLE)\n c = min(contours, key=cv.contourArea)\n newcontours = []\n for c in contours:\n area = cv.contourArea(c)\n if area < self.BLOB_MAX_AREA:\n newcontours.append(c)\n stencil = np.zeros(array.shape).astype(array.dtype)\n cv.drawContours(stencil, newcontours, -1, (255, 255, 0), 3)\n cv.fillPoly(stencil, [c], (255, 255, 255))\n result = cv.bitwise_or(array, stencil)\n return result\n", "<import token>\n\n\nclass Preprocessor(object):\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n def removeStains(self, image):\n MAX_SIZE = 3000\n MIN_SIZE = 20\n nb_components, output, stats, centroids = (cv.\n connectedComponentsWithStats(cv.bitwise_not(image), connectivity=8)\n )\n background = max(stats[:, cv.CC_STAT_AREA])\n mask = np.zeros(output.shape, dtype='uint8')\n for i in range(0, nb_components):\n if stats[i, cv.CC_STAT_AREA] != background:\n if stats[i, cv.CC_STAT_AREA] > MAX_SIZE and stats[i, cv.\n CC_STAT_AREA] > stats[i, cv.CC_STAT_WIDTH] * stats[i,\n cv.CC_STAT_WIDTH] * 0.6:\n mask[output == i] = 255\n elif stats[i, cv.CC_STAT_AREA] < MIN_SIZE:\n mask[output == i] = 255\n result = cv.bitwise_and(cv.bitwise_not(image), mask)\n return cv.bitwise_not(result)\n <function token>\n <function token>\n", "<import token>\n\n\nclass Preprocessor(object):\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<class token>\n" ]
false
98,955
8c68710cf4144835fe44e5ea552f7fa0c6a57979
# flake8: noqa from .base import make_agg_primitive, make_trans_primitive from .standard import * from .utils import ( get_aggregation_primitives, get_default_aggregation_primitives, get_default_transform_primitives, get_transform_primitives, list_primitives )
[ "# flake8: noqa\nfrom .base import make_agg_primitive, make_trans_primitive\nfrom .standard import *\nfrom .utils import (\n get_aggregation_primitives,\n get_default_aggregation_primitives,\n get_default_transform_primitives,\n get_transform_primitives,\n list_primitives\n)\n", "from .base import make_agg_primitive, make_trans_primitive\nfrom .standard import *\nfrom .utils import get_aggregation_primitives, get_default_aggregation_primitives, get_default_transform_primitives, get_transform_primitives, list_primitives\n", "<import token>\n" ]
false
98,956
40b6f4fe5fcc92e7d57b4ec1b9046424ef49739c
from django.core.exceptions import ValidationError from django.db import models class Poll(models.Model): """Основная таблица опроса""" name = models.CharField('Название опроса', max_length=200) desc = models.TextField('Описание') start_date = models.DateTimeField('Дата начала', null=True, blank=True, help_text='Учтите, после заполнения даты, изменить опрос вы не сможете.') end_date = models.DateTimeField('Дата окончания', null=True, blank=True) class Meta: verbose_name = 'Опрос' verbose_name_plural = 'Опросы' def __str__(self): return self.name def clean(self,*args,**kwargs): if Poll.objects.filter(pk=self.pk).exists(): if self.start_date and Poll.objects.filter(pk=self.pk,start_date__isnull=False): raise ValidationError({'start_date': 'Дата уже заполнена, опрос изменить невозможно'}) if self.start_date and not self.end_date: raise ValidationError({'end_date':'Заполните дату окончания опроса'}) class Question(models.Model): """Таблица вопроса""" CHOICES_TYPE = ( ('Один выбранный вариант', 'Один выбранный вариант'), ('Несколько выбранных вариантов', 'Несколько выбранных вариантов'), ('Свой вариант ответа', 'Свой вариант ответа'), ) poll = models.ForeignKey(Poll, on_delete=models.CASCADE, related_name='question', verbose_name='Опрос') desc = models.TextField('Текст вопроса') type = models.CharField('Тип вопроса', choices=CHOICES_TYPE, max_length=55, help_text='Если вы выбрали один или несколько вариантов ответа, заполните поля "описание выбора",' 'если вы выбрали "свой вариант ответа", не заполняйте это поле') class Meta: verbose_name = 'Вопрос' verbose_name_plural = 'Вопросы' def __str__(self): return self.desc class QuesChoices(models.Model): """Таблица варинтов ответа""" question = models.ForeignKey(Question, related_name='ques_choices', on_delete=models.CASCADE) desc = models.CharField('Описание выбора', null=True, blank=True, max_length=200) class Meta: verbose_name = 'Выбор ответа' verbose_name_plural = 'Выбор ответа' def __str__(self): return self.desc class UserId(models.Model): """Таблица пользователей""" user_id = models.IntegerField('Уникальный идентификатор пользователя', unique=True) class Meta: verbose_name = 'Опрос пользователей' verbose_name_plural = 'Опросы пользователей' def __str__(self): return f'{self.user_id} - id пользователя' class UserPoll(models.Model): """Таблица проходящяго опроса пользователя""" user_id = models.ForeignKey(UserId, on_delete=models.CASCADE) poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name='Опрос') class Meta: verbose_name = 'Пройденный опрос' verbose_name_plural = 'Пройденный опрос' def __str__(self): return self.poll.name class UserAnswerQues(models.Model): """Таблица вопроса и ответа/ответов на него пользователем""" user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE, verbose_name='Опрос', related_name='user_poll') question = models.ForeignKey(Question, on_delete=models.CASCADE, verbose_name='Вопрос') text = models.TextField(null=True, blank=True, verbose_name='Свой вариант ответа') ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=True, verbose_name='Выбранный ответ/ответы') class Meta: verbose_name = 'Ответ пользователя' verbose_name_plural = 'Ответ пользователя' def __str__(self): return self.question.desc
[ "from django.core.exceptions import ValidationError\nfrom django.db import models\n\n\nclass Poll(models.Model):\n \"\"\"Основная таблица опроса\"\"\"\n name = models.CharField('Название опроса', max_length=200)\n desc = models.TextField('Описание')\n start_date = models.DateTimeField('Дата начала', null=True, blank=True,\n help_text='Учтите, после заполнения даты, изменить опрос вы не сможете.')\n end_date = models.DateTimeField('Дата окончания', null=True, blank=True)\n\n class Meta:\n verbose_name = 'Опрос'\n verbose_name_plural = 'Опросы'\n\n def __str__(self):\n return self.name\n\n def clean(self,*args,**kwargs):\n if Poll.objects.filter(pk=self.pk).exists():\n if self.start_date and Poll.objects.filter(pk=self.pk,start_date__isnull=False):\n raise ValidationError({'start_date': 'Дата уже заполнена, опрос изменить невозможно'})\n if self.start_date and not self.end_date:\n raise ValidationError({'end_date':'Заполните дату окончания опроса'})\n\n\n\nclass Question(models.Model):\n \"\"\"Таблица вопроса\"\"\"\n CHOICES_TYPE = (\n ('Один выбранный вариант', 'Один выбранный вариант'),\n ('Несколько выбранных вариантов', 'Несколько выбранных вариантов'),\n ('Свой вариант ответа', 'Свой вариант ответа'),\n )\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, related_name='question', verbose_name='Опрос')\n desc = models.TextField('Текст вопроса')\n type = models.CharField('Тип вопроса', choices=CHOICES_TYPE, max_length=55,\n help_text='Если вы выбрали один или несколько вариантов ответа, заполните поля \"описание выбора\",'\n 'если вы выбрали \"свой вариант ответа\", не заполняйте это поле')\n\n class Meta:\n verbose_name = 'Вопрос'\n verbose_name_plural = 'Вопросы'\n\n def __str__(self):\n return self.desc\n\n\nclass QuesChoices(models.Model):\n \"\"\"Таблица варинтов ответа\"\"\"\n question = models.ForeignKey(Question, related_name='ques_choices', on_delete=models.CASCADE)\n desc = models.CharField('Описание выбора', null=True, blank=True, max_length=200)\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя', unique=True)\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name='Опрос')\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE, verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE, verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name='Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=True, verbose_name='Выбранный ответ/ответы')\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "from django.core.exceptions import ValidationError\nfrom django.db import models\n\n\nclass Poll(models.Model):\n \"\"\"Основная таблица опроса\"\"\"\n name = models.CharField('Название опроса', max_length=200)\n desc = models.TextField('Описание')\n start_date = models.DateTimeField('Дата начала', null=True, blank=True,\n help_text=\n 'Учтите, после заполнения даты, изменить опрос вы не сможете.')\n end_date = models.DateTimeField('Дата окончания', null=True, blank=True)\n\n\n class Meta:\n verbose_name = 'Опрос'\n verbose_name_plural = 'Опросы'\n\n def __str__(self):\n return self.name\n\n def clean(self, *args, **kwargs):\n if Poll.objects.filter(pk=self.pk).exists():\n if self.start_date and Poll.objects.filter(pk=self.pk,\n start_date__isnull=False):\n raise ValidationError({'start_date':\n 'Дата уже заполнена, опрос изменить невозможно'})\n if self.start_date and not self.end_date:\n raise ValidationError({'end_date':\n 'Заполните дату окончания опроса'})\n\n\nclass Question(models.Model):\n \"\"\"Таблица вопроса\"\"\"\n CHOICES_TYPE = ('Один выбранный вариант', 'Один выбранный вариант'), (\n 'Несколько выбранных вариантов', 'Несколько выбранных вариантов'), (\n 'Свой вариант ответа', 'Свой вариант ответа')\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, related_name=\n 'question', verbose_name='Опрос')\n desc = models.TextField('Текст вопроса')\n type = models.CharField('Тип вопроса', choices=CHOICES_TYPE, max_length\n =55, help_text=\n 'Если вы выбрали один или несколько вариантов ответа, заполните поля \"описание выбора\",если вы выбрали \"свой вариант ответа\", не заполняйте это поле'\n )\n\n\n class Meta:\n verbose_name = 'Вопрос'\n verbose_name_plural = 'Вопросы'\n\n def __str__(self):\n return self.desc\n\n\nclass QuesChoices(models.Model):\n \"\"\"Таблица варинтов ответа\"\"\"\n question = models.ForeignKey(Question, related_name='ques_choices',\n on_delete=models.CASCADE)\n desc = models.CharField('Описание выбора', null=True, blank=True,\n max_length=200)\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n\n\nclass Poll(models.Model):\n \"\"\"Основная таблица опроса\"\"\"\n name = models.CharField('Название опроса', max_length=200)\n desc = models.TextField('Описание')\n start_date = models.DateTimeField('Дата начала', null=True, blank=True,\n help_text=\n 'Учтите, после заполнения даты, изменить опрос вы не сможете.')\n end_date = models.DateTimeField('Дата окончания', null=True, blank=True)\n\n\n class Meta:\n verbose_name = 'Опрос'\n verbose_name_plural = 'Опросы'\n\n def __str__(self):\n return self.name\n\n def clean(self, *args, **kwargs):\n if Poll.objects.filter(pk=self.pk).exists():\n if self.start_date and Poll.objects.filter(pk=self.pk,\n start_date__isnull=False):\n raise ValidationError({'start_date':\n 'Дата уже заполнена, опрос изменить невозможно'})\n if self.start_date and not self.end_date:\n raise ValidationError({'end_date':\n 'Заполните дату окончания опроса'})\n\n\nclass Question(models.Model):\n \"\"\"Таблица вопроса\"\"\"\n CHOICES_TYPE = ('Один выбранный вариант', 'Один выбранный вариант'), (\n 'Несколько выбранных вариантов', 'Несколько выбранных вариантов'), (\n 'Свой вариант ответа', 'Свой вариант ответа')\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, related_name=\n 'question', verbose_name='Опрос')\n desc = models.TextField('Текст вопроса')\n type = models.CharField('Тип вопроса', choices=CHOICES_TYPE, max_length\n =55, help_text=\n 'Если вы выбрали один или несколько вариантов ответа, заполните поля \"описание выбора\",если вы выбрали \"свой вариант ответа\", не заполняйте это поле'\n )\n\n\n class Meta:\n verbose_name = 'Вопрос'\n verbose_name_plural = 'Вопросы'\n\n def __str__(self):\n return self.desc\n\n\nclass QuesChoices(models.Model):\n \"\"\"Таблица варинтов ответа\"\"\"\n question = models.ForeignKey(Question, related_name='ques_choices',\n on_delete=models.CASCADE)\n desc = models.CharField('Описание выбора', null=True, blank=True,\n max_length=200)\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n\n\nclass Poll(models.Model):\n <docstring token>\n name = models.CharField('Название опроса', max_length=200)\n desc = models.TextField('Описание')\n start_date = models.DateTimeField('Дата начала', null=True, blank=True,\n help_text=\n 'Учтите, после заполнения даты, изменить опрос вы не сможете.')\n end_date = models.DateTimeField('Дата окончания', null=True, blank=True)\n\n\n class Meta:\n verbose_name = 'Опрос'\n verbose_name_plural = 'Опросы'\n\n def __str__(self):\n return self.name\n\n def clean(self, *args, **kwargs):\n if Poll.objects.filter(pk=self.pk).exists():\n if self.start_date and Poll.objects.filter(pk=self.pk,\n start_date__isnull=False):\n raise ValidationError({'start_date':\n 'Дата уже заполнена, опрос изменить невозможно'})\n if self.start_date and not self.end_date:\n raise ValidationError({'end_date':\n 'Заполните дату окончания опроса'})\n\n\nclass Question(models.Model):\n \"\"\"Таблица вопроса\"\"\"\n CHOICES_TYPE = ('Один выбранный вариант', 'Один выбранный вариант'), (\n 'Несколько выбранных вариантов', 'Несколько выбранных вариантов'), (\n 'Свой вариант ответа', 'Свой вариант ответа')\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, related_name=\n 'question', verbose_name='Опрос')\n desc = models.TextField('Текст вопроса')\n type = models.CharField('Тип вопроса', choices=CHOICES_TYPE, max_length\n =55, help_text=\n 'Если вы выбрали один или несколько вариантов ответа, заполните поля \"описание выбора\",если вы выбрали \"свой вариант ответа\", не заполняйте это поле'\n )\n\n\n class Meta:\n verbose_name = 'Вопрос'\n verbose_name_plural = 'Вопросы'\n\n def __str__(self):\n return self.desc\n\n\nclass QuesChoices(models.Model):\n \"\"\"Таблица варинтов ответа\"\"\"\n question = models.ForeignKey(Question, related_name='ques_choices',\n on_delete=models.CASCADE)\n desc = models.CharField('Описание выбора', null=True, blank=True,\n max_length=200)\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n\n\nclass Poll(models.Model):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n verbose_name = 'Опрос'\n verbose_name_plural = 'Опросы'\n\n def __str__(self):\n return self.name\n\n def clean(self, *args, **kwargs):\n if Poll.objects.filter(pk=self.pk).exists():\n if self.start_date and Poll.objects.filter(pk=self.pk,\n start_date__isnull=False):\n raise ValidationError({'start_date':\n 'Дата уже заполнена, опрос изменить невозможно'})\n if self.start_date and not self.end_date:\n raise ValidationError({'end_date':\n 'Заполните дату окончания опроса'})\n\n\nclass Question(models.Model):\n \"\"\"Таблица вопроса\"\"\"\n CHOICES_TYPE = ('Один выбранный вариант', 'Один выбранный вариант'), (\n 'Несколько выбранных вариантов', 'Несколько выбранных вариантов'), (\n 'Свой вариант ответа', 'Свой вариант ответа')\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, related_name=\n 'question', verbose_name='Опрос')\n desc = models.TextField('Текст вопроса')\n type = models.CharField('Тип вопроса', choices=CHOICES_TYPE, max_length\n =55, help_text=\n 'Если вы выбрали один или несколько вариантов ответа, заполните поля \"описание выбора\",если вы выбрали \"свой вариант ответа\", не заполняйте это поле'\n )\n\n\n class Meta:\n verbose_name = 'Вопрос'\n verbose_name_plural = 'Вопросы'\n\n def __str__(self):\n return self.desc\n\n\nclass QuesChoices(models.Model):\n \"\"\"Таблица варинтов ответа\"\"\"\n question = models.ForeignKey(Question, related_name='ques_choices',\n on_delete=models.CASCADE)\n desc = models.CharField('Описание выбора', null=True, blank=True,\n max_length=200)\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n\n\nclass Poll(models.Model):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n verbose_name = 'Опрос'\n verbose_name_plural = 'Опросы'\n <function token>\n\n def clean(self, *args, **kwargs):\n if Poll.objects.filter(pk=self.pk).exists():\n if self.start_date and Poll.objects.filter(pk=self.pk,\n start_date__isnull=False):\n raise ValidationError({'start_date':\n 'Дата уже заполнена, опрос изменить невозможно'})\n if self.start_date and not self.end_date:\n raise ValidationError({'end_date':\n 'Заполните дату окончания опроса'})\n\n\nclass Question(models.Model):\n \"\"\"Таблица вопроса\"\"\"\n CHOICES_TYPE = ('Один выбранный вариант', 'Один выбранный вариант'), (\n 'Несколько выбранных вариантов', 'Несколько выбранных вариантов'), (\n 'Свой вариант ответа', 'Свой вариант ответа')\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, related_name=\n 'question', verbose_name='Опрос')\n desc = models.TextField('Текст вопроса')\n type = models.CharField('Тип вопроса', choices=CHOICES_TYPE, max_length\n =55, help_text=\n 'Если вы выбрали один или несколько вариантов ответа, заполните поля \"описание выбора\",если вы выбрали \"свой вариант ответа\", не заполняйте это поле'\n )\n\n\n class Meta:\n verbose_name = 'Вопрос'\n verbose_name_plural = 'Вопросы'\n\n def __str__(self):\n return self.desc\n\n\nclass QuesChoices(models.Model):\n \"\"\"Таблица варинтов ответа\"\"\"\n question = models.ForeignKey(Question, related_name='ques_choices',\n on_delete=models.CASCADE)\n desc = models.CharField('Описание выбора', null=True, blank=True,\n max_length=200)\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n\n\nclass Poll(models.Model):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n verbose_name = 'Опрос'\n verbose_name_plural = 'Опросы'\n <function token>\n <function token>\n\n\nclass Question(models.Model):\n \"\"\"Таблица вопроса\"\"\"\n CHOICES_TYPE = ('Один выбранный вариант', 'Один выбранный вариант'), (\n 'Несколько выбранных вариантов', 'Несколько выбранных вариантов'), (\n 'Свой вариант ответа', 'Свой вариант ответа')\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, related_name=\n 'question', verbose_name='Опрос')\n desc = models.TextField('Текст вопроса')\n type = models.CharField('Тип вопроса', choices=CHOICES_TYPE, max_length\n =55, help_text=\n 'Если вы выбрали один или несколько вариантов ответа, заполните поля \"описание выбора\",если вы выбрали \"свой вариант ответа\", не заполняйте это поле'\n )\n\n\n class Meta:\n verbose_name = 'Вопрос'\n verbose_name_plural = 'Вопросы'\n\n def __str__(self):\n return self.desc\n\n\nclass QuesChoices(models.Model):\n \"\"\"Таблица варинтов ответа\"\"\"\n question = models.ForeignKey(Question, related_name='ques_choices',\n on_delete=models.CASCADE)\n desc = models.CharField('Описание выбора', null=True, blank=True,\n max_length=200)\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n\n\nclass Question(models.Model):\n \"\"\"Таблица вопроса\"\"\"\n CHOICES_TYPE = ('Один выбранный вариант', 'Один выбранный вариант'), (\n 'Несколько выбранных вариантов', 'Несколько выбранных вариантов'), (\n 'Свой вариант ответа', 'Свой вариант ответа')\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, related_name=\n 'question', verbose_name='Опрос')\n desc = models.TextField('Текст вопроса')\n type = models.CharField('Тип вопроса', choices=CHOICES_TYPE, max_length\n =55, help_text=\n 'Если вы выбрали один или несколько вариантов ответа, заполните поля \"описание выбора\",если вы выбрали \"свой вариант ответа\", не заполняйте это поле'\n )\n\n\n class Meta:\n verbose_name = 'Вопрос'\n verbose_name_plural = 'Вопросы'\n\n def __str__(self):\n return self.desc\n\n\nclass QuesChoices(models.Model):\n \"\"\"Таблица варинтов ответа\"\"\"\n question = models.ForeignKey(Question, related_name='ques_choices',\n on_delete=models.CASCADE)\n desc = models.CharField('Описание выбора', null=True, blank=True,\n max_length=200)\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n\n\nclass Question(models.Model):\n <docstring token>\n CHOICES_TYPE = ('Один выбранный вариант', 'Один выбранный вариант'), (\n 'Несколько выбранных вариантов', 'Несколько выбранных вариантов'), (\n 'Свой вариант ответа', 'Свой вариант ответа')\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, related_name=\n 'question', verbose_name='Опрос')\n desc = models.TextField('Текст вопроса')\n type = models.CharField('Тип вопроса', choices=CHOICES_TYPE, max_length\n =55, help_text=\n 'Если вы выбрали один или несколько вариантов ответа, заполните поля \"описание выбора\",если вы выбрали \"свой вариант ответа\", не заполняйте это поле'\n )\n\n\n class Meta:\n verbose_name = 'Вопрос'\n verbose_name_plural = 'Вопросы'\n\n def __str__(self):\n return self.desc\n\n\nclass QuesChoices(models.Model):\n \"\"\"Таблица варинтов ответа\"\"\"\n question = models.ForeignKey(Question, related_name='ques_choices',\n on_delete=models.CASCADE)\n desc = models.CharField('Описание выбора', null=True, blank=True,\n max_length=200)\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n\n\nclass Question(models.Model):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n verbose_name = 'Вопрос'\n verbose_name_plural = 'Вопросы'\n\n def __str__(self):\n return self.desc\n\n\nclass QuesChoices(models.Model):\n \"\"\"Таблица варинтов ответа\"\"\"\n question = models.ForeignKey(Question, related_name='ques_choices',\n on_delete=models.CASCADE)\n desc = models.CharField('Описание выбора', null=True, blank=True,\n max_length=200)\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n\n\nclass Question(models.Model):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n verbose_name = 'Вопрос'\n verbose_name_plural = 'Вопросы'\n <function token>\n\n\nclass QuesChoices(models.Model):\n \"\"\"Таблица варинтов ответа\"\"\"\n question = models.ForeignKey(Question, related_name='ques_choices',\n on_delete=models.CASCADE)\n desc = models.CharField('Описание выбора', null=True, blank=True,\n max_length=200)\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n\n\nclass QuesChoices(models.Model):\n \"\"\"Таблица варинтов ответа\"\"\"\n question = models.ForeignKey(Question, related_name='ques_choices',\n on_delete=models.CASCADE)\n desc = models.CharField('Описание выбора', null=True, blank=True,\n max_length=200)\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n\n\nclass QuesChoices(models.Model):\n <docstring token>\n question = models.ForeignKey(Question, related_name='ques_choices',\n on_delete=models.CASCADE)\n desc = models.CharField('Описание выбора', null=True, blank=True,\n max_length=200)\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n\n\nclass QuesChoices(models.Model):\n <docstring token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n\n def __str__(self):\n return self.desc\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n\n\nclass QuesChoices(models.Model):\n <docstring token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n verbose_name = 'Выбор ответа'\n verbose_name_plural = 'Выбор ответа'\n <function token>\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserId(models.Model):\n \"\"\"Таблица пользователей\"\"\"\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserId(models.Model):\n <docstring token>\n user_id = models.IntegerField('Уникальный идентификатор пользователя',\n unique=True)\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserId(models.Model):\n <docstring token>\n <assignment token>\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n\n def __str__(self):\n return f'{self.user_id} - id пользователя'\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserId(models.Model):\n <docstring token>\n <assignment token>\n\n\n class Meta:\n verbose_name = 'Опрос пользователей'\n verbose_name_plural = 'Опросы пользователей'\n <function token>\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserPoll(models.Model):\n \"\"\"Таблица проходящяго опроса пользователя\"\"\"\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserPoll(models.Model):\n <docstring token>\n user_id = models.ForeignKey(UserId, on_delete=models.CASCADE)\n poll = models.ForeignKey(Poll, on_delete=models.CASCADE, verbose_name=\n 'Опрос')\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserPoll(models.Model):\n <docstring token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n\n def __str__(self):\n return self.poll.name\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserPoll(models.Model):\n <docstring token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n verbose_name = 'Пройденный опрос'\n verbose_name_plural = 'Пройденный опрос'\n <function token>\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserAnswerQues(models.Model):\n \"\"\"Таблица вопроса и ответа/ответов на него пользователем\"\"\"\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserAnswerQues(models.Model):\n <docstring token>\n user_poll = models.ForeignKey(UserPoll, on_delete=models.CASCADE,\n verbose_name='Опрос', related_name='user_poll')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n verbose_name='Вопрос')\n text = models.TextField(null=True, blank=True, verbose_name=\n 'Свой вариант ответа')\n ques_choices = models.ManyToManyField(QuesChoices, null=True, blank=\n True, verbose_name='Выбранный ответ/ответы')\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserAnswerQues(models.Model):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n\n def __str__(self):\n return self.question.desc\n", "<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UserAnswerQues(models.Model):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n class Meta:\n verbose_name = 'Ответ пользователя'\n verbose_name_plural = 'Ответ пользователя'\n <function token>\n", "<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n" ]
false
98,957
632dacafbfd02d3ea4a755fd0acb3be781d8f160
import numpy as np from generators.RoomGenerator import Room from utils.graphUtils import findAdjacentRegions from utils.geometryUtils import * import random import pdb class Corridor(Room): def __init__(self): self.width = self.getRandomCorridorWidth() self.points = [] self.intersecting_regions = [] def getRandomCorridorWidth(self): return 0.025; def create(self, room, regions, graph): corridor_w = self.getRandomCorridorWidth(); candidate_walls = [] #find wall with enough size for the corridor for wall in room.walls: vwall = graph.vertices[wall,:] length = np.linalg.norm(graph.vertices[wall[0],:] - graph.vertices[wall[1],:]) if length > corridor_w: candidate_walls.append(wall) #choose wall randomly wall = candidate_walls[random.randint(0, len(candidate_walls)-1)] #cast corridor from the middle of the wall, outwards p1 = graph.vertices[wall[0],:]; p2 = graph.vertices[wall[1],:]; edge_vec = p1 - p2 middle_point = p2 + edge_vec/2 edge_vec = edge_vec/np.linalg.norm(edge_vec) corridor_p1 = middle_point - edge_vec*corridor_w/2; corridor_p2 = middle_point + edge_vec*corridor_w/2; corridor_dir = np.array([edge_vec[1], -edge_vec[0]]); self.points.append(middle_point) self.points.append(middle_point + corridor_dir*2) corridor_segment = np.array([middle_point, middle_point + corridor_dir*2]) #mark regions intersecting the corridor as "used" for p_idx in range(len(graph.polygons)): pvertices = graph.vertices[graph.polygons[p_idx],:] if is_intersecting_segment_polygon(corridor_segment, pvertices): self.intersecting_regions.append(p_idx) pdb.set_trace() #choose the first available region of the corridor def computeCorridorSegment(self, middle_point, corridor_dir, room_neighbors, roomv, graph, regions): #CAST ONLY ONE RAY WHILE COMPUTING THE CORRIDOR l1a = middle_point l1b = (middle_point + corridor_dir) R1l = [] for reg in room_neighbors: vv = graph.vertices[regions[reg].polygon,:]; for idx in range(vv.shape[0]-1): #TODO: if corridor intersects with the same room cancel it # skip walls that belong to the room itself if not np.any(np.where((roomv == vv[idx+1,:]).all(axis=1))[0]): l2a = vv[idx,:] l2b = vv[idx+1,:] R1 = find_intersection(l1a,l1b,l2a,l2b) if R1.any(): R1l.append(R1) #find closest intersection bestd = 10000; bestr1 = []; for r1 in R1l: d = np.linalg.norm(r1 - middle_point) if d < bestd: bestd = d; bestr1 = r1; return bestr1
[ "import numpy as np\nfrom generators.RoomGenerator import Room\nfrom utils.graphUtils import findAdjacentRegions\nfrom utils.geometryUtils import *\nimport random\nimport pdb\n\nclass Corridor(Room):\n\n def __init__(self):\n self.width = self.getRandomCorridorWidth()\n self.points = []\n self.intersecting_regions = []\n\n def getRandomCorridorWidth(self):\n return 0.025;\n\n def create(self, room, regions, graph):\n corridor_w = self.getRandomCorridorWidth();\n\n candidate_walls = []\n\n #find wall with enough size for the corridor\n for wall in room.walls:\n vwall = graph.vertices[wall,:]\n length = np.linalg.norm(graph.vertices[wall[0],:] - graph.vertices[wall[1],:])\n if length > corridor_w:\n candidate_walls.append(wall)\n\n #choose wall randomly\n wall = candidate_walls[random.randint(0, len(candidate_walls)-1)]\n\n #cast corridor from the middle of the wall, outwards\n p1 = graph.vertices[wall[0],:];\n p2 = graph.vertices[wall[1],:];\n edge_vec = p1 - p2\n middle_point = p2 + edge_vec/2\n edge_vec = edge_vec/np.linalg.norm(edge_vec)\n corridor_p1 = middle_point - edge_vec*corridor_w/2;\n corridor_p2 = middle_point + edge_vec*corridor_w/2;\n corridor_dir = np.array([edge_vec[1], -edge_vec[0]]);\n\n self.points.append(middle_point)\n self.points.append(middle_point + corridor_dir*2)\n\n corridor_segment = np.array([middle_point, middle_point + corridor_dir*2])\n\n #mark regions intersecting the corridor as \"used\"\n\n for p_idx in range(len(graph.polygons)):\n pvertices = graph.vertices[graph.polygons[p_idx],:]\n if is_intersecting_segment_polygon(corridor_segment, pvertices):\n self.intersecting_regions.append(p_idx)\n\n pdb.set_trace()\n\n\n\n #choose the first available region of the corridor\n\n\n\n\n def computeCorridorSegment(self, middle_point, corridor_dir,\n room_neighbors, roomv, graph, regions):\n\n #CAST ONLY ONE RAY WHILE COMPUTING THE CORRIDOR\n\n l1a = middle_point\n l1b = (middle_point + corridor_dir)\n R1l = []\n for reg in room_neighbors:\n vv = graph.vertices[regions[reg].polygon,:];\n for idx in range(vv.shape[0]-1):\n\n #TODO: if corridor intersects with the same room cancel it\n # skip walls that belong to the room itself\n if not np.any(np.where((roomv == vv[idx+1,:]).all(axis=1))[0]):\n l2a = vv[idx,:]\n l2b = vv[idx+1,:]\n R1 = find_intersection(l1a,l1b,l2a,l2b)\n if R1.any(): R1l.append(R1)\n\n #find closest intersection\n bestd = 10000;\n bestr1 = [];\n for r1 in R1l:\n d = np.linalg.norm(r1 - middle_point)\n if d < bestd:\n bestd = d;\n bestr1 = r1;\n return bestr1\n", "import numpy as np\nfrom generators.RoomGenerator import Room\nfrom utils.graphUtils import findAdjacentRegions\nfrom utils.geometryUtils import *\nimport random\nimport pdb\n\n\nclass Corridor(Room):\n\n def __init__(self):\n self.width = self.getRandomCorridorWidth()\n self.points = []\n self.intersecting_regions = []\n\n def getRandomCorridorWidth(self):\n return 0.025\n\n def create(self, room, regions, graph):\n corridor_w = self.getRandomCorridorWidth()\n candidate_walls = []\n for wall in room.walls:\n vwall = graph.vertices[wall, :]\n length = np.linalg.norm(graph.vertices[wall[0], :] - graph.\n vertices[wall[1], :])\n if length > corridor_w:\n candidate_walls.append(wall)\n wall = candidate_walls[random.randint(0, len(candidate_walls) - 1)]\n p1 = graph.vertices[wall[0], :]\n p2 = graph.vertices[wall[1], :]\n edge_vec = p1 - p2\n middle_point = p2 + edge_vec / 2\n edge_vec = edge_vec / np.linalg.norm(edge_vec)\n corridor_p1 = middle_point - edge_vec * corridor_w / 2\n corridor_p2 = middle_point + edge_vec * corridor_w / 2\n corridor_dir = np.array([edge_vec[1], -edge_vec[0]])\n self.points.append(middle_point)\n self.points.append(middle_point + corridor_dir * 2)\n corridor_segment = np.array([middle_point, middle_point + \n corridor_dir * 2])\n for p_idx in range(len(graph.polygons)):\n pvertices = graph.vertices[graph.polygons[p_idx], :]\n if is_intersecting_segment_polygon(corridor_segment, pvertices):\n self.intersecting_regions.append(p_idx)\n pdb.set_trace()\n\n def computeCorridorSegment(self, middle_point, corridor_dir,\n room_neighbors, roomv, graph, regions):\n l1a = middle_point\n l1b = middle_point + corridor_dir\n R1l = []\n for reg in room_neighbors:\n vv = graph.vertices[regions[reg].polygon, :]\n for idx in range(vv.shape[0] - 1):\n if not np.any(np.where((roomv == vv[idx + 1, :]).all(axis=1\n ))[0]):\n l2a = vv[idx, :]\n l2b = vv[idx + 1, :]\n R1 = find_intersection(l1a, l1b, l2a, l2b)\n if R1.any():\n R1l.append(R1)\n bestd = 10000\n bestr1 = []\n for r1 in R1l:\n d = np.linalg.norm(r1 - middle_point)\n if d < bestd:\n bestd = d\n bestr1 = r1\n return bestr1\n", "<import token>\n\n\nclass Corridor(Room):\n\n def __init__(self):\n self.width = self.getRandomCorridorWidth()\n self.points = []\n self.intersecting_regions = []\n\n def getRandomCorridorWidth(self):\n return 0.025\n\n def create(self, room, regions, graph):\n corridor_w = self.getRandomCorridorWidth()\n candidate_walls = []\n for wall in room.walls:\n vwall = graph.vertices[wall, :]\n length = np.linalg.norm(graph.vertices[wall[0], :] - graph.\n vertices[wall[1], :])\n if length > corridor_w:\n candidate_walls.append(wall)\n wall = candidate_walls[random.randint(0, len(candidate_walls) - 1)]\n p1 = graph.vertices[wall[0], :]\n p2 = graph.vertices[wall[1], :]\n edge_vec = p1 - p2\n middle_point = p2 + edge_vec / 2\n edge_vec = edge_vec / np.linalg.norm(edge_vec)\n corridor_p1 = middle_point - edge_vec * corridor_w / 2\n corridor_p2 = middle_point + edge_vec * corridor_w / 2\n corridor_dir = np.array([edge_vec[1], -edge_vec[0]])\n self.points.append(middle_point)\n self.points.append(middle_point + corridor_dir * 2)\n corridor_segment = np.array([middle_point, middle_point + \n corridor_dir * 2])\n for p_idx in range(len(graph.polygons)):\n pvertices = graph.vertices[graph.polygons[p_idx], :]\n if is_intersecting_segment_polygon(corridor_segment, pvertices):\n self.intersecting_regions.append(p_idx)\n pdb.set_trace()\n\n def computeCorridorSegment(self, middle_point, corridor_dir,\n room_neighbors, roomv, graph, regions):\n l1a = middle_point\n l1b = middle_point + corridor_dir\n R1l = []\n for reg in room_neighbors:\n vv = graph.vertices[regions[reg].polygon, :]\n for idx in range(vv.shape[0] - 1):\n if not np.any(np.where((roomv == vv[idx + 1, :]).all(axis=1\n ))[0]):\n l2a = vv[idx, :]\n l2b = vv[idx + 1, :]\n R1 = find_intersection(l1a, l1b, l2a, l2b)\n if R1.any():\n R1l.append(R1)\n bestd = 10000\n bestr1 = []\n for r1 in R1l:\n d = np.linalg.norm(r1 - middle_point)\n if d < bestd:\n bestd = d\n bestr1 = r1\n return bestr1\n", "<import token>\n\n\nclass Corridor(Room):\n\n def __init__(self):\n self.width = self.getRandomCorridorWidth()\n self.points = []\n self.intersecting_regions = []\n <function token>\n\n def create(self, room, regions, graph):\n corridor_w = self.getRandomCorridorWidth()\n candidate_walls = []\n for wall in room.walls:\n vwall = graph.vertices[wall, :]\n length = np.linalg.norm(graph.vertices[wall[0], :] - graph.\n vertices[wall[1], :])\n if length > corridor_w:\n candidate_walls.append(wall)\n wall = candidate_walls[random.randint(0, len(candidate_walls) - 1)]\n p1 = graph.vertices[wall[0], :]\n p2 = graph.vertices[wall[1], :]\n edge_vec = p1 - p2\n middle_point = p2 + edge_vec / 2\n edge_vec = edge_vec / np.linalg.norm(edge_vec)\n corridor_p1 = middle_point - edge_vec * corridor_w / 2\n corridor_p2 = middle_point + edge_vec * corridor_w / 2\n corridor_dir = np.array([edge_vec[1], -edge_vec[0]])\n self.points.append(middle_point)\n self.points.append(middle_point + corridor_dir * 2)\n corridor_segment = np.array([middle_point, middle_point + \n corridor_dir * 2])\n for p_idx in range(len(graph.polygons)):\n pvertices = graph.vertices[graph.polygons[p_idx], :]\n if is_intersecting_segment_polygon(corridor_segment, pvertices):\n self.intersecting_regions.append(p_idx)\n pdb.set_trace()\n\n def computeCorridorSegment(self, middle_point, corridor_dir,\n room_neighbors, roomv, graph, regions):\n l1a = middle_point\n l1b = middle_point + corridor_dir\n R1l = []\n for reg in room_neighbors:\n vv = graph.vertices[regions[reg].polygon, :]\n for idx in range(vv.shape[0] - 1):\n if not np.any(np.where((roomv == vv[idx + 1, :]).all(axis=1\n ))[0]):\n l2a = vv[idx, :]\n l2b = vv[idx + 1, :]\n R1 = find_intersection(l1a, l1b, l2a, l2b)\n if R1.any():\n R1l.append(R1)\n bestd = 10000\n bestr1 = []\n for r1 in R1l:\n d = np.linalg.norm(r1 - middle_point)\n if d < bestd:\n bestd = d\n bestr1 = r1\n return bestr1\n", "<import token>\n\n\nclass Corridor(Room):\n <function token>\n <function token>\n\n def create(self, room, regions, graph):\n corridor_w = self.getRandomCorridorWidth()\n candidate_walls = []\n for wall in room.walls:\n vwall = graph.vertices[wall, :]\n length = np.linalg.norm(graph.vertices[wall[0], :] - graph.\n vertices[wall[1], :])\n if length > corridor_w:\n candidate_walls.append(wall)\n wall = candidate_walls[random.randint(0, len(candidate_walls) - 1)]\n p1 = graph.vertices[wall[0], :]\n p2 = graph.vertices[wall[1], :]\n edge_vec = p1 - p2\n middle_point = p2 + edge_vec / 2\n edge_vec = edge_vec / np.linalg.norm(edge_vec)\n corridor_p1 = middle_point - edge_vec * corridor_w / 2\n corridor_p2 = middle_point + edge_vec * corridor_w / 2\n corridor_dir = np.array([edge_vec[1], -edge_vec[0]])\n self.points.append(middle_point)\n self.points.append(middle_point + corridor_dir * 2)\n corridor_segment = np.array([middle_point, middle_point + \n corridor_dir * 2])\n for p_idx in range(len(graph.polygons)):\n pvertices = graph.vertices[graph.polygons[p_idx], :]\n if is_intersecting_segment_polygon(corridor_segment, pvertices):\n self.intersecting_regions.append(p_idx)\n pdb.set_trace()\n\n def computeCorridorSegment(self, middle_point, corridor_dir,\n room_neighbors, roomv, graph, regions):\n l1a = middle_point\n l1b = middle_point + corridor_dir\n R1l = []\n for reg in room_neighbors:\n vv = graph.vertices[regions[reg].polygon, :]\n for idx in range(vv.shape[0] - 1):\n if not np.any(np.where((roomv == vv[idx + 1, :]).all(axis=1\n ))[0]):\n l2a = vv[idx, :]\n l2b = vv[idx + 1, :]\n R1 = find_intersection(l1a, l1b, l2a, l2b)\n if R1.any():\n R1l.append(R1)\n bestd = 10000\n bestr1 = []\n for r1 in R1l:\n d = np.linalg.norm(r1 - middle_point)\n if d < bestd:\n bestd = d\n bestr1 = r1\n return bestr1\n", "<import token>\n\n\nclass Corridor(Room):\n <function token>\n <function token>\n <function token>\n\n def computeCorridorSegment(self, middle_point, corridor_dir,\n room_neighbors, roomv, graph, regions):\n l1a = middle_point\n l1b = middle_point + corridor_dir\n R1l = []\n for reg in room_neighbors:\n vv = graph.vertices[regions[reg].polygon, :]\n for idx in range(vv.shape[0] - 1):\n if not np.any(np.where((roomv == vv[idx + 1, :]).all(axis=1\n ))[0]):\n l2a = vv[idx, :]\n l2b = vv[idx + 1, :]\n R1 = find_intersection(l1a, l1b, l2a, l2b)\n if R1.any():\n R1l.append(R1)\n bestd = 10000\n bestr1 = []\n for r1 in R1l:\n d = np.linalg.norm(r1 - middle_point)\n if d < bestd:\n bestd = d\n bestr1 = r1\n return bestr1\n", "<import token>\n\n\nclass Corridor(Room):\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<class token>\n" ]
false
98,958
188f5cc45449e943387994870cc97b02581ec9c0
for w in range(10): row = "" for h in range(10): if w == 0 or h == 0 or w == 9 or h == 9: row += "#" else: row += "." if w < 10: row += " " print(row)
[ "for w in range(10):\n row = \"\"\n for h in range(10):\n if w == 0 or h == 0 or w == 9 or h == 9:\n row += \"#\"\n else:\n row += \".\"\n if w < 10:\n row += \" \"\n print(row)\n\n", "for w in range(10):\n row = ''\n for h in range(10):\n if w == 0 or h == 0 or w == 9 or h == 9:\n row += '#'\n else:\n row += '.'\n if w < 10:\n row += ' '\n print(row)\n", "<code token>\n" ]
false
98,959
e4ee0eeb7063a530cacfbd43fcc1ff4ff7434bdc
from random import randint def reverseDir(d): return (d << 2 | d >> 2) & 15 def generateMaze(w, h, sp): print( "Generating " + str(w) + " by " + str(h) + " maze at starting point [" + str(sp[0]) + ", " + str(sp[1]) + "].") maze = [[0 for y in range(h)] for x in range(w)] spacesLeft = w * h stack = [] stack.append([sp[0], sp[1]]) i = 0 while len(stack) > 0: cursor = stack[len(stack) - 1] x = cursor[0] y = cursor[1] # input("Stack size: "+str(len(stack))+" ["+str(x)+","+str(y)+"]") d = [1, 2, 4, 8] blocked = 1 while blocked: move = d[randint(0, len(d) - 1)] lx = x ly = y # Swap Move==N to Move&N if (move & 1): lx = lx + 1 elif (move & 2): ly = ly + 1 elif (move & 4): lx = lx - 1 elif (move & 8): ly = ly - 1 if (not (lx >= w or lx < 0 or ly >= h or ly < 0 or maze[lx][ly])): maze[x][y] += move x = lx y = ly maze[x][y] += reverseDir(move) stack.append([x, y]) spacesLeft -= 1 if (((((w * h) - spacesLeft) / (w * h)) * 100) % 5 == 0): print(str((((w * h) - spacesLeft) / (w * h)) * 100) + "% complete.") blocked = 0 else: if (len(d) == 1): stack.remove(cursor) blocked = 0 else: d.remove(move) print("Finished generating maze.") return maze m = generateMaze(10, 10, [1, 1]) print(m)
[ "from random import randint\n\ndef reverseDir(d):\n return (d << 2 | d >> 2) & 15\n\n\ndef generateMaze(w, h, sp):\n print(\n \"Generating \" + str(w) + \" by \" + str(h) + \" maze at starting point [\" + str(sp[0]) + \", \" + str(sp[1]) + \"].\")\n maze = [[0 for y in range(h)] for x in range(w)]\n spacesLeft = w * h\n stack = []\n stack.append([sp[0], sp[1]])\n i = 0\n while len(stack) > 0:\n cursor = stack[len(stack) - 1]\n x = cursor[0]\n y = cursor[1]\n # input(\"Stack size: \"+str(len(stack))+\" [\"+str(x)+\",\"+str(y)+\"]\")\n d = [1, 2, 4, 8]\n blocked = 1\n while blocked:\n move = d[randint(0, len(d) - 1)]\n lx = x\n ly = y\n # Swap Move==N to Move&N\n if (move & 1):\n lx = lx + 1\n elif (move & 2):\n ly = ly + 1\n elif (move & 4):\n lx = lx - 1\n elif (move & 8):\n ly = ly - 1\n if (not (lx >= w or lx < 0 or ly >= h or ly < 0 or maze[lx][ly])):\n maze[x][y] += move\n x = lx\n y = ly\n maze[x][y] += reverseDir(move)\n stack.append([x, y])\n spacesLeft -= 1\n if (((((w * h) - spacesLeft) / (w * h)) * 100) % 5 == 0):\n print(str((((w * h) - spacesLeft) / (w * h)) * 100) + \"% complete.\")\n blocked = 0\n else:\n if (len(d) == 1):\n stack.remove(cursor)\n blocked = 0\n else:\n d.remove(move)\n print(\"Finished generating maze.\")\n return maze\n\nm = generateMaze(10, 10, [1, 1])\n\nprint(m)\n", "from random import randint\n\n\ndef reverseDir(d):\n return (d << 2 | d >> 2) & 15\n\n\ndef generateMaze(w, h, sp):\n print('Generating ' + str(w) + ' by ' + str(h) +\n ' maze at starting point [' + str(sp[0]) + ', ' + str(sp[1]) + '].')\n maze = [[(0) for y in range(h)] for x in range(w)]\n spacesLeft = w * h\n stack = []\n stack.append([sp[0], sp[1]])\n i = 0\n while len(stack) > 0:\n cursor = stack[len(stack) - 1]\n x = cursor[0]\n y = cursor[1]\n d = [1, 2, 4, 8]\n blocked = 1\n while blocked:\n move = d[randint(0, len(d) - 1)]\n lx = x\n ly = y\n if move & 1:\n lx = lx + 1\n elif move & 2:\n ly = ly + 1\n elif move & 4:\n lx = lx - 1\n elif move & 8:\n ly = ly - 1\n if not (lx >= w or lx < 0 or ly >= h or ly < 0 or maze[lx][ly]):\n maze[x][y] += move\n x = lx\n y = ly\n maze[x][y] += reverseDir(move)\n stack.append([x, y])\n spacesLeft -= 1\n if (w * h - spacesLeft) / (w * h) * 100 % 5 == 0:\n print(str((w * h - spacesLeft) / (w * h) * 100) +\n '% complete.')\n blocked = 0\n elif len(d) == 1:\n stack.remove(cursor)\n blocked = 0\n else:\n d.remove(move)\n print('Finished generating maze.')\n return maze\n\n\nm = generateMaze(10, 10, [1, 1])\nprint(m)\n", "<import token>\n\n\ndef reverseDir(d):\n return (d << 2 | d >> 2) & 15\n\n\ndef generateMaze(w, h, sp):\n print('Generating ' + str(w) + ' by ' + str(h) +\n ' maze at starting point [' + str(sp[0]) + ', ' + str(sp[1]) + '].')\n maze = [[(0) for y in range(h)] for x in range(w)]\n spacesLeft = w * h\n stack = []\n stack.append([sp[0], sp[1]])\n i = 0\n while len(stack) > 0:\n cursor = stack[len(stack) - 1]\n x = cursor[0]\n y = cursor[1]\n d = [1, 2, 4, 8]\n blocked = 1\n while blocked:\n move = d[randint(0, len(d) - 1)]\n lx = x\n ly = y\n if move & 1:\n lx = lx + 1\n elif move & 2:\n ly = ly + 1\n elif move & 4:\n lx = lx - 1\n elif move & 8:\n ly = ly - 1\n if not (lx >= w or lx < 0 or ly >= h or ly < 0 or maze[lx][ly]):\n maze[x][y] += move\n x = lx\n y = ly\n maze[x][y] += reverseDir(move)\n stack.append([x, y])\n spacesLeft -= 1\n if (w * h - spacesLeft) / (w * h) * 100 % 5 == 0:\n print(str((w * h - spacesLeft) / (w * h) * 100) +\n '% complete.')\n blocked = 0\n elif len(d) == 1:\n stack.remove(cursor)\n blocked = 0\n else:\n d.remove(move)\n print('Finished generating maze.')\n return maze\n\n\nm = generateMaze(10, 10, [1, 1])\nprint(m)\n", "<import token>\n\n\ndef reverseDir(d):\n return (d << 2 | d >> 2) & 15\n\n\ndef generateMaze(w, h, sp):\n print('Generating ' + str(w) + ' by ' + str(h) +\n ' maze at starting point [' + str(sp[0]) + ', ' + str(sp[1]) + '].')\n maze = [[(0) for y in range(h)] for x in range(w)]\n spacesLeft = w * h\n stack = []\n stack.append([sp[0], sp[1]])\n i = 0\n while len(stack) > 0:\n cursor = stack[len(stack) - 1]\n x = cursor[0]\n y = cursor[1]\n d = [1, 2, 4, 8]\n blocked = 1\n while blocked:\n move = d[randint(0, len(d) - 1)]\n lx = x\n ly = y\n if move & 1:\n lx = lx + 1\n elif move & 2:\n ly = ly + 1\n elif move & 4:\n lx = lx - 1\n elif move & 8:\n ly = ly - 1\n if not (lx >= w or lx < 0 or ly >= h or ly < 0 or maze[lx][ly]):\n maze[x][y] += move\n x = lx\n y = ly\n maze[x][y] += reverseDir(move)\n stack.append([x, y])\n spacesLeft -= 1\n if (w * h - spacesLeft) / (w * h) * 100 % 5 == 0:\n print(str((w * h - spacesLeft) / (w * h) * 100) +\n '% complete.')\n blocked = 0\n elif len(d) == 1:\n stack.remove(cursor)\n blocked = 0\n else:\n d.remove(move)\n print('Finished generating maze.')\n return maze\n\n\n<assignment token>\nprint(m)\n", "<import token>\n\n\ndef reverseDir(d):\n return (d << 2 | d >> 2) & 15\n\n\ndef generateMaze(w, h, sp):\n print('Generating ' + str(w) + ' by ' + str(h) +\n ' maze at starting point [' + str(sp[0]) + ', ' + str(sp[1]) + '].')\n maze = [[(0) for y in range(h)] for x in range(w)]\n spacesLeft = w * h\n stack = []\n stack.append([sp[0], sp[1]])\n i = 0\n while len(stack) > 0:\n cursor = stack[len(stack) - 1]\n x = cursor[0]\n y = cursor[1]\n d = [1, 2, 4, 8]\n blocked = 1\n while blocked:\n move = d[randint(0, len(d) - 1)]\n lx = x\n ly = y\n if move & 1:\n lx = lx + 1\n elif move & 2:\n ly = ly + 1\n elif move & 4:\n lx = lx - 1\n elif move & 8:\n ly = ly - 1\n if not (lx >= w or lx < 0 or ly >= h or ly < 0 or maze[lx][ly]):\n maze[x][y] += move\n x = lx\n y = ly\n maze[x][y] += reverseDir(move)\n stack.append([x, y])\n spacesLeft -= 1\n if (w * h - spacesLeft) / (w * h) * 100 % 5 == 0:\n print(str((w * h - spacesLeft) / (w * h) * 100) +\n '% complete.')\n blocked = 0\n elif len(d) == 1:\n stack.remove(cursor)\n blocked = 0\n else:\n d.remove(move)\n print('Finished generating maze.')\n return maze\n\n\n<assignment token>\n<code token>\n", "<import token>\n\n\ndef reverseDir(d):\n return (d << 2 | d >> 2) & 15\n\n\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n" ]
false
98,960
3a1ff99fde49a94386c8f060e00a177aabc7cf8c
""" Author: Abderrahim AMANAR """ import sys from PyQt5.QtGui import QIcon, QPixmap from PyQt5.QtWidgets import * # the function is intended to retrieve caption of the clicked button, # the button object should be passed as an argument. # This is achieved by the use of lambda function. class Form(QDialog): def __init__(self, parent=None): super(Form, self).__init__(parent) layout = QVBoxLayout() self.b1 = QPushButton("Button1") self.b1.setCheckable(True) self.b1.toggle() self.b1.clicked.connect(lambda: self.whichbtn(self.b1)) self.b1.clicked.connect(self.btnstate) layout.addWidget(self.b1) self.b2 = QPushButton() self.b2.setIcon(QIcon(QPixmap("python.png"))) self.b2.clicked.connect(lambda: self.whichbtn(self.b2)) layout.addWidget(self.b2) self.setLayout(layout) self.b3 = QPushButton("Disabled") self.b3.setEnabled(False) layout.addWidget(self.b3) self.b4 = QPushButton("&Default") self.b4.setDefault(True) self.b4.clicked.connect(lambda: self.whichbtn(self.b4)) layout.addWidget(self.b4) self.setWindowTitle("Button demo") def btnstate(self): if self.b1.isChecked(): print("button pressed") else: print("button released") def whichbtn(self, b): print("clicked button is " + b.text()) def main(): app = QApplication(sys.argv) ex = Form() ex.show() sys.exit(app.exec_()) if __name__ == '__main__': main()
[ "\"\"\"\nAuthor: Abderrahim AMANAR\n\"\"\"\n\nimport sys\nfrom PyQt5.QtGui import QIcon, QPixmap\nfrom PyQt5.QtWidgets import *\n\n\n# the function is intended to retrieve caption of the clicked button,\n# the button object should be passed as an argument.\n# This is achieved by the use of lambda function.\n\n\nclass Form(QDialog):\n def __init__(self, parent=None):\n super(Form, self).__init__(parent)\n\n layout = QVBoxLayout()\n self.b1 = QPushButton(\"Button1\")\n self.b1.setCheckable(True)\n self.b1.toggle()\n self.b1.clicked.connect(lambda: self.whichbtn(self.b1))\n self.b1.clicked.connect(self.btnstate)\n\n layout.addWidget(self.b1)\n\n self.b2 = QPushButton()\n self.b2.setIcon(QIcon(QPixmap(\"python.png\")))\n self.b2.clicked.connect(lambda: self.whichbtn(self.b2))\n layout.addWidget(self.b2)\n\n self.setLayout(layout)\n\n self.b3 = QPushButton(\"Disabled\")\n self.b3.setEnabled(False)\n layout.addWidget(self.b3)\n\n self.b4 = QPushButton(\"&Default\")\n self.b4.setDefault(True)\n self.b4.clicked.connect(lambda: self.whichbtn(self.b4))\n layout.addWidget(self.b4)\n\n self.setWindowTitle(\"Button demo\")\n\n def btnstate(self):\n if self.b1.isChecked():\n print(\"button pressed\")\n\n else:\n print(\"button released\")\n\n def whichbtn(self, b):\n print(\"clicked button is \" + b.text())\n\n\ndef main():\n app = QApplication(sys.argv)\n ex = Form()\n ex.show()\n sys.exit(app.exec_())\n\n\nif __name__ == '__main__':\n main()\n", "<docstring token>\nimport sys\nfrom PyQt5.QtGui import QIcon, QPixmap\nfrom PyQt5.QtWidgets import *\n\n\nclass Form(QDialog):\n\n def __init__(self, parent=None):\n super(Form, self).__init__(parent)\n layout = QVBoxLayout()\n self.b1 = QPushButton('Button1')\n self.b1.setCheckable(True)\n self.b1.toggle()\n self.b1.clicked.connect(lambda : self.whichbtn(self.b1))\n self.b1.clicked.connect(self.btnstate)\n layout.addWidget(self.b1)\n self.b2 = QPushButton()\n self.b2.setIcon(QIcon(QPixmap('python.png')))\n self.b2.clicked.connect(lambda : self.whichbtn(self.b2))\n layout.addWidget(self.b2)\n self.setLayout(layout)\n self.b3 = QPushButton('Disabled')\n self.b3.setEnabled(False)\n layout.addWidget(self.b3)\n self.b4 = QPushButton('&Default')\n self.b4.setDefault(True)\n self.b4.clicked.connect(lambda : self.whichbtn(self.b4))\n layout.addWidget(self.b4)\n self.setWindowTitle('Button demo')\n\n def btnstate(self):\n if self.b1.isChecked():\n print('button pressed')\n else:\n print('button released')\n\n def whichbtn(self, b):\n print('clicked button is ' + b.text())\n\n\ndef main():\n app = QApplication(sys.argv)\n ex = Form()\n ex.show()\n sys.exit(app.exec_())\n\n\nif __name__ == '__main__':\n main()\n", "<docstring token>\n<import token>\n\n\nclass Form(QDialog):\n\n def __init__(self, parent=None):\n super(Form, self).__init__(parent)\n layout = QVBoxLayout()\n self.b1 = QPushButton('Button1')\n self.b1.setCheckable(True)\n self.b1.toggle()\n self.b1.clicked.connect(lambda : self.whichbtn(self.b1))\n self.b1.clicked.connect(self.btnstate)\n layout.addWidget(self.b1)\n self.b2 = QPushButton()\n self.b2.setIcon(QIcon(QPixmap('python.png')))\n self.b2.clicked.connect(lambda : self.whichbtn(self.b2))\n layout.addWidget(self.b2)\n self.setLayout(layout)\n self.b3 = QPushButton('Disabled')\n self.b3.setEnabled(False)\n layout.addWidget(self.b3)\n self.b4 = QPushButton('&Default')\n self.b4.setDefault(True)\n self.b4.clicked.connect(lambda : self.whichbtn(self.b4))\n layout.addWidget(self.b4)\n self.setWindowTitle('Button demo')\n\n def btnstate(self):\n if self.b1.isChecked():\n print('button pressed')\n else:\n print('button released')\n\n def whichbtn(self, b):\n print('clicked button is ' + b.text())\n\n\ndef main():\n app = QApplication(sys.argv)\n ex = Form()\n ex.show()\n sys.exit(app.exec_())\n\n\nif __name__ == '__main__':\n main()\n", "<docstring token>\n<import token>\n\n\nclass Form(QDialog):\n\n def __init__(self, parent=None):\n super(Form, self).__init__(parent)\n layout = QVBoxLayout()\n self.b1 = QPushButton('Button1')\n self.b1.setCheckable(True)\n self.b1.toggle()\n self.b1.clicked.connect(lambda : self.whichbtn(self.b1))\n self.b1.clicked.connect(self.btnstate)\n layout.addWidget(self.b1)\n self.b2 = QPushButton()\n self.b2.setIcon(QIcon(QPixmap('python.png')))\n self.b2.clicked.connect(lambda : self.whichbtn(self.b2))\n layout.addWidget(self.b2)\n self.setLayout(layout)\n self.b3 = QPushButton('Disabled')\n self.b3.setEnabled(False)\n layout.addWidget(self.b3)\n self.b4 = QPushButton('&Default')\n self.b4.setDefault(True)\n self.b4.clicked.connect(lambda : self.whichbtn(self.b4))\n layout.addWidget(self.b4)\n self.setWindowTitle('Button demo')\n\n def btnstate(self):\n if self.b1.isChecked():\n print('button pressed')\n else:\n print('button released')\n\n def whichbtn(self, b):\n print('clicked button is ' + b.text())\n\n\ndef main():\n app = QApplication(sys.argv)\n ex = Form()\n ex.show()\n sys.exit(app.exec_())\n\n\n<code token>\n", "<docstring token>\n<import token>\n\n\nclass Form(QDialog):\n\n def __init__(self, parent=None):\n super(Form, self).__init__(parent)\n layout = QVBoxLayout()\n self.b1 = QPushButton('Button1')\n self.b1.setCheckable(True)\n self.b1.toggle()\n self.b1.clicked.connect(lambda : self.whichbtn(self.b1))\n self.b1.clicked.connect(self.btnstate)\n layout.addWidget(self.b1)\n self.b2 = QPushButton()\n self.b2.setIcon(QIcon(QPixmap('python.png')))\n self.b2.clicked.connect(lambda : self.whichbtn(self.b2))\n layout.addWidget(self.b2)\n self.setLayout(layout)\n self.b3 = QPushButton('Disabled')\n self.b3.setEnabled(False)\n layout.addWidget(self.b3)\n self.b4 = QPushButton('&Default')\n self.b4.setDefault(True)\n self.b4.clicked.connect(lambda : self.whichbtn(self.b4))\n layout.addWidget(self.b4)\n self.setWindowTitle('Button demo')\n\n def btnstate(self):\n if self.b1.isChecked():\n print('button pressed')\n else:\n print('button released')\n\n def whichbtn(self, b):\n print('clicked button is ' + b.text())\n\n\n<function token>\n<code token>\n", "<docstring token>\n<import token>\n\n\nclass Form(QDialog):\n\n def __init__(self, parent=None):\n super(Form, self).__init__(parent)\n layout = QVBoxLayout()\n self.b1 = QPushButton('Button1')\n self.b1.setCheckable(True)\n self.b1.toggle()\n self.b1.clicked.connect(lambda : self.whichbtn(self.b1))\n self.b1.clicked.connect(self.btnstate)\n layout.addWidget(self.b1)\n self.b2 = QPushButton()\n self.b2.setIcon(QIcon(QPixmap('python.png')))\n self.b2.clicked.connect(lambda : self.whichbtn(self.b2))\n layout.addWidget(self.b2)\n self.setLayout(layout)\n self.b3 = QPushButton('Disabled')\n self.b3.setEnabled(False)\n layout.addWidget(self.b3)\n self.b4 = QPushButton('&Default')\n self.b4.setDefault(True)\n self.b4.clicked.connect(lambda : self.whichbtn(self.b4))\n layout.addWidget(self.b4)\n self.setWindowTitle('Button demo')\n\n def btnstate(self):\n if self.b1.isChecked():\n print('button pressed')\n else:\n print('button released')\n <function token>\n\n\n<function token>\n<code token>\n", "<docstring token>\n<import token>\n\n\nclass Form(QDialog):\n\n def __init__(self, parent=None):\n super(Form, self).__init__(parent)\n layout = QVBoxLayout()\n self.b1 = QPushButton('Button1')\n self.b1.setCheckable(True)\n self.b1.toggle()\n self.b1.clicked.connect(lambda : self.whichbtn(self.b1))\n self.b1.clicked.connect(self.btnstate)\n layout.addWidget(self.b1)\n self.b2 = QPushButton()\n self.b2.setIcon(QIcon(QPixmap('python.png')))\n self.b2.clicked.connect(lambda : self.whichbtn(self.b2))\n layout.addWidget(self.b2)\n self.setLayout(layout)\n self.b3 = QPushButton('Disabled')\n self.b3.setEnabled(False)\n layout.addWidget(self.b3)\n self.b4 = QPushButton('&Default')\n self.b4.setDefault(True)\n self.b4.clicked.connect(lambda : self.whichbtn(self.b4))\n layout.addWidget(self.b4)\n self.setWindowTitle('Button demo')\n <function token>\n <function token>\n\n\n<function token>\n<code token>\n", "<docstring token>\n<import token>\n\n\nclass Form(QDialog):\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<code token>\n", "<docstring token>\n<import token>\n<class token>\n<function token>\n<code token>\n" ]
false
98,961
ffab21b1005c398e34b40af04cb8c4789e0cf513
# -*- coding: utf-8 -*- """ Created on Wed Jan 3 15:24:50 2018 @author: wuzw """ from nltk.corpus import inaugural from nltk.corpus import reuters import nltk inaugural.fileids() reuters.categories('test/16574') reuters.fileids('earn')[1000:1100] reuters.words('test/16574') [fileid[:4] for fileid in inaugural.fileids()] inaugural.fileids() file = [fileid[:4] for fileid in inaugural.fileids()] print(file) print(type(inaugural.fileids())) for fileid in inaugural.fileids(): print(fileid) for w in inaugural.words(fileid): print(w) cfd = nltk.ConditionalFreqDist( (target,fileid[:4]) for fileid in inaugural.fileids() for w in inaugural.words(fileid) for target in ['america','citizen'] if w.lower().startswith(target) ) cfd.plot() print(cfd.items()) from nltk.corpus import udhr udhr.fileids() ch = udhr.words('Chinese_Mandarin-GB2312') nltk.FreqDist(ch).plot() ch = nltk.ConditionalFreqDist( (lang,len(word)) for lang in (['Chinese_Mandarin-GB2312']) for word in udhr.words('Chinese_Mandarin-GB2312') ) ch.plot(cumulative = True) fdistch = nltk.FreqDist(ch) import operator d = sorted(fdistch.items(),key = operator.itemgetter(1),reverse=True) print(d)
[ "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Jan 3 15:24:50 2018\n\n@author: wuzw\n\"\"\"\n\nfrom nltk.corpus import inaugural\nfrom nltk.corpus import reuters\nimport nltk\n\ninaugural.fileids()\n\n\nreuters.categories('test/16574')\nreuters.fileids('earn')[1000:1100]\nreuters.words('test/16574')\n[fileid[:4] for fileid in inaugural.fileids()]\n\ninaugural.fileids()\n\nfile = [fileid[:4] for fileid in inaugural.fileids()]\n\nprint(file)\n\nprint(type(inaugural.fileids()))\n\nfor fileid in inaugural.fileids():\n print(fileid)\n for w in inaugural.words(fileid):\n print(w)\n\ncfd = nltk.ConditionalFreqDist(\n (target,fileid[:4])\n for fileid in inaugural.fileids()\n for w in inaugural.words(fileid)\n for target in ['america','citizen']\n if w.lower().startswith(target)\n )\ncfd.plot()\nprint(cfd.items())\n\nfrom nltk.corpus import udhr\nudhr.fileids()\nch = udhr.words('Chinese_Mandarin-GB2312')\n\nnltk.FreqDist(ch).plot()\n\nch = nltk.ConditionalFreqDist(\n (lang,len(word))\n for lang in (['Chinese_Mandarin-GB2312'])\n for word in udhr.words('Chinese_Mandarin-GB2312')\n )\n\nch.plot(cumulative = True)\n\nfdistch = nltk.FreqDist(ch)\n\nimport operator\nd = sorted(fdistch.items(),key = operator.itemgetter(1),reverse=True)\nprint(d)\n", "<docstring token>\nfrom nltk.corpus import inaugural\nfrom nltk.corpus import reuters\nimport nltk\ninaugural.fileids()\nreuters.categories('test/16574')\nreuters.fileids('earn')[1000:1100]\nreuters.words('test/16574')\n[fileid[:4] for fileid in inaugural.fileids()]\ninaugural.fileids()\nfile = [fileid[:4] for fileid in inaugural.fileids()]\nprint(file)\nprint(type(inaugural.fileids()))\nfor fileid in inaugural.fileids():\n print(fileid)\n for w in inaugural.words(fileid):\n print(w)\ncfd = nltk.ConditionalFreqDist((target, fileid[:4]) for fileid in inaugural\n .fileids() for w in inaugural.words(fileid) for target in ['america',\n 'citizen'] if w.lower().startswith(target))\ncfd.plot()\nprint(cfd.items())\nfrom nltk.corpus import udhr\nudhr.fileids()\nch = udhr.words('Chinese_Mandarin-GB2312')\nnltk.FreqDist(ch).plot()\nch = nltk.ConditionalFreqDist((lang, len(word)) for lang in [\n 'Chinese_Mandarin-GB2312'] for word in udhr.words(\n 'Chinese_Mandarin-GB2312'))\nch.plot(cumulative=True)\nfdistch = nltk.FreqDist(ch)\nimport operator\nd = sorted(fdistch.items(), key=operator.itemgetter(1), reverse=True)\nprint(d)\n", "<docstring token>\n<import token>\ninaugural.fileids()\nreuters.categories('test/16574')\nreuters.fileids('earn')[1000:1100]\nreuters.words('test/16574')\n[fileid[:4] for fileid in inaugural.fileids()]\ninaugural.fileids()\nfile = [fileid[:4] for fileid in inaugural.fileids()]\nprint(file)\nprint(type(inaugural.fileids()))\nfor fileid in inaugural.fileids():\n print(fileid)\n for w in inaugural.words(fileid):\n print(w)\ncfd = nltk.ConditionalFreqDist((target, fileid[:4]) for fileid in inaugural\n .fileids() for w in inaugural.words(fileid) for target in ['america',\n 'citizen'] if w.lower().startswith(target))\ncfd.plot()\nprint(cfd.items())\n<import token>\nudhr.fileids()\nch = udhr.words('Chinese_Mandarin-GB2312')\nnltk.FreqDist(ch).plot()\nch = nltk.ConditionalFreqDist((lang, len(word)) for lang in [\n 'Chinese_Mandarin-GB2312'] for word in udhr.words(\n 'Chinese_Mandarin-GB2312'))\nch.plot(cumulative=True)\nfdistch = nltk.FreqDist(ch)\n<import token>\nd = sorted(fdistch.items(), key=operator.itemgetter(1), reverse=True)\nprint(d)\n", "<docstring token>\n<import token>\ninaugural.fileids()\nreuters.categories('test/16574')\nreuters.fileids('earn')[1000:1100]\nreuters.words('test/16574')\n[fileid[:4] for fileid in inaugural.fileids()]\ninaugural.fileids()\n<assignment token>\nprint(file)\nprint(type(inaugural.fileids()))\nfor fileid in inaugural.fileids():\n print(fileid)\n for w in inaugural.words(fileid):\n print(w)\n<assignment token>\ncfd.plot()\nprint(cfd.items())\n<import token>\nudhr.fileids()\n<assignment token>\nnltk.FreqDist(ch).plot()\n<assignment token>\nch.plot(cumulative=True)\n<assignment token>\n<import token>\n<assignment token>\nprint(d)\n", "<docstring token>\n<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<import token>\n<assignment token>\n<code token>\n" ]
false
98,962
530d61a4376b21d105428b90d60e91656baed8f4
# Autor: [loopTree] VGasparini 🎈<[email protected]> # Nome: Fibonacci Fácil # Nível: 1 # Categoria: INICIANTE # URL: https://www.urionlinejudge.com.br/judge/pt/problems/view/1151 n = int(input()) a = [] for i in range(n): if(i==0 or i==1): a.append(i) else: a.append(a[i-2]+a[i-1]) for i in range(len(a)): if(i==(len(a)-1)): print(a[i]) else: print(a[i],"",end="")
[ "# Autor: [loopTree] VGasparini 🎈<[email protected]>\n# Nome: Fibonacci Fácil\n# Nível: 1\n# Categoria: INICIANTE\n# URL: https://www.urionlinejudge.com.br/judge/pt/problems/view/1151\n\nn = int(input())\r\na = []\r\nfor i in range(n):\r\n if(i==0 or i==1): a.append(i)\r\n else:\r\n a.append(a[i-2]+a[i-1])\r\nfor i in range(len(a)):\r\n\r\n if(i==(len(a)-1)): print(a[i])\r\n else: print(a[i],\"\",end=\"\")\r\n\n", "n = int(input())\na = []\nfor i in range(n):\n if i == 0 or i == 1:\n a.append(i)\n else:\n a.append(a[i - 2] + a[i - 1])\nfor i in range(len(a)):\n if i == len(a) - 1:\n print(a[i])\n else:\n print(a[i], '', end='')\n", "<assignment token>\nfor i in range(n):\n if i == 0 or i == 1:\n a.append(i)\n else:\n a.append(a[i - 2] + a[i - 1])\nfor i in range(len(a)):\n if i == len(a) - 1:\n print(a[i])\n else:\n print(a[i], '', end='')\n", "<assignment token>\n<code token>\n" ]
false
98,963
5a05471f19b071e6c5d9145b17508c9918aeca5c
import ecoblock_test.simulation as sim import matplotlib.pyplot as plt import pandas as pd import numpy as np NUMBER_OF_SIMULATIONS = 20 NUMBER_OF_SIMULATIONS_ID = 28 cost_record = [] flywheel_final_soc = [] def plot_hist(data): plt.figure() num_bins = 30 data.hist(bins=num_bins) plt.xlabel('Cost in $/day') plt.ylabel('Simulation results') plt.grid(True) plt.savefig('hist_cost.png') sim_id_list = [] sim_number_list = [] for sim_id in range(1, NUMBER_OF_SIMULATIONS_ID + 1): for sim_number in range(1, NUMBER_OF_SIMULATIONS + 1): print('sim_id:', sim_id, 'and sim_number:', sim_number) sim_id_list.append(sim_id) sim_number_list.append(sim_number) system = sim.System(sim_number, sim_id) system.load_data() system.run_simulation() cost_record.append(system.get_cost()) flywheel_final_soc.append(np.sum(system.flywheel.soc_record)) print('Is at cost:', system.get_cost()) system.plot_results() file_name = 'normal' + str(sim_number) + '-' + str(sim_id) + '.png' plt.savefig(file_name) data_result = pd.DataFrame(sim_id_list, columns=['sim_id']) data_result['sim_num'] = sim_number_list data_result['cost'] = cost_record data_result['flywheel_final_soc'] = flywheel_final_soc data_result.to_csv('data_result.csv') cost_record_df = pd.DataFrame(cost_record, columns=['cost']) plot_hist(cost_record_df['cost'])
[ "import ecoblock_test.simulation as sim\nimport matplotlib.pyplot as plt\nimport pandas as pd\nimport numpy as np\n\nNUMBER_OF_SIMULATIONS = 20\nNUMBER_OF_SIMULATIONS_ID = 28\n\ncost_record = []\nflywheel_final_soc = []\n\ndef plot_hist(data):\n plt.figure()\n num_bins = 30\n data.hist(bins=num_bins)\n plt.xlabel('Cost in $/day')\n plt.ylabel('Simulation results')\n plt.grid(True)\n plt.savefig('hist_cost.png')\n\nsim_id_list = []\nsim_number_list = []\n\nfor sim_id in range(1, NUMBER_OF_SIMULATIONS_ID + 1):\n for sim_number in range(1, NUMBER_OF_SIMULATIONS + 1):\n print('sim_id:', sim_id, 'and sim_number:', sim_number)\n sim_id_list.append(sim_id)\n sim_number_list.append(sim_number)\n system = sim.System(sim_number, sim_id)\n system.load_data()\n system.run_simulation()\n cost_record.append(system.get_cost())\n flywheel_final_soc.append(np.sum(system.flywheel.soc_record))\n\n print('Is at cost:', system.get_cost())\n system.plot_results()\n file_name = 'normal' + str(sim_number) + '-' + str(sim_id) + '.png'\n plt.savefig(file_name)\n\ndata_result = pd.DataFrame(sim_id_list, columns=['sim_id'])\ndata_result['sim_num'] = sim_number_list\ndata_result['cost'] = cost_record\ndata_result['flywheel_final_soc'] = flywheel_final_soc\ndata_result.to_csv('data_result.csv')\ncost_record_df = pd.DataFrame(cost_record, columns=['cost'])\nplot_hist(cost_record_df['cost'])\n", "import ecoblock_test.simulation as sim\nimport matplotlib.pyplot as plt\nimport pandas as pd\nimport numpy as np\nNUMBER_OF_SIMULATIONS = 20\nNUMBER_OF_SIMULATIONS_ID = 28\ncost_record = []\nflywheel_final_soc = []\n\n\ndef plot_hist(data):\n plt.figure()\n num_bins = 30\n data.hist(bins=num_bins)\n plt.xlabel('Cost in $/day')\n plt.ylabel('Simulation results')\n plt.grid(True)\n plt.savefig('hist_cost.png')\n\n\nsim_id_list = []\nsim_number_list = []\nfor sim_id in range(1, NUMBER_OF_SIMULATIONS_ID + 1):\n for sim_number in range(1, NUMBER_OF_SIMULATIONS + 1):\n print('sim_id:', sim_id, 'and sim_number:', sim_number)\n sim_id_list.append(sim_id)\n sim_number_list.append(sim_number)\n system = sim.System(sim_number, sim_id)\n system.load_data()\n system.run_simulation()\n cost_record.append(system.get_cost())\n flywheel_final_soc.append(np.sum(system.flywheel.soc_record))\n print('Is at cost:', system.get_cost())\n system.plot_results()\n file_name = 'normal' + str(sim_number) + '-' + str(sim_id) + '.png'\n plt.savefig(file_name)\ndata_result = pd.DataFrame(sim_id_list, columns=['sim_id'])\ndata_result['sim_num'] = sim_number_list\ndata_result['cost'] = cost_record\ndata_result['flywheel_final_soc'] = flywheel_final_soc\ndata_result.to_csv('data_result.csv')\ncost_record_df = pd.DataFrame(cost_record, columns=['cost'])\nplot_hist(cost_record_df['cost'])\n", "<import token>\nNUMBER_OF_SIMULATIONS = 20\nNUMBER_OF_SIMULATIONS_ID = 28\ncost_record = []\nflywheel_final_soc = []\n\n\ndef plot_hist(data):\n plt.figure()\n num_bins = 30\n data.hist(bins=num_bins)\n plt.xlabel('Cost in $/day')\n plt.ylabel('Simulation results')\n plt.grid(True)\n plt.savefig('hist_cost.png')\n\n\nsim_id_list = []\nsim_number_list = []\nfor sim_id in range(1, NUMBER_OF_SIMULATIONS_ID + 1):\n for sim_number in range(1, NUMBER_OF_SIMULATIONS + 1):\n print('sim_id:', sim_id, 'and sim_number:', sim_number)\n sim_id_list.append(sim_id)\n sim_number_list.append(sim_number)\n system = sim.System(sim_number, sim_id)\n system.load_data()\n system.run_simulation()\n cost_record.append(system.get_cost())\n flywheel_final_soc.append(np.sum(system.flywheel.soc_record))\n print('Is at cost:', system.get_cost())\n system.plot_results()\n file_name = 'normal' + str(sim_number) + '-' + str(sim_id) + '.png'\n plt.savefig(file_name)\ndata_result = pd.DataFrame(sim_id_list, columns=['sim_id'])\ndata_result['sim_num'] = sim_number_list\ndata_result['cost'] = cost_record\ndata_result['flywheel_final_soc'] = flywheel_final_soc\ndata_result.to_csv('data_result.csv')\ncost_record_df = pd.DataFrame(cost_record, columns=['cost'])\nplot_hist(cost_record_df['cost'])\n", "<import token>\n<assignment token>\n\n\ndef plot_hist(data):\n plt.figure()\n num_bins = 30\n data.hist(bins=num_bins)\n plt.xlabel('Cost in $/day')\n plt.ylabel('Simulation results')\n plt.grid(True)\n plt.savefig('hist_cost.png')\n\n\n<assignment token>\nfor sim_id in range(1, NUMBER_OF_SIMULATIONS_ID + 1):\n for sim_number in range(1, NUMBER_OF_SIMULATIONS + 1):\n print('sim_id:', sim_id, 'and sim_number:', sim_number)\n sim_id_list.append(sim_id)\n sim_number_list.append(sim_number)\n system = sim.System(sim_number, sim_id)\n system.load_data()\n system.run_simulation()\n cost_record.append(system.get_cost())\n flywheel_final_soc.append(np.sum(system.flywheel.soc_record))\n print('Is at cost:', system.get_cost())\n system.plot_results()\n file_name = 'normal' + str(sim_number) + '-' + str(sim_id) + '.png'\n plt.savefig(file_name)\n<assignment token>\ndata_result.to_csv('data_result.csv')\n<assignment token>\nplot_hist(cost_record_df['cost'])\n", "<import token>\n<assignment token>\n\n\ndef plot_hist(data):\n plt.figure()\n num_bins = 30\n data.hist(bins=num_bins)\n plt.xlabel('Cost in $/day')\n plt.ylabel('Simulation results')\n plt.grid(True)\n plt.savefig('hist_cost.png')\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n<assignment token>\n<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
98,964
f60d099009c49bf28ee17a520f069171dd713f09
import numpy as np import matplotlib.pyplot as plt from assignment1.cs231n.data_utils import load_CIFAR10 from assignment1.cs231n.gradient_check import eval_numerical_gradient from assignment1.cs231n.vis_utils import visualize_grid from assignment1.cs231n.classifiers.neural_net import TwoLayerNet plt.rcParams["figure.figsize"] = (10.0, 8.0) plt.rcParams["image.interpolation"] = "nearest" plt.rcParams["image.cmap"] = "gray" def rel_error(x, y): """ returns relative error """ return np.max(np.abs(x - y) / (np.maximum(1e-8, np.abs(x) + np.abs(y)))) def show_net_weights(net): W1 = net.params["W1"] W1 = W1.reshape(32, 32, 3, -1).transpose(3, 0, 1, 2) plt.imshow(visualize_grid(W1, padding=3).astype("uint8")) plt.gca().axis("off") plt.show() def visualize_loss(stats): plt.subplot(2, 1, 1) plt.plot(stats["loss_history"]) plt.title("Loss history") plt.xlabel("Iteration") plt.ylabel("Loss") plt.subplot(2, 1, 2) plt.plot(stats["train_acc_history"], label="train") plt.plot(stats["val_acc_history"], label="val") plt.title("Classification accuracy history") plt.xlabel("Epoch") plt.ylabel("Clasification accuracy") plt.legend(loc="upper right") plt.show() def experiment(): print() print("*****************start experiment*****************") def init_toy_model(): np.random.seed(0) return TwoLayerNet(input_size, hidden_size, num_classes, std=1e-1) def init_toy_data(): np.random.seed(1) X = 10 * np.random.randn(num_inputs, input_size) y = np.array([0, 1, 2, 2, 1]) return X, y input_size = 4 hidden_size = 10 num_classes = 3 num_inputs = 5 net = init_toy_model() X, y = init_toy_data() loss, grads = net.loss(X, y, reg=0.05) # should be very small, we get < 1e-12 correct_loss = 1.30378789133 print("Difference between your loss and correct loss: {}".format(np.sum(np.abs(loss - correct_loss)))) print() # these should all be less than 1e-8 or so for param_name in grads: f = lambda W: net.loss(X, y, reg=0.05)[0] param_grad_num = eval_numerical_gradient(f, net.params[param_name], verbose=False) print("%s max relative error: %e" % (param_name, rel_error(param_grad_num, grads[param_name]))) print() net = init_toy_model() stats = net.train(X, y, X, y, learning_rate=1e-1, reg=5e-6, num_iters=100, verbose=False) print("Final training loss: ", stats["loss_history"][-1]) # plot the loss history plt.plot(stats["loss_history"]) plt.xlabel("iteration") plt.ylabel("training loss") plt.title("Training Loss history") plt.show() def read_cifar_data(cifar10_dir): num_training = 49000 num_validation = 1000 num_test = 1000 X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir) mask = range(num_training, num_training + num_validation) X_val = X_train[mask] y_val = y_train[mask] mask = range(num_training) X_train = X_train[mask] y_train = y_train[mask] mask = range(num_test) X_test = X_test[mask] y_test = y_test[mask] return X_train, y_train, X_val, y_val, X_test, y_test def preprocess(X_train, X_val, X_test): X_train = np.reshape(X_train, (X_train.shape[0], -1)) X_val = np.reshape(X_val, (X_val.shape[0], -1)) X_test = np.reshape(X_test, (X_test.shape[0], -1)) mean_image = np.mean(X_train, axis=0) X_train -= mean_image X_val -= mean_image X_test -= mean_image return X_train, X_val, X_test def tuning_hyperparameters(X_train, y_train, X_val, y_val): print() print("*****************start tuning hyperparameters*****************") input_size = 32 * 32 * 3 num_classes = 10 hidden_sizes = [200, 300] regularization_strengths = [0.025, 0.1] best_val = -1 best_net = None best_stats = None for reg in regularization_strengths: for hidden_size in hidden_sizes: net = TwoLayerNet(input_size, hidden_size, num_classes) stats = net.train(X_train, y_train, X_val, y_val, num_iters=3000, batch_size=100, learning_rate=1e-3, learning_rate_decay=0.95, reg=reg, verbose=False) val_acc = (net.predict(X_val) == y_val).mean() print("When reg is {}, h_size is {}, Validation accuracy: {}" .format(reg, hidden_size, val_acc)) if val_acc > best_val: best_val = val_acc best_net = net best_stats = stats return best_net, best_stats if __name__ == "__main__": X_train, y_train, X_val, y_val, X_test, y_test = read_cifar_data("cs231n/datasets/cifar-10-batches-py") X_train, X_val, X_test = preprocess(X_train, X_val, X_test) best_net, best_stats = tuning_hyperparameters(X_train, y_train, X_val, y_val) visualize_loss(best_stats) show_net_weights(best_net) test_acc = (best_net.predict(X_test) == y_test).mean() print("Test accuract: {}".format(test_acc))
[ "import numpy as np\nimport matplotlib.pyplot as plt\n\nfrom assignment1.cs231n.data_utils import load_CIFAR10\nfrom assignment1.cs231n.gradient_check import eval_numerical_gradient\nfrom assignment1.cs231n.vis_utils import visualize_grid\nfrom assignment1.cs231n.classifiers.neural_net import TwoLayerNet\n\nplt.rcParams[\"figure.figsize\"] = (10.0, 8.0)\nplt.rcParams[\"image.interpolation\"] = \"nearest\"\nplt.rcParams[\"image.cmap\"] = \"gray\"\n\n\ndef rel_error(x, y):\n \"\"\" returns relative error \"\"\"\n return np.max(np.abs(x - y) / (np.maximum(1e-8, np.abs(x) + np.abs(y))))\n\n\ndef show_net_weights(net):\n W1 = net.params[\"W1\"]\n W1 = W1.reshape(32, 32, 3, -1).transpose(3, 0, 1, 2)\n plt.imshow(visualize_grid(W1, padding=3).astype(\"uint8\"))\n plt.gca().axis(\"off\")\n plt.show()\n\n\ndef visualize_loss(stats):\n plt.subplot(2, 1, 1)\n plt.plot(stats[\"loss_history\"])\n plt.title(\"Loss history\")\n plt.xlabel(\"Iteration\")\n plt.ylabel(\"Loss\")\n\n plt.subplot(2, 1, 2)\n plt.plot(stats[\"train_acc_history\"], label=\"train\")\n plt.plot(stats[\"val_acc_history\"], label=\"val\")\n plt.title(\"Classification accuracy history\")\n plt.xlabel(\"Epoch\")\n plt.ylabel(\"Clasification accuracy\")\n plt.legend(loc=\"upper right\")\n plt.show()\n\n\ndef experiment():\n print()\n print(\"*****************start experiment*****************\")\n\n def init_toy_model():\n np.random.seed(0)\n return TwoLayerNet(input_size, hidden_size, num_classes, std=1e-1)\n\n def init_toy_data():\n np.random.seed(1)\n X = 10 * np.random.randn(num_inputs, input_size)\n y = np.array([0, 1, 2, 2, 1])\n return X, y\n\n input_size = 4\n hidden_size = 10\n num_classes = 3\n num_inputs = 5\n\n net = init_toy_model()\n X, y = init_toy_data()\n\n loss, grads = net.loss(X, y, reg=0.05)\n\n # should be very small, we get < 1e-12\n correct_loss = 1.30378789133\n print(\"Difference between your loss and correct loss: {}\".format(np.sum(np.abs(loss - correct_loss))))\n print()\n\n # these should all be less than 1e-8 or so\n for param_name in grads:\n f = lambda W: net.loss(X, y, reg=0.05)[0]\n param_grad_num = eval_numerical_gradient(f, net.params[param_name], verbose=False)\n print(\"%s max relative error: %e\" % (param_name, rel_error(param_grad_num, grads[param_name])))\n print()\n\n net = init_toy_model()\n stats = net.train(X, y, X, y,\n learning_rate=1e-1, reg=5e-6,\n num_iters=100, verbose=False)\n\n print(\"Final training loss: \", stats[\"loss_history\"][-1])\n\n # plot the loss history\n plt.plot(stats[\"loss_history\"])\n plt.xlabel(\"iteration\")\n plt.ylabel(\"training loss\")\n plt.title(\"Training Loss history\")\n plt.show()\n\n\ndef read_cifar_data(cifar10_dir):\n num_training = 49000\n num_validation = 1000\n num_test = 1000\n\n X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)\n\n mask = range(num_training, num_training + num_validation)\n X_val = X_train[mask]\n y_val = y_train[mask]\n\n mask = range(num_training)\n X_train = X_train[mask]\n y_train = y_train[mask]\n\n mask = range(num_test)\n X_test = X_test[mask]\n y_test = y_test[mask]\n\n return X_train, y_train, X_val, y_val, X_test, y_test\n\n\ndef preprocess(X_train, X_val, X_test):\n X_train = np.reshape(X_train, (X_train.shape[0], -1))\n X_val = np.reshape(X_val, (X_val.shape[0], -1))\n X_test = np.reshape(X_test, (X_test.shape[0], -1))\n\n mean_image = np.mean(X_train, axis=0)\n\n X_train -= mean_image\n X_val -= mean_image\n X_test -= mean_image\n\n return X_train, X_val, X_test\n\n\ndef tuning_hyperparameters(X_train, y_train, X_val, y_val):\n print()\n print(\"*****************start tuning hyperparameters*****************\")\n\n input_size = 32 * 32 * 3\n num_classes = 10\n\n hidden_sizes = [200, 300]\n regularization_strengths = [0.025, 0.1]\n\n best_val = -1\n best_net = None\n best_stats = None\n\n for reg in regularization_strengths:\n for hidden_size in hidden_sizes:\n net = TwoLayerNet(input_size, hidden_size, num_classes)\n stats = net.train(X_train, y_train, X_val, y_val,\n num_iters=3000, batch_size=100,\n learning_rate=1e-3, learning_rate_decay=0.95,\n reg=reg, verbose=False)\n val_acc = (net.predict(X_val) == y_val).mean()\n print(\"When reg is {}, h_size is {}, Validation accuracy: {}\"\n .format(reg, hidden_size, val_acc))\n if val_acc > best_val:\n best_val = val_acc\n best_net = net\n best_stats = stats\n\n return best_net, best_stats\n\n\n\nif __name__ == \"__main__\":\n X_train, y_train, X_val, y_val, X_test, y_test = read_cifar_data(\"cs231n/datasets/cifar-10-batches-py\")\n X_train, X_val, X_test = preprocess(X_train, X_val, X_test)\n\n best_net, best_stats = tuning_hyperparameters(X_train, y_train, X_val, y_val)\n visualize_loss(best_stats)\n show_net_weights(best_net)\n\n test_acc = (best_net.predict(X_test) == y_test).mean()\n print(\"Test accuract: {}\".format(test_acc))\n", "import numpy as np\nimport matplotlib.pyplot as plt\nfrom assignment1.cs231n.data_utils import load_CIFAR10\nfrom assignment1.cs231n.gradient_check import eval_numerical_gradient\nfrom assignment1.cs231n.vis_utils import visualize_grid\nfrom assignment1.cs231n.classifiers.neural_net import TwoLayerNet\nplt.rcParams['figure.figsize'] = 10.0, 8.0\nplt.rcParams['image.interpolation'] = 'nearest'\nplt.rcParams['image.cmap'] = 'gray'\n\n\ndef rel_error(x, y):\n \"\"\" returns relative error \"\"\"\n return np.max(np.abs(x - y) / np.maximum(1e-08, np.abs(x) + np.abs(y)))\n\n\ndef show_net_weights(net):\n W1 = net.params['W1']\n W1 = W1.reshape(32, 32, 3, -1).transpose(3, 0, 1, 2)\n plt.imshow(visualize_grid(W1, padding=3).astype('uint8'))\n plt.gca().axis('off')\n plt.show()\n\n\ndef visualize_loss(stats):\n plt.subplot(2, 1, 1)\n plt.plot(stats['loss_history'])\n plt.title('Loss history')\n plt.xlabel('Iteration')\n plt.ylabel('Loss')\n plt.subplot(2, 1, 2)\n plt.plot(stats['train_acc_history'], label='train')\n plt.plot(stats['val_acc_history'], label='val')\n plt.title('Classification accuracy history')\n plt.xlabel('Epoch')\n plt.ylabel('Clasification accuracy')\n plt.legend(loc='upper right')\n plt.show()\n\n\ndef experiment():\n print()\n print('*****************start experiment*****************')\n\n def init_toy_model():\n np.random.seed(0)\n return TwoLayerNet(input_size, hidden_size, num_classes, std=0.1)\n\n def init_toy_data():\n np.random.seed(1)\n X = 10 * np.random.randn(num_inputs, input_size)\n y = np.array([0, 1, 2, 2, 1])\n return X, y\n input_size = 4\n hidden_size = 10\n num_classes = 3\n num_inputs = 5\n net = init_toy_model()\n X, y = init_toy_data()\n loss, grads = net.loss(X, y, reg=0.05)\n correct_loss = 1.30378789133\n print('Difference between your loss and correct loss: {}'.format(np.sum\n (np.abs(loss - correct_loss))))\n print()\n for param_name in grads:\n f = lambda W: net.loss(X, y, reg=0.05)[0]\n param_grad_num = eval_numerical_gradient(f, net.params[param_name],\n verbose=False)\n print('%s max relative error: %e' % (param_name, rel_error(\n param_grad_num, grads[param_name])))\n print()\n net = init_toy_model()\n stats = net.train(X, y, X, y, learning_rate=0.1, reg=5e-06, num_iters=\n 100, verbose=False)\n print('Final training loss: ', stats['loss_history'][-1])\n plt.plot(stats['loss_history'])\n plt.xlabel('iteration')\n plt.ylabel('training loss')\n plt.title('Training Loss history')\n plt.show()\n\n\ndef read_cifar_data(cifar10_dir):\n num_training = 49000\n num_validation = 1000\n num_test = 1000\n X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)\n mask = range(num_training, num_training + num_validation)\n X_val = X_train[mask]\n y_val = y_train[mask]\n mask = range(num_training)\n X_train = X_train[mask]\n y_train = y_train[mask]\n mask = range(num_test)\n X_test = X_test[mask]\n y_test = y_test[mask]\n return X_train, y_train, X_val, y_val, X_test, y_test\n\n\ndef preprocess(X_train, X_val, X_test):\n X_train = np.reshape(X_train, (X_train.shape[0], -1))\n X_val = np.reshape(X_val, (X_val.shape[0], -1))\n X_test = np.reshape(X_test, (X_test.shape[0], -1))\n mean_image = np.mean(X_train, axis=0)\n X_train -= mean_image\n X_val -= mean_image\n X_test -= mean_image\n return X_train, X_val, X_test\n\n\ndef tuning_hyperparameters(X_train, y_train, X_val, y_val):\n print()\n print('*****************start tuning hyperparameters*****************')\n input_size = 32 * 32 * 3\n num_classes = 10\n hidden_sizes = [200, 300]\n regularization_strengths = [0.025, 0.1]\n best_val = -1\n best_net = None\n best_stats = None\n for reg in regularization_strengths:\n for hidden_size in hidden_sizes:\n net = TwoLayerNet(input_size, hidden_size, num_classes)\n stats = net.train(X_train, y_train, X_val, y_val, num_iters=\n 3000, batch_size=100, learning_rate=0.001,\n learning_rate_decay=0.95, reg=reg, verbose=False)\n val_acc = (net.predict(X_val) == y_val).mean()\n print('When reg is {}, h_size is {}, Validation accuracy: {}'.\n format(reg, hidden_size, val_acc))\n if val_acc > best_val:\n best_val = val_acc\n best_net = net\n best_stats = stats\n return best_net, best_stats\n\n\nif __name__ == '__main__':\n X_train, y_train, X_val, y_val, X_test, y_test = read_cifar_data(\n 'cs231n/datasets/cifar-10-batches-py')\n X_train, X_val, X_test = preprocess(X_train, X_val, X_test)\n best_net, best_stats = tuning_hyperparameters(X_train, y_train, X_val,\n y_val)\n visualize_loss(best_stats)\n show_net_weights(best_net)\n test_acc = (best_net.predict(X_test) == y_test).mean()\n print('Test accuract: {}'.format(test_acc))\n", "<import token>\nplt.rcParams['figure.figsize'] = 10.0, 8.0\nplt.rcParams['image.interpolation'] = 'nearest'\nplt.rcParams['image.cmap'] = 'gray'\n\n\ndef rel_error(x, y):\n \"\"\" returns relative error \"\"\"\n return np.max(np.abs(x - y) / np.maximum(1e-08, np.abs(x) + np.abs(y)))\n\n\ndef show_net_weights(net):\n W1 = net.params['W1']\n W1 = W1.reshape(32, 32, 3, -1).transpose(3, 0, 1, 2)\n plt.imshow(visualize_grid(W1, padding=3).astype('uint8'))\n plt.gca().axis('off')\n plt.show()\n\n\ndef visualize_loss(stats):\n plt.subplot(2, 1, 1)\n plt.plot(stats['loss_history'])\n plt.title('Loss history')\n plt.xlabel('Iteration')\n plt.ylabel('Loss')\n plt.subplot(2, 1, 2)\n plt.plot(stats['train_acc_history'], label='train')\n plt.plot(stats['val_acc_history'], label='val')\n plt.title('Classification accuracy history')\n plt.xlabel('Epoch')\n plt.ylabel('Clasification accuracy')\n plt.legend(loc='upper right')\n plt.show()\n\n\ndef experiment():\n print()\n print('*****************start experiment*****************')\n\n def init_toy_model():\n np.random.seed(0)\n return TwoLayerNet(input_size, hidden_size, num_classes, std=0.1)\n\n def init_toy_data():\n np.random.seed(1)\n X = 10 * np.random.randn(num_inputs, input_size)\n y = np.array([0, 1, 2, 2, 1])\n return X, y\n input_size = 4\n hidden_size = 10\n num_classes = 3\n num_inputs = 5\n net = init_toy_model()\n X, y = init_toy_data()\n loss, grads = net.loss(X, y, reg=0.05)\n correct_loss = 1.30378789133\n print('Difference between your loss and correct loss: {}'.format(np.sum\n (np.abs(loss - correct_loss))))\n print()\n for param_name in grads:\n f = lambda W: net.loss(X, y, reg=0.05)[0]\n param_grad_num = eval_numerical_gradient(f, net.params[param_name],\n verbose=False)\n print('%s max relative error: %e' % (param_name, rel_error(\n param_grad_num, grads[param_name])))\n print()\n net = init_toy_model()\n stats = net.train(X, y, X, y, learning_rate=0.1, reg=5e-06, num_iters=\n 100, verbose=False)\n print('Final training loss: ', stats['loss_history'][-1])\n plt.plot(stats['loss_history'])\n plt.xlabel('iteration')\n plt.ylabel('training loss')\n plt.title('Training Loss history')\n plt.show()\n\n\ndef read_cifar_data(cifar10_dir):\n num_training = 49000\n num_validation = 1000\n num_test = 1000\n X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)\n mask = range(num_training, num_training + num_validation)\n X_val = X_train[mask]\n y_val = y_train[mask]\n mask = range(num_training)\n X_train = X_train[mask]\n y_train = y_train[mask]\n mask = range(num_test)\n X_test = X_test[mask]\n y_test = y_test[mask]\n return X_train, y_train, X_val, y_val, X_test, y_test\n\n\ndef preprocess(X_train, X_val, X_test):\n X_train = np.reshape(X_train, (X_train.shape[0], -1))\n X_val = np.reshape(X_val, (X_val.shape[0], -1))\n X_test = np.reshape(X_test, (X_test.shape[0], -1))\n mean_image = np.mean(X_train, axis=0)\n X_train -= mean_image\n X_val -= mean_image\n X_test -= mean_image\n return X_train, X_val, X_test\n\n\ndef tuning_hyperparameters(X_train, y_train, X_val, y_val):\n print()\n print('*****************start tuning hyperparameters*****************')\n input_size = 32 * 32 * 3\n num_classes = 10\n hidden_sizes = [200, 300]\n regularization_strengths = [0.025, 0.1]\n best_val = -1\n best_net = None\n best_stats = None\n for reg in regularization_strengths:\n for hidden_size in hidden_sizes:\n net = TwoLayerNet(input_size, hidden_size, num_classes)\n stats = net.train(X_train, y_train, X_val, y_val, num_iters=\n 3000, batch_size=100, learning_rate=0.001,\n learning_rate_decay=0.95, reg=reg, verbose=False)\n val_acc = (net.predict(X_val) == y_val).mean()\n print('When reg is {}, h_size is {}, Validation accuracy: {}'.\n format(reg, hidden_size, val_acc))\n if val_acc > best_val:\n best_val = val_acc\n best_net = net\n best_stats = stats\n return best_net, best_stats\n\n\nif __name__ == '__main__':\n X_train, y_train, X_val, y_val, X_test, y_test = read_cifar_data(\n 'cs231n/datasets/cifar-10-batches-py')\n X_train, X_val, X_test = preprocess(X_train, X_val, X_test)\n best_net, best_stats = tuning_hyperparameters(X_train, y_train, X_val,\n y_val)\n visualize_loss(best_stats)\n show_net_weights(best_net)\n test_acc = (best_net.predict(X_test) == y_test).mean()\n print('Test accuract: {}'.format(test_acc))\n", "<import token>\n<assignment token>\n\n\ndef rel_error(x, y):\n \"\"\" returns relative error \"\"\"\n return np.max(np.abs(x - y) / np.maximum(1e-08, np.abs(x) + np.abs(y)))\n\n\ndef show_net_weights(net):\n W1 = net.params['W1']\n W1 = W1.reshape(32, 32, 3, -1).transpose(3, 0, 1, 2)\n plt.imshow(visualize_grid(W1, padding=3).astype('uint8'))\n plt.gca().axis('off')\n plt.show()\n\n\ndef visualize_loss(stats):\n plt.subplot(2, 1, 1)\n plt.plot(stats['loss_history'])\n plt.title('Loss history')\n plt.xlabel('Iteration')\n plt.ylabel('Loss')\n plt.subplot(2, 1, 2)\n plt.plot(stats['train_acc_history'], label='train')\n plt.plot(stats['val_acc_history'], label='val')\n plt.title('Classification accuracy history')\n plt.xlabel('Epoch')\n plt.ylabel('Clasification accuracy')\n plt.legend(loc='upper right')\n plt.show()\n\n\ndef experiment():\n print()\n print('*****************start experiment*****************')\n\n def init_toy_model():\n np.random.seed(0)\n return TwoLayerNet(input_size, hidden_size, num_classes, std=0.1)\n\n def init_toy_data():\n np.random.seed(1)\n X = 10 * np.random.randn(num_inputs, input_size)\n y = np.array([0, 1, 2, 2, 1])\n return X, y\n input_size = 4\n hidden_size = 10\n num_classes = 3\n num_inputs = 5\n net = init_toy_model()\n X, y = init_toy_data()\n loss, grads = net.loss(X, y, reg=0.05)\n correct_loss = 1.30378789133\n print('Difference between your loss and correct loss: {}'.format(np.sum\n (np.abs(loss - correct_loss))))\n print()\n for param_name in grads:\n f = lambda W: net.loss(X, y, reg=0.05)[0]\n param_grad_num = eval_numerical_gradient(f, net.params[param_name],\n verbose=False)\n print('%s max relative error: %e' % (param_name, rel_error(\n param_grad_num, grads[param_name])))\n print()\n net = init_toy_model()\n stats = net.train(X, y, X, y, learning_rate=0.1, reg=5e-06, num_iters=\n 100, verbose=False)\n print('Final training loss: ', stats['loss_history'][-1])\n plt.plot(stats['loss_history'])\n plt.xlabel('iteration')\n plt.ylabel('training loss')\n plt.title('Training Loss history')\n plt.show()\n\n\ndef read_cifar_data(cifar10_dir):\n num_training = 49000\n num_validation = 1000\n num_test = 1000\n X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)\n mask = range(num_training, num_training + num_validation)\n X_val = X_train[mask]\n y_val = y_train[mask]\n mask = range(num_training)\n X_train = X_train[mask]\n y_train = y_train[mask]\n mask = range(num_test)\n X_test = X_test[mask]\n y_test = y_test[mask]\n return X_train, y_train, X_val, y_val, X_test, y_test\n\n\ndef preprocess(X_train, X_val, X_test):\n X_train = np.reshape(X_train, (X_train.shape[0], -1))\n X_val = np.reshape(X_val, (X_val.shape[0], -1))\n X_test = np.reshape(X_test, (X_test.shape[0], -1))\n mean_image = np.mean(X_train, axis=0)\n X_train -= mean_image\n X_val -= mean_image\n X_test -= mean_image\n return X_train, X_val, X_test\n\n\ndef tuning_hyperparameters(X_train, y_train, X_val, y_val):\n print()\n print('*****************start tuning hyperparameters*****************')\n input_size = 32 * 32 * 3\n num_classes = 10\n hidden_sizes = [200, 300]\n regularization_strengths = [0.025, 0.1]\n best_val = -1\n best_net = None\n best_stats = None\n for reg in regularization_strengths:\n for hidden_size in hidden_sizes:\n net = TwoLayerNet(input_size, hidden_size, num_classes)\n stats = net.train(X_train, y_train, X_val, y_val, num_iters=\n 3000, batch_size=100, learning_rate=0.001,\n learning_rate_decay=0.95, reg=reg, verbose=False)\n val_acc = (net.predict(X_val) == y_val).mean()\n print('When reg is {}, h_size is {}, Validation accuracy: {}'.\n format(reg, hidden_size, val_acc))\n if val_acc > best_val:\n best_val = val_acc\n best_net = net\n best_stats = stats\n return best_net, best_stats\n\n\nif __name__ == '__main__':\n X_train, y_train, X_val, y_val, X_test, y_test = read_cifar_data(\n 'cs231n/datasets/cifar-10-batches-py')\n X_train, X_val, X_test = preprocess(X_train, X_val, X_test)\n best_net, best_stats = tuning_hyperparameters(X_train, y_train, X_val,\n y_val)\n visualize_loss(best_stats)\n show_net_weights(best_net)\n test_acc = (best_net.predict(X_test) == y_test).mean()\n print('Test accuract: {}'.format(test_acc))\n", "<import token>\n<assignment token>\n\n\ndef rel_error(x, y):\n \"\"\" returns relative error \"\"\"\n return np.max(np.abs(x - y) / np.maximum(1e-08, np.abs(x) + np.abs(y)))\n\n\ndef show_net_weights(net):\n W1 = net.params['W1']\n W1 = W1.reshape(32, 32, 3, -1).transpose(3, 0, 1, 2)\n plt.imshow(visualize_grid(W1, padding=3).astype('uint8'))\n plt.gca().axis('off')\n plt.show()\n\n\ndef visualize_loss(stats):\n plt.subplot(2, 1, 1)\n plt.plot(stats['loss_history'])\n plt.title('Loss history')\n plt.xlabel('Iteration')\n plt.ylabel('Loss')\n plt.subplot(2, 1, 2)\n plt.plot(stats['train_acc_history'], label='train')\n plt.plot(stats['val_acc_history'], label='val')\n plt.title('Classification accuracy history')\n plt.xlabel('Epoch')\n plt.ylabel('Clasification accuracy')\n plt.legend(loc='upper right')\n plt.show()\n\n\ndef experiment():\n print()\n print('*****************start experiment*****************')\n\n def init_toy_model():\n np.random.seed(0)\n return TwoLayerNet(input_size, hidden_size, num_classes, std=0.1)\n\n def init_toy_data():\n np.random.seed(1)\n X = 10 * np.random.randn(num_inputs, input_size)\n y = np.array([0, 1, 2, 2, 1])\n return X, y\n input_size = 4\n hidden_size = 10\n num_classes = 3\n num_inputs = 5\n net = init_toy_model()\n X, y = init_toy_data()\n loss, grads = net.loss(X, y, reg=0.05)\n correct_loss = 1.30378789133\n print('Difference between your loss and correct loss: {}'.format(np.sum\n (np.abs(loss - correct_loss))))\n print()\n for param_name in grads:\n f = lambda W: net.loss(X, y, reg=0.05)[0]\n param_grad_num = eval_numerical_gradient(f, net.params[param_name],\n verbose=False)\n print('%s max relative error: %e' % (param_name, rel_error(\n param_grad_num, grads[param_name])))\n print()\n net = init_toy_model()\n stats = net.train(X, y, X, y, learning_rate=0.1, reg=5e-06, num_iters=\n 100, verbose=False)\n print('Final training loss: ', stats['loss_history'][-1])\n plt.plot(stats['loss_history'])\n plt.xlabel('iteration')\n plt.ylabel('training loss')\n plt.title('Training Loss history')\n plt.show()\n\n\ndef read_cifar_data(cifar10_dir):\n num_training = 49000\n num_validation = 1000\n num_test = 1000\n X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)\n mask = range(num_training, num_training + num_validation)\n X_val = X_train[mask]\n y_val = y_train[mask]\n mask = range(num_training)\n X_train = X_train[mask]\n y_train = y_train[mask]\n mask = range(num_test)\n X_test = X_test[mask]\n y_test = y_test[mask]\n return X_train, y_train, X_val, y_val, X_test, y_test\n\n\ndef preprocess(X_train, X_val, X_test):\n X_train = np.reshape(X_train, (X_train.shape[0], -1))\n X_val = np.reshape(X_val, (X_val.shape[0], -1))\n X_test = np.reshape(X_test, (X_test.shape[0], -1))\n mean_image = np.mean(X_train, axis=0)\n X_train -= mean_image\n X_val -= mean_image\n X_test -= mean_image\n return X_train, X_val, X_test\n\n\ndef tuning_hyperparameters(X_train, y_train, X_val, y_val):\n print()\n print('*****************start tuning hyperparameters*****************')\n input_size = 32 * 32 * 3\n num_classes = 10\n hidden_sizes = [200, 300]\n regularization_strengths = [0.025, 0.1]\n best_val = -1\n best_net = None\n best_stats = None\n for reg in regularization_strengths:\n for hidden_size in hidden_sizes:\n net = TwoLayerNet(input_size, hidden_size, num_classes)\n stats = net.train(X_train, y_train, X_val, y_val, num_iters=\n 3000, batch_size=100, learning_rate=0.001,\n learning_rate_decay=0.95, reg=reg, verbose=False)\n val_acc = (net.predict(X_val) == y_val).mean()\n print('When reg is {}, h_size is {}, Validation accuracy: {}'.\n format(reg, hidden_size, val_acc))\n if val_acc > best_val:\n best_val = val_acc\n best_net = net\n best_stats = stats\n return best_net, best_stats\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\ndef rel_error(x, y):\n \"\"\" returns relative error \"\"\"\n return np.max(np.abs(x - y) / np.maximum(1e-08, np.abs(x) + np.abs(y)))\n\n\ndef show_net_weights(net):\n W1 = net.params['W1']\n W1 = W1.reshape(32, 32, 3, -1).transpose(3, 0, 1, 2)\n plt.imshow(visualize_grid(W1, padding=3).astype('uint8'))\n plt.gca().axis('off')\n plt.show()\n\n\n<function token>\n\n\ndef experiment():\n print()\n print('*****************start experiment*****************')\n\n def init_toy_model():\n np.random.seed(0)\n return TwoLayerNet(input_size, hidden_size, num_classes, std=0.1)\n\n def init_toy_data():\n np.random.seed(1)\n X = 10 * np.random.randn(num_inputs, input_size)\n y = np.array([0, 1, 2, 2, 1])\n return X, y\n input_size = 4\n hidden_size = 10\n num_classes = 3\n num_inputs = 5\n net = init_toy_model()\n X, y = init_toy_data()\n loss, grads = net.loss(X, y, reg=0.05)\n correct_loss = 1.30378789133\n print('Difference between your loss and correct loss: {}'.format(np.sum\n (np.abs(loss - correct_loss))))\n print()\n for param_name in grads:\n f = lambda W: net.loss(X, y, reg=0.05)[0]\n param_grad_num = eval_numerical_gradient(f, net.params[param_name],\n verbose=False)\n print('%s max relative error: %e' % (param_name, rel_error(\n param_grad_num, grads[param_name])))\n print()\n net = init_toy_model()\n stats = net.train(X, y, X, y, learning_rate=0.1, reg=5e-06, num_iters=\n 100, verbose=False)\n print('Final training loss: ', stats['loss_history'][-1])\n plt.plot(stats['loss_history'])\n plt.xlabel('iteration')\n plt.ylabel('training loss')\n plt.title('Training Loss history')\n plt.show()\n\n\ndef read_cifar_data(cifar10_dir):\n num_training = 49000\n num_validation = 1000\n num_test = 1000\n X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)\n mask = range(num_training, num_training + num_validation)\n X_val = X_train[mask]\n y_val = y_train[mask]\n mask = range(num_training)\n X_train = X_train[mask]\n y_train = y_train[mask]\n mask = range(num_test)\n X_test = X_test[mask]\n y_test = y_test[mask]\n return X_train, y_train, X_val, y_val, X_test, y_test\n\n\ndef preprocess(X_train, X_val, X_test):\n X_train = np.reshape(X_train, (X_train.shape[0], -1))\n X_val = np.reshape(X_val, (X_val.shape[0], -1))\n X_test = np.reshape(X_test, (X_test.shape[0], -1))\n mean_image = np.mean(X_train, axis=0)\n X_train -= mean_image\n X_val -= mean_image\n X_test -= mean_image\n return X_train, X_val, X_test\n\n\ndef tuning_hyperparameters(X_train, y_train, X_val, y_val):\n print()\n print('*****************start tuning hyperparameters*****************')\n input_size = 32 * 32 * 3\n num_classes = 10\n hidden_sizes = [200, 300]\n regularization_strengths = [0.025, 0.1]\n best_val = -1\n best_net = None\n best_stats = None\n for reg in regularization_strengths:\n for hidden_size in hidden_sizes:\n net = TwoLayerNet(input_size, hidden_size, num_classes)\n stats = net.train(X_train, y_train, X_val, y_val, num_iters=\n 3000, batch_size=100, learning_rate=0.001,\n learning_rate_decay=0.95, reg=reg, verbose=False)\n val_acc = (net.predict(X_val) == y_val).mean()\n print('When reg is {}, h_size is {}, Validation accuracy: {}'.\n format(reg, hidden_size, val_acc))\n if val_acc > best_val:\n best_val = val_acc\n best_net = net\n best_stats = stats\n return best_net, best_stats\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\ndef rel_error(x, y):\n \"\"\" returns relative error \"\"\"\n return np.max(np.abs(x - y) / np.maximum(1e-08, np.abs(x) + np.abs(y)))\n\n\n<function token>\n<function token>\n\n\ndef experiment():\n print()\n print('*****************start experiment*****************')\n\n def init_toy_model():\n np.random.seed(0)\n return TwoLayerNet(input_size, hidden_size, num_classes, std=0.1)\n\n def init_toy_data():\n np.random.seed(1)\n X = 10 * np.random.randn(num_inputs, input_size)\n y = np.array([0, 1, 2, 2, 1])\n return X, y\n input_size = 4\n hidden_size = 10\n num_classes = 3\n num_inputs = 5\n net = init_toy_model()\n X, y = init_toy_data()\n loss, grads = net.loss(X, y, reg=0.05)\n correct_loss = 1.30378789133\n print('Difference between your loss and correct loss: {}'.format(np.sum\n (np.abs(loss - correct_loss))))\n print()\n for param_name in grads:\n f = lambda W: net.loss(X, y, reg=0.05)[0]\n param_grad_num = eval_numerical_gradient(f, net.params[param_name],\n verbose=False)\n print('%s max relative error: %e' % (param_name, rel_error(\n param_grad_num, grads[param_name])))\n print()\n net = init_toy_model()\n stats = net.train(X, y, X, y, learning_rate=0.1, reg=5e-06, num_iters=\n 100, verbose=False)\n print('Final training loss: ', stats['loss_history'][-1])\n plt.plot(stats['loss_history'])\n plt.xlabel('iteration')\n plt.ylabel('training loss')\n plt.title('Training Loss history')\n plt.show()\n\n\ndef read_cifar_data(cifar10_dir):\n num_training = 49000\n num_validation = 1000\n num_test = 1000\n X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)\n mask = range(num_training, num_training + num_validation)\n X_val = X_train[mask]\n y_val = y_train[mask]\n mask = range(num_training)\n X_train = X_train[mask]\n y_train = y_train[mask]\n mask = range(num_test)\n X_test = X_test[mask]\n y_test = y_test[mask]\n return X_train, y_train, X_val, y_val, X_test, y_test\n\n\ndef preprocess(X_train, X_val, X_test):\n X_train = np.reshape(X_train, (X_train.shape[0], -1))\n X_val = np.reshape(X_val, (X_val.shape[0], -1))\n X_test = np.reshape(X_test, (X_test.shape[0], -1))\n mean_image = np.mean(X_train, axis=0)\n X_train -= mean_image\n X_val -= mean_image\n X_test -= mean_image\n return X_train, X_val, X_test\n\n\ndef tuning_hyperparameters(X_train, y_train, X_val, y_val):\n print()\n print('*****************start tuning hyperparameters*****************')\n input_size = 32 * 32 * 3\n num_classes = 10\n hidden_sizes = [200, 300]\n regularization_strengths = [0.025, 0.1]\n best_val = -1\n best_net = None\n best_stats = None\n for reg in regularization_strengths:\n for hidden_size in hidden_sizes:\n net = TwoLayerNet(input_size, hidden_size, num_classes)\n stats = net.train(X_train, y_train, X_val, y_val, num_iters=\n 3000, batch_size=100, learning_rate=0.001,\n learning_rate_decay=0.95, reg=reg, verbose=False)\n val_acc = (net.predict(X_val) == y_val).mean()\n print('When reg is {}, h_size is {}, Validation accuracy: {}'.\n format(reg, hidden_size, val_acc))\n if val_acc > best_val:\n best_val = val_acc\n best_net = net\n best_stats = stats\n return best_net, best_stats\n\n\n<code token>\n", "<import token>\n<assignment token>\n\n\ndef rel_error(x, y):\n \"\"\" returns relative error \"\"\"\n return np.max(np.abs(x - y) / np.maximum(1e-08, np.abs(x) + np.abs(y)))\n\n\n<function token>\n<function token>\n\n\ndef experiment():\n print()\n print('*****************start experiment*****************')\n\n def init_toy_model():\n np.random.seed(0)\n return TwoLayerNet(input_size, hidden_size, num_classes, std=0.1)\n\n def init_toy_data():\n np.random.seed(1)\n X = 10 * np.random.randn(num_inputs, input_size)\n y = np.array([0, 1, 2, 2, 1])\n return X, y\n input_size = 4\n hidden_size = 10\n num_classes = 3\n num_inputs = 5\n net = init_toy_model()\n X, y = init_toy_data()\n loss, grads = net.loss(X, y, reg=0.05)\n correct_loss = 1.30378789133\n print('Difference between your loss and correct loss: {}'.format(np.sum\n (np.abs(loss - correct_loss))))\n print()\n for param_name in grads:\n f = lambda W: net.loss(X, y, reg=0.05)[0]\n param_grad_num = eval_numerical_gradient(f, net.params[param_name],\n verbose=False)\n print('%s max relative error: %e' % (param_name, rel_error(\n param_grad_num, grads[param_name])))\n print()\n net = init_toy_model()\n stats = net.train(X, y, X, y, learning_rate=0.1, reg=5e-06, num_iters=\n 100, verbose=False)\n print('Final training loss: ', stats['loss_history'][-1])\n plt.plot(stats['loss_history'])\n plt.xlabel('iteration')\n plt.ylabel('training loss')\n plt.title('Training Loss history')\n plt.show()\n\n\ndef read_cifar_data(cifar10_dir):\n num_training = 49000\n num_validation = 1000\n num_test = 1000\n X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)\n mask = range(num_training, num_training + num_validation)\n X_val = X_train[mask]\n y_val = y_train[mask]\n mask = range(num_training)\n X_train = X_train[mask]\n y_train = y_train[mask]\n mask = range(num_test)\n X_test = X_test[mask]\n y_test = y_test[mask]\n return X_train, y_train, X_val, y_val, X_test, y_test\n\n\ndef preprocess(X_train, X_val, X_test):\n X_train = np.reshape(X_train, (X_train.shape[0], -1))\n X_val = np.reshape(X_val, (X_val.shape[0], -1))\n X_test = np.reshape(X_test, (X_test.shape[0], -1))\n mean_image = np.mean(X_train, axis=0)\n X_train -= mean_image\n X_val -= mean_image\n X_test -= mean_image\n return X_train, X_val, X_test\n\n\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n\n\ndef rel_error(x, y):\n \"\"\" returns relative error \"\"\"\n return np.max(np.abs(x - y) / np.maximum(1e-08, np.abs(x) + np.abs(y)))\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef read_cifar_data(cifar10_dir):\n num_training = 49000\n num_validation = 1000\n num_test = 1000\n X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)\n mask = range(num_training, num_training + num_validation)\n X_val = X_train[mask]\n y_val = y_train[mask]\n mask = range(num_training)\n X_train = X_train[mask]\n y_train = y_train[mask]\n mask = range(num_test)\n X_test = X_test[mask]\n y_test = y_test[mask]\n return X_train, y_train, X_val, y_val, X_test, y_test\n\n\ndef preprocess(X_train, X_val, X_test):\n X_train = np.reshape(X_train, (X_train.shape[0], -1))\n X_val = np.reshape(X_val, (X_val.shape[0], -1))\n X_test = np.reshape(X_test, (X_test.shape[0], -1))\n mean_image = np.mean(X_train, axis=0)\n X_train -= mean_image\n X_val -= mean_image\n X_test -= mean_image\n return X_train, X_val, X_test\n\n\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n\n\ndef rel_error(x, y):\n \"\"\" returns relative error \"\"\"\n return np.max(np.abs(x - y) / np.maximum(1e-08, np.abs(x) + np.abs(y)))\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef read_cifar_data(cifar10_dir):\n num_training = 49000\n num_validation = 1000\n num_test = 1000\n X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)\n mask = range(num_training, num_training + num_validation)\n X_val = X_train[mask]\n y_val = y_train[mask]\n mask = range(num_training)\n X_train = X_train[mask]\n y_train = y_train[mask]\n mask = range(num_test)\n X_test = X_test[mask]\n y_test = y_test[mask]\n return X_train, y_train, X_val, y_val, X_test, y_test\n\n\n<function token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef read_cifar_data(cifar10_dir):\n num_training = 49000\n num_validation = 1000\n num_test = 1000\n X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)\n mask = range(num_training, num_training + num_validation)\n X_val = X_train[mask]\n y_val = y_train[mask]\n mask = range(num_training)\n X_train = X_train[mask]\n y_train = y_train[mask]\n mask = range(num_test)\n X_test = X_test[mask]\n y_test = y_test[mask]\n return X_train, y_train, X_val, y_val, X_test, y_test\n\n\n<function token>\n<function token>\n<code token>\n", "<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n" ]
false
98,965
574a4a2468f7aa419c39a45aa3cbc87a4b24a761
# Generated by Django 3.1.7 on 2021-04-10 08:15 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('system', '0003_auto_20210410_0806'), ] operations = [ migrations.AddField( model_name='group', name='has_admin', field=models.BooleanField(default=False), ), ]
[ "# Generated by Django 3.1.7 on 2021-04-10 08:15\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('system', '0003_auto_20210410_0806'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='group',\n name='has_admin',\n field=models.BooleanField(default=False),\n ),\n ]\n", "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('system', '0003_auto_20210410_0806')]\n operations = [migrations.AddField(model_name='group', name='has_admin',\n field=models.BooleanField(default=False))]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('system', '0003_auto_20210410_0806')]\n operations = [migrations.AddField(model_name='group', name='has_admin',\n field=models.BooleanField(default=False))]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n" ]
false
98,966
f76bd92a1e420b2b86d78240367f130544175bd5
import json from django.test import TestCase from django.test import Client class StorageAPITestCase(TestCase): def is_field_in_details(self, field, content): return field in json.loads(content)['detail'] def create_storage_foo(self): c = Client() r = c.post( "/api/v1/factory/storage/", content_type="application/json", data={ "name": "Foo", "key": {"name": "id", "type": "string", "max_length": 5}, "fields": [ {"name": "fieldstring", "max_length": 32, "type": "string", "db_index": True}, {"name": "fieldint", "type": "integer", "db_index": True}, {"name": "fieldstring2", "max_length": 16, "type": "string"}, {"name": "fieldtext", "type": "text"} ] } ) def update_storage_foo(self): c = Client() r = c.post( "/api/v1/factory/storage/", content_type="application/json", data={ "name": "Foo", "key": {"name": "id", "type": "string", "max_length": 5}, "fields": [ {"name": "fieldstring", "max_length": 32, "type": "string", "db_index": True}, {"name": "fieldint", "type": "integer", "db_index": True}, {"name": "fieldstring2", "max_length": 16, "type": "string"}, {"name": "fieldtext", "type": "text"}, {"name": "fieldint2", "type": "integer", "db_index": True, "default": 1}, {"name": "fieldlong", "type": "long", "db_index": True, "default": 2}, {"name": "fieldstring3", "max_length": 16, "type": "string", "default": "S", "db_index": True}, {"name": "fieldtext2", "type": "text", "default": "T"} ] } ) def test_url_validation(self): c = Client() # Send empty data r = c.get('/api/v1/storage/foo1/') self.assertEqual(r.status_code, 404) r = c.get('/api/v1/storage/foo1/1/') self.assertEqual(r.status_code, 404) r = c.get('/api/v1/storage/foo1/1/1') self.assertEqual(r.status_code, 404) def test_positive_flow(self): self.create_storage_foo() c = Client() r = c.get('/api/v1/storage/Foo/') self.assertEqual(r.status_code, 200) r = c.post( '/api/v1/storage/Foo/', content_type="application/json", data={ "id": "foo1", "fieldstring": "f1", "fieldint": 2, "fieldstring2": "fs3", "fieldtext": "SELECT * from factory_storage" } ) self.assertEqual(r.status_code, 201) content = json.loads(r.content) self.assertTrue('id' in content) self.assertTrue('fieldstring' in content) self.assertTrue('fieldint' in content) self.assertTrue('fieldstring2' in content) self.assertTrue('fieldtext' in content) self.assertTrue('version' in content) self.assertTrue('created_at' in content) self.assertTrue('updated_at' in content) r = c.get('/api/v1/storage/Foo/foo1/') self.assertEqual(r.status_code, 200) got_content = json.loads(r.content) self.assertEqual(got_content['id'], content['id']) self.assertEqual(got_content['fieldstring'], content['fieldstring']) self.assertEqual(got_content['fieldint'], content['fieldint']) self.assertEqual(got_content['fieldstring2'], content['fieldstring2']) self.assertEqual(got_content['fieldtext'], content['fieldtext']) self.assertEqual(got_content['version'], content['version']) self.assertEqual(got_content['created_at'], content['created_at']) self.assertEqual(got_content['updated_at'], content['updated_at']) self.update_storage_foo() r = c.get('/api/v1/storage/Foo/foo1/') self.assertEqual(r.status_code, 200) got_content = json.loads(r.content) self.assertEqual(got_content['id'], content['id']) self.assertEqual(got_content['fieldstring'], content['fieldstring']) self.assertEqual(got_content['fieldint'], content['fieldint']) self.assertEqual(got_content['fieldstring2'], content['fieldstring2']) self.assertEqual(got_content['fieldtext'], content['fieldtext']) self.assertEqual(got_content['fieldint2'], 1) self.assertEqual(got_content['fieldlong'], 2) self.assertEqual(got_content['fieldstring3'], "S") self.assertEqual(got_content['fieldtext2'], "T") self.assertEqual(got_content['version'], content['version']) self.assertEqual(got_content['created_at'], content['created_at']) self.assertEqual(got_content['updated_at'], content['updated_at']) r = c.get('/api/v1/storage/Foo/?fieldlong=2') self.assertEqual(r.status_code, 200) content = json.loads(r.content) self.assertEqual(content['count'], 1) r = c.get('/api/v1/storage/Foo/?fieldlong=0') self.assertEqual(r.status_code, 200) content = json.loads(r.content) self.assertEqual(content['count'], 0) def test_version_locking(self): self.create_storage_foo() c = Client() r = c.get('/api/v1/storage/Foo/') self.assertEqual(r.status_code, 200) r = c.post( '/api/v1/storage/Foo/', content_type="application/json", data={ "id": "foo1", "fieldstring": "f1", "fieldint": 2, "fieldstring2": "fs3", "fieldtext": "SELECT * from factory_storage" } ) self.assertEqual(r.status_code, 201) content = json.loads(r.content) self.assertEqual(content["version"], 1) r = c.post( '/api/v1/storage/Foo/', content_type="application/json", data={ "id": "foo1", "fieldstring": "f1", "fieldint": 2, "fieldstring2": "fs3", "fieldtext": "SELECT * from factory_storage" } ) self.assertEqual(r.status_code, 201) content = json.loads(r.content) self.assertEqual(content["version"], 2) r = c.post( '/api/v1/storage/Foo/', content_type="application/json", data={ "id": "foo1", "fieldstring": "f1", "fieldint": 2, "fieldstring2": "fs3", "fieldtext": "SELECT * from factory_storage", "version": 2 } ) self.assertEqual(r.status_code, 201) content = json.loads(r.content) self.assertEqual(content["version"], 3) # Previous version r = c.post( '/api/v1/storage/Foo/', content_type="application/json", data={ "id": "foo1", "fieldstring": "f1", "fieldint": 2, "fieldstring2": "fs3", "fieldtext": "SELECT * from factory_storage", "version": 2 } ) self.assertEqual(r.status_code, 409) # Dataset with random version r = c.post( '/api/v1/storage/Foo/', content_type="application/json", data={ "id": "foo2", "fieldstring": "f1", "fieldint": 2, "fieldstring2": "fs3", "fieldtext": "SELECT * from factory_storage", "version": 10 } ) self.assertEqual(r.status_code, 201) content = json.loads(r.content) self.assertEqual(content["version"], 10)
[ "import json\n\nfrom django.test import TestCase\nfrom django.test import Client\n\n\nclass StorageAPITestCase(TestCase):\n \n def is_field_in_details(self, field, content):\n return field in json.loads(content)['detail']\n \n def create_storage_foo(self):\n c = Client()\n r = c.post(\n \"/api/v1/factory/storage/\",\n content_type=\"application/json\",\n data={\n \"name\": \"Foo\",\n \"key\": {\"name\": \"id\", \"type\": \"string\", \"max_length\": 5},\n \"fields\": [\n {\"name\": \"fieldstring\", \"max_length\": 32, \"type\": \"string\", \"db_index\": True},\n {\"name\": \"fieldint\", \"type\": \"integer\", \"db_index\": True},\n {\"name\": \"fieldstring2\", \"max_length\": 16, \"type\": \"string\"},\n {\"name\": \"fieldtext\", \"type\": \"text\"}\n ]\n }\n )\n \n def update_storage_foo(self):\n c = Client()\n r = c.post(\n \"/api/v1/factory/storage/\",\n content_type=\"application/json\",\n data={\n \"name\": \"Foo\",\n \"key\": {\"name\": \"id\", \"type\": \"string\", \"max_length\": 5},\n \"fields\": [\n {\"name\": \"fieldstring\", \"max_length\": 32, \"type\": \"string\", \"db_index\": True},\n {\"name\": \"fieldint\", \"type\": \"integer\", \"db_index\": True},\n {\"name\": \"fieldstring2\", \"max_length\": 16, \"type\": \"string\"},\n {\"name\": \"fieldtext\", \"type\": \"text\"},\n {\"name\": \"fieldint2\", \"type\": \"integer\", \"db_index\": True, \"default\": 1},\n {\"name\": \"fieldlong\", \"type\": \"long\", \"db_index\": True, \"default\": 2},\n {\"name\": \"fieldstring3\", \"max_length\": 16, \"type\": \"string\", \"default\": \"S\", \"db_index\": True},\n {\"name\": \"fieldtext2\", \"type\": \"text\", \"default\": \"T\"}\n ]\n }\n )\n \n def test_url_validation(self):\n c = Client()\n \n # Send empty data\n r = c.get('/api/v1/storage/foo1/')\n self.assertEqual(r.status_code, 404)\n\n r = c.get('/api/v1/storage/foo1/1/')\n self.assertEqual(r.status_code, 404)\n\n r = c.get('/api/v1/storage/foo1/1/1')\n self.assertEqual(r.status_code, 404)\n \n def test_positive_flow(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n \n r = c.post(\n '/api/v1/storage/Foo/',\n content_type=\"application/json\",\n data={\n \"id\": \"foo1\",\n \"fieldstring\": \"f1\",\n \"fieldint\": 2,\n \"fieldstring2\": \"fs3\",\n \"fieldtext\": \"SELECT * from factory_storage\"\n }\n )\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertTrue('id' in content)\n self.assertTrue('fieldstring' in content)\n self.assertTrue('fieldint' in content)\n self.assertTrue('fieldstring2' in content)\n self.assertTrue('fieldtext' in content)\n self.assertTrue('version' in content)\n self.assertTrue('created_at' in content)\n self.assertTrue('updated_at' in content)\n \n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n \n self.update_storage_foo()\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['fieldint2'], 1)\n self.assertEqual(got_content['fieldlong'], 2)\n self.assertEqual(got_content['fieldstring3'], \"S\")\n self.assertEqual(got_content['fieldtext2'], \"T\")\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n \n r = c.get('/api/v1/storage/Foo/?fieldlong=2')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 1)\n r = c.get('/api/v1/storage/Foo/?fieldlong=0')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 0)\n\n def test_version_locking(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n\n r = c.post(\n '/api/v1/storage/Foo/',\n content_type=\"application/json\",\n data={\n \"id\": \"foo1\",\n \"fieldstring\": \"f1\",\n \"fieldint\": 2,\n \"fieldstring2\": \"fs3\",\n \"fieldtext\": \"SELECT * from factory_storage\"\n }\n )\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content[\"version\"], 1)\n r = c.post(\n '/api/v1/storage/Foo/',\n content_type=\"application/json\",\n data={\n \"id\": \"foo1\",\n \"fieldstring\": \"f1\",\n \"fieldint\": 2,\n \"fieldstring2\": \"fs3\",\n \"fieldtext\": \"SELECT * from factory_storage\"\n }\n )\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content[\"version\"], 2)\n r = c.post(\n '/api/v1/storage/Foo/',\n content_type=\"application/json\",\n data={\n \"id\": \"foo1\",\n \"fieldstring\": \"f1\",\n \"fieldint\": 2,\n \"fieldstring2\": \"fs3\",\n \"fieldtext\": \"SELECT * from factory_storage\",\n \"version\": 2\n }\n )\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content[\"version\"], 3)\n \n # Previous version\n r = c.post(\n '/api/v1/storage/Foo/',\n content_type=\"application/json\",\n data={\n \"id\": \"foo1\",\n \"fieldstring\": \"f1\",\n \"fieldint\": 2,\n \"fieldstring2\": \"fs3\",\n \"fieldtext\": \"SELECT * from factory_storage\",\n \"version\": 2\n }\n )\n self.assertEqual(r.status_code, 409)\n \n # Dataset with random version\n r = c.post(\n '/api/v1/storage/Foo/',\n content_type=\"application/json\",\n data={\n \"id\": \"foo2\",\n \"fieldstring\": \"f1\",\n \"fieldint\": 2,\n \"fieldstring2\": \"fs3\",\n \"fieldtext\": \"SELECT * from factory_storage\",\n \"version\": 10\n }\n )\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content[\"version\"], 10)", "import json\nfrom django.test import TestCase\nfrom django.test import Client\n\n\nclass StorageAPITestCase(TestCase):\n\n def is_field_in_details(self, field, content):\n return field in json.loads(content)['detail']\n\n def create_storage_foo(self):\n c = Client()\n r = c.post('/api/v1/factory/storage/', content_type=\n 'application/json', data={'name': 'Foo', 'key': {'name': 'id',\n 'type': 'string', 'max_length': 5}, 'fields': [{'name':\n 'fieldstring', 'max_length': 32, 'type': 'string', 'db_index': \n True}, {'name': 'fieldint', 'type': 'integer', 'db_index': True\n }, {'name': 'fieldstring2', 'max_length': 16, 'type': 'string'},\n {'name': 'fieldtext', 'type': 'text'}]})\n\n def update_storage_foo(self):\n c = Client()\n r = c.post('/api/v1/factory/storage/', content_type=\n 'application/json', data={'name': 'Foo', 'key': {'name': 'id',\n 'type': 'string', 'max_length': 5}, 'fields': [{'name':\n 'fieldstring', 'max_length': 32, 'type': 'string', 'db_index': \n True}, {'name': 'fieldint', 'type': 'integer', 'db_index': True\n }, {'name': 'fieldstring2', 'max_length': 16, 'type': 'string'},\n {'name': 'fieldtext', 'type': 'text'}, {'name': 'fieldint2',\n 'type': 'integer', 'db_index': True, 'default': 1}, {'name':\n 'fieldlong', 'type': 'long', 'db_index': True, 'default': 2}, {\n 'name': 'fieldstring3', 'max_length': 16, 'type': 'string',\n 'default': 'S', 'db_index': True}, {'name': 'fieldtext2',\n 'type': 'text', 'default': 'T'}]})\n\n def test_url_validation(self):\n c = Client()\n r = c.get('/api/v1/storage/foo1/')\n self.assertEqual(r.status_code, 404)\n r = c.get('/api/v1/storage/foo1/1/')\n self.assertEqual(r.status_code, 404)\n r = c.get('/api/v1/storage/foo1/1/1')\n self.assertEqual(r.status_code, 404)\n\n def test_positive_flow(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertTrue('id' in content)\n self.assertTrue('fieldstring' in content)\n self.assertTrue('fieldint' in content)\n self.assertTrue('fieldstring2' in content)\n self.assertTrue('fieldtext' in content)\n self.assertTrue('version' in content)\n self.assertTrue('created_at' in content)\n self.assertTrue('updated_at' in content)\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n self.update_storage_foo()\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['fieldint2'], 1)\n self.assertEqual(got_content['fieldlong'], 2)\n self.assertEqual(got_content['fieldstring3'], 'S')\n self.assertEqual(got_content['fieldtext2'], 'T')\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n r = c.get('/api/v1/storage/Foo/?fieldlong=2')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 1)\n r = c.get('/api/v1/storage/Foo/?fieldlong=0')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 0)\n\n def test_version_locking(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 1)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 2)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 2})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 3)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 2})\n self.assertEqual(r.status_code, 409)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo2', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 10})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 10)\n", "<import token>\n\n\nclass StorageAPITestCase(TestCase):\n\n def is_field_in_details(self, field, content):\n return field in json.loads(content)['detail']\n\n def create_storage_foo(self):\n c = Client()\n r = c.post('/api/v1/factory/storage/', content_type=\n 'application/json', data={'name': 'Foo', 'key': {'name': 'id',\n 'type': 'string', 'max_length': 5}, 'fields': [{'name':\n 'fieldstring', 'max_length': 32, 'type': 'string', 'db_index': \n True}, {'name': 'fieldint', 'type': 'integer', 'db_index': True\n }, {'name': 'fieldstring2', 'max_length': 16, 'type': 'string'},\n {'name': 'fieldtext', 'type': 'text'}]})\n\n def update_storage_foo(self):\n c = Client()\n r = c.post('/api/v1/factory/storage/', content_type=\n 'application/json', data={'name': 'Foo', 'key': {'name': 'id',\n 'type': 'string', 'max_length': 5}, 'fields': [{'name':\n 'fieldstring', 'max_length': 32, 'type': 'string', 'db_index': \n True}, {'name': 'fieldint', 'type': 'integer', 'db_index': True\n }, {'name': 'fieldstring2', 'max_length': 16, 'type': 'string'},\n {'name': 'fieldtext', 'type': 'text'}, {'name': 'fieldint2',\n 'type': 'integer', 'db_index': True, 'default': 1}, {'name':\n 'fieldlong', 'type': 'long', 'db_index': True, 'default': 2}, {\n 'name': 'fieldstring3', 'max_length': 16, 'type': 'string',\n 'default': 'S', 'db_index': True}, {'name': 'fieldtext2',\n 'type': 'text', 'default': 'T'}]})\n\n def test_url_validation(self):\n c = Client()\n r = c.get('/api/v1/storage/foo1/')\n self.assertEqual(r.status_code, 404)\n r = c.get('/api/v1/storage/foo1/1/')\n self.assertEqual(r.status_code, 404)\n r = c.get('/api/v1/storage/foo1/1/1')\n self.assertEqual(r.status_code, 404)\n\n def test_positive_flow(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertTrue('id' in content)\n self.assertTrue('fieldstring' in content)\n self.assertTrue('fieldint' in content)\n self.assertTrue('fieldstring2' in content)\n self.assertTrue('fieldtext' in content)\n self.assertTrue('version' in content)\n self.assertTrue('created_at' in content)\n self.assertTrue('updated_at' in content)\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n self.update_storage_foo()\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['fieldint2'], 1)\n self.assertEqual(got_content['fieldlong'], 2)\n self.assertEqual(got_content['fieldstring3'], 'S')\n self.assertEqual(got_content['fieldtext2'], 'T')\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n r = c.get('/api/v1/storage/Foo/?fieldlong=2')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 1)\n r = c.get('/api/v1/storage/Foo/?fieldlong=0')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 0)\n\n def test_version_locking(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 1)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 2)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 2})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 3)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 2})\n self.assertEqual(r.status_code, 409)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo2', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 10})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 10)\n", "<import token>\n\n\nclass StorageAPITestCase(TestCase):\n\n def is_field_in_details(self, field, content):\n return field in json.loads(content)['detail']\n\n def create_storage_foo(self):\n c = Client()\n r = c.post('/api/v1/factory/storage/', content_type=\n 'application/json', data={'name': 'Foo', 'key': {'name': 'id',\n 'type': 'string', 'max_length': 5}, 'fields': [{'name':\n 'fieldstring', 'max_length': 32, 'type': 'string', 'db_index': \n True}, {'name': 'fieldint', 'type': 'integer', 'db_index': True\n }, {'name': 'fieldstring2', 'max_length': 16, 'type': 'string'},\n {'name': 'fieldtext', 'type': 'text'}]})\n\n def update_storage_foo(self):\n c = Client()\n r = c.post('/api/v1/factory/storage/', content_type=\n 'application/json', data={'name': 'Foo', 'key': {'name': 'id',\n 'type': 'string', 'max_length': 5}, 'fields': [{'name':\n 'fieldstring', 'max_length': 32, 'type': 'string', 'db_index': \n True}, {'name': 'fieldint', 'type': 'integer', 'db_index': True\n }, {'name': 'fieldstring2', 'max_length': 16, 'type': 'string'},\n {'name': 'fieldtext', 'type': 'text'}, {'name': 'fieldint2',\n 'type': 'integer', 'db_index': True, 'default': 1}, {'name':\n 'fieldlong', 'type': 'long', 'db_index': True, 'default': 2}, {\n 'name': 'fieldstring3', 'max_length': 16, 'type': 'string',\n 'default': 'S', 'db_index': True}, {'name': 'fieldtext2',\n 'type': 'text', 'default': 'T'}]})\n <function token>\n\n def test_positive_flow(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertTrue('id' in content)\n self.assertTrue('fieldstring' in content)\n self.assertTrue('fieldint' in content)\n self.assertTrue('fieldstring2' in content)\n self.assertTrue('fieldtext' in content)\n self.assertTrue('version' in content)\n self.assertTrue('created_at' in content)\n self.assertTrue('updated_at' in content)\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n self.update_storage_foo()\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['fieldint2'], 1)\n self.assertEqual(got_content['fieldlong'], 2)\n self.assertEqual(got_content['fieldstring3'], 'S')\n self.assertEqual(got_content['fieldtext2'], 'T')\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n r = c.get('/api/v1/storage/Foo/?fieldlong=2')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 1)\n r = c.get('/api/v1/storage/Foo/?fieldlong=0')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 0)\n\n def test_version_locking(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 1)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 2)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 2})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 3)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 2})\n self.assertEqual(r.status_code, 409)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo2', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 10})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 10)\n", "<import token>\n\n\nclass StorageAPITestCase(TestCase):\n\n def is_field_in_details(self, field, content):\n return field in json.loads(content)['detail']\n\n def create_storage_foo(self):\n c = Client()\n r = c.post('/api/v1/factory/storage/', content_type=\n 'application/json', data={'name': 'Foo', 'key': {'name': 'id',\n 'type': 'string', 'max_length': 5}, 'fields': [{'name':\n 'fieldstring', 'max_length': 32, 'type': 'string', 'db_index': \n True}, {'name': 'fieldint', 'type': 'integer', 'db_index': True\n }, {'name': 'fieldstring2', 'max_length': 16, 'type': 'string'},\n {'name': 'fieldtext', 'type': 'text'}]})\n <function token>\n <function token>\n\n def test_positive_flow(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertTrue('id' in content)\n self.assertTrue('fieldstring' in content)\n self.assertTrue('fieldint' in content)\n self.assertTrue('fieldstring2' in content)\n self.assertTrue('fieldtext' in content)\n self.assertTrue('version' in content)\n self.assertTrue('created_at' in content)\n self.assertTrue('updated_at' in content)\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n self.update_storage_foo()\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['fieldint2'], 1)\n self.assertEqual(got_content['fieldlong'], 2)\n self.assertEqual(got_content['fieldstring3'], 'S')\n self.assertEqual(got_content['fieldtext2'], 'T')\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n r = c.get('/api/v1/storage/Foo/?fieldlong=2')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 1)\n r = c.get('/api/v1/storage/Foo/?fieldlong=0')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 0)\n\n def test_version_locking(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 1)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 2)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 2})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 3)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 2})\n self.assertEqual(r.status_code, 409)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo2', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 10})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 10)\n", "<import token>\n\n\nclass StorageAPITestCase(TestCase):\n <function token>\n\n def create_storage_foo(self):\n c = Client()\n r = c.post('/api/v1/factory/storage/', content_type=\n 'application/json', data={'name': 'Foo', 'key': {'name': 'id',\n 'type': 'string', 'max_length': 5}, 'fields': [{'name':\n 'fieldstring', 'max_length': 32, 'type': 'string', 'db_index': \n True}, {'name': 'fieldint', 'type': 'integer', 'db_index': True\n }, {'name': 'fieldstring2', 'max_length': 16, 'type': 'string'},\n {'name': 'fieldtext', 'type': 'text'}]})\n <function token>\n <function token>\n\n def test_positive_flow(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertTrue('id' in content)\n self.assertTrue('fieldstring' in content)\n self.assertTrue('fieldint' in content)\n self.assertTrue('fieldstring2' in content)\n self.assertTrue('fieldtext' in content)\n self.assertTrue('version' in content)\n self.assertTrue('created_at' in content)\n self.assertTrue('updated_at' in content)\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n self.update_storage_foo()\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['fieldint2'], 1)\n self.assertEqual(got_content['fieldlong'], 2)\n self.assertEqual(got_content['fieldstring3'], 'S')\n self.assertEqual(got_content['fieldtext2'], 'T')\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n r = c.get('/api/v1/storage/Foo/?fieldlong=2')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 1)\n r = c.get('/api/v1/storage/Foo/?fieldlong=0')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 0)\n\n def test_version_locking(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 1)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 2)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 2})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 3)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 2})\n self.assertEqual(r.status_code, 409)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo2', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 10})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 10)\n", "<import token>\n\n\nclass StorageAPITestCase(TestCase):\n <function token>\n <function token>\n <function token>\n <function token>\n\n def test_positive_flow(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertTrue('id' in content)\n self.assertTrue('fieldstring' in content)\n self.assertTrue('fieldint' in content)\n self.assertTrue('fieldstring2' in content)\n self.assertTrue('fieldtext' in content)\n self.assertTrue('version' in content)\n self.assertTrue('created_at' in content)\n self.assertTrue('updated_at' in content)\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n self.update_storage_foo()\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['fieldint2'], 1)\n self.assertEqual(got_content['fieldlong'], 2)\n self.assertEqual(got_content['fieldstring3'], 'S')\n self.assertEqual(got_content['fieldtext2'], 'T')\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n r = c.get('/api/v1/storage/Foo/?fieldlong=2')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 1)\n r = c.get('/api/v1/storage/Foo/?fieldlong=0')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 0)\n\n def test_version_locking(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 1)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 2)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 2})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 3)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 2})\n self.assertEqual(r.status_code, 409)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo2', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage', 'version': 10})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertEqual(content['version'], 10)\n", "<import token>\n\n\nclass StorageAPITestCase(TestCase):\n <function token>\n <function token>\n <function token>\n <function token>\n\n def test_positive_flow(self):\n self.create_storage_foo()\n c = Client()\n r = c.get('/api/v1/storage/Foo/')\n self.assertEqual(r.status_code, 200)\n r = c.post('/api/v1/storage/Foo/', content_type='application/json',\n data={'id': 'foo1', 'fieldstring': 'f1', 'fieldint': 2,\n 'fieldstring2': 'fs3', 'fieldtext':\n 'SELECT * from factory_storage'})\n self.assertEqual(r.status_code, 201)\n content = json.loads(r.content)\n self.assertTrue('id' in content)\n self.assertTrue('fieldstring' in content)\n self.assertTrue('fieldint' in content)\n self.assertTrue('fieldstring2' in content)\n self.assertTrue('fieldtext' in content)\n self.assertTrue('version' in content)\n self.assertTrue('created_at' in content)\n self.assertTrue('updated_at' in content)\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n self.update_storage_foo()\n r = c.get('/api/v1/storage/Foo/foo1/')\n self.assertEqual(r.status_code, 200)\n got_content = json.loads(r.content)\n self.assertEqual(got_content['id'], content['id'])\n self.assertEqual(got_content['fieldstring'], content['fieldstring'])\n self.assertEqual(got_content['fieldint'], content['fieldint'])\n self.assertEqual(got_content['fieldstring2'], content['fieldstring2'])\n self.assertEqual(got_content['fieldtext'], content['fieldtext'])\n self.assertEqual(got_content['fieldint2'], 1)\n self.assertEqual(got_content['fieldlong'], 2)\n self.assertEqual(got_content['fieldstring3'], 'S')\n self.assertEqual(got_content['fieldtext2'], 'T')\n self.assertEqual(got_content['version'], content['version'])\n self.assertEqual(got_content['created_at'], content['created_at'])\n self.assertEqual(got_content['updated_at'], content['updated_at'])\n r = c.get('/api/v1/storage/Foo/?fieldlong=2')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 1)\n r = c.get('/api/v1/storage/Foo/?fieldlong=0')\n self.assertEqual(r.status_code, 200)\n content = json.loads(r.content)\n self.assertEqual(content['count'], 0)\n <function token>\n", "<import token>\n\n\nclass StorageAPITestCase(TestCase):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<class token>\n" ]
false
98,967
a1f59147612126eb6b010cc84e04b9fdbdb19773
from django.db import models from django.contrib.auth.models import User from django.db.models.deletion import CASCADE from datetime import datetime # Create your models here. class Assignment(models.Model): created_by = models.OneToOneField(User, on_delete=CASCADE) name = models.CharField(max_length=50, default='') description = models.CharField(max_length=1000, default='') deadline = models.DateTimeField(default=datetime.now, blank=True) max_credits = models.IntegerField(default=0) def __str__(self): return f'{self.name}'
[ "from django.db import models\nfrom django.contrib.auth.models import User\nfrom django.db.models.deletion import CASCADE\nfrom datetime import datetime\n\n\n# Create your models here.\nclass Assignment(models.Model):\n created_by = models.OneToOneField(User, on_delete=CASCADE)\n name = models.CharField(max_length=50, default='')\n description = models.CharField(max_length=1000, default='')\n deadline = models.DateTimeField(default=datetime.now, blank=True)\n max_credits = models.IntegerField(default=0)\n\n def __str__(self):\n return f'{self.name}'", "from django.db import models\nfrom django.contrib.auth.models import User\nfrom django.db.models.deletion import CASCADE\nfrom datetime import datetime\n\n\nclass Assignment(models.Model):\n created_by = models.OneToOneField(User, on_delete=CASCADE)\n name = models.CharField(max_length=50, default='')\n description = models.CharField(max_length=1000, default='')\n deadline = models.DateTimeField(default=datetime.now, blank=True)\n max_credits = models.IntegerField(default=0)\n\n def __str__(self):\n return f'{self.name}'\n", "<import token>\n\n\nclass Assignment(models.Model):\n created_by = models.OneToOneField(User, on_delete=CASCADE)\n name = models.CharField(max_length=50, default='')\n description = models.CharField(max_length=1000, default='')\n deadline = models.DateTimeField(default=datetime.now, blank=True)\n max_credits = models.IntegerField(default=0)\n\n def __str__(self):\n return f'{self.name}'\n", "<import token>\n\n\nclass Assignment(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __str__(self):\n return f'{self.name}'\n", "<import token>\n\n\nclass Assignment(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n", "<import token>\n<class token>\n" ]
false
98,968
069fb9c70bd9e375458712dd48102b680b507565
#Dataset - https://drive.google.com/open?id=1N4bflrjMX2FTWbCkQ2BFhMzvKDwcWDw0 #Importing Libraries import numpy as np import pandas as pd #Importin the dataset dataset = pd.read_csv('creditcard.csv') dataset.head() dataset.isnull().sum() # Feature Scaling from sklearn.preprocessing import StandardScaler sc = StandardScaler() dataset['Amount'] = sc.fit_transform(dataset['Amount'].reshape(-1, 1)) dataset = dataset.drop(['Time'],axis=1) #Splitting into X and Y X = dataset.iloc[:, 0:29].values y = dataset.iloc[:, 29:].values # Number of data points in the minority class number_records_fraud = len(dataset[dataset.Class == 1]) fraud_indices = np.array(dataset[dataset.Class == 1].index) # Picking the indices of the normal classes normal_indices = dataset[dataset.Class == 0].index # Out of the indices we picked, randomly select "x" number (number_records_fraud) random_normal_indices = np.random.choice(normal_indices, number_records_fraud, replace = False) random_normal_indices = np.array(random_normal_indices) # Appending the 2 indices under_sample_indices = np.concatenate([fraud_indices,random_normal_indices]) #Undersampled dataset undersampled_data = dataset.iloc[under_sample_indices,:] #Splitting into Undersampled X and Y X_undersampled = undersampled_data.iloc[:, 0:29].values y_undersampled = undersampled_data.iloc[:, 29:].values #Splitting data into Training set and Test set from sklearn.cross_validation import train_test_split X_train, X_test, Y_train, Y_test = train_test_split(X_undersampled, y_undersampled, test_size = 0.2, random_state = 0) #Using Gaussian Naive Bayes Classifier from sklearn.naive_bayes import GaussianNB gaussian = GaussianNB() gaussian.fit(X_train, Y_train) #Predict Output gauss_pred = gaussian.predict(X_test) #Using Logistic Regression from sklearn.linear_model import LogisticRegression reg = LogisticRegression() reg.fit(X_train, Y_train) #Predict output regression_pred = reg.predict(X_test) #Using K Nearest Neighbors from sklearn.neighbors import KNeighborsClassifier k_near = KNeighborsClassifier() k_near.fit(X_train, Y_train) #Predict output k_near_pred = k_near.predict(X_test) #Using Decision Tree Classifier from sklearn.tree import DecisionTreeClassifier dec_tree = DecisionTreeClassifier() dec_tree.fit(X_train, Y_train) #Predict output dec_tree_pred = dec_tree.predict(X_test) # Fitting SVC to the dataset from sklearn.svm import SVC regressor = SVC() regressor.fit(X_train, Y_train) # Predict output svc_pred = regressor.predict(X_test) #Confusion matrix from sklearn.metrics import confusion_matrix cm = confusion_matrix(Y_test, dec_tree_pred)
[ "#Dataset - https://drive.google.com/open?id=1N4bflrjMX2FTWbCkQ2BFhMzvKDwcWDw0\r\n\r\n#Importing Libraries\r\nimport numpy as np\r\nimport pandas as pd\r\n\r\n#Importin the dataset\r\ndataset = pd.read_csv('creditcard.csv')\r\ndataset.head()\r\ndataset.isnull().sum()\r\n\r\n# Feature Scaling\r\nfrom sklearn.preprocessing import StandardScaler\r\nsc = StandardScaler()\r\ndataset['Amount'] = sc.fit_transform(dataset['Amount'].reshape(-1, 1))\r\ndataset = dataset.drop(['Time'],axis=1)\r\n\r\n#Splitting into X and Y\r\nX = dataset.iloc[:, 0:29].values\r\ny = dataset.iloc[:, 29:].values\r\n\r\n# Number of data points in the minority class\r\nnumber_records_fraud = len(dataset[dataset.Class == 1])\r\nfraud_indices = np.array(dataset[dataset.Class == 1].index)\r\n\r\n# Picking the indices of the normal classes\r\nnormal_indices = dataset[dataset.Class == 0].index\r\n\r\n# Out of the indices we picked, randomly select \"x\" number (number_records_fraud)\r\nrandom_normal_indices = np.random.choice(normal_indices, number_records_fraud, replace = False)\r\nrandom_normal_indices = np.array(random_normal_indices)\r\n\r\n# Appending the 2 indices\r\nunder_sample_indices = np.concatenate([fraud_indices,random_normal_indices])\r\n\r\n#Undersampled dataset\r\nundersampled_data = dataset.iloc[under_sample_indices,:]\r\n\r\n#Splitting into Undersampled X and Y\r\nX_undersampled = undersampled_data.iloc[:, 0:29].values\r\ny_undersampled = undersampled_data.iloc[:, 29:].values\r\n\r\n#Splitting data into Training set and Test set\r\nfrom sklearn.cross_validation import train_test_split\r\nX_train, X_test, Y_train, Y_test = train_test_split(X_undersampled, y_undersampled, test_size = 0.2, random_state = 0)\r\n\r\n#Using Gaussian Naive Bayes Classifier\r\nfrom sklearn.naive_bayes import GaussianNB\r\ngaussian = GaussianNB()\r\ngaussian.fit(X_train, Y_train)\r\n#Predict Output\r\ngauss_pred = gaussian.predict(X_test)\r\n\r\n#Using Logistic Regression\r\nfrom sklearn.linear_model import LogisticRegression\r\nreg = LogisticRegression()\r\nreg.fit(X_train, Y_train)\r\n#Predict output\r\nregression_pred = reg.predict(X_test)\r\n\r\n#Using K Nearest Neighbors\r\nfrom sklearn.neighbors import KNeighborsClassifier\r\nk_near = KNeighborsClassifier()\r\nk_near.fit(X_train, Y_train)\r\n#Predict output\r\nk_near_pred = k_near.predict(X_test)\r\n\r\n#Using Decision Tree Classifier\r\nfrom sklearn.tree import DecisionTreeClassifier\r\ndec_tree = DecisionTreeClassifier()\r\ndec_tree.fit(X_train, Y_train)\r\n#Predict output\r\ndec_tree_pred = dec_tree.predict(X_test)\r\n\r\n# Fitting SVC to the dataset\r\nfrom sklearn.svm import SVC\r\nregressor = SVC()\r\nregressor.fit(X_train, Y_train)\r\n# Predict output\r\nsvc_pred = regressor.predict(X_test)\r\n\r\n#Confusion matrix\r\nfrom sklearn.metrics import confusion_matrix\r\ncm = confusion_matrix(Y_test, dec_tree_pred)\r\n", "import numpy as np\nimport pandas as pd\ndataset = pd.read_csv('creditcard.csv')\ndataset.head()\ndataset.isnull().sum()\nfrom sklearn.preprocessing import StandardScaler\nsc = StandardScaler()\ndataset['Amount'] = sc.fit_transform(dataset['Amount'].reshape(-1, 1))\ndataset = dataset.drop(['Time'], axis=1)\nX = dataset.iloc[:, 0:29].values\ny = dataset.iloc[:, 29:].values\nnumber_records_fraud = len(dataset[dataset.Class == 1])\nfraud_indices = np.array(dataset[dataset.Class == 1].index)\nnormal_indices = dataset[dataset.Class == 0].index\nrandom_normal_indices = np.random.choice(normal_indices,\n number_records_fraud, replace=False)\nrandom_normal_indices = np.array(random_normal_indices)\nunder_sample_indices = np.concatenate([fraud_indices, random_normal_indices])\nundersampled_data = dataset.iloc[under_sample_indices, :]\nX_undersampled = undersampled_data.iloc[:, 0:29].values\ny_undersampled = undersampled_data.iloc[:, 29:].values\nfrom sklearn.cross_validation import train_test_split\nX_train, X_test, Y_train, Y_test = train_test_split(X_undersampled,\n y_undersampled, test_size=0.2, random_state=0)\nfrom sklearn.naive_bayes import GaussianNB\ngaussian = GaussianNB()\ngaussian.fit(X_train, Y_train)\ngauss_pred = gaussian.predict(X_test)\nfrom sklearn.linear_model import LogisticRegression\nreg = LogisticRegression()\nreg.fit(X_train, Y_train)\nregression_pred = reg.predict(X_test)\nfrom sklearn.neighbors import KNeighborsClassifier\nk_near = KNeighborsClassifier()\nk_near.fit(X_train, Y_train)\nk_near_pred = k_near.predict(X_test)\nfrom sklearn.tree import DecisionTreeClassifier\ndec_tree = DecisionTreeClassifier()\ndec_tree.fit(X_train, Y_train)\ndec_tree_pred = dec_tree.predict(X_test)\nfrom sklearn.svm import SVC\nregressor = SVC()\nregressor.fit(X_train, Y_train)\nsvc_pred = regressor.predict(X_test)\nfrom sklearn.metrics import confusion_matrix\ncm = confusion_matrix(Y_test, dec_tree_pred)\n", "<import token>\ndataset = pd.read_csv('creditcard.csv')\ndataset.head()\ndataset.isnull().sum()\n<import token>\nsc = StandardScaler()\ndataset['Amount'] = sc.fit_transform(dataset['Amount'].reshape(-1, 1))\ndataset = dataset.drop(['Time'], axis=1)\nX = dataset.iloc[:, 0:29].values\ny = dataset.iloc[:, 29:].values\nnumber_records_fraud = len(dataset[dataset.Class == 1])\nfraud_indices = np.array(dataset[dataset.Class == 1].index)\nnormal_indices = dataset[dataset.Class == 0].index\nrandom_normal_indices = np.random.choice(normal_indices,\n number_records_fraud, replace=False)\nrandom_normal_indices = np.array(random_normal_indices)\nunder_sample_indices = np.concatenate([fraud_indices, random_normal_indices])\nundersampled_data = dataset.iloc[under_sample_indices, :]\nX_undersampled = undersampled_data.iloc[:, 0:29].values\ny_undersampled = undersampled_data.iloc[:, 29:].values\n<import token>\nX_train, X_test, Y_train, Y_test = train_test_split(X_undersampled,\n y_undersampled, test_size=0.2, random_state=0)\n<import token>\ngaussian = GaussianNB()\ngaussian.fit(X_train, Y_train)\ngauss_pred = gaussian.predict(X_test)\n<import token>\nreg = LogisticRegression()\nreg.fit(X_train, Y_train)\nregression_pred = reg.predict(X_test)\n<import token>\nk_near = KNeighborsClassifier()\nk_near.fit(X_train, Y_train)\nk_near_pred = k_near.predict(X_test)\n<import token>\ndec_tree = DecisionTreeClassifier()\ndec_tree.fit(X_train, Y_train)\ndec_tree_pred = dec_tree.predict(X_test)\n<import token>\nregressor = SVC()\nregressor.fit(X_train, Y_train)\nsvc_pred = regressor.predict(X_test)\n<import token>\ncm = confusion_matrix(Y_test, dec_tree_pred)\n", "<import token>\n<assignment token>\ndataset.head()\ndataset.isnull().sum()\n<import token>\n<assignment token>\n<import token>\n<assignment token>\n<import token>\n<assignment token>\ngaussian.fit(X_train, Y_train)\n<assignment token>\n<import token>\n<assignment token>\nreg.fit(X_train, Y_train)\n<assignment token>\n<import token>\n<assignment token>\nk_near.fit(X_train, Y_train)\n<assignment token>\n<import token>\n<assignment token>\ndec_tree.fit(X_train, Y_train)\n<assignment token>\n<import token>\n<assignment token>\nregressor.fit(X_train, Y_train)\n<assignment token>\n<import token>\n<assignment token>\n", "<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<import token>\n<assignment token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<import token>\n<assignment token>\n" ]
false
98,969
1cfb7c8eb1b2bfc00c18d642d51e0c213997cb1a
""" 543. Diameter of Binary Tree Easy Given a binary tree, you need to compute the length of the diameter of the tree. The diameter of a binary tree is the length of the longest path between any two nodes in a tree. This path may or may not pass through the root. Example: Given a binary tree 1 / \ 2 3 / \ 4 5 Return 3, which is the length of the path [4,2,1,3] or [5,2,1,3]. Note: The length of path between two nodes is represented by the number of edges between them. """ #import sys #sys.path.insert(1, '../tree/') from binary_tree import TreeNode, print_tree, array_to_bt, array_to_bt_lc ############################################################################### """ Solution #1: O(n) time O(n) extra space for recursion stack """ def diameter_bt(root): def diameter(node): if not node: return 0 left = diameter(node.left) + 1 if node.left else 0 right = diameter(node.right) + 1 if node.right else 0 maxlen[0] = max(maxlen[0], left + right) return max(left, right) maxlen = [0] diameter(root) return maxlen[0] """ Solution: same as sol #1 but move the "+ 1" """ def diameter_bt2(root): def diameter(node): if not node: return 0 left = diameter(node.left) right = diameter(node.right) maxlen[0] = max(maxlen[0], left + right) return max(left, right) + 1 maxlen = [0] diameter(root) return maxlen[0] ############################################################################### if __name__ == "__main__": def test(root): print() print("#"*80) print_tree(root) diam = diameter_bt(root) diam2 = diameter_bt2(root) print(f"\ndiameter of BT (sol #1) = {diam}") print(f"diameter of BT (sol #2) = {diam2}") root = None test(root) root = TreeNode(1) test(root) root = TreeNode(1, TreeNode(2, TreeNode(3, TreeNode(4, TreeNode(5, ))))) test(root) arr = [5, 4,5, 1,1,None,5] nodes = array_to_bt(arr) root = nodes[0] test(root) arr = [1, 4,5, 4,4,None,5] nodes = array_to_bt(arr) root = nodes[0] test(root) arr = [5,4,5,4,4,5,3,4,4,None,None,None,4,None,None,4,None,None,4,None,4,4,None,None,4,4] root = array_to_bt_lc(arr) test(root) arr = [1, 2,3, 4,5] # LC example; answer = 3 root = array_to_bt_lc(arr) test(root)
[ "\"\"\"\n543. Diameter of Binary Tree\nEasy\n\nGiven a binary tree, you need to compute the length of the diameter of the tree. The diameter of a binary tree is the length of the longest path between any two nodes in a tree. This path may or may not pass through the root.\n\nExample:\nGiven a binary tree\n 1\n / \\\n 2 3\n / \\ \n 4 5 \nReturn 3, which is the length of the path [4,2,1,3] or [5,2,1,3].\n\nNote: The length of path between two nodes is represented by the number of edges between them.\n\"\"\"\n\n#import sys\n#sys.path.insert(1, '../tree/')\n\nfrom binary_tree import TreeNode, print_tree, array_to_bt, array_to_bt_lc\n\n###############################################################################\n\"\"\"\nSolution #1:\n\nO(n) time\nO(n) extra space for recursion stack\n\"\"\"\ndef diameter_bt(root):\n def diameter(node):\n if not node:\n return 0\n\n left = diameter(node.left) + 1 if node.left else 0\n right = diameter(node.right) + 1 if node.right else 0\n\n maxlen[0] = max(maxlen[0], left + right)\n\n return max(left, right)\n\n maxlen = [0]\n diameter(root)\n\n return maxlen[0]\n\n\"\"\"\nSolution: same as sol #1 but move the \"+ 1\"\n\"\"\"\ndef diameter_bt2(root):\n def diameter(node):\n if not node:\n return 0\n\n left = diameter(node.left)\n right = diameter(node.right)\n\n maxlen[0] = max(maxlen[0], left + right)\n\n return max(left, right) + 1\n\n maxlen = [0]\n diameter(root)\n\n return maxlen[0]\n\n###############################################################################\n\nif __name__ == \"__main__\":\n def test(root):\n print()\n print(\"#\"*80)\n print_tree(root)\n\n diam = diameter_bt(root)\n diam2 = diameter_bt2(root)\n\n print(f\"\\ndiameter of BT (sol #1) = {diam}\")\n print(f\"diameter of BT (sol #2) = {diam2}\")\n\n root = None\n test(root)\n \n root = TreeNode(1)\n test(root)\n\n root = TreeNode(1, TreeNode(2, TreeNode(3, TreeNode(4, TreeNode(5, )))))\n test(root)\n\n arr = [5, 4,5, 1,1,None,5] \n nodes = array_to_bt(arr)\n root = nodes[0]\n test(root)\n\n arr = [1, 4,5, 4,4,None,5] \n nodes = array_to_bt(arr)\n root = nodes[0]\n test(root)\n\n arr = [5,4,5,4,4,5,3,4,4,None,None,None,4,None,None,4,None,None,4,None,4,4,None,None,4,4]\n root = array_to_bt_lc(arr)\n test(root)\n\n arr = [1, 2,3, 4,5] # LC example; answer = 3\n root = array_to_bt_lc(arr)\n test(root)\n ", "<docstring token>\nfrom binary_tree import TreeNode, print_tree, array_to_bt, array_to_bt_lc\n<docstring token>\n\n\ndef diameter_bt(root):\n\n def diameter(node):\n if not node:\n return 0\n left = diameter(node.left) + 1 if node.left else 0\n right = diameter(node.right) + 1 if node.right else 0\n maxlen[0] = max(maxlen[0], left + right)\n return max(left, right)\n maxlen = [0]\n diameter(root)\n return maxlen[0]\n\n\n<docstring token>\n\n\ndef diameter_bt2(root):\n\n def diameter(node):\n if not node:\n return 0\n left = diameter(node.left)\n right = diameter(node.right)\n maxlen[0] = max(maxlen[0], left + right)\n return max(left, right) + 1\n maxlen = [0]\n diameter(root)\n return maxlen[0]\n\n\nif __name__ == '__main__':\n\n def test(root):\n print()\n print('#' * 80)\n print_tree(root)\n diam = diameter_bt(root)\n diam2 = diameter_bt2(root)\n print(f'\\ndiameter of BT (sol #1) = {diam}')\n print(f'diameter of BT (sol #2) = {diam2}')\n root = None\n test(root)\n root = TreeNode(1)\n test(root)\n root = TreeNode(1, TreeNode(2, TreeNode(3, TreeNode(4, TreeNode(5)))))\n test(root)\n arr = [5, 4, 5, 1, 1, None, 5]\n nodes = array_to_bt(arr)\n root = nodes[0]\n test(root)\n arr = [1, 4, 5, 4, 4, None, 5]\n nodes = array_to_bt(arr)\n root = nodes[0]\n test(root)\n arr = [5, 4, 5, 4, 4, 5, 3, 4, 4, None, None, None, 4, None, None, 4,\n None, None, 4, None, 4, 4, None, None, 4, 4]\n root = array_to_bt_lc(arr)\n test(root)\n arr = [1, 2, 3, 4, 5]\n root = array_to_bt_lc(arr)\n test(root)\n", "<docstring token>\n<import token>\n<docstring token>\n\n\ndef diameter_bt(root):\n\n def diameter(node):\n if not node:\n return 0\n left = diameter(node.left) + 1 if node.left else 0\n right = diameter(node.right) + 1 if node.right else 0\n maxlen[0] = max(maxlen[0], left + right)\n return max(left, right)\n maxlen = [0]\n diameter(root)\n return maxlen[0]\n\n\n<docstring token>\n\n\ndef diameter_bt2(root):\n\n def diameter(node):\n if not node:\n return 0\n left = diameter(node.left)\n right = diameter(node.right)\n maxlen[0] = max(maxlen[0], left + right)\n return max(left, right) + 1\n maxlen = [0]\n diameter(root)\n return maxlen[0]\n\n\nif __name__ == '__main__':\n\n def test(root):\n print()\n print('#' * 80)\n print_tree(root)\n diam = diameter_bt(root)\n diam2 = diameter_bt2(root)\n print(f'\\ndiameter of BT (sol #1) = {diam}')\n print(f'diameter of BT (sol #2) = {diam2}')\n root = None\n test(root)\n root = TreeNode(1)\n test(root)\n root = TreeNode(1, TreeNode(2, TreeNode(3, TreeNode(4, TreeNode(5)))))\n test(root)\n arr = [5, 4, 5, 1, 1, None, 5]\n nodes = array_to_bt(arr)\n root = nodes[0]\n test(root)\n arr = [1, 4, 5, 4, 4, None, 5]\n nodes = array_to_bt(arr)\n root = nodes[0]\n test(root)\n arr = [5, 4, 5, 4, 4, 5, 3, 4, 4, None, None, None, 4, None, None, 4,\n None, None, 4, None, 4, 4, None, None, 4, 4]\n root = array_to_bt_lc(arr)\n test(root)\n arr = [1, 2, 3, 4, 5]\n root = array_to_bt_lc(arr)\n test(root)\n", "<docstring token>\n<import token>\n<docstring token>\n\n\ndef diameter_bt(root):\n\n def diameter(node):\n if not node:\n return 0\n left = diameter(node.left) + 1 if node.left else 0\n right = diameter(node.right) + 1 if node.right else 0\n maxlen[0] = max(maxlen[0], left + right)\n return max(left, right)\n maxlen = [0]\n diameter(root)\n return maxlen[0]\n\n\n<docstring token>\n\n\ndef diameter_bt2(root):\n\n def diameter(node):\n if not node:\n return 0\n left = diameter(node.left)\n right = diameter(node.right)\n maxlen[0] = max(maxlen[0], left + right)\n return max(left, right) + 1\n maxlen = [0]\n diameter(root)\n return maxlen[0]\n\n\n<code token>\n", "<docstring token>\n<import token>\n<docstring token>\n\n\ndef diameter_bt(root):\n\n def diameter(node):\n if not node:\n return 0\n left = diameter(node.left) + 1 if node.left else 0\n right = diameter(node.right) + 1 if node.right else 0\n maxlen[0] = max(maxlen[0], left + right)\n return max(left, right)\n maxlen = [0]\n diameter(root)\n return maxlen[0]\n\n\n<docstring token>\n<function token>\n<code token>\n", "<docstring token>\n<import token>\n<docstring token>\n<function token>\n<docstring token>\n<function token>\n<code token>\n" ]
false
98,970
519706758a2a7cae9ccdee4a23fcef8cc764e9bc
import inspect import re import sys from six import iteritems from .exceptions import ( InheritanceException, InterruptedVersioningException, MissingDecoratorException) from .utils import CLASS_SUFFIX_RE, FUNCTION_SUFFIX_RE def validate_module_versioning(module_name): """ Function to validate the versioning of all members for a given module. Args: module_name (str): The name of the module. """ module = sys.modules[module_name] _validate_continuous_versioning(module) _validate_missing_versioned_tags(module) def validate_inheritance_for_class(cls): """ Function to validate if the provided class inherits from the previous version of the class. Args: cls (class): The class to validate inheritance for. Raises: InheritanceException: When inherited from the wrong version. """ # Skip all non versioned classes. if hasattr(cls, '__version__'): # Strip the name from the version suffix. class_suffix = re.compile('V\d+$') class_base_name = class_suffix.split(cls.__name__)[0] for base in cls.__bases__: # Inheriting from ProxyClass is not allowed. if base.__name__ == 'ProxyClass': message = 'Not allowed to inherit from `%s` without version!' % class_base_name raise InheritanceException(message) # Skip base classes that are not versioned. if hasattr(base, '__version__'): # Strip the name from the version suffix. base_base_name = class_suffix.split(base.__name__)[0] # If the inherited class has the same base name and # isn't the previous version of the provided class raise exception. if class_base_name == base_base_name: if base.__version__ != cls.__version__ - 1: message = ('`%s` with version `%s` is not allowed to inherit from ' 'version `%s`! Can only inherit from previous version' % ( class_base_name, cls.__version__, base.__version__, )) raise InheritanceException(message) def _validate_continuous_versioning(module): """ Validate if there are no gaps in the versioning of functions and classes for the given module. Args: module (module): The module to check for. Raises: InterruptedVersioningException: When there is a gap in the versioning of a function or class. """ version_table = getattr(module, '__version_table__', {}) # Loop all functions or classes with their given version mappings. for member_name, version_mapping in iteritems(version_table): # Get versions and sort them. versions = list(version_mapping['members']) versions.sort() # Check if there are gaps in the versions or if it does not start at 1. if versions != list(range(1, len(versions) + 1)): missing_versions = list(set(range(1, len(versions) + 1)) - set(versions)) error = ('Versions need to be consecutive and start at `1`, missing version `%s`' ' for `%s` in file `%s`' % ( missing_versions, member_name, module.__file__, )) raise InterruptedVersioningException(error) def _validate_missing_versioned_tags(module): """ Function to validate if there any version tags missing which would lead to overriding versioned members and very dangerous behaviour! Args: module (module): The module to check for. Raises: MissingDecoratorException: When there is a decorator missing on a function or class. """ version_table = getattr(module, '__version_table__', {}) # Get all functions from the module. functions = inspect.getmembers(module, inspect.isfunction) functions_dict = dict(functions) function_names = list(functions_dict) # Get all classes from the module. classes = inspect.getmembers(module, inspect.isclass) classes_dict = dict(classes) class_names = list(classes_dict) for name in version_table.keys(): msg = 'Both a versioned and unversioned `%s` exist in file `%s`!' % (name, module.__file__) class_pattern = re.compile('^%s%s' % (name, CLASS_SUFFIX_RE)) func_pattern = re.compile('^%s%s' % (name, FUNCTION_SUFFIX_RE)) class_matches = [class_pattern.search(_name) for _name in class_names if class_pattern.search(_name)] function_matches = [func_pattern.search(_name) for _name in function_names if func_pattern.search(_name)] # Check 1: @versioned() decorator on a function. # Check for duplicate names in classes or function names. Unversioned # functions appear in the funtions list whilst versioned appear in # the classes list. If the same name exists in both lists there's # a unversioned function. if (name in class_names or class_matches) and (name in function_names or function_matches): raise MissingDecoratorException(msg) # Check 2: @versioned(NUMBER) decorator on a function. # Versioned members are always a class due to the return of a # ProxyClass. If the name is in the version table there is a # decorated member. This filters decorated functions. If a function # is decorated and not decorated it shows in the functions list but # no longer in the classes list. if name not in class_names and name in function_names: raise MissingDecoratorException(msg) # Check 3: @versioned() or @versioned(NUMBER) decorator on a class. if name in class_names or class_matches: names_to_check = [] # In case of suffix classes find all matching suffixed classes # to check. if class_matches: for match in class_matches: names_to_check.append(match.group()) else: names_to_check.append(name) # Check if all the listed classes are versioned. for key in names_to_check: if not getattr(classes_dict[key], '_is_versioned', False): raise MissingDecoratorException(msg)
[ "import inspect\nimport re\nimport sys\n\nfrom six import iteritems\n\nfrom .exceptions import (\n InheritanceException, InterruptedVersioningException, MissingDecoratorException)\nfrom .utils import CLASS_SUFFIX_RE, FUNCTION_SUFFIX_RE\n\n\ndef validate_module_versioning(module_name):\n \"\"\"\n Function to validate the versioning of all members for a given module.\n\n Args:\n module_name (str): The name of the module.\n \"\"\"\n module = sys.modules[module_name]\n\n _validate_continuous_versioning(module)\n _validate_missing_versioned_tags(module)\n\n\ndef validate_inheritance_for_class(cls):\n \"\"\"\n Function to validate if the provided class inherits from the\n previous version of the class.\n\n Args:\n cls (class): The class to validate inheritance for.\n\n Raises:\n InheritanceException: When inherited from the wrong version.\n \"\"\"\n # Skip all non versioned classes.\n if hasattr(cls, '__version__'):\n # Strip the name from the version suffix.\n class_suffix = re.compile('V\\d+$')\n class_base_name = class_suffix.split(cls.__name__)[0]\n\n for base in cls.__bases__:\n # Inheriting from ProxyClass is not allowed.\n if base.__name__ == 'ProxyClass':\n message = 'Not allowed to inherit from `%s` without version!' % class_base_name\n raise InheritanceException(message)\n\n # Skip base classes that are not versioned.\n if hasattr(base, '__version__'):\n # Strip the name from the version suffix.\n base_base_name = class_suffix.split(base.__name__)[0]\n\n # If the inherited class has the same base name and\n # isn't the previous version of the provided class raise exception.\n if class_base_name == base_base_name:\n if base.__version__ != cls.__version__ - 1:\n message = ('`%s` with version `%s` is not allowed to inherit from '\n 'version `%s`! Can only inherit from previous version' % (\n class_base_name,\n cls.__version__,\n base.__version__,\n ))\n raise InheritanceException(message)\n\n\ndef _validate_continuous_versioning(module):\n \"\"\"\n Validate if there are no gaps in the versioning of functions and\n classes for the given module.\n\n Args:\n module (module): The module to check for.\n\n Raises:\n InterruptedVersioningException: When there is a gap in the versioning\n of a function or class.\n \"\"\"\n version_table = getattr(module, '__version_table__', {})\n\n # Loop all functions or classes with their given version mappings.\n for member_name, version_mapping in iteritems(version_table):\n # Get versions and sort them.\n versions = list(version_mapping['members'])\n versions.sort()\n\n # Check if there are gaps in the versions or if it does not start at 1.\n if versions != list(range(1, len(versions) + 1)):\n missing_versions = list(set(range(1, len(versions) + 1)) - set(versions))\n error = ('Versions need to be consecutive and start at `1`, missing version `%s`'\n ' for `%s` in file `%s`' % (\n missing_versions,\n member_name,\n module.__file__,\n ))\n raise InterruptedVersioningException(error)\n\n\ndef _validate_missing_versioned_tags(module):\n \"\"\"\n Function to validate if there any version tags missing which would lead\n to overriding versioned members and very dangerous behaviour!\n\n Args:\n module (module): The module to check for.\n\n Raises:\n MissingDecoratorException: When there is a decorator missing on\n a function or class.\n \"\"\"\n version_table = getattr(module, '__version_table__', {})\n\n # Get all functions from the module.\n functions = inspect.getmembers(module, inspect.isfunction)\n functions_dict = dict(functions)\n function_names = list(functions_dict)\n\n # Get all classes from the module.\n classes = inspect.getmembers(module, inspect.isclass)\n classes_dict = dict(classes)\n class_names = list(classes_dict)\n\n for name in version_table.keys():\n msg = 'Both a versioned and unversioned `%s` exist in file `%s`!' % (name, module.__file__)\n class_pattern = re.compile('^%s%s' % (name, CLASS_SUFFIX_RE))\n func_pattern = re.compile('^%s%s' % (name, FUNCTION_SUFFIX_RE))\n class_matches = [class_pattern.search(_name) for _name in class_names if class_pattern.search(_name)]\n function_matches = [func_pattern.search(_name) for _name in function_names if func_pattern.search(_name)]\n\n # Check 1: @versioned() decorator on a function.\n # Check for duplicate names in classes or function names. Unversioned\n # functions appear in the funtions list whilst versioned appear in\n # the classes list. If the same name exists in both lists there's\n # a unversioned function.\n if (name in class_names or class_matches) and (name in function_names or function_matches):\n raise MissingDecoratorException(msg)\n\n # Check 2: @versioned(NUMBER) decorator on a function.\n # Versioned members are always a class due to the return of a\n # ProxyClass. If the name is in the version table there is a\n # decorated member. This filters decorated functions. If a function\n # is decorated and not decorated it shows in the functions list but\n # no longer in the classes list.\n if name not in class_names and name in function_names:\n raise MissingDecoratorException(msg)\n\n # Check 3: @versioned() or @versioned(NUMBER) decorator on a class.\n if name in class_names or class_matches:\n names_to_check = []\n # In case of suffix classes find all matching suffixed classes\n # to check.\n if class_matches:\n for match in class_matches:\n names_to_check.append(match.group())\n else:\n names_to_check.append(name)\n\n # Check if all the listed classes are versioned.\n for key in names_to_check:\n if not getattr(classes_dict[key], '_is_versioned', False):\n raise MissingDecoratorException(msg)\n", "import inspect\nimport re\nimport sys\nfrom six import iteritems\nfrom .exceptions import InheritanceException, InterruptedVersioningException, MissingDecoratorException\nfrom .utils import CLASS_SUFFIX_RE, FUNCTION_SUFFIX_RE\n\n\ndef validate_module_versioning(module_name):\n \"\"\"\n Function to validate the versioning of all members for a given module.\n\n Args:\n module_name (str): The name of the module.\n \"\"\"\n module = sys.modules[module_name]\n _validate_continuous_versioning(module)\n _validate_missing_versioned_tags(module)\n\n\ndef validate_inheritance_for_class(cls):\n \"\"\"\n Function to validate if the provided class inherits from the\n previous version of the class.\n\n Args:\n cls (class): The class to validate inheritance for.\n\n Raises:\n InheritanceException: When inherited from the wrong version.\n \"\"\"\n if hasattr(cls, '__version__'):\n class_suffix = re.compile('V\\\\d+$')\n class_base_name = class_suffix.split(cls.__name__)[0]\n for base in cls.__bases__:\n if base.__name__ == 'ProxyClass':\n message = (\n 'Not allowed to inherit from `%s` without version!' %\n class_base_name)\n raise InheritanceException(message)\n if hasattr(base, '__version__'):\n base_base_name = class_suffix.split(base.__name__)[0]\n if class_base_name == base_base_name:\n if base.__version__ != cls.__version__ - 1:\n message = (\n '`%s` with version `%s` is not allowed to inherit from version `%s`! Can only inherit from previous version'\n % (class_base_name, cls.__version__, base.\n __version__))\n raise InheritanceException(message)\n\n\ndef _validate_continuous_versioning(module):\n \"\"\"\n Validate if there are no gaps in the versioning of functions and\n classes for the given module.\n\n Args:\n module (module): The module to check for.\n\n Raises:\n InterruptedVersioningException: When there is a gap in the versioning\n of a function or class.\n \"\"\"\n version_table = getattr(module, '__version_table__', {})\n for member_name, version_mapping in iteritems(version_table):\n versions = list(version_mapping['members'])\n versions.sort()\n if versions != list(range(1, len(versions) + 1)):\n missing_versions = list(set(range(1, len(versions) + 1)) - set(\n versions))\n error = (\n 'Versions need to be consecutive and start at `1`, missing version `%s` for `%s` in file `%s`'\n % (missing_versions, member_name, module.__file__))\n raise InterruptedVersioningException(error)\n\n\ndef _validate_missing_versioned_tags(module):\n \"\"\"\n Function to validate if there any version tags missing which would lead\n to overriding versioned members and very dangerous behaviour!\n\n Args:\n module (module): The module to check for.\n\n Raises:\n MissingDecoratorException: When there is a decorator missing on\n a function or class.\n \"\"\"\n version_table = getattr(module, '__version_table__', {})\n functions = inspect.getmembers(module, inspect.isfunction)\n functions_dict = dict(functions)\n function_names = list(functions_dict)\n classes = inspect.getmembers(module, inspect.isclass)\n classes_dict = dict(classes)\n class_names = list(classes_dict)\n for name in version_table.keys():\n msg = 'Both a versioned and unversioned `%s` exist in file `%s`!' % (\n name, module.__file__)\n class_pattern = re.compile('^%s%s' % (name, CLASS_SUFFIX_RE))\n func_pattern = re.compile('^%s%s' % (name, FUNCTION_SUFFIX_RE))\n class_matches = [class_pattern.search(_name) for _name in\n class_names if class_pattern.search(_name)]\n function_matches = [func_pattern.search(_name) for _name in\n function_names if func_pattern.search(_name)]\n if (name in class_names or class_matches) and (name in\n function_names or function_matches):\n raise MissingDecoratorException(msg)\n if name not in class_names and name in function_names:\n raise MissingDecoratorException(msg)\n if name in class_names or class_matches:\n names_to_check = []\n if class_matches:\n for match in class_matches:\n names_to_check.append(match.group())\n else:\n names_to_check.append(name)\n for key in names_to_check:\n if not getattr(classes_dict[key], '_is_versioned', False):\n raise MissingDecoratorException(msg)\n", "<import token>\n\n\ndef validate_module_versioning(module_name):\n \"\"\"\n Function to validate the versioning of all members for a given module.\n\n Args:\n module_name (str): The name of the module.\n \"\"\"\n module = sys.modules[module_name]\n _validate_continuous_versioning(module)\n _validate_missing_versioned_tags(module)\n\n\ndef validate_inheritance_for_class(cls):\n \"\"\"\n Function to validate if the provided class inherits from the\n previous version of the class.\n\n Args:\n cls (class): The class to validate inheritance for.\n\n Raises:\n InheritanceException: When inherited from the wrong version.\n \"\"\"\n if hasattr(cls, '__version__'):\n class_suffix = re.compile('V\\\\d+$')\n class_base_name = class_suffix.split(cls.__name__)[0]\n for base in cls.__bases__:\n if base.__name__ == 'ProxyClass':\n message = (\n 'Not allowed to inherit from `%s` without version!' %\n class_base_name)\n raise InheritanceException(message)\n if hasattr(base, '__version__'):\n base_base_name = class_suffix.split(base.__name__)[0]\n if class_base_name == base_base_name:\n if base.__version__ != cls.__version__ - 1:\n message = (\n '`%s` with version `%s` is not allowed to inherit from version `%s`! Can only inherit from previous version'\n % (class_base_name, cls.__version__, base.\n __version__))\n raise InheritanceException(message)\n\n\ndef _validate_continuous_versioning(module):\n \"\"\"\n Validate if there are no gaps in the versioning of functions and\n classes for the given module.\n\n Args:\n module (module): The module to check for.\n\n Raises:\n InterruptedVersioningException: When there is a gap in the versioning\n of a function or class.\n \"\"\"\n version_table = getattr(module, '__version_table__', {})\n for member_name, version_mapping in iteritems(version_table):\n versions = list(version_mapping['members'])\n versions.sort()\n if versions != list(range(1, len(versions) + 1)):\n missing_versions = list(set(range(1, len(versions) + 1)) - set(\n versions))\n error = (\n 'Versions need to be consecutive and start at `1`, missing version `%s` for `%s` in file `%s`'\n % (missing_versions, member_name, module.__file__))\n raise InterruptedVersioningException(error)\n\n\ndef _validate_missing_versioned_tags(module):\n \"\"\"\n Function to validate if there any version tags missing which would lead\n to overriding versioned members and very dangerous behaviour!\n\n Args:\n module (module): The module to check for.\n\n Raises:\n MissingDecoratorException: When there is a decorator missing on\n a function or class.\n \"\"\"\n version_table = getattr(module, '__version_table__', {})\n functions = inspect.getmembers(module, inspect.isfunction)\n functions_dict = dict(functions)\n function_names = list(functions_dict)\n classes = inspect.getmembers(module, inspect.isclass)\n classes_dict = dict(classes)\n class_names = list(classes_dict)\n for name in version_table.keys():\n msg = 'Both a versioned and unversioned `%s` exist in file `%s`!' % (\n name, module.__file__)\n class_pattern = re.compile('^%s%s' % (name, CLASS_SUFFIX_RE))\n func_pattern = re.compile('^%s%s' % (name, FUNCTION_SUFFIX_RE))\n class_matches = [class_pattern.search(_name) for _name in\n class_names if class_pattern.search(_name)]\n function_matches = [func_pattern.search(_name) for _name in\n function_names if func_pattern.search(_name)]\n if (name in class_names or class_matches) and (name in\n function_names or function_matches):\n raise MissingDecoratorException(msg)\n if name not in class_names and name in function_names:\n raise MissingDecoratorException(msg)\n if name in class_names or class_matches:\n names_to_check = []\n if class_matches:\n for match in class_matches:\n names_to_check.append(match.group())\n else:\n names_to_check.append(name)\n for key in names_to_check:\n if not getattr(classes_dict[key], '_is_versioned', False):\n raise MissingDecoratorException(msg)\n", "<import token>\n\n\ndef validate_module_versioning(module_name):\n \"\"\"\n Function to validate the versioning of all members for a given module.\n\n Args:\n module_name (str): The name of the module.\n \"\"\"\n module = sys.modules[module_name]\n _validate_continuous_versioning(module)\n _validate_missing_versioned_tags(module)\n\n\ndef validate_inheritance_for_class(cls):\n \"\"\"\n Function to validate if the provided class inherits from the\n previous version of the class.\n\n Args:\n cls (class): The class to validate inheritance for.\n\n Raises:\n InheritanceException: When inherited from the wrong version.\n \"\"\"\n if hasattr(cls, '__version__'):\n class_suffix = re.compile('V\\\\d+$')\n class_base_name = class_suffix.split(cls.__name__)[0]\n for base in cls.__bases__:\n if base.__name__ == 'ProxyClass':\n message = (\n 'Not allowed to inherit from `%s` without version!' %\n class_base_name)\n raise InheritanceException(message)\n if hasattr(base, '__version__'):\n base_base_name = class_suffix.split(base.__name__)[0]\n if class_base_name == base_base_name:\n if base.__version__ != cls.__version__ - 1:\n message = (\n '`%s` with version `%s` is not allowed to inherit from version `%s`! Can only inherit from previous version'\n % (class_base_name, cls.__version__, base.\n __version__))\n raise InheritanceException(message)\n\n\ndef _validate_continuous_versioning(module):\n \"\"\"\n Validate if there are no gaps in the versioning of functions and\n classes for the given module.\n\n Args:\n module (module): The module to check for.\n\n Raises:\n InterruptedVersioningException: When there is a gap in the versioning\n of a function or class.\n \"\"\"\n version_table = getattr(module, '__version_table__', {})\n for member_name, version_mapping in iteritems(version_table):\n versions = list(version_mapping['members'])\n versions.sort()\n if versions != list(range(1, len(versions) + 1)):\n missing_versions = list(set(range(1, len(versions) + 1)) - set(\n versions))\n error = (\n 'Versions need to be consecutive and start at `1`, missing version `%s` for `%s` in file `%s`'\n % (missing_versions, member_name, module.__file__))\n raise InterruptedVersioningException(error)\n\n\n<function token>\n", "<import token>\n<function token>\n\n\ndef validate_inheritance_for_class(cls):\n \"\"\"\n Function to validate if the provided class inherits from the\n previous version of the class.\n\n Args:\n cls (class): The class to validate inheritance for.\n\n Raises:\n InheritanceException: When inherited from the wrong version.\n \"\"\"\n if hasattr(cls, '__version__'):\n class_suffix = re.compile('V\\\\d+$')\n class_base_name = class_suffix.split(cls.__name__)[0]\n for base in cls.__bases__:\n if base.__name__ == 'ProxyClass':\n message = (\n 'Not allowed to inherit from `%s` without version!' %\n class_base_name)\n raise InheritanceException(message)\n if hasattr(base, '__version__'):\n base_base_name = class_suffix.split(base.__name__)[0]\n if class_base_name == base_base_name:\n if base.__version__ != cls.__version__ - 1:\n message = (\n '`%s` with version `%s` is not allowed to inherit from version `%s`! Can only inherit from previous version'\n % (class_base_name, cls.__version__, base.\n __version__))\n raise InheritanceException(message)\n\n\ndef _validate_continuous_versioning(module):\n \"\"\"\n Validate if there are no gaps in the versioning of functions and\n classes for the given module.\n\n Args:\n module (module): The module to check for.\n\n Raises:\n InterruptedVersioningException: When there is a gap in the versioning\n of a function or class.\n \"\"\"\n version_table = getattr(module, '__version_table__', {})\n for member_name, version_mapping in iteritems(version_table):\n versions = list(version_mapping['members'])\n versions.sort()\n if versions != list(range(1, len(versions) + 1)):\n missing_versions = list(set(range(1, len(versions) + 1)) - set(\n versions))\n error = (\n 'Versions need to be consecutive and start at `1`, missing version `%s` for `%s` in file `%s`'\n % (missing_versions, member_name, module.__file__))\n raise InterruptedVersioningException(error)\n\n\n<function token>\n", "<import token>\n<function token>\n\n\ndef validate_inheritance_for_class(cls):\n \"\"\"\n Function to validate if the provided class inherits from the\n previous version of the class.\n\n Args:\n cls (class): The class to validate inheritance for.\n\n Raises:\n InheritanceException: When inherited from the wrong version.\n \"\"\"\n if hasattr(cls, '__version__'):\n class_suffix = re.compile('V\\\\d+$')\n class_base_name = class_suffix.split(cls.__name__)[0]\n for base in cls.__bases__:\n if base.__name__ == 'ProxyClass':\n message = (\n 'Not allowed to inherit from `%s` without version!' %\n class_base_name)\n raise InheritanceException(message)\n if hasattr(base, '__version__'):\n base_base_name = class_suffix.split(base.__name__)[0]\n if class_base_name == base_base_name:\n if base.__version__ != cls.__version__ - 1:\n message = (\n '`%s` with version `%s` is not allowed to inherit from version `%s`! Can only inherit from previous version'\n % (class_base_name, cls.__version__, base.\n __version__))\n raise InheritanceException(message)\n\n\n<function token>\n<function token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n" ]
false
98,971
b0bf0cc981bbfc48f08022506ceea36aa7e89046
# Copyright 2011 Seppo Yli-Olli # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import dbc import types class EventFactory: __metaclass__ = dbc.DBC def _maxCount__invar(self): assert isinstance(self._maxCount, types.IntType) def _recycledObjects__invar(self): assert isinstance(self._recycledObjects, types.ListType) def __init__(self): self._maxCount = 10 self._recycledObjects = [] def recycleEvent(self, event): if len(self._recycledObjects) < self._maxCount: event.__recycle__() self._recycledObjects.append(event) def recycleEvent__pre(self, event): assert isinstance(event, Event) def recycleEvent__post(self, rval): assert rval is None def createEvent(self, function, args=None): if len(self._recycledObjects): event = self._recycledObjects.pop() event.__init__(function, args) return event else: return Event(function, args) def createEvent__pre(self, function, args): assert isinstance(function, types.LambdaType) def createEvent__post(self, rval): assert isinstance(rval, Event) def setMaxCount(self, size): self._maxCount = size while len(self._recycledObjects) < self._maxCount: self._recycledObjects.pop() def setMaxCount__pre(self, size): assert size >= 0 def setMaxCount__post(self, rval): assert rval is None class Event: def __init__(self, function, args=None): self._function = function self._args = args self._loop = None def execute(self): if self._loop: if self._args: return self._function(self._args) else: return self._function(self._loop) def __recycle__(self): self._function = None self._args = None self._loop = None
[ "# Copyright 2011 Seppo Yli-Olli\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport dbc\nimport types\n\nclass EventFactory:\n __metaclass__ = dbc.DBC\n\n def _maxCount__invar(self):\n assert isinstance(self._maxCount, types.IntType)\n\n def _recycledObjects__invar(self):\n assert isinstance(self._recycledObjects, types.ListType)\n \n def __init__(self):\n self._maxCount = 10\n self._recycledObjects = []\n\n def recycleEvent(self, event):\n if len(self._recycledObjects) < self._maxCount:\n event.__recycle__()\n self._recycledObjects.append(event)\n \n def recycleEvent__pre(self, event):\n assert isinstance(event, Event)\n\n def recycleEvent__post(self, rval):\n assert rval is None\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n\n def createEvent__pre(self, function, args):\n assert isinstance(function, types.LambdaType)\n\n def createEvent__post(self, rval):\n assert isinstance(rval, Event)\n \n def setMaxCount(self, size):\n self._maxCount = size\n while len(self._recycledObjects) < self._maxCount:\n self._recycledObjects.pop() \n def setMaxCount__pre(self, size):\n assert size >= 0\n\n def setMaxCount__post(self, rval):\n assert rval is None\n\nclass Event:\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "import dbc\nimport types\n\n\nclass EventFactory:\n __metaclass__ = dbc.DBC\n\n def _maxCount__invar(self):\n assert isinstance(self._maxCount, types.IntType)\n\n def _recycledObjects__invar(self):\n assert isinstance(self._recycledObjects, types.ListType)\n\n def __init__(self):\n self._maxCount = 10\n self._recycledObjects = []\n\n def recycleEvent(self, event):\n if len(self._recycledObjects) < self._maxCount:\n event.__recycle__()\n self._recycledObjects.append(event)\n\n def recycleEvent__pre(self, event):\n assert isinstance(event, Event)\n\n def recycleEvent__post(self, rval):\n assert rval is None\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n\n def createEvent__pre(self, function, args):\n assert isinstance(function, types.LambdaType)\n\n def createEvent__post(self, rval):\n assert isinstance(rval, Event)\n\n def setMaxCount(self, size):\n self._maxCount = size\n while len(self._recycledObjects) < self._maxCount:\n self._recycledObjects.pop()\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n\n def setMaxCount__post(self, rval):\n assert rval is None\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n __metaclass__ = dbc.DBC\n\n def _maxCount__invar(self):\n assert isinstance(self._maxCount, types.IntType)\n\n def _recycledObjects__invar(self):\n assert isinstance(self._recycledObjects, types.ListType)\n\n def __init__(self):\n self._maxCount = 10\n self._recycledObjects = []\n\n def recycleEvent(self, event):\n if len(self._recycledObjects) < self._maxCount:\n event.__recycle__()\n self._recycledObjects.append(event)\n\n def recycleEvent__pre(self, event):\n assert isinstance(event, Event)\n\n def recycleEvent__post(self, rval):\n assert rval is None\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n\n def createEvent__pre(self, function, args):\n assert isinstance(function, types.LambdaType)\n\n def createEvent__post(self, rval):\n assert isinstance(rval, Event)\n\n def setMaxCount(self, size):\n self._maxCount = size\n while len(self._recycledObjects) < self._maxCount:\n self._recycledObjects.pop()\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n\n def setMaxCount__post(self, rval):\n assert rval is None\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n <assignment token>\n\n def _maxCount__invar(self):\n assert isinstance(self._maxCount, types.IntType)\n\n def _recycledObjects__invar(self):\n assert isinstance(self._recycledObjects, types.ListType)\n\n def __init__(self):\n self._maxCount = 10\n self._recycledObjects = []\n\n def recycleEvent(self, event):\n if len(self._recycledObjects) < self._maxCount:\n event.__recycle__()\n self._recycledObjects.append(event)\n\n def recycleEvent__pre(self, event):\n assert isinstance(event, Event)\n\n def recycleEvent__post(self, rval):\n assert rval is None\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n\n def createEvent__pre(self, function, args):\n assert isinstance(function, types.LambdaType)\n\n def createEvent__post(self, rval):\n assert isinstance(rval, Event)\n\n def setMaxCount(self, size):\n self._maxCount = size\n while len(self._recycledObjects) < self._maxCount:\n self._recycledObjects.pop()\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n\n def setMaxCount__post(self, rval):\n assert rval is None\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n <assignment token>\n <function token>\n\n def _recycledObjects__invar(self):\n assert isinstance(self._recycledObjects, types.ListType)\n\n def __init__(self):\n self._maxCount = 10\n self._recycledObjects = []\n\n def recycleEvent(self, event):\n if len(self._recycledObjects) < self._maxCount:\n event.__recycle__()\n self._recycledObjects.append(event)\n\n def recycleEvent__pre(self, event):\n assert isinstance(event, Event)\n\n def recycleEvent__post(self, rval):\n assert rval is None\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n\n def createEvent__pre(self, function, args):\n assert isinstance(function, types.LambdaType)\n\n def createEvent__post(self, rval):\n assert isinstance(rval, Event)\n\n def setMaxCount(self, size):\n self._maxCount = size\n while len(self._recycledObjects) < self._maxCount:\n self._recycledObjects.pop()\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n\n def setMaxCount__post(self, rval):\n assert rval is None\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n <assignment token>\n <function token>\n\n def _recycledObjects__invar(self):\n assert isinstance(self._recycledObjects, types.ListType)\n\n def __init__(self):\n self._maxCount = 10\n self._recycledObjects = []\n\n def recycleEvent(self, event):\n if len(self._recycledObjects) < self._maxCount:\n event.__recycle__()\n self._recycledObjects.append(event)\n\n def recycleEvent__pre(self, event):\n assert isinstance(event, Event)\n\n def recycleEvent__post(self, rval):\n assert rval is None\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n\n def createEvent__pre(self, function, args):\n assert isinstance(function, types.LambdaType)\n\n def createEvent__post(self, rval):\n assert isinstance(rval, Event)\n\n def setMaxCount(self, size):\n self._maxCount = size\n while len(self._recycledObjects) < self._maxCount:\n self._recycledObjects.pop()\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n <function token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n <assignment token>\n <function token>\n <function token>\n\n def __init__(self):\n self._maxCount = 10\n self._recycledObjects = []\n\n def recycleEvent(self, event):\n if len(self._recycledObjects) < self._maxCount:\n event.__recycle__()\n self._recycledObjects.append(event)\n\n def recycleEvent__pre(self, event):\n assert isinstance(event, Event)\n\n def recycleEvent__post(self, rval):\n assert rval is None\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n\n def createEvent__pre(self, function, args):\n assert isinstance(function, types.LambdaType)\n\n def createEvent__post(self, rval):\n assert isinstance(rval, Event)\n\n def setMaxCount(self, size):\n self._maxCount = size\n while len(self._recycledObjects) < self._maxCount:\n self._recycledObjects.pop()\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n <function token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n <assignment token>\n <function token>\n <function token>\n\n def __init__(self):\n self._maxCount = 10\n self._recycledObjects = []\n\n def recycleEvent(self, event):\n if len(self._recycledObjects) < self._maxCount:\n event.__recycle__()\n self._recycledObjects.append(event)\n <function token>\n\n def recycleEvent__post(self, rval):\n assert rval is None\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n\n def createEvent__pre(self, function, args):\n assert isinstance(function, types.LambdaType)\n\n def createEvent__post(self, rval):\n assert isinstance(rval, Event)\n\n def setMaxCount(self, size):\n self._maxCount = size\n while len(self._recycledObjects) < self._maxCount:\n self._recycledObjects.pop()\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n <function token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n <assignment token>\n <function token>\n <function token>\n\n def __init__(self):\n self._maxCount = 10\n self._recycledObjects = []\n\n def recycleEvent(self, event):\n if len(self._recycledObjects) < self._maxCount:\n event.__recycle__()\n self._recycledObjects.append(event)\n <function token>\n <function token>\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n\n def createEvent__pre(self, function, args):\n assert isinstance(function, types.LambdaType)\n\n def createEvent__post(self, rval):\n assert isinstance(rval, Event)\n\n def setMaxCount(self, size):\n self._maxCount = size\n while len(self._recycledObjects) < self._maxCount:\n self._recycledObjects.pop()\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n <function token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n <assignment token>\n <function token>\n <function token>\n\n def __init__(self):\n self._maxCount = 10\n self._recycledObjects = []\n\n def recycleEvent(self, event):\n if len(self._recycledObjects) < self._maxCount:\n event.__recycle__()\n self._recycledObjects.append(event)\n <function token>\n <function token>\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n\n def createEvent__pre(self, function, args):\n assert isinstance(function, types.LambdaType)\n\n def createEvent__post(self, rval):\n assert isinstance(rval, Event)\n <function token>\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n <function token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n <assignment token>\n <function token>\n <function token>\n\n def __init__(self):\n self._maxCount = 10\n self._recycledObjects = []\n\n def recycleEvent(self, event):\n if len(self._recycledObjects) < self._maxCount:\n event.__recycle__()\n self._recycledObjects.append(event)\n <function token>\n <function token>\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n <function token>\n\n def createEvent__post(self, rval):\n assert isinstance(rval, Event)\n <function token>\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n <function token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n <assignment token>\n <function token>\n <function token>\n\n def __init__(self):\n self._maxCount = 10\n self._recycledObjects = []\n\n def recycleEvent(self, event):\n if len(self._recycledObjects) < self._maxCount:\n event.__recycle__()\n self._recycledObjects.append(event)\n <function token>\n <function token>\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n <function token>\n <function token>\n <function token>\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n <function token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n <assignment token>\n <function token>\n <function token>\n\n def __init__(self):\n self._maxCount = 10\n self._recycledObjects = []\n <function token>\n <function token>\n <function token>\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n <function token>\n <function token>\n <function token>\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n <function token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def createEvent(self, function, args=None):\n if len(self._recycledObjects):\n event = self._recycledObjects.pop()\n event.__init__(function, args)\n return event\n else:\n return Event(function, args)\n <function token>\n <function token>\n <function token>\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n <function token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def setMaxCount__pre(self, size):\n assert size >= 0\n <function token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n\n\nclass EventFactory:\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n<class token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n\n def execute(self):\n if self._loop:\n if self._args:\n return self._function(self._args)\n else:\n return self._function(self._loop)\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n<class token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n <function token>\n\n def __recycle__(self):\n self._function = None\n self._args = None\n self._loop = None\n", "<import token>\n<class token>\n\n\nclass Event:\n\n def __init__(self, function, args=None):\n self._function = function\n self._args = args\n self._loop = None\n <function token>\n <function token>\n", "<import token>\n<class token>\n\n\nclass Event:\n <function token>\n <function token>\n <function token>\n", "<import token>\n<class token>\n<class token>\n" ]
false
98,972
c1114bd36a26cdc9bc7024228f03347afb9ab67b
from django.db import models from django.db.models.fields import CharField, IntegerField # Create your models here. class Student(models.Model): id = IntegerField(primary_key=True) name = CharField(max_length=20) score = IntegerField() def __str__(self): return self.name
[ "from django.db import models\nfrom django.db.models.fields import CharField, IntegerField\n\n# Create your models here.\nclass Student(models.Model):\n id = IntegerField(primary_key=True)\n name = CharField(max_length=20)\n score = IntegerField()\n\n def __str__(self):\n return self.name", "from django.db import models\nfrom django.db.models.fields import CharField, IntegerField\n\n\nclass Student(models.Model):\n id = IntegerField(primary_key=True)\n name = CharField(max_length=20)\n score = IntegerField()\n\n def __str__(self):\n return self.name\n", "<import token>\n\n\nclass Student(models.Model):\n id = IntegerField(primary_key=True)\n name = CharField(max_length=20)\n score = IntegerField()\n\n def __str__(self):\n return self.name\n", "<import token>\n\n\nclass Student(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __str__(self):\n return self.name\n", "<import token>\n\n\nclass Student(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n", "<import token>\n<class token>\n" ]
false
98,973
6cc64120ef1ebf0cff5dfe71bf92df46e354fbdc
from env.env_real_sac import rozum_real env=rozum_real() import gym import numpy as np import os os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' import tensorflow as tf tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR) from tensorflow.python.util import deprecation deprecation._PRINT_DEPRECATION_WARNINGS = False from stable_baselines.sac.policies import MlpPolicy from stable_baselines import SAC from stable_baselines import results_plotter os.chdir("/") model = SAC.load("/home/ali/Industrial_assmbly_RL/sac_rozum_new(2).zip",env=env) # print(model.get_parameters()) # model.learn(total_timesteps=1000, log_interval=10)#,tb_log_name="stage2") # model.save("sac_rozum2") print(model.action_space) print(model.action_space) print("\n After training \n") obs = env.reset() for i in range(200): action, _states = model.predict(obs) obs, reward, done, info = env.step(action) print(reward) if done: env.reset()
[ "from env.env_real_sac import rozum_real\nenv=rozum_real()\n\nimport gym\nimport numpy as np\n\nimport os\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\n\nimport tensorflow as tf\ntf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)\n\nfrom tensorflow.python.util import deprecation\ndeprecation._PRINT_DEPRECATION_WARNINGS = False\n\n\nfrom stable_baselines.sac.policies import MlpPolicy\nfrom stable_baselines import SAC\nfrom stable_baselines import results_plotter\n\nos.chdir(\"/\")\nmodel = SAC.load(\"/home/ali/Industrial_assmbly_RL/sac_rozum_new(2).zip\",env=env)\n# print(model.get_parameters())\n# model.learn(total_timesteps=1000, log_interval=10)#,tb_log_name=\"stage2\")\n# model.save(\"sac_rozum2\")\nprint(model.action_space)\nprint(model.action_space)\nprint(\"\\n After training \\n\")\nobs = env.reset()\nfor i in range(200):\n action, _states = model.predict(obs)\n obs, reward, done, info = env.step(action)\n print(reward)\n if done:\n env.reset()", "from env.env_real_sac import rozum_real\nenv = rozum_real()\nimport gym\nimport numpy as np\nimport os\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\nimport tensorflow as tf\ntf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)\nfrom tensorflow.python.util import deprecation\ndeprecation._PRINT_DEPRECATION_WARNINGS = False\nfrom stable_baselines.sac.policies import MlpPolicy\nfrom stable_baselines import SAC\nfrom stable_baselines import results_plotter\nos.chdir('/')\nmodel = SAC.load('/home/ali/Industrial_assmbly_RL/sac_rozum_new(2).zip',\n env=env)\nprint(model.action_space)\nprint(model.action_space)\nprint(\"\"\"\n After training \n\"\"\")\nobs = env.reset()\nfor i in range(200):\n action, _states = model.predict(obs)\n obs, reward, done, info = env.step(action)\n print(reward)\n if done:\n env.reset()\n", "<import token>\nenv = rozum_real()\n<import token>\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\n<import token>\ntf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)\n<import token>\ndeprecation._PRINT_DEPRECATION_WARNINGS = False\n<import token>\nos.chdir('/')\nmodel = SAC.load('/home/ali/Industrial_assmbly_RL/sac_rozum_new(2).zip',\n env=env)\nprint(model.action_space)\nprint(model.action_space)\nprint(\"\"\"\n After training \n\"\"\")\nobs = env.reset()\nfor i in range(200):\n action, _states = model.predict(obs)\n obs, reward, done, info = env.step(action)\n print(reward)\n if done:\n env.reset()\n", "<import token>\n<assignment token>\n<import token>\n<assignment token>\n<import token>\ntf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)\n<import token>\n<assignment token>\n<import token>\nos.chdir('/')\n<assignment token>\nprint(model.action_space)\nprint(model.action_space)\nprint(\"\"\"\n After training \n\"\"\")\n<assignment token>\nfor i in range(200):\n action, _states = model.predict(obs)\n obs, reward, done, info = env.step(action)\n print(reward)\n if done:\n env.reset()\n", "<import token>\n<assignment token>\n<import token>\n<assignment token>\n<import token>\n<code token>\n<import token>\n<assignment token>\n<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
98,974
c62b52e1c9f737d2e35375d3f6ae6ab93731474f
import time # from paip_othello_class import othello as Paip # from paip_othello_class import MCTS as MCTSSearcher # import paip_othello_class as othello from paip_othello_class_mcts import othello as Paip from paip_othello_class_mcts import MCTS as MCTSSearcher import paip_othello_class_mcts as othello from policyNetwork import PolicyNetwork from randomNetwork import RandomNetwork # init_board=[ # '$', '$', '$', '$', '$', '$', '$', '$', '$', '$', # '$', 'x', 'x', 'x', 'x', 'x', 'x', 'o', 'x', '$', # '$', 'o', 'o', 'o', 'o', 'o', 'o', 'o', 'x', '$', # '$', 'o', 'o', 'x', 'o', 'o', 'x', 'o', 'o', '$', # '$', 'o', 'o', 'o', 'x', 'x', 'x', 'o', 'o', '$', # '$', 'o', 'o', 'o', 'x', 'x', 'o', 'x', 'o', '$', # '$', 'o', 'o', 'x', 'o', 'o', 'x', 'o', 'o', '$', # '$', 'o', 'o', 'x', 'o', 'o', 'o', 'o', 'o', '$', # '$', '.', 'o', 'x', 'x', '.', 'o', 'o', 'x', '$', # '$', '$', '$', '$', '$', '$', '$', '$', '$', '$'] # paip = Paip(board=init_board) matches = 100 report_cycle = 10 SEARCH_TIME = 5 class battle_bot(object): ''' This class is for the battle between AIs. The first heuristic is for the white, the second is for the black. ''' def __init__(self, heuristic): self.heuristic = heuristic self.engine = Paip() def loop(self, iterations, count): k = 0 sum_black = 0 sum_white = 0 draw = 0 for i in xrange(iterations): if k >= count: k = 0 print "Black wins:", sum_black, "/", i, ", winning rate =", sum_black*1.0/i print "White wins:", sum_white, "/", i, ", winning rate =", sum_white*1.0/i print "Draw:", draw, "/", i, "draw rate =", draw*1.0/i _, score = self.engine.play_with_MCTS(self.heuristic[0], self.heuristic[1]) for i in (0,1): if isinstance(self.heuristic[i], MCTSSearcher): self.heuristic[i].clear() if score > 0: # black wins sum_black += 1 elif score < 0: # white wins sum_white += 1 elif score == 0: # draw draw += 1 k+=1 print "Black wins:", sum_black, "/", iterations, ", winning rate =", sum_black*1.0/iterations print "White wins:", sum_white, "/", iterations, ", winning rate =", sum_white*1.0/iterations print "Draw:", draw, "/", iterations, "draw rate =", draw*1.0/iterations def match_set(model, opponent, model_side): if model_side == "black": black = model white = opponent elif model_side == "white": black = opponent white = model Start = time.time() battle = battle_bot(heuristic=[black, white]) battle.loop(matches, report_cycle) End = time.time() print "Time:", round(End-Start,3), "Average match Time:", round(End-Start,3)*1.0/matches print if __name__=='__main__': depth = 3 # policynetworks = { # 'fast': PolicyNetwork( # "./model/p_CNN_cat_10_model_L_conv4x4sigx50_conv3x3sigx100_fulltanh200_fulltanh100_500.h5" # ), # 'best': PolicyNetwork( # "./model/p_CNN_cat_10_model_L_conv4x4sigx64_conv3x3sigx128_fulltanh256_fulltanh128_500.h5" # ), # 'overfit': PolicyNetwork( # "./model/p_CNN_cat_10_model_L_conv4x4sigx64_conv3x3sigx256_fulltanh512_fulltanh128_1000.h5" # )} opponents = { 'WSS': othello.alphabeta_searcher(depth, othello.weighted_score), 'RAN': MCTSSearcher(RandomNetwork(), seconds_per_move=SEARCH_TIME) } # for oppo in opponents: # for network in policynetworks: # model_1 = MCTSSearcher(policynetworks[network], seconds_per_move=SEARCH_TIME) # model_2 = opponents[oppo] # # print "Black:", network, "v.s. White:", oppo # match_set(model_1, model_2, "black") # # print "Black:", oppo, "v.s. White:", network # match_set(model_1, model_2, "white") model_1 = opponents['WSS'] model_2 = opponents['RAN'] print "Black: WSS v.s. White: RAN" match_set(model_1, model_2, "black") print "Black: RAN v.s. White: WSS" match_set(model_1, model_2, "white")
[ "import time\n# from paip_othello_class import othello as Paip\n# from paip_othello_class import MCTS as MCTSSearcher\n# import paip_othello_class as othello\n\nfrom paip_othello_class_mcts import othello as Paip\nfrom paip_othello_class_mcts import MCTS as MCTSSearcher\nimport paip_othello_class_mcts as othello\n\nfrom policyNetwork import PolicyNetwork\nfrom randomNetwork import RandomNetwork\n\n# init_board=[\n# '$', '$', '$', '$', '$', '$', '$', '$', '$', '$',\n# '$', 'x', 'x', 'x', 'x', 'x', 'x', 'o', 'x', '$',\n# '$', 'o', 'o', 'o', 'o', 'o', 'o', 'o', 'x', '$',\n# '$', 'o', 'o', 'x', 'o', 'o', 'x', 'o', 'o', '$',\n# '$', 'o', 'o', 'o', 'x', 'x', 'x', 'o', 'o', '$',\n# '$', 'o', 'o', 'o', 'x', 'x', 'o', 'x', 'o', '$',\n# '$', 'o', 'o', 'x', 'o', 'o', 'x', 'o', 'o', '$',\n# '$', 'o', 'o', 'x', 'o', 'o', 'o', 'o', 'o', '$',\n# '$', '.', 'o', 'x', 'x', '.', 'o', 'o', 'x', '$',\n# '$', '$', '$', '$', '$', '$', '$', '$', '$', '$']\n# paip = Paip(board=init_board)\n\n\nmatches = 100\nreport_cycle = 10\nSEARCH_TIME = 5\n\nclass battle_bot(object):\n '''\n This class is for the battle between AIs. The first heuristic is for the white, the second is for the black.\n '''\n def __init__(self, heuristic):\n self.heuristic = heuristic\n self.engine = Paip()\n\n def loop(self, iterations, count):\n k = 0\n sum_black = 0\n sum_white = 0\n draw = 0\n for i in xrange(iterations):\n if k >= count:\n k = 0\n print \"Black wins:\", sum_black, \"/\", i, \", winning rate =\", sum_black*1.0/i\n print \"White wins:\", sum_white, \"/\", i, \", winning rate =\", sum_white*1.0/i\n print \"Draw:\", draw, \"/\", i, \"draw rate =\", draw*1.0/i\n\n _, score = self.engine.play_with_MCTS(self.heuristic[0], self.heuristic[1])\n for i in (0,1):\n if isinstance(self.heuristic[i], MCTSSearcher):\n self.heuristic[i].clear()\n\n if score > 0: # black wins\n sum_black += 1\n elif score < 0: # white wins\n sum_white += 1\n elif score == 0: # draw\n draw += 1\n k+=1\n\n print \"Black wins:\", sum_black, \"/\", iterations, \", winning rate =\", sum_black*1.0/iterations\n print \"White wins:\", sum_white, \"/\", iterations, \", winning rate =\", sum_white*1.0/iterations\n print \"Draw:\", draw, \"/\", iterations, \"draw rate =\", draw*1.0/iterations\n\ndef match_set(model, opponent, model_side):\n if model_side == \"black\":\n black = model\n white = opponent\n elif model_side == \"white\":\n black = opponent\n white = model\n Start = time.time()\n battle = battle_bot(heuristic=[black, white])\n battle.loop(matches, report_cycle)\n End = time.time()\n print \"Time:\", round(End-Start,3), \"Average match Time:\", round(End-Start,3)*1.0/matches\n print\n\nif __name__=='__main__':\n\n depth = 3\n\n # policynetworks = {\n # 'fast': PolicyNetwork(\n # \"./model/p_CNN_cat_10_model_L_conv4x4sigx50_conv3x3sigx100_fulltanh200_fulltanh100_500.h5\"\n # ),\n # 'best': PolicyNetwork(\n # \"./model/p_CNN_cat_10_model_L_conv4x4sigx64_conv3x3sigx128_fulltanh256_fulltanh128_500.h5\"\n # ),\n # 'overfit': PolicyNetwork(\n # \"./model/p_CNN_cat_10_model_L_conv4x4sigx64_conv3x3sigx256_fulltanh512_fulltanh128_1000.h5\"\n # )}\n\n opponents = {\n 'WSS': othello.alphabeta_searcher(depth, othello.weighted_score),\n 'RAN': MCTSSearcher(RandomNetwork(), seconds_per_move=SEARCH_TIME)\n }\n\n # for oppo in opponents:\n # for network in policynetworks:\n # model_1 = MCTSSearcher(policynetworks[network], seconds_per_move=SEARCH_TIME)\n # model_2 = opponents[oppo]\n #\n # print \"Black:\", network, \"v.s. White:\", oppo\n # match_set(model_1, model_2, \"black\")\n #\n # print \"Black:\", oppo, \"v.s. White:\", network\n # match_set(model_1, model_2, \"white\")\n\n model_1 = opponents['WSS']\n model_2 = opponents['RAN']\n print \"Black: WSS v.s. White: RAN\"\n match_set(model_1, model_2, \"black\")\n print \"Black: RAN v.s. White: WSS\"\n match_set(model_1, model_2, \"white\")\n" ]
true
98,975
fe548ee8b0dc80bdd7e5d9f0ad5bbe478556282d
import time from datetime import datetime, timezone import flow_api def handler(system: flow_api.System, this: flow_api.Execution): inputs = this.get('input_value') or {} message_id = inputs.get('message_id') if message_id is None: defaults = { 'interval': 60, 'wait': True, } if 'flow_name' in inputs: defaults['flow_name'] = inputs['flow_name'] if 'flow_id' in inputs: defaults['flow_name'] = system.flow(inputs['flow_id'], by='id').get('name') message = system.message( subject='Recurring execution', body={ 'type': 'object', 'properties': { 'flow_name': { 'label': 'Name of the flow which should be started recurring', 'element': 'string', 'type': 'string', 'example': defaults.get('flow_name'), 'default': defaults.get('flow_name'), 'order': 1, }, 'interval': { 'label': 'Interval of recurring execution in seconds', 'element': 'number', 'type': 'number', 'example': defaults['interval'], 'default': defaults['interval'], 'order': 2, }, 'wait': { 'label': 'Wait for child executions to finish', 'element': 'toggle', 'type': 'boolean', 'default': defaults['wait'], 'order': 3, }, 'max_iterations': { 'label': 'Maximum number of iterations (unlimited if omitted)', 'element': 'number', 'type': 'number', 'order': 4, }, 'start': { 'label': 'Start recurring', 'element': 'submit', 'type': 'boolean', 'order': 5, }, }, 'required': [ 'flow_name', 'interval', ], }, ) message_id = message.get('id') this.save(output_value={ 'message_id': message_id, }) this.flow( 'Recurring', name='Recurring execution', message_id=message_id, wait=False, ) return this.success('requested details') message = system.message(message_id) response = message.wait().get('response') this.log(response=response) flow_name = response['flow_name'] interval = response['interval'] wait = response['wait'] max_iterations = response.get('max_iterations') this.save(name=f'Recurring {flow_name}') # Loop iterations = 0 start = time.time() while max_iterations is None or iterations < max_iterations: iterations += 1 if max_iterations: this.save(message=f'iteration {iterations}/{max_iterations}') else: this.save(message=f'iteration {iterations}') # Start child execution inputs = { 'start': start, 'iterations': iterations, 'max_iterations': max_iterations, } child = this.flow( flow_name, inputs=inputs, name=f'{flow_name} iteration #{iterations}', run=False ) if wait: try: child.run() except Exception: this.log(f'iteration #{iterations} failed') else: child.run_async() if max_iterations is not None and iterations >= max_iterations: break if wait: now = time.time() scheduled = datetime.fromtimestamp(now + interval, timezone.utc) else: scheduled = datetime.fromtimestamp(start + (iterations * interval), timezone.utc) scheduled_ts = scheduled.isoformat(sep=' ', timespec='minutes') this.save(message=scheduled_ts) if wait: this.sleep(interval) else: this.sleep_until(start + (iterations * interval)) return this.success(f'started {iterations} iterations')
[ "import time\nfrom datetime import datetime, timezone\n\nimport flow_api\n\ndef handler(system: flow_api.System, this: flow_api.Execution):\n inputs = this.get('input_value') or {}\n message_id = inputs.get('message_id')\n\n if message_id is None:\n defaults = {\n 'interval': 60,\n 'wait': True,\n }\n if 'flow_name' in inputs:\n defaults['flow_name'] = inputs['flow_name']\n if 'flow_id' in inputs:\n defaults['flow_name'] = system.flow(inputs['flow_id'], by='id').get('name')\n\n message = system.message(\n subject='Recurring execution',\n body={\n 'type': 'object',\n 'properties': {\n 'flow_name': {\n 'label': 'Name of the flow which should be started recurring',\n 'element': 'string',\n 'type': 'string',\n 'example': defaults.get('flow_name'),\n 'default': defaults.get('flow_name'),\n 'order': 1,\n },\n 'interval': {\n 'label': 'Interval of recurring execution in seconds',\n 'element': 'number',\n 'type': 'number',\n 'example': defaults['interval'],\n 'default': defaults['interval'],\n 'order': 2,\n },\n 'wait': {\n 'label': 'Wait for child executions to finish',\n 'element': 'toggle',\n 'type': 'boolean',\n 'default': defaults['wait'],\n 'order': 3,\n },\n 'max_iterations': {\n 'label': 'Maximum number of iterations (unlimited if omitted)',\n 'element': 'number',\n 'type': 'number',\n 'order': 4,\n },\n 'start': {\n 'label': 'Start recurring',\n 'element': 'submit',\n 'type': 'boolean',\n 'order': 5,\n },\n },\n 'required': [\n 'flow_name',\n 'interval',\n ],\n },\n )\n message_id = message.get('id')\n this.save(output_value={\n 'message_id': message_id,\n })\n this.flow(\n 'Recurring',\n name='Recurring execution',\n message_id=message_id,\n wait=False,\n )\n return this.success('requested details')\n\n message = system.message(message_id)\n response = message.wait().get('response')\n this.log(response=response)\n flow_name = response['flow_name']\n interval = response['interval']\n wait = response['wait']\n max_iterations = response.get('max_iterations')\n this.save(name=f'Recurring {flow_name}')\n\n # Loop\n iterations = 0\n start = time.time()\n while max_iterations is None or iterations < max_iterations:\n iterations += 1\n if max_iterations:\n this.save(message=f'iteration {iterations}/{max_iterations}')\n else:\n this.save(message=f'iteration {iterations}')\n # Start child execution\n inputs = {\n 'start': start,\n 'iterations': iterations,\n 'max_iterations': max_iterations,\n }\n child = this.flow(\n flow_name,\n inputs=inputs,\n name=f'{flow_name} iteration #{iterations}',\n run=False\n )\n if wait:\n try:\n child.run()\n except Exception:\n this.log(f'iteration #{iterations} failed')\n else:\n child.run_async()\n if max_iterations is not None and iterations >= max_iterations:\n break\n if wait:\n now = time.time()\n scheduled = datetime.fromtimestamp(now + interval, timezone.utc)\n else:\n scheduled = datetime.fromtimestamp(start + (iterations * interval), timezone.utc)\n scheduled_ts = scheduled.isoformat(sep=' ', timespec='minutes')\n this.save(message=scheduled_ts)\n if wait:\n this.sleep(interval)\n else:\n this.sleep_until(start + (iterations * interval))\n\n return this.success(f'started {iterations} iterations')\n", "import time\nfrom datetime import datetime, timezone\nimport flow_api\n\n\ndef handler(system: flow_api.System, this: flow_api.Execution):\n inputs = this.get('input_value') or {}\n message_id = inputs.get('message_id')\n if message_id is None:\n defaults = {'interval': 60, 'wait': True}\n if 'flow_name' in inputs:\n defaults['flow_name'] = inputs['flow_name']\n if 'flow_id' in inputs:\n defaults['flow_name'] = system.flow(inputs['flow_id'], by='id'\n ).get('name')\n message = system.message(subject='Recurring execution', body={\n 'type': 'object', 'properties': {'flow_name': {'label':\n 'Name of the flow which should be started recurring', 'element':\n 'string', 'type': 'string', 'example': defaults.get('flow_name'\n ), 'default': defaults.get('flow_name'), 'order': 1},\n 'interval': {'label':\n 'Interval of recurring execution in seconds', 'element':\n 'number', 'type': 'number', 'example': defaults['interval'],\n 'default': defaults['interval'], 'order': 2}, 'wait': {'label':\n 'Wait for child executions to finish', 'element': 'toggle',\n 'type': 'boolean', 'default': defaults['wait'], 'order': 3},\n 'max_iterations': {'label':\n 'Maximum number of iterations (unlimited if omitted)',\n 'element': 'number', 'type': 'number', 'order': 4}, 'start': {\n 'label': 'Start recurring', 'element': 'submit', 'type':\n 'boolean', 'order': 5}}, 'required': ['flow_name', 'interval']})\n message_id = message.get('id')\n this.save(output_value={'message_id': message_id})\n this.flow('Recurring', name='Recurring execution', message_id=\n message_id, wait=False)\n return this.success('requested details')\n message = system.message(message_id)\n response = message.wait().get('response')\n this.log(response=response)\n flow_name = response['flow_name']\n interval = response['interval']\n wait = response['wait']\n max_iterations = response.get('max_iterations')\n this.save(name=f'Recurring {flow_name}')\n iterations = 0\n start = time.time()\n while max_iterations is None or iterations < max_iterations:\n iterations += 1\n if max_iterations:\n this.save(message=f'iteration {iterations}/{max_iterations}')\n else:\n this.save(message=f'iteration {iterations}')\n inputs = {'start': start, 'iterations': iterations,\n 'max_iterations': max_iterations}\n child = this.flow(flow_name, inputs=inputs, name=\n f'{flow_name} iteration #{iterations}', run=False)\n if wait:\n try:\n child.run()\n except Exception:\n this.log(f'iteration #{iterations} failed')\n else:\n child.run_async()\n if max_iterations is not None and iterations >= max_iterations:\n break\n if wait:\n now = time.time()\n scheduled = datetime.fromtimestamp(now + interval, timezone.utc)\n else:\n scheduled = datetime.fromtimestamp(start + iterations *\n interval, timezone.utc)\n scheduled_ts = scheduled.isoformat(sep=' ', timespec='minutes')\n this.save(message=scheduled_ts)\n if wait:\n this.sleep(interval)\n else:\n this.sleep_until(start + iterations * interval)\n return this.success(f'started {iterations} iterations')\n", "<import token>\n\n\ndef handler(system: flow_api.System, this: flow_api.Execution):\n inputs = this.get('input_value') or {}\n message_id = inputs.get('message_id')\n if message_id is None:\n defaults = {'interval': 60, 'wait': True}\n if 'flow_name' in inputs:\n defaults['flow_name'] = inputs['flow_name']\n if 'flow_id' in inputs:\n defaults['flow_name'] = system.flow(inputs['flow_id'], by='id'\n ).get('name')\n message = system.message(subject='Recurring execution', body={\n 'type': 'object', 'properties': {'flow_name': {'label':\n 'Name of the flow which should be started recurring', 'element':\n 'string', 'type': 'string', 'example': defaults.get('flow_name'\n ), 'default': defaults.get('flow_name'), 'order': 1},\n 'interval': {'label':\n 'Interval of recurring execution in seconds', 'element':\n 'number', 'type': 'number', 'example': defaults['interval'],\n 'default': defaults['interval'], 'order': 2}, 'wait': {'label':\n 'Wait for child executions to finish', 'element': 'toggle',\n 'type': 'boolean', 'default': defaults['wait'], 'order': 3},\n 'max_iterations': {'label':\n 'Maximum number of iterations (unlimited if omitted)',\n 'element': 'number', 'type': 'number', 'order': 4}, 'start': {\n 'label': 'Start recurring', 'element': 'submit', 'type':\n 'boolean', 'order': 5}}, 'required': ['flow_name', 'interval']})\n message_id = message.get('id')\n this.save(output_value={'message_id': message_id})\n this.flow('Recurring', name='Recurring execution', message_id=\n message_id, wait=False)\n return this.success('requested details')\n message = system.message(message_id)\n response = message.wait().get('response')\n this.log(response=response)\n flow_name = response['flow_name']\n interval = response['interval']\n wait = response['wait']\n max_iterations = response.get('max_iterations')\n this.save(name=f'Recurring {flow_name}')\n iterations = 0\n start = time.time()\n while max_iterations is None or iterations < max_iterations:\n iterations += 1\n if max_iterations:\n this.save(message=f'iteration {iterations}/{max_iterations}')\n else:\n this.save(message=f'iteration {iterations}')\n inputs = {'start': start, 'iterations': iterations,\n 'max_iterations': max_iterations}\n child = this.flow(flow_name, inputs=inputs, name=\n f'{flow_name} iteration #{iterations}', run=False)\n if wait:\n try:\n child.run()\n except Exception:\n this.log(f'iteration #{iterations} failed')\n else:\n child.run_async()\n if max_iterations is not None and iterations >= max_iterations:\n break\n if wait:\n now = time.time()\n scheduled = datetime.fromtimestamp(now + interval, timezone.utc)\n else:\n scheduled = datetime.fromtimestamp(start + iterations *\n interval, timezone.utc)\n scheduled_ts = scheduled.isoformat(sep=' ', timespec='minutes')\n this.save(message=scheduled_ts)\n if wait:\n this.sleep(interval)\n else:\n this.sleep_until(start + iterations * interval)\n return this.success(f'started {iterations} iterations')\n", "<import token>\n<function token>\n" ]
false
98,976
9c99cc7e97e1b98dd0fb9eb6087c33166da90f9c
# -*- coding: utf-8 -*- # Terceiros from setuptools import find_packages, setup __version__ = '0.1.0' __description__ = 'Api test to Boa Vista Interview' __long_description__ = '' __author__ = 'Dheinny Marques' __author_email__ = '[email protected]' setup( name='api', version=__version__, author=__author__, author_email=__author_email__, packges=find_packages(), license='MIT', description=__description__, long_description=__long_description__, url='https://github.com/dheinny', keywords='API, CRUD', include_package_data=True, zip_safe=False, classifiers=[ 'Intended Audience :: Interviewers', 'Operation System :: OS Independent', 'Topic :: Software Development', 'Enviroment :: Web Enviroment', 'Programming Language :: Python :: 3.8', 'License :: OSI Approved :: MIT License', ], )
[ "# -*- coding: utf-8 -*-\n\n# Terceiros\nfrom setuptools import find_packages, setup\n\n__version__ = '0.1.0'\n__description__ = 'Api test to Boa Vista Interview'\n__long_description__ = ''\n\n__author__ = 'Dheinny Marques'\n__author_email__ = '[email protected]'\n\nsetup(\n name='api',\n version=__version__,\n author=__author__,\n author_email=__author_email__,\n packges=find_packages(),\n license='MIT',\n description=__description__,\n long_description=__long_description__,\n url='https://github.com/dheinny',\n keywords='API, CRUD',\n include_package_data=True,\n zip_safe=False,\n classifiers=[\n 'Intended Audience :: Interviewers',\n 'Operation System :: OS Independent',\n 'Topic :: Software Development',\n 'Enviroment :: Web Enviroment',\n 'Programming Language :: Python :: 3.8',\n 'License :: OSI Approved :: MIT License',\n ],\n)\n\n", "from setuptools import find_packages, setup\n__version__ = '0.1.0'\n__description__ = 'Api test to Boa Vista Interview'\n__long_description__ = ''\n__author__ = 'Dheinny Marques'\n__author_email__ = '[email protected]'\nsetup(name='api', version=__version__, author=__author__, author_email=\n __author_email__, packges=find_packages(), license='MIT', description=\n __description__, long_description=__long_description__, url=\n 'https://github.com/dheinny', keywords='API, CRUD',\n include_package_data=True, zip_safe=False, classifiers=[\n 'Intended Audience :: Interviewers',\n 'Operation System :: OS Independent', 'Topic :: Software Development',\n 'Enviroment :: Web Enviroment', 'Programming Language :: Python :: 3.8',\n 'License :: OSI Approved :: MIT License'])\n", "<import token>\n__version__ = '0.1.0'\n__description__ = 'Api test to Boa Vista Interview'\n__long_description__ = ''\n__author__ = 'Dheinny Marques'\n__author_email__ = '[email protected]'\nsetup(name='api', version=__version__, author=__author__, author_email=\n __author_email__, packges=find_packages(), license='MIT', description=\n __description__, long_description=__long_description__, url=\n 'https://github.com/dheinny', keywords='API, CRUD',\n include_package_data=True, zip_safe=False, classifiers=[\n 'Intended Audience :: Interviewers',\n 'Operation System :: OS Independent', 'Topic :: Software Development',\n 'Enviroment :: Web Enviroment', 'Programming Language :: Python :: 3.8',\n 'License :: OSI Approved :: MIT License'])\n", "<import token>\n<assignment token>\nsetup(name='api', version=__version__, author=__author__, author_email=\n __author_email__, packges=find_packages(), license='MIT', description=\n __description__, long_description=__long_description__, url=\n 'https://github.com/dheinny', keywords='API, CRUD',\n include_package_data=True, zip_safe=False, classifiers=[\n 'Intended Audience :: Interviewers',\n 'Operation System :: OS Independent', 'Topic :: Software Development',\n 'Enviroment :: Web Enviroment', 'Programming Language :: Python :: 3.8',\n 'License :: OSI Approved :: MIT License'])\n", "<import token>\n<assignment token>\n<code token>\n" ]
false
98,977
fee4e647f7c1fabe4e2f2a6cc2c40a8a07af54f5
from os import system from datetime import date from time import sleep import random class Account: def __init__(self, idNum, balance, dateCreated): self.id = idNum self.balance = balance self.dateCreated = dateCreated def getId(self): return self.id def getBalance(self): return self.balance def getDateCreated(self): return self.DateCreated def setAccount(self, Id, Balance): self.id = Id self.balance = Balance def withdraw(self, amount): self.balance = self.balance - amount return self.balance def deposit(self, amount): self.balance = self.balance + amount return self.balance class CheckingAccount(Account): def __init__(self, overdraft): self.overdraft = overdraft def overdraft(self): return self.overdraft class SavingsAccount(Account): def __init__(self, annualInterestRate, monthlyInterestRate): self.annualInterestRate = annualInterestRate self.monthlyInterestRate = monthlyInterestRate def getMonthlyInterestRate(self): self.monthlyInterestRate = (self.annualInterestRate / 100.0) / 12.0 return self.monthlyInterestRate def getMonthlyInterest(self): return self.balance * self.monthlyInterestRate def idCheck(idNum): if len(idNum) == 13 and idNum.isnumeric(): return True else: return False def display(): print("Available balance: R" + str(acc.getBalance())) def menu(): print("1. Check the balance\n2. Withdraw\n3. Deposit\n4. Exit") savings = [] checking = [] option = 0 while True: sleep(1.0) system('cls') identity = input("Enter your ID number:") acc = Account(identity, 0, date.today()) if idCheck(identity): # accNum = random.choice(savings) menu() option = eval(input("Choose an option:")) while option != 4: if option == 1: display() elif option == 2: out = eval(input("Enter the amount to withdraw: ")) acc.withdraw(out) print("Money out:-R"+str(out)) elif option == 3: dep = eval(input("Enter the amount to deposit:")) acc.deposit(dep) print("Amount of R" + str(dep) + " was deposited to the account\n ref:" + acc.getId()) else: print("Invalid option!!!.") option = eval(input("Choose an option:")) else: print("Please enter a valid ID number!!!.")
[ "from os import system\nfrom datetime import date\nfrom time import sleep\nimport random\n\n\nclass Account:\n def __init__(self, idNum, balance, dateCreated):\n self.id = idNum\n self.balance = balance\n self.dateCreated = dateCreated\n\n def getId(self):\n return self.id\n\n def getBalance(self):\n return self.balance\n\n def getDateCreated(self):\n return self.DateCreated\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n\n def withdraw(self, amount):\n self.balance = self.balance - amount\n return self.balance\n\n def deposit(self, amount):\n self.balance = self.balance + amount\n return self.balance\n\n\nclass CheckingAccount(Account):\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = (self.annualInterestRate / 100.0) / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\ndef idCheck(idNum):\n if len(idNum) == 13 and idNum.isnumeric():\n return True\n else:\n return False\n\n\ndef display():\n print(\"Available balance: R\" + str(acc.getBalance()))\n\n\ndef menu():\n print(\"1. Check the balance\\n2. Withdraw\\n3. Deposit\\n4. Exit\")\n\n\nsavings = []\nchecking = []\noption = 0\n\nwhile True:\n sleep(1.0)\n system('cls')\n identity = input(\"Enter your ID number:\")\n acc = Account(identity, 0, date.today())\n if idCheck(identity):\n # accNum = random.choice(savings)\n menu()\n option = eval(input(\"Choose an option:\"))\n while option != 4:\n if option == 1:\n display()\n elif option == 2:\n out = eval(input(\"Enter the amount to withdraw: \"))\n acc.withdraw(out)\n print(\"Money out:-R\"+str(out))\n elif option == 3:\n dep = eval(input(\"Enter the amount to deposit:\"))\n acc.deposit(dep)\n print(\"Amount of R\" + str(dep) + \" was deposited to the account\\n ref:\" + acc.getId())\n\n else:\n print(\"Invalid option!!!.\")\n option = eval(input(\"Choose an option:\"))\n else:\n print(\"Please enter a valid ID number!!!.\")\n", "from os import system\nfrom datetime import date\nfrom time import sleep\nimport random\n\n\nclass Account:\n\n def __init__(self, idNum, balance, dateCreated):\n self.id = idNum\n self.balance = balance\n self.dateCreated = dateCreated\n\n def getId(self):\n return self.id\n\n def getBalance(self):\n return self.balance\n\n def getDateCreated(self):\n return self.DateCreated\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n\n def withdraw(self, amount):\n self.balance = self.balance - amount\n return self.balance\n\n def deposit(self, amount):\n self.balance = self.balance + amount\n return self.balance\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\ndef idCheck(idNum):\n if len(idNum) == 13 and idNum.isnumeric():\n return True\n else:\n return False\n\n\ndef display():\n print('Available balance: R' + str(acc.getBalance()))\n\n\ndef menu():\n print('1. Check the balance\\n2. Withdraw\\n3. Deposit\\n4. Exit')\n\n\nsavings = []\nchecking = []\noption = 0\nwhile True:\n sleep(1.0)\n system('cls')\n identity = input('Enter your ID number:')\n acc = Account(identity, 0, date.today())\n if idCheck(identity):\n menu()\n option = eval(input('Choose an option:'))\n while option != 4:\n if option == 1:\n display()\n elif option == 2:\n out = eval(input('Enter the amount to withdraw: '))\n acc.withdraw(out)\n print('Money out:-R' + str(out))\n elif option == 3:\n dep = eval(input('Enter the amount to deposit:'))\n acc.deposit(dep)\n print('Amount of R' + str(dep) +\n ' was deposited to the account\\n ref:' + acc.getId())\n else:\n print('Invalid option!!!.')\n option = eval(input('Choose an option:'))\n else:\n print('Please enter a valid ID number!!!.')\n", "<import token>\n\n\nclass Account:\n\n def __init__(self, idNum, balance, dateCreated):\n self.id = idNum\n self.balance = balance\n self.dateCreated = dateCreated\n\n def getId(self):\n return self.id\n\n def getBalance(self):\n return self.balance\n\n def getDateCreated(self):\n return self.DateCreated\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n\n def withdraw(self, amount):\n self.balance = self.balance - amount\n return self.balance\n\n def deposit(self, amount):\n self.balance = self.balance + amount\n return self.balance\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\ndef idCheck(idNum):\n if len(idNum) == 13 and idNum.isnumeric():\n return True\n else:\n return False\n\n\ndef display():\n print('Available balance: R' + str(acc.getBalance()))\n\n\ndef menu():\n print('1. Check the balance\\n2. Withdraw\\n3. Deposit\\n4. Exit')\n\n\nsavings = []\nchecking = []\noption = 0\nwhile True:\n sleep(1.0)\n system('cls')\n identity = input('Enter your ID number:')\n acc = Account(identity, 0, date.today())\n if idCheck(identity):\n menu()\n option = eval(input('Choose an option:'))\n while option != 4:\n if option == 1:\n display()\n elif option == 2:\n out = eval(input('Enter the amount to withdraw: '))\n acc.withdraw(out)\n print('Money out:-R' + str(out))\n elif option == 3:\n dep = eval(input('Enter the amount to deposit:'))\n acc.deposit(dep)\n print('Amount of R' + str(dep) +\n ' was deposited to the account\\n ref:' + acc.getId())\n else:\n print('Invalid option!!!.')\n option = eval(input('Choose an option:'))\n else:\n print('Please enter a valid ID number!!!.')\n", "<import token>\n\n\nclass Account:\n\n def __init__(self, idNum, balance, dateCreated):\n self.id = idNum\n self.balance = balance\n self.dateCreated = dateCreated\n\n def getId(self):\n return self.id\n\n def getBalance(self):\n return self.balance\n\n def getDateCreated(self):\n return self.DateCreated\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n\n def withdraw(self, amount):\n self.balance = self.balance - amount\n return self.balance\n\n def deposit(self, amount):\n self.balance = self.balance + amount\n return self.balance\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\ndef idCheck(idNum):\n if len(idNum) == 13 and idNum.isnumeric():\n return True\n else:\n return False\n\n\ndef display():\n print('Available balance: R' + str(acc.getBalance()))\n\n\ndef menu():\n print('1. Check the balance\\n2. Withdraw\\n3. Deposit\\n4. Exit')\n\n\n<assignment token>\nwhile True:\n sleep(1.0)\n system('cls')\n identity = input('Enter your ID number:')\n acc = Account(identity, 0, date.today())\n if idCheck(identity):\n menu()\n option = eval(input('Choose an option:'))\n while option != 4:\n if option == 1:\n display()\n elif option == 2:\n out = eval(input('Enter the amount to withdraw: '))\n acc.withdraw(out)\n print('Money out:-R' + str(out))\n elif option == 3:\n dep = eval(input('Enter the amount to deposit:'))\n acc.deposit(dep)\n print('Amount of R' + str(dep) +\n ' was deposited to the account\\n ref:' + acc.getId())\n else:\n print('Invalid option!!!.')\n option = eval(input('Choose an option:'))\n else:\n print('Please enter a valid ID number!!!.')\n", "<import token>\n\n\nclass Account:\n\n def __init__(self, idNum, balance, dateCreated):\n self.id = idNum\n self.balance = balance\n self.dateCreated = dateCreated\n\n def getId(self):\n return self.id\n\n def getBalance(self):\n return self.balance\n\n def getDateCreated(self):\n return self.DateCreated\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n\n def withdraw(self, amount):\n self.balance = self.balance - amount\n return self.balance\n\n def deposit(self, amount):\n self.balance = self.balance + amount\n return self.balance\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\ndef idCheck(idNum):\n if len(idNum) == 13 and idNum.isnumeric():\n return True\n else:\n return False\n\n\ndef display():\n print('Available balance: R' + str(acc.getBalance()))\n\n\ndef menu():\n print('1. Check the balance\\n2. Withdraw\\n3. Deposit\\n4. Exit')\n\n\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass Account:\n\n def __init__(self, idNum, balance, dateCreated):\n self.id = idNum\n self.balance = balance\n self.dateCreated = dateCreated\n\n def getId(self):\n return self.id\n\n def getBalance(self):\n return self.balance\n\n def getDateCreated(self):\n return self.DateCreated\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n\n def withdraw(self, amount):\n self.balance = self.balance - amount\n return self.balance\n\n def deposit(self, amount):\n self.balance = self.balance + amount\n return self.balance\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n\n\ndef display():\n print('Available balance: R' + str(acc.getBalance()))\n\n\ndef menu():\n print('1. Check the balance\\n2. Withdraw\\n3. Deposit\\n4. Exit')\n\n\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass Account:\n\n def __init__(self, idNum, balance, dateCreated):\n self.id = idNum\n self.balance = balance\n self.dateCreated = dateCreated\n\n def getId(self):\n return self.id\n\n def getBalance(self):\n return self.balance\n\n def getDateCreated(self):\n return self.DateCreated\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n\n def withdraw(self, amount):\n self.balance = self.balance - amount\n return self.balance\n\n def deposit(self, amount):\n self.balance = self.balance + amount\n return self.balance\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n\n\ndef display():\n print('Available balance: R' + str(acc.getBalance()))\n\n\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass Account:\n\n def __init__(self, idNum, balance, dateCreated):\n self.id = idNum\n self.balance = balance\n self.dateCreated = dateCreated\n\n def getId(self):\n return self.id\n\n def getBalance(self):\n return self.balance\n\n def getDateCreated(self):\n return self.DateCreated\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n\n def withdraw(self, amount):\n self.balance = self.balance - amount\n return self.balance\n\n def deposit(self, amount):\n self.balance = self.balance + amount\n return self.balance\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass Account:\n\n def __init__(self, idNum, balance, dateCreated):\n self.id = idNum\n self.balance = balance\n self.dateCreated = dateCreated\n\n def getId(self):\n return self.id\n\n def getBalance(self):\n return self.balance\n\n def getDateCreated(self):\n return self.DateCreated\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n\n def withdraw(self, amount):\n self.balance = self.balance - amount\n return self.balance\n <function token>\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass Account:\n\n def __init__(self, idNum, balance, dateCreated):\n self.id = idNum\n self.balance = balance\n self.dateCreated = dateCreated\n\n def getId(self):\n return self.id\n\n def getBalance(self):\n return self.balance\n <function token>\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n\n def withdraw(self, amount):\n self.balance = self.balance - amount\n return self.balance\n <function token>\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass Account:\n\n def __init__(self, idNum, balance, dateCreated):\n self.id = idNum\n self.balance = balance\n self.dateCreated = dateCreated\n\n def getId(self):\n return self.id\n <function token>\n <function token>\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n\n def withdraw(self, amount):\n self.balance = self.balance - amount\n return self.balance\n <function token>\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass Account:\n\n def __init__(self, idNum, balance, dateCreated):\n self.id = idNum\n self.balance = balance\n self.dateCreated = dateCreated\n\n def getId(self):\n return self.id\n <function token>\n <function token>\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n <function token>\n <function token>\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass Account:\n\n def __init__(self, idNum, balance, dateCreated):\n self.id = idNum\n self.balance = balance\n self.dateCreated = dateCreated\n <function token>\n <function token>\n <function token>\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n <function token>\n <function token>\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass Account:\n <function token>\n <function token>\n <function token>\n <function token>\n\n def setAccount(self, Id, Balance):\n self.id = Id\n self.balance = Balance\n <function token>\n <function token>\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n\n\nclass Account:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n\n\nclass CheckingAccount(Account):\n\n def __init__(self, overdraft):\n self.overdraft = overdraft\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n\n\nclass CheckingAccount(Account):\n <function token>\n\n def overdraft(self):\n return self.overdraft\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n\n\nclass CheckingAccount(Account):\n <function token>\n <function token>\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n<class token>\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n\n def getMonthlyInterest(self):\n return self.balance * self.monthlyInterestRate\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n<class token>\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n\n def getMonthlyInterestRate(self):\n self.monthlyInterestRate = self.annualInterestRate / 100.0 / 12.0\n return self.monthlyInterestRate\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n<class token>\n\n\nclass SavingsAccount(Account):\n\n def __init__(self, annualInterestRate, monthlyInterestRate):\n self.annualInterestRate = annualInterestRate\n self.monthlyInterestRate = monthlyInterestRate\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n<class token>\n\n\nclass SavingsAccount(Account):\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<import token>\n<class token>\n<class token>\n<class token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n" ]
false
98,978
efa6a1971efec869999e291ba3ae018950d56c21
#!/bin/env python """ Driver script to update the workflow status @author: Hailiang Zhang <[email protected]> """ import sys import argparse from agiovanni.queueUtilities import update_workflow from agiovanni.celeryGiovanni import app if __name__ == "__main__": """ Driver script to update the workflow status Exit code: 0: everything is ok 1: exceptions """ # parse input parameters parser = argparse.ArgumentParser(description="Update the workflow status") parser.add_argument("-w", "--WORKFLOW_FILE", type=str, help="workflow status file") args = parser.parse_args() workflowFile = args.WORKFLOW_FILE #Update the workflowFile try: update_workflow(app, workflowFile) except: sys.exit(1)
[ "#!/bin/env python\n\n\"\"\"\nDriver script to update the workflow status\n\n@author: Hailiang Zhang <[email protected]>\n\"\"\"\n\nimport sys\nimport argparse\nfrom agiovanni.queueUtilities import update_workflow\nfrom agiovanni.celeryGiovanni import app\n\n\nif __name__ == \"__main__\":\n \"\"\"\n Driver script to update the workflow status\n\n Exit code:\n 0: everything is ok\n 1: exceptions\n \"\"\"\n # parse input parameters\n parser = argparse.ArgumentParser(description=\"Update the workflow status\")\n parser.add_argument(\"-w\", \"--WORKFLOW_FILE\", type=str, help=\"workflow status file\")\n args = parser.parse_args()\n workflowFile = args.WORKFLOW_FILE\n\n #Update the workflowFile\n try:\n update_workflow(app, workflowFile)\n except:\n sys.exit(1)\n", "<docstring token>\nimport sys\nimport argparse\nfrom agiovanni.queueUtilities import update_workflow\nfrom agiovanni.celeryGiovanni import app\nif __name__ == '__main__':\n \"\"\"\n Driver script to update the workflow status\n\n Exit code:\n 0: everything is ok\n 1: exceptions\n \"\"\"\n parser = argparse.ArgumentParser(description='Update the workflow status')\n parser.add_argument('-w', '--WORKFLOW_FILE', type=str, help=\n 'workflow status file')\n args = parser.parse_args()\n workflowFile = args.WORKFLOW_FILE\n try:\n update_workflow(app, workflowFile)\n except:\n sys.exit(1)\n", "<docstring token>\n<import token>\nif __name__ == '__main__':\n \"\"\"\n Driver script to update the workflow status\n\n Exit code:\n 0: everything is ok\n 1: exceptions\n \"\"\"\n parser = argparse.ArgumentParser(description='Update the workflow status')\n parser.add_argument('-w', '--WORKFLOW_FILE', type=str, help=\n 'workflow status file')\n args = parser.parse_args()\n workflowFile = args.WORKFLOW_FILE\n try:\n update_workflow(app, workflowFile)\n except:\n sys.exit(1)\n", "<docstring token>\n<import token>\n<code token>\n" ]
false
98,979
aeb14e36a734c58d07cf22633e2b3fd2cf0128fe
import json import os import numpy as np import pommerman from pommerman.characters import Bomber as Bomber from pommerman.characters import Bomb as Bomb from pommerman.characters import Flame as Flame import argparse import time def import_gamestate(filename) : with open(filename, 'r') as f: pm_gamestate = json.loads(f.read()) states = pm_gamestate['state'] # Values in state dict are actually json encoded # decode them new_states = [] for s in states: new_state = {} for key, value in s.items(): new_state[key] = json.loads(value) new_states.append(new_state) # Sort states by step count new_states.sort(key=lambda state: state['step_count']) # Remove intended_actions from states and add it as actions # to previous state for i in range(len(new_states) - 1): actions = new_states[i+1].pop('intended_actions') new_states[i]['action'] = actions # replace old state array pm_gamestate['state'] = new_states return pm_gamestate def stateToScene(state) : agents = [] for agent_state in state['agents']: agent = Bomber(agent_id = agent_state['agent_id']) position = tuple(agent_state['position']) agent.set_start_position(position) agent.reset( int(agent_state['ammo']), bool(agent_state['is_alive']), int(agent_state['blast_strength']), bool(agent_state['can_kick'])) agents.append(agent) bombs = [] for bomb_state in state['bombs']: direction = bomb_state['moving_direction'] if direction is not None: direction = pommerman.constants.Action(direction) bomb = Bomb(agents[bomb_state['bomber_id']], tuple(bomb_state['position']), int(bomb_state['life']), int(bomb_state['blast_strength']), direction) bombs.append(bomb) items = {} for item_state in state['items']: items[tuple(item_state[0])] = item_state[1] flames = [] for flame_state in state['flames']: flame = Flame(tuple(flame_state['position']), flame_state['life']) flames.append(flame) board = np.asarray(state['board'], np.uint8) return board, agents, bombs, items, flames def main(args): viewer = None if (None == args.gamefile) : print("Please add --gamefile <file>") return verify = not args.noverify render = args.render gs = import_gamestate(args.gamefile) board, agents, bombs, items, flames = stateToScene(gs['state'][0]) for i in range(len(gs['state'])-1): action= gs['state'][i]['action']; if (args.verbose) : print ("Step: ", i, "Action: ", action); print (board) board, agents, bombs, items, flames = pommerman.forward_model.ForwardModel.step(action, board, agents, bombs, items, flames) if render: if viewer is None: viewer = pommerman.graphics.PommeViewer() viewer.set_board(board) viewer.set_agents(agents) if hasattr(viewer, 'set_bombs'): viewer.set_bombs(bombs) viewer.set_step(i) viewer.render() time.sleep(0.1) if verify: tboard, tagents, tbombs, titems, tflames = stateToScene(gs['state'][i+1]) if ( not np.array_equal(board,tboard)): print ("failed at step:", i) return if (args.verbose) : print ("Step: ", len(gs['state']) - 1); print (board) if __name__ == "__main__": parser = argparse.ArgumentParser(description='Replay Flags.') parser.add_argument( "--noverify", default=False, action='store_true', help="Whether to skip verifying. Defaults to False.") parser.add_argument( '--gamefile', default=None, help='Game file to replay') parser.add_argument( "--verbose", default=False, action='store_true', help="Print out map at each step") parser.add_argument( "--render", default=False, action='store_true', help="Render Game") args = parser.parse_args() main(args)
[ "import json\nimport os\nimport numpy as np\nimport pommerman\nfrom pommerman.characters import Bomber as Bomber\nfrom pommerman.characters import Bomb as Bomb\nfrom pommerman.characters import Flame as Flame\nimport argparse\nimport time\n\n\ndef import_gamestate(filename) :\n with open(filename, 'r') as f:\n pm_gamestate = json.loads(f.read())\n states = pm_gamestate['state']\n # Values in state dict are actually json encoded\n # decode them\n new_states = []\n for s in states:\n new_state = {}\n for key, value in s.items():\n new_state[key] = json.loads(value)\n new_states.append(new_state)\n\n # Sort states by step count\n new_states.sort(key=lambda state: state['step_count'])\n\n # Remove intended_actions from states and add it as actions\n # to previous state\n\n for i in range(len(new_states) - 1):\n actions = new_states[i+1].pop('intended_actions')\n new_states[i]['action'] = actions\n\n # replace old state array\n pm_gamestate['state'] = new_states\n return pm_gamestate\n\ndef stateToScene(state) :\n agents = []\n for agent_state in state['agents']:\n agent = Bomber(agent_id = agent_state['agent_id'])\n position = tuple(agent_state['position'])\n agent.set_start_position(position)\n agent.reset( int(agent_state['ammo']),\n bool(agent_state['is_alive']),\n int(agent_state['blast_strength']),\n bool(agent_state['can_kick']))\n agents.append(agent)\n\n bombs = []\n for bomb_state in state['bombs']:\n direction = bomb_state['moving_direction']\n if direction is not None:\n direction = pommerman.constants.Action(direction)\n bomb = Bomb(agents[bomb_state['bomber_id']],\n tuple(bomb_state['position']),\n int(bomb_state['life']),\n int(bomb_state['blast_strength']),\n direction)\n bombs.append(bomb)\n\n items = {}\n for item_state in state['items']:\n items[tuple(item_state[0])] = item_state[1]\n\n flames = []\n for flame_state in state['flames']:\n flame = Flame(tuple(flame_state['position']),\n flame_state['life'])\n flames.append(flame)\n\n\n board = np.asarray(state['board'], np.uint8)\n\n return board, agents, bombs, items, flames\n\n\ndef main(args):\n\n viewer = None\n\n if (None == args.gamefile) :\n print(\"Please add --gamefile <file>\")\n return\n\n verify = not args.noverify\n render = args.render\n\n gs = import_gamestate(args.gamefile)\n\n board, agents, bombs, items, flames = stateToScene(gs['state'][0])\n\n for i in range(len(gs['state'])-1):\n\n action= gs['state'][i]['action'];\n\n if (args.verbose) :\n print (\"Step: \", i, \"Action: \", action);\n print (board)\n\n board, agents, bombs, items, flames = pommerman.forward_model.ForwardModel.step(action, board, agents, bombs, items, flames)\n\n if render:\n if viewer is None:\n viewer = pommerman.graphics.PommeViewer()\n viewer.set_board(board)\n viewer.set_agents(agents)\n if hasattr(viewer, 'set_bombs'):\n viewer.set_bombs(bombs)\n viewer.set_step(i)\n viewer.render()\n time.sleep(0.1)\n\n if verify:\n tboard, tagents, tbombs, titems, tflames = stateToScene(gs['state'][i+1])\n if ( not np.array_equal(board,tboard)):\n print (\"failed at step:\", i)\n return\n\n if (args.verbose) :\n print (\"Step: \", len(gs['state']) - 1);\n print (board)\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser(description='Replay Flags.')\n\n parser.add_argument(\n \"--noverify\",\n default=False,\n action='store_true',\n help=\"Whether to skip verifying. Defaults to False.\")\n\n\n parser.add_argument(\n '--gamefile',\n default=None,\n help='Game file to replay')\n\n parser.add_argument(\n \"--verbose\",\n default=False,\n action='store_true',\n help=\"Print out map at each step\")\n\n parser.add_argument(\n \"--render\",\n default=False,\n action='store_true',\n help=\"Render Game\")\n\n\n\n args = parser.parse_args()\n main(args)\n", "import json\nimport os\nimport numpy as np\nimport pommerman\nfrom pommerman.characters import Bomber as Bomber\nfrom pommerman.characters import Bomb as Bomb\nfrom pommerman.characters import Flame as Flame\nimport argparse\nimport time\n\n\ndef import_gamestate(filename):\n with open(filename, 'r') as f:\n pm_gamestate = json.loads(f.read())\n states = pm_gamestate['state']\n new_states = []\n for s in states:\n new_state = {}\n for key, value in s.items():\n new_state[key] = json.loads(value)\n new_states.append(new_state)\n new_states.sort(key=lambda state: state['step_count'])\n for i in range(len(new_states) - 1):\n actions = new_states[i + 1].pop('intended_actions')\n new_states[i]['action'] = actions\n pm_gamestate['state'] = new_states\n return pm_gamestate\n\n\ndef stateToScene(state):\n agents = []\n for agent_state in state['agents']:\n agent = Bomber(agent_id=agent_state['agent_id'])\n position = tuple(agent_state['position'])\n agent.set_start_position(position)\n agent.reset(int(agent_state['ammo']), bool(agent_state['is_alive']),\n int(agent_state['blast_strength']), bool(agent_state['can_kick']))\n agents.append(agent)\n bombs = []\n for bomb_state in state['bombs']:\n direction = bomb_state['moving_direction']\n if direction is not None:\n direction = pommerman.constants.Action(direction)\n bomb = Bomb(agents[bomb_state['bomber_id']], tuple(bomb_state[\n 'position']), int(bomb_state['life']), int(bomb_state[\n 'blast_strength']), direction)\n bombs.append(bomb)\n items = {}\n for item_state in state['items']:\n items[tuple(item_state[0])] = item_state[1]\n flames = []\n for flame_state in state['flames']:\n flame = Flame(tuple(flame_state['position']), flame_state['life'])\n flames.append(flame)\n board = np.asarray(state['board'], np.uint8)\n return board, agents, bombs, items, flames\n\n\ndef main(args):\n viewer = None\n if None == args.gamefile:\n print('Please add --gamefile <file>')\n return\n verify = not args.noverify\n render = args.render\n gs = import_gamestate(args.gamefile)\n board, agents, bombs, items, flames = stateToScene(gs['state'][0])\n for i in range(len(gs['state']) - 1):\n action = gs['state'][i]['action']\n if args.verbose:\n print('Step: ', i, 'Action: ', action)\n print(board)\n board, agents, bombs, items, flames = (pommerman.forward_model.\n ForwardModel.step(action, board, agents, bombs, items, flames))\n if render:\n if viewer is None:\n viewer = pommerman.graphics.PommeViewer()\n viewer.set_board(board)\n viewer.set_agents(agents)\n if hasattr(viewer, 'set_bombs'):\n viewer.set_bombs(bombs)\n viewer.set_step(i)\n viewer.render()\n time.sleep(0.1)\n if verify:\n tboard, tagents, tbombs, titems, tflames = stateToScene(gs[\n 'state'][i + 1])\n if not np.array_equal(board, tboard):\n print('failed at step:', i)\n return\n if args.verbose:\n print('Step: ', len(gs['state']) - 1)\n print(board)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Replay Flags.')\n parser.add_argument('--noverify', default=False, action='store_true',\n help='Whether to skip verifying. Defaults to False.')\n parser.add_argument('--gamefile', default=None, help='Game file to replay')\n parser.add_argument('--verbose', default=False, action='store_true',\n help='Print out map at each step')\n parser.add_argument('--render', default=False, action='store_true',\n help='Render Game')\n args = parser.parse_args()\n main(args)\n", "<import token>\n\n\ndef import_gamestate(filename):\n with open(filename, 'r') as f:\n pm_gamestate = json.loads(f.read())\n states = pm_gamestate['state']\n new_states = []\n for s in states:\n new_state = {}\n for key, value in s.items():\n new_state[key] = json.loads(value)\n new_states.append(new_state)\n new_states.sort(key=lambda state: state['step_count'])\n for i in range(len(new_states) - 1):\n actions = new_states[i + 1].pop('intended_actions')\n new_states[i]['action'] = actions\n pm_gamestate['state'] = new_states\n return pm_gamestate\n\n\ndef stateToScene(state):\n agents = []\n for agent_state in state['agents']:\n agent = Bomber(agent_id=agent_state['agent_id'])\n position = tuple(agent_state['position'])\n agent.set_start_position(position)\n agent.reset(int(agent_state['ammo']), bool(agent_state['is_alive']),\n int(agent_state['blast_strength']), bool(agent_state['can_kick']))\n agents.append(agent)\n bombs = []\n for bomb_state in state['bombs']:\n direction = bomb_state['moving_direction']\n if direction is not None:\n direction = pommerman.constants.Action(direction)\n bomb = Bomb(agents[bomb_state['bomber_id']], tuple(bomb_state[\n 'position']), int(bomb_state['life']), int(bomb_state[\n 'blast_strength']), direction)\n bombs.append(bomb)\n items = {}\n for item_state in state['items']:\n items[tuple(item_state[0])] = item_state[1]\n flames = []\n for flame_state in state['flames']:\n flame = Flame(tuple(flame_state['position']), flame_state['life'])\n flames.append(flame)\n board = np.asarray(state['board'], np.uint8)\n return board, agents, bombs, items, flames\n\n\ndef main(args):\n viewer = None\n if None == args.gamefile:\n print('Please add --gamefile <file>')\n return\n verify = not args.noverify\n render = args.render\n gs = import_gamestate(args.gamefile)\n board, agents, bombs, items, flames = stateToScene(gs['state'][0])\n for i in range(len(gs['state']) - 1):\n action = gs['state'][i]['action']\n if args.verbose:\n print('Step: ', i, 'Action: ', action)\n print(board)\n board, agents, bombs, items, flames = (pommerman.forward_model.\n ForwardModel.step(action, board, agents, bombs, items, flames))\n if render:\n if viewer is None:\n viewer = pommerman.graphics.PommeViewer()\n viewer.set_board(board)\n viewer.set_agents(agents)\n if hasattr(viewer, 'set_bombs'):\n viewer.set_bombs(bombs)\n viewer.set_step(i)\n viewer.render()\n time.sleep(0.1)\n if verify:\n tboard, tagents, tbombs, titems, tflames = stateToScene(gs[\n 'state'][i + 1])\n if not np.array_equal(board, tboard):\n print('failed at step:', i)\n return\n if args.verbose:\n print('Step: ', len(gs['state']) - 1)\n print(board)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Replay Flags.')\n parser.add_argument('--noverify', default=False, action='store_true',\n help='Whether to skip verifying. Defaults to False.')\n parser.add_argument('--gamefile', default=None, help='Game file to replay')\n parser.add_argument('--verbose', default=False, action='store_true',\n help='Print out map at each step')\n parser.add_argument('--render', default=False, action='store_true',\n help='Render Game')\n args = parser.parse_args()\n main(args)\n", "<import token>\n\n\ndef import_gamestate(filename):\n with open(filename, 'r') as f:\n pm_gamestate = json.loads(f.read())\n states = pm_gamestate['state']\n new_states = []\n for s in states:\n new_state = {}\n for key, value in s.items():\n new_state[key] = json.loads(value)\n new_states.append(new_state)\n new_states.sort(key=lambda state: state['step_count'])\n for i in range(len(new_states) - 1):\n actions = new_states[i + 1].pop('intended_actions')\n new_states[i]['action'] = actions\n pm_gamestate['state'] = new_states\n return pm_gamestate\n\n\ndef stateToScene(state):\n agents = []\n for agent_state in state['agents']:\n agent = Bomber(agent_id=agent_state['agent_id'])\n position = tuple(agent_state['position'])\n agent.set_start_position(position)\n agent.reset(int(agent_state['ammo']), bool(agent_state['is_alive']),\n int(agent_state['blast_strength']), bool(agent_state['can_kick']))\n agents.append(agent)\n bombs = []\n for bomb_state in state['bombs']:\n direction = bomb_state['moving_direction']\n if direction is not None:\n direction = pommerman.constants.Action(direction)\n bomb = Bomb(agents[bomb_state['bomber_id']], tuple(bomb_state[\n 'position']), int(bomb_state['life']), int(bomb_state[\n 'blast_strength']), direction)\n bombs.append(bomb)\n items = {}\n for item_state in state['items']:\n items[tuple(item_state[0])] = item_state[1]\n flames = []\n for flame_state in state['flames']:\n flame = Flame(tuple(flame_state['position']), flame_state['life'])\n flames.append(flame)\n board = np.asarray(state['board'], np.uint8)\n return board, agents, bombs, items, flames\n\n\ndef main(args):\n viewer = None\n if None == args.gamefile:\n print('Please add --gamefile <file>')\n return\n verify = not args.noverify\n render = args.render\n gs = import_gamestate(args.gamefile)\n board, agents, bombs, items, flames = stateToScene(gs['state'][0])\n for i in range(len(gs['state']) - 1):\n action = gs['state'][i]['action']\n if args.verbose:\n print('Step: ', i, 'Action: ', action)\n print(board)\n board, agents, bombs, items, flames = (pommerman.forward_model.\n ForwardModel.step(action, board, agents, bombs, items, flames))\n if render:\n if viewer is None:\n viewer = pommerman.graphics.PommeViewer()\n viewer.set_board(board)\n viewer.set_agents(agents)\n if hasattr(viewer, 'set_bombs'):\n viewer.set_bombs(bombs)\n viewer.set_step(i)\n viewer.render()\n time.sleep(0.1)\n if verify:\n tboard, tagents, tbombs, titems, tflames = stateToScene(gs[\n 'state'][i + 1])\n if not np.array_equal(board, tboard):\n print('failed at step:', i)\n return\n if args.verbose:\n print('Step: ', len(gs['state']) - 1)\n print(board)\n\n\n<code token>\n", "<import token>\n<function token>\n\n\ndef stateToScene(state):\n agents = []\n for agent_state in state['agents']:\n agent = Bomber(agent_id=agent_state['agent_id'])\n position = tuple(agent_state['position'])\n agent.set_start_position(position)\n agent.reset(int(agent_state['ammo']), bool(agent_state['is_alive']),\n int(agent_state['blast_strength']), bool(agent_state['can_kick']))\n agents.append(agent)\n bombs = []\n for bomb_state in state['bombs']:\n direction = bomb_state['moving_direction']\n if direction is not None:\n direction = pommerman.constants.Action(direction)\n bomb = Bomb(agents[bomb_state['bomber_id']], tuple(bomb_state[\n 'position']), int(bomb_state['life']), int(bomb_state[\n 'blast_strength']), direction)\n bombs.append(bomb)\n items = {}\n for item_state in state['items']:\n items[tuple(item_state[0])] = item_state[1]\n flames = []\n for flame_state in state['flames']:\n flame = Flame(tuple(flame_state['position']), flame_state['life'])\n flames.append(flame)\n board = np.asarray(state['board'], np.uint8)\n return board, agents, bombs, items, flames\n\n\ndef main(args):\n viewer = None\n if None == args.gamefile:\n print('Please add --gamefile <file>')\n return\n verify = not args.noverify\n render = args.render\n gs = import_gamestate(args.gamefile)\n board, agents, bombs, items, flames = stateToScene(gs['state'][0])\n for i in range(len(gs['state']) - 1):\n action = gs['state'][i]['action']\n if args.verbose:\n print('Step: ', i, 'Action: ', action)\n print(board)\n board, agents, bombs, items, flames = (pommerman.forward_model.\n ForwardModel.step(action, board, agents, bombs, items, flames))\n if render:\n if viewer is None:\n viewer = pommerman.graphics.PommeViewer()\n viewer.set_board(board)\n viewer.set_agents(agents)\n if hasattr(viewer, 'set_bombs'):\n viewer.set_bombs(bombs)\n viewer.set_step(i)\n viewer.render()\n time.sleep(0.1)\n if verify:\n tboard, tagents, tbombs, titems, tflames = stateToScene(gs[\n 'state'][i + 1])\n if not np.array_equal(board, tboard):\n print('failed at step:', i)\n return\n if args.verbose:\n print('Step: ', len(gs['state']) - 1)\n print(board)\n\n\n<code token>\n", "<import token>\n<function token>\n\n\ndef stateToScene(state):\n agents = []\n for agent_state in state['agents']:\n agent = Bomber(agent_id=agent_state['agent_id'])\n position = tuple(agent_state['position'])\n agent.set_start_position(position)\n agent.reset(int(agent_state['ammo']), bool(agent_state['is_alive']),\n int(agent_state['blast_strength']), bool(agent_state['can_kick']))\n agents.append(agent)\n bombs = []\n for bomb_state in state['bombs']:\n direction = bomb_state['moving_direction']\n if direction is not None:\n direction = pommerman.constants.Action(direction)\n bomb = Bomb(agents[bomb_state['bomber_id']], tuple(bomb_state[\n 'position']), int(bomb_state['life']), int(bomb_state[\n 'blast_strength']), direction)\n bombs.append(bomb)\n items = {}\n for item_state in state['items']:\n items[tuple(item_state[0])] = item_state[1]\n flames = []\n for flame_state in state['flames']:\n flame = Flame(tuple(flame_state['position']), flame_state['life'])\n flames.append(flame)\n board = np.asarray(state['board'], np.uint8)\n return board, agents, bombs, items, flames\n\n\n<function token>\n<code token>\n", "<import token>\n<function token>\n<function token>\n<function token>\n<code token>\n" ]
false
98,980
e60a08aaab2d25f9e6ce7683aeb67242277ba69a
import sys from PyQt4 import QtGui, QtCore from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar import matplotlib.pyplot as plt from matplotlib import style from collections import deque import re style.use('ggplot') import serial import time import random class Window(QtGui.QMainWindow): def __init__(self): super(Window,self).__init__() # some variable self.dati = [] self.Vepm = deque(range(0, 200, 2), 100) self.Tepm = deque(range(0, 100, 1), 100) self.Vipm = deque(range(0, 300, 3), 100) self.Tipm = deque(range(0, 800, 8), 100) self.loop = 0 self.flag = 0 self.tempaccio = deque([0] * 100, 100) self.clock = 5; # set geometry of the window self.setGeometry(50,50,1250,650) self.setWindowTitle("Shielding Platform Control Interface") self.setWindowIcon(QtGui.QIcon('stormy.jpg')) # make the menu comunica = QtGui.QAction("&Open Serial", self) comunica.setShortcut("Ctrl+G") comunica.setStatusTip('Start comunicating with STM32') comunica.triggered.connect(self.start_serial) basta_parlare = QtGui.QAction("&Close Serial", self) basta_parlare.setShortcut("Ctrl+H") basta_parlare.setStatusTip('Stop comunicating with STM32') basta_parlare.triggered.connect(self.stop_serial) chiudi = QtGui.QAction("&Exit", self) chiudi.setShortcut("Ctrl+Q") chiudi.setStatusTip('Leave the app') chiudi.triggered.connect(self.close_application) # saveFile = QtGui.QAction("&Save File", self) saveFile.setShortcut("Ctrl + S") saveFile.setStatusTip('Save File') saveFile.triggered.connect(self.file_save) self.statusBar() # start a timer based acquisition self.timer = QtCore.QTimer(self) self.timer.timeout.connect(self.tempo) self.timer.start(self.clock) mainMenu = self.menuBar() fileMenu = mainMenu.addMenu('&File') fileMenu.addAction(comunica) fileMenu.addAction(basta_parlare) fileMenu.addAction(saveFile) fileMenu.addAction(chiudi) self.home() def home(self): self.main_widget = Main_widget(self) self.setCentralWidget(self.main_widget) #set title and axis labels on the 4 plots self.main_widget.EPM.ax1.set_title('EPM velocity') self.main_widget.EPM.ax1.set_xlabel('time [ms]') self.main_widget.EPM.ax1.set_ylabel('Velocity [RPM]') self.main_widget.EPM.ax2.set_title('EPM Torque') self.main_widget.EPM.ax2.set_xlabel('time[ms]') self.main_widget.EPM.ax2.set_ylabel('Torque [mNm]') self.main_widget.IPM.ax1.set_title('IPM velocity') self.main_widget.IPM.ax1.set_xlabel('time[ms]') self.main_widget.IPM.ax1.set_ylabel('Velocity [RPM]') self.main_widget.IPM.ax2.set_title('EPM Torque') self.main_widget.IPM.ax2.set_xlabel('time[ms]') self.main_widget.IPM.ax2.set_ylabel('Torque [mNm]') self.show() def file_save(self): name = QtGui.QFileDialog.getSaveFileName(self,'Save File') file = open(name,'w') title = "Vepm\tTepm\tVipm\tTipm\n\n" file.write(title) text = "" for i in range(len(self.Vepm)): text += str(self.Vepm[i]) + "\t" + str(self.Tepm[i]) + "\t" + str(self.Vipm[i]) + "\t" + str(self.Tipm[i]) + "\n" file.write(text) print(text) file.close() def start_serial(self): self.result = 'COM9' self.s = serial.Serial(self.result,9600) # self.s.open() # self.s.write("8") self.flag = 1 def stop_serial(self,s): self.s.close() self.flag = 0 def close_application(self): choice = QtGui.QMessageBox.question(self,'Extract!', "Wanna get the duck out?", QtGui.QMessageBox.Yes | QtGui.QMessageBox.No) if choice == QtGui.QMessageBox.Yes: print("Extracting naaaaaaaaaaaaaaow!") sys.exit() else: pass def tempo(self): # self.dati = self.s.read(2) if self.flag: self.dati=self.s.readline() # print(self.dati) data = re.findall(r"[-+]?\d*\.\d+|\d+", self.dati) print(data) # print(type(data)) self.Vepm.append(float(data[0])) self.Tepm.append(float(data[1])) self.Vipm.append(float(data[2])) self.Tipm.append(float(data[3])) # print(self.Vepm) # print(type(self.Vepm)) # [random.random() for i in range(10)] self.main_widget.EPM.plot(self.tempaccio,self.Vepm, 1) self.main_widget.EPM.plot(self.tempaccio,self.Tepm, 2) self.main_widget.IPM.plot(self.tempaccio,self.Vipm, 1) self.main_widget.IPM.plot(self.tempaccio,self.Tipm, 2) # self.main_widget.EPM.plot(self.tempaccio, self.Tepm, self.main_widget.EPM.plot_torque, # self.main_widget.EPM.ax2, self.main_widget.EPM.canvas2) # self.main_widget.EPM.plot(self.tempaccio,self.Tepm) # print(self.loop) self.loop += 1 self.main_widget.loopcicle.display(self.loop) self.tempaccio.append(self.tempaccio[-1]+self.clock) # print(self.tempaccio) else: print('comunication blocked\n') class Magnet(QtGui.QWidget): def __init__(self, id, parent=None): super(Magnet, self).__init__(parent) self.parent = parent self.id = id # two figures instance to plot on Velocity and Torque self.figure1 = plt.figure() self.ax1 = self.figure1.add_subplot(111) self.figure2 = plt.figure() self.ax2 = self.figure2.add_subplot(111) self.ax1.hold(False) self.ax2.hold(False) self.canvas1 = FigureCanvas(self.figure1) self.canvas2 = FigureCanvas(self.figure2) self.toolbar1 = NavigationToolbar(self.canvas1, self) self.toolbar2 = NavigationToolbar(self.canvas2,self) self.PWM = PWM_slider(self) # set the layout layout = QtGui.QVBoxLayout() layout.addWidget(self.toolbar1) layout.addWidget(self.canvas1) layout.addWidget(self.PWM) layout.addWidget(self.toolbar2) layout.addWidget(self.canvas2) self.setLayout(layout) # ax = self.figure1.add_subplot(111) # ax.hold(False) self.plot_velocity = self.ax1.plot([], '*-')[0] self.plot_torque = self.ax2.plot([],'*-')[0] def plot(self, tempaccio, data, whichplot): if whichplot == 1: # data = [random.random() for i in range(10)] self.plot_velocity.set_ydata(data) self.plot_velocity.set_xdata(tempaccio) self.ax1.set_xlim(tempaccio[0],tempaccio[-1]) self.ax1.set_ylim(min(data)*.8,max(data)*1.2) self.canvas1.draw() else: if whichplot == 2: self.plot_torque.set_ydata(data) self.plot_torque.set_xdata(tempaccio) self.ax2.set_xlim(tempaccio[0], tempaccio[-1]) self.ax2.set_ylim(min(data) * .8, max(data) * 1.2) self.canvas2.draw() else: print("Specify better your plot") class PWM_slider (QtGui.QWidget): def __init__(self, parent=None): super(PWM_slider,self).__init__(parent) self.parent = parent # self.button = QtGui.QPushButton('Plot') # self.button.clicked.connect(self.plot) self.lcd = QtGui.QLCDNumber(self) self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self) self.sld.valueChanged.connect(self.lcd.display) self.sld.valueChanged.connect(self.parla) # print(self.sld.valueChanged[int]) # ciccia = self.sld.value layout = QtGui.QHBoxLayout() layout.addWidget(self.lcd) layout.addWidget(self.sld) self.setLayout(layout) def parla(self): # print('1' + str(self.sld.value())) print(str(self.sld.value())) if self.parent.parent.parent.flag: if self.sld.value() < 10: print(str(self.parent.id) + '0' + str(self.sld.value())) self.parent.parent.parent.s.write(str(self.parent.id) + '0' + str(self.sld.value())) time.sleep(0.08) else: print(str(self.parent.id) + str(self.sld.value())) self.parent.parent.parent.s.write(str(self.parent.id) + str(self.sld.value())) time.sleep(0.08) # if (self.s.isOpen()): # self.s.write('1') # self.s.write(value) # else: # pass class Main_widget(QtGui.QWidget): def __init__(self, parent=None): super(Main_widget, self).__init__(parent) self.parent = parent self.EPM = Magnet(1,self) self.IPM = Magnet(2,self) self.loopcicle = QtGui.QLCDNumber(self) layout = QtGui.QHBoxLayout() layout.addWidget(self.EPM) layout.addWidget(self.loopcicle) layout.addWidget(self.IPM) self.setLayout(layout) def run(): app = QtGui.QApplication(sys.argv) GUI = Window() sys.exit(app.exec_()) run()
[ "import sys\nfrom PyQt4 import QtGui, QtCore\nfrom matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas\nfrom matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar\nimport matplotlib.pyplot as plt\nfrom matplotlib import style\nfrom collections import deque\nimport re\nstyle.use('ggplot')\nimport serial\nimport time\nimport random\n\n\n\nclass Window(QtGui.QMainWindow):\n\n def __init__(self):\n super(Window,self).__init__()\n# some variable\n\n self.dati = []\n self.Vepm = deque(range(0, 200, 2), 100)\n self.Tepm = deque(range(0, 100, 1), 100)\n self.Vipm = deque(range(0, 300, 3), 100)\n self.Tipm = deque(range(0, 800, 8), 100)\n\n self.loop = 0\n self.flag = 0\n self.tempaccio = deque([0] * 100, 100)\n self.clock = 5;\n\n# set geometry of the window\n self.setGeometry(50,50,1250,650)\n self.setWindowTitle(\"Shielding Platform Control Interface\")\n self.setWindowIcon(QtGui.QIcon('stormy.jpg'))\n# make the menu\n comunica = QtGui.QAction(\"&Open Serial\", self)\n comunica.setShortcut(\"Ctrl+G\")\n comunica.setStatusTip('Start comunicating with STM32')\n comunica.triggered.connect(self.start_serial)\n\n basta_parlare = QtGui.QAction(\"&Close Serial\", self)\n basta_parlare.setShortcut(\"Ctrl+H\")\n basta_parlare.setStatusTip('Stop comunicating with STM32')\n basta_parlare.triggered.connect(self.stop_serial)\n\n chiudi = QtGui.QAction(\"&Exit\", self)\n chiudi.setShortcut(\"Ctrl+Q\")\n chiudi.setStatusTip('Leave the app')\n chiudi.triggered.connect(self.close_application)\n #\n saveFile = QtGui.QAction(\"&Save File\", self)\n saveFile.setShortcut(\"Ctrl + S\")\n saveFile.setStatusTip('Save File')\n saveFile.triggered.connect(self.file_save)\n\n self.statusBar()\n# start a timer based acquisition\n self.timer = QtCore.QTimer(self)\n self.timer.timeout.connect(self.tempo)\n self.timer.start(self.clock)\n\n mainMenu = self.menuBar()\n fileMenu = mainMenu.addMenu('&File')\n fileMenu.addAction(comunica)\n fileMenu.addAction(basta_parlare)\n fileMenu.addAction(saveFile)\n fileMenu.addAction(chiudi)\n\n self.home()\n\n def home(self):\n\n self.main_widget = Main_widget(self)\n self.setCentralWidget(self.main_widget)\n\n #set title and axis labels on the 4 plots\n self.main_widget.EPM.ax1.set_title('EPM velocity')\n self.main_widget.EPM.ax1.set_xlabel('time [ms]')\n self.main_widget.EPM.ax1.set_ylabel('Velocity [RPM]')\n\n self.main_widget.EPM.ax2.set_title('EPM Torque')\n self.main_widget.EPM.ax2.set_xlabel('time[ms]')\n self.main_widget.EPM.ax2.set_ylabel('Torque [mNm]')\n\n self.main_widget.IPM.ax1.set_title('IPM velocity')\n self.main_widget.IPM.ax1.set_xlabel('time[ms]')\n self.main_widget.IPM.ax1.set_ylabel('Velocity [RPM]')\n\n self.main_widget.IPM.ax2.set_title('EPM Torque')\n self.main_widget.IPM.ax2.set_xlabel('time[ms]')\n self.main_widget.IPM.ax2.set_ylabel('Torque [mNm]')\n\n self.show()\n\n def file_save(self):\n\n name = QtGui.QFileDialog.getSaveFileName(self,'Save File')\n file = open(name,'w')\n title = \"Vepm\\tTepm\\tVipm\\tTipm\\n\\n\"\n file.write(title)\n text = \"\"\n for i in range(len(self.Vepm)):\n text += str(self.Vepm[i]) + \"\\t\" + str(self.Tepm[i]) + \"\\t\" + str(self.Vipm[i]) + \"\\t\" + str(self.Tipm[i]) + \"\\n\"\n file.write(text)\n print(text)\n file.close()\n\n def start_serial(self):\n self.result = 'COM9'\n self.s = serial.Serial(self.result,9600)\n # self.s.open()\n # self.s.write(\"8\")\n self.flag = 1\n\n def stop_serial(self,s):\n self.s.close()\n self.flag = 0\n\n def close_application(self):\n choice = QtGui.QMessageBox.question(self,'Extract!', \"Wanna get the duck out?\",\n QtGui.QMessageBox.Yes | QtGui.QMessageBox.No)\n if choice == QtGui.QMessageBox.Yes:\n print(\"Extracting naaaaaaaaaaaaaaow!\")\n sys.exit()\n else:\n pass\n\n def tempo(self):\n # self.dati = self.s.read(2)\n if self.flag:\n self.dati=self.s.readline()\n # print(self.dati)\n\n data = re.findall(r\"[-+]?\\d*\\.\\d+|\\d+\", self.dati)\n print(data)\n # print(type(data))\n\n self.Vepm.append(float(data[0]))\n self.Tepm.append(float(data[1]))\n self.Vipm.append(float(data[2]))\n self.Tipm.append(float(data[3]))\n\n # print(self.Vepm)\n # print(type(self.Vepm))\n # [random.random() for i in range(10)]\n self.main_widget.EPM.plot(self.tempaccio,self.Vepm, 1)\n self.main_widget.EPM.plot(self.tempaccio,self.Tepm, 2)\n self.main_widget.IPM.plot(self.tempaccio,self.Vipm, 1)\n self.main_widget.IPM.plot(self.tempaccio,self.Tipm, 2)\n\n # self.main_widget.EPM.plot(self.tempaccio, self.Tepm, self.main_widget.EPM.plot_torque,\n # self.main_widget.EPM.ax2, self.main_widget.EPM.canvas2)\n # self.main_widget.EPM.plot(self.tempaccio,self.Tepm)\n # print(self.loop)\n self.loop += 1\n self.main_widget.loopcicle.display(self.loop)\n self.tempaccio.append(self.tempaccio[-1]+self.clock)\n\n # print(self.tempaccio)\n\n else:\n print('comunication blocked\\n')\n\n\n\nclass Magnet(QtGui.QWidget):\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n # two figures instance to plot on Velocity and Torque\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2,self)\n\n self.PWM = PWM_slider(self)\n\n # set the layout\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n\n # ax = self.figure1.add_subplot(111)\n # ax.hold(False)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([],'*-')[0]\n\n def plot(self, tempaccio, data, whichplot):\n\n if whichplot == 1:\n # data = [random.random() for i in range(10)]\n self.plot_velocity.set_ydata(data)\n self.plot_velocity.set_xdata(tempaccio)\n self.ax1.set_xlim(tempaccio[0],tempaccio[-1])\n self.ax1.set_ylim(min(data)*.8,max(data)*1.2)\n self.canvas1.draw()\n else:\n if whichplot == 2:\n self.plot_torque.set_ydata(data)\n self.plot_torque.set_xdata(tempaccio)\n self.ax2.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax2.set_ylim(min(data) * .8, max(data) * 1.2)\n self.canvas2.draw()\n else:\n print(\"Specify better your plot\")\n\nclass PWM_slider (QtGui.QWidget):\n def __init__(self, parent=None):\n super(PWM_slider,self).__init__(parent)\n self.parent = parent\n\n # self.button = QtGui.QPushButton('Plot')\n # self.button.clicked.connect(self.plot)\n\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n # print(self.sld.valueChanged[int])\n # ciccia = self.sld.value\n\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n # print('1' + str(self.sld.value()))\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' + str(self.sld.value()))\n time.sleep(0.08)\n\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str(self.sld.value()))\n time.sleep(0.08)\n\n\n # if (self.s.isOpen()):\n # self.s.write('1')\n # self.s.write(value)\n # else:\n # pass\n\n\nclass Main_widget(QtGui.QWidget):\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n\n self.EPM = Magnet(1,self)\n self.IPM = Magnet(2,self)\n self.loopcicle = QtGui.QLCDNumber(self)\n\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n\n self.setLayout(layout)\n\ndef run():\n app = QtGui.QApplication(sys.argv)\n GUI = Window()\n sys.exit(app.exec_())\n\nrun()\n\n\n\n\n", "import sys\nfrom PyQt4 import QtGui, QtCore\nfrom matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas\nfrom matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar\nimport matplotlib.pyplot as plt\nfrom matplotlib import style\nfrom collections import deque\nimport re\nstyle.use('ggplot')\nimport serial\nimport time\nimport random\n\n\nclass Window(QtGui.QMainWindow):\n\n def __init__(self):\n super(Window, self).__init__()\n self.dati = []\n self.Vepm = deque(range(0, 200, 2), 100)\n self.Tepm = deque(range(0, 100, 1), 100)\n self.Vipm = deque(range(0, 300, 3), 100)\n self.Tipm = deque(range(0, 800, 8), 100)\n self.loop = 0\n self.flag = 0\n self.tempaccio = deque([0] * 100, 100)\n self.clock = 5\n self.setGeometry(50, 50, 1250, 650)\n self.setWindowTitle('Shielding Platform Control Interface')\n self.setWindowIcon(QtGui.QIcon('stormy.jpg'))\n comunica = QtGui.QAction('&Open Serial', self)\n comunica.setShortcut('Ctrl+G')\n comunica.setStatusTip('Start comunicating with STM32')\n comunica.triggered.connect(self.start_serial)\n basta_parlare = QtGui.QAction('&Close Serial', self)\n basta_parlare.setShortcut('Ctrl+H')\n basta_parlare.setStatusTip('Stop comunicating with STM32')\n basta_parlare.triggered.connect(self.stop_serial)\n chiudi = QtGui.QAction('&Exit', self)\n chiudi.setShortcut('Ctrl+Q')\n chiudi.setStatusTip('Leave the app')\n chiudi.triggered.connect(self.close_application)\n saveFile = QtGui.QAction('&Save File', self)\n saveFile.setShortcut('Ctrl + S')\n saveFile.setStatusTip('Save File')\n saveFile.triggered.connect(self.file_save)\n self.statusBar()\n self.timer = QtCore.QTimer(self)\n self.timer.timeout.connect(self.tempo)\n self.timer.start(self.clock)\n mainMenu = self.menuBar()\n fileMenu = mainMenu.addMenu('&File')\n fileMenu.addAction(comunica)\n fileMenu.addAction(basta_parlare)\n fileMenu.addAction(saveFile)\n fileMenu.addAction(chiudi)\n self.home()\n\n def home(self):\n self.main_widget = Main_widget(self)\n self.setCentralWidget(self.main_widget)\n self.main_widget.EPM.ax1.set_title('EPM velocity')\n self.main_widget.EPM.ax1.set_xlabel('time [ms]')\n self.main_widget.EPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.EPM.ax2.set_title('EPM Torque')\n self.main_widget.EPM.ax2.set_xlabel('time[ms]')\n self.main_widget.EPM.ax2.set_ylabel('Torque [mNm]')\n self.main_widget.IPM.ax1.set_title('IPM velocity')\n self.main_widget.IPM.ax1.set_xlabel('time[ms]')\n self.main_widget.IPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.IPM.ax2.set_title('EPM Torque')\n self.main_widget.IPM.ax2.set_xlabel('time[ms]')\n self.main_widget.IPM.ax2.set_ylabel('Torque [mNm]')\n self.show()\n\n def file_save(self):\n name = QtGui.QFileDialog.getSaveFileName(self, 'Save File')\n file = open(name, 'w')\n title = 'Vepm\\tTepm\\tVipm\\tTipm\\n\\n'\n file.write(title)\n text = ''\n for i in range(len(self.Vepm)):\n text += str(self.Vepm[i]) + '\\t' + str(self.Tepm[i]) + '\\t' + str(\n self.Vipm[i]) + '\\t' + str(self.Tipm[i]) + '\\n'\n file.write(text)\n print(text)\n file.close()\n\n def start_serial(self):\n self.result = 'COM9'\n self.s = serial.Serial(self.result, 9600)\n self.flag = 1\n\n def stop_serial(self, s):\n self.s.close()\n self.flag = 0\n\n def close_application(self):\n choice = QtGui.QMessageBox.question(self, 'Extract!',\n 'Wanna get the duck out?', QtGui.QMessageBox.Yes | QtGui.\n QMessageBox.No)\n if choice == QtGui.QMessageBox.Yes:\n print('Extracting naaaaaaaaaaaaaaow!')\n sys.exit()\n else:\n pass\n\n def tempo(self):\n if self.flag:\n self.dati = self.s.readline()\n data = re.findall('[-+]?\\\\d*\\\\.\\\\d+|\\\\d+', self.dati)\n print(data)\n self.Vepm.append(float(data[0]))\n self.Tepm.append(float(data[1]))\n self.Vipm.append(float(data[2]))\n self.Tipm.append(float(data[3]))\n self.main_widget.EPM.plot(self.tempaccio, self.Vepm, 1)\n self.main_widget.EPM.plot(self.tempaccio, self.Tepm, 2)\n self.main_widget.IPM.plot(self.tempaccio, self.Vipm, 1)\n self.main_widget.IPM.plot(self.tempaccio, self.Tipm, 2)\n self.loop += 1\n self.main_widget.loopcicle.display(self.loop)\n self.tempaccio.append(self.tempaccio[-1] + self.clock)\n else:\n print('comunication blocked\\n')\n\n\nclass Magnet(QtGui.QWidget):\n\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2, self)\n self.PWM = PWM_slider(self)\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([], '*-')[0]\n\n def plot(self, tempaccio, data, whichplot):\n if whichplot == 1:\n self.plot_velocity.set_ydata(data)\n self.plot_velocity.set_xdata(tempaccio)\n self.ax1.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax1.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas1.draw()\n elif whichplot == 2:\n self.plot_torque.set_ydata(data)\n self.plot_torque.set_xdata(tempaccio)\n self.ax2.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax2.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas2.draw()\n else:\n print('Specify better your plot')\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\ndef run():\n app = QtGui.QApplication(sys.argv)\n GUI = Window()\n sys.exit(app.exec_())\n\n\nrun()\n", "<import token>\nstyle.use('ggplot')\n<import token>\n\n\nclass Window(QtGui.QMainWindow):\n\n def __init__(self):\n super(Window, self).__init__()\n self.dati = []\n self.Vepm = deque(range(0, 200, 2), 100)\n self.Tepm = deque(range(0, 100, 1), 100)\n self.Vipm = deque(range(0, 300, 3), 100)\n self.Tipm = deque(range(0, 800, 8), 100)\n self.loop = 0\n self.flag = 0\n self.tempaccio = deque([0] * 100, 100)\n self.clock = 5\n self.setGeometry(50, 50, 1250, 650)\n self.setWindowTitle('Shielding Platform Control Interface')\n self.setWindowIcon(QtGui.QIcon('stormy.jpg'))\n comunica = QtGui.QAction('&Open Serial', self)\n comunica.setShortcut('Ctrl+G')\n comunica.setStatusTip('Start comunicating with STM32')\n comunica.triggered.connect(self.start_serial)\n basta_parlare = QtGui.QAction('&Close Serial', self)\n basta_parlare.setShortcut('Ctrl+H')\n basta_parlare.setStatusTip('Stop comunicating with STM32')\n basta_parlare.triggered.connect(self.stop_serial)\n chiudi = QtGui.QAction('&Exit', self)\n chiudi.setShortcut('Ctrl+Q')\n chiudi.setStatusTip('Leave the app')\n chiudi.triggered.connect(self.close_application)\n saveFile = QtGui.QAction('&Save File', self)\n saveFile.setShortcut('Ctrl + S')\n saveFile.setStatusTip('Save File')\n saveFile.triggered.connect(self.file_save)\n self.statusBar()\n self.timer = QtCore.QTimer(self)\n self.timer.timeout.connect(self.tempo)\n self.timer.start(self.clock)\n mainMenu = self.menuBar()\n fileMenu = mainMenu.addMenu('&File')\n fileMenu.addAction(comunica)\n fileMenu.addAction(basta_parlare)\n fileMenu.addAction(saveFile)\n fileMenu.addAction(chiudi)\n self.home()\n\n def home(self):\n self.main_widget = Main_widget(self)\n self.setCentralWidget(self.main_widget)\n self.main_widget.EPM.ax1.set_title('EPM velocity')\n self.main_widget.EPM.ax1.set_xlabel('time [ms]')\n self.main_widget.EPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.EPM.ax2.set_title('EPM Torque')\n self.main_widget.EPM.ax2.set_xlabel('time[ms]')\n self.main_widget.EPM.ax2.set_ylabel('Torque [mNm]')\n self.main_widget.IPM.ax1.set_title('IPM velocity')\n self.main_widget.IPM.ax1.set_xlabel('time[ms]')\n self.main_widget.IPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.IPM.ax2.set_title('EPM Torque')\n self.main_widget.IPM.ax2.set_xlabel('time[ms]')\n self.main_widget.IPM.ax2.set_ylabel('Torque [mNm]')\n self.show()\n\n def file_save(self):\n name = QtGui.QFileDialog.getSaveFileName(self, 'Save File')\n file = open(name, 'w')\n title = 'Vepm\\tTepm\\tVipm\\tTipm\\n\\n'\n file.write(title)\n text = ''\n for i in range(len(self.Vepm)):\n text += str(self.Vepm[i]) + '\\t' + str(self.Tepm[i]) + '\\t' + str(\n self.Vipm[i]) + '\\t' + str(self.Tipm[i]) + '\\n'\n file.write(text)\n print(text)\n file.close()\n\n def start_serial(self):\n self.result = 'COM9'\n self.s = serial.Serial(self.result, 9600)\n self.flag = 1\n\n def stop_serial(self, s):\n self.s.close()\n self.flag = 0\n\n def close_application(self):\n choice = QtGui.QMessageBox.question(self, 'Extract!',\n 'Wanna get the duck out?', QtGui.QMessageBox.Yes | QtGui.\n QMessageBox.No)\n if choice == QtGui.QMessageBox.Yes:\n print('Extracting naaaaaaaaaaaaaaow!')\n sys.exit()\n else:\n pass\n\n def tempo(self):\n if self.flag:\n self.dati = self.s.readline()\n data = re.findall('[-+]?\\\\d*\\\\.\\\\d+|\\\\d+', self.dati)\n print(data)\n self.Vepm.append(float(data[0]))\n self.Tepm.append(float(data[1]))\n self.Vipm.append(float(data[2]))\n self.Tipm.append(float(data[3]))\n self.main_widget.EPM.plot(self.tempaccio, self.Vepm, 1)\n self.main_widget.EPM.plot(self.tempaccio, self.Tepm, 2)\n self.main_widget.IPM.plot(self.tempaccio, self.Vipm, 1)\n self.main_widget.IPM.plot(self.tempaccio, self.Tipm, 2)\n self.loop += 1\n self.main_widget.loopcicle.display(self.loop)\n self.tempaccio.append(self.tempaccio[-1] + self.clock)\n else:\n print('comunication blocked\\n')\n\n\nclass Magnet(QtGui.QWidget):\n\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2, self)\n self.PWM = PWM_slider(self)\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([], '*-')[0]\n\n def plot(self, tempaccio, data, whichplot):\n if whichplot == 1:\n self.plot_velocity.set_ydata(data)\n self.plot_velocity.set_xdata(tempaccio)\n self.ax1.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax1.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas1.draw()\n elif whichplot == 2:\n self.plot_torque.set_ydata(data)\n self.plot_torque.set_xdata(tempaccio)\n self.ax2.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax2.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas2.draw()\n else:\n print('Specify better your plot')\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\ndef run():\n app = QtGui.QApplication(sys.argv)\n GUI = Window()\n sys.exit(app.exec_())\n\n\nrun()\n", "<import token>\n<code token>\n<import token>\n\n\nclass Window(QtGui.QMainWindow):\n\n def __init__(self):\n super(Window, self).__init__()\n self.dati = []\n self.Vepm = deque(range(0, 200, 2), 100)\n self.Tepm = deque(range(0, 100, 1), 100)\n self.Vipm = deque(range(0, 300, 3), 100)\n self.Tipm = deque(range(0, 800, 8), 100)\n self.loop = 0\n self.flag = 0\n self.tempaccio = deque([0] * 100, 100)\n self.clock = 5\n self.setGeometry(50, 50, 1250, 650)\n self.setWindowTitle('Shielding Platform Control Interface')\n self.setWindowIcon(QtGui.QIcon('stormy.jpg'))\n comunica = QtGui.QAction('&Open Serial', self)\n comunica.setShortcut('Ctrl+G')\n comunica.setStatusTip('Start comunicating with STM32')\n comunica.triggered.connect(self.start_serial)\n basta_parlare = QtGui.QAction('&Close Serial', self)\n basta_parlare.setShortcut('Ctrl+H')\n basta_parlare.setStatusTip('Stop comunicating with STM32')\n basta_parlare.triggered.connect(self.stop_serial)\n chiudi = QtGui.QAction('&Exit', self)\n chiudi.setShortcut('Ctrl+Q')\n chiudi.setStatusTip('Leave the app')\n chiudi.triggered.connect(self.close_application)\n saveFile = QtGui.QAction('&Save File', self)\n saveFile.setShortcut('Ctrl + S')\n saveFile.setStatusTip('Save File')\n saveFile.triggered.connect(self.file_save)\n self.statusBar()\n self.timer = QtCore.QTimer(self)\n self.timer.timeout.connect(self.tempo)\n self.timer.start(self.clock)\n mainMenu = self.menuBar()\n fileMenu = mainMenu.addMenu('&File')\n fileMenu.addAction(comunica)\n fileMenu.addAction(basta_parlare)\n fileMenu.addAction(saveFile)\n fileMenu.addAction(chiudi)\n self.home()\n\n def home(self):\n self.main_widget = Main_widget(self)\n self.setCentralWidget(self.main_widget)\n self.main_widget.EPM.ax1.set_title('EPM velocity')\n self.main_widget.EPM.ax1.set_xlabel('time [ms]')\n self.main_widget.EPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.EPM.ax2.set_title('EPM Torque')\n self.main_widget.EPM.ax2.set_xlabel('time[ms]')\n self.main_widget.EPM.ax2.set_ylabel('Torque [mNm]')\n self.main_widget.IPM.ax1.set_title('IPM velocity')\n self.main_widget.IPM.ax1.set_xlabel('time[ms]')\n self.main_widget.IPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.IPM.ax2.set_title('EPM Torque')\n self.main_widget.IPM.ax2.set_xlabel('time[ms]')\n self.main_widget.IPM.ax2.set_ylabel('Torque [mNm]')\n self.show()\n\n def file_save(self):\n name = QtGui.QFileDialog.getSaveFileName(self, 'Save File')\n file = open(name, 'w')\n title = 'Vepm\\tTepm\\tVipm\\tTipm\\n\\n'\n file.write(title)\n text = ''\n for i in range(len(self.Vepm)):\n text += str(self.Vepm[i]) + '\\t' + str(self.Tepm[i]) + '\\t' + str(\n self.Vipm[i]) + '\\t' + str(self.Tipm[i]) + '\\n'\n file.write(text)\n print(text)\n file.close()\n\n def start_serial(self):\n self.result = 'COM9'\n self.s = serial.Serial(self.result, 9600)\n self.flag = 1\n\n def stop_serial(self, s):\n self.s.close()\n self.flag = 0\n\n def close_application(self):\n choice = QtGui.QMessageBox.question(self, 'Extract!',\n 'Wanna get the duck out?', QtGui.QMessageBox.Yes | QtGui.\n QMessageBox.No)\n if choice == QtGui.QMessageBox.Yes:\n print('Extracting naaaaaaaaaaaaaaow!')\n sys.exit()\n else:\n pass\n\n def tempo(self):\n if self.flag:\n self.dati = self.s.readline()\n data = re.findall('[-+]?\\\\d*\\\\.\\\\d+|\\\\d+', self.dati)\n print(data)\n self.Vepm.append(float(data[0]))\n self.Tepm.append(float(data[1]))\n self.Vipm.append(float(data[2]))\n self.Tipm.append(float(data[3]))\n self.main_widget.EPM.plot(self.tempaccio, self.Vepm, 1)\n self.main_widget.EPM.plot(self.tempaccio, self.Tepm, 2)\n self.main_widget.IPM.plot(self.tempaccio, self.Vipm, 1)\n self.main_widget.IPM.plot(self.tempaccio, self.Tipm, 2)\n self.loop += 1\n self.main_widget.loopcicle.display(self.loop)\n self.tempaccio.append(self.tempaccio[-1] + self.clock)\n else:\n print('comunication blocked\\n')\n\n\nclass Magnet(QtGui.QWidget):\n\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2, self)\n self.PWM = PWM_slider(self)\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([], '*-')[0]\n\n def plot(self, tempaccio, data, whichplot):\n if whichplot == 1:\n self.plot_velocity.set_ydata(data)\n self.plot_velocity.set_xdata(tempaccio)\n self.ax1.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax1.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas1.draw()\n elif whichplot == 2:\n self.plot_torque.set_ydata(data)\n self.plot_torque.set_xdata(tempaccio)\n self.ax2.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax2.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas2.draw()\n else:\n print('Specify better your plot')\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\ndef run():\n app = QtGui.QApplication(sys.argv)\n GUI = Window()\n sys.exit(app.exec_())\n\n\n<code token>\n", "<import token>\n<code token>\n<import token>\n\n\nclass Window(QtGui.QMainWindow):\n\n def __init__(self):\n super(Window, self).__init__()\n self.dati = []\n self.Vepm = deque(range(0, 200, 2), 100)\n self.Tepm = deque(range(0, 100, 1), 100)\n self.Vipm = deque(range(0, 300, 3), 100)\n self.Tipm = deque(range(0, 800, 8), 100)\n self.loop = 0\n self.flag = 0\n self.tempaccio = deque([0] * 100, 100)\n self.clock = 5\n self.setGeometry(50, 50, 1250, 650)\n self.setWindowTitle('Shielding Platform Control Interface')\n self.setWindowIcon(QtGui.QIcon('stormy.jpg'))\n comunica = QtGui.QAction('&Open Serial', self)\n comunica.setShortcut('Ctrl+G')\n comunica.setStatusTip('Start comunicating with STM32')\n comunica.triggered.connect(self.start_serial)\n basta_parlare = QtGui.QAction('&Close Serial', self)\n basta_parlare.setShortcut('Ctrl+H')\n basta_parlare.setStatusTip('Stop comunicating with STM32')\n basta_parlare.triggered.connect(self.stop_serial)\n chiudi = QtGui.QAction('&Exit', self)\n chiudi.setShortcut('Ctrl+Q')\n chiudi.setStatusTip('Leave the app')\n chiudi.triggered.connect(self.close_application)\n saveFile = QtGui.QAction('&Save File', self)\n saveFile.setShortcut('Ctrl + S')\n saveFile.setStatusTip('Save File')\n saveFile.triggered.connect(self.file_save)\n self.statusBar()\n self.timer = QtCore.QTimer(self)\n self.timer.timeout.connect(self.tempo)\n self.timer.start(self.clock)\n mainMenu = self.menuBar()\n fileMenu = mainMenu.addMenu('&File')\n fileMenu.addAction(comunica)\n fileMenu.addAction(basta_parlare)\n fileMenu.addAction(saveFile)\n fileMenu.addAction(chiudi)\n self.home()\n\n def home(self):\n self.main_widget = Main_widget(self)\n self.setCentralWidget(self.main_widget)\n self.main_widget.EPM.ax1.set_title('EPM velocity')\n self.main_widget.EPM.ax1.set_xlabel('time [ms]')\n self.main_widget.EPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.EPM.ax2.set_title('EPM Torque')\n self.main_widget.EPM.ax2.set_xlabel('time[ms]')\n self.main_widget.EPM.ax2.set_ylabel('Torque [mNm]')\n self.main_widget.IPM.ax1.set_title('IPM velocity')\n self.main_widget.IPM.ax1.set_xlabel('time[ms]')\n self.main_widget.IPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.IPM.ax2.set_title('EPM Torque')\n self.main_widget.IPM.ax2.set_xlabel('time[ms]')\n self.main_widget.IPM.ax2.set_ylabel('Torque [mNm]')\n self.show()\n\n def file_save(self):\n name = QtGui.QFileDialog.getSaveFileName(self, 'Save File')\n file = open(name, 'w')\n title = 'Vepm\\tTepm\\tVipm\\tTipm\\n\\n'\n file.write(title)\n text = ''\n for i in range(len(self.Vepm)):\n text += str(self.Vepm[i]) + '\\t' + str(self.Tepm[i]) + '\\t' + str(\n self.Vipm[i]) + '\\t' + str(self.Tipm[i]) + '\\n'\n file.write(text)\n print(text)\n file.close()\n\n def start_serial(self):\n self.result = 'COM9'\n self.s = serial.Serial(self.result, 9600)\n self.flag = 1\n\n def stop_serial(self, s):\n self.s.close()\n self.flag = 0\n\n def close_application(self):\n choice = QtGui.QMessageBox.question(self, 'Extract!',\n 'Wanna get the duck out?', QtGui.QMessageBox.Yes | QtGui.\n QMessageBox.No)\n if choice == QtGui.QMessageBox.Yes:\n print('Extracting naaaaaaaaaaaaaaow!')\n sys.exit()\n else:\n pass\n\n def tempo(self):\n if self.flag:\n self.dati = self.s.readline()\n data = re.findall('[-+]?\\\\d*\\\\.\\\\d+|\\\\d+', self.dati)\n print(data)\n self.Vepm.append(float(data[0]))\n self.Tepm.append(float(data[1]))\n self.Vipm.append(float(data[2]))\n self.Tipm.append(float(data[3]))\n self.main_widget.EPM.plot(self.tempaccio, self.Vepm, 1)\n self.main_widget.EPM.plot(self.tempaccio, self.Tepm, 2)\n self.main_widget.IPM.plot(self.tempaccio, self.Vipm, 1)\n self.main_widget.IPM.plot(self.tempaccio, self.Tipm, 2)\n self.loop += 1\n self.main_widget.loopcicle.display(self.loop)\n self.tempaccio.append(self.tempaccio[-1] + self.clock)\n else:\n print('comunication blocked\\n')\n\n\nclass Magnet(QtGui.QWidget):\n\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2, self)\n self.PWM = PWM_slider(self)\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([], '*-')[0]\n\n def plot(self, tempaccio, data, whichplot):\n if whichplot == 1:\n self.plot_velocity.set_ydata(data)\n self.plot_velocity.set_xdata(tempaccio)\n self.ax1.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax1.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas1.draw()\n elif whichplot == 2:\n self.plot_torque.set_ydata(data)\n self.plot_torque.set_xdata(tempaccio)\n self.ax2.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax2.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas2.draw()\n else:\n print('Specify better your plot')\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n\n\nclass Window(QtGui.QMainWindow):\n\n def __init__(self):\n super(Window, self).__init__()\n self.dati = []\n self.Vepm = deque(range(0, 200, 2), 100)\n self.Tepm = deque(range(0, 100, 1), 100)\n self.Vipm = deque(range(0, 300, 3), 100)\n self.Tipm = deque(range(0, 800, 8), 100)\n self.loop = 0\n self.flag = 0\n self.tempaccio = deque([0] * 100, 100)\n self.clock = 5\n self.setGeometry(50, 50, 1250, 650)\n self.setWindowTitle('Shielding Platform Control Interface')\n self.setWindowIcon(QtGui.QIcon('stormy.jpg'))\n comunica = QtGui.QAction('&Open Serial', self)\n comunica.setShortcut('Ctrl+G')\n comunica.setStatusTip('Start comunicating with STM32')\n comunica.triggered.connect(self.start_serial)\n basta_parlare = QtGui.QAction('&Close Serial', self)\n basta_parlare.setShortcut('Ctrl+H')\n basta_parlare.setStatusTip('Stop comunicating with STM32')\n basta_parlare.triggered.connect(self.stop_serial)\n chiudi = QtGui.QAction('&Exit', self)\n chiudi.setShortcut('Ctrl+Q')\n chiudi.setStatusTip('Leave the app')\n chiudi.triggered.connect(self.close_application)\n saveFile = QtGui.QAction('&Save File', self)\n saveFile.setShortcut('Ctrl + S')\n saveFile.setStatusTip('Save File')\n saveFile.triggered.connect(self.file_save)\n self.statusBar()\n self.timer = QtCore.QTimer(self)\n self.timer.timeout.connect(self.tempo)\n self.timer.start(self.clock)\n mainMenu = self.menuBar()\n fileMenu = mainMenu.addMenu('&File')\n fileMenu.addAction(comunica)\n fileMenu.addAction(basta_parlare)\n fileMenu.addAction(saveFile)\n fileMenu.addAction(chiudi)\n self.home()\n\n def home(self):\n self.main_widget = Main_widget(self)\n self.setCentralWidget(self.main_widget)\n self.main_widget.EPM.ax1.set_title('EPM velocity')\n self.main_widget.EPM.ax1.set_xlabel('time [ms]')\n self.main_widget.EPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.EPM.ax2.set_title('EPM Torque')\n self.main_widget.EPM.ax2.set_xlabel('time[ms]')\n self.main_widget.EPM.ax2.set_ylabel('Torque [mNm]')\n self.main_widget.IPM.ax1.set_title('IPM velocity')\n self.main_widget.IPM.ax1.set_xlabel('time[ms]')\n self.main_widget.IPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.IPM.ax2.set_title('EPM Torque')\n self.main_widget.IPM.ax2.set_xlabel('time[ms]')\n self.main_widget.IPM.ax2.set_ylabel('Torque [mNm]')\n self.show()\n <function token>\n\n def start_serial(self):\n self.result = 'COM9'\n self.s = serial.Serial(self.result, 9600)\n self.flag = 1\n\n def stop_serial(self, s):\n self.s.close()\n self.flag = 0\n\n def close_application(self):\n choice = QtGui.QMessageBox.question(self, 'Extract!',\n 'Wanna get the duck out?', QtGui.QMessageBox.Yes | QtGui.\n QMessageBox.No)\n if choice == QtGui.QMessageBox.Yes:\n print('Extracting naaaaaaaaaaaaaaow!')\n sys.exit()\n else:\n pass\n\n def tempo(self):\n if self.flag:\n self.dati = self.s.readline()\n data = re.findall('[-+]?\\\\d*\\\\.\\\\d+|\\\\d+', self.dati)\n print(data)\n self.Vepm.append(float(data[0]))\n self.Tepm.append(float(data[1]))\n self.Vipm.append(float(data[2]))\n self.Tipm.append(float(data[3]))\n self.main_widget.EPM.plot(self.tempaccio, self.Vepm, 1)\n self.main_widget.EPM.plot(self.tempaccio, self.Tepm, 2)\n self.main_widget.IPM.plot(self.tempaccio, self.Vipm, 1)\n self.main_widget.IPM.plot(self.tempaccio, self.Tipm, 2)\n self.loop += 1\n self.main_widget.loopcicle.display(self.loop)\n self.tempaccio.append(self.tempaccio[-1] + self.clock)\n else:\n print('comunication blocked\\n')\n\n\nclass Magnet(QtGui.QWidget):\n\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2, self)\n self.PWM = PWM_slider(self)\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([], '*-')[0]\n\n def plot(self, tempaccio, data, whichplot):\n if whichplot == 1:\n self.plot_velocity.set_ydata(data)\n self.plot_velocity.set_xdata(tempaccio)\n self.ax1.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax1.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas1.draw()\n elif whichplot == 2:\n self.plot_torque.set_ydata(data)\n self.plot_torque.set_xdata(tempaccio)\n self.ax2.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax2.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas2.draw()\n else:\n print('Specify better your plot')\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n\n\nclass Window(QtGui.QMainWindow):\n\n def __init__(self):\n super(Window, self).__init__()\n self.dati = []\n self.Vepm = deque(range(0, 200, 2), 100)\n self.Tepm = deque(range(0, 100, 1), 100)\n self.Vipm = deque(range(0, 300, 3), 100)\n self.Tipm = deque(range(0, 800, 8), 100)\n self.loop = 0\n self.flag = 0\n self.tempaccio = deque([0] * 100, 100)\n self.clock = 5\n self.setGeometry(50, 50, 1250, 650)\n self.setWindowTitle('Shielding Platform Control Interface')\n self.setWindowIcon(QtGui.QIcon('stormy.jpg'))\n comunica = QtGui.QAction('&Open Serial', self)\n comunica.setShortcut('Ctrl+G')\n comunica.setStatusTip('Start comunicating with STM32')\n comunica.triggered.connect(self.start_serial)\n basta_parlare = QtGui.QAction('&Close Serial', self)\n basta_parlare.setShortcut('Ctrl+H')\n basta_parlare.setStatusTip('Stop comunicating with STM32')\n basta_parlare.triggered.connect(self.stop_serial)\n chiudi = QtGui.QAction('&Exit', self)\n chiudi.setShortcut('Ctrl+Q')\n chiudi.setStatusTip('Leave the app')\n chiudi.triggered.connect(self.close_application)\n saveFile = QtGui.QAction('&Save File', self)\n saveFile.setShortcut('Ctrl + S')\n saveFile.setStatusTip('Save File')\n saveFile.triggered.connect(self.file_save)\n self.statusBar()\n self.timer = QtCore.QTimer(self)\n self.timer.timeout.connect(self.tempo)\n self.timer.start(self.clock)\n mainMenu = self.menuBar()\n fileMenu = mainMenu.addMenu('&File')\n fileMenu.addAction(comunica)\n fileMenu.addAction(basta_parlare)\n fileMenu.addAction(saveFile)\n fileMenu.addAction(chiudi)\n self.home()\n\n def home(self):\n self.main_widget = Main_widget(self)\n self.setCentralWidget(self.main_widget)\n self.main_widget.EPM.ax1.set_title('EPM velocity')\n self.main_widget.EPM.ax1.set_xlabel('time [ms]')\n self.main_widget.EPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.EPM.ax2.set_title('EPM Torque')\n self.main_widget.EPM.ax2.set_xlabel('time[ms]')\n self.main_widget.EPM.ax2.set_ylabel('Torque [mNm]')\n self.main_widget.IPM.ax1.set_title('IPM velocity')\n self.main_widget.IPM.ax1.set_xlabel('time[ms]')\n self.main_widget.IPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.IPM.ax2.set_title('EPM Torque')\n self.main_widget.IPM.ax2.set_xlabel('time[ms]')\n self.main_widget.IPM.ax2.set_ylabel('Torque [mNm]')\n self.show()\n <function token>\n\n def start_serial(self):\n self.result = 'COM9'\n self.s = serial.Serial(self.result, 9600)\n self.flag = 1\n <function token>\n\n def close_application(self):\n choice = QtGui.QMessageBox.question(self, 'Extract!',\n 'Wanna get the duck out?', QtGui.QMessageBox.Yes | QtGui.\n QMessageBox.No)\n if choice == QtGui.QMessageBox.Yes:\n print('Extracting naaaaaaaaaaaaaaow!')\n sys.exit()\n else:\n pass\n\n def tempo(self):\n if self.flag:\n self.dati = self.s.readline()\n data = re.findall('[-+]?\\\\d*\\\\.\\\\d+|\\\\d+', self.dati)\n print(data)\n self.Vepm.append(float(data[0]))\n self.Tepm.append(float(data[1]))\n self.Vipm.append(float(data[2]))\n self.Tipm.append(float(data[3]))\n self.main_widget.EPM.plot(self.tempaccio, self.Vepm, 1)\n self.main_widget.EPM.plot(self.tempaccio, self.Tepm, 2)\n self.main_widget.IPM.plot(self.tempaccio, self.Vipm, 1)\n self.main_widget.IPM.plot(self.tempaccio, self.Tipm, 2)\n self.loop += 1\n self.main_widget.loopcicle.display(self.loop)\n self.tempaccio.append(self.tempaccio[-1] + self.clock)\n else:\n print('comunication blocked\\n')\n\n\nclass Magnet(QtGui.QWidget):\n\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2, self)\n self.PWM = PWM_slider(self)\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([], '*-')[0]\n\n def plot(self, tempaccio, data, whichplot):\n if whichplot == 1:\n self.plot_velocity.set_ydata(data)\n self.plot_velocity.set_xdata(tempaccio)\n self.ax1.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax1.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas1.draw()\n elif whichplot == 2:\n self.plot_torque.set_ydata(data)\n self.plot_torque.set_xdata(tempaccio)\n self.ax2.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax2.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas2.draw()\n else:\n print('Specify better your plot')\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n\n\nclass Window(QtGui.QMainWindow):\n <function token>\n\n def home(self):\n self.main_widget = Main_widget(self)\n self.setCentralWidget(self.main_widget)\n self.main_widget.EPM.ax1.set_title('EPM velocity')\n self.main_widget.EPM.ax1.set_xlabel('time [ms]')\n self.main_widget.EPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.EPM.ax2.set_title('EPM Torque')\n self.main_widget.EPM.ax2.set_xlabel('time[ms]')\n self.main_widget.EPM.ax2.set_ylabel('Torque [mNm]')\n self.main_widget.IPM.ax1.set_title('IPM velocity')\n self.main_widget.IPM.ax1.set_xlabel('time[ms]')\n self.main_widget.IPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.IPM.ax2.set_title('EPM Torque')\n self.main_widget.IPM.ax2.set_xlabel('time[ms]')\n self.main_widget.IPM.ax2.set_ylabel('Torque [mNm]')\n self.show()\n <function token>\n\n def start_serial(self):\n self.result = 'COM9'\n self.s = serial.Serial(self.result, 9600)\n self.flag = 1\n <function token>\n\n def close_application(self):\n choice = QtGui.QMessageBox.question(self, 'Extract!',\n 'Wanna get the duck out?', QtGui.QMessageBox.Yes | QtGui.\n QMessageBox.No)\n if choice == QtGui.QMessageBox.Yes:\n print('Extracting naaaaaaaaaaaaaaow!')\n sys.exit()\n else:\n pass\n\n def tempo(self):\n if self.flag:\n self.dati = self.s.readline()\n data = re.findall('[-+]?\\\\d*\\\\.\\\\d+|\\\\d+', self.dati)\n print(data)\n self.Vepm.append(float(data[0]))\n self.Tepm.append(float(data[1]))\n self.Vipm.append(float(data[2]))\n self.Tipm.append(float(data[3]))\n self.main_widget.EPM.plot(self.tempaccio, self.Vepm, 1)\n self.main_widget.EPM.plot(self.tempaccio, self.Tepm, 2)\n self.main_widget.IPM.plot(self.tempaccio, self.Vipm, 1)\n self.main_widget.IPM.plot(self.tempaccio, self.Tipm, 2)\n self.loop += 1\n self.main_widget.loopcicle.display(self.loop)\n self.tempaccio.append(self.tempaccio[-1] + self.clock)\n else:\n print('comunication blocked\\n')\n\n\nclass Magnet(QtGui.QWidget):\n\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2, self)\n self.PWM = PWM_slider(self)\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([], '*-')[0]\n\n def plot(self, tempaccio, data, whichplot):\n if whichplot == 1:\n self.plot_velocity.set_ydata(data)\n self.plot_velocity.set_xdata(tempaccio)\n self.ax1.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax1.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas1.draw()\n elif whichplot == 2:\n self.plot_torque.set_ydata(data)\n self.plot_torque.set_xdata(tempaccio)\n self.ax2.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax2.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas2.draw()\n else:\n print('Specify better your plot')\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n\n\nclass Window(QtGui.QMainWindow):\n <function token>\n\n def home(self):\n self.main_widget = Main_widget(self)\n self.setCentralWidget(self.main_widget)\n self.main_widget.EPM.ax1.set_title('EPM velocity')\n self.main_widget.EPM.ax1.set_xlabel('time [ms]')\n self.main_widget.EPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.EPM.ax2.set_title('EPM Torque')\n self.main_widget.EPM.ax2.set_xlabel('time[ms]')\n self.main_widget.EPM.ax2.set_ylabel('Torque [mNm]')\n self.main_widget.IPM.ax1.set_title('IPM velocity')\n self.main_widget.IPM.ax1.set_xlabel('time[ms]')\n self.main_widget.IPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.IPM.ax2.set_title('EPM Torque')\n self.main_widget.IPM.ax2.set_xlabel('time[ms]')\n self.main_widget.IPM.ax2.set_ylabel('Torque [mNm]')\n self.show()\n <function token>\n\n def start_serial(self):\n self.result = 'COM9'\n self.s = serial.Serial(self.result, 9600)\n self.flag = 1\n <function token>\n <function token>\n\n def tempo(self):\n if self.flag:\n self.dati = self.s.readline()\n data = re.findall('[-+]?\\\\d*\\\\.\\\\d+|\\\\d+', self.dati)\n print(data)\n self.Vepm.append(float(data[0]))\n self.Tepm.append(float(data[1]))\n self.Vipm.append(float(data[2]))\n self.Tipm.append(float(data[3]))\n self.main_widget.EPM.plot(self.tempaccio, self.Vepm, 1)\n self.main_widget.EPM.plot(self.tempaccio, self.Tepm, 2)\n self.main_widget.IPM.plot(self.tempaccio, self.Vipm, 1)\n self.main_widget.IPM.plot(self.tempaccio, self.Tipm, 2)\n self.loop += 1\n self.main_widget.loopcicle.display(self.loop)\n self.tempaccio.append(self.tempaccio[-1] + self.clock)\n else:\n print('comunication blocked\\n')\n\n\nclass Magnet(QtGui.QWidget):\n\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2, self)\n self.PWM = PWM_slider(self)\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([], '*-')[0]\n\n def plot(self, tempaccio, data, whichplot):\n if whichplot == 1:\n self.plot_velocity.set_ydata(data)\n self.plot_velocity.set_xdata(tempaccio)\n self.ax1.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax1.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas1.draw()\n elif whichplot == 2:\n self.plot_torque.set_ydata(data)\n self.plot_torque.set_xdata(tempaccio)\n self.ax2.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax2.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas2.draw()\n else:\n print('Specify better your plot')\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n\n\nclass Window(QtGui.QMainWindow):\n <function token>\n\n def home(self):\n self.main_widget = Main_widget(self)\n self.setCentralWidget(self.main_widget)\n self.main_widget.EPM.ax1.set_title('EPM velocity')\n self.main_widget.EPM.ax1.set_xlabel('time [ms]')\n self.main_widget.EPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.EPM.ax2.set_title('EPM Torque')\n self.main_widget.EPM.ax2.set_xlabel('time[ms]')\n self.main_widget.EPM.ax2.set_ylabel('Torque [mNm]')\n self.main_widget.IPM.ax1.set_title('IPM velocity')\n self.main_widget.IPM.ax1.set_xlabel('time[ms]')\n self.main_widget.IPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.IPM.ax2.set_title('EPM Torque')\n self.main_widget.IPM.ax2.set_xlabel('time[ms]')\n self.main_widget.IPM.ax2.set_ylabel('Torque [mNm]')\n self.show()\n <function token>\n <function token>\n <function token>\n <function token>\n\n def tempo(self):\n if self.flag:\n self.dati = self.s.readline()\n data = re.findall('[-+]?\\\\d*\\\\.\\\\d+|\\\\d+', self.dati)\n print(data)\n self.Vepm.append(float(data[0]))\n self.Tepm.append(float(data[1]))\n self.Vipm.append(float(data[2]))\n self.Tipm.append(float(data[3]))\n self.main_widget.EPM.plot(self.tempaccio, self.Vepm, 1)\n self.main_widget.EPM.plot(self.tempaccio, self.Tepm, 2)\n self.main_widget.IPM.plot(self.tempaccio, self.Vipm, 1)\n self.main_widget.IPM.plot(self.tempaccio, self.Tipm, 2)\n self.loop += 1\n self.main_widget.loopcicle.display(self.loop)\n self.tempaccio.append(self.tempaccio[-1] + self.clock)\n else:\n print('comunication blocked\\n')\n\n\nclass Magnet(QtGui.QWidget):\n\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2, self)\n self.PWM = PWM_slider(self)\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([], '*-')[0]\n\n def plot(self, tempaccio, data, whichplot):\n if whichplot == 1:\n self.plot_velocity.set_ydata(data)\n self.plot_velocity.set_xdata(tempaccio)\n self.ax1.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax1.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas1.draw()\n elif whichplot == 2:\n self.plot_torque.set_ydata(data)\n self.plot_torque.set_xdata(tempaccio)\n self.ax2.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax2.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas2.draw()\n else:\n print('Specify better your plot')\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n\n\nclass Window(QtGui.QMainWindow):\n <function token>\n\n def home(self):\n self.main_widget = Main_widget(self)\n self.setCentralWidget(self.main_widget)\n self.main_widget.EPM.ax1.set_title('EPM velocity')\n self.main_widget.EPM.ax1.set_xlabel('time [ms]')\n self.main_widget.EPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.EPM.ax2.set_title('EPM Torque')\n self.main_widget.EPM.ax2.set_xlabel('time[ms]')\n self.main_widget.EPM.ax2.set_ylabel('Torque [mNm]')\n self.main_widget.IPM.ax1.set_title('IPM velocity')\n self.main_widget.IPM.ax1.set_xlabel('time[ms]')\n self.main_widget.IPM.ax1.set_ylabel('Velocity [RPM]')\n self.main_widget.IPM.ax2.set_title('EPM Torque')\n self.main_widget.IPM.ax2.set_xlabel('time[ms]')\n self.main_widget.IPM.ax2.set_ylabel('Torque [mNm]')\n self.show()\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass Magnet(QtGui.QWidget):\n\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2, self)\n self.PWM = PWM_slider(self)\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([], '*-')[0]\n\n def plot(self, tempaccio, data, whichplot):\n if whichplot == 1:\n self.plot_velocity.set_ydata(data)\n self.plot_velocity.set_xdata(tempaccio)\n self.ax1.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax1.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas1.draw()\n elif whichplot == 2:\n self.plot_torque.set_ydata(data)\n self.plot_torque.set_xdata(tempaccio)\n self.ax2.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax2.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas2.draw()\n else:\n print('Specify better your plot')\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n\n\nclass Window(QtGui.QMainWindow):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass Magnet(QtGui.QWidget):\n\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2, self)\n self.PWM = PWM_slider(self)\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([], '*-')[0]\n\n def plot(self, tempaccio, data, whichplot):\n if whichplot == 1:\n self.plot_velocity.set_ydata(data)\n self.plot_velocity.set_xdata(tempaccio)\n self.ax1.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax1.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas1.draw()\n elif whichplot == 2:\n self.plot_torque.set_ydata(data)\n self.plot_torque.set_xdata(tempaccio)\n self.ax2.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax2.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas2.draw()\n else:\n print('Specify better your plot')\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n<class token>\n\n\nclass Magnet(QtGui.QWidget):\n\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2, self)\n self.PWM = PWM_slider(self)\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([], '*-')[0]\n\n def plot(self, tempaccio, data, whichplot):\n if whichplot == 1:\n self.plot_velocity.set_ydata(data)\n self.plot_velocity.set_xdata(tempaccio)\n self.ax1.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax1.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas1.draw()\n elif whichplot == 2:\n self.plot_torque.set_ydata(data)\n self.plot_torque.set_xdata(tempaccio)\n self.ax2.set_xlim(tempaccio[0], tempaccio[-1])\n self.ax2.set_ylim(min(data) * 0.8, max(data) * 1.2)\n self.canvas2.draw()\n else:\n print('Specify better your plot')\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n<class token>\n\n\nclass Magnet(QtGui.QWidget):\n\n def __init__(self, id, parent=None):\n super(Magnet, self).__init__(parent)\n self.parent = parent\n self.id = id\n self.figure1 = plt.figure()\n self.ax1 = self.figure1.add_subplot(111)\n self.figure2 = plt.figure()\n self.ax2 = self.figure2.add_subplot(111)\n self.ax1.hold(False)\n self.ax2.hold(False)\n self.canvas1 = FigureCanvas(self.figure1)\n self.canvas2 = FigureCanvas(self.figure2)\n self.toolbar1 = NavigationToolbar(self.canvas1, self)\n self.toolbar2 = NavigationToolbar(self.canvas2, self)\n self.PWM = PWM_slider(self)\n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.toolbar1)\n layout.addWidget(self.canvas1)\n layout.addWidget(self.PWM)\n layout.addWidget(self.toolbar2)\n layout.addWidget(self.canvas2)\n self.setLayout(layout)\n self.plot_velocity = self.ax1.plot([], '*-')[0]\n self.plot_torque = self.ax2.plot([], '*-')[0]\n <function token>\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n<class token>\n\n\nclass Magnet(QtGui.QWidget):\n <function token>\n <function token>\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n<class token>\n<class token>\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n\n def parla(self):\n print(str(self.sld.value()))\n if self.parent.parent.parent.flag:\n if self.sld.value() < 10:\n print(str(self.parent.id) + '0' + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + '0' +\n str(self.sld.value()))\n time.sleep(0.08)\n else:\n print(str(self.parent.id) + str(self.sld.value()))\n self.parent.parent.parent.s.write(str(self.parent.id) + str\n (self.sld.value()))\n time.sleep(0.08)\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n<class token>\n<class token>\n\n\nclass PWM_slider(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(PWM_slider, self).__init__(parent)\n self.parent = parent\n self.lcd = QtGui.QLCDNumber(self)\n self.sld = QtGui.QSlider(QtCore.Qt.Horizontal, self)\n self.sld.valueChanged.connect(self.lcd.display)\n self.sld.valueChanged.connect(self.parla)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.lcd)\n layout.addWidget(self.sld)\n self.setLayout(layout)\n <function token>\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n<class token>\n<class token>\n\n\nclass PWM_slider(QtGui.QWidget):\n <function token>\n <function token>\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass Main_widget(QtGui.QWidget):\n\n def __init__(self, parent=None):\n super(Main_widget, self).__init__(parent)\n self.parent = parent\n self.EPM = Magnet(1, self)\n self.IPM = Magnet(2, self)\n self.loopcicle = QtGui.QLCDNumber(self)\n layout = QtGui.QHBoxLayout()\n layout.addWidget(self.EPM)\n layout.addWidget(self.loopcicle)\n layout.addWidget(self.IPM)\n self.setLayout(layout)\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass Main_widget(QtGui.QWidget):\n <function token>\n\n\n<function token>\n<code token>\n", "<import token>\n<code token>\n<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<function token>\n<code token>\n" ]
false
98,981
a3b3e067c3416f5d874752fd40897fcdaaf011c8
#Get acm_data.txt as input and you will be able to generate it's csv version. #This code has been used from here( "https://www.snip2code.com/Snippet/1084356/parse-aminer-s-dblp-dataset-(https---ami" ). #use python for this code. Not python3 import csv from itertools import groupby def load_dblp_arnet(infname, outfname): with open(infname, 'rb') as f, open(outfname, 'wb') as csvfile: csv_writer = csv.writer( csvfile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) count = 0 S = ['title', 'authors', 'year','venue','citation', 'refs', 'abstract'] csv_writer.writerow(S) for key, group in groupby(f, key=lambda l: l.strip(' \n\r') == ''): if not key: refs = [] authors = [] title, venue, year, citation, abstract = [''] * 5 for item in group: item = item.strip(' \r\n') if item.startswith('#*'): title = item[2:] elif item.startswith('#@'): authors = item[2:].split(',') elif item.startswith('#year'): year = item[5:] elif item.startswith('#conf'): venue = item[5:] elif item.startswith('#citation'): citation = item[9:] elif item.startswith('#!'): abstract = item[2:] elif item.startswith('#%'): refs.append(item[2:]) csv_writer.writerow( [title, authors, year,venue,citation, refs, abstract]) count += 1 print '\r%d\tlines' % (count,), load_dblp_arnet('acm_output.txt', 'acm_citation.csv')
[ "#Get acm_data.txt as input and you will be able to generate it's csv version.\n#This code has been used from here( \"https://www.snip2code.com/Snippet/1084356/parse-aminer-s-dblp-dataset-(https---ami\" ).\n#use python for this code. Not python3\nimport csv\nfrom itertools import groupby\n\n\n\ndef load_dblp_arnet(infname, outfname):\n with open(infname, 'rb') as f, open(outfname, 'wb') as csvfile:\n csv_writer = csv.writer(\n csvfile, delimiter=',',\n quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n count = 0\n S = ['title', 'authors', 'year','venue','citation', 'refs', 'abstract']\n csv_writer.writerow(S)\n for key, group in groupby(f, key=lambda l: l.strip(' \\n\\r') == ''):\n if not key:\n refs = []\n authors = []\n title, venue, year, citation, abstract = [''] * 5\n for item in group:\n item = item.strip(' \\r\\n')\n if item.startswith('#*'):\n title = item[2:]\n elif item.startswith('#@'):\n authors = item[2:].split(',')\n elif item.startswith('#year'):\n year = item[5:]\n elif item.startswith('#conf'):\n venue = item[5:]\n elif item.startswith('#citation'):\n citation = item[9:]\n elif item.startswith('#!'):\n abstract = item[2:]\n elif item.startswith('#%'):\n refs.append(item[2:])\n csv_writer.writerow(\n [title, authors, year,venue,citation, refs, abstract])\n count += 1\n print '\\r%d\\tlines' % (count,),\n\n\nload_dblp_arnet('acm_output.txt', 'acm_citation.csv')\n" ]
true
98,982
fb41490b17035344a630400d00119711f36c168a
#!/usr/bin/python # -*- coding: utf8 -*- """FIX Application""" import sys # from datetime import datetime import quickfix as fix import quickfix44 as fix44 from classes.book import LAST_TRADE,BOOK # configured __SOH__ = chr(1) class FixConnector(fix.Application): """FIX Application""" callback = 0 sessionID = 0 orderID = 0 execID = 0 def __init__(self,callback=0): fix.Application.__init__(self) self.callback = callback def onCreate(self, sessionID): self.sessionID = sessionID return def onLogon(self, sessionID): self.sessionID = sessionID print("logged on!") return def onLogout(self, sessionID): return def toAdmin(self, message, sessionID): username = fix.Username("YOUR API KEY") mypass = fix.Password("YOUR API SECRET") message.setField(username) message.setField(mypass) msg = message.toString().replace(__SOH__, "|") return def fromAdmin(self, message, sessionID): msg = message.toString().replace(__SOH__, "|") return def toApp(self, message, sessionID): msg = message.toString().replace(__SOH__, "|") return def fromApp(self, message, sessionID): msg = message.toString().replace(__SOH__, "|") self.onMessage(message, sessionID) return def onMessage(self, message, sessionID): # print("OnMessage %s" % message) msgType = fix.MsgType() message.getHeader().getField(msgType) if msgType.getValue() == "X": # print("MarketDataIncrementalRefresh %s" % message) noMDEntries = fix.NoMDEntries() message.getField(noMDEntries) if (noMDEntries.getValue() != 1): # print("NoMDEntries in MarketDataIncrementalRefresh is not 1!") return group = fix44.MarketDataIncrementalRefresh.NoMDEntries() message.getGroup(1, group); entryID = fix.MDEntryID() group.getField(entryID) action = fix.MDUpdateAction() group.getField(action); security = LAST_TRADE() security.MDEntryID = entryID.getValue() security.MDUpdateAction = action.getValue() symbol = fix.Symbol() if (group.isSetField(symbol)): group.getField(symbol) security.Symbol = symbol.getValue() entryType = fix.MDEntryType() if (group.isSetField(entryType)): group.getField(entryType) security.MDEntryType = entryType.getValue() price = fix.MDEntryPx() if (group.isSetField(price)): group.getField(price) security.MDEntryPx = price.getValue() size = fix.MDEntrySize() if (group.isSetField(size)): group.getField(size) security.MDEntrySize = size.getValue() qty = fix.MinQty() if (group.isSetField(qty)): group.getField(qty) security.MinQty = qty.getValue() fire(self.callback, "OnTradeUpdated",**{"trade":security}) if msgType.getValue() == 'W': book = BOOK() Symbol = fix.Symbol() message.getField(Symbol) book.symbol = Symbol.getValue() noMDEntries = fix.NoMDEntries() message.getField(noMDEntries) group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries() MDEntryType = fix.MDEntryType() MDEntryPx = fix.MDEntryPx() MDEntrySize = fix.MDEntrySize() for i in range(1,noMDEntries.getValue()): message.getGroup(i, group) group.getField(MDEntryType) group.getField(MDEntryPx) group.getField(MDEntrySize) if MDEntryType.getValue() == '0': book.bid.append(MDEntryPx.getValue()) book.bid_size.append(MDEntrySize.getValue()) if MDEntryType.getValue() == '1': book.ask.append(MDEntryPx.getValue()) book.ask_size.append(MDEntrySize.getValue()) fire(self.callback, "OnBookUpdated",**{"book":book}) pass def genOrderID(self): self.orderID = self.orderID+1 return str(self.orderID) def genExecID(self): self.execID = self.execID+1 return str(self.execID) def marketDataRequest(self,ticker,subscription_type): mdr = fix.Message() mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44)) mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest)) group = fix44.MarketDataRequest().NoRelatedSym() group.setField(fix.Symbol(ticker)) mdr.addGroup(group) mdr.setField(fix.MDReqID('1')) mdr.setField(fix.SubscriptionRequestType(subscription_type)) mdr.setField(fix.MarketDepth(0)) mdr.setField(fix.NoMDEntryTypes(3)) group = fix44.MarketDataRequest().NoMDEntryTypes() group.setField(fix.MDEntryType(fix.MDEntryType_BID)) mdr.addGroup(group) group.setField(fix.MDEntryType(fix.MDEntryType_OFFER)) mdr.addGroup(group) group.setField(fix.MDEntryType(fix.MDEntryType_TRADE)) mdr.addGroup(group) fix.Session.sendToTarget(mdr, self.sessionID) return def sendOrder(self,ticker,side,type,px,qty): nos = fix.Message() nos.getHeader().setField(fix.BeginString(fix.BeginString_FIX44)) nos.getHeader().setField(fix.MsgType(fix.MsgType_NewOrderSingle)) symbol = fix.Symbol(ticker) nos.setField(symbol) if side == "BUY": side = fix.Side(fix.Side_BUY) if side == "SELL": side = fix.Side(fix.Side_SELL) nos.setField(side) if type == "MARKET": ordType = fix.OrdType(fix.OrdType_MARKET) px = fix.Price(0) if type == "LIMIT": ordType = fix.OrdType(fix.OrdType_MARKET) px = fix.Price(px) nos.setField(ordType) nos.setField(px) orderQty = fix.OrderQty(qty) clOrdID = fix.ClOrdID(self.genOrderID()) nos.setField(orderQty) nos.setField(clOrdID) TimeInForce = fix.TimeInForce(fix.TimeInForce_GOOD_TILL_CANCEL) TransactTime = fix.TransactTime() nos.setField(TimeInForce) nos.setField(TransactTime) fix.Session.sendToTarget(nos, self.sessionID) def fire(handlers, event, **kwargs): for handler in handlers.get(event, []): handler(**kwargs)
[ "#!/usr/bin/python\n# -*- coding: utf8 -*-\n\"\"\"FIX Application\"\"\"\nimport sys\n\n# from datetime import datetime\nimport quickfix as fix\nimport quickfix44 as fix44\n\nfrom classes.book import LAST_TRADE,BOOK\n\n# configured\n__SOH__ = chr(1)\n\n\n\n\nclass FixConnector(fix.Application):\n \"\"\"FIX Application\"\"\"\n\n callback = 0\n sessionID = 0\n\n orderID = 0\n execID = 0\n\n def __init__(self,callback=0):\n fix.Application.__init__(self)\n self.callback = callback\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print(\"logged on!\")\n return\n def onLogout(self, sessionID): \n return\n\n def toAdmin(self, message, sessionID):\n username = fix.Username(\"YOUR API KEY\")\n mypass = fix.Password(\"YOUR API SECRET\")\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, \"|\")\n return\n def fromAdmin(self, message, sessionID):\n msg = message.toString().replace(__SOH__, \"|\")\n return\n def toApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, \"|\")\n return\n def fromApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, \"|\")\n self.onMessage(message, sessionID)\n return\n\n def onMessage(self, message, sessionID):\n # print(\"OnMessage %s\" % message)\n msgType = fix.MsgType()\n message.getHeader().getField(msgType)\n if msgType.getValue() == \"X\":\n # print(\"MarketDataIncrementalRefresh %s\" % message)\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n if (noMDEntries.getValue() != 1):\n # print(\"NoMDEntries in MarketDataIncrementalRefresh is not 1!\")\n return\n group = fix44.MarketDataIncrementalRefresh.NoMDEntries()\n message.getGroup(1, group);\n\n entryID = fix.MDEntryID()\n group.getField(entryID)\n action = fix.MDUpdateAction()\n group.getField(action);\n security = LAST_TRADE()\n security.MDEntryID = entryID.getValue()\n security.MDUpdateAction = action.getValue()\n symbol = fix.Symbol()\n if (group.isSetField(symbol)):\n group.getField(symbol)\n security.Symbol = symbol.getValue()\n entryType = fix.MDEntryType()\n if (group.isSetField(entryType)):\n group.getField(entryType)\n security.MDEntryType = entryType.getValue()\n price = fix.MDEntryPx()\n if (group.isSetField(price)):\n group.getField(price)\n security.MDEntryPx = price.getValue()\n size = fix.MDEntrySize()\n if (group.isSetField(size)):\n group.getField(size)\n security.MDEntrySize = size.getValue()\n qty = fix.MinQty()\n if (group.isSetField(qty)):\n group.getField(qty)\n security.MinQty = qty.getValue()\n\n fire(self.callback, \"OnTradeUpdated\",**{\"trade\":security})\n\n if msgType.getValue() == 'W':\n book = BOOK()\n Symbol = fix.Symbol()\n message.getField(Symbol)\n book.symbol = Symbol.getValue()\n\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n\n group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries()\n MDEntryType = fix.MDEntryType()\n MDEntryPx = fix.MDEntryPx()\n MDEntrySize = fix.MDEntrySize()\n\n for i in range(1,noMDEntries.getValue()):\n message.getGroup(i, group)\n group.getField(MDEntryType)\n group.getField(MDEntryPx)\n group.getField(MDEntrySize)\n if MDEntryType.getValue() == '0':\n book.bid.append(MDEntryPx.getValue())\n book.bid_size.append(MDEntrySize.getValue())\n if MDEntryType.getValue() == '1':\n book.ask.append(MDEntryPx.getValue())\n book.ask_size.append(MDEntrySize.getValue())\n\n fire(self.callback, \"OnBookUpdated\",**{\"book\":book})\n\n pass\n\n\n def genOrderID(self):\n self.orderID = self.orderID+1\n return str(self.orderID)\n def genExecID(self):\n self.execID = self.execID+1\n return str(self.execID)\n\n\n def marketDataRequest(self,ticker,subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n\n def sendOrder(self,ticker,side,type,px,qty):\n nos = fix.Message()\n nos.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n nos.getHeader().setField(fix.MsgType(fix.MsgType_NewOrderSingle))\n\n symbol = fix.Symbol(ticker)\n nos.setField(symbol)\n if side == \"BUY\":\n side = fix.Side(fix.Side_BUY)\n if side == \"SELL\":\n side = fix.Side(fix.Side_SELL)\n nos.setField(side)\n\n if type == \"MARKET\":\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(0)\n if type == \"LIMIT\":\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(px)\n nos.setField(ordType)\n nos.setField(px)\n\n orderQty = fix.OrderQty(qty)\n clOrdID = fix.ClOrdID(self.genOrderID())\n nos.setField(orderQty)\n nos.setField(clOrdID)\n\n TimeInForce = fix.TimeInForce(fix.TimeInForce_GOOD_TILL_CANCEL)\n TransactTime = fix.TransactTime()\n nos.setField(TimeInForce)\n nos.setField(TransactTime)\n\n fix.Session.sendToTarget(nos, self.sessionID)\n\n\ndef fire(handlers, event, **kwargs):\n for handler in handlers.get(event, []):\n handler(**kwargs)\n", "<docstring token>\nimport sys\nimport quickfix as fix\nimport quickfix44 as fix44\nfrom classes.book import LAST_TRADE, BOOK\n__SOH__ = chr(1)\n\n\nclass FixConnector(fix.Application):\n \"\"\"FIX Application\"\"\"\n callback = 0\n sessionID = 0\n orderID = 0\n execID = 0\n\n def __init__(self, callback=0):\n fix.Application.__init__(self)\n self.callback = callback\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print('logged on!')\n return\n\n def onLogout(self, sessionID):\n return\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromAdmin(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def toApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n self.onMessage(message, sessionID)\n return\n\n def onMessage(self, message, sessionID):\n msgType = fix.MsgType()\n message.getHeader().getField(msgType)\n if msgType.getValue() == 'X':\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n if noMDEntries.getValue() != 1:\n return\n group = fix44.MarketDataIncrementalRefresh.NoMDEntries()\n message.getGroup(1, group)\n entryID = fix.MDEntryID()\n group.getField(entryID)\n action = fix.MDUpdateAction()\n group.getField(action)\n security = LAST_TRADE()\n security.MDEntryID = entryID.getValue()\n security.MDUpdateAction = action.getValue()\n symbol = fix.Symbol()\n if group.isSetField(symbol):\n group.getField(symbol)\n security.Symbol = symbol.getValue()\n entryType = fix.MDEntryType()\n if group.isSetField(entryType):\n group.getField(entryType)\n security.MDEntryType = entryType.getValue()\n price = fix.MDEntryPx()\n if group.isSetField(price):\n group.getField(price)\n security.MDEntryPx = price.getValue()\n size = fix.MDEntrySize()\n if group.isSetField(size):\n group.getField(size)\n security.MDEntrySize = size.getValue()\n qty = fix.MinQty()\n if group.isSetField(qty):\n group.getField(qty)\n security.MinQty = qty.getValue()\n fire(self.callback, 'OnTradeUpdated', **{'trade': security})\n if msgType.getValue() == 'W':\n book = BOOK()\n Symbol = fix.Symbol()\n message.getField(Symbol)\n book.symbol = Symbol.getValue()\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries()\n MDEntryType = fix.MDEntryType()\n MDEntryPx = fix.MDEntryPx()\n MDEntrySize = fix.MDEntrySize()\n for i in range(1, noMDEntries.getValue()):\n message.getGroup(i, group)\n group.getField(MDEntryType)\n group.getField(MDEntryPx)\n group.getField(MDEntrySize)\n if MDEntryType.getValue() == '0':\n book.bid.append(MDEntryPx.getValue())\n book.bid_size.append(MDEntrySize.getValue())\n if MDEntryType.getValue() == '1':\n book.ask.append(MDEntryPx.getValue())\n book.ask_size.append(MDEntrySize.getValue())\n fire(self.callback, 'OnBookUpdated', **{'book': book})\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n\n def sendOrder(self, ticker, side, type, px, qty):\n nos = fix.Message()\n nos.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n nos.getHeader().setField(fix.MsgType(fix.MsgType_NewOrderSingle))\n symbol = fix.Symbol(ticker)\n nos.setField(symbol)\n if side == 'BUY':\n side = fix.Side(fix.Side_BUY)\n if side == 'SELL':\n side = fix.Side(fix.Side_SELL)\n nos.setField(side)\n if type == 'MARKET':\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(0)\n if type == 'LIMIT':\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(px)\n nos.setField(ordType)\n nos.setField(px)\n orderQty = fix.OrderQty(qty)\n clOrdID = fix.ClOrdID(self.genOrderID())\n nos.setField(orderQty)\n nos.setField(clOrdID)\n TimeInForce = fix.TimeInForce(fix.TimeInForce_GOOD_TILL_CANCEL)\n TransactTime = fix.TransactTime()\n nos.setField(TimeInForce)\n nos.setField(TransactTime)\n fix.Session.sendToTarget(nos, self.sessionID)\n\n\ndef fire(handlers, event, **kwargs):\n for handler in handlers.get(event, []):\n handler(**kwargs)\n", "<docstring token>\n<import token>\n__SOH__ = chr(1)\n\n\nclass FixConnector(fix.Application):\n \"\"\"FIX Application\"\"\"\n callback = 0\n sessionID = 0\n orderID = 0\n execID = 0\n\n def __init__(self, callback=0):\n fix.Application.__init__(self)\n self.callback = callback\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print('logged on!')\n return\n\n def onLogout(self, sessionID):\n return\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromAdmin(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def toApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n self.onMessage(message, sessionID)\n return\n\n def onMessage(self, message, sessionID):\n msgType = fix.MsgType()\n message.getHeader().getField(msgType)\n if msgType.getValue() == 'X':\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n if noMDEntries.getValue() != 1:\n return\n group = fix44.MarketDataIncrementalRefresh.NoMDEntries()\n message.getGroup(1, group)\n entryID = fix.MDEntryID()\n group.getField(entryID)\n action = fix.MDUpdateAction()\n group.getField(action)\n security = LAST_TRADE()\n security.MDEntryID = entryID.getValue()\n security.MDUpdateAction = action.getValue()\n symbol = fix.Symbol()\n if group.isSetField(symbol):\n group.getField(symbol)\n security.Symbol = symbol.getValue()\n entryType = fix.MDEntryType()\n if group.isSetField(entryType):\n group.getField(entryType)\n security.MDEntryType = entryType.getValue()\n price = fix.MDEntryPx()\n if group.isSetField(price):\n group.getField(price)\n security.MDEntryPx = price.getValue()\n size = fix.MDEntrySize()\n if group.isSetField(size):\n group.getField(size)\n security.MDEntrySize = size.getValue()\n qty = fix.MinQty()\n if group.isSetField(qty):\n group.getField(qty)\n security.MinQty = qty.getValue()\n fire(self.callback, 'OnTradeUpdated', **{'trade': security})\n if msgType.getValue() == 'W':\n book = BOOK()\n Symbol = fix.Symbol()\n message.getField(Symbol)\n book.symbol = Symbol.getValue()\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries()\n MDEntryType = fix.MDEntryType()\n MDEntryPx = fix.MDEntryPx()\n MDEntrySize = fix.MDEntrySize()\n for i in range(1, noMDEntries.getValue()):\n message.getGroup(i, group)\n group.getField(MDEntryType)\n group.getField(MDEntryPx)\n group.getField(MDEntrySize)\n if MDEntryType.getValue() == '0':\n book.bid.append(MDEntryPx.getValue())\n book.bid_size.append(MDEntrySize.getValue())\n if MDEntryType.getValue() == '1':\n book.ask.append(MDEntryPx.getValue())\n book.ask_size.append(MDEntrySize.getValue())\n fire(self.callback, 'OnBookUpdated', **{'book': book})\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n\n def sendOrder(self, ticker, side, type, px, qty):\n nos = fix.Message()\n nos.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n nos.getHeader().setField(fix.MsgType(fix.MsgType_NewOrderSingle))\n symbol = fix.Symbol(ticker)\n nos.setField(symbol)\n if side == 'BUY':\n side = fix.Side(fix.Side_BUY)\n if side == 'SELL':\n side = fix.Side(fix.Side_SELL)\n nos.setField(side)\n if type == 'MARKET':\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(0)\n if type == 'LIMIT':\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(px)\n nos.setField(ordType)\n nos.setField(px)\n orderQty = fix.OrderQty(qty)\n clOrdID = fix.ClOrdID(self.genOrderID())\n nos.setField(orderQty)\n nos.setField(clOrdID)\n TimeInForce = fix.TimeInForce(fix.TimeInForce_GOOD_TILL_CANCEL)\n TransactTime = fix.TransactTime()\n nos.setField(TimeInForce)\n nos.setField(TransactTime)\n fix.Session.sendToTarget(nos, self.sessionID)\n\n\ndef fire(handlers, event, **kwargs):\n for handler in handlers.get(event, []):\n handler(**kwargs)\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n \"\"\"FIX Application\"\"\"\n callback = 0\n sessionID = 0\n orderID = 0\n execID = 0\n\n def __init__(self, callback=0):\n fix.Application.__init__(self)\n self.callback = callback\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print('logged on!')\n return\n\n def onLogout(self, sessionID):\n return\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromAdmin(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def toApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n self.onMessage(message, sessionID)\n return\n\n def onMessage(self, message, sessionID):\n msgType = fix.MsgType()\n message.getHeader().getField(msgType)\n if msgType.getValue() == 'X':\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n if noMDEntries.getValue() != 1:\n return\n group = fix44.MarketDataIncrementalRefresh.NoMDEntries()\n message.getGroup(1, group)\n entryID = fix.MDEntryID()\n group.getField(entryID)\n action = fix.MDUpdateAction()\n group.getField(action)\n security = LAST_TRADE()\n security.MDEntryID = entryID.getValue()\n security.MDUpdateAction = action.getValue()\n symbol = fix.Symbol()\n if group.isSetField(symbol):\n group.getField(symbol)\n security.Symbol = symbol.getValue()\n entryType = fix.MDEntryType()\n if group.isSetField(entryType):\n group.getField(entryType)\n security.MDEntryType = entryType.getValue()\n price = fix.MDEntryPx()\n if group.isSetField(price):\n group.getField(price)\n security.MDEntryPx = price.getValue()\n size = fix.MDEntrySize()\n if group.isSetField(size):\n group.getField(size)\n security.MDEntrySize = size.getValue()\n qty = fix.MinQty()\n if group.isSetField(qty):\n group.getField(qty)\n security.MinQty = qty.getValue()\n fire(self.callback, 'OnTradeUpdated', **{'trade': security})\n if msgType.getValue() == 'W':\n book = BOOK()\n Symbol = fix.Symbol()\n message.getField(Symbol)\n book.symbol = Symbol.getValue()\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries()\n MDEntryType = fix.MDEntryType()\n MDEntryPx = fix.MDEntryPx()\n MDEntrySize = fix.MDEntrySize()\n for i in range(1, noMDEntries.getValue()):\n message.getGroup(i, group)\n group.getField(MDEntryType)\n group.getField(MDEntryPx)\n group.getField(MDEntrySize)\n if MDEntryType.getValue() == '0':\n book.bid.append(MDEntryPx.getValue())\n book.bid_size.append(MDEntrySize.getValue())\n if MDEntryType.getValue() == '1':\n book.ask.append(MDEntryPx.getValue())\n book.ask_size.append(MDEntrySize.getValue())\n fire(self.callback, 'OnBookUpdated', **{'book': book})\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n\n def sendOrder(self, ticker, side, type, px, qty):\n nos = fix.Message()\n nos.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n nos.getHeader().setField(fix.MsgType(fix.MsgType_NewOrderSingle))\n symbol = fix.Symbol(ticker)\n nos.setField(symbol)\n if side == 'BUY':\n side = fix.Side(fix.Side_BUY)\n if side == 'SELL':\n side = fix.Side(fix.Side_SELL)\n nos.setField(side)\n if type == 'MARKET':\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(0)\n if type == 'LIMIT':\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(px)\n nos.setField(ordType)\n nos.setField(px)\n orderQty = fix.OrderQty(qty)\n clOrdID = fix.ClOrdID(self.genOrderID())\n nos.setField(orderQty)\n nos.setField(clOrdID)\n TimeInForce = fix.TimeInForce(fix.TimeInForce_GOOD_TILL_CANCEL)\n TransactTime = fix.TransactTime()\n nos.setField(TimeInForce)\n nos.setField(TransactTime)\n fix.Session.sendToTarget(nos, self.sessionID)\n\n\ndef fire(handlers, event, **kwargs):\n for handler in handlers.get(event, []):\n handler(**kwargs)\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n \"\"\"FIX Application\"\"\"\n callback = 0\n sessionID = 0\n orderID = 0\n execID = 0\n\n def __init__(self, callback=0):\n fix.Application.__init__(self)\n self.callback = callback\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print('logged on!')\n return\n\n def onLogout(self, sessionID):\n return\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromAdmin(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def toApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n self.onMessage(message, sessionID)\n return\n\n def onMessage(self, message, sessionID):\n msgType = fix.MsgType()\n message.getHeader().getField(msgType)\n if msgType.getValue() == 'X':\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n if noMDEntries.getValue() != 1:\n return\n group = fix44.MarketDataIncrementalRefresh.NoMDEntries()\n message.getGroup(1, group)\n entryID = fix.MDEntryID()\n group.getField(entryID)\n action = fix.MDUpdateAction()\n group.getField(action)\n security = LAST_TRADE()\n security.MDEntryID = entryID.getValue()\n security.MDUpdateAction = action.getValue()\n symbol = fix.Symbol()\n if group.isSetField(symbol):\n group.getField(symbol)\n security.Symbol = symbol.getValue()\n entryType = fix.MDEntryType()\n if group.isSetField(entryType):\n group.getField(entryType)\n security.MDEntryType = entryType.getValue()\n price = fix.MDEntryPx()\n if group.isSetField(price):\n group.getField(price)\n security.MDEntryPx = price.getValue()\n size = fix.MDEntrySize()\n if group.isSetField(size):\n group.getField(size)\n security.MDEntrySize = size.getValue()\n qty = fix.MinQty()\n if group.isSetField(qty):\n group.getField(qty)\n security.MinQty = qty.getValue()\n fire(self.callback, 'OnTradeUpdated', **{'trade': security})\n if msgType.getValue() == 'W':\n book = BOOK()\n Symbol = fix.Symbol()\n message.getField(Symbol)\n book.symbol = Symbol.getValue()\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries()\n MDEntryType = fix.MDEntryType()\n MDEntryPx = fix.MDEntryPx()\n MDEntrySize = fix.MDEntrySize()\n for i in range(1, noMDEntries.getValue()):\n message.getGroup(i, group)\n group.getField(MDEntryType)\n group.getField(MDEntryPx)\n group.getField(MDEntrySize)\n if MDEntryType.getValue() == '0':\n book.bid.append(MDEntryPx.getValue())\n book.bid_size.append(MDEntrySize.getValue())\n if MDEntryType.getValue() == '1':\n book.ask.append(MDEntryPx.getValue())\n book.ask_size.append(MDEntrySize.getValue())\n fire(self.callback, 'OnBookUpdated', **{'book': book})\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n\n def sendOrder(self, ticker, side, type, px, qty):\n nos = fix.Message()\n nos.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n nos.getHeader().setField(fix.MsgType(fix.MsgType_NewOrderSingle))\n symbol = fix.Symbol(ticker)\n nos.setField(symbol)\n if side == 'BUY':\n side = fix.Side(fix.Side_BUY)\n if side == 'SELL':\n side = fix.Side(fix.Side_SELL)\n nos.setField(side)\n if type == 'MARKET':\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(0)\n if type == 'LIMIT':\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(px)\n nos.setField(ordType)\n nos.setField(px)\n orderQty = fix.OrderQty(qty)\n clOrdID = fix.ClOrdID(self.genOrderID())\n nos.setField(orderQty)\n nos.setField(clOrdID)\n TimeInForce = fix.TimeInForce(fix.TimeInForce_GOOD_TILL_CANCEL)\n TransactTime = fix.TransactTime()\n nos.setField(TimeInForce)\n nos.setField(TransactTime)\n fix.Session.sendToTarget(nos, self.sessionID)\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n callback = 0\n sessionID = 0\n orderID = 0\n execID = 0\n\n def __init__(self, callback=0):\n fix.Application.__init__(self)\n self.callback = callback\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print('logged on!')\n return\n\n def onLogout(self, sessionID):\n return\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromAdmin(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def toApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n self.onMessage(message, sessionID)\n return\n\n def onMessage(self, message, sessionID):\n msgType = fix.MsgType()\n message.getHeader().getField(msgType)\n if msgType.getValue() == 'X':\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n if noMDEntries.getValue() != 1:\n return\n group = fix44.MarketDataIncrementalRefresh.NoMDEntries()\n message.getGroup(1, group)\n entryID = fix.MDEntryID()\n group.getField(entryID)\n action = fix.MDUpdateAction()\n group.getField(action)\n security = LAST_TRADE()\n security.MDEntryID = entryID.getValue()\n security.MDUpdateAction = action.getValue()\n symbol = fix.Symbol()\n if group.isSetField(symbol):\n group.getField(symbol)\n security.Symbol = symbol.getValue()\n entryType = fix.MDEntryType()\n if group.isSetField(entryType):\n group.getField(entryType)\n security.MDEntryType = entryType.getValue()\n price = fix.MDEntryPx()\n if group.isSetField(price):\n group.getField(price)\n security.MDEntryPx = price.getValue()\n size = fix.MDEntrySize()\n if group.isSetField(size):\n group.getField(size)\n security.MDEntrySize = size.getValue()\n qty = fix.MinQty()\n if group.isSetField(qty):\n group.getField(qty)\n security.MinQty = qty.getValue()\n fire(self.callback, 'OnTradeUpdated', **{'trade': security})\n if msgType.getValue() == 'W':\n book = BOOK()\n Symbol = fix.Symbol()\n message.getField(Symbol)\n book.symbol = Symbol.getValue()\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries()\n MDEntryType = fix.MDEntryType()\n MDEntryPx = fix.MDEntryPx()\n MDEntrySize = fix.MDEntrySize()\n for i in range(1, noMDEntries.getValue()):\n message.getGroup(i, group)\n group.getField(MDEntryType)\n group.getField(MDEntryPx)\n group.getField(MDEntrySize)\n if MDEntryType.getValue() == '0':\n book.bid.append(MDEntryPx.getValue())\n book.bid_size.append(MDEntrySize.getValue())\n if MDEntryType.getValue() == '1':\n book.ask.append(MDEntryPx.getValue())\n book.ask_size.append(MDEntrySize.getValue())\n fire(self.callback, 'OnBookUpdated', **{'book': book})\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n\n def sendOrder(self, ticker, side, type, px, qty):\n nos = fix.Message()\n nos.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n nos.getHeader().setField(fix.MsgType(fix.MsgType_NewOrderSingle))\n symbol = fix.Symbol(ticker)\n nos.setField(symbol)\n if side == 'BUY':\n side = fix.Side(fix.Side_BUY)\n if side == 'SELL':\n side = fix.Side(fix.Side_SELL)\n nos.setField(side)\n if type == 'MARKET':\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(0)\n if type == 'LIMIT':\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(px)\n nos.setField(ordType)\n nos.setField(px)\n orderQty = fix.OrderQty(qty)\n clOrdID = fix.ClOrdID(self.genOrderID())\n nos.setField(orderQty)\n nos.setField(clOrdID)\n TimeInForce = fix.TimeInForce(fix.TimeInForce_GOOD_TILL_CANCEL)\n TransactTime = fix.TransactTime()\n nos.setField(TimeInForce)\n nos.setField(TransactTime)\n fix.Session.sendToTarget(nos, self.sessionID)\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self, callback=0):\n fix.Application.__init__(self)\n self.callback = callback\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print('logged on!')\n return\n\n def onLogout(self, sessionID):\n return\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromAdmin(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def toApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n self.onMessage(message, sessionID)\n return\n\n def onMessage(self, message, sessionID):\n msgType = fix.MsgType()\n message.getHeader().getField(msgType)\n if msgType.getValue() == 'X':\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n if noMDEntries.getValue() != 1:\n return\n group = fix44.MarketDataIncrementalRefresh.NoMDEntries()\n message.getGroup(1, group)\n entryID = fix.MDEntryID()\n group.getField(entryID)\n action = fix.MDUpdateAction()\n group.getField(action)\n security = LAST_TRADE()\n security.MDEntryID = entryID.getValue()\n security.MDUpdateAction = action.getValue()\n symbol = fix.Symbol()\n if group.isSetField(symbol):\n group.getField(symbol)\n security.Symbol = symbol.getValue()\n entryType = fix.MDEntryType()\n if group.isSetField(entryType):\n group.getField(entryType)\n security.MDEntryType = entryType.getValue()\n price = fix.MDEntryPx()\n if group.isSetField(price):\n group.getField(price)\n security.MDEntryPx = price.getValue()\n size = fix.MDEntrySize()\n if group.isSetField(size):\n group.getField(size)\n security.MDEntrySize = size.getValue()\n qty = fix.MinQty()\n if group.isSetField(qty):\n group.getField(qty)\n security.MinQty = qty.getValue()\n fire(self.callback, 'OnTradeUpdated', **{'trade': security})\n if msgType.getValue() == 'W':\n book = BOOK()\n Symbol = fix.Symbol()\n message.getField(Symbol)\n book.symbol = Symbol.getValue()\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries()\n MDEntryType = fix.MDEntryType()\n MDEntryPx = fix.MDEntryPx()\n MDEntrySize = fix.MDEntrySize()\n for i in range(1, noMDEntries.getValue()):\n message.getGroup(i, group)\n group.getField(MDEntryType)\n group.getField(MDEntryPx)\n group.getField(MDEntrySize)\n if MDEntryType.getValue() == '0':\n book.bid.append(MDEntryPx.getValue())\n book.bid_size.append(MDEntrySize.getValue())\n if MDEntryType.getValue() == '1':\n book.ask.append(MDEntryPx.getValue())\n book.ask_size.append(MDEntrySize.getValue())\n fire(self.callback, 'OnBookUpdated', **{'book': book})\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n\n def sendOrder(self, ticker, side, type, px, qty):\n nos = fix.Message()\n nos.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n nos.getHeader().setField(fix.MsgType(fix.MsgType_NewOrderSingle))\n symbol = fix.Symbol(ticker)\n nos.setField(symbol)\n if side == 'BUY':\n side = fix.Side(fix.Side_BUY)\n if side == 'SELL':\n side = fix.Side(fix.Side_SELL)\n nos.setField(side)\n if type == 'MARKET':\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(0)\n if type == 'LIMIT':\n ordType = fix.OrdType(fix.OrdType_MARKET)\n px = fix.Price(px)\n nos.setField(ordType)\n nos.setField(px)\n orderQty = fix.OrderQty(qty)\n clOrdID = fix.ClOrdID(self.genOrderID())\n nos.setField(orderQty)\n nos.setField(clOrdID)\n TimeInForce = fix.TimeInForce(fix.TimeInForce_GOOD_TILL_CANCEL)\n TransactTime = fix.TransactTime()\n nos.setField(TimeInForce)\n nos.setField(TransactTime)\n fix.Session.sendToTarget(nos, self.sessionID)\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self, callback=0):\n fix.Application.__init__(self)\n self.callback = callback\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print('logged on!')\n return\n\n def onLogout(self, sessionID):\n return\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromAdmin(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def toApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n self.onMessage(message, sessionID)\n return\n\n def onMessage(self, message, sessionID):\n msgType = fix.MsgType()\n message.getHeader().getField(msgType)\n if msgType.getValue() == 'X':\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n if noMDEntries.getValue() != 1:\n return\n group = fix44.MarketDataIncrementalRefresh.NoMDEntries()\n message.getGroup(1, group)\n entryID = fix.MDEntryID()\n group.getField(entryID)\n action = fix.MDUpdateAction()\n group.getField(action)\n security = LAST_TRADE()\n security.MDEntryID = entryID.getValue()\n security.MDUpdateAction = action.getValue()\n symbol = fix.Symbol()\n if group.isSetField(symbol):\n group.getField(symbol)\n security.Symbol = symbol.getValue()\n entryType = fix.MDEntryType()\n if group.isSetField(entryType):\n group.getField(entryType)\n security.MDEntryType = entryType.getValue()\n price = fix.MDEntryPx()\n if group.isSetField(price):\n group.getField(price)\n security.MDEntryPx = price.getValue()\n size = fix.MDEntrySize()\n if group.isSetField(size):\n group.getField(size)\n security.MDEntrySize = size.getValue()\n qty = fix.MinQty()\n if group.isSetField(qty):\n group.getField(qty)\n security.MinQty = qty.getValue()\n fire(self.callback, 'OnTradeUpdated', **{'trade': security})\n if msgType.getValue() == 'W':\n book = BOOK()\n Symbol = fix.Symbol()\n message.getField(Symbol)\n book.symbol = Symbol.getValue()\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries()\n MDEntryType = fix.MDEntryType()\n MDEntryPx = fix.MDEntryPx()\n MDEntrySize = fix.MDEntrySize()\n for i in range(1, noMDEntries.getValue()):\n message.getGroup(i, group)\n group.getField(MDEntryType)\n group.getField(MDEntryPx)\n group.getField(MDEntrySize)\n if MDEntryType.getValue() == '0':\n book.bid.append(MDEntryPx.getValue())\n book.bid_size.append(MDEntrySize.getValue())\n if MDEntryType.getValue() == '1':\n book.ask.append(MDEntryPx.getValue())\n book.ask_size.append(MDEntrySize.getValue())\n fire(self.callback, 'OnBookUpdated', **{'book': book})\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n <function token>\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self, callback=0):\n fix.Application.__init__(self)\n self.callback = callback\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print('logged on!')\n return\n\n def onLogout(self, sessionID):\n return\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromAdmin(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def toApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n <function token>\n\n def onMessage(self, message, sessionID):\n msgType = fix.MsgType()\n message.getHeader().getField(msgType)\n if msgType.getValue() == 'X':\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n if noMDEntries.getValue() != 1:\n return\n group = fix44.MarketDataIncrementalRefresh.NoMDEntries()\n message.getGroup(1, group)\n entryID = fix.MDEntryID()\n group.getField(entryID)\n action = fix.MDUpdateAction()\n group.getField(action)\n security = LAST_TRADE()\n security.MDEntryID = entryID.getValue()\n security.MDUpdateAction = action.getValue()\n symbol = fix.Symbol()\n if group.isSetField(symbol):\n group.getField(symbol)\n security.Symbol = symbol.getValue()\n entryType = fix.MDEntryType()\n if group.isSetField(entryType):\n group.getField(entryType)\n security.MDEntryType = entryType.getValue()\n price = fix.MDEntryPx()\n if group.isSetField(price):\n group.getField(price)\n security.MDEntryPx = price.getValue()\n size = fix.MDEntrySize()\n if group.isSetField(size):\n group.getField(size)\n security.MDEntrySize = size.getValue()\n qty = fix.MinQty()\n if group.isSetField(qty):\n group.getField(qty)\n security.MinQty = qty.getValue()\n fire(self.callback, 'OnTradeUpdated', **{'trade': security})\n if msgType.getValue() == 'W':\n book = BOOK()\n Symbol = fix.Symbol()\n message.getField(Symbol)\n book.symbol = Symbol.getValue()\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries()\n MDEntryType = fix.MDEntryType()\n MDEntryPx = fix.MDEntryPx()\n MDEntrySize = fix.MDEntrySize()\n for i in range(1, noMDEntries.getValue()):\n message.getGroup(i, group)\n group.getField(MDEntryType)\n group.getField(MDEntryPx)\n group.getField(MDEntrySize)\n if MDEntryType.getValue() == '0':\n book.bid.append(MDEntryPx.getValue())\n book.bid_size.append(MDEntrySize.getValue())\n if MDEntryType.getValue() == '1':\n book.ask.append(MDEntryPx.getValue())\n book.ask_size.append(MDEntrySize.getValue())\n fire(self.callback, 'OnBookUpdated', **{'book': book})\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n <function token>\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print('logged on!')\n return\n\n def onLogout(self, sessionID):\n return\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def fromAdmin(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n\n def toApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n <function token>\n\n def onMessage(self, message, sessionID):\n msgType = fix.MsgType()\n message.getHeader().getField(msgType)\n if msgType.getValue() == 'X':\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n if noMDEntries.getValue() != 1:\n return\n group = fix44.MarketDataIncrementalRefresh.NoMDEntries()\n message.getGroup(1, group)\n entryID = fix.MDEntryID()\n group.getField(entryID)\n action = fix.MDUpdateAction()\n group.getField(action)\n security = LAST_TRADE()\n security.MDEntryID = entryID.getValue()\n security.MDUpdateAction = action.getValue()\n symbol = fix.Symbol()\n if group.isSetField(symbol):\n group.getField(symbol)\n security.Symbol = symbol.getValue()\n entryType = fix.MDEntryType()\n if group.isSetField(entryType):\n group.getField(entryType)\n security.MDEntryType = entryType.getValue()\n price = fix.MDEntryPx()\n if group.isSetField(price):\n group.getField(price)\n security.MDEntryPx = price.getValue()\n size = fix.MDEntrySize()\n if group.isSetField(size):\n group.getField(size)\n security.MDEntrySize = size.getValue()\n qty = fix.MinQty()\n if group.isSetField(qty):\n group.getField(qty)\n security.MinQty = qty.getValue()\n fire(self.callback, 'OnTradeUpdated', **{'trade': security})\n if msgType.getValue() == 'W':\n book = BOOK()\n Symbol = fix.Symbol()\n message.getField(Symbol)\n book.symbol = Symbol.getValue()\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries()\n MDEntryType = fix.MDEntryType()\n MDEntryPx = fix.MDEntryPx()\n MDEntrySize = fix.MDEntrySize()\n for i in range(1, noMDEntries.getValue()):\n message.getGroup(i, group)\n group.getField(MDEntryType)\n group.getField(MDEntryPx)\n group.getField(MDEntrySize)\n if MDEntryType.getValue() == '0':\n book.bid.append(MDEntryPx.getValue())\n book.bid_size.append(MDEntrySize.getValue())\n if MDEntryType.getValue() == '1':\n book.ask.append(MDEntryPx.getValue())\n book.ask_size.append(MDEntrySize.getValue())\n fire(self.callback, 'OnBookUpdated', **{'book': book})\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n <function token>\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print('logged on!')\n return\n\n def onLogout(self, sessionID):\n return\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n <function token>\n\n def toApp(self, message, sessionID):\n msg = message.toString().replace(__SOH__, '|')\n return\n <function token>\n\n def onMessage(self, message, sessionID):\n msgType = fix.MsgType()\n message.getHeader().getField(msgType)\n if msgType.getValue() == 'X':\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n if noMDEntries.getValue() != 1:\n return\n group = fix44.MarketDataIncrementalRefresh.NoMDEntries()\n message.getGroup(1, group)\n entryID = fix.MDEntryID()\n group.getField(entryID)\n action = fix.MDUpdateAction()\n group.getField(action)\n security = LAST_TRADE()\n security.MDEntryID = entryID.getValue()\n security.MDUpdateAction = action.getValue()\n symbol = fix.Symbol()\n if group.isSetField(symbol):\n group.getField(symbol)\n security.Symbol = symbol.getValue()\n entryType = fix.MDEntryType()\n if group.isSetField(entryType):\n group.getField(entryType)\n security.MDEntryType = entryType.getValue()\n price = fix.MDEntryPx()\n if group.isSetField(price):\n group.getField(price)\n security.MDEntryPx = price.getValue()\n size = fix.MDEntrySize()\n if group.isSetField(size):\n group.getField(size)\n security.MDEntrySize = size.getValue()\n qty = fix.MinQty()\n if group.isSetField(qty):\n group.getField(qty)\n security.MinQty = qty.getValue()\n fire(self.callback, 'OnTradeUpdated', **{'trade': security})\n if msgType.getValue() == 'W':\n book = BOOK()\n Symbol = fix.Symbol()\n message.getField(Symbol)\n book.symbol = Symbol.getValue()\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries()\n MDEntryType = fix.MDEntryType()\n MDEntryPx = fix.MDEntryPx()\n MDEntrySize = fix.MDEntrySize()\n for i in range(1, noMDEntries.getValue()):\n message.getGroup(i, group)\n group.getField(MDEntryType)\n group.getField(MDEntryPx)\n group.getField(MDEntrySize)\n if MDEntryType.getValue() == '0':\n book.bid.append(MDEntryPx.getValue())\n book.bid_size.append(MDEntrySize.getValue())\n if MDEntryType.getValue() == '1':\n book.ask.append(MDEntryPx.getValue())\n book.ask_size.append(MDEntrySize.getValue())\n fire(self.callback, 'OnBookUpdated', **{'book': book})\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n <function token>\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print('logged on!')\n return\n\n def onLogout(self, sessionID):\n return\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n <function token>\n <function token>\n <function token>\n\n def onMessage(self, message, sessionID):\n msgType = fix.MsgType()\n message.getHeader().getField(msgType)\n if msgType.getValue() == 'X':\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n if noMDEntries.getValue() != 1:\n return\n group = fix44.MarketDataIncrementalRefresh.NoMDEntries()\n message.getGroup(1, group)\n entryID = fix.MDEntryID()\n group.getField(entryID)\n action = fix.MDUpdateAction()\n group.getField(action)\n security = LAST_TRADE()\n security.MDEntryID = entryID.getValue()\n security.MDUpdateAction = action.getValue()\n symbol = fix.Symbol()\n if group.isSetField(symbol):\n group.getField(symbol)\n security.Symbol = symbol.getValue()\n entryType = fix.MDEntryType()\n if group.isSetField(entryType):\n group.getField(entryType)\n security.MDEntryType = entryType.getValue()\n price = fix.MDEntryPx()\n if group.isSetField(price):\n group.getField(price)\n security.MDEntryPx = price.getValue()\n size = fix.MDEntrySize()\n if group.isSetField(size):\n group.getField(size)\n security.MDEntrySize = size.getValue()\n qty = fix.MinQty()\n if group.isSetField(qty):\n group.getField(qty)\n security.MinQty = qty.getValue()\n fire(self.callback, 'OnTradeUpdated', **{'trade': security})\n if msgType.getValue() == 'W':\n book = BOOK()\n Symbol = fix.Symbol()\n message.getField(Symbol)\n book.symbol = Symbol.getValue()\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries()\n MDEntryType = fix.MDEntryType()\n MDEntryPx = fix.MDEntryPx()\n MDEntrySize = fix.MDEntrySize()\n for i in range(1, noMDEntries.getValue()):\n message.getGroup(i, group)\n group.getField(MDEntryType)\n group.getField(MDEntryPx)\n group.getField(MDEntrySize)\n if MDEntryType.getValue() == '0':\n book.bid.append(MDEntryPx.getValue())\n book.bid_size.append(MDEntrySize.getValue())\n if MDEntryType.getValue() == '1':\n book.ask.append(MDEntryPx.getValue())\n book.ask_size.append(MDEntrySize.getValue())\n fire(self.callback, 'OnBookUpdated', **{'book': book})\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n <function token>\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print('logged on!')\n return\n <function token>\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n <function token>\n <function token>\n <function token>\n\n def onMessage(self, message, sessionID):\n msgType = fix.MsgType()\n message.getHeader().getField(msgType)\n if msgType.getValue() == 'X':\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n if noMDEntries.getValue() != 1:\n return\n group = fix44.MarketDataIncrementalRefresh.NoMDEntries()\n message.getGroup(1, group)\n entryID = fix.MDEntryID()\n group.getField(entryID)\n action = fix.MDUpdateAction()\n group.getField(action)\n security = LAST_TRADE()\n security.MDEntryID = entryID.getValue()\n security.MDUpdateAction = action.getValue()\n symbol = fix.Symbol()\n if group.isSetField(symbol):\n group.getField(symbol)\n security.Symbol = symbol.getValue()\n entryType = fix.MDEntryType()\n if group.isSetField(entryType):\n group.getField(entryType)\n security.MDEntryType = entryType.getValue()\n price = fix.MDEntryPx()\n if group.isSetField(price):\n group.getField(price)\n security.MDEntryPx = price.getValue()\n size = fix.MDEntrySize()\n if group.isSetField(size):\n group.getField(size)\n security.MDEntrySize = size.getValue()\n qty = fix.MinQty()\n if group.isSetField(qty):\n group.getField(qty)\n security.MinQty = qty.getValue()\n fire(self.callback, 'OnTradeUpdated', **{'trade': security})\n if msgType.getValue() == 'W':\n book = BOOK()\n Symbol = fix.Symbol()\n message.getField(Symbol)\n book.symbol = Symbol.getValue()\n noMDEntries = fix.NoMDEntries()\n message.getField(noMDEntries)\n group = fix44.MarketDataSnapshotFullRefresh.NoMDEntries()\n MDEntryType = fix.MDEntryType()\n MDEntryPx = fix.MDEntryPx()\n MDEntrySize = fix.MDEntrySize()\n for i in range(1, noMDEntries.getValue()):\n message.getGroup(i, group)\n group.getField(MDEntryType)\n group.getField(MDEntryPx)\n group.getField(MDEntrySize)\n if MDEntryType.getValue() == '0':\n book.bid.append(MDEntryPx.getValue())\n book.bid_size.append(MDEntrySize.getValue())\n if MDEntryType.getValue() == '1':\n book.ask.append(MDEntryPx.getValue())\n book.ask_size.append(MDEntrySize.getValue())\n fire(self.callback, 'OnBookUpdated', **{'book': book})\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n <function token>\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n\n def onLogon(self, sessionID):\n self.sessionID = sessionID\n print('logged on!')\n return\n <function token>\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n <function token>\n <function token>\n <function token>\n <function token>\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n <function token>\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n <function token>\n <function token>\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n <function token>\n <function token>\n <function token>\n <function token>\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n\n def genExecID(self):\n self.execID = self.execID + 1\n return str(self.execID)\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n <function token>\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def onCreate(self, sessionID):\n self.sessionID = sessionID\n return\n <function token>\n <function token>\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n <function token>\n <function token>\n <function token>\n <function token>\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n <function token>\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n <function token>\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n <function token>\n <function token>\n <function token>\n <function token>\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n <function token>\n\n def marketDataRequest(self, ticker, subscription_type):\n mdr = fix.Message()\n mdr.getHeader().setField(fix.BeginString(fix.BeginString_FIX44))\n mdr.getHeader().setField(fix.MsgType(fix.MsgType_MarketDataRequest))\n group = fix44.MarketDataRequest().NoRelatedSym()\n group.setField(fix.Symbol(ticker))\n mdr.addGroup(group)\n mdr.setField(fix.MDReqID('1'))\n mdr.setField(fix.SubscriptionRequestType(subscription_type))\n mdr.setField(fix.MarketDepth(0))\n mdr.setField(fix.NoMDEntryTypes(3))\n group = fix44.MarketDataRequest().NoMDEntryTypes()\n group.setField(fix.MDEntryType(fix.MDEntryType_BID))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_OFFER))\n mdr.addGroup(group)\n group.setField(fix.MDEntryType(fix.MDEntryType_TRADE))\n mdr.addGroup(group)\n fix.Session.sendToTarget(mdr, self.sessionID)\n return\n <function token>\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n <function token>\n <function token>\n <function token>\n <function token>\n pass\n\n def genOrderID(self):\n self.orderID = self.orderID + 1\n return str(self.orderID)\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def toAdmin(self, message, sessionID):\n username = fix.Username('YOUR API KEY')\n mypass = fix.Password('YOUR API SECRET')\n message.setField(username)\n message.setField(mypass)\n msg = message.toString().replace(__SOH__, '|')\n return\n <function token>\n <function token>\n <function token>\n <function token>\n pass\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n\n\nclass FixConnector(fix.Application):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n pass\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n", "<docstring token>\n<import token>\n<assignment token>\n<class token>\n<function token>\n" ]
false
98,983
7ff6c790d75c94a83663e4f9856a947b38cdd04b
""" @Time: 2018/5/10 15:09 @Author: qingyaocui """ import os import pickle from course.src.models import Admin, School, Teacher, Course, Classes, CourseToTeahcer, Student def show_choice(): show = ''' 1.菜单 2.创建学校 3.查看学校 4.创建老师 5.创建课程 6.查看课程 7.为课程增加老师 8.创建班级 Q|q.退出系统 ''' print(show) def create_school(): ''' 创建学校 :return: ''' school_name = input('请输入学校的名称:') if find_school_by_name(school_name): print("学校已经存在!") return new_school = School(school_name) new_school.save() print("%s 创建成功!" % school_name) def show_schools(): ''' 查看所有学校 :return: ''' for i in os.listdir(School.db_path): with open('%s/%s' % (School.db_path, i), 'rb') as f: sc = pickle.load(f) print(sc) def find_school_by_name(school_name): ''' 按学校名称查找学校 :param school_name:学校名称 :return: ''' for i in os.listdir(School.db_path): with open('%s/%s'% (School.db_path, i), 'rb') as f: sc = pickle.load(f) if sc.school_name == school_name: return sc return None def create_teacher(): ''' 创建老师 :return: ''' teacher_name = input('请输入老师的姓名:') teacher_level = input('请输入老师的等级:') school_name = input('请输入老师所在的学校:') sc = find_school_by_name(school_name) if sc: new_teacher = Teacher(teacher_name, teacher_level, sc.nid) new_teacher.save() sc.add_teacher(new_teacher.nid) sc.save() else: print("学校不存在!老师添加失败!") def find_teacher_by_name(teacher_name): ''' 按姓名查找老师 :param teacher_name: 老师姓名 :return:老师集合 ''' teachers = [] for i in os.listdir(Teacher.db_path): with open('%s/%s' % (Teacher.db_path, i), 'rb') as f: tea = pickle.load(f) if tea.teacher_name == teacher_name: teachers.append(tea) return teachers def show_teachers(school_name): ''' 按学校名称展示师资力量 :param school_name: 学校名称 :return: ''' sc = find_school_by_name(school_name) if sc: sc.show_teachers() else: print("学校不存在!无法展示老师信息!") def create_course(): ''' 创建课程 :return: ''' school_name = input('请输入要添加课程的学校名称:') course_name = input('请输入课程名称:') course_price = input('请输入课程价格:') course_period = input('请输入课程周期:') sc = find_school_by_name(school_name) if sc: new_course = Course(course_name, course_price, course_period, sc.nid) new_course.save() sc.add_course(new_course.nid) sc.save() else: print("学校不存在!课程添加失败!") def show_courses(): ''' 按学校名展示课程 :param school_name:学校名称 :return: ''' school_name = input('请输入要查询课程的学校名称:') sc = find_school_by_name(school_name) if sc: sc.show_courses() else: print('学校不存在!无法展示课程信息!') def add_teacher_to_course(): ''' 按学校名为课程添加教师 :param school_name: 学校名称 :return: ''' school_name = input("请输入操作的学校名称:") sc = find_school_by_name(school_name) if sc: sc.show_courses() course_name = input("请输入要添加任课老师的课程名称:") for c in sc.courses: c_obj = c.get_obj_by_uuid() if c_obj.course_name == course_name: teacher_name = input('请输入任课老师姓名:') for t in sc.teachers: t_obj = t.get_obj_by_uuid() if t_obj.teacher_name == teacher_name: c_obj.add_teacher(t_obj.nid) c_obj.save() ctt = CourseToTeahcer(c, t) ctt.save() sc.save() return print('老师不存在!') print("课程不存在!") else: print("学校不存在!为课程添加老师失败!") def create_class(): ''' 创建班级 :return: ''' school_name = input('请输入要添加班级的学校名称:') class_name = input('请输入班级名称:') sc = find_school_by_name(school_name) if sc: new_class = Classes(class_name, sc.nid) new_class.save() sc.add_class(new_class.nid) sc.save() else: print("学校不存在!班级添加失败!") def find_class_by_name(class_name): ''' 按班级名称查找班级 :param school_name:班级名称 :return: ''' for i in os.listdir(Classes.db_path): with open('%s/%s' % (Classes.db_path, i), 'rb') as f: cl = pickle.load(f) if cl.class_name == class_name: return cl return None def show_classes(): ''' 按学校名展示班级 :param school_name:学校名 :return: ''' school_name = input('请输入要查询班级的学校名称:') sc = find_school_by_name(school_name) if sc: sc.show_classes() else: print("学校不存在!无法展示班级信息!") def find_student_by_name(student_name): ''' 按姓名称查找学生 :param student_name:学生姓名 :return: ''' for i in os.listdir(Student.db_path): with open('%s/%s' % (Student.db_path, i), 'rb') as f: stu = pickle.load(f) if stu.student_name == student_name: return stu return None def quit_system(): print('Bye!') exit(0) def show_login(): print('选课系统'.center(30,'-')) print('管理员接口') times_limit = 5 count = 0 while True: if count < times_limit: username = input('请输入[管理员]用户名:') password = input('请输入[管理员]密码:') if Admin.login(username, password): break else: print('用户名或密码输入错误!请重新输入') count += 1 else: quit_system() def main(): show_login() choice_dict = { '1':show_choice, '2':create_school, '3':show_schools, '4':create_teacher, '5':create_course, '6':show_courses, '7':add_teacher_to_course, '8':create_class, 'Q':quit_system, 'q':quit_system } show_choice() while True: user_input = input("请输入选项:") if user_input not in choice_dict: print('请输入正确的选项~') continue option = choice_dict[user_input] option()
[ "\"\"\"\n @Time: 2018/5/10 15:09\n @Author: qingyaocui\n\"\"\"\nimport os\nimport pickle\nfrom course.src.models import Admin, School, Teacher, Course, Classes, CourseToTeahcer, Student\ndef show_choice():\n show = '''\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n '''\n print(show)\n\ndef create_school():\n '''\n 创建学校\n :return:\n '''\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print(\"学校已经存在!\")\n return\n new_school = School(school_name)\n new_school.save()\n print(\"%s 创建成功!\" % school_name)\n\ndef show_schools():\n '''\n 查看所有学校\n :return:\n '''\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\ndef find_school_by_name(school_name):\n '''\n 按学校名称查找学校\n :param school_name:学校名称\n :return:\n '''\n for i in os.listdir(School.db_path):\n with open('%s/%s'% (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n if sc.school_name == school_name:\n return sc\n\n return None\n\ndef create_teacher():\n '''\n 创建老师\n :return:\n '''\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print(\"学校不存在!老师添加失败!\")\n\ndef find_teacher_by_name(teacher_name):\n '''\n 按姓名查找老师\n :param teacher_name: 老师姓名\n :return:老师集合\n '''\n teachers = []\n for i in os.listdir(Teacher.db_path):\n with open('%s/%s' % (Teacher.db_path, i), 'rb') as f:\n tea = pickle.load(f)\n if tea.teacher_name == teacher_name:\n teachers.append(tea)\n return teachers\n\ndef show_teachers(school_name):\n '''\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n '''\n\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print(\"学校不存在!无法展示老师信息!\")\n\n\n\ndef create_course():\n '''\n 创建课程\n :return:\n '''\n school_name = input('请输入要添加课程的学校名称:')\n course_name = input('请输入课程名称:')\n course_price = input('请输入课程价格:')\n course_period = input('请输入课程周期:')\n sc = find_school_by_name(school_name)\n if sc:\n new_course = Course(course_name, course_price, course_period, sc.nid)\n new_course.save()\n sc.add_course(new_course.nid)\n sc.save()\n else:\n print(\"学校不存在!课程添加失败!\")\n\n\n\ndef show_courses():\n '''\n 按学校名展示课程\n :param school_name:学校名称\n :return:\n '''\n school_name = input('请输入要查询课程的学校名称:')\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_courses()\n else:\n print('学校不存在!无法展示课程信息!')\n\n\ndef add_teacher_to_course():\n '''\n 按学校名为课程添加教师\n :param school_name: 学校名称\n :return:\n '''\n school_name = input(\"请输入操作的学校名称:\")\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_courses()\n course_name = input(\"请输入要添加任课老师的课程名称:\")\n for c in sc.courses:\n c_obj = c.get_obj_by_uuid()\n if c_obj.course_name == course_name:\n teacher_name = input('请输入任课老师姓名:')\n for t in sc.teachers:\n t_obj = t.get_obj_by_uuid()\n if t_obj.teacher_name == teacher_name:\n c_obj.add_teacher(t_obj.nid)\n c_obj.save()\n ctt = CourseToTeahcer(c, t)\n ctt.save()\n sc.save()\n return\n print('老师不存在!')\n print(\"课程不存在!\")\n else:\n print(\"学校不存在!为课程添加老师失败!\")\n\n\ndef create_class():\n '''\n 创建班级\n :return:\n '''\n school_name = input('请输入要添加班级的学校名称:')\n class_name = input('请输入班级名称:')\n sc = find_school_by_name(school_name)\n if sc:\n new_class = Classes(class_name, sc.nid)\n new_class.save()\n sc.add_class(new_class.nid)\n sc.save()\n else:\n print(\"学校不存在!班级添加失败!\")\n\ndef find_class_by_name(class_name):\n '''\n 按班级名称查找班级\n :param school_name:班级名称\n :return:\n '''\n for i in os.listdir(Classes.db_path):\n with open('%s/%s' % (Classes.db_path, i), 'rb') as f:\n cl = pickle.load(f)\n if cl.class_name == class_name:\n return cl\n\n return None\n\n\ndef show_classes():\n '''\n 按学校名展示班级\n :param school_name:学校名\n :return:\n '''\n school_name = input('请输入要查询班级的学校名称:')\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_classes()\n else:\n print(\"学校不存在!无法展示班级信息!\")\n\ndef find_student_by_name(student_name):\n '''\n 按姓名称查找学生\n :param student_name:学生姓名\n :return:\n '''\n for i in os.listdir(Student.db_path):\n with open('%s/%s' % (Student.db_path, i), 'rb') as f:\n stu = pickle.load(f)\n if stu.student_name == student_name:\n return stu\n\n return None\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\ndef show_login():\n print('选课系统'.center(30,'-'))\n print('管理员接口')\n times_limit = 5\n count = 0\n while True:\n\n if count < times_limit:\n username = input('请输入[管理员]用户名:')\n password = input('请输入[管理员]密码:')\n if Admin.login(username, password):\n break\n else:\n print('用户名或密码输入错误!请重新输入')\n count += 1\n else:\n quit_system()\n\n\n\ndef main():\n\n show_login()\n choice_dict = {\n '1':show_choice,\n '2':create_school,\n '3':show_schools,\n '4':create_teacher,\n '5':create_course,\n '6':show_courses,\n '7':add_teacher_to_course,\n '8':create_class,\n 'Q':quit_system,\n 'q':quit_system\n }\n show_choice()\n while True:\n user_input = input(\"请输入选项:\")\n if user_input not in choice_dict:\n print('请输入正确的选项~')\n continue\n\n option = choice_dict[user_input]\n option()", "<docstring token>\nimport os\nimport pickle\nfrom course.src.models import Admin, School, Teacher, Course, Classes, CourseToTeahcer, Student\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\ndef find_school_by_name(school_name):\n \"\"\"\n 按学校名称查找学校\n :param school_name:学校名称\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n if sc.school_name == school_name:\n return sc\n return None\n\n\ndef create_teacher():\n \"\"\"\n 创建老师\n :return:\n \"\"\"\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print('学校不存在!老师添加失败!')\n\n\ndef find_teacher_by_name(teacher_name):\n \"\"\"\n 按姓名查找老师\n :param teacher_name: 老师姓名\n :return:老师集合\n \"\"\"\n teachers = []\n for i in os.listdir(Teacher.db_path):\n with open('%s/%s' % (Teacher.db_path, i), 'rb') as f:\n tea = pickle.load(f)\n if tea.teacher_name == teacher_name:\n teachers.append(tea)\n return teachers\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\ndef create_course():\n \"\"\"\n 创建课程\n :return:\n \"\"\"\n school_name = input('请输入要添加课程的学校名称:')\n course_name = input('请输入课程名称:')\n course_price = input('请输入课程价格:')\n course_period = input('请输入课程周期:')\n sc = find_school_by_name(school_name)\n if sc:\n new_course = Course(course_name, course_price, course_period, sc.nid)\n new_course.save()\n sc.add_course(new_course.nid)\n sc.save()\n else:\n print('学校不存在!课程添加失败!')\n\n\ndef show_courses():\n \"\"\"\n 按学校名展示课程\n :param school_name:学校名称\n :return:\n \"\"\"\n school_name = input('请输入要查询课程的学校名称:')\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_courses()\n else:\n print('学校不存在!无法展示课程信息!')\n\n\ndef add_teacher_to_course():\n \"\"\"\n 按学校名为课程添加教师\n :param school_name: 学校名称\n :return:\n \"\"\"\n school_name = input('请输入操作的学校名称:')\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_courses()\n course_name = input('请输入要添加任课老师的课程名称:')\n for c in sc.courses:\n c_obj = c.get_obj_by_uuid()\n if c_obj.course_name == course_name:\n teacher_name = input('请输入任课老师姓名:')\n for t in sc.teachers:\n t_obj = t.get_obj_by_uuid()\n if t_obj.teacher_name == teacher_name:\n c_obj.add_teacher(t_obj.nid)\n c_obj.save()\n ctt = CourseToTeahcer(c, t)\n ctt.save()\n sc.save()\n return\n print('老师不存在!')\n print('课程不存在!')\n else:\n print('学校不存在!为课程添加老师失败!')\n\n\ndef create_class():\n \"\"\"\n 创建班级\n :return:\n \"\"\"\n school_name = input('请输入要添加班级的学校名称:')\n class_name = input('请输入班级名称:')\n sc = find_school_by_name(school_name)\n if sc:\n new_class = Classes(class_name, sc.nid)\n new_class.save()\n sc.add_class(new_class.nid)\n sc.save()\n else:\n print('学校不存在!班级添加失败!')\n\n\ndef find_class_by_name(class_name):\n \"\"\"\n 按班级名称查找班级\n :param school_name:班级名称\n :return:\n \"\"\"\n for i in os.listdir(Classes.db_path):\n with open('%s/%s' % (Classes.db_path, i), 'rb') as f:\n cl = pickle.load(f)\n if cl.class_name == class_name:\n return cl\n return None\n\n\ndef show_classes():\n \"\"\"\n 按学校名展示班级\n :param school_name:学校名\n :return:\n \"\"\"\n school_name = input('请输入要查询班级的学校名称:')\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_classes()\n else:\n print('学校不存在!无法展示班级信息!')\n\n\ndef find_student_by_name(student_name):\n \"\"\"\n 按姓名称查找学生\n :param student_name:学生姓名\n :return:\n \"\"\"\n for i in os.listdir(Student.db_path):\n with open('%s/%s' % (Student.db_path, i), 'rb') as f:\n stu = pickle.load(f)\n if stu.student_name == student_name:\n return stu\n return None\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\ndef show_login():\n print('选课系统'.center(30, '-'))\n print('管理员接口')\n times_limit = 5\n count = 0\n while True:\n if count < times_limit:\n username = input('请输入[管理员]用户名:')\n password = input('请输入[管理员]密码:')\n if Admin.login(username, password):\n break\n else:\n print('用户名或密码输入错误!请重新输入')\n count += 1\n else:\n quit_system()\n\n\ndef main():\n show_login()\n choice_dict = {'1': show_choice, '2': create_school, '3': show_schools,\n '4': create_teacher, '5': create_course, '6': show_courses, '7':\n add_teacher_to_course, '8': create_class, 'Q': quit_system, 'q':\n quit_system}\n show_choice()\n while True:\n user_input = input('请输入选项:')\n if user_input not in choice_dict:\n print('请输入正确的选项~')\n continue\n option = choice_dict[user_input]\n option()\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\ndef find_school_by_name(school_name):\n \"\"\"\n 按学校名称查找学校\n :param school_name:学校名称\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n if sc.school_name == school_name:\n return sc\n return None\n\n\ndef create_teacher():\n \"\"\"\n 创建老师\n :return:\n \"\"\"\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print('学校不存在!老师添加失败!')\n\n\ndef find_teacher_by_name(teacher_name):\n \"\"\"\n 按姓名查找老师\n :param teacher_name: 老师姓名\n :return:老师集合\n \"\"\"\n teachers = []\n for i in os.listdir(Teacher.db_path):\n with open('%s/%s' % (Teacher.db_path, i), 'rb') as f:\n tea = pickle.load(f)\n if tea.teacher_name == teacher_name:\n teachers.append(tea)\n return teachers\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\ndef create_course():\n \"\"\"\n 创建课程\n :return:\n \"\"\"\n school_name = input('请输入要添加课程的学校名称:')\n course_name = input('请输入课程名称:')\n course_price = input('请输入课程价格:')\n course_period = input('请输入课程周期:')\n sc = find_school_by_name(school_name)\n if sc:\n new_course = Course(course_name, course_price, course_period, sc.nid)\n new_course.save()\n sc.add_course(new_course.nid)\n sc.save()\n else:\n print('学校不存在!课程添加失败!')\n\n\ndef show_courses():\n \"\"\"\n 按学校名展示课程\n :param school_name:学校名称\n :return:\n \"\"\"\n school_name = input('请输入要查询课程的学校名称:')\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_courses()\n else:\n print('学校不存在!无法展示课程信息!')\n\n\ndef add_teacher_to_course():\n \"\"\"\n 按学校名为课程添加教师\n :param school_name: 学校名称\n :return:\n \"\"\"\n school_name = input('请输入操作的学校名称:')\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_courses()\n course_name = input('请输入要添加任课老师的课程名称:')\n for c in sc.courses:\n c_obj = c.get_obj_by_uuid()\n if c_obj.course_name == course_name:\n teacher_name = input('请输入任课老师姓名:')\n for t in sc.teachers:\n t_obj = t.get_obj_by_uuid()\n if t_obj.teacher_name == teacher_name:\n c_obj.add_teacher(t_obj.nid)\n c_obj.save()\n ctt = CourseToTeahcer(c, t)\n ctt.save()\n sc.save()\n return\n print('老师不存在!')\n print('课程不存在!')\n else:\n print('学校不存在!为课程添加老师失败!')\n\n\ndef create_class():\n \"\"\"\n 创建班级\n :return:\n \"\"\"\n school_name = input('请输入要添加班级的学校名称:')\n class_name = input('请输入班级名称:')\n sc = find_school_by_name(school_name)\n if sc:\n new_class = Classes(class_name, sc.nid)\n new_class.save()\n sc.add_class(new_class.nid)\n sc.save()\n else:\n print('学校不存在!班级添加失败!')\n\n\ndef find_class_by_name(class_name):\n \"\"\"\n 按班级名称查找班级\n :param school_name:班级名称\n :return:\n \"\"\"\n for i in os.listdir(Classes.db_path):\n with open('%s/%s' % (Classes.db_path, i), 'rb') as f:\n cl = pickle.load(f)\n if cl.class_name == class_name:\n return cl\n return None\n\n\ndef show_classes():\n \"\"\"\n 按学校名展示班级\n :param school_name:学校名\n :return:\n \"\"\"\n school_name = input('请输入要查询班级的学校名称:')\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_classes()\n else:\n print('学校不存在!无法展示班级信息!')\n\n\ndef find_student_by_name(student_name):\n \"\"\"\n 按姓名称查找学生\n :param student_name:学生姓名\n :return:\n \"\"\"\n for i in os.listdir(Student.db_path):\n with open('%s/%s' % (Student.db_path, i), 'rb') as f:\n stu = pickle.load(f)\n if stu.student_name == student_name:\n return stu\n return None\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\ndef show_login():\n print('选课系统'.center(30, '-'))\n print('管理员接口')\n times_limit = 5\n count = 0\n while True:\n if count < times_limit:\n username = input('请输入[管理员]用户名:')\n password = input('请输入[管理员]密码:')\n if Admin.login(username, password):\n break\n else:\n print('用户名或密码输入错误!请重新输入')\n count += 1\n else:\n quit_system()\n\n\ndef main():\n show_login()\n choice_dict = {'1': show_choice, '2': create_school, '3': show_schools,\n '4': create_teacher, '5': create_course, '6': show_courses, '7':\n add_teacher_to_course, '8': create_class, 'Q': quit_system, 'q':\n quit_system}\n show_choice()\n while True:\n user_input = input('请输入选项:')\n if user_input not in choice_dict:\n print('请输入正确的选项~')\n continue\n option = choice_dict[user_input]\n option()\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\ndef find_school_by_name(school_name):\n \"\"\"\n 按学校名称查找学校\n :param school_name:学校名称\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n if sc.school_name == school_name:\n return sc\n return None\n\n\ndef create_teacher():\n \"\"\"\n 创建老师\n :return:\n \"\"\"\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print('学校不存在!老师添加失败!')\n\n\ndef find_teacher_by_name(teacher_name):\n \"\"\"\n 按姓名查找老师\n :param teacher_name: 老师姓名\n :return:老师集合\n \"\"\"\n teachers = []\n for i in os.listdir(Teacher.db_path):\n with open('%s/%s' % (Teacher.db_path, i), 'rb') as f:\n tea = pickle.load(f)\n if tea.teacher_name == teacher_name:\n teachers.append(tea)\n return teachers\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\ndef create_course():\n \"\"\"\n 创建课程\n :return:\n \"\"\"\n school_name = input('请输入要添加课程的学校名称:')\n course_name = input('请输入课程名称:')\n course_price = input('请输入课程价格:')\n course_period = input('请输入课程周期:')\n sc = find_school_by_name(school_name)\n if sc:\n new_course = Course(course_name, course_price, course_period, sc.nid)\n new_course.save()\n sc.add_course(new_course.nid)\n sc.save()\n else:\n print('学校不存在!课程添加失败!')\n\n\ndef show_courses():\n \"\"\"\n 按学校名展示课程\n :param school_name:学校名称\n :return:\n \"\"\"\n school_name = input('请输入要查询课程的学校名称:')\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_courses()\n else:\n print('学校不存在!无法展示课程信息!')\n\n\n<function token>\n\n\ndef create_class():\n \"\"\"\n 创建班级\n :return:\n \"\"\"\n school_name = input('请输入要添加班级的学校名称:')\n class_name = input('请输入班级名称:')\n sc = find_school_by_name(school_name)\n if sc:\n new_class = Classes(class_name, sc.nid)\n new_class.save()\n sc.add_class(new_class.nid)\n sc.save()\n else:\n print('学校不存在!班级添加失败!')\n\n\ndef find_class_by_name(class_name):\n \"\"\"\n 按班级名称查找班级\n :param school_name:班级名称\n :return:\n \"\"\"\n for i in os.listdir(Classes.db_path):\n with open('%s/%s' % (Classes.db_path, i), 'rb') as f:\n cl = pickle.load(f)\n if cl.class_name == class_name:\n return cl\n return None\n\n\ndef show_classes():\n \"\"\"\n 按学校名展示班级\n :param school_name:学校名\n :return:\n \"\"\"\n school_name = input('请输入要查询班级的学校名称:')\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_classes()\n else:\n print('学校不存在!无法展示班级信息!')\n\n\ndef find_student_by_name(student_name):\n \"\"\"\n 按姓名称查找学生\n :param student_name:学生姓名\n :return:\n \"\"\"\n for i in os.listdir(Student.db_path):\n with open('%s/%s' % (Student.db_path, i), 'rb') as f:\n stu = pickle.load(f)\n if stu.student_name == student_name:\n return stu\n return None\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\ndef show_login():\n print('选课系统'.center(30, '-'))\n print('管理员接口')\n times_limit = 5\n count = 0\n while True:\n if count < times_limit:\n username = input('请输入[管理员]用户名:')\n password = input('请输入[管理员]密码:')\n if Admin.login(username, password):\n break\n else:\n print('用户名或密码输入错误!请重新输入')\n count += 1\n else:\n quit_system()\n\n\ndef main():\n show_login()\n choice_dict = {'1': show_choice, '2': create_school, '3': show_schools,\n '4': create_teacher, '5': create_course, '6': show_courses, '7':\n add_teacher_to_course, '8': create_class, 'Q': quit_system, 'q':\n quit_system}\n show_choice()\n while True:\n user_input = input('请输入选项:')\n if user_input not in choice_dict:\n print('请输入正确的选项~')\n continue\n option = choice_dict[user_input]\n option()\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\ndef find_school_by_name(school_name):\n \"\"\"\n 按学校名称查找学校\n :param school_name:学校名称\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n if sc.school_name == school_name:\n return sc\n return None\n\n\ndef create_teacher():\n \"\"\"\n 创建老师\n :return:\n \"\"\"\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print('学校不存在!老师添加失败!')\n\n\ndef find_teacher_by_name(teacher_name):\n \"\"\"\n 按姓名查找老师\n :param teacher_name: 老师姓名\n :return:老师集合\n \"\"\"\n teachers = []\n for i in os.listdir(Teacher.db_path):\n with open('%s/%s' % (Teacher.db_path, i), 'rb') as f:\n tea = pickle.load(f)\n if tea.teacher_name == teacher_name:\n teachers.append(tea)\n return teachers\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\ndef create_course():\n \"\"\"\n 创建课程\n :return:\n \"\"\"\n school_name = input('请输入要添加课程的学校名称:')\n course_name = input('请输入课程名称:')\n course_price = input('请输入课程价格:')\n course_period = input('请输入课程周期:')\n sc = find_school_by_name(school_name)\n if sc:\n new_course = Course(course_name, course_price, course_period, sc.nid)\n new_course.save()\n sc.add_course(new_course.nid)\n sc.save()\n else:\n print('学校不存在!课程添加失败!')\n\n\ndef show_courses():\n \"\"\"\n 按学校名展示课程\n :param school_name:学校名称\n :return:\n \"\"\"\n school_name = input('请输入要查询课程的学校名称:')\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_courses()\n else:\n print('学校不存在!无法展示课程信息!')\n\n\n<function token>\n\n\ndef create_class():\n \"\"\"\n 创建班级\n :return:\n \"\"\"\n school_name = input('请输入要添加班级的学校名称:')\n class_name = input('请输入班级名称:')\n sc = find_school_by_name(school_name)\n if sc:\n new_class = Classes(class_name, sc.nid)\n new_class.save()\n sc.add_class(new_class.nid)\n sc.save()\n else:\n print('学校不存在!班级添加失败!')\n\n\ndef find_class_by_name(class_name):\n \"\"\"\n 按班级名称查找班级\n :param school_name:班级名称\n :return:\n \"\"\"\n for i in os.listdir(Classes.db_path):\n with open('%s/%s' % (Classes.db_path, i), 'rb') as f:\n cl = pickle.load(f)\n if cl.class_name == class_name:\n return cl\n return None\n\n\n<function token>\n\n\ndef find_student_by_name(student_name):\n \"\"\"\n 按姓名称查找学生\n :param student_name:学生姓名\n :return:\n \"\"\"\n for i in os.listdir(Student.db_path):\n with open('%s/%s' % (Student.db_path, i), 'rb') as f:\n stu = pickle.load(f)\n if stu.student_name == student_name:\n return stu\n return None\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\ndef show_login():\n print('选课系统'.center(30, '-'))\n print('管理员接口')\n times_limit = 5\n count = 0\n while True:\n if count < times_limit:\n username = input('请输入[管理员]用户名:')\n password = input('请输入[管理员]密码:')\n if Admin.login(username, password):\n break\n else:\n print('用户名或密码输入错误!请重新输入')\n count += 1\n else:\n quit_system()\n\n\ndef main():\n show_login()\n choice_dict = {'1': show_choice, '2': create_school, '3': show_schools,\n '4': create_teacher, '5': create_course, '6': show_courses, '7':\n add_teacher_to_course, '8': create_class, 'Q': quit_system, 'q':\n quit_system}\n show_choice()\n while True:\n user_input = input('请输入选项:')\n if user_input not in choice_dict:\n print('请输入正确的选项~')\n continue\n option = choice_dict[user_input]\n option()\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\ndef find_school_by_name(school_name):\n \"\"\"\n 按学校名称查找学校\n :param school_name:学校名称\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n if sc.school_name == school_name:\n return sc\n return None\n\n\ndef create_teacher():\n \"\"\"\n 创建老师\n :return:\n \"\"\"\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print('学校不存在!老师添加失败!')\n\n\ndef find_teacher_by_name(teacher_name):\n \"\"\"\n 按姓名查找老师\n :param teacher_name: 老师姓名\n :return:老师集合\n \"\"\"\n teachers = []\n for i in os.listdir(Teacher.db_path):\n with open('%s/%s' % (Teacher.db_path, i), 'rb') as f:\n tea = pickle.load(f)\n if tea.teacher_name == teacher_name:\n teachers.append(tea)\n return teachers\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\ndef create_course():\n \"\"\"\n 创建课程\n :return:\n \"\"\"\n school_name = input('请输入要添加课程的学校名称:')\n course_name = input('请输入课程名称:')\n course_price = input('请输入课程价格:')\n course_period = input('请输入课程周期:')\n sc = find_school_by_name(school_name)\n if sc:\n new_course = Course(course_name, course_price, course_period, sc.nid)\n new_course.save()\n sc.add_course(new_course.nid)\n sc.save()\n else:\n print('学校不存在!课程添加失败!')\n\n\ndef show_courses():\n \"\"\"\n 按学校名展示课程\n :param school_name:学校名称\n :return:\n \"\"\"\n school_name = input('请输入要查询课程的学校名称:')\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_courses()\n else:\n print('学校不存在!无法展示课程信息!')\n\n\n<function token>\n<function token>\n\n\ndef find_class_by_name(class_name):\n \"\"\"\n 按班级名称查找班级\n :param school_name:班级名称\n :return:\n \"\"\"\n for i in os.listdir(Classes.db_path):\n with open('%s/%s' % (Classes.db_path, i), 'rb') as f:\n cl = pickle.load(f)\n if cl.class_name == class_name:\n return cl\n return None\n\n\n<function token>\n\n\ndef find_student_by_name(student_name):\n \"\"\"\n 按姓名称查找学生\n :param student_name:学生姓名\n :return:\n \"\"\"\n for i in os.listdir(Student.db_path):\n with open('%s/%s' % (Student.db_path, i), 'rb') as f:\n stu = pickle.load(f)\n if stu.student_name == student_name:\n return stu\n return None\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\ndef show_login():\n print('选课系统'.center(30, '-'))\n print('管理员接口')\n times_limit = 5\n count = 0\n while True:\n if count < times_limit:\n username = input('请输入[管理员]用户名:')\n password = input('请输入[管理员]密码:')\n if Admin.login(username, password):\n break\n else:\n print('用户名或密码输入错误!请重新输入')\n count += 1\n else:\n quit_system()\n\n\ndef main():\n show_login()\n choice_dict = {'1': show_choice, '2': create_school, '3': show_schools,\n '4': create_teacher, '5': create_course, '6': show_courses, '7':\n add_teacher_to_course, '8': create_class, 'Q': quit_system, 'q':\n quit_system}\n show_choice()\n while True:\n user_input = input('请输入选项:')\n if user_input not in choice_dict:\n print('请输入正确的选项~')\n continue\n option = choice_dict[user_input]\n option()\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\ndef find_school_by_name(school_name):\n \"\"\"\n 按学校名称查找学校\n :param school_name:学校名称\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n if sc.school_name == school_name:\n return sc\n return None\n\n\ndef create_teacher():\n \"\"\"\n 创建老师\n :return:\n \"\"\"\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print('学校不存在!老师添加失败!')\n\n\ndef find_teacher_by_name(teacher_name):\n \"\"\"\n 按姓名查找老师\n :param teacher_name: 老师姓名\n :return:老师集合\n \"\"\"\n teachers = []\n for i in os.listdir(Teacher.db_path):\n with open('%s/%s' % (Teacher.db_path, i), 'rb') as f:\n tea = pickle.load(f)\n if tea.teacher_name == teacher_name:\n teachers.append(tea)\n return teachers\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\ndef create_course():\n \"\"\"\n 创建课程\n :return:\n \"\"\"\n school_name = input('请输入要添加课程的学校名称:')\n course_name = input('请输入课程名称:')\n course_price = input('请输入课程价格:')\n course_period = input('请输入课程周期:')\n sc = find_school_by_name(school_name)\n if sc:\n new_course = Course(course_name, course_price, course_period, sc.nid)\n new_course.save()\n sc.add_course(new_course.nid)\n sc.save()\n else:\n print('学校不存在!课程添加失败!')\n\n\ndef show_courses():\n \"\"\"\n 按学校名展示课程\n :param school_name:学校名称\n :return:\n \"\"\"\n school_name = input('请输入要查询课程的学校名称:')\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_courses()\n else:\n print('学校不存在!无法展示课程信息!')\n\n\n<function token>\n<function token>\n\n\ndef find_class_by_name(class_name):\n \"\"\"\n 按班级名称查找班级\n :param school_name:班级名称\n :return:\n \"\"\"\n for i in os.listdir(Classes.db_path):\n with open('%s/%s' % (Classes.db_path, i), 'rb') as f:\n cl = pickle.load(f)\n if cl.class_name == class_name:\n return cl\n return None\n\n\n<function token>\n\n\ndef find_student_by_name(student_name):\n \"\"\"\n 按姓名称查找学生\n :param student_name:学生姓名\n :return:\n \"\"\"\n for i in os.listdir(Student.db_path):\n with open('%s/%s' % (Student.db_path, i), 'rb') as f:\n stu = pickle.load(f)\n if stu.student_name == student_name:\n return stu\n return None\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\n<function token>\n\n\ndef main():\n show_login()\n choice_dict = {'1': show_choice, '2': create_school, '3': show_schools,\n '4': create_teacher, '5': create_course, '6': show_courses, '7':\n add_teacher_to_course, '8': create_class, 'Q': quit_system, 'q':\n quit_system}\n show_choice()\n while True:\n user_input = input('请输入选项:')\n if user_input not in choice_dict:\n print('请输入正确的选项~')\n continue\n option = choice_dict[user_input]\n option()\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\ndef find_school_by_name(school_name):\n \"\"\"\n 按学校名称查找学校\n :param school_name:学校名称\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n if sc.school_name == school_name:\n return sc\n return None\n\n\ndef create_teacher():\n \"\"\"\n 创建老师\n :return:\n \"\"\"\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print('学校不存在!老师添加失败!')\n\n\ndef find_teacher_by_name(teacher_name):\n \"\"\"\n 按姓名查找老师\n :param teacher_name: 老师姓名\n :return:老师集合\n \"\"\"\n teachers = []\n for i in os.listdir(Teacher.db_path):\n with open('%s/%s' % (Teacher.db_path, i), 'rb') as f:\n tea = pickle.load(f)\n if tea.teacher_name == teacher_name:\n teachers.append(tea)\n return teachers\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\ndef create_course():\n \"\"\"\n 创建课程\n :return:\n \"\"\"\n school_name = input('请输入要添加课程的学校名称:')\n course_name = input('请输入课程名称:')\n course_price = input('请输入课程价格:')\n course_period = input('请输入课程周期:')\n sc = find_school_by_name(school_name)\n if sc:\n new_course = Course(course_name, course_price, course_period, sc.nid)\n new_course.save()\n sc.add_course(new_course.nid)\n sc.save()\n else:\n print('学校不存在!课程添加失败!')\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef find_class_by_name(class_name):\n \"\"\"\n 按班级名称查找班级\n :param school_name:班级名称\n :return:\n \"\"\"\n for i in os.listdir(Classes.db_path):\n with open('%s/%s' % (Classes.db_path, i), 'rb') as f:\n cl = pickle.load(f)\n if cl.class_name == class_name:\n return cl\n return None\n\n\n<function token>\n\n\ndef find_student_by_name(student_name):\n \"\"\"\n 按姓名称查找学生\n :param student_name:学生姓名\n :return:\n \"\"\"\n for i in os.listdir(Student.db_path):\n with open('%s/%s' % (Student.db_path, i), 'rb') as f:\n stu = pickle.load(f)\n if stu.student_name == student_name:\n return stu\n return None\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\n<function token>\n\n\ndef main():\n show_login()\n choice_dict = {'1': show_choice, '2': create_school, '3': show_schools,\n '4': create_teacher, '5': create_course, '6': show_courses, '7':\n add_teacher_to_course, '8': create_class, 'Q': quit_system, 'q':\n quit_system}\n show_choice()\n while True:\n user_input = input('请输入选项:')\n if user_input not in choice_dict:\n print('请输入正确的选项~')\n continue\n option = choice_dict[user_input]\n option()\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\ndef find_school_by_name(school_name):\n \"\"\"\n 按学校名称查找学校\n :param school_name:学校名称\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n if sc.school_name == school_name:\n return sc\n return None\n\n\ndef create_teacher():\n \"\"\"\n 创建老师\n :return:\n \"\"\"\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print('学校不存在!老师添加失败!')\n\n\ndef find_teacher_by_name(teacher_name):\n \"\"\"\n 按姓名查找老师\n :param teacher_name: 老师姓名\n :return:老师集合\n \"\"\"\n teachers = []\n for i in os.listdir(Teacher.db_path):\n with open('%s/%s' % (Teacher.db_path, i), 'rb') as f:\n tea = pickle.load(f)\n if tea.teacher_name == teacher_name:\n teachers.append(tea)\n return teachers\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef find_class_by_name(class_name):\n \"\"\"\n 按班级名称查找班级\n :param school_name:班级名称\n :return:\n \"\"\"\n for i in os.listdir(Classes.db_path):\n with open('%s/%s' % (Classes.db_path, i), 'rb') as f:\n cl = pickle.load(f)\n if cl.class_name == class_name:\n return cl\n return None\n\n\n<function token>\n\n\ndef find_student_by_name(student_name):\n \"\"\"\n 按姓名称查找学生\n :param student_name:学生姓名\n :return:\n \"\"\"\n for i in os.listdir(Student.db_path):\n with open('%s/%s' % (Student.db_path, i), 'rb') as f:\n stu = pickle.load(f)\n if stu.student_name == student_name:\n return stu\n return None\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\n<function token>\n\n\ndef main():\n show_login()\n choice_dict = {'1': show_choice, '2': create_school, '3': show_schools,\n '4': create_teacher, '5': create_course, '6': show_courses, '7':\n add_teacher_to_course, '8': create_class, 'Q': quit_system, 'q':\n quit_system}\n show_choice()\n while True:\n user_input = input('请输入选项:')\n if user_input not in choice_dict:\n print('请输入正确的选项~')\n continue\n option = choice_dict[user_input]\n option()\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\ndef find_school_by_name(school_name):\n \"\"\"\n 按学校名称查找学校\n :param school_name:学校名称\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n if sc.school_name == school_name:\n return sc\n return None\n\n\ndef create_teacher():\n \"\"\"\n 创建老师\n :return:\n \"\"\"\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print('学校不存在!老师添加失败!')\n\n\ndef find_teacher_by_name(teacher_name):\n \"\"\"\n 按姓名查找老师\n :param teacher_name: 老师姓名\n :return:老师集合\n \"\"\"\n teachers = []\n for i in os.listdir(Teacher.db_path):\n with open('%s/%s' % (Teacher.db_path, i), 'rb') as f:\n tea = pickle.load(f)\n if tea.teacher_name == teacher_name:\n teachers.append(tea)\n return teachers\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef find_class_by_name(class_name):\n \"\"\"\n 按班级名称查找班级\n :param school_name:班级名称\n :return:\n \"\"\"\n for i in os.listdir(Classes.db_path):\n with open('%s/%s' % (Classes.db_path, i), 'rb') as f:\n cl = pickle.load(f)\n if cl.class_name == class_name:\n return cl\n return None\n\n\n<function token>\n\n\ndef find_student_by_name(student_name):\n \"\"\"\n 按姓名称查找学生\n :param student_name:学生姓名\n :return:\n \"\"\"\n for i in os.listdir(Student.db_path):\n with open('%s/%s' % (Student.db_path, i), 'rb') as f:\n stu = pickle.load(f)\n if stu.student_name == student_name:\n return stu\n return None\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\n<function token>\n\n\ndef create_teacher():\n \"\"\"\n 创建老师\n :return:\n \"\"\"\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print('学校不存在!老师添加失败!')\n\n\ndef find_teacher_by_name(teacher_name):\n \"\"\"\n 按姓名查找老师\n :param teacher_name: 老师姓名\n :return:老师集合\n \"\"\"\n teachers = []\n for i in os.listdir(Teacher.db_path):\n with open('%s/%s' % (Teacher.db_path, i), 'rb') as f:\n tea = pickle.load(f)\n if tea.teacher_name == teacher_name:\n teachers.append(tea)\n return teachers\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef find_class_by_name(class_name):\n \"\"\"\n 按班级名称查找班级\n :param school_name:班级名称\n :return:\n \"\"\"\n for i in os.listdir(Classes.db_path):\n with open('%s/%s' % (Classes.db_path, i), 'rb') as f:\n cl = pickle.load(f)\n if cl.class_name == class_name:\n return cl\n return None\n\n\n<function token>\n\n\ndef find_student_by_name(student_name):\n \"\"\"\n 按姓名称查找学生\n :param student_name:学生姓名\n :return:\n \"\"\"\n for i in os.listdir(Student.db_path):\n with open('%s/%s' % (Student.db_path, i), 'rb') as f:\n stu = pickle.load(f)\n if stu.student_name == student_name:\n return stu\n return None\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\n<function token>\n\n\ndef create_teacher():\n \"\"\"\n 创建老师\n :return:\n \"\"\"\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print('学校不存在!老师添加失败!')\n\n\ndef find_teacher_by_name(teacher_name):\n \"\"\"\n 按姓名查找老师\n :param teacher_name: 老师姓名\n :return:老师集合\n \"\"\"\n teachers = []\n for i in os.listdir(Teacher.db_path):\n with open('%s/%s' % (Teacher.db_path, i), 'rb') as f:\n tea = pickle.load(f)\n if tea.teacher_name == teacher_name:\n teachers.append(tea)\n return teachers\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef find_class_by_name(class_name):\n \"\"\"\n 按班级名称查找班级\n :param school_name:班级名称\n :return:\n \"\"\"\n for i in os.listdir(Classes.db_path):\n with open('%s/%s' % (Classes.db_path, i), 'rb') as f:\n cl = pickle.load(f)\n if cl.class_name == class_name:\n return cl\n return None\n\n\n<function token>\n<function token>\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\n<function token>\n\n\ndef create_teacher():\n \"\"\"\n 创建老师\n :return:\n \"\"\"\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print('学校不存在!老师添加失败!')\n\n\ndef find_teacher_by_name(teacher_name):\n \"\"\"\n 按姓名查找老师\n :param teacher_name: 老师姓名\n :return:老师集合\n \"\"\"\n teachers = []\n for i in os.listdir(Teacher.db_path):\n with open('%s/%s' % (Teacher.db_path, i), 'rb') as f:\n tea = pickle.load(f)\n if tea.teacher_name == teacher_name:\n teachers.append(tea)\n return teachers\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\n<function token>\n\n\ndef create_teacher():\n \"\"\"\n 创建老师\n :return:\n \"\"\"\n teacher_name = input('请输入老师的姓名:')\n teacher_level = input('请输入老师的等级:')\n school_name = input('请输入老师所在的学校:')\n sc = find_school_by_name(school_name)\n if sc:\n new_teacher = Teacher(teacher_name, teacher_level, sc.nid)\n new_teacher.save()\n sc.add_teacher(new_teacher.nid)\n sc.save()\n else:\n print('学校不存在!老师添加失败!')\n\n\n<function token>\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef quit_system():\n print('Bye!')\n exit(0)\n\n\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef show_teachers(school_name):\n \"\"\"\n 按学校名称展示师资力量\n :param school_name: 学校名称\n :return:\n \"\"\"\n sc = find_school_by_name(school_name)\n if sc:\n sc.show_teachers()\n else:\n print('学校不存在!无法展示老师信息!')\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\ndef create_school():\n \"\"\"\n 创建学校\n :return:\n \"\"\"\n school_name = input('请输入学校的名称:')\n if find_school_by_name(school_name):\n print('学校已经存在!')\n return\n new_school = School(school_name)\n new_school.save()\n print('%s 创建成功!' % school_name)\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\n<function token>\n\n\ndef show_schools():\n \"\"\"\n 查看所有学校\n :return:\n \"\"\"\n for i in os.listdir(School.db_path):\n with open('%s/%s' % (School.db_path, i), 'rb') as f:\n sc = pickle.load(f)\n print(sc)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n\n\ndef show_choice():\n show = \"\"\"\n 1.菜单\n 2.创建学校\n 3.查看学校\n 4.创建老师\n 5.创建课程\n 6.查看课程\n 7.为课程增加老师\n 8.创建班级\n Q|q.退出系统\n \"\"\"\n print(show)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n", "<docstring token>\n<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n" ]
false
98,984
02c2b9725dce20a9c4933f927962d91dea0a62ee
#!/usr/bin/env python import rospy import time from ark_bridge.msg import Empty def response_callback(data): global started if started: rospy.signal_shutdown("ARK Started") def ark_starter(): global started started = False print "Starting..." rospy.init_node("ark_starter") rospy.Subscriber("/ark_bridge/ark_start_response", Empty, response_callback) time.sleep(0.3) started = True print "Starting ARK" pub = rospy.Publisher("/ark_bridge/ark_start_call", Empty, latch=True, queue_size=1) pub.publish(Empty()) rospy.spin() print "Done" ark_starter()
[ "#!/usr/bin/env python\n\nimport rospy\nimport time\nfrom ark_bridge.msg import Empty\n\ndef response_callback(data):\n global started\n if started:\n rospy.signal_shutdown(\"ARK Started\")\n\ndef ark_starter():\n global started\n started = False\n print \"Starting...\"\n\n rospy.init_node(\"ark_starter\")\n rospy.Subscriber(\"/ark_bridge/ark_start_response\", Empty, response_callback)\n time.sleep(0.3)\n started = True\n\n print \"Starting ARK\"\n\n pub = rospy.Publisher(\"/ark_bridge/ark_start_call\", Empty, latch=True, queue_size=1)\n pub.publish(Empty())\n\n rospy.spin()\n\n print \"Done\"\n\nark_starter()\n" ]
true
98,985
301543d3330cb2e1b3473e623141cdf4a424b9b4
class BankAccountSnapshot: """ A memento token class that captures a snapshot of a bank account's state. """ def __init__(self, balance): self.balance = balance class BankAccount: def __init__(self, balance=0): self.balance = balance self._changes = [BankAccountSnapshot(balance=self.balance)] self._current = 0 def deposit(self, amount): self.balance += amount m = BankAccountSnapshot(balance=self.balance) self._changes.append(m) self._current += 1 return m def withdraw(self, amount): self.balance -= amount m = BankAccountSnapshot(balance=self.balance) self._changes.append(m) self._current += 1 return m def restore(self, snapshot): if snapshot: self.balance = snapshot.balance self._changes.append(snapshot) self._current = len(self._changes) - 1 def undo(self): if self._current > 0: self._current -= 1 m = self._changes[self._current] self.balance = m.balance return m def redo(self): if self._current + 1 < len(self._changes): self._current += 1 m = self._changes[self._current] self.balance = m.balance return m def __repr__(self): return f'{self.__class__.__name__}(balance={self.balance})' if __name__ == '__main__': acct = BankAccount() acct.deposit(200) acct.withdraw(50) print(acct) acct.undo() print(acct) acct.redo() print(acct)
[ "class BankAccountSnapshot:\n \"\"\"\n A memento token class that captures a snapshot of a bank account's state.\n \"\"\"\n\n def __init__(self, balance):\n self.balance = balance\n\n\nclass BankAccount:\n\n def __init__(self, balance=0):\n self.balance = balance\n self._changes = [BankAccountSnapshot(balance=self.balance)]\n self._current = 0\n\n def deposit(self, amount):\n self.balance += amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n\n def withdraw(self, amount):\n self.balance -= amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n\n def restore(self, snapshot):\n if snapshot:\n self.balance = snapshot.balance\n self._changes.append(snapshot)\n self._current = len(self._changes) - 1\n\n def undo(self):\n if self._current > 0:\n self._current -= 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def redo(self):\n if self._current + 1 < len(self._changes):\n self._current += 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def __repr__(self):\n return f'{self.__class__.__name__}(balance={self.balance})'\n\n\nif __name__ == '__main__':\n acct = BankAccount()\n acct.deposit(200)\n acct.withdraw(50)\n print(acct)\n\n acct.undo()\n print(acct)\n\n acct.redo()\n print(acct)\n", "class BankAccountSnapshot:\n \"\"\"\n A memento token class that captures a snapshot of a bank account's state.\n \"\"\"\n\n def __init__(self, balance):\n self.balance = balance\n\n\nclass BankAccount:\n\n def __init__(self, balance=0):\n self.balance = balance\n self._changes = [BankAccountSnapshot(balance=self.balance)]\n self._current = 0\n\n def deposit(self, amount):\n self.balance += amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n\n def withdraw(self, amount):\n self.balance -= amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n\n def restore(self, snapshot):\n if snapshot:\n self.balance = snapshot.balance\n self._changes.append(snapshot)\n self._current = len(self._changes) - 1\n\n def undo(self):\n if self._current > 0:\n self._current -= 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def redo(self):\n if self._current + 1 < len(self._changes):\n self._current += 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def __repr__(self):\n return f'{self.__class__.__name__}(balance={self.balance})'\n\n\nif __name__ == '__main__':\n acct = BankAccount()\n acct.deposit(200)\n acct.withdraw(50)\n print(acct)\n acct.undo()\n print(acct)\n acct.redo()\n print(acct)\n", "class BankAccountSnapshot:\n \"\"\"\n A memento token class that captures a snapshot of a bank account's state.\n \"\"\"\n\n def __init__(self, balance):\n self.balance = balance\n\n\nclass BankAccount:\n\n def __init__(self, balance=0):\n self.balance = balance\n self._changes = [BankAccountSnapshot(balance=self.balance)]\n self._current = 0\n\n def deposit(self, amount):\n self.balance += amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n\n def withdraw(self, amount):\n self.balance -= amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n\n def restore(self, snapshot):\n if snapshot:\n self.balance = snapshot.balance\n self._changes.append(snapshot)\n self._current = len(self._changes) - 1\n\n def undo(self):\n if self._current > 0:\n self._current -= 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def redo(self):\n if self._current + 1 < len(self._changes):\n self._current += 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def __repr__(self):\n return f'{self.__class__.__name__}(balance={self.balance})'\n\n\n<code token>\n", "class BankAccountSnapshot:\n <docstring token>\n\n def __init__(self, balance):\n self.balance = balance\n\n\nclass BankAccount:\n\n def __init__(self, balance=0):\n self.balance = balance\n self._changes = [BankAccountSnapshot(balance=self.balance)]\n self._current = 0\n\n def deposit(self, amount):\n self.balance += amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n\n def withdraw(self, amount):\n self.balance -= amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n\n def restore(self, snapshot):\n if snapshot:\n self.balance = snapshot.balance\n self._changes.append(snapshot)\n self._current = len(self._changes) - 1\n\n def undo(self):\n if self._current > 0:\n self._current -= 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def redo(self):\n if self._current + 1 < len(self._changes):\n self._current += 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def __repr__(self):\n return f'{self.__class__.__name__}(balance={self.balance})'\n\n\n<code token>\n", "class BankAccountSnapshot:\n <docstring token>\n <function token>\n\n\nclass BankAccount:\n\n def __init__(self, balance=0):\n self.balance = balance\n self._changes = [BankAccountSnapshot(balance=self.balance)]\n self._current = 0\n\n def deposit(self, amount):\n self.balance += amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n\n def withdraw(self, amount):\n self.balance -= amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n\n def restore(self, snapshot):\n if snapshot:\n self.balance = snapshot.balance\n self._changes.append(snapshot)\n self._current = len(self._changes) - 1\n\n def undo(self):\n if self._current > 0:\n self._current -= 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def redo(self):\n if self._current + 1 < len(self._changes):\n self._current += 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def __repr__(self):\n return f'{self.__class__.__name__}(balance={self.balance})'\n\n\n<code token>\n", "<class token>\n\n\nclass BankAccount:\n\n def __init__(self, balance=0):\n self.balance = balance\n self._changes = [BankAccountSnapshot(balance=self.balance)]\n self._current = 0\n\n def deposit(self, amount):\n self.balance += amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n\n def withdraw(self, amount):\n self.balance -= amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n\n def restore(self, snapshot):\n if snapshot:\n self.balance = snapshot.balance\n self._changes.append(snapshot)\n self._current = len(self._changes) - 1\n\n def undo(self):\n if self._current > 0:\n self._current -= 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def redo(self):\n if self._current + 1 < len(self._changes):\n self._current += 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def __repr__(self):\n return f'{self.__class__.__name__}(balance={self.balance})'\n\n\n<code token>\n", "<class token>\n\n\nclass BankAccount:\n\n def __init__(self, balance=0):\n self.balance = balance\n self._changes = [BankAccountSnapshot(balance=self.balance)]\n self._current = 0\n\n def deposit(self, amount):\n self.balance += amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n\n def withdraw(self, amount):\n self.balance -= amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n <function token>\n\n def undo(self):\n if self._current > 0:\n self._current -= 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def redo(self):\n if self._current + 1 < len(self._changes):\n self._current += 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def __repr__(self):\n return f'{self.__class__.__name__}(balance={self.balance})'\n\n\n<code token>\n", "<class token>\n\n\nclass BankAccount:\n\n def __init__(self, balance=0):\n self.balance = balance\n self._changes = [BankAccountSnapshot(balance=self.balance)]\n self._current = 0\n\n def deposit(self, amount):\n self.balance += amount\n m = BankAccountSnapshot(balance=self.balance)\n self._changes.append(m)\n self._current += 1\n return m\n <function token>\n <function token>\n\n def undo(self):\n if self._current > 0:\n self._current -= 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def redo(self):\n if self._current + 1 < len(self._changes):\n self._current += 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def __repr__(self):\n return f'{self.__class__.__name__}(balance={self.balance})'\n\n\n<code token>\n", "<class token>\n\n\nclass BankAccount:\n\n def __init__(self, balance=0):\n self.balance = balance\n self._changes = [BankAccountSnapshot(balance=self.balance)]\n self._current = 0\n <function token>\n <function token>\n <function token>\n\n def undo(self):\n if self._current > 0:\n self._current -= 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def redo(self):\n if self._current + 1 < len(self._changes):\n self._current += 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n\n def __repr__(self):\n return f'{self.__class__.__name__}(balance={self.balance})'\n\n\n<code token>\n", "<class token>\n\n\nclass BankAccount:\n\n def __init__(self, balance=0):\n self.balance = balance\n self._changes = [BankAccountSnapshot(balance=self.balance)]\n self._current = 0\n <function token>\n <function token>\n <function token>\n\n def undo(self):\n if self._current > 0:\n self._current -= 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n <function token>\n\n def __repr__(self):\n return f'{self.__class__.__name__}(balance={self.balance})'\n\n\n<code token>\n", "<class token>\n\n\nclass BankAccount:\n\n def __init__(self, balance=0):\n self.balance = balance\n self._changes = [BankAccountSnapshot(balance=self.balance)]\n self._current = 0\n <function token>\n <function token>\n <function token>\n\n def undo(self):\n if self._current > 0:\n self._current -= 1\n m = self._changes[self._current]\n self.balance = m.balance\n return m\n <function token>\n <function token>\n\n\n<code token>\n", "<class token>\n\n\nclass BankAccount:\n\n def __init__(self, balance=0):\n self.balance = balance\n self._changes = [BankAccountSnapshot(balance=self.balance)]\n self._current = 0\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<code token>\n", "<class token>\n\n\nclass BankAccount:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<code token>\n", "<class token>\n<class token>\n<code token>\n" ]
false
98,986
18c77cabc6df5f39be1ef4889eb72f970cca1ea7
import socket HOST,PORT = '',8888 listen_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM) listen_socket.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1) listen_socket.bind((HOST,PORT)) listen_socket.listen(1) print("Serving HTTP on port %s ..." % PORT) while True: client_connection,client_address = listen_socket.accept() request = client_connection.recv(1024) print(request) http_response = '''\ HTTP/1.1 200 OK Hello,World! ''' # client_connection.sendall(http_response) client_connection.send(bytes(http_response,encoding = "utf8")) client_connection.close()
[ "import socket\n\nHOST,PORT = '',8888\n\nlisten_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)\nlisten_socket.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)\nlisten_socket.bind((HOST,PORT))\nlisten_socket.listen(1)\nprint(\"Serving HTTP on port %s ...\" % PORT)\nwhile True:\n client_connection,client_address = listen_socket.accept()\n request = client_connection.recv(1024)\n print(request)\n http_response = '''\\\n HTTP/1.1 200 OK\n\n Hello,World!\n\n '''\n # client_connection.sendall(http_response)\n client_connection.send(bytes(http_response,encoding = \"utf8\"))\n client_connection.close()", "import socket\nHOST, PORT = '', 8888\nlisten_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\nlisten_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\nlisten_socket.bind((HOST, PORT))\nlisten_socket.listen(1)\nprint('Serving HTTP on port %s ...' % PORT)\nwhile True:\n client_connection, client_address = listen_socket.accept()\n request = client_connection.recv(1024)\n print(request)\n http_response = \"\"\" HTTP/1.1 200 OK\n\n Hello,World!\n\n \"\"\"\n client_connection.send(bytes(http_response, encoding='utf8'))\n client_connection.close()\n", "<import token>\nHOST, PORT = '', 8888\nlisten_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\nlisten_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\nlisten_socket.bind((HOST, PORT))\nlisten_socket.listen(1)\nprint('Serving HTTP on port %s ...' % PORT)\nwhile True:\n client_connection, client_address = listen_socket.accept()\n request = client_connection.recv(1024)\n print(request)\n http_response = \"\"\" HTTP/1.1 200 OK\n\n Hello,World!\n\n \"\"\"\n client_connection.send(bytes(http_response, encoding='utf8'))\n client_connection.close()\n", "<import token>\n<assignment token>\nlisten_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\nlisten_socket.bind((HOST, PORT))\nlisten_socket.listen(1)\nprint('Serving HTTP on port %s ...' % PORT)\nwhile True:\n client_connection, client_address = listen_socket.accept()\n request = client_connection.recv(1024)\n print(request)\n http_response = \"\"\" HTTP/1.1 200 OK\n\n Hello,World!\n\n \"\"\"\n client_connection.send(bytes(http_response, encoding='utf8'))\n client_connection.close()\n", "<import token>\n<assignment token>\n<code token>\n" ]
false
98,987
e32cd902426150aa88bdab9cde90233448eb6223
# Randomly fills a grid of size height and width whose values are input by the user, # with nonnegative integers randomly generated up to an upper bound N also input the user, # and computes, for each n <= N, the number of paths consisting of all integers from 1 up to n # that cannot be extended to n+1. # Outputs the number of such paths, when at least one exists. # # Written by *** and Eric Martin for COMP9021 from random import seed, randint import sys from collections import defaultdict def display_grid(): for i in range(len(grid)): print(' ', ' '.join(str(grid[i][j]) for j in range(len(grid[0])))) class searchPoint: def __init__(self, a, b): self.x = a self.y = b self.used = set() def get_paths(): # start # # pre-process count = [0] * (max_length + 1) start = set() for y in range(height): for x in range(width): if grid[y][x] == 1: one = 1 if y != 0 and grid[y - 1][x] == 2: one = 0 if x != 0 and grid[y][x - 1] == 2: one = 0 if x != width - 1 and grid[y][x + 1] == 2: one = 0 if y != height - 1 and grid[y + 1][x] == 2: one = 0 if one == 1: count[1] += 1 elif grid[y][x] > 1: large = 0 if x != 0 and grid[y][x - 1] == grid[y][x] + 1: large = 1 elif y != height - 1 and grid[y + 1][x] == grid[y][x] + 1: large = 1 elif y != 0 and grid[y - 1][x] == grid[y][x] + 1: large = 1 elif x != width - 1 and grid[y][x + 1] == grid[y][x] + 1: large = 1 if large == 0: start.add((x, y)) # search for x, y in start: path = [] length = grid[y][x] cur_point = searchPoint(x, y) path.append(cur_point) while 1: if grid[y][x] == 1: nxt = None else: if x != 0 and grid[y][x - 1] == grid[y][x] - 1 and (x - 1, y) not in path[len(path) - 1].used: nxt = x - 1, y elif x != width - 1 and grid[y][x + 1] == grid[y][x] - 1 and (x + 1, y) not in path[len(path) - 1].used: nxt = x + 1, y elif y != 0 and grid[y - 1][x] == grid[y][x] - 1 and (x, y - 1) not in path[len(path) - 1].used: nxt = x, y - 1 elif y != height - 1 and grid[y + 1][x] == grid[y][x] - 1 and (x, y + 1) not in path[ len(path) - 1].used: nxt = x, y + 1 else: nxt = None if nxt == None: if grid[y][x] == 1: count[length] += 1 if len(path) == 1: break path.pop() x, y = path[len(path) - 1].x, path[len(path) - 1].y else: new_point = searchPoint(nxt[0], nxt[1]) path[len(path) - 1].used.add(nxt) x, y = nxt path.append(new_point) return count # # end try: for_seed, max_length, height, width = [int(i) for i in input('Enter four nonnegative integers: ').split() ] if for_seed < 0 or max_length < 0 or height < 0 or width < 0: raise ValueError except ValueError: print('Incorrect input, giving up.') sys.exit() seed(for_seed) grid = [[randint(0, max_length) for _ in range(width)] for _ in range(height)] print('Here is the grid that has been generated:') display_grid() paths = get_paths() for i in range(len(paths)): if paths[i] != 0: print(f'The number of paths from 1 to {i} is: {paths[i]}')
[ "# Randomly fills a grid of size height and width whose values are input by the user,\n# with nonnegative integers randomly generated up to an upper bound N also input the user,\n# and computes, for each n <= N, the number of paths consisting of all integers from 1 up to n\n# that cannot be extended to n+1.\n# Outputs the number of such paths, when at least one exists.\n#\n# Written by *** and Eric Martin for COMP9021\n\n\nfrom random import seed, randint\nimport sys\nfrom collections import defaultdict\n\n\ndef display_grid():\n for i in range(len(grid)):\n print(' ', ' '.join(str(grid[i][j]) for j in range(len(grid[0]))))\n\n\nclass searchPoint:\n def __init__(self, a, b):\n self.x = a\n self.y = b\n self.used = set()\n\n\ndef get_paths():\n # start\n #\n # pre-process\n count = [0] * (max_length + 1)\n start = set()\n for y in range(height):\n for x in range(width):\n if grid[y][x] == 1:\n one = 1\n if y != 0 and grid[y - 1][x] == 2:\n one = 0\n if x != 0 and grid[y][x - 1] == 2:\n one = 0\n if x != width - 1 and grid[y][x + 1] == 2:\n one = 0\n if y != height - 1 and grid[y + 1][x] == 2:\n one = 0\n if one == 1:\n count[1] += 1\n elif grid[y][x] > 1:\n large = 0\n if x != 0 and grid[y][x - 1] == grid[y][x] + 1:\n large = 1\n elif y != height - 1 and grid[y + 1][x] == grid[y][x] + 1:\n large = 1\n elif y != 0 and grid[y - 1][x] == grid[y][x] + 1:\n large = 1\n elif x != width - 1 and grid[y][x + 1] == grid[y][x] + 1:\n large = 1\n if large == 0:\n start.add((x, y))\n # search\n for x, y in start:\n path = []\n length = grid[y][x]\n cur_point = searchPoint(x, y)\n path.append(cur_point)\n while 1:\n if grid[y][x] == 1:\n nxt = None\n else:\n if x != 0 and grid[y][x - 1] == grid[y][x] - 1 and (x - 1, y) not in path[len(path) - 1].used:\n nxt = x - 1, y\n elif x != width - 1 and grid[y][x + 1] == grid[y][x] - 1 and (x + 1, y) not in path[len(path) - 1].used:\n nxt = x + 1, y\n elif y != 0 and grid[y - 1][x] == grid[y][x] - 1 and (x, y - 1) not in path[len(path) - 1].used:\n nxt = x, y - 1\n elif y != height - 1 and grid[y + 1][x] == grid[y][x] - 1 and (x, y + 1) not in path[\n len(path) - 1].used:\n nxt = x, y + 1\n else:\n nxt = None\n if nxt == None:\n if grid[y][x] == 1:\n count[length] += 1\n if len(path) == 1:\n break\n path.pop()\n x, y = path[len(path) - 1].x, path[len(path) - 1].y\n else:\n new_point = searchPoint(nxt[0], nxt[1])\n path[len(path) - 1].used.add(nxt)\n x, y = nxt\n path.append(new_point)\n return count\n\n\n\n\n\n #\n # end\n\n\ntry:\n for_seed, max_length, height, width = [int(i) for i in\n input('Enter four nonnegative integers: ').split()\n ]\n if for_seed < 0 or max_length < 0 or height < 0 or width < 0:\n raise ValueError\nexcept ValueError:\n print('Incorrect input, giving up.')\n sys.exit()\n\nseed(for_seed)\ngrid = [[randint(0, max_length) for _ in range(width)] for _ in range(height)]\nprint('Here is the grid that has been generated:')\ndisplay_grid()\npaths = get_paths()\nfor i in range(len(paths)):\n if paths[i] != 0:\n print(f'The number of paths from 1 to {i} is: {paths[i]}')\n", "from random import seed, randint\nimport sys\nfrom collections import defaultdict\n\n\ndef display_grid():\n for i in range(len(grid)):\n print(' ', ' '.join(str(grid[i][j]) for j in range(len(grid[0]))))\n\n\nclass searchPoint:\n\n def __init__(self, a, b):\n self.x = a\n self.y = b\n self.used = set()\n\n\ndef get_paths():\n count = [0] * (max_length + 1)\n start = set()\n for y in range(height):\n for x in range(width):\n if grid[y][x] == 1:\n one = 1\n if y != 0 and grid[y - 1][x] == 2:\n one = 0\n if x != 0 and grid[y][x - 1] == 2:\n one = 0\n if x != width - 1 and grid[y][x + 1] == 2:\n one = 0\n if y != height - 1 and grid[y + 1][x] == 2:\n one = 0\n if one == 1:\n count[1] += 1\n elif grid[y][x] > 1:\n large = 0\n if x != 0 and grid[y][x - 1] == grid[y][x] + 1:\n large = 1\n elif y != height - 1 and grid[y + 1][x] == grid[y][x] + 1:\n large = 1\n elif y != 0 and grid[y - 1][x] == grid[y][x] + 1:\n large = 1\n elif x != width - 1 and grid[y][x + 1] == grid[y][x] + 1:\n large = 1\n if large == 0:\n start.add((x, y))\n for x, y in start:\n path = []\n length = grid[y][x]\n cur_point = searchPoint(x, y)\n path.append(cur_point)\n while 1:\n if grid[y][x] == 1:\n nxt = None\n elif x != 0 and grid[y][x - 1] == grid[y][x] - 1 and (x - 1, y\n ) not in path[len(path) - 1].used:\n nxt = x - 1, y\n elif x != width - 1 and grid[y][x + 1] == grid[y][x] - 1 and (x +\n 1, y) not in path[len(path) - 1].used:\n nxt = x + 1, y\n elif y != 0 and grid[y - 1][x] == grid[y][x] - 1 and (x, y - 1\n ) not in path[len(path) - 1].used:\n nxt = x, y - 1\n elif y != height - 1 and grid[y + 1][x] == grid[y][x] - 1 and (x,\n y + 1) not in path[len(path) - 1].used:\n nxt = x, y + 1\n else:\n nxt = None\n if nxt == None:\n if grid[y][x] == 1:\n count[length] += 1\n if len(path) == 1:\n break\n path.pop()\n x, y = path[len(path) - 1].x, path[len(path) - 1].y\n else:\n new_point = searchPoint(nxt[0], nxt[1])\n path[len(path) - 1].used.add(nxt)\n x, y = nxt\n path.append(new_point)\n return count\n\n\ntry:\n for_seed, max_length, height, width = [int(i) for i in input(\n 'Enter four nonnegative integers: ').split()]\n if for_seed < 0 or max_length < 0 or height < 0 or width < 0:\n raise ValueError\nexcept ValueError:\n print('Incorrect input, giving up.')\n sys.exit()\nseed(for_seed)\ngrid = [[randint(0, max_length) for _ in range(width)] for _ in range(height)]\nprint('Here is the grid that has been generated:')\ndisplay_grid()\npaths = get_paths()\nfor i in range(len(paths)):\n if paths[i] != 0:\n print(f'The number of paths from 1 to {i} is: {paths[i]}')\n", "<import token>\n\n\ndef display_grid():\n for i in range(len(grid)):\n print(' ', ' '.join(str(grid[i][j]) for j in range(len(grid[0]))))\n\n\nclass searchPoint:\n\n def __init__(self, a, b):\n self.x = a\n self.y = b\n self.used = set()\n\n\ndef get_paths():\n count = [0] * (max_length + 1)\n start = set()\n for y in range(height):\n for x in range(width):\n if grid[y][x] == 1:\n one = 1\n if y != 0 and grid[y - 1][x] == 2:\n one = 0\n if x != 0 and grid[y][x - 1] == 2:\n one = 0\n if x != width - 1 and grid[y][x + 1] == 2:\n one = 0\n if y != height - 1 and grid[y + 1][x] == 2:\n one = 0\n if one == 1:\n count[1] += 1\n elif grid[y][x] > 1:\n large = 0\n if x != 0 and grid[y][x - 1] == grid[y][x] + 1:\n large = 1\n elif y != height - 1 and grid[y + 1][x] == grid[y][x] + 1:\n large = 1\n elif y != 0 and grid[y - 1][x] == grid[y][x] + 1:\n large = 1\n elif x != width - 1 and grid[y][x + 1] == grid[y][x] + 1:\n large = 1\n if large == 0:\n start.add((x, y))\n for x, y in start:\n path = []\n length = grid[y][x]\n cur_point = searchPoint(x, y)\n path.append(cur_point)\n while 1:\n if grid[y][x] == 1:\n nxt = None\n elif x != 0 and grid[y][x - 1] == grid[y][x] - 1 and (x - 1, y\n ) not in path[len(path) - 1].used:\n nxt = x - 1, y\n elif x != width - 1 and grid[y][x + 1] == grid[y][x] - 1 and (x +\n 1, y) not in path[len(path) - 1].used:\n nxt = x + 1, y\n elif y != 0 and grid[y - 1][x] == grid[y][x] - 1 and (x, y - 1\n ) not in path[len(path) - 1].used:\n nxt = x, y - 1\n elif y != height - 1 and grid[y + 1][x] == grid[y][x] - 1 and (x,\n y + 1) not in path[len(path) - 1].used:\n nxt = x, y + 1\n else:\n nxt = None\n if nxt == None:\n if grid[y][x] == 1:\n count[length] += 1\n if len(path) == 1:\n break\n path.pop()\n x, y = path[len(path) - 1].x, path[len(path) - 1].y\n else:\n new_point = searchPoint(nxt[0], nxt[1])\n path[len(path) - 1].used.add(nxt)\n x, y = nxt\n path.append(new_point)\n return count\n\n\ntry:\n for_seed, max_length, height, width = [int(i) for i in input(\n 'Enter four nonnegative integers: ').split()]\n if for_seed < 0 or max_length < 0 or height < 0 or width < 0:\n raise ValueError\nexcept ValueError:\n print('Incorrect input, giving up.')\n sys.exit()\nseed(for_seed)\ngrid = [[randint(0, max_length) for _ in range(width)] for _ in range(height)]\nprint('Here is the grid that has been generated:')\ndisplay_grid()\npaths = get_paths()\nfor i in range(len(paths)):\n if paths[i] != 0:\n print(f'The number of paths from 1 to {i} is: {paths[i]}')\n", "<import token>\n\n\ndef display_grid():\n for i in range(len(grid)):\n print(' ', ' '.join(str(grid[i][j]) for j in range(len(grid[0]))))\n\n\nclass searchPoint:\n\n def __init__(self, a, b):\n self.x = a\n self.y = b\n self.used = set()\n\n\ndef get_paths():\n count = [0] * (max_length + 1)\n start = set()\n for y in range(height):\n for x in range(width):\n if grid[y][x] == 1:\n one = 1\n if y != 0 and grid[y - 1][x] == 2:\n one = 0\n if x != 0 and grid[y][x - 1] == 2:\n one = 0\n if x != width - 1 and grid[y][x + 1] == 2:\n one = 0\n if y != height - 1 and grid[y + 1][x] == 2:\n one = 0\n if one == 1:\n count[1] += 1\n elif grid[y][x] > 1:\n large = 0\n if x != 0 and grid[y][x - 1] == grid[y][x] + 1:\n large = 1\n elif y != height - 1 and grid[y + 1][x] == grid[y][x] + 1:\n large = 1\n elif y != 0 and grid[y - 1][x] == grid[y][x] + 1:\n large = 1\n elif x != width - 1 and grid[y][x + 1] == grid[y][x] + 1:\n large = 1\n if large == 0:\n start.add((x, y))\n for x, y in start:\n path = []\n length = grid[y][x]\n cur_point = searchPoint(x, y)\n path.append(cur_point)\n while 1:\n if grid[y][x] == 1:\n nxt = None\n elif x != 0 and grid[y][x - 1] == grid[y][x] - 1 and (x - 1, y\n ) not in path[len(path) - 1].used:\n nxt = x - 1, y\n elif x != width - 1 and grid[y][x + 1] == grid[y][x] - 1 and (x +\n 1, y) not in path[len(path) - 1].used:\n nxt = x + 1, y\n elif y != 0 and grid[y - 1][x] == grid[y][x] - 1 and (x, y - 1\n ) not in path[len(path) - 1].used:\n nxt = x, y - 1\n elif y != height - 1 and grid[y + 1][x] == grid[y][x] - 1 and (x,\n y + 1) not in path[len(path) - 1].used:\n nxt = x, y + 1\n else:\n nxt = None\n if nxt == None:\n if grid[y][x] == 1:\n count[length] += 1\n if len(path) == 1:\n break\n path.pop()\n x, y = path[len(path) - 1].x, path[len(path) - 1].y\n else:\n new_point = searchPoint(nxt[0], nxt[1])\n path[len(path) - 1].used.add(nxt)\n x, y = nxt\n path.append(new_point)\n return count\n\n\ntry:\n for_seed, max_length, height, width = [int(i) for i in input(\n 'Enter four nonnegative integers: ').split()]\n if for_seed < 0 or max_length < 0 or height < 0 or width < 0:\n raise ValueError\nexcept ValueError:\n print('Incorrect input, giving up.')\n sys.exit()\nseed(for_seed)\n<assignment token>\nprint('Here is the grid that has been generated:')\ndisplay_grid()\n<assignment token>\nfor i in range(len(paths)):\n if paths[i] != 0:\n print(f'The number of paths from 1 to {i} is: {paths[i]}')\n", "<import token>\n\n\ndef display_grid():\n for i in range(len(grid)):\n print(' ', ' '.join(str(grid[i][j]) for j in range(len(grid[0]))))\n\n\nclass searchPoint:\n\n def __init__(self, a, b):\n self.x = a\n self.y = b\n self.used = set()\n\n\ndef get_paths():\n count = [0] * (max_length + 1)\n start = set()\n for y in range(height):\n for x in range(width):\n if grid[y][x] == 1:\n one = 1\n if y != 0 and grid[y - 1][x] == 2:\n one = 0\n if x != 0 and grid[y][x - 1] == 2:\n one = 0\n if x != width - 1 and grid[y][x + 1] == 2:\n one = 0\n if y != height - 1 and grid[y + 1][x] == 2:\n one = 0\n if one == 1:\n count[1] += 1\n elif grid[y][x] > 1:\n large = 0\n if x != 0 and grid[y][x - 1] == grid[y][x] + 1:\n large = 1\n elif y != height - 1 and grid[y + 1][x] == grid[y][x] + 1:\n large = 1\n elif y != 0 and grid[y - 1][x] == grid[y][x] + 1:\n large = 1\n elif x != width - 1 and grid[y][x + 1] == grid[y][x] + 1:\n large = 1\n if large == 0:\n start.add((x, y))\n for x, y in start:\n path = []\n length = grid[y][x]\n cur_point = searchPoint(x, y)\n path.append(cur_point)\n while 1:\n if grid[y][x] == 1:\n nxt = None\n elif x != 0 and grid[y][x - 1] == grid[y][x] - 1 and (x - 1, y\n ) not in path[len(path) - 1].used:\n nxt = x - 1, y\n elif x != width - 1 and grid[y][x + 1] == grid[y][x] - 1 and (x +\n 1, y) not in path[len(path) - 1].used:\n nxt = x + 1, y\n elif y != 0 and grid[y - 1][x] == grid[y][x] - 1 and (x, y - 1\n ) not in path[len(path) - 1].used:\n nxt = x, y - 1\n elif y != height - 1 and grid[y + 1][x] == grid[y][x] - 1 and (x,\n y + 1) not in path[len(path) - 1].used:\n nxt = x, y + 1\n else:\n nxt = None\n if nxt == None:\n if grid[y][x] == 1:\n count[length] += 1\n if len(path) == 1:\n break\n path.pop()\n x, y = path[len(path) - 1].x, path[len(path) - 1].y\n else:\n new_point = searchPoint(nxt[0], nxt[1])\n path[len(path) - 1].used.add(nxt)\n x, y = nxt\n path.append(new_point)\n return count\n\n\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n<function token>\n\n\nclass searchPoint:\n\n def __init__(self, a, b):\n self.x = a\n self.y = b\n self.used = set()\n\n\ndef get_paths():\n count = [0] * (max_length + 1)\n start = set()\n for y in range(height):\n for x in range(width):\n if grid[y][x] == 1:\n one = 1\n if y != 0 and grid[y - 1][x] == 2:\n one = 0\n if x != 0 and grid[y][x - 1] == 2:\n one = 0\n if x != width - 1 and grid[y][x + 1] == 2:\n one = 0\n if y != height - 1 and grid[y + 1][x] == 2:\n one = 0\n if one == 1:\n count[1] += 1\n elif grid[y][x] > 1:\n large = 0\n if x != 0 and grid[y][x - 1] == grid[y][x] + 1:\n large = 1\n elif y != height - 1 and grid[y + 1][x] == grid[y][x] + 1:\n large = 1\n elif y != 0 and grid[y - 1][x] == grid[y][x] + 1:\n large = 1\n elif x != width - 1 and grid[y][x + 1] == grid[y][x] + 1:\n large = 1\n if large == 0:\n start.add((x, y))\n for x, y in start:\n path = []\n length = grid[y][x]\n cur_point = searchPoint(x, y)\n path.append(cur_point)\n while 1:\n if grid[y][x] == 1:\n nxt = None\n elif x != 0 and grid[y][x - 1] == grid[y][x] - 1 and (x - 1, y\n ) not in path[len(path) - 1].used:\n nxt = x - 1, y\n elif x != width - 1 and grid[y][x + 1] == grid[y][x] - 1 and (x +\n 1, y) not in path[len(path) - 1].used:\n nxt = x + 1, y\n elif y != 0 and grid[y - 1][x] == grid[y][x] - 1 and (x, y - 1\n ) not in path[len(path) - 1].used:\n nxt = x, y - 1\n elif y != height - 1 and grid[y + 1][x] == grid[y][x] - 1 and (x,\n y + 1) not in path[len(path) - 1].used:\n nxt = x, y + 1\n else:\n nxt = None\n if nxt == None:\n if grid[y][x] == 1:\n count[length] += 1\n if len(path) == 1:\n break\n path.pop()\n x, y = path[len(path) - 1].x, path[len(path) - 1].y\n else:\n new_point = searchPoint(nxt[0], nxt[1])\n path[len(path) - 1].used.add(nxt)\n x, y = nxt\n path.append(new_point)\n return count\n\n\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n<function token>\n\n\nclass searchPoint:\n\n def __init__(self, a, b):\n self.x = a\n self.y = b\n self.used = set()\n\n\n<function token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n<function token>\n\n\nclass searchPoint:\n <function token>\n\n\n<function token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<import token>\n<function token>\n<class token>\n<function token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
98,988
61fae2fad3c0e1053178e6c82b7ce475792993d9
# ===== 局部变量的演示 ====== def demo01(): num = 10 print(num) num = 20 print("修改后 %d" % num) def demo02(): num = 100 print(num) # demo01() # demo02() print("over") # ===== 全局变量 ====== num2 = 30 def demo03(): # global 关键字,告诉 Python 解释器 num2 是一个全局变量 global num2 # ====== 局部变量不能修改全局变量 ====== num2 = 40 print("num2 ===> %s" % num2) def demo04(): print("num2 ===> %s" % num2) demo03() demo04() print("over")
[ "# ===== 局部变量的演示 ======\n\ndef demo01():\n \n num = 10\n\n print(num)\n\n num = 20\n\n print(\"修改后 %d\" % num)\n\n\ndef demo02():\n\n num = 100\n\n print(num)\n\n\n# demo01()\n# demo02()\n\nprint(\"over\")\n\n# ===== 全局变量 ======\n\nnum2 = 30\n\ndef demo03():\n\n # global 关键字,告诉 Python 解释器 num2 是一个全局变量\n global num2\n # ====== 局部变量不能修改全局变量 ======\n num2 = 40\n print(\"num2 ===> %s\" % num2)\n\ndef demo04():\n\n print(\"num2 ===> %s\" % num2)\n\ndemo03()\ndemo04()\n\nprint(\"over\")\n\n", "def demo01():\n num = 10\n print(num)\n num = 20\n print('修改后 %d' % num)\n\n\ndef demo02():\n num = 100\n print(num)\n\n\nprint('over')\nnum2 = 30\n\n\ndef demo03():\n global num2\n num2 = 40\n print('num2 ===> %s' % num2)\n\n\ndef demo04():\n print('num2 ===> %s' % num2)\n\n\ndemo03()\ndemo04()\nprint('over')\n", "def demo01():\n num = 10\n print(num)\n num = 20\n print('修改后 %d' % num)\n\n\ndef demo02():\n num = 100\n print(num)\n\n\nprint('over')\n<assignment token>\n\n\ndef demo03():\n global num2\n num2 = 40\n print('num2 ===> %s' % num2)\n\n\ndef demo04():\n print('num2 ===> %s' % num2)\n\n\ndemo03()\ndemo04()\nprint('over')\n", "def demo01():\n num = 10\n print(num)\n num = 20\n print('修改后 %d' % num)\n\n\ndef demo02():\n num = 100\n print(num)\n\n\n<code token>\n<assignment token>\n\n\ndef demo03():\n global num2\n num2 = 40\n print('num2 ===> %s' % num2)\n\n\ndef demo04():\n print('num2 ===> %s' % num2)\n\n\n<code token>\n", "def demo01():\n num = 10\n print(num)\n num = 20\n print('修改后 %d' % num)\n\n\ndef demo02():\n num = 100\n print(num)\n\n\n<code token>\n<assignment token>\n\n\ndef demo03():\n global num2\n num2 = 40\n print('num2 ===> %s' % num2)\n\n\n<function token>\n<code token>\n", "def demo01():\n num = 10\n print(num)\n num = 20\n print('修改后 %d' % num)\n\n\n<function token>\n<code token>\n<assignment token>\n\n\ndef demo03():\n global num2\n num2 = 40\n print('num2 ===> %s' % num2)\n\n\n<function token>\n<code token>\n", "def demo01():\n num = 10\n print(num)\n num = 20\n print('修改后 %d' % num)\n\n\n<function token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n<code token>\n", "<function token>\n<function token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n<code token>\n" ]
false
98,989
15b5b3a0a5d5726ddc22ae0d5b5949a40109c36c
''' Proportionally resizing We want to downscale the images of a veterinary blog website so all of them have the same compressed size. It's important that you do this proportionally, meaning that these are not distorted. First, you'll try it out for one image so you know what code to test later in the rest of the pictures. The image preloaded as dogs_banner. Remember that by looking at the shape of the image, you can know its width and height. Instructions 100 XP Import the module and function to resize. Set the proportional height and width so it is half the image's height size. Resize using the calculated proportional height and width. ''' SOLUTION # Import the module and function from skimage.transform import resize # Set proportional height so its half its size height = int(dogs_banner.shape[0] / 2) width = int(dogs_banner.shape[1] / 2) # Resize using the calculated proportional height and width image_resized = resize(dogs_banner, (height, width), anti_aliasing=True) # Show the original and rotated image show_image(dogs_banner, 'Original') show_image(image_resized, 'Resized image')
[ "'''\nProportionally resizing\nWe want to downscale the images of a veterinary blog website so all of them have the same compressed size.\n\nIt's important that you do this proportionally, meaning that these are not distorted.\n\nFirst, you'll try it out for one image so you know what code to test later in the rest of the pictures.\n\n\nThe image preloaded as dogs_banner.\nRemember that by looking at the shape of the image, you can know its width and height.\n\nInstructions\n100 XP\nImport the module and function to resize.\nSet the proportional height and width so it is half the image's height size.\nResize using the calculated proportional height and width.\n'''\nSOLUTION\n\n# Import the module and function\nfrom skimage.transform import resize\n\n# Set proportional height so its half its size\nheight = int(dogs_banner.shape[0] / 2)\nwidth = int(dogs_banner.shape[1] / 2)\n\n# Resize using the calculated proportional height and width\nimage_resized = resize(dogs_banner, (height, width),\n anti_aliasing=True)\n\n# Show the original and rotated image\nshow_image(dogs_banner, 'Original')\nshow_image(image_resized, 'Resized image')", "<docstring token>\nSOLUTION\nfrom skimage.transform import resize\nheight = int(dogs_banner.shape[0] / 2)\nwidth = int(dogs_banner.shape[1] / 2)\nimage_resized = resize(dogs_banner, (height, width), anti_aliasing=True)\nshow_image(dogs_banner, 'Original')\nshow_image(image_resized, 'Resized image')\n", "<docstring token>\nSOLUTION\n<import token>\nheight = int(dogs_banner.shape[0] / 2)\nwidth = int(dogs_banner.shape[1] / 2)\nimage_resized = resize(dogs_banner, (height, width), anti_aliasing=True)\nshow_image(dogs_banner, 'Original')\nshow_image(image_resized, 'Resized image')\n", "<docstring token>\nSOLUTION\n<import token>\n<assignment token>\nshow_image(dogs_banner, 'Original')\nshow_image(image_resized, 'Resized image')\n", "<docstring token>\n<code token>\n<import token>\n<assignment token>\n<code token>\n" ]
false
98,990
65d7d2acdeddc74b20db003ce49f810d079ed8ab
# -*- coding: utf-8 -*- # Generated by Django 1.10.4 on 2017-03-10 12:50 from __future__ import unicode_literals from django.db import migrations, models import jiahuaApp.models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='BatchHistory', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('insertTime', models.DateTimeField(auto_now=True, verbose_name='\u5bfc\u5165\u65e5\u671f')), ('insertFileName', models.CharField(max_length=20, verbose_name='\u5bfc\u5165\u6587\u4ef6\u540d')), ('insertResult', models.CharField(max_length=20, verbose_name='\u5bfc\u5165\u7ed3\u679c')), ('insertNum', models.CharField(max_length=20, verbose_name='\u5bfc\u5165\u8ba2\u5355\u603b\u6570')), ('uploadFile', models.FileField(upload_to='upload', verbose_name='\u6587\u4ef6\u8def\u5f84')), ], ), migrations.CreateModel( name='Cat', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('plateNum', models.CharField(max_length=20, verbose_name='\u8f66\u724c\u53f7')), ('catType', models.CharField(max_length=20, verbose_name='\u8f66\u578b')), ('fullName', models.CharField(blank=True, max_length=20, verbose_name='\u59d3\u540d')), ('phoneNumber', models.CharField(blank=True, max_length=20, verbose_name='\u7535\u8bdd')), ('catOther', models.CharField(max_length=45, verbose_name='\u8f66\u51b5\u5907\u6ce8')), ('ofterPlace', models.CharField(blank=True, max_length=60, verbose_name='\u5e38\u8fd0\u5730\u70b9')), ], ), migrations.CreateModel( name='History', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('content', models.CharField(blank=True, max_length=200, verbose_name='\u5185\u5bb9')), ('action', models.CharField(blank=True, max_length=10, verbose_name='\u52a8\u4f5c')), ('operator', models.CharField(blank=True, max_length=10, verbose_name='\u64cd\u4f5c\u5458')), ('operateTime', models.DateField(auto_now_add=True, verbose_name='\u64cd\u4f5c\u65f6\u95f4')), ], ), migrations.CreateModel( name='Location', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, max_length=200, verbose_name='\u59d3\u540d')), ('username', models.CharField(blank=True, max_length=200, verbose_name='\u7528\u6237\u540d')), ('latitude', models.CharField(blank=True, max_length=200, verbose_name='\u7eac\u5ea6')), ('longitude', models.CharField(blank=True, max_length=10, verbose_name='\u7ecf\u5ea6')), ('precision', models.CharField(blank=True, max_length=10, verbose_name='\u7cbe\u786e\u5ea6')), ('insertTime', models.DateTimeField(auto_now_add=True, verbose_name='\u4e0a\u62a5\u65f6\u95f4')), ], ), migrations.CreateModel( name='OrderForm', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('catNum', models.IntegerField(blank=True, null=True, verbose_name='\u8f66\u6b21')), ('tranNum', models.IntegerField(blank=True, null=True, verbose_name='\u8d9f\u6570')), ('placeNum', models.IntegerField(blank=True, null=True, verbose_name='\u4efb\u52a1\u6e05\u5355\u6570')), ('getGoodsDate', models.DateField(blank=True, null=True, verbose_name='\u53d6\u8d27\u65e5\u671f')), ('getGoodsTime', models.TimeField(blank=True, null=True, verbose_name='\u8282\u70b9\u65f6\u95f4')), ('sendName', models.CharField(max_length=20, verbose_name='\u53d1\u8d27\u65b9\u59d3\u540d')), ('sendPhoneNumber', models.CharField(blank=True, max_length=20, verbose_name='\u53d1\u8d27\u65b9\u7535\u8bdd')), ('sendAddress', models.CharField(blank=True, max_length=20, verbose_name='\u53d1\u8d27\u65b9\u5730\u5740')), ('sendCode', models.CharField(blank=True, max_length=20, verbose_name='\u51fa\u8d27\u5730\u5b8c\u6574\u7801')), ('receiveName', models.CharField(max_length=20, verbose_name='\u6536\u8d27\u65b9\u59d3\u540d')), ('receivePhoneNumber', models.CharField(blank=True, max_length=20, verbose_name='\u6536\u8d27\u65b9\u7535\u8bdd')), ('receiveAddress', models.CharField(blank=True, max_length=20, verbose_name='\u6536\u8d27\u65b9\u5730\u5740')), ('receiveCode', models.CharField(blank=True, max_length=20, verbose_name='\u4ea4\u8d27\u5730\u7f16\u7801')), ('fe', models.IntegerField(blank=True, default=0, verbose_name='\u94c1\u67b6')), ('box', models.IntegerField(blank=True, default=0, verbose_name='\u80f6\u7bb1')), ('lastDate', models.DateField(blank=True, null=True, verbose_name='\u7eb3\u671f')), ('lastTime', models.TimeField(blank=True, null=True, verbose_name='\u7eb3\u65f6')), ('runType', models.CharField(max_length=20, null=True, verbose_name='\u8fd0\u4f5c\u65b9\u5f0f')), ('plateNum', models.CharField(blank=True, max_length=20, null=True, verbose_name='\u8f66\u53f7')), ('createTime', models.DateField(db_index=True, verbose_name='\u521b\u5efa\u65f6\u95f4')), ('receiveFormTime', models.DateTimeField(blank=True, null=True, verbose_name='\u63a5\u5355\u65f6\u95f4')), ('receiveFormPerson', models.CharField(blank=True, max_length=20, verbose_name='\u63a5\u5355\u4eba')), ('receiveGoodsTime', models.DateTimeField(blank=True, null=True, verbose_name='\u88c5\u8d27\u65f6\u95f4')), ('receiveGoodsPerson', models.CharField(blank=True, max_length=20, verbose_name='\u88c5\u8d27\u4eba')), ('acceptTime', models.DateTimeField(blank=True, null=True, verbose_name='\u7b7e\u6536\u65f6\u95f4')), ('acceptPerson', models.CharField(blank=True, max_length=20, verbose_name='\u7b7e\u6536\u4eba')), ('problem', models.IntegerField(default=0, verbose_name='\u5f02\u5e38')), ('other', models.CharField(blank=True, max_length=20, verbose_name='\u5907\u6ce8')), ('stateType', models.IntegerField(default=0, verbose_name='\u8fd0\u5355\u72b6\u6001')), ('getStartTime', models.DateTimeField(blank=True, null=True, verbose_name='\u53d6\u8d27\u5f00\u59cb\u65f6\u95f4')), ('getEndTime', models.DateTimeField(blank=True, null=True, verbose_name='\u53d6\u8d27\u7ed3\u675f\u65f6\u95f4')), ('getTime', models.IntegerField(blank=True, null=True, verbose_name='\u53d6\u8d27\u65f6\u957f')), ('sendStartTime', models.DateTimeField(blank=True, null=True, verbose_name='\u6536\u8d27\u5f00\u59cb\u65f6\u95f4')), ('sendEndTime', models.DateTimeField(blank=True, null=True, verbose_name='\u6536\u8d27\u7ed3\u675f\u65f6\u95f4')), ('sendTime', models.IntegerField(blank=True, null=True, verbose_name='\u6536\u8d27\u65f6\u957f')), ('operator', models.CharField(blank=True, max_length=20, verbose_name='\u64cd\u4f5c\u5458')), ], bases=(models.Model, jiahuaApp.models.BaseModel), ), migrations.CreateModel( name='ReceiveClient', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=20, verbose_name='\u59d3\u540d')), ('phoneNumber', models.CharField(blank=True, max_length=20, verbose_name='\u8054\u7cfb\u7535\u8bdd')), ('address', models.CharField(blank=True, max_length=20, verbose_name='\u8054\u7cfb\u5730\u5740')), ('receiveCode', models.CharField(max_length=20, verbose_name='\u4ea4\u8d27\u5730\u7f16\u7801')), ], ), migrations.CreateModel( name='SendClient', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=20, verbose_name='\u59d3\u540d')), ('phoneNumber', models.CharField(blank=True, max_length=20, verbose_name='\u8054\u7cfb\u7535\u8bdd')), ('address', models.CharField(blank=True, max_length=20, verbose_name='\u8054\u7cfb\u5730\u5740')), ('sendCode', models.CharField(max_length=20, verbose_name='\u51fa\u8d27\u5730\u5b8c\u6574\u7801')), ], ), ]
[ "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.4 on 2017-03-10 12:50\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport jiahuaApp.models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='BatchHistory',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('insertTime', models.DateTimeField(auto_now=True, verbose_name='\\u5bfc\\u5165\\u65e5\\u671f')),\n ('insertFileName', models.CharField(max_length=20, verbose_name='\\u5bfc\\u5165\\u6587\\u4ef6\\u540d')),\n ('insertResult', models.CharField(max_length=20, verbose_name='\\u5bfc\\u5165\\u7ed3\\u679c')),\n ('insertNum', models.CharField(max_length=20, verbose_name='\\u5bfc\\u5165\\u8ba2\\u5355\\u603b\\u6570')),\n ('uploadFile', models.FileField(upload_to='upload', verbose_name='\\u6587\\u4ef6\\u8def\\u5f84')),\n ],\n ),\n migrations.CreateModel(\n name='Cat',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('plateNum', models.CharField(max_length=20, verbose_name='\\u8f66\\u724c\\u53f7')),\n ('catType', models.CharField(max_length=20, verbose_name='\\u8f66\\u578b')),\n ('fullName', models.CharField(blank=True, max_length=20, verbose_name='\\u59d3\\u540d')),\n ('phoneNumber', models.CharField(blank=True, max_length=20, verbose_name='\\u7535\\u8bdd')),\n ('catOther', models.CharField(max_length=45, verbose_name='\\u8f66\\u51b5\\u5907\\u6ce8')),\n ('ofterPlace', models.CharField(blank=True, max_length=60, verbose_name='\\u5e38\\u8fd0\\u5730\\u70b9')),\n ],\n ),\n migrations.CreateModel(\n name='History',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('content', models.CharField(blank=True, max_length=200, verbose_name='\\u5185\\u5bb9')),\n ('action', models.CharField(blank=True, max_length=10, verbose_name='\\u52a8\\u4f5c')),\n ('operator', models.CharField(blank=True, max_length=10, verbose_name='\\u64cd\\u4f5c\\u5458')),\n ('operateTime', models.DateField(auto_now_add=True, verbose_name='\\u64cd\\u4f5c\\u65f6\\u95f4')),\n ],\n ),\n migrations.CreateModel(\n name='Location',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(blank=True, max_length=200, verbose_name='\\u59d3\\u540d')),\n ('username', models.CharField(blank=True, max_length=200, verbose_name='\\u7528\\u6237\\u540d')),\n ('latitude', models.CharField(blank=True, max_length=200, verbose_name='\\u7eac\\u5ea6')),\n ('longitude', models.CharField(blank=True, max_length=10, verbose_name='\\u7ecf\\u5ea6')),\n ('precision', models.CharField(blank=True, max_length=10, verbose_name='\\u7cbe\\u786e\\u5ea6')),\n ('insertTime', models.DateTimeField(auto_now_add=True, verbose_name='\\u4e0a\\u62a5\\u65f6\\u95f4')),\n ],\n ),\n migrations.CreateModel(\n name='OrderForm',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('catNum', models.IntegerField(blank=True, null=True, verbose_name='\\u8f66\\u6b21')),\n ('tranNum', models.IntegerField(blank=True, null=True, verbose_name='\\u8d9f\\u6570')),\n ('placeNum', models.IntegerField(blank=True, null=True, verbose_name='\\u4efb\\u52a1\\u6e05\\u5355\\u6570')),\n ('getGoodsDate', models.DateField(blank=True, null=True, verbose_name='\\u53d6\\u8d27\\u65e5\\u671f')),\n ('getGoodsTime', models.TimeField(blank=True, null=True, verbose_name='\\u8282\\u70b9\\u65f6\\u95f4')),\n ('sendName', models.CharField(max_length=20, verbose_name='\\u53d1\\u8d27\\u65b9\\u59d3\\u540d')),\n ('sendPhoneNumber', models.CharField(blank=True, max_length=20, verbose_name='\\u53d1\\u8d27\\u65b9\\u7535\\u8bdd')),\n ('sendAddress', models.CharField(blank=True, max_length=20, verbose_name='\\u53d1\\u8d27\\u65b9\\u5730\\u5740')),\n ('sendCode', models.CharField(blank=True, max_length=20, verbose_name='\\u51fa\\u8d27\\u5730\\u5b8c\\u6574\\u7801')),\n ('receiveName', models.CharField(max_length=20, verbose_name='\\u6536\\u8d27\\u65b9\\u59d3\\u540d')),\n ('receivePhoneNumber', models.CharField(blank=True, max_length=20, verbose_name='\\u6536\\u8d27\\u65b9\\u7535\\u8bdd')),\n ('receiveAddress', models.CharField(blank=True, max_length=20, verbose_name='\\u6536\\u8d27\\u65b9\\u5730\\u5740')),\n ('receiveCode', models.CharField(blank=True, max_length=20, verbose_name='\\u4ea4\\u8d27\\u5730\\u7f16\\u7801')),\n ('fe', models.IntegerField(blank=True, default=0, verbose_name='\\u94c1\\u67b6')),\n ('box', models.IntegerField(blank=True, default=0, verbose_name='\\u80f6\\u7bb1')),\n ('lastDate', models.DateField(blank=True, null=True, verbose_name='\\u7eb3\\u671f')),\n ('lastTime', models.TimeField(blank=True, null=True, verbose_name='\\u7eb3\\u65f6')),\n ('runType', models.CharField(max_length=20, null=True, verbose_name='\\u8fd0\\u4f5c\\u65b9\\u5f0f')),\n ('plateNum', models.CharField(blank=True, max_length=20, null=True, verbose_name='\\u8f66\\u53f7')),\n ('createTime', models.DateField(db_index=True, verbose_name='\\u521b\\u5efa\\u65f6\\u95f4')),\n ('receiveFormTime', models.DateTimeField(blank=True, null=True, verbose_name='\\u63a5\\u5355\\u65f6\\u95f4')),\n ('receiveFormPerson', models.CharField(blank=True, max_length=20, verbose_name='\\u63a5\\u5355\\u4eba')),\n ('receiveGoodsTime', models.DateTimeField(blank=True, null=True, verbose_name='\\u88c5\\u8d27\\u65f6\\u95f4')),\n ('receiveGoodsPerson', models.CharField(blank=True, max_length=20, verbose_name='\\u88c5\\u8d27\\u4eba')),\n ('acceptTime', models.DateTimeField(blank=True, null=True, verbose_name='\\u7b7e\\u6536\\u65f6\\u95f4')),\n ('acceptPerson', models.CharField(blank=True, max_length=20, verbose_name='\\u7b7e\\u6536\\u4eba')),\n ('problem', models.IntegerField(default=0, verbose_name='\\u5f02\\u5e38')),\n ('other', models.CharField(blank=True, max_length=20, verbose_name='\\u5907\\u6ce8')),\n ('stateType', models.IntegerField(default=0, verbose_name='\\u8fd0\\u5355\\u72b6\\u6001')),\n ('getStartTime', models.DateTimeField(blank=True, null=True, verbose_name='\\u53d6\\u8d27\\u5f00\\u59cb\\u65f6\\u95f4')),\n ('getEndTime', models.DateTimeField(blank=True, null=True, verbose_name='\\u53d6\\u8d27\\u7ed3\\u675f\\u65f6\\u95f4')),\n ('getTime', models.IntegerField(blank=True, null=True, verbose_name='\\u53d6\\u8d27\\u65f6\\u957f')),\n ('sendStartTime', models.DateTimeField(blank=True, null=True, verbose_name='\\u6536\\u8d27\\u5f00\\u59cb\\u65f6\\u95f4')),\n ('sendEndTime', models.DateTimeField(blank=True, null=True, verbose_name='\\u6536\\u8d27\\u7ed3\\u675f\\u65f6\\u95f4')),\n ('sendTime', models.IntegerField(blank=True, null=True, verbose_name='\\u6536\\u8d27\\u65f6\\u957f')),\n ('operator', models.CharField(blank=True, max_length=20, verbose_name='\\u64cd\\u4f5c\\u5458')),\n ],\n bases=(models.Model, jiahuaApp.models.BaseModel),\n ),\n migrations.CreateModel(\n name='ReceiveClient',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=20, verbose_name='\\u59d3\\u540d')),\n ('phoneNumber', models.CharField(blank=True, max_length=20, verbose_name='\\u8054\\u7cfb\\u7535\\u8bdd')),\n ('address', models.CharField(blank=True, max_length=20, verbose_name='\\u8054\\u7cfb\\u5730\\u5740')),\n ('receiveCode', models.CharField(max_length=20, verbose_name='\\u4ea4\\u8d27\\u5730\\u7f16\\u7801')),\n ],\n ),\n migrations.CreateModel(\n name='SendClient',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=20, verbose_name='\\u59d3\\u540d')),\n ('phoneNumber', models.CharField(blank=True, max_length=20, verbose_name='\\u8054\\u7cfb\\u7535\\u8bdd')),\n ('address', models.CharField(blank=True, max_length=20, verbose_name='\\u8054\\u7cfb\\u5730\\u5740')),\n ('sendCode', models.CharField(max_length=20, verbose_name='\\u51fa\\u8d27\\u5730\\u5b8c\\u6574\\u7801')),\n ],\n ),\n ]\n", "from __future__ import unicode_literals\nfrom django.db import migrations, models\nimport jiahuaApp.models\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='BatchHistory', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('insertTime', models.DateTimeField(\n auto_now=True, verbose_name='导入日期')), ('insertFileName', models.\n CharField(max_length=20, verbose_name='导入文件名')), ('insertResult',\n models.CharField(max_length=20, verbose_name='导入结果')), ('insertNum',\n models.CharField(max_length=20, verbose_name='导入订单总数')), (\n 'uploadFile', models.FileField(upload_to='upload', verbose_name=\n '文件路径'))]), migrations.CreateModel(name='Cat', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('plateNum', models.CharField(\n max_length=20, verbose_name='车牌号')), ('catType', models.CharField(\n max_length=20, verbose_name='车型')), ('fullName', models.CharField(\n blank=True, max_length=20, verbose_name='姓名')), ('phoneNumber',\n models.CharField(blank=True, max_length=20, verbose_name='电话')), (\n 'catOther', models.CharField(max_length=45, verbose_name='车况备注')),\n ('ofterPlace', models.CharField(blank=True, max_length=60,\n verbose_name='常运地点'))]), migrations.CreateModel(name='History',\n fields=[('id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('content', models.CharField(\n blank=True, max_length=200, verbose_name='内容')), ('action', models.\n CharField(blank=True, max_length=10, verbose_name='动作')), (\n 'operator', models.CharField(blank=True, max_length=10,\n verbose_name='操作员')), ('operateTime', models.DateField(auto_now_add\n =True, verbose_name='操作时间'))]), migrations.CreateModel(name=\n 'Location', fields=[('id', models.AutoField(auto_created=True,\n primary_key=True, serialize=False, verbose_name='ID')), ('name',\n models.CharField(blank=True, max_length=200, verbose_name='姓名')), (\n 'username', models.CharField(blank=True, max_length=200,\n verbose_name='用户名')), ('latitude', models.CharField(blank=True,\n max_length=200, verbose_name='纬度')), ('longitude', models.CharField\n (blank=True, max_length=10, verbose_name='经度')), ('precision',\n models.CharField(blank=True, max_length=10, verbose_name='精确度')), (\n 'insertTime', models.DateTimeField(auto_now_add=True, verbose_name=\n '上报时间'))]), migrations.CreateModel(name='OrderForm', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('catNum', models.IntegerField(blank=\n True, null=True, verbose_name='车次')), ('tranNum', models.\n IntegerField(blank=True, null=True, verbose_name='趟数')), (\n 'placeNum', models.IntegerField(blank=True, null=True, verbose_name\n ='任务清单数')), ('getGoodsDate', models.DateField(blank=True, null=True,\n verbose_name='取货日期')), ('getGoodsTime', models.TimeField(blank=True,\n null=True, verbose_name='节点时间')), ('sendName', models.CharField(\n max_length=20, verbose_name='发货方姓名')), ('sendPhoneNumber', models.\n CharField(blank=True, max_length=20, verbose_name='发货方电话')), (\n 'sendAddress', models.CharField(blank=True, max_length=20,\n verbose_name='发货方地址')), ('sendCode', models.CharField(blank=True,\n max_length=20, verbose_name='出货地完整码')), ('receiveName', models.\n CharField(max_length=20, verbose_name='收货方姓名')), (\n 'receivePhoneNumber', models.CharField(blank=True, max_length=20,\n verbose_name='收货方电话')), ('receiveAddress', models.CharField(blank=\n True, max_length=20, verbose_name='收货方地址')), ('receiveCode', models\n .CharField(blank=True, max_length=20, verbose_name='交货地编码')), ('fe',\n models.IntegerField(blank=True, default=0, verbose_name='铁架')), (\n 'box', models.IntegerField(blank=True, default=0, verbose_name='胶箱'\n )), ('lastDate', models.DateField(blank=True, null=True,\n verbose_name='纳期')), ('lastTime', models.TimeField(blank=True, null\n =True, verbose_name='纳时')), ('runType', models.CharField(max_length\n =20, null=True, verbose_name='运作方式')), ('plateNum', models.\n CharField(blank=True, max_length=20, null=True, verbose_name='车号')),\n ('createTime', models.DateField(db_index=True, verbose_name='创建时间')\n ), ('receiveFormTime', models.DateTimeField(blank=True, null=True,\n verbose_name='接单时间')), ('receiveFormPerson', models.CharField(blank\n =True, max_length=20, verbose_name='接单人')), ('receiveGoodsTime',\n models.DateTimeField(blank=True, null=True, verbose_name='装货时间')),\n ('receiveGoodsPerson', models.CharField(blank=True, max_length=20,\n verbose_name='装货人')), ('acceptTime', models.DateTimeField(blank=\n True, null=True, verbose_name='签收时间')), ('acceptPerson', models.\n CharField(blank=True, max_length=20, verbose_name='签收人')), (\n 'problem', models.IntegerField(default=0, verbose_name='异常')), (\n 'other', models.CharField(blank=True, max_length=20, verbose_name=\n '备注')), ('stateType', models.IntegerField(default=0, verbose_name=\n '运单状态')), ('getStartTime', models.DateTimeField(blank=True, null=\n True, verbose_name='取货开始时间')), ('getEndTime', models.DateTimeField(\n blank=True, null=True, verbose_name='取货结束时间')), ('getTime', models.\n IntegerField(blank=True, null=True, verbose_name='取货时长')), (\n 'sendStartTime', models.DateTimeField(blank=True, null=True,\n verbose_name='收货开始时间')), ('sendEndTime', models.DateTimeField(blank\n =True, null=True, verbose_name='收货结束时间')), ('sendTime', models.\n IntegerField(blank=True, null=True, verbose_name='收货时长')), (\n 'operator', models.CharField(blank=True, max_length=20,\n verbose_name='操作员'))], bases=(models.Model, jiahuaApp.models.\n BaseModel)), migrations.CreateModel(name='ReceiveClient', fields=[(\n 'id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('name', models.CharField(\n max_length=20, verbose_name='姓名')), ('phoneNumber', models.\n CharField(blank=True, max_length=20, verbose_name='联系电话')), (\n 'address', models.CharField(blank=True, max_length=20, verbose_name\n ='联系地址')), ('receiveCode', models.CharField(max_length=20,\n verbose_name='交货地编码'))]), migrations.CreateModel(name='SendClient',\n fields=[('id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('name', models.CharField(\n max_length=20, verbose_name='姓名')), ('phoneNumber', models.\n CharField(blank=True, max_length=20, verbose_name='联系电话')), (\n 'address', models.CharField(blank=True, max_length=20, verbose_name\n ='联系地址')), ('sendCode', models.CharField(max_length=20,\n verbose_name='出货地完整码'))])]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='BatchHistory', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('insertTime', models.DateTimeField(\n auto_now=True, verbose_name='导入日期')), ('insertFileName', models.\n CharField(max_length=20, verbose_name='导入文件名')), ('insertResult',\n models.CharField(max_length=20, verbose_name='导入结果')), ('insertNum',\n models.CharField(max_length=20, verbose_name='导入订单总数')), (\n 'uploadFile', models.FileField(upload_to='upload', verbose_name=\n '文件路径'))]), migrations.CreateModel(name='Cat', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('plateNum', models.CharField(\n max_length=20, verbose_name='车牌号')), ('catType', models.CharField(\n max_length=20, verbose_name='车型')), ('fullName', models.CharField(\n blank=True, max_length=20, verbose_name='姓名')), ('phoneNumber',\n models.CharField(blank=True, max_length=20, verbose_name='电话')), (\n 'catOther', models.CharField(max_length=45, verbose_name='车况备注')),\n ('ofterPlace', models.CharField(blank=True, max_length=60,\n verbose_name='常运地点'))]), migrations.CreateModel(name='History',\n fields=[('id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('content', models.CharField(\n blank=True, max_length=200, verbose_name='内容')), ('action', models.\n CharField(blank=True, max_length=10, verbose_name='动作')), (\n 'operator', models.CharField(blank=True, max_length=10,\n verbose_name='操作员')), ('operateTime', models.DateField(auto_now_add\n =True, verbose_name='操作时间'))]), migrations.CreateModel(name=\n 'Location', fields=[('id', models.AutoField(auto_created=True,\n primary_key=True, serialize=False, verbose_name='ID')), ('name',\n models.CharField(blank=True, max_length=200, verbose_name='姓名')), (\n 'username', models.CharField(blank=True, max_length=200,\n verbose_name='用户名')), ('latitude', models.CharField(blank=True,\n max_length=200, verbose_name='纬度')), ('longitude', models.CharField\n (blank=True, max_length=10, verbose_name='经度')), ('precision',\n models.CharField(blank=True, max_length=10, verbose_name='精确度')), (\n 'insertTime', models.DateTimeField(auto_now_add=True, verbose_name=\n '上报时间'))]), migrations.CreateModel(name='OrderForm', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('catNum', models.IntegerField(blank=\n True, null=True, verbose_name='车次')), ('tranNum', models.\n IntegerField(blank=True, null=True, verbose_name='趟数')), (\n 'placeNum', models.IntegerField(blank=True, null=True, verbose_name\n ='任务清单数')), ('getGoodsDate', models.DateField(blank=True, null=True,\n verbose_name='取货日期')), ('getGoodsTime', models.TimeField(blank=True,\n null=True, verbose_name='节点时间')), ('sendName', models.CharField(\n max_length=20, verbose_name='发货方姓名')), ('sendPhoneNumber', models.\n CharField(blank=True, max_length=20, verbose_name='发货方电话')), (\n 'sendAddress', models.CharField(blank=True, max_length=20,\n verbose_name='发货方地址')), ('sendCode', models.CharField(blank=True,\n max_length=20, verbose_name='出货地完整码')), ('receiveName', models.\n CharField(max_length=20, verbose_name='收货方姓名')), (\n 'receivePhoneNumber', models.CharField(blank=True, max_length=20,\n verbose_name='收货方电话')), ('receiveAddress', models.CharField(blank=\n True, max_length=20, verbose_name='收货方地址')), ('receiveCode', models\n .CharField(blank=True, max_length=20, verbose_name='交货地编码')), ('fe',\n models.IntegerField(blank=True, default=0, verbose_name='铁架')), (\n 'box', models.IntegerField(blank=True, default=0, verbose_name='胶箱'\n )), ('lastDate', models.DateField(blank=True, null=True,\n verbose_name='纳期')), ('lastTime', models.TimeField(blank=True, null\n =True, verbose_name='纳时')), ('runType', models.CharField(max_length\n =20, null=True, verbose_name='运作方式')), ('plateNum', models.\n CharField(blank=True, max_length=20, null=True, verbose_name='车号')),\n ('createTime', models.DateField(db_index=True, verbose_name='创建时间')\n ), ('receiveFormTime', models.DateTimeField(blank=True, null=True,\n verbose_name='接单时间')), ('receiveFormPerson', models.CharField(blank\n =True, max_length=20, verbose_name='接单人')), ('receiveGoodsTime',\n models.DateTimeField(blank=True, null=True, verbose_name='装货时间')),\n ('receiveGoodsPerson', models.CharField(blank=True, max_length=20,\n verbose_name='装货人')), ('acceptTime', models.DateTimeField(blank=\n True, null=True, verbose_name='签收时间')), ('acceptPerson', models.\n CharField(blank=True, max_length=20, verbose_name='签收人')), (\n 'problem', models.IntegerField(default=0, verbose_name='异常')), (\n 'other', models.CharField(blank=True, max_length=20, verbose_name=\n '备注')), ('stateType', models.IntegerField(default=0, verbose_name=\n '运单状态')), ('getStartTime', models.DateTimeField(blank=True, null=\n True, verbose_name='取货开始时间')), ('getEndTime', models.DateTimeField(\n blank=True, null=True, verbose_name='取货结束时间')), ('getTime', models.\n IntegerField(blank=True, null=True, verbose_name='取货时长')), (\n 'sendStartTime', models.DateTimeField(blank=True, null=True,\n verbose_name='收货开始时间')), ('sendEndTime', models.DateTimeField(blank\n =True, null=True, verbose_name='收货结束时间')), ('sendTime', models.\n IntegerField(blank=True, null=True, verbose_name='收货时长')), (\n 'operator', models.CharField(blank=True, max_length=20,\n verbose_name='操作员'))], bases=(models.Model, jiahuaApp.models.\n BaseModel)), migrations.CreateModel(name='ReceiveClient', fields=[(\n 'id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('name', models.CharField(\n max_length=20, verbose_name='姓名')), ('phoneNumber', models.\n CharField(blank=True, max_length=20, verbose_name='联系电话')), (\n 'address', models.CharField(blank=True, max_length=20, verbose_name\n ='联系地址')), ('receiveCode', models.CharField(max_length=20,\n verbose_name='交货地编码'))]), migrations.CreateModel(name='SendClient',\n fields=[('id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('name', models.CharField(\n max_length=20, verbose_name='姓名')), ('phoneNumber', models.\n CharField(blank=True, max_length=20, verbose_name='联系电话')), (\n 'address', models.CharField(blank=True, max_length=20, verbose_name\n ='联系地址')), ('sendCode', models.CharField(max_length=20,\n verbose_name='出货地完整码'))])]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n" ]
false
98,991
30deb3821401be410bb6c5f277b6d633fd801cf6
from falcon import HTTP_200, COMBINED_METHODS, __version__ as FALCONVERSION from falcon_caching.options import CacheEvictionStrategy, HttpMethods import logging import re import msgpack from typing import TYPE_CHECKING, Any, Dict, Tuple if TYPE_CHECKING: from falcon_caching.cache import Cache logger = logging.getLogger(__name__) _DECORABLE_METHOD_NAME = re.compile(r'^on_({})(_\w+)?$'.format( '|'.join(method.lower() for method in COMBINED_METHODS))) # what is the Falcon main version (eg 2 or 3, etc) FALCONVERSION_MAIN = int(FALCONVERSION.split('.')[0]) class Middleware: """ It integrates a cache object with Falcon by turning it into a Falcon Middleware """ def __init__(self, cache: 'Cache', config: Dict[str, Any]) -> None: self.cache = cache self.cache_config = config def process_resource(self, req, resp, resource, params): """ Determine if the given request is marked for caching and if yes, then look it up in the cache and if found, then return the cached value """ # Step 1: for 'rest-based' and 'rest&time-based' eviction strategies the # POST/PATCH/PUT/DELETE calls are never cached, they should never be # loaded from cache as they must always execute, # so for those we don't need to try to search the cache if self.cache_config['CACHE_EVICTION_STRATEGY'] in [CacheEvictionStrategy.rest_based, CacheEvictionStrategy.rest_and_time_based] \ and req.method.upper() in [HttpMethods.POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]: return # Step 2: determine whether the given responder has caching setup # and if not then short-circuit to save on the lookup of request in the cache # as anyhow this request was not marked to be cached # find out which responder ("on_..." method) is going to be used to process this request responder = None for _method in dir(resource): if _DECORABLE_METHOD_NAME.match(_method) and _method[3:].upper() == req.method.upper(): responder = _method break if responder: # get the name of the responder wrapper, which for cached objects is 'cache_wrap' # see the "Cache.cache" decorator in cache.py responder_wrapper_name = getattr(getattr(resource, responder), '__name__') # is the given method (or its class) decorated by the cache_wrap being the topmost decorator? if responder_wrapper_name == 'cache_wrap': logger.debug(" This endpoint is decorated by 'cache' being the topmost decorator.") else: # 'cache_wrap' is not the topmost decorator - let's check whether 'cache' is # any of the other decorator on this method (not the topmost): # this requires the use of @register(decor1, decor2) as the decorator if hasattr(getattr(resource, responder), '_decorators') and \ 'cache' in [d._decorator_name for d in getattr(resource, responder)._decorators if hasattr(d, '_decorator_name')]: logger.debug(" This endpoint is decorated by 'cache', but it is NOT the topmost decorator.") else: # no cache was requested on this responder as no decorator at all logger.debug(" No 'cache' was requested for this endpoint.") return # Step 3: look up the record in the cache key = self.generate_cache_key(req) data = self.cache.get(key) if data: # if the CACHE_CONTENT_TYPE_JSON_ONLY = True, then we are NOT # caching the response's Content-Type, only its body if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']: if FALCONVERSION_MAIN < 3: resp.body = self.deserialize(data) else: resp.text = self.deserialize(data) else: if FALCONVERSION_MAIN < 3: resp.content_type, resp.body = self.deserialize(data) else: resp.content_type, resp.text = self.deserialize(data) resp.status = HTTP_200 req.context.cached = True # Short-circuit any further processing to skip any remaining # 'process_request' and 'process_resource' methods, as well as # the 'responder' method that the request would have been routed to. # However, any 'process_response' middleware methods will still be called. resp.complete = True def process_response(self, req, resp, resource, req_succeeded): """ Cache the response if this request qualifies and has not been cached yet or for rest-based and rest-and-time-based evict the record from the cache if the request method is POST/PATCH/PUT or DELETE """ # Step 1: for 'rest-based' and 'rest&time-based' eviction strategies the # POST/PATCH/PUT/DELETE calls are never cached and even more they # invalidate the record cached by the GET method if self.cache_config['CACHE_EVICTION_STRATEGY'] in [CacheEvictionStrategy.rest_based, CacheEvictionStrategy.rest_and_time_based] \ and req.method.upper() in [HttpMethods.POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]: # get the cache key created by the GET method (assuming there was one) key = self.generate_cache_key(req, method='GET') self.cache.delete(key) return # Step 2: if it is marked to be cached, but has not yet been cached # then we cache it if hasattr(req.context, 'cache') and req.context.cache \ and (not hasattr(req.context, 'cached') or not req.context.cached): key = self.generate_cache_key(req) value = self.serialize(req, resp, resource) # for the REST-based strategy there is no timeout, the cached record never expires if self.cache_config['CACHE_EVICTION_STRATEGY'] in [CacheEvictionStrategy.rest_based]: # timeout 0 - never expires timeout = 0 else: # for the time-based and rest-and-time-based eviction strategy the # cached record expires timeout = req.context.cache_timeout if hasattr(req.context, 'cache_timeout') else 600 self.cache.set(key, value, timeout=timeout) @staticmethod def generate_cache_key(req, method: str = None) -> str: """ Generate the cache key from the request using the path and the method """ path = req.path if path.endswith('/'): path = path[:-1] if not method: method = req.method return f'{path}:{method.upper()}' def serialize(self, req, resp, resource) -> bytes: """ Serializes the response, so it can be cached. If CACHE_CONTENT_TYPE_JSON_ONLY = False (default), then we need to keep the response Content-Type header, so we need to serialize the response body with the content type with msgpack, which takes away performance. For this reason the user can set CACHE_CONTENT_TYPE_JSON_ONLY = True, in which case the response Content-Type is NOT cached, so it will be the default - which is application/json. That should be fine for most REST APIs and should bring a nice performance bump by avoiding the msgpack serialization. """ if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']: if FALCONVERSION_MAIN < 3: return resp.body else: return resp.text else: if FALCONVERSION_MAIN < 3: return msgpack.packb([resp.content_type, resp.body], use_bin_type=True) else: return msgpack.packb([resp.content_type, resp.text], use_bin_type=True) def deserialize(self, data: bytes) -> Tuple[str, Any]: """ Deserializes the cached record into the response Body or the Content-Type and Body """ if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']: return data else: return msgpack.unpackb(data, raw=False)
[ "from falcon import HTTP_200, COMBINED_METHODS, __version__ as FALCONVERSION\nfrom falcon_caching.options import CacheEvictionStrategy, HttpMethods\nimport logging\nimport re\nimport msgpack\nfrom typing import TYPE_CHECKING, Any, Dict, Tuple\n\nif TYPE_CHECKING:\n from falcon_caching.cache import Cache\n\nlogger = logging.getLogger(__name__)\n\n_DECORABLE_METHOD_NAME = re.compile(r'^on_({})(_\\w+)?$'.format(\n '|'.join(method.lower() for method in COMBINED_METHODS)))\n\n# what is the Falcon main version (eg 2 or 3, etc)\nFALCONVERSION_MAIN = int(FALCONVERSION.split('.')[0])\n\n\nclass Middleware:\n \"\"\" It integrates a cache object with Falcon by turning it into\n a Falcon Middleware\n \"\"\"\n\n def __init__(self, cache: 'Cache', config: Dict[str, Any]) -> None:\n self.cache = cache\n self.cache_config = config\n\n def process_resource(self, req, resp, resource, params):\n \"\"\" Determine if the given request is marked for caching and if yes,\n then look it up in the cache and if found, then return the cached value\n \"\"\"\n\n # Step 1: for 'rest-based' and 'rest&time-based' eviction strategies the\n # POST/PATCH/PUT/DELETE calls are never cached, they should never be\n # loaded from cache as they must always execute,\n # so for those we don't need to try to search the cache\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [CacheEvictionStrategy.rest_based,\n CacheEvictionStrategy.rest_and_time_based] \\\n and req.method.upper() in [HttpMethods.POST,\n HttpMethods.PATCH,\n HttpMethods.PUT,\n HttpMethods.DELETE]:\n return\n\n # Step 2: determine whether the given responder has caching setup\n # and if not then short-circuit to save on the lookup of request in the cache\n # as anyhow this request was not marked to be cached\n\n # find out which responder (\"on_...\" method) is going to be used to process this request\n responder = None\n for _method in dir(resource):\n if _DECORABLE_METHOD_NAME.match(_method) and _method[3:].upper() == req.method.upper():\n responder = _method\n break\n\n if responder:\n # get the name of the responder wrapper, which for cached objects is 'cache_wrap'\n # see the \"Cache.cache\" decorator in cache.py\n responder_wrapper_name = getattr(getattr(resource, responder), '__name__')\n\n # is the given method (or its class) decorated by the cache_wrap being the topmost decorator?\n if responder_wrapper_name == 'cache_wrap':\n logger.debug(\" This endpoint is decorated by 'cache' being the topmost decorator.\")\n else:\n # 'cache_wrap' is not the topmost decorator - let's check whether 'cache' is\n # any of the other decorator on this method (not the topmost):\n # this requires the use of @register(decor1, decor2) as the decorator\n if hasattr(getattr(resource, responder), '_decorators') and \\\n 'cache' in [d._decorator_name for d in getattr(resource, responder)._decorators\n if hasattr(d, '_decorator_name')]:\n logger.debug(\" This endpoint is decorated by 'cache', but it is NOT the topmost decorator.\")\n else:\n # no cache was requested on this responder as no decorator at all\n logger.debug(\" No 'cache' was requested for this endpoint.\")\n return\n\n # Step 3: look up the record in the cache\n key = self.generate_cache_key(req)\n data = self.cache.get(key)\n\n if data:\n # if the CACHE_CONTENT_TYPE_JSON_ONLY = True, then we are NOT\n # caching the response's Content-Type, only its body\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n resp.body = self.deserialize(data)\n else:\n resp.text = self.deserialize(data)\n else:\n if FALCONVERSION_MAIN < 3:\n resp.content_type, resp.body = self.deserialize(data)\n else:\n resp.content_type, resp.text = self.deserialize(data)\n resp.status = HTTP_200\n req.context.cached = True\n\n # Short-circuit any further processing to skip any remaining\n # 'process_request' and 'process_resource' methods, as well as\n # the 'responder' method that the request would have been routed to.\n # However, any 'process_response' middleware methods will still be called.\n resp.complete = True\n\n def process_response(self, req, resp, resource, req_succeeded):\n \"\"\" Cache the response if this request qualifies and has not been cached yet\n or for rest-based and rest-and-time-based evict the record from the cache if\n the request method is POST/PATCH/PUT or DELETE \"\"\"\n\n # Step 1: for 'rest-based' and 'rest&time-based' eviction strategies the\n # POST/PATCH/PUT/DELETE calls are never cached and even more they\n # invalidate the record cached by the GET method\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [CacheEvictionStrategy.rest_based,\n CacheEvictionStrategy.rest_and_time_based] \\\n and req.method.upper() in [HttpMethods.POST,\n HttpMethods.PATCH,\n HttpMethods.PUT,\n HttpMethods.DELETE]:\n # get the cache key created by the GET method (assuming there was one)\n key = self.generate_cache_key(req, method='GET')\n self.cache.delete(key)\n return\n\n # Step 2: if it is marked to be cached, but has not yet been cached\n # then we cache it\n if hasattr(req.context, 'cache') and req.context.cache \\\n and (not hasattr(req.context, 'cached') or not req.context.cached):\n key = self.generate_cache_key(req)\n value = self.serialize(req, resp, resource)\n\n # for the REST-based strategy there is no timeout, the cached record never expires\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [CacheEvictionStrategy.rest_based]:\n # timeout 0 - never expires\n timeout = 0\n else:\n # for the time-based and rest-and-time-based eviction strategy the\n # cached record expires\n timeout = req.context.cache_timeout if hasattr(req.context, 'cache_timeout') else 600\n\n self.cache.set(key, value, timeout=timeout)\n\n @staticmethod\n def generate_cache_key(req, method: str = None) -> str:\n \"\"\" Generate the cache key from the request using the path and the method \"\"\"\n\n path = req.path\n if path.endswith('/'):\n path = path[:-1]\n\n if not method:\n method = req.method\n\n return f'{path}:{method.upper()}'\n\n def serialize(self, req, resp, resource) -> bytes:\n \"\"\" Serializes the response, so it can be cached.\n\n If CACHE_CONTENT_TYPE_JSON_ONLY = False (default), then we need to\n keep the response Content-Type header, so we need to serialize the response\n body with the content type with msgpack, which takes away performance.\n\n For this reason the user can set CACHE_CONTENT_TYPE_JSON_ONLY = True, in\n which case the response Content-Type is NOT cached, so it will be\n the default - which is application/json. That should be fine for most\n REST APIs and should bring a nice performance bump by avoiding the msgpack\n serialization.\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n return resp.body\n else:\n return resp.text\n else:\n if FALCONVERSION_MAIN < 3:\n return msgpack.packb([resp.content_type, resp.body], use_bin_type=True)\n else:\n return msgpack.packb([resp.content_type, resp.text], use_bin_type=True)\n\n def deserialize(self, data: bytes) -> Tuple[str, Any]:\n \"\"\" Deserializes the cached record into the response Body\n or the Content-Type and Body\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n return data\n else:\n return msgpack.unpackb(data, raw=False)\n", "from falcon import HTTP_200, COMBINED_METHODS, __version__ as FALCONVERSION\nfrom falcon_caching.options import CacheEvictionStrategy, HttpMethods\nimport logging\nimport re\nimport msgpack\nfrom typing import TYPE_CHECKING, Any, Dict, Tuple\nif TYPE_CHECKING:\n from falcon_caching.cache import Cache\nlogger = logging.getLogger(__name__)\n_DECORABLE_METHOD_NAME = re.compile('^on_({})(_\\\\w+)?$'.format('|'.join(\n method.lower() for method in COMBINED_METHODS)))\nFALCONVERSION_MAIN = int(FALCONVERSION.split('.')[0])\n\n\nclass Middleware:\n \"\"\" It integrates a cache object with Falcon by turning it into\n a Falcon Middleware\n \"\"\"\n\n def __init__(self, cache: 'Cache', config: Dict[str, Any]) ->None:\n self.cache = cache\n self.cache_config = config\n\n def process_resource(self, req, resp, resource, params):\n \"\"\" Determine if the given request is marked for caching and if yes,\n then look it up in the cache and if found, then return the cached value\n \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n return\n responder = None\n for _method in dir(resource):\n if _DECORABLE_METHOD_NAME.match(_method) and _method[3:].upper(\n ) == req.method.upper():\n responder = _method\n break\n if responder:\n responder_wrapper_name = getattr(getattr(resource, responder),\n '__name__')\n if responder_wrapper_name == 'cache_wrap':\n logger.debug(\n \" This endpoint is decorated by 'cache' being the topmost decorator.\"\n )\n elif hasattr(getattr(resource, responder), '_decorators'\n ) and 'cache' in [d._decorator_name for d in getattr(\n resource, responder)._decorators if hasattr(d,\n '_decorator_name')]:\n logger.debug(\n \" This endpoint is decorated by 'cache', but it is NOT the topmost decorator.\"\n )\n else:\n logger.debug(\" No 'cache' was requested for this endpoint.\")\n return\n key = self.generate_cache_key(req)\n data = self.cache.get(key)\n if data:\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n resp.body = self.deserialize(data)\n else:\n resp.text = self.deserialize(data)\n elif FALCONVERSION_MAIN < 3:\n resp.content_type, resp.body = self.deserialize(data)\n else:\n resp.content_type, resp.text = self.deserialize(data)\n resp.status = HTTP_200\n req.context.cached = True\n resp.complete = True\n\n def process_response(self, req, resp, resource, req_succeeded):\n \"\"\" Cache the response if this request qualifies and has not been cached yet\n or for rest-based and rest-and-time-based evict the record from the cache if\n the request method is POST/PATCH/PUT or DELETE \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n key = self.generate_cache_key(req, method='GET')\n self.cache.delete(key)\n return\n if hasattr(req.context, 'cache') and req.context.cache and (not\n hasattr(req.context, 'cached') or not req.context.cached):\n key = self.generate_cache_key(req)\n value = self.serialize(req, resp, resource)\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based]:\n timeout = 0\n else:\n timeout = req.context.cache_timeout if hasattr(req.context,\n 'cache_timeout') else 600\n self.cache.set(key, value, timeout=timeout)\n\n @staticmethod\n def generate_cache_key(req, method: str=None) ->str:\n \"\"\" Generate the cache key from the request using the path and the method \"\"\"\n path = req.path\n if path.endswith('/'):\n path = path[:-1]\n if not method:\n method = req.method\n return f'{path}:{method.upper()}'\n\n def serialize(self, req, resp, resource) ->bytes:\n \"\"\" Serializes the response, so it can be cached.\n\n If CACHE_CONTENT_TYPE_JSON_ONLY = False (default), then we need to\n keep the response Content-Type header, so we need to serialize the response\n body with the content type with msgpack, which takes away performance.\n\n For this reason the user can set CACHE_CONTENT_TYPE_JSON_ONLY = True, in\n which case the response Content-Type is NOT cached, so it will be\n the default - which is application/json. That should be fine for most\n REST APIs and should bring a nice performance bump by avoiding the msgpack\n serialization.\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n return resp.body\n else:\n return resp.text\n elif FALCONVERSION_MAIN < 3:\n return msgpack.packb([resp.content_type, resp.body],\n use_bin_type=True)\n else:\n return msgpack.packb([resp.content_type, resp.text],\n use_bin_type=True)\n\n def deserialize(self, data: bytes) ->Tuple[str, Any]:\n \"\"\" Deserializes the cached record into the response Body\n or the Content-Type and Body\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n return data\n else:\n return msgpack.unpackb(data, raw=False)\n", "<import token>\nif TYPE_CHECKING:\n from falcon_caching.cache import Cache\nlogger = logging.getLogger(__name__)\n_DECORABLE_METHOD_NAME = re.compile('^on_({})(_\\\\w+)?$'.format('|'.join(\n method.lower() for method in COMBINED_METHODS)))\nFALCONVERSION_MAIN = int(FALCONVERSION.split('.')[0])\n\n\nclass Middleware:\n \"\"\" It integrates a cache object with Falcon by turning it into\n a Falcon Middleware\n \"\"\"\n\n def __init__(self, cache: 'Cache', config: Dict[str, Any]) ->None:\n self.cache = cache\n self.cache_config = config\n\n def process_resource(self, req, resp, resource, params):\n \"\"\" Determine if the given request is marked for caching and if yes,\n then look it up in the cache and if found, then return the cached value\n \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n return\n responder = None\n for _method in dir(resource):\n if _DECORABLE_METHOD_NAME.match(_method) and _method[3:].upper(\n ) == req.method.upper():\n responder = _method\n break\n if responder:\n responder_wrapper_name = getattr(getattr(resource, responder),\n '__name__')\n if responder_wrapper_name == 'cache_wrap':\n logger.debug(\n \" This endpoint is decorated by 'cache' being the topmost decorator.\"\n )\n elif hasattr(getattr(resource, responder), '_decorators'\n ) and 'cache' in [d._decorator_name for d in getattr(\n resource, responder)._decorators if hasattr(d,\n '_decorator_name')]:\n logger.debug(\n \" This endpoint is decorated by 'cache', but it is NOT the topmost decorator.\"\n )\n else:\n logger.debug(\" No 'cache' was requested for this endpoint.\")\n return\n key = self.generate_cache_key(req)\n data = self.cache.get(key)\n if data:\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n resp.body = self.deserialize(data)\n else:\n resp.text = self.deserialize(data)\n elif FALCONVERSION_MAIN < 3:\n resp.content_type, resp.body = self.deserialize(data)\n else:\n resp.content_type, resp.text = self.deserialize(data)\n resp.status = HTTP_200\n req.context.cached = True\n resp.complete = True\n\n def process_response(self, req, resp, resource, req_succeeded):\n \"\"\" Cache the response if this request qualifies and has not been cached yet\n or for rest-based and rest-and-time-based evict the record from the cache if\n the request method is POST/PATCH/PUT or DELETE \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n key = self.generate_cache_key(req, method='GET')\n self.cache.delete(key)\n return\n if hasattr(req.context, 'cache') and req.context.cache and (not\n hasattr(req.context, 'cached') or not req.context.cached):\n key = self.generate_cache_key(req)\n value = self.serialize(req, resp, resource)\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based]:\n timeout = 0\n else:\n timeout = req.context.cache_timeout if hasattr(req.context,\n 'cache_timeout') else 600\n self.cache.set(key, value, timeout=timeout)\n\n @staticmethod\n def generate_cache_key(req, method: str=None) ->str:\n \"\"\" Generate the cache key from the request using the path and the method \"\"\"\n path = req.path\n if path.endswith('/'):\n path = path[:-1]\n if not method:\n method = req.method\n return f'{path}:{method.upper()}'\n\n def serialize(self, req, resp, resource) ->bytes:\n \"\"\" Serializes the response, so it can be cached.\n\n If CACHE_CONTENT_TYPE_JSON_ONLY = False (default), then we need to\n keep the response Content-Type header, so we need to serialize the response\n body with the content type with msgpack, which takes away performance.\n\n For this reason the user can set CACHE_CONTENT_TYPE_JSON_ONLY = True, in\n which case the response Content-Type is NOT cached, so it will be\n the default - which is application/json. That should be fine for most\n REST APIs and should bring a nice performance bump by avoiding the msgpack\n serialization.\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n return resp.body\n else:\n return resp.text\n elif FALCONVERSION_MAIN < 3:\n return msgpack.packb([resp.content_type, resp.body],\n use_bin_type=True)\n else:\n return msgpack.packb([resp.content_type, resp.text],\n use_bin_type=True)\n\n def deserialize(self, data: bytes) ->Tuple[str, Any]:\n \"\"\" Deserializes the cached record into the response Body\n or the Content-Type and Body\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n return data\n else:\n return msgpack.unpackb(data, raw=False)\n", "<import token>\nif TYPE_CHECKING:\n from falcon_caching.cache import Cache\n<assignment token>\n\n\nclass Middleware:\n \"\"\" It integrates a cache object with Falcon by turning it into\n a Falcon Middleware\n \"\"\"\n\n def __init__(self, cache: 'Cache', config: Dict[str, Any]) ->None:\n self.cache = cache\n self.cache_config = config\n\n def process_resource(self, req, resp, resource, params):\n \"\"\" Determine if the given request is marked for caching and if yes,\n then look it up in the cache and if found, then return the cached value\n \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n return\n responder = None\n for _method in dir(resource):\n if _DECORABLE_METHOD_NAME.match(_method) and _method[3:].upper(\n ) == req.method.upper():\n responder = _method\n break\n if responder:\n responder_wrapper_name = getattr(getattr(resource, responder),\n '__name__')\n if responder_wrapper_name == 'cache_wrap':\n logger.debug(\n \" This endpoint is decorated by 'cache' being the topmost decorator.\"\n )\n elif hasattr(getattr(resource, responder), '_decorators'\n ) and 'cache' in [d._decorator_name for d in getattr(\n resource, responder)._decorators if hasattr(d,\n '_decorator_name')]:\n logger.debug(\n \" This endpoint is decorated by 'cache', but it is NOT the topmost decorator.\"\n )\n else:\n logger.debug(\" No 'cache' was requested for this endpoint.\")\n return\n key = self.generate_cache_key(req)\n data = self.cache.get(key)\n if data:\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n resp.body = self.deserialize(data)\n else:\n resp.text = self.deserialize(data)\n elif FALCONVERSION_MAIN < 3:\n resp.content_type, resp.body = self.deserialize(data)\n else:\n resp.content_type, resp.text = self.deserialize(data)\n resp.status = HTTP_200\n req.context.cached = True\n resp.complete = True\n\n def process_response(self, req, resp, resource, req_succeeded):\n \"\"\" Cache the response if this request qualifies and has not been cached yet\n or for rest-based and rest-and-time-based evict the record from the cache if\n the request method is POST/PATCH/PUT or DELETE \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n key = self.generate_cache_key(req, method='GET')\n self.cache.delete(key)\n return\n if hasattr(req.context, 'cache') and req.context.cache and (not\n hasattr(req.context, 'cached') or not req.context.cached):\n key = self.generate_cache_key(req)\n value = self.serialize(req, resp, resource)\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based]:\n timeout = 0\n else:\n timeout = req.context.cache_timeout if hasattr(req.context,\n 'cache_timeout') else 600\n self.cache.set(key, value, timeout=timeout)\n\n @staticmethod\n def generate_cache_key(req, method: str=None) ->str:\n \"\"\" Generate the cache key from the request using the path and the method \"\"\"\n path = req.path\n if path.endswith('/'):\n path = path[:-1]\n if not method:\n method = req.method\n return f'{path}:{method.upper()}'\n\n def serialize(self, req, resp, resource) ->bytes:\n \"\"\" Serializes the response, so it can be cached.\n\n If CACHE_CONTENT_TYPE_JSON_ONLY = False (default), then we need to\n keep the response Content-Type header, so we need to serialize the response\n body with the content type with msgpack, which takes away performance.\n\n For this reason the user can set CACHE_CONTENT_TYPE_JSON_ONLY = True, in\n which case the response Content-Type is NOT cached, so it will be\n the default - which is application/json. That should be fine for most\n REST APIs and should bring a nice performance bump by avoiding the msgpack\n serialization.\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n return resp.body\n else:\n return resp.text\n elif FALCONVERSION_MAIN < 3:\n return msgpack.packb([resp.content_type, resp.body],\n use_bin_type=True)\n else:\n return msgpack.packb([resp.content_type, resp.text],\n use_bin_type=True)\n\n def deserialize(self, data: bytes) ->Tuple[str, Any]:\n \"\"\" Deserializes the cached record into the response Body\n or the Content-Type and Body\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n return data\n else:\n return msgpack.unpackb(data, raw=False)\n", "<import token>\n<code token>\n<assignment token>\n\n\nclass Middleware:\n \"\"\" It integrates a cache object with Falcon by turning it into\n a Falcon Middleware\n \"\"\"\n\n def __init__(self, cache: 'Cache', config: Dict[str, Any]) ->None:\n self.cache = cache\n self.cache_config = config\n\n def process_resource(self, req, resp, resource, params):\n \"\"\" Determine if the given request is marked for caching and if yes,\n then look it up in the cache and if found, then return the cached value\n \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n return\n responder = None\n for _method in dir(resource):\n if _DECORABLE_METHOD_NAME.match(_method) and _method[3:].upper(\n ) == req.method.upper():\n responder = _method\n break\n if responder:\n responder_wrapper_name = getattr(getattr(resource, responder),\n '__name__')\n if responder_wrapper_name == 'cache_wrap':\n logger.debug(\n \" This endpoint is decorated by 'cache' being the topmost decorator.\"\n )\n elif hasattr(getattr(resource, responder), '_decorators'\n ) and 'cache' in [d._decorator_name for d in getattr(\n resource, responder)._decorators if hasattr(d,\n '_decorator_name')]:\n logger.debug(\n \" This endpoint is decorated by 'cache', but it is NOT the topmost decorator.\"\n )\n else:\n logger.debug(\" No 'cache' was requested for this endpoint.\")\n return\n key = self.generate_cache_key(req)\n data = self.cache.get(key)\n if data:\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n resp.body = self.deserialize(data)\n else:\n resp.text = self.deserialize(data)\n elif FALCONVERSION_MAIN < 3:\n resp.content_type, resp.body = self.deserialize(data)\n else:\n resp.content_type, resp.text = self.deserialize(data)\n resp.status = HTTP_200\n req.context.cached = True\n resp.complete = True\n\n def process_response(self, req, resp, resource, req_succeeded):\n \"\"\" Cache the response if this request qualifies and has not been cached yet\n or for rest-based and rest-and-time-based evict the record from the cache if\n the request method is POST/PATCH/PUT or DELETE \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n key = self.generate_cache_key(req, method='GET')\n self.cache.delete(key)\n return\n if hasattr(req.context, 'cache') and req.context.cache and (not\n hasattr(req.context, 'cached') or not req.context.cached):\n key = self.generate_cache_key(req)\n value = self.serialize(req, resp, resource)\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based]:\n timeout = 0\n else:\n timeout = req.context.cache_timeout if hasattr(req.context,\n 'cache_timeout') else 600\n self.cache.set(key, value, timeout=timeout)\n\n @staticmethod\n def generate_cache_key(req, method: str=None) ->str:\n \"\"\" Generate the cache key from the request using the path and the method \"\"\"\n path = req.path\n if path.endswith('/'):\n path = path[:-1]\n if not method:\n method = req.method\n return f'{path}:{method.upper()}'\n\n def serialize(self, req, resp, resource) ->bytes:\n \"\"\" Serializes the response, so it can be cached.\n\n If CACHE_CONTENT_TYPE_JSON_ONLY = False (default), then we need to\n keep the response Content-Type header, so we need to serialize the response\n body with the content type with msgpack, which takes away performance.\n\n For this reason the user can set CACHE_CONTENT_TYPE_JSON_ONLY = True, in\n which case the response Content-Type is NOT cached, so it will be\n the default - which is application/json. That should be fine for most\n REST APIs and should bring a nice performance bump by avoiding the msgpack\n serialization.\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n return resp.body\n else:\n return resp.text\n elif FALCONVERSION_MAIN < 3:\n return msgpack.packb([resp.content_type, resp.body],\n use_bin_type=True)\n else:\n return msgpack.packb([resp.content_type, resp.text],\n use_bin_type=True)\n\n def deserialize(self, data: bytes) ->Tuple[str, Any]:\n \"\"\" Deserializes the cached record into the response Body\n or the Content-Type and Body\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n return data\n else:\n return msgpack.unpackb(data, raw=False)\n", "<import token>\n<code token>\n<assignment token>\n\n\nclass Middleware:\n <docstring token>\n\n def __init__(self, cache: 'Cache', config: Dict[str, Any]) ->None:\n self.cache = cache\n self.cache_config = config\n\n def process_resource(self, req, resp, resource, params):\n \"\"\" Determine if the given request is marked for caching and if yes,\n then look it up in the cache and if found, then return the cached value\n \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n return\n responder = None\n for _method in dir(resource):\n if _DECORABLE_METHOD_NAME.match(_method) and _method[3:].upper(\n ) == req.method.upper():\n responder = _method\n break\n if responder:\n responder_wrapper_name = getattr(getattr(resource, responder),\n '__name__')\n if responder_wrapper_name == 'cache_wrap':\n logger.debug(\n \" This endpoint is decorated by 'cache' being the topmost decorator.\"\n )\n elif hasattr(getattr(resource, responder), '_decorators'\n ) and 'cache' in [d._decorator_name for d in getattr(\n resource, responder)._decorators if hasattr(d,\n '_decorator_name')]:\n logger.debug(\n \" This endpoint is decorated by 'cache', but it is NOT the topmost decorator.\"\n )\n else:\n logger.debug(\" No 'cache' was requested for this endpoint.\")\n return\n key = self.generate_cache_key(req)\n data = self.cache.get(key)\n if data:\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n resp.body = self.deserialize(data)\n else:\n resp.text = self.deserialize(data)\n elif FALCONVERSION_MAIN < 3:\n resp.content_type, resp.body = self.deserialize(data)\n else:\n resp.content_type, resp.text = self.deserialize(data)\n resp.status = HTTP_200\n req.context.cached = True\n resp.complete = True\n\n def process_response(self, req, resp, resource, req_succeeded):\n \"\"\" Cache the response if this request qualifies and has not been cached yet\n or for rest-based and rest-and-time-based evict the record from the cache if\n the request method is POST/PATCH/PUT or DELETE \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n key = self.generate_cache_key(req, method='GET')\n self.cache.delete(key)\n return\n if hasattr(req.context, 'cache') and req.context.cache and (not\n hasattr(req.context, 'cached') or not req.context.cached):\n key = self.generate_cache_key(req)\n value = self.serialize(req, resp, resource)\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based]:\n timeout = 0\n else:\n timeout = req.context.cache_timeout if hasattr(req.context,\n 'cache_timeout') else 600\n self.cache.set(key, value, timeout=timeout)\n\n @staticmethod\n def generate_cache_key(req, method: str=None) ->str:\n \"\"\" Generate the cache key from the request using the path and the method \"\"\"\n path = req.path\n if path.endswith('/'):\n path = path[:-1]\n if not method:\n method = req.method\n return f'{path}:{method.upper()}'\n\n def serialize(self, req, resp, resource) ->bytes:\n \"\"\" Serializes the response, so it can be cached.\n\n If CACHE_CONTENT_TYPE_JSON_ONLY = False (default), then we need to\n keep the response Content-Type header, so we need to serialize the response\n body with the content type with msgpack, which takes away performance.\n\n For this reason the user can set CACHE_CONTENT_TYPE_JSON_ONLY = True, in\n which case the response Content-Type is NOT cached, so it will be\n the default - which is application/json. That should be fine for most\n REST APIs and should bring a nice performance bump by avoiding the msgpack\n serialization.\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n return resp.body\n else:\n return resp.text\n elif FALCONVERSION_MAIN < 3:\n return msgpack.packb([resp.content_type, resp.body],\n use_bin_type=True)\n else:\n return msgpack.packb([resp.content_type, resp.text],\n use_bin_type=True)\n\n def deserialize(self, data: bytes) ->Tuple[str, Any]:\n \"\"\" Deserializes the cached record into the response Body\n or the Content-Type and Body\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n return data\n else:\n return msgpack.unpackb(data, raw=False)\n", "<import token>\n<code token>\n<assignment token>\n\n\nclass Middleware:\n <docstring token>\n\n def __init__(self, cache: 'Cache', config: Dict[str, Any]) ->None:\n self.cache = cache\n self.cache_config = config\n\n def process_resource(self, req, resp, resource, params):\n \"\"\" Determine if the given request is marked for caching and if yes,\n then look it up in the cache and if found, then return the cached value\n \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n return\n responder = None\n for _method in dir(resource):\n if _DECORABLE_METHOD_NAME.match(_method) and _method[3:].upper(\n ) == req.method.upper():\n responder = _method\n break\n if responder:\n responder_wrapper_name = getattr(getattr(resource, responder),\n '__name__')\n if responder_wrapper_name == 'cache_wrap':\n logger.debug(\n \" This endpoint is decorated by 'cache' being the topmost decorator.\"\n )\n elif hasattr(getattr(resource, responder), '_decorators'\n ) and 'cache' in [d._decorator_name for d in getattr(\n resource, responder)._decorators if hasattr(d,\n '_decorator_name')]:\n logger.debug(\n \" This endpoint is decorated by 'cache', but it is NOT the topmost decorator.\"\n )\n else:\n logger.debug(\" No 'cache' was requested for this endpoint.\")\n return\n key = self.generate_cache_key(req)\n data = self.cache.get(key)\n if data:\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n resp.body = self.deserialize(data)\n else:\n resp.text = self.deserialize(data)\n elif FALCONVERSION_MAIN < 3:\n resp.content_type, resp.body = self.deserialize(data)\n else:\n resp.content_type, resp.text = self.deserialize(data)\n resp.status = HTTP_200\n req.context.cached = True\n resp.complete = True\n\n def process_response(self, req, resp, resource, req_succeeded):\n \"\"\" Cache the response if this request qualifies and has not been cached yet\n or for rest-based and rest-and-time-based evict the record from the cache if\n the request method is POST/PATCH/PUT or DELETE \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n key = self.generate_cache_key(req, method='GET')\n self.cache.delete(key)\n return\n if hasattr(req.context, 'cache') and req.context.cache and (not\n hasattr(req.context, 'cached') or not req.context.cached):\n key = self.generate_cache_key(req)\n value = self.serialize(req, resp, resource)\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based]:\n timeout = 0\n else:\n timeout = req.context.cache_timeout if hasattr(req.context,\n 'cache_timeout') else 600\n self.cache.set(key, value, timeout=timeout)\n\n @staticmethod\n def generate_cache_key(req, method: str=None) ->str:\n \"\"\" Generate the cache key from the request using the path and the method \"\"\"\n path = req.path\n if path.endswith('/'):\n path = path[:-1]\n if not method:\n method = req.method\n return f'{path}:{method.upper()}'\n\n def serialize(self, req, resp, resource) ->bytes:\n \"\"\" Serializes the response, so it can be cached.\n\n If CACHE_CONTENT_TYPE_JSON_ONLY = False (default), then we need to\n keep the response Content-Type header, so we need to serialize the response\n body with the content type with msgpack, which takes away performance.\n\n For this reason the user can set CACHE_CONTENT_TYPE_JSON_ONLY = True, in\n which case the response Content-Type is NOT cached, so it will be\n the default - which is application/json. That should be fine for most\n REST APIs and should bring a nice performance bump by avoiding the msgpack\n serialization.\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n return resp.body\n else:\n return resp.text\n elif FALCONVERSION_MAIN < 3:\n return msgpack.packb([resp.content_type, resp.body],\n use_bin_type=True)\n else:\n return msgpack.packb([resp.content_type, resp.text],\n use_bin_type=True)\n <function token>\n", "<import token>\n<code token>\n<assignment token>\n\n\nclass Middleware:\n <docstring token>\n\n def __init__(self, cache: 'Cache', config: Dict[str, Any]) ->None:\n self.cache = cache\n self.cache_config = config\n <function token>\n\n def process_response(self, req, resp, resource, req_succeeded):\n \"\"\" Cache the response if this request qualifies and has not been cached yet\n or for rest-based and rest-and-time-based evict the record from the cache if\n the request method is POST/PATCH/PUT or DELETE \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n key = self.generate_cache_key(req, method='GET')\n self.cache.delete(key)\n return\n if hasattr(req.context, 'cache') and req.context.cache and (not\n hasattr(req.context, 'cached') or not req.context.cached):\n key = self.generate_cache_key(req)\n value = self.serialize(req, resp, resource)\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based]:\n timeout = 0\n else:\n timeout = req.context.cache_timeout if hasattr(req.context,\n 'cache_timeout') else 600\n self.cache.set(key, value, timeout=timeout)\n\n @staticmethod\n def generate_cache_key(req, method: str=None) ->str:\n \"\"\" Generate the cache key from the request using the path and the method \"\"\"\n path = req.path\n if path.endswith('/'):\n path = path[:-1]\n if not method:\n method = req.method\n return f'{path}:{method.upper()}'\n\n def serialize(self, req, resp, resource) ->bytes:\n \"\"\" Serializes the response, so it can be cached.\n\n If CACHE_CONTENT_TYPE_JSON_ONLY = False (default), then we need to\n keep the response Content-Type header, so we need to serialize the response\n body with the content type with msgpack, which takes away performance.\n\n For this reason the user can set CACHE_CONTENT_TYPE_JSON_ONLY = True, in\n which case the response Content-Type is NOT cached, so it will be\n the default - which is application/json. That should be fine for most\n REST APIs and should bring a nice performance bump by avoiding the msgpack\n serialization.\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n return resp.body\n else:\n return resp.text\n elif FALCONVERSION_MAIN < 3:\n return msgpack.packb([resp.content_type, resp.body],\n use_bin_type=True)\n else:\n return msgpack.packb([resp.content_type, resp.text],\n use_bin_type=True)\n <function token>\n", "<import token>\n<code token>\n<assignment token>\n\n\nclass Middleware:\n <docstring token>\n\n def __init__(self, cache: 'Cache', config: Dict[str, Any]) ->None:\n self.cache = cache\n self.cache_config = config\n <function token>\n\n def process_response(self, req, resp, resource, req_succeeded):\n \"\"\" Cache the response if this request qualifies and has not been cached yet\n or for rest-based and rest-and-time-based evict the record from the cache if\n the request method is POST/PATCH/PUT or DELETE \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n key = self.generate_cache_key(req, method='GET')\n self.cache.delete(key)\n return\n if hasattr(req.context, 'cache') and req.context.cache and (not\n hasattr(req.context, 'cached') or not req.context.cached):\n key = self.generate_cache_key(req)\n value = self.serialize(req, resp, resource)\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based]:\n timeout = 0\n else:\n timeout = req.context.cache_timeout if hasattr(req.context,\n 'cache_timeout') else 600\n self.cache.set(key, value, timeout=timeout)\n <function token>\n\n def serialize(self, req, resp, resource) ->bytes:\n \"\"\" Serializes the response, so it can be cached.\n\n If CACHE_CONTENT_TYPE_JSON_ONLY = False (default), then we need to\n keep the response Content-Type header, so we need to serialize the response\n body with the content type with msgpack, which takes away performance.\n\n For this reason the user can set CACHE_CONTENT_TYPE_JSON_ONLY = True, in\n which case the response Content-Type is NOT cached, so it will be\n the default - which is application/json. That should be fine for most\n REST APIs and should bring a nice performance bump by avoiding the msgpack\n serialization.\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n return resp.body\n else:\n return resp.text\n elif FALCONVERSION_MAIN < 3:\n return msgpack.packb([resp.content_type, resp.body],\n use_bin_type=True)\n else:\n return msgpack.packb([resp.content_type, resp.text],\n use_bin_type=True)\n <function token>\n", "<import token>\n<code token>\n<assignment token>\n\n\nclass Middleware:\n <docstring token>\n <function token>\n <function token>\n\n def process_response(self, req, resp, resource, req_succeeded):\n \"\"\" Cache the response if this request qualifies and has not been cached yet\n or for rest-based and rest-and-time-based evict the record from the cache if\n the request method is POST/PATCH/PUT or DELETE \"\"\"\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based, CacheEvictionStrategy.\n rest_and_time_based] and req.method.upper() in [HttpMethods.\n POST, HttpMethods.PATCH, HttpMethods.PUT, HttpMethods.DELETE]:\n key = self.generate_cache_key(req, method='GET')\n self.cache.delete(key)\n return\n if hasattr(req.context, 'cache') and req.context.cache and (not\n hasattr(req.context, 'cached') or not req.context.cached):\n key = self.generate_cache_key(req)\n value = self.serialize(req, resp, resource)\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [\n CacheEvictionStrategy.rest_based]:\n timeout = 0\n else:\n timeout = req.context.cache_timeout if hasattr(req.context,\n 'cache_timeout') else 600\n self.cache.set(key, value, timeout=timeout)\n <function token>\n\n def serialize(self, req, resp, resource) ->bytes:\n \"\"\" Serializes the response, so it can be cached.\n\n If CACHE_CONTENT_TYPE_JSON_ONLY = False (default), then we need to\n keep the response Content-Type header, so we need to serialize the response\n body with the content type with msgpack, which takes away performance.\n\n For this reason the user can set CACHE_CONTENT_TYPE_JSON_ONLY = True, in\n which case the response Content-Type is NOT cached, so it will be\n the default - which is application/json. That should be fine for most\n REST APIs and should bring a nice performance bump by avoiding the msgpack\n serialization.\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n return resp.body\n else:\n return resp.text\n elif FALCONVERSION_MAIN < 3:\n return msgpack.packb([resp.content_type, resp.body],\n use_bin_type=True)\n else:\n return msgpack.packb([resp.content_type, resp.text],\n use_bin_type=True)\n <function token>\n", "<import token>\n<code token>\n<assignment token>\n\n\nclass Middleware:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def serialize(self, req, resp, resource) ->bytes:\n \"\"\" Serializes the response, so it can be cached.\n\n If CACHE_CONTENT_TYPE_JSON_ONLY = False (default), then we need to\n keep the response Content-Type header, so we need to serialize the response\n body with the content type with msgpack, which takes away performance.\n\n For this reason the user can set CACHE_CONTENT_TYPE_JSON_ONLY = True, in\n which case the response Content-Type is NOT cached, so it will be\n the default - which is application/json. That should be fine for most\n REST APIs and should bring a nice performance bump by avoiding the msgpack\n serialization.\n \"\"\"\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n return resp.body\n else:\n return resp.text\n elif FALCONVERSION_MAIN < 3:\n return msgpack.packb([resp.content_type, resp.body],\n use_bin_type=True)\n else:\n return msgpack.packb([resp.content_type, resp.text],\n use_bin_type=True)\n <function token>\n", "<import token>\n<code token>\n<assignment token>\n\n\nclass Middleware:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n", "<import token>\n<code token>\n<assignment token>\n<class token>\n" ]
false
98,992
0441954208cd522b26cc5d39629f8b43046506e5
import os, pty, serial, time from devices import lectorDevice from reader import * from xmlLib import XMLprocessor class eventHandler(serialEventListener): counter = 0 def handle(self,data,disp): self.counter=self.counter+1 xmlp= XMLprocessor(data) print xmlp.TD, xmlp.F, xmlp.RS, xmlp.RSR if self.counter==5: print 'prueba finalizada' disp.close() #print "Prueba de conexion virtual" d = lectorDevice() el=eventHandler() #print d.device['frendly_name'],d.device['name'] #master, slave = pty.openpty() #s_name = os.ttyname(slave) #s_name = "/dev/ttys003" #ser = serial.Serial(s_name,9600,timeout=.01) ser = serialReader(d,.01) ser.addEventListener(el) #print "agregado event listener" ser.open() #print "conexion iniciada" #try # asjhdkaç #except Exception as e: # print e.message
[ "import os, pty, serial, time\nfrom devices import lectorDevice\nfrom reader import *\nfrom xmlLib import XMLprocessor\n\nclass eventHandler(serialEventListener):\n\tcounter = 0\n\tdef handle(self,data,disp):\n\t\tself.counter=self.counter+1\n\t\txmlp= XMLprocessor(data)\n\t\tprint xmlp.TD, xmlp.F, xmlp.RS, xmlp.RSR\n\t\tif self.counter==5:\n\t\t\tprint 'prueba finalizada'\n\t\t\tdisp.close()\n\n#print \"Prueba de conexion virtual\"\nd = lectorDevice()\nel=eventHandler()\n\n#print d.device['frendly_name'],d.device['name']\n\n\n#master, slave = pty.openpty()\n#s_name = os.ttyname(slave)\n#s_name = \"/dev/ttys003\"\n#ser = serial.Serial(s_name,9600,timeout=.01)\nser = serialReader(d,.01)\n\nser.addEventListener(el)\n\n#print \"agregado event listener\"\n\nser.open()\n#print \"conexion iniciada\"\n#try \n#\tasjhdkaç\n#except Exception as e:\n#\tprint e.message" ]
true
98,993
856e47ce5826781ba0a2f1c80d2456b64120ebce
def computepay(h,r): if h>40: p = 40*r+(h-40)*1.5*r else: p = h * r return p hrs = input("Enter Hours: ") rate = input("Enter Rate: ") try: h=float(hrs) r=float(rate) except: print('Please Enter Number') quit() p = computepay(h,r) print('Pay:',p)
[ "def computepay(h,r):\n if h>40:\n p = 40*r+(h-40)*1.5*r\n else:\n p = h * r\n return p\n\nhrs = input(\"Enter Hours: \")\nrate = input(\"Enter Rate: \")\ntry:\n h=float(hrs)\n r=float(rate)\nexcept:\n print('Please Enter Number')\n quit()\np = computepay(h,r)\nprint('Pay:',p)\n", "def computepay(h, r):\n if h > 40:\n p = 40 * r + (h - 40) * 1.5 * r\n else:\n p = h * r\n return p\n\n\nhrs = input('Enter Hours: ')\nrate = input('Enter Rate: ')\ntry:\n h = float(hrs)\n r = float(rate)\nexcept:\n print('Please Enter Number')\n quit()\np = computepay(h, r)\nprint('Pay:', p)\n", "def computepay(h, r):\n if h > 40:\n p = 40 * r + (h - 40) * 1.5 * r\n else:\n p = h * r\n return p\n\n\n<assignment token>\ntry:\n h = float(hrs)\n r = float(rate)\nexcept:\n print('Please Enter Number')\n quit()\n<assignment token>\nprint('Pay:', p)\n", "def computepay(h, r):\n if h > 40:\n p = 40 * r + (h - 40) * 1.5 * r\n else:\n p = h * r\n return p\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n", "<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n" ]
false
98,994
9e630e6217d8a8f80c917c3dae682b00f6337c8d
class AthleteList(list): def __init__(self, a_name, a_dob=None, a_times=[]): list.__init__([]) self.name = a_name self.dob = a_dob self.extend(a_times) def top3(self): return str(sorted(set([sanitize(each_item) for each_item in self]))[0:3]) def sanitize(time_string): """ Substitui os caracters '-' e ':' por virgula """ if '-' in time_string: splitter = '-' elif ':' in time_string: splitter = ':' else: return time_string (min, secs) = time_string.split(splitter) return (min + '.' + secs) def get_coach_data(file_name): try: with open(file_name) as ln: data = ln.readline() templ = data.strip().split(',') return (AthleteList(templ.pop(0), templ.pop(0), templ)) except IOError as err: print("Ocorreu um erro: " + err) return None james_data = get_coach_data('james2.txt') julie_data = get_coach_data('julie2.txt') mikey_data = get_coach_data('mikey2.txt') sarah_data = get_coach_data('sarah2.txt') print(james_data.name + "'s fastest times are: " + james_data.top3()) print(julie_data.name + "'s fastest times are: " + julie_data.top3()) print(mikey_data.name + "'s fastest times are: " + mikey_data.top3()) print(sarah_data.name + "'s fastest times are: " + sarah_data.top3())
[ "\nclass AthleteList(list):\n\tdef __init__(self, a_name, a_dob=None, a_times=[]):\n\t\tlist.__init__([])\n\t\tself.name = a_name\n\t\tself.dob = a_dob\n\t\tself.extend(a_times)\n\n\tdef top3(self):\n\t\treturn str(sorted(set([sanitize(each_item) for each_item in self]))[0:3])\n\ndef sanitize(time_string):\n\t\"\"\" \n\tSubstitui os caracters '-' e ':' por virgula\n\t\"\"\"\n\tif '-' in time_string:\n\t\tsplitter = '-'\n\telif ':' in time_string:\n\t\tsplitter = ':'\n\telse:\n\t\treturn time_string\n\t(min, secs) = time_string.split(splitter)\n\treturn (min + '.' + secs)\n\ndef get_coach_data(file_name):\n\ttry:\n\t\twith open(file_name) as ln:\n\t\t\tdata = ln.readline()\n\n\t\ttempl = data.strip().split(',')\n\t\treturn (AthleteList(templ.pop(0), templ.pop(0), templ))\n\n\texcept IOError as err:\n\t\tprint(\"Ocorreu um erro: \" + err)\n\t\treturn None\n\njames_data = get_coach_data('james2.txt')\njulie_data = get_coach_data('julie2.txt')\nmikey_data = get_coach_data('mikey2.txt')\nsarah_data = get_coach_data('sarah2.txt')\n\nprint(james_data.name + \"'s fastest times are: \" + james_data.top3()) \nprint(julie_data.name + \"'s fastest times are: \" + julie_data.top3()) \nprint(mikey_data.name + \"'s fastest times are: \" + mikey_data.top3()) \nprint(sarah_data.name + \"'s fastest times are: \" + sarah_data.top3())", "class AthleteList(list):\n\n def __init__(self, a_name, a_dob=None, a_times=[]):\n list.__init__([])\n self.name = a_name\n self.dob = a_dob\n self.extend(a_times)\n\n def top3(self):\n return str(sorted(set([sanitize(each_item) for each_item in self]))\n [0:3])\n\n\ndef sanitize(time_string):\n \"\"\" \n\tSubstitui os caracters '-' e ':' por virgula\n\t\"\"\"\n if '-' in time_string:\n splitter = '-'\n elif ':' in time_string:\n splitter = ':'\n else:\n return time_string\n min, secs = time_string.split(splitter)\n return min + '.' + secs\n\n\ndef get_coach_data(file_name):\n try:\n with open(file_name) as ln:\n data = ln.readline()\n templ = data.strip().split(',')\n return AthleteList(templ.pop(0), templ.pop(0), templ)\n except IOError as err:\n print('Ocorreu um erro: ' + err)\n return None\n\n\njames_data = get_coach_data('james2.txt')\njulie_data = get_coach_data('julie2.txt')\nmikey_data = get_coach_data('mikey2.txt')\nsarah_data = get_coach_data('sarah2.txt')\nprint(james_data.name + \"'s fastest times are: \" + james_data.top3())\nprint(julie_data.name + \"'s fastest times are: \" + julie_data.top3())\nprint(mikey_data.name + \"'s fastest times are: \" + mikey_data.top3())\nprint(sarah_data.name + \"'s fastest times are: \" + sarah_data.top3())\n", "class AthleteList(list):\n\n def __init__(self, a_name, a_dob=None, a_times=[]):\n list.__init__([])\n self.name = a_name\n self.dob = a_dob\n self.extend(a_times)\n\n def top3(self):\n return str(sorted(set([sanitize(each_item) for each_item in self]))\n [0:3])\n\n\ndef sanitize(time_string):\n \"\"\" \n\tSubstitui os caracters '-' e ':' por virgula\n\t\"\"\"\n if '-' in time_string:\n splitter = '-'\n elif ':' in time_string:\n splitter = ':'\n else:\n return time_string\n min, secs = time_string.split(splitter)\n return min + '.' + secs\n\n\ndef get_coach_data(file_name):\n try:\n with open(file_name) as ln:\n data = ln.readline()\n templ = data.strip().split(',')\n return AthleteList(templ.pop(0), templ.pop(0), templ)\n except IOError as err:\n print('Ocorreu um erro: ' + err)\n return None\n\n\n<assignment token>\nprint(james_data.name + \"'s fastest times are: \" + james_data.top3())\nprint(julie_data.name + \"'s fastest times are: \" + julie_data.top3())\nprint(mikey_data.name + \"'s fastest times are: \" + mikey_data.top3())\nprint(sarah_data.name + \"'s fastest times are: \" + sarah_data.top3())\n", "class AthleteList(list):\n\n def __init__(self, a_name, a_dob=None, a_times=[]):\n list.__init__([])\n self.name = a_name\n self.dob = a_dob\n self.extend(a_times)\n\n def top3(self):\n return str(sorted(set([sanitize(each_item) for each_item in self]))\n [0:3])\n\n\ndef sanitize(time_string):\n \"\"\" \n\tSubstitui os caracters '-' e ':' por virgula\n\t\"\"\"\n if '-' in time_string:\n splitter = '-'\n elif ':' in time_string:\n splitter = ':'\n else:\n return time_string\n min, secs = time_string.split(splitter)\n return min + '.' + secs\n\n\ndef get_coach_data(file_name):\n try:\n with open(file_name) as ln:\n data = ln.readline()\n templ = data.strip().split(',')\n return AthleteList(templ.pop(0), templ.pop(0), templ)\n except IOError as err:\n print('Ocorreu um erro: ' + err)\n return None\n\n\n<assignment token>\n<code token>\n", "class AthleteList(list):\n\n def __init__(self, a_name, a_dob=None, a_times=[]):\n list.__init__([])\n self.name = a_name\n self.dob = a_dob\n self.extend(a_times)\n\n def top3(self):\n return str(sorted(set([sanitize(each_item) for each_item in self]))\n [0:3])\n\n\n<function token>\n\n\ndef get_coach_data(file_name):\n try:\n with open(file_name) as ln:\n data = ln.readline()\n templ = data.strip().split(',')\n return AthleteList(templ.pop(0), templ.pop(0), templ)\n except IOError as err:\n print('Ocorreu um erro: ' + err)\n return None\n\n\n<assignment token>\n<code token>\n", "class AthleteList(list):\n\n def __init__(self, a_name, a_dob=None, a_times=[]):\n list.__init__([])\n self.name = a_name\n self.dob = a_dob\n self.extend(a_times)\n\n def top3(self):\n return str(sorted(set([sanitize(each_item) for each_item in self]))\n [0:3])\n\n\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "class AthleteList(list):\n <function token>\n\n def top3(self):\n return str(sorted(set([sanitize(each_item) for each_item in self]))\n [0:3])\n\n\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "class AthleteList(list):\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n<assignment token>\n<code token>\n", "<class token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n" ]
false
98,995
56ae0cfb2295b7a2ba4dfaa7b8e19f63f07ad5c0
import time import random import noun_util from irc.bot import SingleServerIRCBot MESSAGE_THRESHOLD = 1 class GelnarBot(SingleServerIRCBot): last_msg_time = time.time() def __init__(self, channel, nickname, server, port=6667): super(GelnarBot, self).__init__([(server, port)], nickname, nickname) self.channel = channel def on_nicknameinuse(self, c, e): c.nick(c.get_nickname() + "_") def on_welcome(self, c, e): c.join(self.channel) def on_privmsg(self, c, e): nick = e.source.split('!')[0] message = e.arguments[0] self.do_command(c, nick, nick, message, True) def on_pubmsg(self, c, e): curr_time = time.time() if self.last_msg_time + MESSAGE_THRESHOLD >= curr_time: return self.last_msg_time = curr_time my_nick = self.connection.get_nickname() nick = e.source.split('!')[0] message = e.arguments[0] at_me = my_nick in message self.do_command(c, e.target, nick, message, at_me) def do_command(self, c, target, nick, message, at_me): do = any([ at_me, do_chance(), nick.startswith('earlbot'), ]) if do: nouns = noun_util.get_contained_nouns(message) if len(nouns) > 0: # Normall want the last noun nouns = sorted(nouns, key=lambda x: len(x)) noun = nouns[-1] else: noun = noun_util.get_noun() article = noun_util.get_article(noun) msg = "{}: You're {} {}".format(nick, article, noun) c.privmsg(target, msg) def do_chance(): return random.random() > 0.8
[ "import time\nimport random\n\nimport noun_util\n\nfrom irc.bot import SingleServerIRCBot\n\nMESSAGE_THRESHOLD = 1\n\nclass GelnarBot(SingleServerIRCBot):\n\n\tlast_msg_time = time.time()\n\n\tdef __init__(self, channel, nickname, server, port=6667):\n\t\tsuper(GelnarBot, self).__init__([(server, port)], nickname, nickname)\n\t\tself.channel = channel\n\n\tdef on_nicknameinuse(self, c, e):\n\t\tc.nick(c.get_nickname() + \"_\")\n\n\tdef on_welcome(self, c, e):\n\t\tc.join(self.channel)\n\n\tdef on_privmsg(self, c, e):\n\t\tnick = e.source.split('!')[0]\n\t\tmessage = e.arguments[0]\n\t\tself.do_command(c, nick, nick, message, True)\n\n\tdef on_pubmsg(self, c, e):\n\t\tcurr_time = time.time()\n\n\t\tif self.last_msg_time + MESSAGE_THRESHOLD >= curr_time:\n\t\t\treturn\n\n\t\tself.last_msg_time = curr_time\n\n\t\tmy_nick = self.connection.get_nickname()\n\t\tnick = e.source.split('!')[0]\n\t\tmessage = e.arguments[0]\n\n\t\tat_me = my_nick in message\n\n\t\tself.do_command(c, e.target, nick, message, at_me)\n\n\tdef do_command(self, c, target, nick, message, at_me):\n\n\t\tdo = any([\n\t\t\tat_me,\n\t\t\tdo_chance(),\n\t\t\tnick.startswith('earlbot'),\n\t\t])\n\n\t\tif do:\n\t\t\tnouns = noun_util.get_contained_nouns(message)\n\t\t\tif len(nouns) > 0:\n\t\t\t\t# Normall want the last noun\n\t\t\t\tnouns = sorted(nouns, key=lambda x: len(x))\n\t\t\t\tnoun = nouns[-1]\n\t\t\telse:\n\t\t\t\tnoun = noun_util.get_noun()\n\n\t\t\tarticle = noun_util.get_article(noun)\n\t\t\tmsg = \"{}: You're {} {}\".format(nick, article, noun)\n\n\t\t\tc.privmsg(target, msg)\n\n\ndef do_chance():\n\treturn random.random() > 0.8\n", "import time\nimport random\nimport noun_util\nfrom irc.bot import SingleServerIRCBot\nMESSAGE_THRESHOLD = 1\n\n\nclass GelnarBot(SingleServerIRCBot):\n last_msg_time = time.time()\n\n def __init__(self, channel, nickname, server, port=6667):\n super(GelnarBot, self).__init__([(server, port)], nickname, nickname)\n self.channel = channel\n\n def on_nicknameinuse(self, c, e):\n c.nick(c.get_nickname() + '_')\n\n def on_welcome(self, c, e):\n c.join(self.channel)\n\n def on_privmsg(self, c, e):\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n self.do_command(c, nick, nick, message, True)\n\n def on_pubmsg(self, c, e):\n curr_time = time.time()\n if self.last_msg_time + MESSAGE_THRESHOLD >= curr_time:\n return\n self.last_msg_time = curr_time\n my_nick = self.connection.get_nickname()\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n at_me = my_nick in message\n self.do_command(c, e.target, nick, message, at_me)\n\n def do_command(self, c, target, nick, message, at_me):\n do = any([at_me, do_chance(), nick.startswith('earlbot')])\n if do:\n nouns = noun_util.get_contained_nouns(message)\n if len(nouns) > 0:\n nouns = sorted(nouns, key=lambda x: len(x))\n noun = nouns[-1]\n else:\n noun = noun_util.get_noun()\n article = noun_util.get_article(noun)\n msg = \"{}: You're {} {}\".format(nick, article, noun)\n c.privmsg(target, msg)\n\n\ndef do_chance():\n return random.random() > 0.8\n", "<import token>\nMESSAGE_THRESHOLD = 1\n\n\nclass GelnarBot(SingleServerIRCBot):\n last_msg_time = time.time()\n\n def __init__(self, channel, nickname, server, port=6667):\n super(GelnarBot, self).__init__([(server, port)], nickname, nickname)\n self.channel = channel\n\n def on_nicknameinuse(self, c, e):\n c.nick(c.get_nickname() + '_')\n\n def on_welcome(self, c, e):\n c.join(self.channel)\n\n def on_privmsg(self, c, e):\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n self.do_command(c, nick, nick, message, True)\n\n def on_pubmsg(self, c, e):\n curr_time = time.time()\n if self.last_msg_time + MESSAGE_THRESHOLD >= curr_time:\n return\n self.last_msg_time = curr_time\n my_nick = self.connection.get_nickname()\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n at_me = my_nick in message\n self.do_command(c, e.target, nick, message, at_me)\n\n def do_command(self, c, target, nick, message, at_me):\n do = any([at_me, do_chance(), nick.startswith('earlbot')])\n if do:\n nouns = noun_util.get_contained_nouns(message)\n if len(nouns) > 0:\n nouns = sorted(nouns, key=lambda x: len(x))\n noun = nouns[-1]\n else:\n noun = noun_util.get_noun()\n article = noun_util.get_article(noun)\n msg = \"{}: You're {} {}\".format(nick, article, noun)\n c.privmsg(target, msg)\n\n\ndef do_chance():\n return random.random() > 0.8\n", "<import token>\n<assignment token>\n\n\nclass GelnarBot(SingleServerIRCBot):\n last_msg_time = time.time()\n\n def __init__(self, channel, nickname, server, port=6667):\n super(GelnarBot, self).__init__([(server, port)], nickname, nickname)\n self.channel = channel\n\n def on_nicknameinuse(self, c, e):\n c.nick(c.get_nickname() + '_')\n\n def on_welcome(self, c, e):\n c.join(self.channel)\n\n def on_privmsg(self, c, e):\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n self.do_command(c, nick, nick, message, True)\n\n def on_pubmsg(self, c, e):\n curr_time = time.time()\n if self.last_msg_time + MESSAGE_THRESHOLD >= curr_time:\n return\n self.last_msg_time = curr_time\n my_nick = self.connection.get_nickname()\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n at_me = my_nick in message\n self.do_command(c, e.target, nick, message, at_me)\n\n def do_command(self, c, target, nick, message, at_me):\n do = any([at_me, do_chance(), nick.startswith('earlbot')])\n if do:\n nouns = noun_util.get_contained_nouns(message)\n if len(nouns) > 0:\n nouns = sorted(nouns, key=lambda x: len(x))\n noun = nouns[-1]\n else:\n noun = noun_util.get_noun()\n article = noun_util.get_article(noun)\n msg = \"{}: You're {} {}\".format(nick, article, noun)\n c.privmsg(target, msg)\n\n\ndef do_chance():\n return random.random() > 0.8\n", "<import token>\n<assignment token>\n\n\nclass GelnarBot(SingleServerIRCBot):\n last_msg_time = time.time()\n\n def __init__(self, channel, nickname, server, port=6667):\n super(GelnarBot, self).__init__([(server, port)], nickname, nickname)\n self.channel = channel\n\n def on_nicknameinuse(self, c, e):\n c.nick(c.get_nickname() + '_')\n\n def on_welcome(self, c, e):\n c.join(self.channel)\n\n def on_privmsg(self, c, e):\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n self.do_command(c, nick, nick, message, True)\n\n def on_pubmsg(self, c, e):\n curr_time = time.time()\n if self.last_msg_time + MESSAGE_THRESHOLD >= curr_time:\n return\n self.last_msg_time = curr_time\n my_nick = self.connection.get_nickname()\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n at_me = my_nick in message\n self.do_command(c, e.target, nick, message, at_me)\n\n def do_command(self, c, target, nick, message, at_me):\n do = any([at_me, do_chance(), nick.startswith('earlbot')])\n if do:\n nouns = noun_util.get_contained_nouns(message)\n if len(nouns) > 0:\n nouns = sorted(nouns, key=lambda x: len(x))\n noun = nouns[-1]\n else:\n noun = noun_util.get_noun()\n article = noun_util.get_article(noun)\n msg = \"{}: You're {} {}\".format(nick, article, noun)\n c.privmsg(target, msg)\n\n\n<function token>\n", "<import token>\n<assignment token>\n\n\nclass GelnarBot(SingleServerIRCBot):\n <assignment token>\n\n def __init__(self, channel, nickname, server, port=6667):\n super(GelnarBot, self).__init__([(server, port)], nickname, nickname)\n self.channel = channel\n\n def on_nicknameinuse(self, c, e):\n c.nick(c.get_nickname() + '_')\n\n def on_welcome(self, c, e):\n c.join(self.channel)\n\n def on_privmsg(self, c, e):\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n self.do_command(c, nick, nick, message, True)\n\n def on_pubmsg(self, c, e):\n curr_time = time.time()\n if self.last_msg_time + MESSAGE_THRESHOLD >= curr_time:\n return\n self.last_msg_time = curr_time\n my_nick = self.connection.get_nickname()\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n at_me = my_nick in message\n self.do_command(c, e.target, nick, message, at_me)\n\n def do_command(self, c, target, nick, message, at_me):\n do = any([at_me, do_chance(), nick.startswith('earlbot')])\n if do:\n nouns = noun_util.get_contained_nouns(message)\n if len(nouns) > 0:\n nouns = sorted(nouns, key=lambda x: len(x))\n noun = nouns[-1]\n else:\n noun = noun_util.get_noun()\n article = noun_util.get_article(noun)\n msg = \"{}: You're {} {}\".format(nick, article, noun)\n c.privmsg(target, msg)\n\n\n<function token>\n", "<import token>\n<assignment token>\n\n\nclass GelnarBot(SingleServerIRCBot):\n <assignment token>\n\n def __init__(self, channel, nickname, server, port=6667):\n super(GelnarBot, self).__init__([(server, port)], nickname, nickname)\n self.channel = channel\n\n def on_nicknameinuse(self, c, e):\n c.nick(c.get_nickname() + '_')\n <function token>\n\n def on_privmsg(self, c, e):\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n self.do_command(c, nick, nick, message, True)\n\n def on_pubmsg(self, c, e):\n curr_time = time.time()\n if self.last_msg_time + MESSAGE_THRESHOLD >= curr_time:\n return\n self.last_msg_time = curr_time\n my_nick = self.connection.get_nickname()\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n at_me = my_nick in message\n self.do_command(c, e.target, nick, message, at_me)\n\n def do_command(self, c, target, nick, message, at_me):\n do = any([at_me, do_chance(), nick.startswith('earlbot')])\n if do:\n nouns = noun_util.get_contained_nouns(message)\n if len(nouns) > 0:\n nouns = sorted(nouns, key=lambda x: len(x))\n noun = nouns[-1]\n else:\n noun = noun_util.get_noun()\n article = noun_util.get_article(noun)\n msg = \"{}: You're {} {}\".format(nick, article, noun)\n c.privmsg(target, msg)\n\n\n<function token>\n", "<import token>\n<assignment token>\n\n\nclass GelnarBot(SingleServerIRCBot):\n <assignment token>\n <function token>\n\n def on_nicknameinuse(self, c, e):\n c.nick(c.get_nickname() + '_')\n <function token>\n\n def on_privmsg(self, c, e):\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n self.do_command(c, nick, nick, message, True)\n\n def on_pubmsg(self, c, e):\n curr_time = time.time()\n if self.last_msg_time + MESSAGE_THRESHOLD >= curr_time:\n return\n self.last_msg_time = curr_time\n my_nick = self.connection.get_nickname()\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n at_me = my_nick in message\n self.do_command(c, e.target, nick, message, at_me)\n\n def do_command(self, c, target, nick, message, at_me):\n do = any([at_me, do_chance(), nick.startswith('earlbot')])\n if do:\n nouns = noun_util.get_contained_nouns(message)\n if len(nouns) > 0:\n nouns = sorted(nouns, key=lambda x: len(x))\n noun = nouns[-1]\n else:\n noun = noun_util.get_noun()\n article = noun_util.get_article(noun)\n msg = \"{}: You're {} {}\".format(nick, article, noun)\n c.privmsg(target, msg)\n\n\n<function token>\n", "<import token>\n<assignment token>\n\n\nclass GelnarBot(SingleServerIRCBot):\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n def on_privmsg(self, c, e):\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n self.do_command(c, nick, nick, message, True)\n\n def on_pubmsg(self, c, e):\n curr_time = time.time()\n if self.last_msg_time + MESSAGE_THRESHOLD >= curr_time:\n return\n self.last_msg_time = curr_time\n my_nick = self.connection.get_nickname()\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n at_me = my_nick in message\n self.do_command(c, e.target, nick, message, at_me)\n\n def do_command(self, c, target, nick, message, at_me):\n do = any([at_me, do_chance(), nick.startswith('earlbot')])\n if do:\n nouns = noun_util.get_contained_nouns(message)\n if len(nouns) > 0:\n nouns = sorted(nouns, key=lambda x: len(x))\n noun = nouns[-1]\n else:\n noun = noun_util.get_noun()\n article = noun_util.get_article(noun)\n msg = \"{}: You're {} {}\".format(nick, article, noun)\n c.privmsg(target, msg)\n\n\n<function token>\n", "<import token>\n<assignment token>\n\n\nclass GelnarBot(SingleServerIRCBot):\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n def on_privmsg(self, c, e):\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n self.do_command(c, nick, nick, message, True)\n\n def on_pubmsg(self, c, e):\n curr_time = time.time()\n if self.last_msg_time + MESSAGE_THRESHOLD >= curr_time:\n return\n self.last_msg_time = curr_time\n my_nick = self.connection.get_nickname()\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n at_me = my_nick in message\n self.do_command(c, e.target, nick, message, at_me)\n <function token>\n\n\n<function token>\n", "<import token>\n<assignment token>\n\n\nclass GelnarBot(SingleServerIRCBot):\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n def on_privmsg(self, c, e):\n nick = e.source.split('!')[0]\n message = e.arguments[0]\n self.do_command(c, nick, nick, message, True)\n <function token>\n <function token>\n\n\n<function token>\n", "<import token>\n<assignment token>\n\n\nclass GelnarBot(SingleServerIRCBot):\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n", "<import token>\n<assignment token>\n<class token>\n<function token>\n" ]
false
98,996
4c22f335ce9c65af682f6830758fb00539ebbd61
''' Created on Nov 2, 2012 @author: jluker ''' import re import sys from flask import g from flask.ext.solrquery import solr, SearchRequest from config import config from .forms import ApiQueryForm from api_errors import ApiPermissionError,ApiSolrException __all__ = ['ApiSearchRequest','ApiRecordRequest'] class ApiSearchRequest(object): def __init__(self, request_vals): self.form = ApiQueryForm(request_vals, csrf_enabled=False) self.user = g.api_user def validate(self): valid = self.form.validate() perms_ok = self.user.check_permissions(self.form) return valid and perms_ok def input_errors(self): return self.form.errors def _create_search_request(self): req = SearchRequest(self.form.q.data) if self.form.fl.data: fields = list(set(self.form.fl.data.split(',') + config.SOLR_SEARCH_REQUIRED_FIELDS)) else: fields = self.user.get_allowed_fields() req.set_fields(fields) if self.form.rows.data: req.set_rows(self.form.rows.data) else: req.set_rows(config.SEARCH_DEFAULT_ROWS) if self.form.start.data: req.set_start(self.form.start.data) if self.form.sort.data: sort_tokens = re.split('[\s,]+', self.form.sort.data) # tupleize the list into field,direction pairs sort_tokens = zip(*[iter(sort_tokens)] * 2) for sort, direction in sort_tokens: sort_field = config.SEARCH_SORT_OPTIONS_MAP[sort][0] req.add_sort(sort_field, direction) else: for field, direction in config.API_SOLR_DEFAULT_SORT: req.add_sort(field, direction) if len(self.form.facet.data): for facet in self.form.facet.data: facet = facet.split(':') api_facet_name = facet[0] solr_field_name = config.API_SOLR_FACET_FIELDS[api_facet_name] if api_facet_name != solr_field_name: # translate api facet name to solr field name in request *and* response # see http://wiki.apache.org/solr/SimpleFacetParameters#key_:_Changing_the_output_key output_key = api_facet_name facet[0] = solr_field_name else: output_key = None req.add_facet(*facet, output_key=output_key) if len(self.form.hl.data): for hl in self.form.hl.data: if ':' in hl: req.add_highlight(*hl.split(':')) else: req.add_highlight(hl) if len(self.form.filter.data): for fltr in self.form.filter.data: req.add_filter_query(fltr) if self.form.hlq.data: req.set_hlq(self.form.hlq.data) return req def execute(self): req = self._create_search_request() solr.set_defaults(req) try: resp = solr.get_response(req) except Exception, e: from adsabs.core.solr import AdsabsSolrqueryException raise AdsabsSolrqueryException("Error communicating with search service", sys.exc_info()) if resp.is_error(): raise ApiSolrException(resp.get_error()) resp.add_meta('api-version', g.api_version) self.resp = resp return self.resp def query(self): return self.form.q.data class ApiRecordRequest(ApiSearchRequest): def __init__(self, identifier, request_vals): self.record_id = identifier ApiSearchRequest.__init__(self, request_vals) def _create_search_request(self): q = "identifier:%s OR doi:%s" % (self.record_id, self.record_id) req = SearchRequest(q, rows=1) if self.form.fl.data: fields = list(set(self.form.fl.data.split(',') + config.SOLR_SEARCH_REQUIRED_FIELDS)) req.set_fields(fields) else: req.set_fields(self.user.get_allowed_fields()) if len(self.form.hl.data): for hl in self.form.hl.data: hl = hl.split(':') req.add_highlight(*hl) if self.form.hlq.data: req.set_hlq(self.form.hlq.data) return req
[ "'''\nCreated on Nov 2, 2012\n\n@author: jluker\n'''\nimport re\nimport sys\nfrom flask import g \nfrom flask.ext.solrquery import solr, SearchRequest \n\nfrom config import config\nfrom .forms import ApiQueryForm\nfrom api_errors import ApiPermissionError,ApiSolrException\n \n__all__ = ['ApiSearchRequest','ApiRecordRequest']\n\nclass ApiSearchRequest(object):\n \n def __init__(self, request_vals):\n self.form = ApiQueryForm(request_vals, csrf_enabled=False)\n self.user = g.api_user\n \n def validate(self):\n valid = self.form.validate()\n perms_ok = self.user.check_permissions(self.form)\n return valid and perms_ok\n \n def input_errors(self):\n return self.form.errors\n \n def _create_search_request(self):\n req = SearchRequest(self.form.q.data)\n \n if self.form.fl.data:\n fields = list(set(self.form.fl.data.split(',') + config.SOLR_SEARCH_REQUIRED_FIELDS))\n else:\n fields = self.user.get_allowed_fields()\n\n req.set_fields(fields)\n \n if self.form.rows.data:\n req.set_rows(self.form.rows.data)\n else:\n req.set_rows(config.SEARCH_DEFAULT_ROWS)\n \n if self.form.start.data:\n req.set_start(self.form.start.data)\n \n if self.form.sort.data:\n sort_tokens = re.split('[\\s,]+', self.form.sort.data)\n # tupleize the list into field,direction pairs\n sort_tokens = zip(*[iter(sort_tokens)] * 2)\n for sort, direction in sort_tokens:\n sort_field = config.SEARCH_SORT_OPTIONS_MAP[sort][0]\n req.add_sort(sort_field, direction)\n else:\n for field, direction in config.API_SOLR_DEFAULT_SORT:\n req.add_sort(field, direction)\n \n if len(self.form.facet.data):\n for facet in self.form.facet.data:\n facet = facet.split(':')\n api_facet_name = facet[0]\n solr_field_name = config.API_SOLR_FACET_FIELDS[api_facet_name]\n if api_facet_name != solr_field_name:\n # translate api facet name to solr field name in request *and* response\n # see http://wiki.apache.org/solr/SimpleFacetParameters#key_:_Changing_the_output_key\n output_key = api_facet_name\n facet[0] = solr_field_name\n else:\n output_key = None\n req.add_facet(*facet, output_key=output_key)\n \n if len(self.form.hl.data):\n for hl in self.form.hl.data:\n if ':' in hl:\n req.add_highlight(*hl.split(':'))\n else:\n req.add_highlight(hl)\n \n if len(self.form.filter.data):\n for fltr in self.form.filter.data:\n req.add_filter_query(fltr)\n \n if self.form.hlq.data:\n req.set_hlq(self.form.hlq.data)\n \n return req\n \n def execute(self):\n req = self._create_search_request()\n solr.set_defaults(req)\n \n try:\n resp = solr.get_response(req)\n except Exception, e:\n from adsabs.core.solr import AdsabsSolrqueryException\n raise AdsabsSolrqueryException(\"Error communicating with search service\", sys.exc_info())\n \n if resp.is_error():\n raise ApiSolrException(resp.get_error())\n \n resp.add_meta('api-version', g.api_version)\n self.resp = resp\n return self.resp\n\n def query(self):\n return self.form.q.data\n \nclass ApiRecordRequest(ApiSearchRequest):\n \n def __init__(self, identifier, request_vals):\n self.record_id = identifier\n ApiSearchRequest.__init__(self, request_vals)\n \n def _create_search_request(self):\n q = \"identifier:%s OR doi:%s\" % (self.record_id, self.record_id)\n req = SearchRequest(q, rows=1)\n \n if self.form.fl.data:\n fields = list(set(self.form.fl.data.split(',') + config.SOLR_SEARCH_REQUIRED_FIELDS))\n req.set_fields(fields)\n else:\n req.set_fields(self.user.get_allowed_fields())\n \n if len(self.form.hl.data):\n for hl in self.form.hl.data:\n hl = hl.split(':')\n req.add_highlight(*hl)\n \n if self.form.hlq.data:\n req.set_hlq(self.form.hlq.data)\n \n return req\n \n" ]
true
98,997
e5e932fa1351153749a38e7ab8076b86c89bf253
# Generated by Django 3.0.7 on 2020-06-28 10:48 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('ecard_app', '0001_initial'), ] operations = [ migrations.AlterField( model_name='quotes', name='quote', field=models.TextField(), ), ]
[ "# Generated by Django 3.0.7 on 2020-06-28 10:48\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('ecard_app', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='quotes',\n name='quote',\n field=models.TextField(),\n ),\n ]\n", "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('ecard_app', '0001_initial')]\n operations = [migrations.AlterField(model_name='quotes', name='quote',\n field=models.TextField())]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('ecard_app', '0001_initial')]\n operations = [migrations.AlterField(model_name='quotes', name='quote',\n field=models.TextField())]\n", "<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n", "<import token>\n<class token>\n" ]
false
98,998
80356963436b642db64dedb31534cd8c623f7754
from mysql.connector import MySQLConnection, Error def insert_book(title): try: conn = mysql.connector.connect(host='localhost', database='test', user='root', password='') if conn.is_connected(): print('Connected to MySQL database') except Error as e: print(e) query = "INSERT INTO xyz " \ "VALUES(%s)" args = (title) try: db_config = read_db_config() conn = MySQLConnection(**db_config) cursor = conn.cursor() cursor.execute(query, args) if cursor.lastrowid: print('last insert id', cursor.lastrowid) else: print('last insert id not found') conn.commit() except Error as error: print(error) finally: cursor.close() conn.close() def main(): insert_book(8) if __name__ == '__main__': main()
[ "from mysql.connector import MySQLConnection, Error\r\n\r\ndef insert_book(title):\r\n try:\r\n conn = mysql.connector.connect(host='localhost',\r\n database='test',\r\n user='root',\r\n password='')\r\n if conn.is_connected():\r\n print('Connected to MySQL database')\r\n \r\n except Error as e:\r\n print(e)\r\n \r\n query = \"INSERT INTO xyz \" \\\r\n \"VALUES(%s)\"\r\n args = (title)\r\n \r\n try:\r\n db_config = read_db_config()\r\n conn = MySQLConnection(**db_config)\r\n \r\n cursor = conn.cursor()\r\n cursor.execute(query, args)\r\n \r\n if cursor.lastrowid:\r\n print('last insert id', cursor.lastrowid)\r\n else:\r\n print('last insert id not found')\r\n \r\n conn.commit()\r\n except Error as error:\r\n print(error)\r\n \r\n finally:\r\n cursor.close()\r\n conn.close()\r\n \r\ndef main():\r\n insert_book(8)\r\n \r\nif __name__ == '__main__':\r\n main()\r\n", "from mysql.connector import MySQLConnection, Error\n\n\ndef insert_book(title):\n try:\n conn = mysql.connector.connect(host='localhost', database='test',\n user='root', password='')\n if conn.is_connected():\n print('Connected to MySQL database')\n except Error as e:\n print(e)\n query = 'INSERT INTO xyz VALUES(%s)'\n args = title\n try:\n db_config = read_db_config()\n conn = MySQLConnection(**db_config)\n cursor = conn.cursor()\n cursor.execute(query, args)\n if cursor.lastrowid:\n print('last insert id', cursor.lastrowid)\n else:\n print('last insert id not found')\n conn.commit()\n except Error as error:\n print(error)\n finally:\n cursor.close()\n conn.close()\n\n\ndef main():\n insert_book(8)\n\n\nif __name__ == '__main__':\n main()\n", "<import token>\n\n\ndef insert_book(title):\n try:\n conn = mysql.connector.connect(host='localhost', database='test',\n user='root', password='')\n if conn.is_connected():\n print('Connected to MySQL database')\n except Error as e:\n print(e)\n query = 'INSERT INTO xyz VALUES(%s)'\n args = title\n try:\n db_config = read_db_config()\n conn = MySQLConnection(**db_config)\n cursor = conn.cursor()\n cursor.execute(query, args)\n if cursor.lastrowid:\n print('last insert id', cursor.lastrowid)\n else:\n print('last insert id not found')\n conn.commit()\n except Error as error:\n print(error)\n finally:\n cursor.close()\n conn.close()\n\n\ndef main():\n insert_book(8)\n\n\nif __name__ == '__main__':\n main()\n", "<import token>\n\n\ndef insert_book(title):\n try:\n conn = mysql.connector.connect(host='localhost', database='test',\n user='root', password='')\n if conn.is_connected():\n print('Connected to MySQL database')\n except Error as e:\n print(e)\n query = 'INSERT INTO xyz VALUES(%s)'\n args = title\n try:\n db_config = read_db_config()\n conn = MySQLConnection(**db_config)\n cursor = conn.cursor()\n cursor.execute(query, args)\n if cursor.lastrowid:\n print('last insert id', cursor.lastrowid)\n else:\n print('last insert id not found')\n conn.commit()\n except Error as error:\n print(error)\n finally:\n cursor.close()\n conn.close()\n\n\ndef main():\n insert_book(8)\n\n\n<code token>\n", "<import token>\n\n\ndef insert_book(title):\n try:\n conn = mysql.connector.connect(host='localhost', database='test',\n user='root', password='')\n if conn.is_connected():\n print('Connected to MySQL database')\n except Error as e:\n print(e)\n query = 'INSERT INTO xyz VALUES(%s)'\n args = title\n try:\n db_config = read_db_config()\n conn = MySQLConnection(**db_config)\n cursor = conn.cursor()\n cursor.execute(query, args)\n if cursor.lastrowid:\n print('last insert id', cursor.lastrowid)\n else:\n print('last insert id not found')\n conn.commit()\n except Error as error:\n print(error)\n finally:\n cursor.close()\n conn.close()\n\n\n<function token>\n<code token>\n", "<import token>\n<function token>\n<function token>\n<code token>\n" ]
false
98,999
b372aded5f2b9d99a626799b927c9731fa0d521a
# This monkey-patches scons' CacheDir to synchronize the cache # to an s3 bucket. # # To enable it: # # - ensure python packages are installed: boto3, humanize # - create a site_init.py file in site_scons containing 'import s3_cache' # - setup ~/.aws/credentials with an access key # - set the SCONS_CACHE_S3_BUCKET environment variable to a bucket name # # The --cache-debug=- flag is recommended to see s3 cache operations. import boto3 import botocore.exceptions import humanize import os import os.path import stat import SCons.Action import SCons.CacheDir import SCons.Errors # fail early if SCONS_CACHE_S3_BUCKET is not set S3_BUCKET = os.environ['SCONS_CACHE_S3_BUCKET'] s3_client = boto3.client('s3') def make_cache_dir(fs, cachedir): if not fs.isdir(cachedir): try: fs.makedirs(cachedir) except EnvironmentError: # We may have received an exception because another process # has beaten us creating the directory. if not fs.isdir(cachedir): raise SCons.Errors.EnvironmentError("Unable to create cache dir") def CacheRetrieveFunc(target, source, env): t = target[0] fs = t.fs cd = env.get_CacheDir() cachedir, cachefile = cd.cachepath(t) if not fs.exists(cachefile): cd.CacheDebug('CacheRetrieve(%s): %s not in disk cache\n', t, cachefile) try: # Try to download the file from S3 into the disk cache sig = os.path.basename(cachefile) head = s3_client.head_object(Bucket=S3_BUCKET, Key=sig) download_size = humanize.naturalsize(head['ContentLength'], gnu=True) cd.CacheDebug('CacheRetrieve(%%s): retrieving %%s from s3 (%s)\n' % download_size, t, cachefile) make_cache_dir(fs, cachedir) # no race here: boto3 downloads to a temp file and then links into place s3_client.download_file(S3_BUCKET, sig, cachefile) except botocore.exceptions.ClientError as e: if int(e.response['Error']['Code']) == 404: cd.CacheDebug('CacheRetrieve(%s): %s not in s3\n', t, cachefile) return 1 else: raise SCons.Errors.EnvironmentError('boto exception %s' % e) cd.CacheDebug('CacheRetrieve(%s): retrieving %s from disk cache\n', t, cachefile) if SCons.Action.execute_actions: if fs.islink(cachefile): fs.symlink(fs.readlink(cachefile), t.path) else: env.copy_from_cache(cachefile, t.path) st = fs.stat(cachefile) fs.chmod(t.path, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) return 0 SCons.CacheDir.CacheRetrieve = SCons.Action.Action(CacheRetrieveFunc, None) SCons.CacheDir.CacheRetrieveSilent = SCons.Action.Action(CacheRetrieveFunc, None) def CachePushFunc(target, source, env): t = target[0] if t.nocache: return fs = t.fs cd = env.get_CacheDir() cachedir, cachefile = cd.cachepath(t) if fs.exists(cachefile): # Don't bother copying it if it's already there. Note that # usually this "shouldn't happen" because if the file already # existed in cache, we'd have retrieved the file from there, # not built it. This can happen, though, in a race, if some # other person running the same build pushes their copy to # the cache after we decide we need to build it but before our # build completes. cd.CacheDebug('CachePush(%s): %s already exists in disk cache\n', t, cachefile) return cd.CacheDebug('CachePush(%s): pushing %s to disk cache\n', t, cachefile) tempfile = cachefile+'.tmp'+str(os.getpid()) make_cache_dir(fs, cachedir) # Unlike the original CachePushFunc, we want any error in the # following to halt the build. This is to ensure that every # layer is pushed to the shared cache. if fs.islink(t.path): fs.symlink(fs.readlink(t.path), tempfile) else: fs.copy2(t.path, tempfile) if t.__dict__.get('noshare', False): cd.CacheDebug('CachePush(%s): not pushing %s to s3 (noshare)\n', t, cachefile) else: # Upload the file to S3 before linking it into place tempfile_size = humanize.naturalsize(fs.getsize(tempfile), gnu=True) cache_key = os.path.basename(cachefile) cd.CacheDebug('CachePush(%%s): pushing %%s to s3 (%s)\n' % tempfile_size, t, cachefile) try: s3_client.upload_file(tempfile, S3_BUCKET, cache_key, ExtraArgs={'Metadata': {'VM-Layer': str(t)}}) except botocore.exceptions.ClientError as e: # scons doesn't print errors raised here, but it does stop print e raise SCons.Errors.EnvironmentError('boto exception %s' % e) fs.rename(tempfile, cachefile) st = fs.stat(t.path) fs.chmod(cachefile, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) SCons.CacheDir.CachePush = SCons.Action.Action(CachePushFunc, None)
[ "\n# This monkey-patches scons' CacheDir to synchronize the cache\n# to an s3 bucket.\n#\n# To enable it:\n#\n# - ensure python packages are installed: boto3, humanize\n# - create a site_init.py file in site_scons containing 'import s3_cache'\n# - setup ~/.aws/credentials with an access key\n# - set the SCONS_CACHE_S3_BUCKET environment variable to a bucket name\n#\n# The --cache-debug=- flag is recommended to see s3 cache operations.\n\nimport boto3\nimport botocore.exceptions\nimport humanize\nimport os\nimport os.path\nimport stat\n\nimport SCons.Action\nimport SCons.CacheDir\nimport SCons.Errors\n\n# fail early if SCONS_CACHE_S3_BUCKET is not set\nS3_BUCKET = os.environ['SCONS_CACHE_S3_BUCKET']\n\ns3_client = boto3.client('s3')\n\ndef make_cache_dir(fs, cachedir):\n if not fs.isdir(cachedir):\n try:\n fs.makedirs(cachedir)\n except EnvironmentError:\n # We may have received an exception because another process\n # has beaten us creating the directory.\n if not fs.isdir(cachedir):\n raise SCons.Errors.EnvironmentError(\"Unable to create cache dir\")\n\n\ndef CacheRetrieveFunc(target, source, env):\n t = target[0]\n fs = t.fs\n cd = env.get_CacheDir()\n cachedir, cachefile = cd.cachepath(t)\n if not fs.exists(cachefile):\n cd.CacheDebug('CacheRetrieve(%s): %s not in disk cache\\n', t, cachefile)\n try:\n # Try to download the file from S3 into the disk cache\n sig = os.path.basename(cachefile)\n head = s3_client.head_object(Bucket=S3_BUCKET, Key=sig)\n download_size = humanize.naturalsize(head['ContentLength'], gnu=True)\n cd.CacheDebug('CacheRetrieve(%%s): retrieving %%s from s3 (%s)\\n' % download_size,\n t, cachefile)\n make_cache_dir(fs, cachedir)\n # no race here: boto3 downloads to a temp file and then links into place\n s3_client.download_file(S3_BUCKET, sig, cachefile)\n except botocore.exceptions.ClientError as e:\n if int(e.response['Error']['Code']) == 404:\n cd.CacheDebug('CacheRetrieve(%s): %s not in s3\\n', t, cachefile)\n return 1\n else:\n raise SCons.Errors.EnvironmentError('boto exception %s' % e)\n\n cd.CacheDebug('CacheRetrieve(%s): retrieving %s from disk cache\\n', t, cachefile)\n if SCons.Action.execute_actions:\n if fs.islink(cachefile):\n fs.symlink(fs.readlink(cachefile), t.path)\n else:\n env.copy_from_cache(cachefile, t.path)\n st = fs.stat(cachefile)\n fs.chmod(t.path, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)\n return 0\n\nSCons.CacheDir.CacheRetrieve = SCons.Action.Action(CacheRetrieveFunc, None)\n\nSCons.CacheDir.CacheRetrieveSilent = SCons.Action.Action(CacheRetrieveFunc, None)\n\n\ndef CachePushFunc(target, source, env):\n t = target[0]\n if t.nocache:\n return\n fs = t.fs\n cd = env.get_CacheDir()\n cachedir, cachefile = cd.cachepath(t)\n if fs.exists(cachefile):\n # Don't bother copying it if it's already there. Note that\n # usually this \"shouldn't happen\" because if the file already\n # existed in cache, we'd have retrieved the file from there,\n # not built it. This can happen, though, in a race, if some\n # other person running the same build pushes their copy to\n # the cache after we decide we need to build it but before our\n # build completes.\n cd.CacheDebug('CachePush(%s): %s already exists in disk cache\\n', t, cachefile)\n return\n\n cd.CacheDebug('CachePush(%s): pushing %s to disk cache\\n', t, cachefile)\n\n tempfile = cachefile+'.tmp'+str(os.getpid())\n\n make_cache_dir(fs, cachedir)\n\n # Unlike the original CachePushFunc, we want any error in the\n # following to halt the build. This is to ensure that every\n # layer is pushed to the shared cache.\n if fs.islink(t.path):\n fs.symlink(fs.readlink(t.path), tempfile)\n else:\n fs.copy2(t.path, tempfile)\n if t.__dict__.get('noshare', False):\n cd.CacheDebug('CachePush(%s): not pushing %s to s3 (noshare)\\n', t, cachefile)\n else:\n # Upload the file to S3 before linking it into place\n tempfile_size = humanize.naturalsize(fs.getsize(tempfile), gnu=True)\n cache_key = os.path.basename(cachefile)\n cd.CacheDebug('CachePush(%%s): pushing %%s to s3 (%s)\\n' % tempfile_size,\n t, cachefile)\n try:\n s3_client.upload_file(tempfile, S3_BUCKET, cache_key,\n ExtraArgs={'Metadata': {'VM-Layer': str(t)}})\n except botocore.exceptions.ClientError as e:\n # scons doesn't print errors raised here, but it does stop\n print e\n raise SCons.Errors.EnvironmentError('boto exception %s' % e)\n\n fs.rename(tempfile, cachefile)\n st = fs.stat(t.path)\n fs.chmod(cachefile, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)\n\nSCons.CacheDir.CachePush = SCons.Action.Action(CachePushFunc, None)\n" ]
true