Dataset columns:

  code              string  (lengths 13 – 6.09M)
  order_type        string  (2 classes)
  original_example  dict
  step_ids          list    (lengths 1 – 5)
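Each record below lists those four fields in order: code, order_type, original_example, step_ids. The `code` field of a "flexible" record appears to hold several progressively completed versions of the same program, joined by the reserved special tokens visible below; "normal" records appear to hold a single complete program. The sketch that follows is a minimal illustration of that inferred layout (the token strings are copied from this dump, but the field semantics are assumptions, not documented behavior):

# Minimal sketch, assuming records are plain dicts with the four columns above
# and that the reserved special tokens mark masked spans and version breaks
# (inferred from the examples in this dump, not a documented API).
SEP_TOKEN = "<|reserved_special_token_1|>"   # seems to separate versions
MASK_TOKEN = "<|reserved_special_token_0|>"  # seems to stand in for masked code

def code_versions(record: dict) -> list:
    if record["order_type"] == "normal":
        return [record["code"]]  # a single complete program
    # "flexible": token-joined versions, from mostly masked to complete
    return [part.replace(MASK_TOKEN, "<mask token>").strip()
            for part in record["code"].split(SEP_TOKEN)]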
code:

import os

from flask import Flask, request, jsonify, Response, abort

from sesamutils import sesam_logger, VariablesConfig
from sesamutils.flask import serve

required_env_vars = ["SUBDOMAIN"]
optional_env_vars = ["DEBUG", "LOG_LEVEL", ("API_ROOT", "zendesk.com/api/v2/tickets/")]  # Default values can be given to optional environment variables by the use of tuples

app = Flask(__name__)

logger = sesam_logger('DemoMicroservice', app=app, timestamp=True)

orders = [
    {
        'id': 1,
        'Username': u'Unjudosely',
        'Orders': u'Thinkpad',
        'TotalSum': 8000
    },
    {
        'id': 2,
        'Username': u'Wimen1979',
        'Orders': u'MacbookPro',
        'TotalSum': 12000
    },
    {
        'id': 3,
        'Username': u'Gotin1984',
        'Orders': u'Chormebook',
        'TotalSum': 10000
    }
]

@app.route('/api/orders')
def get_orders():
    return jsonify({'orders': orders})

@app.route('/api/orders/update/<int:orderID>', methods=['GET', 'PUT', 'POST', 'DELETE'])
def update_ticket(orderID):
    try:
        if request.method != 'PUT':
            abort(405)  # Check closer what Flask abort does
            logger.error(f"ConnectionError issue while fetching tickets{request.method}")
        else:
            return jsonify(orders[orderID - 1])
    except ConnectionError as e:
        logger.error(f"ConnectionError issue while fetching tickets{e}")
    except Exception as e:
        logger.error(f"Issue while fetching tickets from Zendesk {e}")

@app.route('/api/generic/<path:txt>', methods=['GET', 'PUT', 'POST', 'DELETE'])
def get_generic(txt):
    method = request.method
    if method == "POST" and request.is_json:
        returnList = []
        enteties = request.get_json()
        logger.info(type(enteties))
        for item in enteties:
            item['Hello'] = "Hello, this is a test."
            logger.info(type(item))
            returnList.append(item)
        return jsonify(returnList), 200, {"Content-Type": "application/json"}
    else:
        logger.info(f'Http method is {method}')
        return "Only JSON on POST is supported.", 500, {"Content-Type": "text/plain"}

@app.route('/api/show/config')
def get_config():
    return jsonify({'config': config})

if __name__ == "__main__":
    config = VariablesConfig(required_env_vars, optional_env_vars=optional_env_vars)
    # logger.info(str(config))
    # if not config.validate():
    #     os.sys.exit(1)

    serve(app)
order_type: normal
original_example:
{ "blob_id": "bb58b4384eaeec45be1af865012c618af05f5a0a", "index": 9667, "step-1": "<mask token>\n\n\[email protected]('/api/orders/update/<int:orderID>', methods=['GET', 'PUT',\n 'POST', 'DELETE'])\ndef update_ticket(orderID):\n try:\n if request.method != 'PUT':\n abort(405)\n logger.error(\n f'ConnectionError issue while fetching tickets{request.method}'\n )\n else:\n return jsonify(orders[orderID - 1])\n except ConnectionError as e:\n logger.error(f'ConnectionError issue while fetching tickets{e}')\n except Exception as e:\n logger.error(f'Issue while fetching tickets from Zendesk {e}')\n\n\[email protected]('/api/generic/<path:txt>', methods=['GET', 'PUT', 'POST', 'DELETE'])\ndef get_generic(txt):\n method = request.method\n if method == 'POST' and request.is_json:\n returnList = []\n enteties = request.get_json()\n logger.info(type(enteties))\n for item in enteties:\n item['Hello'] = 'Hello, this is a test.'\n logger.info(type(item))\n returnList.append(item)\n return jsonify(returnList), 200, {'Content-Type': 'application/json'}\n else:\n logger.info(f'Http method is {method}')\n return 'Only JSON on POST is supported.', 500, {'Content-Type':\n 'text/plain'}\n\n\[email protected]('/api/show/config')\ndef get_config():\n return jsonify({'config': config})\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\[email protected]('/api/orders')\ndef get_orders():\n return jsonify({'orders': orders})\n\n\[email protected]('/api/orders/update/<int:orderID>', methods=['GET', 'PUT',\n 'POST', 'DELETE'])\ndef update_ticket(orderID):\n try:\n if request.method != 'PUT':\n abort(405)\n logger.error(\n f'ConnectionError issue while fetching tickets{request.method}'\n )\n else:\n return jsonify(orders[orderID - 1])\n except ConnectionError as e:\n logger.error(f'ConnectionError issue while fetching tickets{e}')\n except Exception as e:\n logger.error(f'Issue while fetching tickets from Zendesk {e}')\n\n\[email protected]('/api/generic/<path:txt>', methods=['GET', 'PUT', 'POST', 'DELETE'])\ndef get_generic(txt):\n method = request.method\n if method == 'POST' and request.is_json:\n returnList = []\n enteties = request.get_json()\n logger.info(type(enteties))\n for item in enteties:\n item['Hello'] = 'Hello, this is a test.'\n logger.info(type(item))\n returnList.append(item)\n return jsonify(returnList), 200, {'Content-Type': 'application/json'}\n else:\n logger.info(f'Http method is {method}')\n return 'Only JSON on POST is supported.', 500, {'Content-Type':\n 'text/plain'}\n\n\[email protected]('/api/show/config')\ndef get_config():\n return jsonify({'config': config})\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\[email protected]('/api/orders')\ndef get_orders():\n return jsonify({'orders': orders})\n\n\[email protected]('/api/orders/update/<int:orderID>', methods=['GET', 'PUT',\n 'POST', 'DELETE'])\ndef update_ticket(orderID):\n try:\n if request.method != 'PUT':\n abort(405)\n logger.error(\n f'ConnectionError issue while fetching tickets{request.method}'\n )\n else:\n return jsonify(orders[orderID - 1])\n except ConnectionError as e:\n logger.error(f'ConnectionError issue while fetching tickets{e}')\n except Exception as e:\n logger.error(f'Issue while fetching tickets from Zendesk {e}')\n\n\[email protected]('/api/generic/<path:txt>', methods=['GET', 'PUT', 'POST', 'DELETE'])\ndef get_generic(txt):\n method = request.method\n if method == 'POST' and request.is_json:\n returnList = []\n enteties = request.get_json()\n logger.info(type(enteties))\n for item in enteties:\n item['Hello'] = 
'Hello, this is a test.'\n logger.info(type(item))\n returnList.append(item)\n return jsonify(returnList), 200, {'Content-Type': 'application/json'}\n else:\n logger.info(f'Http method is {method}')\n return 'Only JSON on POST is supported.', 500, {'Content-Type':\n 'text/plain'}\n\n\[email protected]('/api/show/config')\ndef get_config():\n return jsonify({'config': config})\n\n\nif __name__ == '__main__':\n config = VariablesConfig(required_env_vars, optional_env_vars=\n optional_env_vars)\n serve(app)\n", "step-4": "import os\nfrom flask import Flask, request, jsonify, Response, abort\nfrom sesamutils import sesam_logger, VariablesConfig\nfrom sesamutils.flask import serve\nrequired_env_vars = ['SUBDOMAIN']\noptional_env_vars = ['DEBUG', 'LOG_LEVEL', ('API_ROOT',\n 'zendesk.com/api/v2/tickets/')]\napp = Flask(__name__)\nlogger = sesam_logger('DemoMicroservice', app=app, timestamp=True)\norders = [{'id': 1, 'Username': u'Unjudosely', 'Orders': u'Thinkpad',\n 'TotalSum': 8000}, {'id': 2, 'Username': u'Wimen1979', 'Orders':\n u'MacbookPro', 'TotalSum': 12000}, {'id': 3, 'Username': u'Gotin1984',\n 'Orders': u'Chormebook', 'TotalSum': 10000}]\n\n\[email protected]('/api/orders')\ndef get_orders():\n return jsonify({'orders': orders})\n\n\[email protected]('/api/orders/update/<int:orderID>', methods=['GET', 'PUT',\n 'POST', 'DELETE'])\ndef update_ticket(orderID):\n try:\n if request.method != 'PUT':\n abort(405)\n logger.error(\n f'ConnectionError issue while fetching tickets{request.method}'\n )\n else:\n return jsonify(orders[orderID - 1])\n except ConnectionError as e:\n logger.error(f'ConnectionError issue while fetching tickets{e}')\n except Exception as e:\n logger.error(f'Issue while fetching tickets from Zendesk {e}')\n\n\[email protected]('/api/generic/<path:txt>', methods=['GET', 'PUT', 'POST', 'DELETE'])\ndef get_generic(txt):\n method = request.method\n if method == 'POST' and request.is_json:\n returnList = []\n enteties = request.get_json()\n logger.info(type(enteties))\n for item in enteties:\n item['Hello'] = 'Hello, this is a test.'\n logger.info(type(item))\n returnList.append(item)\n return jsonify(returnList), 200, {'Content-Type': 'application/json'}\n else:\n logger.info(f'Http method is {method}')\n return 'Only JSON on POST is supported.', 500, {'Content-Type':\n 'text/plain'}\n\n\[email protected]('/api/show/config')\ndef get_config():\n return jsonify({'config': config})\n\n\nif __name__ == '__main__':\n config = VariablesConfig(required_env_vars, optional_env_vars=\n optional_env_vars)\n serve(app)\n", "step-5": "import os\r\n\r\nfrom flask import Flask, request, jsonify, Response, abort\r\n\r\nfrom sesamutils import sesam_logger, VariablesConfig\r\nfrom sesamutils.flask import serve\r\n\r\nrequired_env_vars = [\"SUBDOMAIN\"]\r\noptional_env_vars = [\"DEBUG\", \"LOG_LEVEL\", (\"API_ROOT\",\"zendesk.com/api/v2/tickets/\")] # Default values can be given to optional environment variables by the use of tuples\r\n\r\napp = Flask(__name__)\r\n\r\nlogger = sesam_logger('DemoMicroservice', app=app,timestamp=True)\r\n\r\norders = [\r\n{\r\n 'id': 1,\r\n 'Username': u'Unjudosely',\r\n 'Orders': u'Thinkpad',\r\n 'TotalSum': 8000\r\n },\r\n {\r\n 'id': 2,\r\n 'Username': u'Wimen1979',\r\n 'Orders': u'MacbookPro',\r\n 'TotalSum': 12000\r\n },\r\n { 'id': 3,\r\n 'Username': u'Gotin1984',\r\n 'Orders': u'Chormebook',\r\n 'TotalSum': 10000\r\n }\r\n\r\n]\r\n\r\[email protected]('/api/orders')\r\ndef get_orders():\r\n return jsonify({'orders': orders})\r\n\r\[email 
protected]('/api/orders/update/<int:orderID>', methods=['GET','PUT','POST','DELETE']) \r\ndef update_ticket(orderID):\r\n try:\r\n if request.method != 'PUT':\r\n abort(405) # Check closer what Flask abort does\r\n logger.error(f\"ConnectionError issue while fetching tickets{request.method}\")\r\n else:\r\n return jsonify(orders[orderID-1])\r\n except ConnectionError as e:\r\n logger.error(f\"ConnectionError issue while fetching tickets{e}\")\r\n except Exception as e:\r\n logger.error(f\"Issue while fetching tickets from Zendesk {e}\")\r\n\r\[email protected]('/api/generic/<path:txt>', methods=['GET','PUT','POST','DELETE'])\r\ndef get_generic(txt):\r\n method = request.method\r\n if method == \"POST\" and request.is_json:\r\n returnList = []\r\n enteties = request.get_json()\r\n logger.info(type(enteties))\r\n for item in enteties:\r\n item['Hello'] = \"Hello, this is a test.\"\r\n logger.info(type(item))\r\n returnList.append(item)\r\n return jsonify(returnList) , 200, {\"Content-Type\": \"application/json\"}\r\n else: \r\n logger.info(f'Http method is {method}')\r\n return \"Only JSON on POST is supported.\", 500, {\"Content-Type\": \"text/plain\"}\r\n\r\[email protected]('/api/show/config')\r\ndef get_config():\r\n return jsonify({'config': config})\r\n\r\nif __name__ == \"__main__\":\r\n config = VariablesConfig(required_env_vars, optional_env_vars=optional_env_vars)\r\n # logger.info(str(config))\r\n # if not config.validate():\r\n # os.sys.exit(1)\r\n\r\n serve(app)", "step-ids": [ 3, 4, 5, 7, 8 ] }
step_ids: [3, 4, 5, 7, 8]

code:
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> np.random.seed(0) if __name__ == '__main__': import argparse parser = argparse.ArgumentParser(description= 'Generate artificial videos with one subject in Casia-B') parser.add_argument('--dataset', type=str, required=False, default= 'casiab', choices=['casiab', 'tumgaid', 'other'], help= "Dataset name. Used tho select metadata and default folder. Try 'casiab', 'tumgaid' or 'other'." ) parser.add_argument('--datasetdir', type=str, required=False, help= 'Full path to dataset directory') parser.add_argument('--siltdir', type=str, required=False, help= 'Full path to silhouettes directory') parser.add_argument('--idsdir', type=str, requiered=False, help='Id file') parser.add_argument('--outputdir', type=str, required=False, help= 'Full path to output directory') parser.add_argument('--background', type=str, required=False, help= 'Full path to background image') parser.add_argument('--videotypes', type=str, nargs='+', required=False, help='Types of videos for augmentation') parser.add_argument('--height', type=int, required=False, help= 'Video height.') parser.add_argument('--width', type=int, required=False, help= 'Video width.') parser.add_argument('--framerate', type=int, required=False, help= 'Video frame rate.') script_path = os.path.dirname(os.path.abspath(__file__)) args = parser.parse_args() dataset = args.dataset datasetdir = args.datasetdir siltdir = args.siltdir idsdir = args.idsdir outputdir = args.outputdir background = args.background videotypes = args.videotypes height = args.height width = args.width framerate = args.framerate if dataset == 'casiab': datasetdir = (script_path + '/casiab/' if datasetdir is None else datasetdir) siltdir = (script_path + '/casiab_silhouettes/' if siltdir is None else siltdir) idsdir = script_path + 'casiab_ids.txt' if idsdir is None else idsdir outputdir = (script_path + '/mupeg_one_person/' if outputdir is None else outputdir) background = (script_path + '/casiab_background.png' if background is None else background) videotypes = ['nm-05-090', 'nm-06-090' ] if videotypes is None else videotypes height = 240 if height is None else height width = 320 if width is None else width framerate = 25 if framerate is None else framerate elif dataset == 'tumgaid': datasetdir = (script_path + '/tumgaid/' if datasetdir is None else datasetdir) siltdir = (script_path + '/tumgaid_silhouettes/' if siltdir is None else siltdir) idsdir = script_path + 'tumgaid_ids.txt' if idsdir is None else idsdir outputdir = (script_path + '/mupeg_one_person/' if outputdir is None else outputdir) background = (script_path + '/casiab_background.png' if background is None else background) videotypes = ['nm-05-090', 'nm-06-090' ] if videotypes is None else videotypes height = 240 if height is None else height width = 320 if width is None else width framerate = 25 if framerate is None else framerate elif not all(v is not None for v in [datasetdir, siltdir, outputdir, background, videotypes, height, width, framerate]): raise argparse.ArgumentTypeError( 'If you select "others" in dataset, you need to complete all the input arguments.' 
) generate_one_subject_with_videos(datasetdir, siltdir, idsdir, outputdir, background, videotypes, height, width, framerate) <|reserved_special_token_1|> import cv2 import os import numpy as np import sys from os.path import expanduser np.random.seed(0) if __name__ == '__main__': import argparse parser = argparse.ArgumentParser(description= 'Generate artificial videos with one subject in Casia-B') parser.add_argument('--dataset', type=str, required=False, default= 'casiab', choices=['casiab', 'tumgaid', 'other'], help= "Dataset name. Used tho select metadata and default folder. Try 'casiab', 'tumgaid' or 'other'." ) parser.add_argument('--datasetdir', type=str, required=False, help= 'Full path to dataset directory') parser.add_argument('--siltdir', type=str, required=False, help= 'Full path to silhouettes directory') parser.add_argument('--idsdir', type=str, requiered=False, help='Id file') parser.add_argument('--outputdir', type=str, required=False, help= 'Full path to output directory') parser.add_argument('--background', type=str, required=False, help= 'Full path to background image') parser.add_argument('--videotypes', type=str, nargs='+', required=False, help='Types of videos for augmentation') parser.add_argument('--height', type=int, required=False, help= 'Video height.') parser.add_argument('--width', type=int, required=False, help= 'Video width.') parser.add_argument('--framerate', type=int, required=False, help= 'Video frame rate.') script_path = os.path.dirname(os.path.abspath(__file__)) args = parser.parse_args() dataset = args.dataset datasetdir = args.datasetdir siltdir = args.siltdir idsdir = args.idsdir outputdir = args.outputdir background = args.background videotypes = args.videotypes height = args.height width = args.width framerate = args.framerate if dataset == 'casiab': datasetdir = (script_path + '/casiab/' if datasetdir is None else datasetdir) siltdir = (script_path + '/casiab_silhouettes/' if siltdir is None else siltdir) idsdir = script_path + 'casiab_ids.txt' if idsdir is None else idsdir outputdir = (script_path + '/mupeg_one_person/' if outputdir is None else outputdir) background = (script_path + '/casiab_background.png' if background is None else background) videotypes = ['nm-05-090', 'nm-06-090' ] if videotypes is None else videotypes height = 240 if height is None else height width = 320 if width is None else width framerate = 25 if framerate is None else framerate elif dataset == 'tumgaid': datasetdir = (script_path + '/tumgaid/' if datasetdir is None else datasetdir) siltdir = (script_path + '/tumgaid_silhouettes/' if siltdir is None else siltdir) idsdir = script_path + 'tumgaid_ids.txt' if idsdir is None else idsdir outputdir = (script_path + '/mupeg_one_person/' if outputdir is None else outputdir) background = (script_path + '/casiab_background.png' if background is None else background) videotypes = ['nm-05-090', 'nm-06-090' ] if videotypes is None else videotypes height = 240 if height is None else height width = 320 if width is None else width framerate = 25 if framerate is None else framerate elif not all(v is not None for v in [datasetdir, siltdir, outputdir, background, videotypes, height, width, framerate]): raise argparse.ArgumentTypeError( 'If you select "others" in dataset, you need to complete all the input arguments.' 
) generate_one_subject_with_videos(datasetdir, siltdir, idsdir, outputdir, background, videotypes, height, width, framerate) <|reserved_special_token_1|> import cv2 import os import numpy as np import sys from os.path import expanduser np.random.seed(0) if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description='Generate artificial videos with one subject in Casia-B') parser.add_argument('--dataset', type=str, required=False, default="casiab", choices=['casiab', 'tumgaid', 'other'], help="Dataset name. Used tho select metadata and default folder. " "Try 'casiab', 'tumgaid' or 'other'.") parser.add_argument('--datasetdir', type=str, required=False, help='Full path to dataset directory') parser.add_argument('--siltdir', type=str, required=False, help='Full path to silhouettes directory') parser.add_argument('--idsdir', type=str, requiered=False, help="Id file") parser.add_argument('--outputdir', type=str, required=False, help='Full path to output directory') parser.add_argument('--background', type=str, required=False, help='Full path to background image') parser.add_argument('--videotypes', type=str, nargs='+', required=False, help='Types of videos for augmentation') parser.add_argument('--height', type=int, required=False, help='Video height.') parser.add_argument('--width', type=int, required=False, help='Video width.') parser.add_argument('--framerate', type=int, required=False, help='Video frame rate.') script_path = os.path.dirname(os.path.abspath(__file__)) args = parser.parse_args() dataset = args.dataset datasetdir = args.datasetdir siltdir = args.siltdir idsdir = args.idsdir outputdir = args.outputdir background = args.background videotypes = args.videotypes height = args.height width = args.width framerate = args.framerate if dataset == 'casiab': datasetdir = script_path + "/casiab/" if datasetdir is None else datasetdir siltdir = script_path + "/casiab_silhouettes/" if siltdir is None else siltdir idsdir = script_path + "casiab_ids.txt" if idsdir is None else idsdir outputdir = script_path + "/mupeg_one_person/" if outputdir is None else outputdir background = script_path + "/casiab_background.png" if background is None else background videotypes = ["nm-05-090", "nm-06-090"] if videotypes is None else videotypes height = 240 if height is None else height width = 320 if width is None else width framerate = 25 if framerate is None else framerate elif dataset == 'tumgaid': datasetdir = script_path + "/tumgaid/" if datasetdir is None else datasetdir siltdir = script_path + "/tumgaid_silhouettes/" if siltdir is None else siltdir idsdir = script_path + "tumgaid_ids.txt" if idsdir is None else idsdir outputdir = script_path + "/mupeg_one_person/" if outputdir is None else outputdir background = script_path + "/casiab_background.png" if background is None else background videotypes = ["nm-05-090", "nm-06-090"] if videotypes is None else videotypes height = 240 if height is None else height width = 320 if width is None else width framerate = 25 if framerate is None else framerate else: if not all(v is not None for v in [datasetdir, siltdir, outputdir, background, videotypes, height, width, framerate]): raise argparse.ArgumentTypeError('If you select "others" in dataset, you need to complete all the input arguments.') generate_one_subject_with_videos(datasetdir, siltdir, idsdir, outputdir, background, videotypes, height, width, framerate)
order_type: flexible
original_example:
{ "blob_id": "6b32f829648b92da4b638ffd79692ffb86be80fe", "index": 8761, "step-1": "<mask token>\n", "step-2": "<mask token>\nnp.random.seed(0)\nif __name__ == '__main__':\n import argparse\n parser = argparse.ArgumentParser(description=\n 'Generate artificial videos with one subject in Casia-B')\n parser.add_argument('--dataset', type=str, required=False, default=\n 'casiab', choices=['casiab', 'tumgaid', 'other'], help=\n \"Dataset name. Used tho select metadata and default folder. Try 'casiab', 'tumgaid' or 'other'.\"\n )\n parser.add_argument('--datasetdir', type=str, required=False, help=\n 'Full path to dataset directory')\n parser.add_argument('--siltdir', type=str, required=False, help=\n 'Full path to silhouettes directory')\n parser.add_argument('--idsdir', type=str, requiered=False, help='Id file')\n parser.add_argument('--outputdir', type=str, required=False, help=\n 'Full path to output directory')\n parser.add_argument('--background', type=str, required=False, help=\n 'Full path to background image')\n parser.add_argument('--videotypes', type=str, nargs='+', required=False,\n help='Types of videos for augmentation')\n parser.add_argument('--height', type=int, required=False, help=\n 'Video height.')\n parser.add_argument('--width', type=int, required=False, help=\n 'Video width.')\n parser.add_argument('--framerate', type=int, required=False, help=\n 'Video frame rate.')\n script_path = os.path.dirname(os.path.abspath(__file__))\n args = parser.parse_args()\n dataset = args.dataset\n datasetdir = args.datasetdir\n siltdir = args.siltdir\n idsdir = args.idsdir\n outputdir = args.outputdir\n background = args.background\n videotypes = args.videotypes\n height = args.height\n width = args.width\n framerate = args.framerate\n if dataset == 'casiab':\n datasetdir = (script_path + '/casiab/' if datasetdir is None else\n datasetdir)\n siltdir = (script_path + '/casiab_silhouettes/' if siltdir is None else\n siltdir)\n idsdir = script_path + 'casiab_ids.txt' if idsdir is None else idsdir\n outputdir = (script_path + '/mupeg_one_person/' if outputdir is\n None else outputdir)\n background = (script_path + '/casiab_background.png' if background is\n None else background)\n videotypes = ['nm-05-090', 'nm-06-090'\n ] if videotypes is None else videotypes\n height = 240 if height is None else height\n width = 320 if width is None else width\n framerate = 25 if framerate is None else framerate\n elif dataset == 'tumgaid':\n datasetdir = (script_path + '/tumgaid/' if datasetdir is None else\n datasetdir)\n siltdir = (script_path + '/tumgaid_silhouettes/' if siltdir is None\n else siltdir)\n idsdir = script_path + 'tumgaid_ids.txt' if idsdir is None else idsdir\n outputdir = (script_path + '/mupeg_one_person/' if outputdir is\n None else outputdir)\n background = (script_path + '/casiab_background.png' if background is\n None else background)\n videotypes = ['nm-05-090', 'nm-06-090'\n ] if videotypes is None else videotypes\n height = 240 if height is None else height\n width = 320 if width is None else width\n framerate = 25 if framerate is None else framerate\n elif not all(v is not None for v in [datasetdir, siltdir, outputdir,\n background, videotypes, height, width, framerate]):\n raise argparse.ArgumentTypeError(\n 'If you select \"others\" in dataset, you need to complete all the input arguments.'\n )\n generate_one_subject_with_videos(datasetdir, siltdir, idsdir, outputdir,\n background, videotypes, height, width, framerate)\n", "step-3": "import cv2\nimport os\nimport numpy as 
np\nimport sys\nfrom os.path import expanduser\nnp.random.seed(0)\nif __name__ == '__main__':\n import argparse\n parser = argparse.ArgumentParser(description=\n 'Generate artificial videos with one subject in Casia-B')\n parser.add_argument('--dataset', type=str, required=False, default=\n 'casiab', choices=['casiab', 'tumgaid', 'other'], help=\n \"Dataset name. Used tho select metadata and default folder. Try 'casiab', 'tumgaid' or 'other'.\"\n )\n parser.add_argument('--datasetdir', type=str, required=False, help=\n 'Full path to dataset directory')\n parser.add_argument('--siltdir', type=str, required=False, help=\n 'Full path to silhouettes directory')\n parser.add_argument('--idsdir', type=str, requiered=False, help='Id file')\n parser.add_argument('--outputdir', type=str, required=False, help=\n 'Full path to output directory')\n parser.add_argument('--background', type=str, required=False, help=\n 'Full path to background image')\n parser.add_argument('--videotypes', type=str, nargs='+', required=False,\n help='Types of videos for augmentation')\n parser.add_argument('--height', type=int, required=False, help=\n 'Video height.')\n parser.add_argument('--width', type=int, required=False, help=\n 'Video width.')\n parser.add_argument('--framerate', type=int, required=False, help=\n 'Video frame rate.')\n script_path = os.path.dirname(os.path.abspath(__file__))\n args = parser.parse_args()\n dataset = args.dataset\n datasetdir = args.datasetdir\n siltdir = args.siltdir\n idsdir = args.idsdir\n outputdir = args.outputdir\n background = args.background\n videotypes = args.videotypes\n height = args.height\n width = args.width\n framerate = args.framerate\n if dataset == 'casiab':\n datasetdir = (script_path + '/casiab/' if datasetdir is None else\n datasetdir)\n siltdir = (script_path + '/casiab_silhouettes/' if siltdir is None else\n siltdir)\n idsdir = script_path + 'casiab_ids.txt' if idsdir is None else idsdir\n outputdir = (script_path + '/mupeg_one_person/' if outputdir is\n None else outputdir)\n background = (script_path + '/casiab_background.png' if background is\n None else background)\n videotypes = ['nm-05-090', 'nm-06-090'\n ] if videotypes is None else videotypes\n height = 240 if height is None else height\n width = 320 if width is None else width\n framerate = 25 if framerate is None else framerate\n elif dataset == 'tumgaid':\n datasetdir = (script_path + '/tumgaid/' if datasetdir is None else\n datasetdir)\n siltdir = (script_path + '/tumgaid_silhouettes/' if siltdir is None\n else siltdir)\n idsdir = script_path + 'tumgaid_ids.txt' if idsdir is None else idsdir\n outputdir = (script_path + '/mupeg_one_person/' if outputdir is\n None else outputdir)\n background = (script_path + '/casiab_background.png' if background is\n None else background)\n videotypes = ['nm-05-090', 'nm-06-090'\n ] if videotypes is None else videotypes\n height = 240 if height is None else height\n width = 320 if width is None else width\n framerate = 25 if framerate is None else framerate\n elif not all(v is not None for v in [datasetdir, siltdir, outputdir,\n background, videotypes, height, width, framerate]):\n raise argparse.ArgumentTypeError(\n 'If you select \"others\" in dataset, you need to complete all the input arguments.'\n )\n generate_one_subject_with_videos(datasetdir, siltdir, idsdir, outputdir,\n background, videotypes, height, width, framerate)\n", "step-4": "import cv2\nimport os\nimport numpy as np\nimport sys\nfrom os.path import expanduser\n\nnp.random.seed(0)\n\n\n\nif 
__name__ == \"__main__\":\n import argparse\n\n parser = argparse.ArgumentParser(description='Generate artificial videos with one subject in Casia-B')\n parser.add_argument('--dataset', type=str, required=False,\n default=\"casiab\", choices=['casiab', 'tumgaid', 'other'],\n help=\"Dataset name. Used tho select metadata and default folder. \"\n \"Try 'casiab', 'tumgaid' or 'other'.\")\n parser.add_argument('--datasetdir', type=str, required=False,\n help='Full path to dataset directory')\n parser.add_argument('--siltdir', type=str, required=False,\n help='Full path to silhouettes directory')\n parser.add_argument('--idsdir', type=str, requiered=False,\n help=\"Id file\")\n parser.add_argument('--outputdir', type=str, required=False,\n help='Full path to output directory')\n parser.add_argument('--background', type=str, required=False,\n help='Full path to background image')\n parser.add_argument('--videotypes', type=str, nargs='+', required=False,\n help='Types of videos for augmentation')\n parser.add_argument('--height', type=int, required=False,\n help='Video height.')\n parser.add_argument('--width', type=int, required=False,\n help='Video width.')\n parser.add_argument('--framerate', type=int, required=False,\n help='Video frame rate.')\n\n script_path = os.path.dirname(os.path.abspath(__file__))\n\n args = parser.parse_args()\n dataset = args.dataset\n datasetdir = args.datasetdir\n siltdir = args.siltdir\n idsdir = args.idsdir\n outputdir = args.outputdir\n background = args.background\n videotypes = args.videotypes\n height = args.height\n width = args.width\n framerate = args.framerate\n\n if dataset == 'casiab':\n datasetdir = script_path + \"/casiab/\" if datasetdir is None else datasetdir\n siltdir = script_path + \"/casiab_silhouettes/\" if siltdir is None else siltdir\n idsdir = script_path + \"casiab_ids.txt\" if idsdir is None else idsdir\n outputdir = script_path + \"/mupeg_one_person/\" if outputdir is None else outputdir\n background = script_path + \"/casiab_background.png\" if background is None else background\n videotypes = [\"nm-05-090\", \"nm-06-090\"] if videotypes is None else videotypes\n height = 240 if height is None else height\n width = 320 if width is None else width\n framerate = 25 if framerate is None else framerate\n elif dataset == 'tumgaid':\n datasetdir = script_path + \"/tumgaid/\" if datasetdir is None else datasetdir\n siltdir = script_path + \"/tumgaid_silhouettes/\" if siltdir is None else siltdir\n idsdir = script_path + \"tumgaid_ids.txt\" if idsdir is None else idsdir\n outputdir = script_path + \"/mupeg_one_person/\" if outputdir is None else outputdir\n background = script_path + \"/casiab_background.png\" if background is None else background\n videotypes = [\"nm-05-090\", \"nm-06-090\"] if videotypes is None else videotypes\n height = 240 if height is None else height\n width = 320 if width is None else width\n framerate = 25 if framerate is None else framerate\n\n else:\n if not all(v is not None for v in [datasetdir, siltdir, outputdir, background, videotypes, height, width, framerate]):\n raise argparse.ArgumentTypeError('If you select \"others\" in dataset, you need to complete all the input arguments.')\n\n\n\n generate_one_subject_with_videos(datasetdir, siltdir, idsdir, outputdir, background, videotypes, height, width, framerate)\n\n\n\n\n\n\n\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
step_ids: [0, 1, 2, 3]

code:

import os
import matplotlib.pyplot as plt
import cv2
import numpy as np


def divide_img(img_path, img_name, save_path):
    imgg = img_path + '\\' + img_name
    print(imgg)
    img = cv2.imread(imgg)
    print(img)
    # img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    h = img.shape[0]
    w = img.shape[1]
    n = 8
    m = 8
    print('h={},w={},n={},m={}'.format(h, w, n, m))
    dis_h = int(np.floor(h / n))
    dis_w = int(np.floor(w / m))
    num = 0
    for i in range(n):
        for j in range(m):
            num += 1
            print('i,j={}{}'.format(i, j))
            sub = img[dis_h * i:dis_h * (i + 1), dis_w * j:dis_w * (j + 1), :]
            cv2.imwrite(save_path + '_{}.tif'.format(num), sub)


if __name__ == '__main__':
    img_path = r'E:\个人文件夹\土地利用编码\tif'
    save_path = r'E:\个人文件夹\土地利用编码\tif1'
    img_list = os.listdir(img_path)
    for name in img_list:
        print(name)
        divide_img(img_path, name, save_path)
order_type: normal
original_example:
{ "blob_id": "03f3fcb38877570dea830a56460061bd3ccb8927", "index": 8830, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef divide_img(img_path, img_name, save_path):\n imgg = img_path + '\\\\' + img_name\n print(imgg)\n img = cv2.imread(imgg)\n print(img)\n h = img.shape[0]\n w = img.shape[1]\n n = 8\n m = 8\n print('h={},w={},n={},m={}'.format(h, w, n, m))\n dis_h = int(np.floor(h / n))\n dis_w = int(np.floor(w / m))\n num = 0\n for i in range(n):\n for j in range(m):\n num += 1\n print('i,j={}{}'.format(i, j))\n sub = img[dis_h * i:dis_h * (i + 1), dis_w * j:dis_w * (j + 1), :]\n cv2.imwrite(save_path + '_{}.tif'.format(num), sub)\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef divide_img(img_path, img_name, save_path):\n imgg = img_path + '\\\\' + img_name\n print(imgg)\n img = cv2.imread(imgg)\n print(img)\n h = img.shape[0]\n w = img.shape[1]\n n = 8\n m = 8\n print('h={},w={},n={},m={}'.format(h, w, n, m))\n dis_h = int(np.floor(h / n))\n dis_w = int(np.floor(w / m))\n num = 0\n for i in range(n):\n for j in range(m):\n num += 1\n print('i,j={}{}'.format(i, j))\n sub = img[dis_h * i:dis_h * (i + 1), dis_w * j:dis_w * (j + 1), :]\n cv2.imwrite(save_path + '_{}.tif'.format(num), sub)\n\n\nif __name__ == '__main__':\n img_path = 'E:\\\\个人文件夹\\\\土地利用编码\\\\tif'\n save_path = 'E:\\\\个人文件夹\\\\土地利用编码\\\\tif1'\n img_list = os.listdir(img_path)\n for name in img_list:\n print(name)\n divide_img(img_path, name, save_path)\n", "step-4": "import os\nimport matplotlib.pyplot as plt\nimport cv2\nimport numpy as np\n\n\ndef divide_img(img_path, img_name, save_path):\n imgg = img_path + '\\\\' + img_name\n print(imgg)\n img = cv2.imread(imgg)\n print(img)\n h = img.shape[0]\n w = img.shape[1]\n n = 8\n m = 8\n print('h={},w={},n={},m={}'.format(h, w, n, m))\n dis_h = int(np.floor(h / n))\n dis_w = int(np.floor(w / m))\n num = 0\n for i in range(n):\n for j in range(m):\n num += 1\n print('i,j={}{}'.format(i, j))\n sub = img[dis_h * i:dis_h * (i + 1), dis_w * j:dis_w * (j + 1), :]\n cv2.imwrite(save_path + '_{}.tif'.format(num), sub)\n\n\nif __name__ == '__main__':\n img_path = 'E:\\\\个人文件夹\\\\土地利用编码\\\\tif'\n save_path = 'E:\\\\个人文件夹\\\\土地利用编码\\\\tif1'\n img_list = os.listdir(img_path)\n for name in img_list:\n print(name)\n divide_img(img_path, name, save_path)\n", "step-5": "import os\nimport matplotlib.pyplot as plt\nimport cv2\nimport numpy as np\n\n\ndef divide_img(img_path, img_name, save_path):\n imgg = img_path +'\\\\' +img_name\n print(imgg)\n img = cv2.imread(imgg)\n print(img)\n # img = cv2.cvtColor(img,cv2.COLOR_BGR2RGB)\n h = img.shape[0]\n w = img.shape[1]\n n = 8\n m = 8\n print('h={},w={},n={},m={}'.format(h, w, n, m))\n dis_h = int(np.floor(h / n))\n dis_w = int(np.floor(w / m))\n num = 0\n for i in range(n):\n for j in range(m):\n num += 1\n print('i,j={}{}'.format(i, j))\n sub = img[dis_h * i:dis_h * (i + 1), dis_w * j:dis_w * (j + 1), :]\n cv2.imwrite(save_path + '_{}.tif'.format(num), sub)\n\n\nif __name__ == '__main__':\n\n img_path = r'E:\\个人文件夹\\土地利用编码\\tif'\n save_path = r'E:\\个人文件夹\\土地利用编码\\tif1'\n img_list = os.listdir(img_path)\n for name in img_list:\n print(name)\n divide_img(img_path, name, save_path)", "step-ids": [ 0, 1, 2, 3, 4 ] }
step_ids: [0, 1, 2, 3, 4]

code:

# Using Celery
from django.conf import settings
from django.core.mail import send_mail
from django.template import loader, RequestContext
from celery import Celery
import time

# Code added on the task-worker side
import os
import django
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dailyfresh.settings")
django.setup()

from goods.models import GoodsType, IndexGoodsBanner, IndexPromotionBanner, IndexTypeGoodsBanner

# Create a Celery instance
app = Celery('celery_tasks.tasks', broker='redis://127.0.0.1:6379/8')

# Task function that sends the activation e-mail
@app.task
def send_register_active_email(to_email, username, token):
    '''Send the account-activation e-mail'''
    # Compose the mail contents
    subject = '天天生鲜欢迎信息'
    message = ''
    sender = settings.EMAIL_FROM
    receiver = [to_email]
    html_message = '<h1>%s,欢迎</h1><br>请点击以下链接激活<br><a href="http://127.0.0.1:8000/user/active/%s">http://127.0.0.1:8000/user/active/%s</a>' % (username, token, token)
    send_mail(subject, message, sender, receiver, html_message=html_message)

@app.task
def generate_static_index_html():
    '''Generate the static front page'''
    types = GoodsType.objects.all()
    # Front-page carousel banners
    goods_banners = IndexGoodsBanner.objects.all().order_by('index')
    # Front-page promotion banners
    promotion_banners = IndexPromotionBanner.objects.all().order_by('index')
    # Per-category goods shown on the front page
    # type_goods_banners = IndexTypeGoodsBanner.objects.all()
    for type in types:
        # Image entries for this category on the front page
        image_banners = IndexTypeGoodsBanner.objects.filter(type=type, display_type=1).order_by('index')
        # Text entries for this category on the front page
        title_banners = IndexTypeGoodsBanner.objects.filter(type=type, display_type=0).order_by('index')
        # Attach the query results to the type object
        type.image_banners = image_banners
        type.title_banners = title_banners
    # (Goods in the user's cart would be fetched here)
    # Build the template context
    context = {'types': types,
               'goods_banners': goods_banners,
               'promotion_banners': promotion_banners}

    # Load the template file and render it
    temp = loader.get_template('static_index.html')
    statoc_index_html = temp.render(context)

    save_path = os.path.join(settings.BASE_DIR, 'static/static_index/index.html')
    with open(save_path, 'w', encoding='utf-8') as f:
        f.write(statoc_index_html)
order_type: normal
original_example:
{ "blob_id": "7f7d087b7001cd7df01d4f22e056809be5a35568", "index": 9584, "step-1": "<mask token>\n\n\[email protected]\ndef generate_static_index_html():\n \"\"\"产生首页静态页面\"\"\"\n types = GoodsType.objects.all()\n goods_banners = IndexGoodsBanner.objects.all().order_by('index')\n promotion_banners = IndexPromotionBanner.objects.all().order_by('index')\n for type in types:\n image_banners = IndexTypeGoodsBanner.objects.filter(type=type,\n display_type=1).order_by('index')\n title_banners = IndexTypeGoodsBanner.objects.filter(type=type,\n display_type=0).order_by('index')\n type.image_banners = image_banners\n type.title_banners = title_banners\n context = {'types': types, 'goods_banners': goods_banners,\n 'promotion_banners': promotion_banners}\n temp = loader.get_template('static_index.html')\n statoc_index_html = temp.render(context)\n save_path = os.path.join(settings.BASE_DIR,\n 'static/static_index/index.html')\n with open(save_path, 'w', encoding='utf-8') as f:\n f.write(statoc_index_html)\n", "step-2": "<mask token>\n\n\[email protected]\ndef send_register_active_email(to_email, username, token):\n \"\"\"发送激活邮件\"\"\"\n subject = '天天生鲜欢迎信息'\n message = ''\n sender = settings.EMAIL_FROM\n receiver = [to_email]\n html_message = (\n '<h1>%s,欢迎</h1><br>请点击以下链接激活<br><a href=\"http://127.0.0.1:8000/user/active/%s\">http://127.0.0.1:8000/user/active/%s</a>'\n % (username, token, token))\n send_mail(subject, message, sender, receiver, html_message=html_message)\n\n\[email protected]\ndef generate_static_index_html():\n \"\"\"产生首页静态页面\"\"\"\n types = GoodsType.objects.all()\n goods_banners = IndexGoodsBanner.objects.all().order_by('index')\n promotion_banners = IndexPromotionBanner.objects.all().order_by('index')\n for type in types:\n image_banners = IndexTypeGoodsBanner.objects.filter(type=type,\n display_type=1).order_by('index')\n title_banners = IndexTypeGoodsBanner.objects.filter(type=type,\n display_type=0).order_by('index')\n type.image_banners = image_banners\n type.title_banners = title_banners\n context = {'types': types, 'goods_banners': goods_banners,\n 'promotion_banners': promotion_banners}\n temp = loader.get_template('static_index.html')\n statoc_index_html = temp.render(context)\n save_path = os.path.join(settings.BASE_DIR,\n 'static/static_index/index.html')\n with open(save_path, 'w', encoding='utf-8') as f:\n f.write(statoc_index_html)\n", "step-3": "<mask token>\nos.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dailyfresh.settings')\ndjango.setup()\n<mask token>\napp = Celery('celery_tasks.tasks', broker='redis://127.0.0.1:6379/8')\n\n\[email protected]\ndef send_register_active_email(to_email, username, token):\n \"\"\"发送激活邮件\"\"\"\n subject = '天天生鲜欢迎信息'\n message = ''\n sender = settings.EMAIL_FROM\n receiver = [to_email]\n html_message = (\n '<h1>%s,欢迎</h1><br>请点击以下链接激活<br><a href=\"http://127.0.0.1:8000/user/active/%s\">http://127.0.0.1:8000/user/active/%s</a>'\n % (username, token, token))\n send_mail(subject, message, sender, receiver, html_message=html_message)\n\n\[email protected]\ndef generate_static_index_html():\n \"\"\"产生首页静态页面\"\"\"\n types = GoodsType.objects.all()\n goods_banners = IndexGoodsBanner.objects.all().order_by('index')\n promotion_banners = IndexPromotionBanner.objects.all().order_by('index')\n for type in types:\n image_banners = IndexTypeGoodsBanner.objects.filter(type=type,\n display_type=1).order_by('index')\n title_banners = IndexTypeGoodsBanner.objects.filter(type=type,\n display_type=0).order_by('index')\n type.image_banners = 
image_banners\n type.title_banners = title_banners\n context = {'types': types, 'goods_banners': goods_banners,\n 'promotion_banners': promotion_banners}\n temp = loader.get_template('static_index.html')\n statoc_index_html = temp.render(context)\n save_path = os.path.join(settings.BASE_DIR,\n 'static/static_index/index.html')\n with open(save_path, 'w', encoding='utf-8') as f:\n f.write(statoc_index_html)\n", "step-4": "from django.conf import settings\nfrom django.core.mail import send_mail\nfrom django.template import loader, RequestContext\nfrom celery import Celery\nimport time\nimport os\nimport django\nos.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dailyfresh.settings')\ndjango.setup()\nfrom goods.models import GoodsType, IndexGoodsBanner, IndexPromotionBanner, IndexTypeGoodsBanner\napp = Celery('celery_tasks.tasks', broker='redis://127.0.0.1:6379/8')\n\n\[email protected]\ndef send_register_active_email(to_email, username, token):\n \"\"\"发送激活邮件\"\"\"\n subject = '天天生鲜欢迎信息'\n message = ''\n sender = settings.EMAIL_FROM\n receiver = [to_email]\n html_message = (\n '<h1>%s,欢迎</h1><br>请点击以下链接激活<br><a href=\"http://127.0.0.1:8000/user/active/%s\">http://127.0.0.1:8000/user/active/%s</a>'\n % (username, token, token))\n send_mail(subject, message, sender, receiver, html_message=html_message)\n\n\[email protected]\ndef generate_static_index_html():\n \"\"\"产生首页静态页面\"\"\"\n types = GoodsType.objects.all()\n goods_banners = IndexGoodsBanner.objects.all().order_by('index')\n promotion_banners = IndexPromotionBanner.objects.all().order_by('index')\n for type in types:\n image_banners = IndexTypeGoodsBanner.objects.filter(type=type,\n display_type=1).order_by('index')\n title_banners = IndexTypeGoodsBanner.objects.filter(type=type,\n display_type=0).order_by('index')\n type.image_banners = image_banners\n type.title_banners = title_banners\n context = {'types': types, 'goods_banners': goods_banners,\n 'promotion_banners': promotion_banners}\n temp = loader.get_template('static_index.html')\n statoc_index_html = temp.render(context)\n save_path = os.path.join(settings.BASE_DIR,\n 'static/static_index/index.html')\n with open(save_path, 'w', encoding='utf-8') as f:\n f.write(statoc_index_html)\n", "step-5": "# 使用celery\nfrom django.conf import settings\nfrom django.core.mail import send_mail\nfrom django.template import loader,RequestContext\nfrom celery import Celery\nimport time\n# 在任务处理者一\n#\n# 端加的代码\nimport os\nimport django\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"dailyfresh.settings\")\ndjango.setup()\n\nfrom goods.models import GoodsType, IndexGoodsBanner, IndexPromotionBanner, IndexTypeGoodsBanner\n\n# 创建一个实例对象\napp = Celery('celery_tasks.tasks', broker='redis://127.0.0.1:6379/8')\n# 定义任务函数,发邮件函数\[email protected]\ndef send_register_active_email(to_email, username, token):\n '''发送激活邮件'''\n # 组织邮件信息\n subject = '天天生鲜欢迎信息'\n message = ''\n sender = settings.EMAIL_FROM\n receiver = [to_email]\n html_message = '<h1>%s,欢迎</h1><br>请点击以下链接激活<br><a href=\"http://127.0.0.1:8000/user/active/%s\">http://127.0.0.1:8000/user/active/%s</a>'%(username, token, token)\n send_mail(subject, message, sender, receiver, html_message=html_message)\n\[email protected]\ndef generate_static_index_html():\n '''产生首页静态页面'''\n types = GoodsType.objects.all()\n # 获取首页轮播图信息\n goods_banners = IndexGoodsBanner.objects.all().order_by('index')\n # 获取首页促销信息\n promotion_banners = IndexPromotionBanner.objects.all().order_by('index')\n # 获取首页分类商品展示信息\n #type_goods_banners = IndexTypeGoodsBanner.objects.all()\n 
for type in types:\n\n # 获取type种类首页分类商品图片信息\n image_banners = IndexTypeGoodsBanner.objects.filter(type=type, display_type=1).order_by('index')\n # 获取type种类首页分类商品的文字展示信息\n title_banners = IndexTypeGoodsBanner.objects.filter(type=type, display_type=0).order_by('index')\n # 将查出来的数据动态添加到type中\n type.image_banners = image_banners\n type.title_banners = title_banners\n # 获取用户购物车中商品信息\n # 组织模范上下文\n context = {'types': types,\n 'goods_banners': goods_banners,\n 'promotion_banners': promotion_banners}\n\n # 加载模板文件\n temp = loader.get_template('static_index.html')\n # 定义模板上下文\n # 模板渲染\n statoc_index_html = temp.render(context)\n\n save_path = os.path.join(settings.BASE_DIR, 'static/static_index/index.html')\n with open(save_path,'w',encoding='utf-8') as f:\n f.write(statoc_index_html)\n\n\n\n\n\n\n\n\n\n\n", "step-ids": [ 1, 2, 4, 5, 6 ] }
step_ids: [1, 2, 4, 5, 6]

code:
<|reserved_special_token_0|> class MockSOLR(object): class MockHits(list): @property def hits(self): return len(self) @property def docs(self): return self <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def search(self, q, fq=None, **kw): if isinstance(q, unicode): q = q.encode('latin-1') preds = [] q_parts = shlex.split(q) if fq: q_parts += fq for part in q_parts: if part == '&&': continue if ':' in part: field, value = part.split(':', 1) preds.append((field, value)) else: preds.append(('text', part)) result = self.MockHits() for obj in self.db.values(): for field, value in preds: neg = False if field[0] == '!': neg = True field = field[1:] if field == 'text' or field.endswith('_t'): if (value not in str(obj.get(field, ''))) ^ neg: break elif (value != str(obj.get(field, ''))) ^ neg: break else: result.append(obj) return result def delete(self, *args, **kwargs): if kwargs.get('q', None) == '*:*': self.db = {} elif kwargs.get('id', None): del self.db[kwargs['id']] elif kwargs.get('q', None): for doc in self.search(kwargs['q']): self.delete(id=doc['id']) <|reserved_special_token_1|> <|reserved_special_token_0|> class MockSOLR(object): class MockHits(list): @property def hits(self): return len(self) @property def docs(self): return self def __init__(self): self.db = {} def add(self, objects): for o in objects: o['text'] = ''.join(o['text']) self.db[o['id']] = o <|reserved_special_token_0|> def search(self, q, fq=None, **kw): if isinstance(q, unicode): q = q.encode('latin-1') preds = [] q_parts = shlex.split(q) if fq: q_parts += fq for part in q_parts: if part == '&&': continue if ':' in part: field, value = part.split(':', 1) preds.append((field, value)) else: preds.append(('text', part)) result = self.MockHits() for obj in self.db.values(): for field, value in preds: neg = False if field[0] == '!': neg = True field = field[1:] if field == 'text' or field.endswith('_t'): if (value not in str(obj.get(field, ''))) ^ neg: break elif (value != str(obj.get(field, ''))) ^ neg: break else: result.append(obj) return result def delete(self, *args, **kwargs): if kwargs.get('q', None) == '*:*': self.db = {} elif kwargs.get('id', None): del self.db[kwargs['id']] elif kwargs.get('q', None): for doc in self.search(kwargs['q']): self.delete(id=doc['id']) <|reserved_special_token_1|> <|reserved_special_token_0|> class MockSOLR(object): class MockHits(list): @property def hits(self): return len(self) @property def docs(self): return self def __init__(self): self.db = {} def add(self, objects): for o in objects: o['text'] = ''.join(o['text']) self.db[o['id']] = o def commit(self): pass def search(self, q, fq=None, **kw): if isinstance(q, unicode): q = q.encode('latin-1') preds = [] q_parts = shlex.split(q) if fq: q_parts += fq for part in q_parts: if part == '&&': continue if ':' in part: field, value = part.split(':', 1) preds.append((field, value)) else: preds.append(('text', part)) result = self.MockHits() for obj in self.db.values(): for field, value in preds: neg = False if field[0] == '!': neg = True field = field[1:] if field == 'text' or field.endswith('_t'): if (value not in str(obj.get(field, ''))) ^ neg: break elif (value != str(obj.get(field, ''))) ^ neg: break else: result.append(obj) return result def delete(self, *args, **kwargs): if kwargs.get('q', None) == '*:*': self.db = {} elif kwargs.get('id', None): del self.db[kwargs['id']] elif kwargs.get('q', None): for doc in self.search(kwargs['q']): self.delete(id=doc['id']) <|reserved_special_token_1|> 
import shlex class MockSOLR(object): class MockHits(list): @property def hits(self): return len(self) @property def docs(self): return self def __init__(self): self.db = {} def add(self, objects): for o in objects: o['text'] = ''.join(o['text']) self.db[o['id']] = o def commit(self): pass def search(self, q, fq=None, **kw): if isinstance(q, unicode): q = q.encode('latin-1') preds = [] q_parts = shlex.split(q) if fq: q_parts += fq for part in q_parts: if part == '&&': continue if ':' in part: field, value = part.split(':', 1) preds.append((field, value)) else: preds.append(('text', part)) result = self.MockHits() for obj in self.db.values(): for field, value in preds: neg = False if field[0] == '!': neg = True field = field[1:] if field == 'text' or field.endswith('_t'): if (value not in str(obj.get(field, ''))) ^ neg: break elif (value != str(obj.get(field, ''))) ^ neg: break else: result.append(obj) return result def delete(self, *args, **kwargs): if kwargs.get('q', None) == '*:*': self.db = {} elif kwargs.get('id', None): del self.db[kwargs['id']] elif kwargs.get('q', None): for doc in self.search(kwargs['q']): self.delete(id=doc['id']) <|reserved_special_token_1|> import shlex class MockSOLR(object): class MockHits(list): @property def hits(self): return len(self) @property def docs(self): return self def __init__(self): self.db = {} def add(self, objects): for o in objects: o['text'] = ''.join(o['text']) self.db[o['id']] = o def commit(self): pass def search(self, q, fq=None, **kw): if isinstance(q, unicode): q = q.encode('latin-1') # Parse query preds = [] q_parts = shlex.split(q) if fq: q_parts += fq for part in q_parts: if part == '&&': continue if ':' in part: field, value = part.split(':', 1) preds.append((field, value)) else: preds.append(('text', part)) result = self.MockHits() for obj in self.db.values(): for field, value in preds: neg = False if field[0] == '!': neg = True field = field[1:] if field == 'text' or field.endswith('_t'): if (value not in str(obj.get(field, ''))) ^ neg: break else: if (value != str(obj.get(field, ''))) ^ neg: break else: result.append(obj) return result def delete(self, *args, **kwargs): if kwargs.get('q', None) == '*:*': self.db = {} elif kwargs.get('id', None): del self.db[kwargs['id']] elif kwargs.get('q', None): for doc in self.search(kwargs['q']): self.delete(id=doc['id'])
order_type: flexible
original_example:
{ "blob_id": "4774c1f4eafc0132bab0073b60c4bcad6b69380d", "index": 9068, "step-1": "<mask token>\n\n\nclass MockSOLR(object):\n\n\n class MockHits(list):\n\n @property\n def hits(self):\n return len(self)\n\n @property\n def docs(self):\n return self\n <mask token>\n <mask token>\n <mask token>\n\n def search(self, q, fq=None, **kw):\n if isinstance(q, unicode):\n q = q.encode('latin-1')\n preds = []\n q_parts = shlex.split(q)\n if fq:\n q_parts += fq\n for part in q_parts:\n if part == '&&':\n continue\n if ':' in part:\n field, value = part.split(':', 1)\n preds.append((field, value))\n else:\n preds.append(('text', part))\n result = self.MockHits()\n for obj in self.db.values():\n for field, value in preds:\n neg = False\n if field[0] == '!':\n neg = True\n field = field[1:]\n if field == 'text' or field.endswith('_t'):\n if (value not in str(obj.get(field, ''))) ^ neg:\n break\n elif (value != str(obj.get(field, ''))) ^ neg:\n break\n else:\n result.append(obj)\n return result\n\n def delete(self, *args, **kwargs):\n if kwargs.get('q', None) == '*:*':\n self.db = {}\n elif kwargs.get('id', None):\n del self.db[kwargs['id']]\n elif kwargs.get('q', None):\n for doc in self.search(kwargs['q']):\n self.delete(id=doc['id'])\n", "step-2": "<mask token>\n\n\nclass MockSOLR(object):\n\n\n class MockHits(list):\n\n @property\n def hits(self):\n return len(self)\n\n @property\n def docs(self):\n return self\n\n def __init__(self):\n self.db = {}\n\n def add(self, objects):\n for o in objects:\n o['text'] = ''.join(o['text'])\n self.db[o['id']] = o\n <mask token>\n\n def search(self, q, fq=None, **kw):\n if isinstance(q, unicode):\n q = q.encode('latin-1')\n preds = []\n q_parts = shlex.split(q)\n if fq:\n q_parts += fq\n for part in q_parts:\n if part == '&&':\n continue\n if ':' in part:\n field, value = part.split(':', 1)\n preds.append((field, value))\n else:\n preds.append(('text', part))\n result = self.MockHits()\n for obj in self.db.values():\n for field, value in preds:\n neg = False\n if field[0] == '!':\n neg = True\n field = field[1:]\n if field == 'text' or field.endswith('_t'):\n if (value not in str(obj.get(field, ''))) ^ neg:\n break\n elif (value != str(obj.get(field, ''))) ^ neg:\n break\n else:\n result.append(obj)\n return result\n\n def delete(self, *args, **kwargs):\n if kwargs.get('q', None) == '*:*':\n self.db = {}\n elif kwargs.get('id', None):\n del self.db[kwargs['id']]\n elif kwargs.get('q', None):\n for doc in self.search(kwargs['q']):\n self.delete(id=doc['id'])\n", "step-3": "<mask token>\n\n\nclass MockSOLR(object):\n\n\n class MockHits(list):\n\n @property\n def hits(self):\n return len(self)\n\n @property\n def docs(self):\n return self\n\n def __init__(self):\n self.db = {}\n\n def add(self, objects):\n for o in objects:\n o['text'] = ''.join(o['text'])\n self.db[o['id']] = o\n\n def commit(self):\n pass\n\n def search(self, q, fq=None, **kw):\n if isinstance(q, unicode):\n q = q.encode('latin-1')\n preds = []\n q_parts = shlex.split(q)\n if fq:\n q_parts += fq\n for part in q_parts:\n if part == '&&':\n continue\n if ':' in part:\n field, value = part.split(':', 1)\n preds.append((field, value))\n else:\n preds.append(('text', part))\n result = self.MockHits()\n for obj in self.db.values():\n for field, value in preds:\n neg = False\n if field[0] == '!':\n neg = True\n field = field[1:]\n if field == 'text' or field.endswith('_t'):\n if (value not in str(obj.get(field, ''))) ^ neg:\n break\n elif (value != str(obj.get(field, ''))) ^ neg:\n break\n else:\n 
result.append(obj)\n return result\n\n def delete(self, *args, **kwargs):\n if kwargs.get('q', None) == '*:*':\n self.db = {}\n elif kwargs.get('id', None):\n del self.db[kwargs['id']]\n elif kwargs.get('q', None):\n for doc in self.search(kwargs['q']):\n self.delete(id=doc['id'])\n", "step-4": "import shlex\n\n\nclass MockSOLR(object):\n\n\n class MockHits(list):\n\n @property\n def hits(self):\n return len(self)\n\n @property\n def docs(self):\n return self\n\n def __init__(self):\n self.db = {}\n\n def add(self, objects):\n for o in objects:\n o['text'] = ''.join(o['text'])\n self.db[o['id']] = o\n\n def commit(self):\n pass\n\n def search(self, q, fq=None, **kw):\n if isinstance(q, unicode):\n q = q.encode('latin-1')\n preds = []\n q_parts = shlex.split(q)\n if fq:\n q_parts += fq\n for part in q_parts:\n if part == '&&':\n continue\n if ':' in part:\n field, value = part.split(':', 1)\n preds.append((field, value))\n else:\n preds.append(('text', part))\n result = self.MockHits()\n for obj in self.db.values():\n for field, value in preds:\n neg = False\n if field[0] == '!':\n neg = True\n field = field[1:]\n if field == 'text' or field.endswith('_t'):\n if (value not in str(obj.get(field, ''))) ^ neg:\n break\n elif (value != str(obj.get(field, ''))) ^ neg:\n break\n else:\n result.append(obj)\n return result\n\n def delete(self, *args, **kwargs):\n if kwargs.get('q', None) == '*:*':\n self.db = {}\n elif kwargs.get('id', None):\n del self.db[kwargs['id']]\n elif kwargs.get('q', None):\n for doc in self.search(kwargs['q']):\n self.delete(id=doc['id'])\n", "step-5": "import shlex\n\n\nclass MockSOLR(object):\n\n class MockHits(list):\n @property\n def hits(self):\n return len(self)\n\n @property\n def docs(self):\n return self\n\n def __init__(self):\n self.db = {}\n\n def add(self, objects):\n for o in objects:\n o['text'] = ''.join(o['text'])\n self.db[o['id']] = o\n\n def commit(self):\n pass\n\n def search(self, q, fq=None, **kw):\n if isinstance(q, unicode):\n q = q.encode('latin-1')\n # Parse query\n preds = []\n q_parts = shlex.split(q)\n if fq:\n q_parts += fq\n for part in q_parts:\n if part == '&&':\n continue\n if ':' in part:\n field, value = part.split(':', 1)\n preds.append((field, value))\n else:\n preds.append(('text', part))\n result = self.MockHits()\n for obj in self.db.values():\n for field, value in preds:\n neg = False\n if field[0] == '!':\n neg = True\n field = field[1:]\n if field == 'text' or field.endswith('_t'):\n if (value not in str(obj.get(field, ''))) ^ neg:\n break\n else:\n if (value != str(obj.get(field, ''))) ^ neg:\n break\n else:\n result.append(obj)\n return result\n\n def delete(self, *args, **kwargs):\n if kwargs.get('q', None) == '*:*':\n self.db = {}\n elif kwargs.get('id', None):\n del self.db[kwargs['id']]\n elif kwargs.get('q', None):\n for doc in self.search(kwargs['q']):\n self.delete(id=doc['id'])\n", "step-ids": [ 3, 5, 6, 7, 8 ] }
[ 3, 5, 6, 7, 8 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> if 'feira' in msg: print('Sim, foi a feira') else: print('não ele não foi a feira') <|reserved_special_token_1|> msg = 'eduardo foi a feira' if 'feira' in msg: print('Sim, foi a feira') else: print('não ele não foi a feira') <|reserved_special_token_1|> msg = "eduardo foi a feira" if 'feira' in msg: print('Sim, foi a feira') else: print('não ele não foi a feira')
flexible
{ "blob_id": "2a83bc9157e2210da46e58c56fc0b7199856f4c0", "index": 6287, "step-1": "<mask token>\n", "step-2": "<mask token>\nif 'feira' in msg:\n print('Sim, foi a feira')\nelse:\n print('não ele não foi a feira')\n", "step-3": "msg = 'eduardo foi a feira'\nif 'feira' in msg:\n print('Sim, foi a feira')\nelse:\n print('não ele não foi a feira')\n", "step-4": "msg = \"eduardo foi a feira\"\n\nif 'feira' in msg:\n print('Sim, foi a feira')\nelse:\n print('não ele não foi a feira')\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
from django.shortcuts import render
from django.http import HttpResponse

# Create your views here.
# def Login_Form(request):
#     return render(request, 'Login.html')
normal
{ "blob_id": "ee161ff66a6fc651a03f725427c3731bdf4243eb", "index": 6906, "step-1": "<mask token>\n", "step-2": "from django.shortcuts import render\nfrom django.http import HttpResponse\n", "step-3": "from django.shortcuts import render\r\nfrom django.http import HttpResponse\r\n\r\n\r\n\r\n# # Create your views here.\r\n# def Login_Form(request):\r\n# return render(request, 'Login.html')", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> @app.get('/jobs/', response_model=List[schemas.Job]) def read_jobs(skip: int=0, limit: int=100, db: Session=Depends(get_db)): jobs = crud.get_jobs(db, skip=skip, limit=limit) return jobs @app.get('/jobs/{job_id}', response_model=schemas.Job) def read_job(job_id: int, db: Session=Depends(get_db)): db_job = crud.get_job(db, job_id=job_id) if db_job is None: raise HTTPException(status_code=404, detail='Job not found') return db_job @app.post('/cands/', response_model=schemas.CanCreate) def create_can(can: schemas.CanCreate, db: Session=Depends(get_db)): db_can = crud.get_candidate(db, can.email) if db_can: raise HTTPException(status_code=400, detail='Candidate already Present' ) return crud.create_candidate(db=db, can=can) @app.get('/cands/', response_model=List[schemas.Can]) def read_cans(skip: int=0, limit: int=100, db: Session=Depends(get_db)): cans = crud.get_candidates(db, skip=skip, limit=limit) return cans @app.get('/cands/{email}', response_model=schemas.Can) def read_can(email: str, db: Session=Depends(get_db)): db_can = crud.get_candidate(db, email) if db_can is None: raise HTTPException(status_code=404, detail='Candidate not found') return db_can @app.get('/jobapps/', response_model=List[schemas.AppBase]) def read_jobapps(skip: int=0, limit: int=100, db: Session=Depends(get_db)): jobapps = crud.get_jobapps(db, skip=skip, limit=limit) return jobapps <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def get_db(): db = SessionLocal() try: yield db finally: db.close() <|reserved_special_token_0|> @app.get('/jobs/', response_model=List[schemas.Job]) def read_jobs(skip: int=0, limit: int=100, db: Session=Depends(get_db)): jobs = crud.get_jobs(db, skip=skip, limit=limit) return jobs @app.get('/jobs/{job_id}', response_model=schemas.Job) def read_job(job_id: int, db: Session=Depends(get_db)): db_job = crud.get_job(db, job_id=job_id) if db_job is None: raise HTTPException(status_code=404, detail='Job not found') return db_job @app.post('/cands/', response_model=schemas.CanCreate) def create_can(can: schemas.CanCreate, db: Session=Depends(get_db)): db_can = crud.get_candidate(db, can.email) if db_can: raise HTTPException(status_code=400, detail='Candidate already Present' ) return crud.create_candidate(db=db, can=can) @app.get('/cands/', response_model=List[schemas.Can]) def read_cans(skip: int=0, limit: int=100, db: Session=Depends(get_db)): cans = crud.get_candidates(db, skip=skip, limit=limit) return cans @app.get('/cands/{email}', response_model=schemas.Can) def read_can(email: str, db: Session=Depends(get_db)): db_can = crud.get_candidate(db, email) if db_can is None: raise HTTPException(status_code=404, detail='Candidate not found') return db_can @app.get('/jobapps/', response_model=List[schemas.AppBase]) def read_jobapps(skip: int=0, limit: int=100, db: Session=Depends(get_db)): jobapps = crud.get_jobapps(db, skip=skip, limit=limit) return jobapps @app.get('/jobapps/{appid}', response_model=schemas.AppBase) def read_jobapp(appid: int, db: Session=Depends(get_db)): db_jobapp = crud.get_jobapp(db, appid) if db_jobapp is None: raise HTTPException(status_code=404, detail='Job Application not found' ) return db_jobapp <|reserved_special_token_1|> <|reserved_special_token_0|> models.Base.metadata.create_all(bind=engine) <|reserved_special_token_0|> def get_db(): db = SessionLocal() try: yield db finally: db.close() @app.post('/jobs/', response_model=schemas.JobCreate) def create_job(job: schemas.JobCreate, db: 
Session=Depends(get_db)): db_job = crud.get_job(db, job.title) if db_job: raise HTTPException(status_code=400, detail='Job already Posted') return crud.create_job(db=db, job=job) @app.get('/jobs/', response_model=List[schemas.Job]) def read_jobs(skip: int=0, limit: int=100, db: Session=Depends(get_db)): jobs = crud.get_jobs(db, skip=skip, limit=limit) return jobs @app.get('/jobs/{job_id}', response_model=schemas.Job) def read_job(job_id: int, db: Session=Depends(get_db)): db_job = crud.get_job(db, job_id=job_id) if db_job is None: raise HTTPException(status_code=404, detail='Job not found') return db_job @app.post('/cands/', response_model=schemas.CanCreate) def create_can(can: schemas.CanCreate, db: Session=Depends(get_db)): db_can = crud.get_candidate(db, can.email) if db_can: raise HTTPException(status_code=400, detail='Candidate already Present' ) return crud.create_candidate(db=db, can=can) @app.get('/cands/', response_model=List[schemas.Can]) def read_cans(skip: int=0, limit: int=100, db: Session=Depends(get_db)): cans = crud.get_candidates(db, skip=skip, limit=limit) return cans @app.get('/cands/{email}', response_model=schemas.Can) def read_can(email: str, db: Session=Depends(get_db)): db_can = crud.get_candidate(db, email) if db_can is None: raise HTTPException(status_code=404, detail='Candidate not found') return db_can @app.get('/jobapps/', response_model=List[schemas.AppBase]) def read_jobapps(skip: int=0, limit: int=100, db: Session=Depends(get_db)): jobapps = crud.get_jobapps(db, skip=skip, limit=limit) return jobapps @app.get('/jobapps/{appid}', response_model=schemas.AppBase) def read_jobapp(appid: int, db: Session=Depends(get_db)): db_jobapp = crud.get_jobapp(db, appid) if db_jobapp is None: raise HTTPException(status_code=404, detail='Job Application not found' ) return db_jobapp <|reserved_special_token_1|> <|reserved_special_token_0|> models.Base.metadata.create_all(bind=engine) app = FastAPI() def get_db(): db = SessionLocal() try: yield db finally: db.close() @app.post('/jobs/', response_model=schemas.JobCreate) def create_job(job: schemas.JobCreate, db: Session=Depends(get_db)): db_job = crud.get_job(db, job.title) if db_job: raise HTTPException(status_code=400, detail='Job already Posted') return crud.create_job(db=db, job=job) @app.get('/jobs/', response_model=List[schemas.Job]) def read_jobs(skip: int=0, limit: int=100, db: Session=Depends(get_db)): jobs = crud.get_jobs(db, skip=skip, limit=limit) return jobs @app.get('/jobs/{job_id}', response_model=schemas.Job) def read_job(job_id: int, db: Session=Depends(get_db)): db_job = crud.get_job(db, job_id=job_id) if db_job is None: raise HTTPException(status_code=404, detail='Job not found') return db_job @app.post('/cands/', response_model=schemas.CanCreate) def create_can(can: schemas.CanCreate, db: Session=Depends(get_db)): db_can = crud.get_candidate(db, can.email) if db_can: raise HTTPException(status_code=400, detail='Candidate already Present' ) return crud.create_candidate(db=db, can=can) @app.get('/cands/', response_model=List[schemas.Can]) def read_cans(skip: int=0, limit: int=100, db: Session=Depends(get_db)): cans = crud.get_candidates(db, skip=skip, limit=limit) return cans @app.get('/cands/{email}', response_model=schemas.Can) def read_can(email: str, db: Session=Depends(get_db)): db_can = crud.get_candidate(db, email) if db_can is None: raise HTTPException(status_code=404, detail='Candidate not found') return db_can @app.get('/jobapps/', response_model=List[schemas.AppBase]) def read_jobapps(skip: int=0, 
limit: int=100, db: Session=Depends(get_db)): jobapps = crud.get_jobapps(db, skip=skip, limit=limit) return jobapps @app.get('/jobapps/{appid}', response_model=schemas.AppBase) def read_jobapp(appid: int, db: Session=Depends(get_db)): db_jobapp = crud.get_jobapp(db, appid) if db_jobapp is None: raise HTTPException(status_code=404, detail='Job Application not found' ) return db_jobapp <|reserved_special_token_1|> from typing import List from fastapi import Depends, FastAPI, HTTPException from sqlalchemy.orm import Session from myfirstpython.fastapi import models, crud, schemas from myfirstpython.fastapi.dbconnection import engine, SessionLocal models.Base.metadata.create_all(bind=engine) app = FastAPI() # Dependency def get_db(): db = SessionLocal() try: yield db finally: db.close() @app.post("/jobs/", response_model=schemas.JobCreate) def create_job(job: schemas.JobCreate, db: Session = Depends(get_db)): db_job = crud.get_job(db, job.title) if db_job: raise HTTPException(status_code=400, detail="Job already Posted") return crud.create_job(db=db, job=job) @app.get("/jobs/", response_model=List[schemas.Job]) def read_jobs(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)): jobs = crud.get_jobs(db, skip=skip, limit=limit) return jobs @app.get("/jobs/{job_id}", response_model=schemas.Job) def read_job(job_id: int, db: Session = Depends(get_db)): db_job = crud.get_job(db, job_id=job_id) if db_job is None: raise HTTPException(status_code=404, detail="Job not found") return db_job @app.post("/cands/", response_model=schemas.CanCreate) def create_can(can: schemas.CanCreate, db: Session = Depends(get_db)): db_can = crud.get_candidate(db, can.email) if db_can: raise HTTPException(status_code=400, detail="Candidate already Present") return crud.create_candidate(db=db, can=can) @app.get("/cands/", response_model=List[schemas.Can]) def read_cans(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)): cans = crud.get_candidates(db, skip=skip, limit=limit) return cans @app.get("/cands/{email}", response_model=schemas.Can) def read_can(email: str, db: Session = Depends(get_db)): db_can = crud.get_candidate(db, email) if db_can is None: raise HTTPException(status_code=404, detail="Candidate not found") return db_can @app.get("/jobapps/", response_model=List[schemas.AppBase]) def read_jobapps(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)): jobapps = crud.get_jobapps(db, skip=skip, limit=limit) return jobapps @app.get("/jobapps/{appid}", response_model=schemas.AppBase) def read_jobapp(appid: int, db: Session = Depends(get_db)): db_jobapp = crud.get_jobapp(db, appid) if db_jobapp is None: raise HTTPException(status_code=404, detail="Job Application not found") return db_jobapp
flexible
{ "blob_id": "ad474f5120ca2a8c81b18071ab364e6d6cf9e653", "index": 7031, "step-1": "<mask token>\n\n\[email protected]('/jobs/', response_model=List[schemas.Job])\ndef read_jobs(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobs = crud.get_jobs(db, skip=skip, limit=limit)\n return jobs\n\n\[email protected]('/jobs/{job_id}', response_model=schemas.Job)\ndef read_job(job_id: int, db: Session=Depends(get_db)):\n db_job = crud.get_job(db, job_id=job_id)\n if db_job is None:\n raise HTTPException(status_code=404, detail='Job not found')\n return db_job\n\n\[email protected]('/cands/', response_model=schemas.CanCreate)\ndef create_can(can: schemas.CanCreate, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, can.email)\n if db_can:\n raise HTTPException(status_code=400, detail='Candidate already Present'\n )\n return crud.create_candidate(db=db, can=can)\n\n\[email protected]('/cands/', response_model=List[schemas.Can])\ndef read_cans(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n cans = crud.get_candidates(db, skip=skip, limit=limit)\n return cans\n\n\[email protected]('/cands/{email}', response_model=schemas.Can)\ndef read_can(email: str, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, email)\n if db_can is None:\n raise HTTPException(status_code=404, detail='Candidate not found')\n return db_can\n\n\[email protected]('/jobapps/', response_model=List[schemas.AppBase])\ndef read_jobapps(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobapps = crud.get_jobapps(db, skip=skip, limit=limit)\n return jobapps\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef get_db():\n db = SessionLocal()\n try:\n yield db\n finally:\n db.close()\n\n\n<mask token>\n\n\[email protected]('/jobs/', response_model=List[schemas.Job])\ndef read_jobs(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobs = crud.get_jobs(db, skip=skip, limit=limit)\n return jobs\n\n\[email protected]('/jobs/{job_id}', response_model=schemas.Job)\ndef read_job(job_id: int, db: Session=Depends(get_db)):\n db_job = crud.get_job(db, job_id=job_id)\n if db_job is None:\n raise HTTPException(status_code=404, detail='Job not found')\n return db_job\n\n\[email protected]('/cands/', response_model=schemas.CanCreate)\ndef create_can(can: schemas.CanCreate, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, can.email)\n if db_can:\n raise HTTPException(status_code=400, detail='Candidate already Present'\n )\n return crud.create_candidate(db=db, can=can)\n\n\[email protected]('/cands/', response_model=List[schemas.Can])\ndef read_cans(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n cans = crud.get_candidates(db, skip=skip, limit=limit)\n return cans\n\n\[email protected]('/cands/{email}', response_model=schemas.Can)\ndef read_can(email: str, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, email)\n if db_can is None:\n raise HTTPException(status_code=404, detail='Candidate not found')\n return db_can\n\n\[email protected]('/jobapps/', response_model=List[schemas.AppBase])\ndef read_jobapps(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobapps = crud.get_jobapps(db, skip=skip, limit=limit)\n return jobapps\n\n\[email protected]('/jobapps/{appid}', response_model=schemas.AppBase)\ndef read_jobapp(appid: int, db: Session=Depends(get_db)):\n db_jobapp = crud.get_jobapp(db, appid)\n if db_jobapp is None:\n raise HTTPException(status_code=404, detail='Job Application not found'\n )\n return db_jobapp\n", "step-3": 
"<mask token>\nmodels.Base.metadata.create_all(bind=engine)\n<mask token>\n\n\ndef get_db():\n db = SessionLocal()\n try:\n yield db\n finally:\n db.close()\n\n\[email protected]('/jobs/', response_model=schemas.JobCreate)\ndef create_job(job: schemas.JobCreate, db: Session=Depends(get_db)):\n db_job = crud.get_job(db, job.title)\n if db_job:\n raise HTTPException(status_code=400, detail='Job already Posted')\n return crud.create_job(db=db, job=job)\n\n\[email protected]('/jobs/', response_model=List[schemas.Job])\ndef read_jobs(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobs = crud.get_jobs(db, skip=skip, limit=limit)\n return jobs\n\n\[email protected]('/jobs/{job_id}', response_model=schemas.Job)\ndef read_job(job_id: int, db: Session=Depends(get_db)):\n db_job = crud.get_job(db, job_id=job_id)\n if db_job is None:\n raise HTTPException(status_code=404, detail='Job not found')\n return db_job\n\n\[email protected]('/cands/', response_model=schemas.CanCreate)\ndef create_can(can: schemas.CanCreate, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, can.email)\n if db_can:\n raise HTTPException(status_code=400, detail='Candidate already Present'\n )\n return crud.create_candidate(db=db, can=can)\n\n\[email protected]('/cands/', response_model=List[schemas.Can])\ndef read_cans(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n cans = crud.get_candidates(db, skip=skip, limit=limit)\n return cans\n\n\[email protected]('/cands/{email}', response_model=schemas.Can)\ndef read_can(email: str, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, email)\n if db_can is None:\n raise HTTPException(status_code=404, detail='Candidate not found')\n return db_can\n\n\[email protected]('/jobapps/', response_model=List[schemas.AppBase])\ndef read_jobapps(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobapps = crud.get_jobapps(db, skip=skip, limit=limit)\n return jobapps\n\n\[email protected]('/jobapps/{appid}', response_model=schemas.AppBase)\ndef read_jobapp(appid: int, db: Session=Depends(get_db)):\n db_jobapp = crud.get_jobapp(db, appid)\n if db_jobapp is None:\n raise HTTPException(status_code=404, detail='Job Application not found'\n )\n return db_jobapp\n", "step-4": "<mask token>\nmodels.Base.metadata.create_all(bind=engine)\napp = FastAPI()\n\n\ndef get_db():\n db = SessionLocal()\n try:\n yield db\n finally:\n db.close()\n\n\[email protected]('/jobs/', response_model=schemas.JobCreate)\ndef create_job(job: schemas.JobCreate, db: Session=Depends(get_db)):\n db_job = crud.get_job(db, job.title)\n if db_job:\n raise HTTPException(status_code=400, detail='Job already Posted')\n return crud.create_job(db=db, job=job)\n\n\[email protected]('/jobs/', response_model=List[schemas.Job])\ndef read_jobs(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobs = crud.get_jobs(db, skip=skip, limit=limit)\n return jobs\n\n\[email protected]('/jobs/{job_id}', response_model=schemas.Job)\ndef read_job(job_id: int, db: Session=Depends(get_db)):\n db_job = crud.get_job(db, job_id=job_id)\n if db_job is None:\n raise HTTPException(status_code=404, detail='Job not found')\n return db_job\n\n\[email protected]('/cands/', response_model=schemas.CanCreate)\ndef create_can(can: schemas.CanCreate, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, can.email)\n if db_can:\n raise HTTPException(status_code=400, detail='Candidate already Present'\n )\n return crud.create_candidate(db=db, can=can)\n\n\[email protected]('/cands/', 
response_model=List[schemas.Can])\ndef read_cans(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n cans = crud.get_candidates(db, skip=skip, limit=limit)\n return cans\n\n\[email protected]('/cands/{email}', response_model=schemas.Can)\ndef read_can(email: str, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, email)\n if db_can is None:\n raise HTTPException(status_code=404, detail='Candidate not found')\n return db_can\n\n\[email protected]('/jobapps/', response_model=List[schemas.AppBase])\ndef read_jobapps(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobapps = crud.get_jobapps(db, skip=skip, limit=limit)\n return jobapps\n\n\[email protected]('/jobapps/{appid}', response_model=schemas.AppBase)\ndef read_jobapp(appid: int, db: Session=Depends(get_db)):\n db_jobapp = crud.get_jobapp(db, appid)\n if db_jobapp is None:\n raise HTTPException(status_code=404, detail='Job Application not found'\n )\n return db_jobapp\n", "step-5": "from typing import List\n\nfrom fastapi import Depends, FastAPI, HTTPException\nfrom sqlalchemy.orm import Session\n\nfrom myfirstpython.fastapi import models, crud, schemas\nfrom myfirstpython.fastapi.dbconnection import engine, SessionLocal\n\nmodels.Base.metadata.create_all(bind=engine)\n\napp = FastAPI()\n\n\n# Dependency\ndef get_db():\n db = SessionLocal()\n try:\n yield db\n finally:\n db.close()\n\n\[email protected](\"/jobs/\", response_model=schemas.JobCreate)\ndef create_job(job: schemas.JobCreate, db: Session = Depends(get_db)):\n db_job = crud.get_job(db, job.title)\n if db_job:\n raise HTTPException(status_code=400, detail=\"Job already Posted\")\n return crud.create_job(db=db, job=job)\n\n\[email protected](\"/jobs/\", response_model=List[schemas.Job])\ndef read_jobs(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):\n jobs = crud.get_jobs(db, skip=skip, limit=limit)\n return jobs\n\n\[email protected](\"/jobs/{job_id}\", response_model=schemas.Job)\ndef read_job(job_id: int, db: Session = Depends(get_db)):\n db_job = crud.get_job(db, job_id=job_id)\n if db_job is None:\n raise HTTPException(status_code=404, detail=\"Job not found\")\n return db_job\n\n\[email protected](\"/cands/\", response_model=schemas.CanCreate)\ndef create_can(can: schemas.CanCreate, db: Session = Depends(get_db)):\n db_can = crud.get_candidate(db, can.email)\n if db_can:\n raise HTTPException(status_code=400, detail=\"Candidate already Present\")\n return crud.create_candidate(db=db, can=can)\n\n\[email protected](\"/cands/\", response_model=List[schemas.Can])\ndef read_cans(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):\n cans = crud.get_candidates(db, skip=skip, limit=limit)\n return cans\n\n\[email protected](\"/cands/{email}\", response_model=schemas.Can)\ndef read_can(email: str, db: Session = Depends(get_db)):\n db_can = crud.get_candidate(db, email)\n if db_can is None:\n raise HTTPException(status_code=404, detail=\"Candidate not found\")\n return db_can\n\n\[email protected](\"/jobapps/\", response_model=List[schemas.AppBase])\ndef read_jobapps(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):\n jobapps = crud.get_jobapps(db, skip=skip, limit=limit)\n return jobapps\n\n\[email protected](\"/jobapps/{appid}\", response_model=schemas.AppBase)\ndef read_jobapp(appid: int, db: Session = Depends(get_db)):\n db_jobapp = crud.get_jobapp(db, appid)\n if db_jobapp is None:\n raise HTTPException(status_code=404, detail=\"Job Application not found\")\n return db_jobapp\n", "step-ids": [ 6, 8, 
10, 11, 13 ] }
[ 6, 8, 10, 11, 13 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def get_user_agent(): return {'User-Agent': random.choice(user_agent)} <|reserved_special_token_1|> <|reserved_special_token_0|> user_agent = [ 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50' , 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50' , 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0' , 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko' , 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)', 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)', 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1' , 'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1', 'Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11' , 'Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11' , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)' , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)', 'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5' , 'Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5' , 'Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5' , 'Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1' , 'MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1' , 'Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10' , 'Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13' , 'Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+' , 'Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0' , 'Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124' , 'Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)' , 'UCWEB7.0.2.37/28/999', 'NOKIA5700/ 
UCWEB7.0.2.37/28/999', 'Openwave/ UCWEB7.0.2.37/28/999', 'Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999', 'Mozilla/6.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/8.0 Mobile/10A5376e Safari/8536.25' ] def get_user_agent(): return {'User-Agent': random.choice(user_agent)} <|reserved_special_token_1|> import random <|reserved_special_token_0|> user_agent = [ 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50' , 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50' , 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0' , 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko' , 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)', 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)', 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1' , 'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1', 'Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11' , 'Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11' , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)' , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)', 'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5' , 'Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5' , 'Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5' , 'Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1' , 'MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1' , 'Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10' , 'Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13' , 'Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+' , 'Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0' , 'Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) 
AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124' , 'Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)' , 'UCWEB7.0.2.37/28/999', 'NOKIA5700/ UCWEB7.0.2.37/28/999', 'Openwave/ UCWEB7.0.2.37/28/999', 'Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999', 'Mozilla/6.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/8.0 Mobile/10A5376e Safari/8536.25' ] def get_user_agent(): return {'User-Agent': random.choice(user_agent)} <|reserved_special_token_1|> import random ''' 通用文件头,浏览器访问时随机选择 ''' user_agent = [ "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50", "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50", "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0", "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)", "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1", "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1", "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11", "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11", "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)", "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)", "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)", "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)", "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)", "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)", "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)", "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)", "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5", "Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5", "Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5", "Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1", "MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1", "Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10", "Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13", "Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+", "Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 
(KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0", "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124", "Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)", "UCWEB7.0.2.37/28/999", "NOKIA5700/ UCWEB7.0.2.37/28/999", "Openwave/ UCWEB7.0.2.37/28/999", "Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999", # iPhone 6: "Mozilla/6.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/8.0 Mobile/10A5376e Safari/8536.25" ] def get_user_agent(): return {'User-Agent': random.choice(user_agent)}
flexible
{ "blob_id": "5ed91b98ece3ac9525e9d2c42db9c9d9912d5ed2", "index": 9029, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef get_user_agent():\n return {'User-Agent': random.choice(user_agent)}\n", "step-3": "<mask token>\nuser_agent = [\n 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50'\n ,\n 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50'\n ,\n 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0'\n ,\n 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko'\n , 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)',\n 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)',\n 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)',\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1'\n , 'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',\n 'Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11'\n , 'Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11',\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11'\n , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)'\n , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',\n 'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'\n ,\n 'Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'\n ,\n 'Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'\n ,\n 'Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'\n ,\n 'MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'\n ,\n 'Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10'\n ,\n 'Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13'\n ,\n 'Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+'\n ,\n 'Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0'\n ,\n 'Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124'\n ,\n 'Mozilla/5.0 (compatible; MSIE 9.0; 
Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)'\n , 'UCWEB7.0.2.37/28/999', 'NOKIA5700/ UCWEB7.0.2.37/28/999',\n 'Openwave/ UCWEB7.0.2.37/28/999',\n 'Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999',\n 'Mozilla/6.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/8.0 Mobile/10A5376e Safari/8536.25'\n ]\n\n\ndef get_user_agent():\n return {'User-Agent': random.choice(user_agent)}\n", "step-4": "import random\n<mask token>\nuser_agent = [\n 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50'\n ,\n 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50'\n ,\n 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0'\n ,\n 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko'\n , 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)',\n 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)',\n 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)',\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1'\n , 'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',\n 'Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11'\n , 'Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11',\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11'\n , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)'\n , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',\n 'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'\n ,\n 'Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'\n ,\n 'Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'\n ,\n 'Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'\n ,\n 'MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'\n ,\n 'Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10'\n ,\n 'Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13'\n ,\n 'Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+'\n ,\n 'Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, 
like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0'\n ,\n 'Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124'\n ,\n 'Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)'\n , 'UCWEB7.0.2.37/28/999', 'NOKIA5700/ UCWEB7.0.2.37/28/999',\n 'Openwave/ UCWEB7.0.2.37/28/999',\n 'Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999',\n 'Mozilla/6.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/8.0 Mobile/10A5376e Safari/8536.25'\n ]\n\n\ndef get_user_agent():\n return {'User-Agent': random.choice(user_agent)}\n", "step-5": "import random\n\n'''\n通用文件头,浏览器访问时随机选择\n'''\n\nuser_agent = [\n \"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50\",\n \"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50\",\n \"Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0\",\n \"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko\",\n \"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)\",\n \"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)\",\n \"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)\",\n \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1\",\n \"Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1\",\n \"Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11\",\n \"Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11\",\n \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)\",\n \"Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5\",\n \"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5\",\n \"Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5\",\n \"Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1\",\n \"MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1\",\n \"Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10\",\n \"Mozilla/5.0 (Linux; U; Android 3.0; en-us; 
Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13\",\n \"Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+\",\n \"Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0\",\n \"Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124\",\n \"Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)\",\n \"UCWEB7.0.2.37/28/999\",\n \"NOKIA5700/ UCWEB7.0.2.37/28/999\",\n \"Openwave/ UCWEB7.0.2.37/28/999\",\n \"Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999\",\n # iPhone 6:\n \"Mozilla/6.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/8.0 Mobile/10A5376e Safari/8536.25\"\n]\n\n\ndef get_user_agent():\n return {'User-Agent': random.choice(user_agent)}\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# SaveIsawQvector

import sys
import os

if os.path.exists("/opt/Mantid/bin"):
    sys.path.append("/opt/mantidnightly/bin")
    #sys.path.append("/opt/Mantid/bin")  # Linux cluster
    #sys.path.append('/opt/mantidunstable/bin')
else:
    sys.path.append("C:/MantidInstall/bin")  # Windows PC

# import mantid
from mantid.simpleapi import *

user_input = open('SaveIsawQvector.inp', 'r')

lineString = user_input.readline()
lineList = lineString.split()
data_directory = lineList[0]

lineString = user_input.readline()
lineList = lineString.split()
output_directory = lineList[0]

input_run_nums = open('monitorCtsAndAngles.dat', 'r')

min_tof = 2000
max_tof = 16500

start_time = 0.0
stop_time = 1.0e06

while True:

    lineString = input_run_nums.readline()
    lineList = lineString.split()
    if len(lineList) == 0: break
    run_num = lineList[0]
    print(run_num)
    full_name = data_directory + run_num + '_event.nxs'

    event_ws = 'TOPAZ_' + run_num

    LoadEventNexus(Filename=full_name, OutputWorkspace=event_ws,
                   FilterByTofMin=min_tof, FilterByTofMax=max_tof,
                   FilterByTimeStart=start_time, FilterByTimeStop=stop_time)

    outputFile = output_directory + run_num + '_SaveIsawQvector.bin'

    SaveIsawQvector(InputWorkspace=event_ws, Filename=outputFile)

    DeleteWorkspace(Workspace=event_ws)

print('All done!')
normal
{ "blob_id": "b72bf00d156862c7bddecb396da3752be964ee66", "index": 5463, "step-1": "# SaveIsawQvector\r\n\r\nimport sys\nimport os\n\r\nif os.path.exists(\"/opt/Mantid/bin\"):\n sys.path.append(\"/opt/mantidnightly/bin\")\n #sys.path.append(\"/opt/Mantid/bin\") # Linux cluster\n #sys.path.append('/opt/mantidunstable/bin')\nelse:\n sys.path.append(\"C:/MantidInstall/bin\") # Windows PC\n\r\n# import mantid\r\nfrom mantid.simpleapi import *\n\nuser_input = open('SaveIsawQvector.inp', 'r')\n\nlineString = user_input.readline()\r\nlineList = lineString.split()\ndata_directory = lineList[0]\n\nlineString = user_input.readline()\r\nlineList = lineString.split()\noutput_directory = lineList[0]\r\n\ninput_run_nums = open('monitorCtsAndAngles.dat', 'r')\n\nmin_tof = 2000\r\nmax_tof = 16500\r\n\nstart_time = 0.0\nstop_time = 1.0e06\n\nwhile True:\r\n\n lineString = input_run_nums.readline()\n lineList = lineString.split()\n if len(lineList) == 0: break\n run_num = lineList[0]\n print run_num\r\n full_name = data_directory + run_num + '_event.nxs'\n\r\n event_ws = 'TOPAZ_' + run_num\n\r\n LoadEventNexus( Filename = full_name, OutputWorkspace = event_ws,\n FilterByTofMin = min_tof, FilterByTofMax = max_tof,\n FilterByTimeStart = start_time, FilterByTimeStop = stop_time )\n\n outputFile = output_directory + run_num + '_SaveIsawQvector.bin'\n\r\n SaveIsawQvector(InputWorkspace = event_ws, \n Filename = outputFile)\n \n DeleteWorkspace(Workspace = event_ws)\n\nprint 'All done!'\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
""" This example shows how to communicate with a SH05 (shutter) connected to a KSC101 (KCube Solenoid). """ # this "if" statement is used so that Sphinx does not execute this script when the docs are being built if __name__ == '__main__': import os import time from msl.equipment import EquipmentRecord, ConnectionRecord, Backend from msl.equipment.resources.thorlabs import MotionControl # ensure that the Kinesis folder is available on PATH os.environ['PATH'] += os.pathsep + 'C:/Program Files/Thorlabs/Kinesis' # rather than reading the EquipmentRecord from a database we can create it manually record = EquipmentRecord( manufacturer='Thorlabs', model='KSC101', serial='68000297', # update the serial number for your KSC101 connection=ConnectionRecord( backend=Backend.MSL, address='SDK::Thorlabs.MotionControl.KCube.Solenoid.dll', ), ) def is_open(): return shutter.get_operating_state() == 1 # avoid the FT_DeviceNotFound error MotionControl.build_device_list() # connect to the KCube Solenoid shutter = record.connect() print('Connected to {}'.format(shutter)) # start polling at 200 ms shutter.start_polling(200) # set the operating mode to SC_OperatingModes.SC_Manual shutter.set_operating_mode('Manual') for i in range(5): # set the operating state to SC_OperatingStates.SC_Active print('Opening the shutter...') shutter.set_operating_state('Active') while not is_open(): time.sleep(0.05) print(' Is the shutter open? {}'.format(is_open())) time.sleep(1) # set the operating state to SC_OperatingStates.SC_Inactive print('Closing the shutter...') shutter.set_operating_state('Inactive') while is_open(): time.sleep(0.05) print(' Is the shutter open? {}'.format(is_open())) time.sleep(1) # stop polling and close the connection shutter.stop_polling() shutter.disconnect()
normal
{ "blob_id": "04b5df5cfd052390f057c6f13b2e21d27bac6449", "index": 943, "step-1": "<mask token>\n", "step-2": "<mask token>\nif __name__ == '__main__':\n import os\n import time\n from msl.equipment import EquipmentRecord, ConnectionRecord, Backend\n from msl.equipment.resources.thorlabs import MotionControl\n os.environ['PATH'] += os.pathsep + 'C:/Program Files/Thorlabs/Kinesis'\n record = EquipmentRecord(manufacturer='Thorlabs', model='KSC101',\n serial='68000297', connection=ConnectionRecord(backend=Backend.MSL,\n address='SDK::Thorlabs.MotionControl.KCube.Solenoid.dll'))\n\n def is_open():\n return shutter.get_operating_state() == 1\n MotionControl.build_device_list()\n shutter = record.connect()\n print('Connected to {}'.format(shutter))\n shutter.start_polling(200)\n shutter.set_operating_mode('Manual')\n for i in range(5):\n print('Opening the shutter...')\n shutter.set_operating_state('Active')\n while not is_open():\n time.sleep(0.05)\n print(' Is the shutter open? {}'.format(is_open()))\n time.sleep(1)\n print('Closing the shutter...')\n shutter.set_operating_state('Inactive')\n while is_open():\n time.sleep(0.05)\n print(' Is the shutter open? {}'.format(is_open()))\n time.sleep(1)\n shutter.stop_polling()\n shutter.disconnect()\n", "step-3": "\"\"\"\nThis example shows how to communicate with a SH05 (shutter) connected to a KSC101 (KCube Solenoid).\n\"\"\"\n\n# this \"if\" statement is used so that Sphinx does not execute this script when the docs are being built\nif __name__ == '__main__':\n import os\n import time\n\n from msl.equipment import EquipmentRecord, ConnectionRecord, Backend\n from msl.equipment.resources.thorlabs import MotionControl\n\n # ensure that the Kinesis folder is available on PATH\n os.environ['PATH'] += os.pathsep + 'C:/Program Files/Thorlabs/Kinesis'\n\n # rather than reading the EquipmentRecord from a database we can create it manually\n record = EquipmentRecord(\n manufacturer='Thorlabs',\n model='KSC101',\n serial='68000297', # update the serial number for your KSC101\n connection=ConnectionRecord(\n backend=Backend.MSL,\n address='SDK::Thorlabs.MotionControl.KCube.Solenoid.dll',\n ),\n )\n\n def is_open():\n return shutter.get_operating_state() == 1\n\n # avoid the FT_DeviceNotFound error\n MotionControl.build_device_list()\n\n # connect to the KCube Solenoid\n shutter = record.connect()\n print('Connected to {}'.format(shutter))\n\n # start polling at 200 ms\n shutter.start_polling(200)\n\n # set the operating mode to SC_OperatingModes.SC_Manual\n shutter.set_operating_mode('Manual')\n\n for i in range(5):\n\n # set the operating state to SC_OperatingStates.SC_Active\n print('Opening the shutter...')\n shutter.set_operating_state('Active')\n while not is_open():\n time.sleep(0.05)\n print(' Is the shutter open? {}'.format(is_open()))\n\n time.sleep(1)\n\n # set the operating state to SC_OperatingStates.SC_Inactive\n print('Closing the shutter...')\n shutter.set_operating_state('Inactive')\n while is_open():\n time.sleep(0.05)\n print(' Is the shutter open? {}'.format(is_open()))\n\n time.sleep(1)\n\n # stop polling and close the connection\n shutter.stop_polling()\n shutter.disconnect()\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
# Generate some object patterns and save them in JSON format
import json
import math
import random
from obstacle import *

def main(map):
    obs = []
    for x in range(1, 35):
        obs.append(Obstacle(random.randint(0, map.getHeight()),
                            y=random.randint(0, map.getWidth()),
                            radius=20).toJsonObject())

    jsonOb = {'map': {'obstacle': obs}}

    print(jsonOb)
    F = open('testDump.json', 'w')
    json.dump(jsonOb, F, indent=4, separators=(',', ': '))
    F.close()

if __name__ == '__main__':
    # NOTE: main() requires a map object providing getHeight()/getWidth();
    # calling it with no argument raises a TypeError.
    main()
normal
{ "blob_id": "b849a2902c8596daa2c6da4de7b9d1c07b34d136", "index": 7883, "step-1": "# Generate some object patterns as save as JSON format\nimport json\nimport math\nimport random\nfrom obstacle import *\n\ndef main(map):\n\tobs = []\n\tfor x in range(1,35):\n\t\tobs.append(Obstacle(random.randint(0,map.getHeight()), y=random.randint(0,map.getWidth()), radius=20).toJsonObject())\n\n\tjsonOb={'map': {'obstacle': obs}}\n\t\n\tprint jsonOb\n\tF = open('testDump.json', 'w')\n\tjson.dump(jsonOb, F, indent=4, separators=(',', ': '))\n\tF.close()\n\nif __name__ == '__main__':\n\tmain()", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
'''
    Module for interaction with database
'''

import sqlite3
from enum import Enum

DB_NAME = 'categories.db'


class State(Enum):
    ok = True
    error = False


def get_db_connection():
    try:
        global connection
        connection = sqlite3.connect(DB_NAME)
        cursor = connection.cursor()
    except Exception:
        print("Error connecting to db {0}".format(DB_NAME))
        connection.close()
        return

    return cursor


def close_db_connection():
    try:
        connection.close()
    except Exception:
        print("Error closing connection")


def create_new_category(category):
    state = State.ok

    try:
        cursor = get_db_connection()
        query = "CREATE TABLE {0} (word varchar(15) primary key, weight real)".format(category)
        cursor.execute(query)
    except Exception:
        state = State.error
        print("Error with creating new category")
    finally:
        close_db_connection()

    return state


def get_category_data(category):
    state = State.ok
    data = list()

    try:
        cursor = get_db_connection()
        query = "SELECT * from {0} ORDER BY weight DESC".format(category)
        for row in cursor.execute(query):
            data.append(row)
    except Exception:
        state = State.error
        print("Error with getting data from {0} category".format(category))
    finally:
        close_db_connection()

    return state.value, data


def set_category_data(category, data):
    state = State.ok
    try:
        cursor = get_db_connection()
        for key, value in data:
            # Parameterized query: interpolating the word directly produces an
            # unquoted string in VALUES(...), which fails for plain words and
            # invites SQL injection.
            query = 'INSERT OR REPLACE INTO {0} (word, weight) VALUES(?, ?)'.format(category)
            cursor.execute(query, (key, value))

        connection.commit()
    except Exception:
        state = State.error
        print("Error with setting data to database in {0} category".format(category))
    finally:
        close_db_connection()

    return state.value


def get_file_names_in_category(category):
    state = State.ok
    names = list()
    try:
        cursor = get_db_connection()
        query = "SELECT * FROM result WHERE category = '{0}'".format(category)
        for row in cursor.execute(query):
            names.append(row)
    except Exception:
        state = State.error
        print("Error with getting category file names")
    finally:
        close_db_connection()

    return state.value, names


def get_file_names():
    state = State.ok
    names = list()
    try:
        cursor = get_db_connection()
        query = "SELECT * FROM result"
        for row in cursor.execute(query):
            names.append(row)
    except Exception:
        state = State.error
        print("Error with getting category file names")
    finally:
        close_db_connection()

    return state.value, names
normal
{ "blob_id": "9b3c2604b428295eda16030b45cf739e714f3d00", "index": 1614, "step-1": "<mask token>\n\n\nclass State(Enum):\n ok = True\n error = False\n\n\n<mask token>\n\n\ndef close_db_connection():\n try:\n connection.close()\n except Exception:\n print('Error closing connection')\n\n\ndef create_new_category(category):\n state = State.ok\n try:\n cursor = get_db_connection()\n query = ('CREATE TABLE {0} (word varchar(15) primary key, weight real)'\n .format(category))\n cursor.execute(query)\n except Exception:\n state = State.error\n print('Error with creating new category')\n finally:\n close_db_connection()\n return state\n\n\ndef get_category_data(category):\n state = State.ok\n data = list()\n try:\n cursor = get_db_connection()\n query = 'SELECT * from {0} ORDER BY weight DESC'.format(category)\n for row in cursor.execute(query):\n data.append(row)\n except Exception:\n state = State.error\n print('Error with getting data from {0} category'.format(category))\n finally:\n close_db_connection()\n return state.value, data\n\n\n<mask token>\n\n\ndef get_file_names_in_category(category):\n state = State.ok\n names = list()\n try:\n cursor = get_db_connection()\n query = \"SELECT * FROM result WHERE category = '{0}'\".format(category)\n for row in cursor.execute(query):\n names.append(row)\n except Exception:\n state = State.error\n print('Error with getting category file names')\n finally:\n close_db_connection()\n return state.value, names\n\n\ndef get_file_names():\n state = State.ok\n names = list()\n try:\n cursor = get_db_connection()\n query = 'SELECT * FROM result'\n for row in cursor.execute(query):\n names.append(row)\n except Exception:\n state = State.error\n print('Error with getting category file names')\n finally:\n close_db_connection()\n return state.value, names\n", "step-2": "<mask token>\n\n\nclass State(Enum):\n ok = True\n error = False\n\n\ndef get_db_connection():\n try:\n global connection\n connection = sqlite3.connect(DB_NAME)\n cursor = connection.cursor()\n except Exception:\n print('Error connection db {0}'.format(DB_NAME))\n connection.close()\n return\n return cursor\n\n\ndef close_db_connection():\n try:\n connection.close()\n except Exception:\n print('Error closing connection')\n\n\ndef create_new_category(category):\n state = State.ok\n try:\n cursor = get_db_connection()\n query = ('CREATE TABLE {0} (word varchar(15) primary key, weight real)'\n .format(category))\n cursor.execute(query)\n except Exception:\n state = State.error\n print('Error with creating new category')\n finally:\n close_db_connection()\n return state\n\n\ndef get_category_data(category):\n state = State.ok\n data = list()\n try:\n cursor = get_db_connection()\n query = 'SELECT * from {0} ORDER BY weight DESC'.format(category)\n for row in cursor.execute(query):\n data.append(row)\n except Exception:\n state = State.error\n print('Error with getting data from {0} category'.format(category))\n finally:\n close_db_connection()\n return state.value, data\n\n\n<mask token>\n\n\ndef get_file_names_in_category(category):\n state = State.ok\n names = list()\n try:\n cursor = get_db_connection()\n query = \"SELECT * FROM result WHERE category = '{0}'\".format(category)\n for row in cursor.execute(query):\n names.append(row)\n except Exception:\n state = State.error\n print('Error with getting category file names')\n finally:\n close_db_connection()\n return state.value, names\n\n\ndef get_file_names():\n state = State.ok\n names = list()\n try:\n cursor = get_db_connection()\n query = 
'SELECT * FROM result'\n for row in cursor.execute(query):\n names.append(row)\n except Exception:\n state = State.error\n print('Error with getting category file names')\n finally:\n close_db_connection()\n return state.value, names\n", "step-3": "<mask token>\nDB_NAME = 'categories.db'\n\n\nclass State(Enum):\n ok = True\n error = False\n\n\ndef get_db_connection():\n try:\n global connection\n connection = sqlite3.connect(DB_NAME)\n cursor = connection.cursor()\n except Exception:\n print('Error connection db {0}'.format(DB_NAME))\n connection.close()\n return\n return cursor\n\n\ndef close_db_connection():\n try:\n connection.close()\n except Exception:\n print('Error closing connection')\n\n\ndef create_new_category(category):\n state = State.ok\n try:\n cursor = get_db_connection()\n query = ('CREATE TABLE {0} (word varchar(15) primary key, weight real)'\n .format(category))\n cursor.execute(query)\n except Exception:\n state = State.error\n print('Error with creating new category')\n finally:\n close_db_connection()\n return state\n\n\ndef get_category_data(category):\n state = State.ok\n data = list()\n try:\n cursor = get_db_connection()\n query = 'SELECT * from {0} ORDER BY weight DESC'.format(category)\n for row in cursor.execute(query):\n data.append(row)\n except Exception:\n state = State.error\n print('Error with getting data from {0} category'.format(category))\n finally:\n close_db_connection()\n return state.value, data\n\n\ndef set_category_data(category, data):\n state = State.ok\n try:\n cursor = get_db_connection()\n for key, value in data:\n query = (\n 'INSERT OR REPLACE INTO {0} (word, weight) VALUES({1},{2})'\n .format(category, key, value))\n cursor.execute(query)\n connection.commit()\n except Exception:\n state = State.error\n print('Error with setting data to database in {0} category'.format(\n category))\n finally:\n close_db_connection()\n return state.value\n\n\ndef get_file_names_in_category(category):\n state = State.ok\n names = list()\n try:\n cursor = get_db_connection()\n query = \"SELECT * FROM result WHERE category = '{0}'\".format(category)\n for row in cursor.execute(query):\n names.append(row)\n except Exception:\n state = State.error\n print('Error with getting category file names')\n finally:\n close_db_connection()\n return state.value, names\n\n\ndef get_file_names():\n state = State.ok\n names = list()\n try:\n cursor = get_db_connection()\n query = 'SELECT * FROM result'\n for row in cursor.execute(query):\n names.append(row)\n except Exception:\n state = State.error\n print('Error with getting category file names')\n finally:\n close_db_connection()\n return state.value, names\n", "step-4": "<mask token>\nimport sqlite3\nfrom enum import Enum\nDB_NAME = 'categories.db'\n\n\nclass State(Enum):\n ok = True\n error = False\n\n\ndef get_db_connection():\n try:\n global connection\n connection = sqlite3.connect(DB_NAME)\n cursor = connection.cursor()\n except Exception:\n print('Error connection db {0}'.format(DB_NAME))\n connection.close()\n return\n return cursor\n\n\ndef close_db_connection():\n try:\n connection.close()\n except Exception:\n print('Error closing connection')\n\n\ndef create_new_category(category):\n state = State.ok\n try:\n cursor = get_db_connection()\n query = ('CREATE TABLE {0} (word varchar(15) primary key, weight real)'\n .format(category))\n cursor.execute(query)\n except Exception:\n state = State.error\n print('Error with creating new category')\n finally:\n close_db_connection()\n return state\n\n\ndef 
get_category_data(category):\n state = State.ok\n data = list()\n try:\n cursor = get_db_connection()\n query = 'SELECT * from {0} ORDER BY weight DESC'.format(category)\n for row in cursor.execute(query):\n data.append(row)\n except Exception:\n state = State.error\n print('Error with getting data from {0} category'.format(category))\n finally:\n close_db_connection()\n return state.value, data\n\n\ndef set_category_data(category, data):\n state = State.ok\n try:\n cursor = get_db_connection()\n for key, value in data:\n query = (\n 'INSERT OR REPLACE INTO {0} (word, weight) VALUES({1},{2})'\n .format(category, key, value))\n cursor.execute(query)\n connection.commit()\n except Exception:\n state = State.error\n print('Error with setting data to database in {0} category'.format(\n category))\n finally:\n close_db_connection()\n return state.value\n\n\ndef get_file_names_in_category(category):\n state = State.ok\n names = list()\n try:\n cursor = get_db_connection()\n query = \"SELECT * FROM result WHERE category = '{0}'\".format(category)\n for row in cursor.execute(query):\n names.append(row)\n except Exception:\n state = State.error\n print('Error with getting category file names')\n finally:\n close_db_connection()\n return state.value, names\n\n\ndef get_file_names():\n state = State.ok\n names = list()\n try:\n cursor = get_db_connection()\n query = 'SELECT * FROM result'\n for row in cursor.execute(query):\n names.append(row)\n except Exception:\n state = State.error\n print('Error with getting category file names')\n finally:\n close_db_connection()\n return state.value, names\n", "step-5": "'''\n Module for interaction with database\n'''\n\nimport sqlite3\nfrom enum import Enum\n\nDB_NAME = 'categories.db'\n\n\nclass State(Enum):\n ok = True\n error = False\n\n\ndef get_db_connection():\n try:\n global connection\n connection = sqlite3.connect(DB_NAME)\n cursor = connection.cursor()\n except Exception:\n print(\"Error connection db {0}\".format(DB_NAME))\n connection.close()\n return\n\n return cursor\n\n\ndef close_db_connection():\n try:\n connection.close()\n except Exception:\n print(\"Error closing connection\")\n\n\ndef create_new_category(category):\n state = State.ok\n\n try:\n cursor = get_db_connection()\n query = \"CREATE TABLE {0} (word varchar(15) primary key, weight real)\".format(category)\n cursor.execute(query)\n except Exception:\n state = State.error\n print(\"Error with creating new category\")\n finally:\n close_db_connection()\n\n return state\n\n\ndef get_category_data(category):\n state = State.ok\n data = list()\n\n try:\n cursor = get_db_connection()\n query = \"SELECT * from {0} ORDER BY weight DESC\".format(category)\n for row in cursor.execute(query):\n data.append(row)\n except Exception:\n state = State.error\n print(\"Error with getting data from {0} category\".format(category))\n finally:\n close_db_connection()\n\n return state.value, data\n\n\ndef set_category_data(category, data):\n state = State.ok\n try:\n cursor = get_db_connection()\n for key, value in data:\n query = 'INSERT OR REPLACE INTO {0} (word, weight) VALUES({1},{2})'.format(category, key, value)\n cursor.execute(query)\n\n connection.commit()\n except Exception:\n state = State.error\n print(\"Error with setting data to database in {0} category\".format(category))\n finally:\n close_db_connection()\n\n return state.value\n\ndef get_file_names_in_category(category):\n state = State.ok\n names = list()\n try:\n cursor = get_db_connection()\n query = \"SELECT * FROM result WHERE 
category = '{0}'\".format(category)\n for row in cursor.execute(query):\n names.append(row)\n except Exception:\n state = State.error\n print(\"Error with getting category file names\")\n finally:\n close_db_connection()\n\n return state.value, names\n\ndef get_file_names():\n state = State.ok\n names = list()\n try:\n cursor = get_db_connection()\n query = \"SELECT * FROM result\"\n for row in cursor.execute(query):\n names.append(row)\n except Exception:\n state = State.error\n print(\"Error with getting category file names\")\n finally:\n close_db_connection()\n\n return state.value, names", "step-ids": [ 7, 8, 10, 11, 12 ] }
[ 7, 8, 10, 11, 12 ]
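The module in the record above builds every SQL statement with str.format, so its INSERT would emit unquoted string values and is open to SQL injection. A minimal sketch of the same insert-or-replace idea with parameterized queries; only the table name, which SQLite placeholders cannot bind, is still formatted in, and the table and rows below are hypothetical:

import sqlite3


def set_category_data(connection, category, data):
    # data is an iterable of (word, weight) pairs; values are bound through
    # "?" placeholders instead of being interpolated into the SQL text
    query = 'INSERT OR REPLACE INTO {0} (word, weight) VALUES (?, ?)'.format(category)
    connection.executemany(query, data)
    connection.commit()


connection = sqlite3.connect(':memory:')
connection.execute('CREATE TABLE science (word varchar(15) primary key, weight real)')
set_category_data(connection, 'science', [('quark', 0.9), ('lepton', 0.7)])
print(connection.execute('SELECT * FROM science ORDER BY weight DESC').fetchall())
connection.close()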
from math import exp
from math import e
import numpy as np
import decimal
import pandas as pd

# Build a population of exponential(rate=1.2) density values over a grid of x.
pop = []
x = 0
for a in range(1, 10001):
    pop.append(1.2 * e ** (-1.2 * x))
    x += 0.0001  # step along the support of the distribution

# Slice the population into samples S100, S200, ..., S10000.
for k in range(100, 10100, 100):
    exec(f'S{k} = pop[1:k]')

####################################################################################

import numpy as np

# Draw the samples directly instead; note that numpy's `scale` is 1/rate.
for size in np.arange(100, 10100, 100):
    exec(f'S{size} = np.random.exponential(scale=1.2, size=size)')

len(S10000)

####################################################################################

import numpy as np

# another way to do it
# create a dictionary of samples
dict_samples = {}
for size in np.arange(100, 10100, 100):
    dict_samples[size] = np.random.exponential(scale=10 / 12, size=size)

dict_samples[100]

len(dict_samples[200])

1 / 1.2  # the expected mean for rate 1.2, i.e. the scale 10/12

for pos in np.arange(100, 10100, 100):
    sample = dict_samples[pos]
    sample_mean = sample.mean()
    print("The mean for sample {} is {}".format(pos, sample_mean))
normal
{ "blob_id": "adfdd988b7e208229f195308df8d63fd2799046f", "index": 8941, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor a in range(1, 10001):\n pop.append(1.2 * e ** (-1.2 * x))\n x = +0.0001\nfor k in range(100, 10100, 100):\n exec(f'S{k} =pop[1:k]')\n<mask token>\nfor size in np.arange(100, 10100, 100):\n exec(f'S{size} = np.random.exponential(scale=1.2,size=size)')\nlen(S10000)\n<mask token>\nfor size in np.arange(100, 10100, 100):\n dict_samples[size] = np.random.exponential(scale=10 / 12, size=size)\ndict_samples[100]\nlen(dict_samples[200])\n1 / 1.2\n<mask token>\nfor pos in np.arange(100, 10100, 100):\n sample = dict_samples[pos]\n sample_mean = sample.mean()\n print('The mean for sample {} is {}'.format(pos, sample_mean))\n", "step-3": "<mask token>\npop = []\nx = 0\nfor a in range(1, 10001):\n pop.append(1.2 * e ** (-1.2 * x))\n x = +0.0001\nfor k in range(100, 10100, 100):\n exec(f'S{k} =pop[1:k]')\n<mask token>\nfor size in np.arange(100, 10100, 100):\n exec(f'S{size} = np.random.exponential(scale=1.2,size=size)')\nlen(S10000)\n<mask token>\ndict_samples = {}\nfor size in np.arange(100, 10100, 100):\n dict_samples[size] = np.random.exponential(scale=10 / 12, size=size)\ndict_samples[100]\nlen(dict_samples[200])\n1 / 1.2\npos = 100\nfor pos in np.arange(100, 10100, 100):\n sample = dict_samples[pos]\n sample_mean = sample.mean()\n print('The mean for sample {} is {}'.format(pos, sample_mean))\n", "step-4": "from math import exp\nfrom math import e\nimport numpy as np\nimport decimal\nimport pandas as pd\npop = []\nx = 0\nfor a in range(1, 10001):\n pop.append(1.2 * e ** (-1.2 * x))\n x = +0.0001\nfor k in range(100, 10100, 100):\n exec(f'S{k} =pop[1:k]')\nimport numpy as np\nfor size in np.arange(100, 10100, 100):\n exec(f'S{size} = np.random.exponential(scale=1.2,size=size)')\nlen(S10000)\nimport numpy as np\ndict_samples = {}\nfor size in np.arange(100, 10100, 100):\n dict_samples[size] = np.random.exponential(scale=10 / 12, size=size)\ndict_samples[100]\nlen(dict_samples[200])\n1 / 1.2\npos = 100\nfor pos in np.arange(100, 10100, 100):\n sample = dict_samples[pos]\n sample_mean = sample.mean()\n print('The mean for sample {} is {}'.format(pos, sample_mean))\n", "step-5": "\r\nfrom math import exp\r\nfrom math import e\r\nimport numpy as np\r\nimport decimal\r\nimport pandas as pd\r\n\r\n\r\n\r\n\r\npop = []\r\nx = 0\r\nfor a in range(1,10001):\r\n pop.append((1.2)*e**(-1.2*x))\r\n x =+0.0001\r\n\r\n\r\nfor k in range(100,10100,100):\r\n exec(f'S{k} =pop[1:k]')\r\n\r\n\r\n####################################################################################\r\n\r\nimport numpy as np\r\n\r\nfor size in np.arange(100,10100,100):\t\r\n exec(f'S{size} = np.random.exponential(scale=1.2,size=size)')\r\n\r\nlen(S10000)\r\n\r\n####################################################################################\r\nimport numpy as np\r\n#another way to do it\r\n#create a dictionary of samples\r\ndict_samples = {} \r\nfor size in np.arange(100,10100,100):\t\r\n dict_samples[size]=np.random.exponential(scale=10/12,size=size)\r\n\r\n\r\ndict_samples[100]\r\n \r\nlen(dict_samples[200])\r\n\r\n1/1.2\r\n\r\npos = 100\r\nfor pos in np.arange(100,10100,100):\r\n sample = dict_samples[pos]\r\n sample_mean = sample.mean()\r\n print(\"The mean for sample {} is {}\".format(pos,sample_mean))\r\n\r\n\r\n\r\n \r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
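One thing worth making explicit about the record above: numpy.random.exponential takes a scale parameter equal to 1/lambda, so for the rate-1.2 density built in the first block the matching argument is scale=1/1.2 (the 10/12 used in the third block), not scale=1.2 as in the second. A quick sanity check, assuming only numpy:

import numpy as np

rng = np.random.default_rng(42)
for scale in (1.2, 1 / 1.2):
    sample = rng.exponential(scale=scale, size=100_000)
    print('scale={:.4f} -> sample mean {:.4f}'.format(scale, sample.mean()))
# the sample mean tracks the scale, so only scale=1/1.2 matches a rate-1.2 exponential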
<|reserved_special_token_0|>


def main():
    total_count = 0
    valid_count = 0
    with open(options['INPUT'], 'rb') as fh:
        reader = MARCReader(fh, to_unicode=True, force_utf8=True)
        if not options['--csv']:
            writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))
            for record in reader:
                include_record = False
                for item in record.get_fields('952'):
                    valid = validate_item(item)
                    total_count += 1
                    if valid is True:
                        valid_count += 1
                        include_record = True
                if include_record is True:
                    writer.write(record)
            print('Total items: %i | Items included: %i' % (total_count, valid_count))
        elif options['--csv']:
            koha_record_ids = set()
            for record in reader:
                total_count += 1
                for item in record.get_fields('952'):
                    valid = validate_item(item)
                    if valid:
                        id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(MARC_ID_SUBFIELD)[0]
                        koha_record_ids.add(id)
                        break
            csvreader = csv.DictReader(open(options['--csv'], 'r'))
            gg_record_ids = set()
            for row in csvreader:
                gg_record_ids.add(row[GG_ID_COLUMN])
            print('Total Koha Bibs: %i' % total_count)
            print('Koha Bibs with circulating items: %i ' % len(koha_record_ids))
            print('Total GreenGlass Bibs: %i' % len(gg_record_ids))
            print('Weeded Items (I in GG & not in Koha): %i' % len(gg_record_ids - koha_record_ids))
            print('Added Items (I in Koha & not in GG): %i' % len(koha_record_ids - gg_record_ids))


<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


def validate_item(item):
    status = []
    valid = True
    if item['q'] and item['q'] != '0':
        status.append('checked out')
    if item['7'] and item['7'] != '0':
        status.append(notforloan_codes[item['7']])
        valid = False
    if item['4'] and item['4'] != '0':
        status.append('damaged')
        valid = False
    if item['1'] and item['1'] != '0':
        status.append(lost_codes[item['1']])
        valid = False
    if item['0'] and item['0'] != '0':
        status.append('withdrawn')
        valid = False
    if item['c'] not in valid_locations:
        valid = False
    if item['y'] not in valid_types:
        valid = False
    if len(status) > 0 and options.get('--debug'):
        print('"' + record.title() + '" item status: ' + ', '.join(status))
    return valid


def main():
    total_count = 0
    valid_count = 0
    with open(options['INPUT'], 'rb') as fh:
        reader = MARCReader(fh, to_unicode=True, force_utf8=True)
        if not options['--csv']:
            writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))
            for record in reader:
                include_record = False
                for item in record.get_fields('952'):
                    valid = validate_item(item)
                    total_count += 1
                    if valid is True:
                        valid_count += 1
                        include_record = True
                if include_record is True:
                    writer.write(record)
            print('Total items: %i | Items included: %i' % (total_count, valid_count))
        elif options['--csv']:
            koha_record_ids = set()
            for record in reader:
                total_count += 1
                for item in record.get_fields('952'):
                    valid = validate_item(item)
                    if valid:
                        id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(MARC_ID_SUBFIELD)[0]
                        koha_record_ids.add(id)
                        break
            csvreader = csv.DictReader(open(options['--csv'], 'r'))
            gg_record_ids = set()
            for row in csvreader:
                gg_record_ids.add(row[GG_ID_COLUMN])
            print('Total Koha Bibs: %i' % total_count)
            print('Koha Bibs with circulating items: %i ' % len(koha_record_ids))
            print('Total GreenGlass Bibs: %i' % len(gg_record_ids))
            print('Weeded Items (I in GG & not in Koha): %i' % len(gg_record_ids - koha_record_ids))
            print('Added Items (I in Koha & not in GG): %i' % len(koha_record_ids - gg_record_ids))


<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


def validate_item(item):
    status = []
    valid = True
    if item['q'] and item['q'] != '0':
        status.append('checked out')
    if item['7'] and item['7'] != '0':
        status.append(notforloan_codes[item['7']])
        valid = False
    if item['4'] and item['4'] != '0':
        status.append('damaged')
        valid = False
    if item['1'] and item['1'] != '0':
        status.append(lost_codes[item['1']])
        valid = False
    if item['0'] and item['0'] != '0':
        status.append('withdrawn')
        valid = False
    if item['c'] not in valid_locations:
        valid = False
    if item['y'] not in valid_types:
        valid = False
    if len(status) > 0 and options.get('--debug'):
        print('"' + record.title() + '" item status: ' + ', '.join(status))
    return valid


def main():
    total_count = 0
    valid_count = 0
    with open(options['INPUT'], 'rb') as fh:
        reader = MARCReader(fh, to_unicode=True, force_utf8=True)
        if not options['--csv']:
            writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))
            for record in reader:
                include_record = False
                for item in record.get_fields('952'):
                    valid = validate_item(item)
                    total_count += 1
                    if valid is True:
                        valid_count += 1
                        include_record = True
                if include_record is True:
                    writer.write(record)
            print('Total items: %i | Items included: %i' % (total_count, valid_count))
        elif options['--csv']:
            koha_record_ids = set()
            for record in reader:
                total_count += 1
                for item in record.get_fields('952'):
                    valid = validate_item(item)
                    if valid:
                        id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(MARC_ID_SUBFIELD)[0]
                        koha_record_ids.add(id)
                        break
            csvreader = csv.DictReader(open(options['--csv'], 'r'))
            gg_record_ids = set()
            for row in csvreader:
                gg_record_ids.add(row[GG_ID_COLUMN])
            print('Total Koha Bibs: %i' % total_count)
            print('Koha Bibs with circulating items: %i ' % len(koha_record_ids))
            print('Total GreenGlass Bibs: %i' % len(gg_record_ids))
            print('Weeded Items (I in GG & not in Koha): %i' % len(gg_record_ids - koha_record_ids))
            print('Added Items (I in Koha & not in GG): %i' % len(koha_record_ids - gg_record_ids))


if __name__ == '__main__':
    options = docopt(__doc__)
    main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import csv
from docopt import docopt
from pymarc import MARCReader, MARCWriter
lost_codes = {'0': '', '1': 'Lost', '2': 'Long Overdue (Lost)',
    '3': 'Lost and Paid For', '4': 'Missing', '5': 'Lost (On Search)',
    '6': 'Claims Returned'}
notforloan_codes = {'-3': 'Repair', '-2': 'In Processing', '-1': 'Ordered',
    '0': '', '1': 'Library Use Only', '2': 'Staff Collection',
    '3': 'Bindery', '4': 'By Appointment', '5': 'On display'}
valid_locations = ['CART', 'FACDEV', 'MAIN', 'NEWBOOK', 'DISPLAY']
valid_types = ['BOOK', 'SUPPL']
GG_ID_COLUMN = 'Bib Record Number'
MARC_ID_FIELD = '999'
MARC_ID_SUBFIELD = 'c'


def validate_item(item):
    status = []
    valid = True
    if item['q'] and item['q'] != '0':
        status.append('checked out')
    if item['7'] and item['7'] != '0':
        status.append(notforloan_codes[item['7']])
        valid = False
    if item['4'] and item['4'] != '0':
        status.append('damaged')
        valid = False
    if item['1'] and item['1'] != '0':
        status.append(lost_codes[item['1']])
        valid = False
    if item['0'] and item['0'] != '0':
        status.append('withdrawn')
        valid = False
    if item['c'] not in valid_locations:
        valid = False
    if item['y'] not in valid_types:
        valid = False
    if len(status) > 0 and options.get('--debug'):
        print('"' + record.title() + '" item status: ' + ', '.join(status))
    return valid


def main():
    total_count = 0
    valid_count = 0
    with open(options['INPUT'], 'rb') as fh:
        reader = MARCReader(fh, to_unicode=True, force_utf8=True)
        if not options['--csv']:
            writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))
            for record in reader:
                include_record = False
                for item in record.get_fields('952'):
                    valid = validate_item(item)
                    total_count += 1
                    if valid is True:
                        valid_count += 1
                        include_record = True
                if include_record is True:
                    writer.write(record)
            print('Total items: %i | Items included: %i' % (total_count, valid_count))
        elif options['--csv']:
            koha_record_ids = set()
            for record in reader:
                total_count += 1
                for item in record.get_fields('952'):
                    valid = validate_item(item)
                    if valid:
                        id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(MARC_ID_SUBFIELD)[0]
                        koha_record_ids.add(id)
                        break
            csvreader = csv.DictReader(open(options['--csv'], 'r'))
            gg_record_ids = set()
            for row in csvreader:
                gg_record_ids.add(row[GG_ID_COLUMN])
            print('Total Koha Bibs: %i' % total_count)
            print('Koha Bibs with circulating items: %i ' % len(koha_record_ids))
            print('Total GreenGlass Bibs: %i' % len(gg_record_ids))
            print('Weeded Items (I in GG & not in Koha): %i' % len(gg_record_ids - koha_record_ids))
            print('Added Items (I in Koha & not in GG): %i' % len(koha_record_ids - gg_record_ids))


if __name__ == '__main__':
    options = docopt(__doc__)
    main()
<|reserved_special_token_1|>
"""Usage:
    sharedprint.py INPUT [--output=out.mrc]
    sharedprint.py INPUT [--csv=greenglass.csv]

Process Koha MARC export for SCELC Shared Print.

The two uses above either 1) create a subset of the MARC input that's limited to
circulating items only or 2) performs a comparison between what's in the catalog
and what's in GreenGlass i.e. how many records were added and weeded.

Arguments:
    INPUT  MARC records (.mrc file)

Options:
    -h --help      show this usage information
    --debug        show debug information as the script runs
    --output=FILE  output records to this file [default: out.mrc]
    --csv=CSV      GreenGlass CSV to compare input MARC file against
"""
import csv

from docopt import docopt
from pymarc import MARCReader, MARCWriter

# https://library-staff.cca.edu/cgi-bin/koha/admin/authorised_values.pl?searchfield=LOST
lost_codes = {
    "0": "",
    "1": "Lost",
    "2": "Long Overdue (Lost)",
    "3": "Lost and Paid For",
    "4": "Missing",
    "5": "Lost (On Search)",
    "6": "Claims Returned",
}

# https://library-staff.cca.edu/cgi-bin/koha/admin/authorised_values.pl?searchfield=NOT_LOAN
notforloan_codes = {
    "-3": "Repair",
    "-2": "In Processing",
    "-1": "Ordered",
    "0": "",
    "1": "Library Use Only",
    "2": "Staff Collection",
    "3": "Bindery",
    "4": "By Appointment",
    "5": "On display",
}

# https://library-staff.cca.edu/cgi-bin/koha/admin/authorised_values.pl?searchfield=LOC
valid_locations = [
    "CART",
    "FACDEV",
    "MAIN",
    "NEWBOOK",
    "DISPLAY",
]

# https://library-staff.cca.edu/cgi-bin/koha/admin/itemtypes.pl
valid_types = [
    "BOOK",
    "SUPPL",
]

# name of column in the GreenGlass spreadsheet that contains the bib record ID
GG_ID_COLUMN = 'Bib Record Number'
# field and subfield in MARC record that contains the bib record ID
# Koha appears to store it in both 999$c & $d
MARC_ID_FIELD = '999'
MARC_ID_SUBFIELD = 'c'


def validate_item(item):
    # "item status" is an agglomeration of several things
    status = []
    # whether the _item_ we're looking at should be included
    valid = True

    # checked out, will be a date if item is checked out
    if item['q'] and item['q'] != "0":
        status.append('checked out')

    # "not for loan", variety of reasons why an item might not circ
    if item['7'] and item['7'] != "0":
        status.append(notforloan_codes[item['7']])
        valid = False

    # 1 if an item is damaged
    if item['4'] and item['4'] != "0":
        status.append('damaged')
        valid = False

    # lost, variety of codes
    if item['1'] and item['1'] != "0":
        status.append(lost_codes[item['1']])
        valid = False

    # 1 if an item has been withdrawn
    if item['0'] and item['0'] != "0":
        status.append('withdrawn')
        valid = False

    # filter items based on location & type
    if item['c'] not in valid_locations:
        valid = False

    if item['y'] not in valid_types:
        valid = False

    if len(status) > 0 and options.get('--debug'):
        print('"' + record.title() + '" item status: ' + ', '.join(status))

    return valid


def main():
    total_count = 0
    valid_count = 0
    with open(options['INPUT'], 'rb') as fh:
        reader = MARCReader(fh, to_unicode=True, force_utf8=True)
        # 1) first mode: write a MARC output file
        if not options['--csv']:
            writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))
            for record in reader:
                # whether we'll include the _bib_ record in export file
                include_record = False
                # Koha stores item data in 952 fields, one per item
                for item in record.get_fields('952'):
                    valid = validate_item(item)
                    total_count += 1
                    if valid is True:
                        valid_count += 1
                        # if there's any valid item then the bib should be included
                        include_record = True
                if include_record is True:
                    writer.write(record)
            print('Total items: %i | Items included: %i' % (total_count, valid_count))
        elif options['--csv']:
            koha_record_ids = set()
            for record in reader:
                total_count += 1
                for item in record.get_fields('952'):
                    valid = validate_item(item)
                    if valid:
                        id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(MARC_ID_SUBFIELD)[0]
                        koha_record_ids.add(id)
                        # stop looking at items after we find the first valid one
                        break

            csvreader = csv.DictReader(open(options['--csv'], 'r'))
            gg_record_ids = set()
            for row in csvreader:
                gg_record_ids.add(row[GG_ID_COLUMN])

            print('Total Koha Bibs: %i' % total_count)
            print('Koha Bibs with circulating items: %i ' % len(koha_record_ids))
            print('Total GreenGlass Bibs: %i' % len(gg_record_ids))
            print('Weeded Items (I in GG & not in Koha): %i' % len(gg_record_ids - koha_record_ids))
            print('Added Items (I in Koha & not in GG): %i' % len(koha_record_ids - gg_record_ids))


if __name__ == '__main__':
    options = docopt(__doc__)
    # print(options)
    main()
flexible
{ "blob_id": "c6cce2edafd7683af766b932d90ca170359e648a", "index": 679, "step-1": "<mask token>\n\n\ndef main():\n total_count = 0\n valid_count = 0\n with open(options['INPUT'], 'rb') as fh:\n reader = MARCReader(fh, to_unicode=True, force_utf8=True)\n if not options['--csv']:\n writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))\n for record in reader:\n include_record = False\n for item in record.get_fields('952'):\n valid = validate_item(item)\n total_count += 1\n if valid is True:\n valid_count += 1\n include_record = True\n if include_record is True:\n writer.write(record)\n print('Total items: %i | Items included: %i' % (total_count,\n valid_count))\n elif options['--csv']:\n koha_record_ids = set()\n for record in reader:\n total_count += 1\n for item in record.get_fields('952'):\n valid = validate_item(item)\n if valid:\n id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(\n MARC_ID_SUBFIELD)[0]\n koha_record_ids.add(id)\n break\n csvreader = csv.DictReader(open(options['--csv'], 'r'))\n gg_record_ids = set()\n for row in csvreader:\n gg_record_ids.add(row[GG_ID_COLUMN])\n print('Total Koha Bibs: %i' % total_count)\n print('Koha Bibs with circulating items: %i ' % len(\n koha_record_ids))\n print('Total GreenGlass Bibs: %i' % len(gg_record_ids))\n print('Weeded Items (I in GG & not in Koha): %i' % len(\n gg_record_ids - koha_record_ids))\n print('Added Items (I in Koha & not in GG): %i' % len(\n koha_record_ids - gg_record_ids))\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef validate_item(item):\n status = []\n valid = True\n if item['q'] and item['q'] != '0':\n status.append('checked out')\n if item['7'] and item['7'] != '0':\n status.append(notforloan_codes[item['7']])\n valid = False\n if item['4'] and item['4'] != '0':\n status.append('damaged')\n valid = False\n if item['1'] and item['1'] != '0':\n status.append(lost_codes[item['1']])\n valid = False\n if item['0'] and item['0'] != '0':\n status.append('withdrawn')\n valid = False\n if item['c'] not in valid_locations:\n valid = False\n if item['y'] not in valid_types:\n valid = False\n if len(status) > 0 and options.get('--debug'):\n print('\"' + record.title() + '\" item status: ' + ', '.join(status))\n return valid\n\n\ndef main():\n total_count = 0\n valid_count = 0\n with open(options['INPUT'], 'rb') as fh:\n reader = MARCReader(fh, to_unicode=True, force_utf8=True)\n if not options['--csv']:\n writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))\n for record in reader:\n include_record = False\n for item in record.get_fields('952'):\n valid = validate_item(item)\n total_count += 1\n if valid is True:\n valid_count += 1\n include_record = True\n if include_record is True:\n writer.write(record)\n print('Total items: %i | Items included: %i' % (total_count,\n valid_count))\n elif options['--csv']:\n koha_record_ids = set()\n for record in reader:\n total_count += 1\n for item in record.get_fields('952'):\n valid = validate_item(item)\n if valid:\n id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(\n MARC_ID_SUBFIELD)[0]\n koha_record_ids.add(id)\n break\n csvreader = csv.DictReader(open(options['--csv'], 'r'))\n gg_record_ids = set()\n for row in csvreader:\n gg_record_ids.add(row[GG_ID_COLUMN])\n print('Total Koha Bibs: %i' % total_count)\n print('Koha Bibs with circulating items: %i ' % len(\n koha_record_ids))\n print('Total GreenGlass Bibs: %i' % len(gg_record_ids))\n print('Weeded Items (I in GG & not in Koha): %i' % len(\n gg_record_ids - koha_record_ids))\n print('Added 
Items (I in Koha & not in GG): %i' % len(\n koha_record_ids - gg_record_ids))\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef validate_item(item):\n status = []\n valid = True\n if item['q'] and item['q'] != '0':\n status.append('checked out')\n if item['7'] and item['7'] != '0':\n status.append(notforloan_codes[item['7']])\n valid = False\n if item['4'] and item['4'] != '0':\n status.append('damaged')\n valid = False\n if item['1'] and item['1'] != '0':\n status.append(lost_codes[item['1']])\n valid = False\n if item['0'] and item['0'] != '0':\n status.append('withdrawn')\n valid = False\n if item['c'] not in valid_locations:\n valid = False\n if item['y'] not in valid_types:\n valid = False\n if len(status) > 0 and options.get('--debug'):\n print('\"' + record.title() + '\" item status: ' + ', '.join(status))\n return valid\n\n\ndef main():\n total_count = 0\n valid_count = 0\n with open(options['INPUT'], 'rb') as fh:\n reader = MARCReader(fh, to_unicode=True, force_utf8=True)\n if not options['--csv']:\n writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))\n for record in reader:\n include_record = False\n for item in record.get_fields('952'):\n valid = validate_item(item)\n total_count += 1\n if valid is True:\n valid_count += 1\n include_record = True\n if include_record is True:\n writer.write(record)\n print('Total items: %i | Items included: %i' % (total_count,\n valid_count))\n elif options['--csv']:\n koha_record_ids = set()\n for record in reader:\n total_count += 1\n for item in record.get_fields('952'):\n valid = validate_item(item)\n if valid:\n id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(\n MARC_ID_SUBFIELD)[0]\n koha_record_ids.add(id)\n break\n csvreader = csv.DictReader(open(options['--csv'], 'r'))\n gg_record_ids = set()\n for row in csvreader:\n gg_record_ids.add(row[GG_ID_COLUMN])\n print('Total Koha Bibs: %i' % total_count)\n print('Koha Bibs with circulating items: %i ' % len(\n koha_record_ids))\n print('Total GreenGlass Bibs: %i' % len(gg_record_ids))\n print('Weeded Items (I in GG & not in Koha): %i' % len(\n gg_record_ids - koha_record_ids))\n print('Added Items (I in Koha & not in GG): %i' % len(\n koha_record_ids - gg_record_ids))\n\n\nif __name__ == '__main__':\n options = docopt(__doc__)\n main()\n", "step-4": "<mask token>\nimport csv\nfrom docopt import docopt\nfrom pymarc import MARCReader, MARCWriter\nlost_codes = {'0': '', '1': 'Lost', '2': 'Long Overdue (Lost)', '3':\n 'Lost and Paid For', '4': 'Missing', '5': 'Lost (On Search)', '6':\n 'Claims Returned'}\nnotforloan_codes = {'-3': 'Repair', '-2': 'In Processing', '-1': 'Ordered',\n '0': '', '1': 'Library Use Only', '2': 'Staff Collection', '3':\n 'Bindery', '4': 'By Appointment', '5': 'On display'}\nvalid_locations = ['CART', 'FACDEV', 'MAIN', 'NEWBOOK', 'DISPLAY']\nvalid_types = ['BOOK', 'SUPPL']\nGG_ID_COLUMN = 'Bib Record Number'\nMARC_ID_FIELD = '999'\nMARC_ID_SUBFIELD = 'c'\n\n\ndef validate_item(item):\n status = []\n valid = True\n if item['q'] and item['q'] != '0':\n status.append('checked out')\n if item['7'] and item['7'] != '0':\n status.append(notforloan_codes[item['7']])\n valid = False\n if item['4'] and item['4'] != '0':\n status.append('damaged')\n valid = False\n if item['1'] and item['1'] != '0':\n status.append(lost_codes[item['1']])\n valid = False\n if item['0'] and item['0'] != '0':\n status.append('withdrawn')\n valid = False\n if item['c'] not in valid_locations:\n valid = False\n if item['y'] not in valid_types:\n valid = False\n if len(status) > 
0 and options.get('--debug'):\n print('\"' + record.title() + '\" item status: ' + ', '.join(status))\n return valid\n\n\ndef main():\n total_count = 0\n valid_count = 0\n with open(options['INPUT'], 'rb') as fh:\n reader = MARCReader(fh, to_unicode=True, force_utf8=True)\n if not options['--csv']:\n writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))\n for record in reader:\n include_record = False\n for item in record.get_fields('952'):\n valid = validate_item(item)\n total_count += 1\n if valid is True:\n valid_count += 1\n include_record = True\n if include_record is True:\n writer.write(record)\n print('Total items: %i | Items included: %i' % (total_count,\n valid_count))\n elif options['--csv']:\n koha_record_ids = set()\n for record in reader:\n total_count += 1\n for item in record.get_fields('952'):\n valid = validate_item(item)\n if valid:\n id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(\n MARC_ID_SUBFIELD)[0]\n koha_record_ids.add(id)\n break\n csvreader = csv.DictReader(open(options['--csv'], 'r'))\n gg_record_ids = set()\n for row in csvreader:\n gg_record_ids.add(row[GG_ID_COLUMN])\n print('Total Koha Bibs: %i' % total_count)\n print('Koha Bibs with circulating items: %i ' % len(\n koha_record_ids))\n print('Total GreenGlass Bibs: %i' % len(gg_record_ids))\n print('Weeded Items (I in GG & not in Koha): %i' % len(\n gg_record_ids - koha_record_ids))\n print('Added Items (I in Koha & not in GG): %i' % len(\n koha_record_ids - gg_record_ids))\n\n\nif __name__ == '__main__':\n options = docopt(__doc__)\n main()\n", "step-5": "\"\"\"Usage:\n sharedprint.py INPUT [--output=out.mrc]\n sharedprint.py INPUT [--csv=greenglass.csv]\n\nProcess Koha MARC export for SCELC Shared Print.\n\nThe two uses above either 1) create a subset of the MARC input that's limited to\ncirculating items only or 2) performs a comparison between what's in the catalog\nand what's in GreenGlass i.e. 
how many records were added and weeded.\n\nArguments:\n INPUT MARC records (.mrc file)\n\nOptions:\n -h --help show this usage information\n --debug show debug information as the script runs\n --output=FILE output records to this file [default: out.mrc]\n --csv=CSV GreenGlass CSV to compare input MARC file against\n\"\"\"\nimport csv\n\nfrom docopt import docopt\nfrom pymarc import MARCReader, MARCWriter\n\n# https://library-staff.cca.edu/cgi-bin/koha/admin/authorised_values.pl?searchfield=LOST\nlost_codes = {\n \"0\": \"\",\n \"1\": \"Lost\",\n \"2\": \"Long Overdue (Lost)\",\n \"3\": \"Lost and Paid For\",\n \"4\": \"Missing\",\n \"5\": \"Lost (On Search)\",\n \"6\": \"Claims Returned\",\n}\n\n# https://library-staff.cca.edu/cgi-bin/koha/admin/authorised_values.pl?searchfield=NOT_LOAN\nnotforloan_codes = {\n \"-3\":\t\"Repair\",\n \"-2\":\t\"In Processing\",\n \"-1\":\t\"Ordered\",\n \"0\":\t\"\",\n \"1\":\t\"Library Use Only\",\n \"2\":\t\"Staff Collection\",\n \"3\":\t\"Bindery\",\n \"4\":\t\"By Appointment\",\n \"5\":\t\"On display\",\n}\n\n# https://library-staff.cca.edu/cgi-bin/koha/admin/authorised_values.pl?searchfield=LOC\nvalid_locations = [\n \"CART\",\n \"FACDEV\",\n \"MAIN\",\n \"NEWBOOK\",\n \"DISPLAY\",\n]\n\n# https://library-staff.cca.edu/cgi-bin/koha/admin/itemtypes.pl\nvalid_types = [\n \"BOOK\",\n \"SUPPL\",\n]\n\n# name of column in the GreenGlass spreadsheet that contains the bib record ID\nGG_ID_COLUMN = 'Bib Record Number'\n# field and subfield in MARC record that contains the bib record ID\n# Koha appears to store it in both 999$c & $d\nMARC_ID_FIELD = '999'\nMARC_ID_SUBFIELD = 'c'\n\ndef validate_item(item):\n # \"item status\" is an agglomeration of several things\n status = []\n # whether the _item_ we're looking at should be included\n valid = True\n\n # checked out, will be a date if item is checked out\n if item['q'] and item['q'] != \"0\":\n status.append('checked out')\n\n # \"not for loan\", variety of reasons why an item might not circ\n if item['7'] and item['7'] != \"0\":\n status.append(notforloan_codes[item['7']])\n valid = False\n\n # 1 is an item is damanged\n if item['4'] and item['4'] != \"0\":\n status.append('damaged')\n valid = False\n\n # lost, variety of codes\n if item['1'] and item['1'] != \"0\":\n status.append(lost_codes[item['1']])\n valid = False\n\n # 1 if an item has been withdrawn\n if item['0'] and item['0'] != \"0\":\n status.append('withdrawn')\n valid = False\n\n # filter items based on location & type\n if item['c'] not in valid_locations:\n valid = False\n\n if item['y'] not in valid_types:\n valid = False\n\n if len(status) > 0 and options.get('--debug'):\n print('\"' + record.title() + '\" item status: ' + ', '.join(status))\n\n return valid\n\n\ndef main():\n total_count = 0\n valid_count = 0\n with open(options['INPUT'], 'rb') as fh:\n reader = MARCReader(fh, to_unicode=True, force_utf8=True)\n # 1) first mode: write a MARC output file\n if not options['--csv']:\n writer = MARCWriter(open('out.mrc' or options['--output'], 'wb'))\n for record in reader:\n # whether we'll include the _bib_ record in export file\n include_record = False\n # Koha stores item data in 952 fields, one per item\n for item in record.get_fields('952'):\n valid = validate_item(item)\n\n total_count += 1\n if valid is True:\n valid_count += 1\n # if there's any valid item then the bib should be included\n include_record = True\n\n if include_record is True:\n writer.write(record)\n\n print('Total items: %i | Items included: %i' % (total_count, 
valid_count))\n elif options['--csv']:\n koha_record_ids = set()\n for record in reader:\n total_count += 1\n for item in record.get_fields('952'):\n valid = validate_item(item)\n if valid:\n id = record.get_fields(MARC_ID_FIELD)[0].get_subfields(MARC_ID_SUBFIELD)[0]\n koha_record_ids.add(id)\n # stop looking at items after we find the first valid one\n break\n\n csvreader = csv.DictReader(open(options['--csv'], 'r'))\n gg_record_ids = set()\n for row in csvreader:\n gg_record_ids.add(row[GG_ID_COLUMN])\n\n print('Total Koha Bibs: %i' % total_count)\n print('Koha Bibs with circulating items: %i ' % len(koha_record_ids))\n print('Total GreenGlass Bibs: %i' % len(gg_record_ids))\n print('Weeded Items (I in GG & not in Koha): %i' % len(gg_record_ids - koha_record_ids))\n print('Added Items (I in Koha & not in GG): %i' % len(koha_record_ids - gg_record_ids))\n\n\nif __name__ == '__main__':\n options = docopt(__doc__)\n # print(options)\n main()\n", "step-ids": [ 1, 2, 3, 5, 6 ] }
[ 1, 2, 3, 5, 6 ]
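The --csv comparison in the record above reduces to two set differences over bib record IDs. A self-contained sketch of that arithmetic with hypothetical IDs:

koha_record_ids = {'b100', 'b101', 'b102'}  # hypothetical Koha bibs with a circulating item
gg_record_ids = {'b100', 'b103'}            # hypothetical IDs from the GreenGlass export
print('Weeded Items (I in GG & not in Koha): %i' % len(gg_record_ids - koha_record_ids))  # 1
print('Added Items (I in Koha & not in GG): %i' % len(koha_record_ids - gg_record_ids))   # 2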
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class Migration(migrations.Migration):
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class Migration(migrations.Migration):
    dependencies = [('eCom', '0014_auto_20210617_1503')]
    operations = [migrations.RemoveField(model_name='order', name='items'),
        migrations.AddField(model_name='order', name='items',
            field=models.ForeignKey(null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to='eCom.orderitem'))]
<|reserved_special_token_1|>
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    dependencies = [('eCom', '0014_auto_20210617_1503')]
    operations = [migrations.RemoveField(model_name='order', name='items'),
        migrations.AddField(model_name='order', name='items',
            field=models.ForeignKey(null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to='eCom.orderitem'))]
<|reserved_special_token_1|>
# Generated by Django 3.2.4 on 2021-06-18 01:20

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('eCom', '0014_auto_20210617_1503'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='order',
            name='items',
        ),
        migrations.AddField(
            model_name='order',
            name='items',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='eCom.orderitem'),
        ),
    ]
flexible
{ "blob_id": "ef57f0dfea261f022ced36ef9e27a07d63c21026", "index": 2156, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('eCom', '0014_auto_20210617_1503')]\n operations = [migrations.RemoveField(model_name='order', name='items'),\n migrations.AddField(model_name='order', name='items', field=models.\n ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='eCom.orderitem'))]\n", "step-4": "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('eCom', '0014_auto_20210617_1503')]\n operations = [migrations.RemoveField(model_name='order', name='items'),\n migrations.AddField(model_name='order', name='items', field=models.\n ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='eCom.orderitem'))]\n", "step-5": "# Generated by Django 3.2.4 on 2021-06-18 01:20\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('eCom', '0014_auto_20210617_1503'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='order',\n name='items',\n ),\n migrations.AddField(\n model_name='order',\n name='items',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='eCom.orderitem'),\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
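Because the record above removes the items field and adds a new one rather than altering it in place, whatever the previous items field stored is dropped when the migration runs. When the old and new column types are compatible, the in-place alternative is an AlterField operation; a sketch reusing the record's names (requires Django installed, but no configured project, to import):

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('eCom', '0014_auto_20210617_1503'),
    ]

    operations = [
        # alters the existing column instead of dropping and recreating it
        migrations.AlterField(
            model_name='order',
            name='items',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='eCom.orderitem'),
        ),
    ]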
<|reserved_special_token_0|>


class Other_Operations_Stack(Stack):

    def min_value(self):
        min_value = self.peek()
        for value in self._data:
            if value < min_value:
                min_value = value
            self.pop()
        return min_value


<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class Other_Operations_Stack(Stack):

    def min_value(self):
        min_value = self.peek()
        for value in self._data:
            if value < min_value:
                min_value = value
            self.pop()
        return min_value


<|reserved_special_token_0|>
content_stack.push(1)
content_stack.push(-2)
content_stack.push(3)
print(content_stack.min_value())
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class Other_Operations_Stack(Stack):

    def min_value(self):
        min_value = self.peek()
        for value in self._data:
            if value < min_value:
                min_value = value
            self.pop()
        return min_value


content_stack = Other_Operations_Stack()
content_stack.push(1)
content_stack.push(-2)
content_stack.push(3)
print(content_stack.min_value())
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from stack import Stack


class Other_Operations_Stack(Stack):

    def min_value(self):
        min_value = self.peek()
        for value in self._data:
            if value < min_value:
                min_value = value
            self.pop()
        return min_value


content_stack = Other_Operations_Stack()
content_stack.push(1)
content_stack.push(-2)
content_stack.push(3)
print(content_stack.min_value())
<|reserved_special_token_1|>
'''
Exercise 1: Extend the Stack class, which we wrote during the lesson
explanations, by adding a new function called min_value() that will return
the smallest integer value present in the stack.
'''

from stack import Stack


class Other_Operations_Stack(Stack):
    def min_value(self):
        min_value = self.peek()
        for value in self._data:
            if value < min_value:
                min_value = value
            self.pop()
        return min_value


content_stack = Other_Operations_Stack()
content_stack.push(1)
content_stack.push(-2)
content_stack.push(3)
print(content_stack.min_value())  # output: -2
flexible
{ "blob_id": "0b2fd671b99b7012a14b132db2322318873b826c", "index": 1345, "step-1": "<mask token>\n\n\nclass Other_Operations_Stack(Stack):\n\n def min_value(self):\n min_value = self.peek()\n for value in self._data:\n if value < min_value:\n min_value = value\n self.pop()\n return min_value\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Other_Operations_Stack(Stack):\n\n def min_value(self):\n min_value = self.peek()\n for value in self._data:\n if value < min_value:\n min_value = value\n self.pop()\n return min_value\n\n\n<mask token>\ncontent_stack.push(1)\ncontent_stack.push(-2)\ncontent_stack.push(3)\nprint(content_stack.min_value())\n", "step-3": "<mask token>\n\n\nclass Other_Operations_Stack(Stack):\n\n def min_value(self):\n min_value = self.peek()\n for value in self._data:\n if value < min_value:\n min_value = value\n self.pop()\n return min_value\n\n\ncontent_stack = Other_Operations_Stack()\ncontent_stack.push(1)\ncontent_stack.push(-2)\ncontent_stack.push(3)\nprint(content_stack.min_value())\n", "step-4": "<mask token>\nfrom stack import Stack\n\n\nclass Other_Operations_Stack(Stack):\n\n def min_value(self):\n min_value = self.peek()\n for value in self._data:\n if value < min_value:\n min_value = value\n self.pop()\n return min_value\n\n\ncontent_stack = Other_Operations_Stack()\ncontent_stack.push(1)\ncontent_stack.push(-2)\ncontent_stack.push(3)\nprint(content_stack.min_value())\n", "step-5": "'''\nExercício 1: Estenda a classe Stack , que escrevemos durante as explicações do\nconteúdo, adicionando uma nova função chamada min_value() que irá retornar o\nmenor valor inteiro presente na pilha.\n'''\n\nfrom stack import Stack\n\n\nclass Other_Operations_Stack(Stack):\n def min_value(self):\n min_value = self.peek()\n for value in self._data:\n if value < min_value:\n min_value = value\n self.pop()\n return min_value\n\n\ncontent_stack = Other_Operations_Stack()\ncontent_stack.push(1)\ncontent_stack.push(-2)\ncontent_stack.push(3)\nprint(content_stack.min_value()) # saída: -2\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
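Note that min_value in the record above pops an element on every pass through the loop, so computing the minimum empties the stack as a side effect. A non-destructive sketch, with a minimal stand-in for the course's Stack (assumed to expose the same push/pop/peek and a _data list):

class Stack:
    # minimal stand-in for the exercise's Stack class
    def __init__(self):
        self._data = []

    def push(self, value):
        self._data.append(value)

    def pop(self):
        return self._data.pop()

    def peek(self):
        return self._data[-1]


class Other_Operations_Stack(Stack):
    def min_value(self):
        # read the backing list without popping, leaving the stack intact
        return min(self._data)


content_stack = Other_Operations_Stack()
content_stack.push(1)
content_stack.push(-2)
content_stack.push(3)
print(content_stack.min_value())  # -2
print(len(content_stack._data))   # 3, nothing was popped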
import random

import numpy as np


class Board:
    def __init__(self, nrows, ncols, random_seed=42):
        self.nrows = nrows
        self.ncols = ncols
        self.random = random.Random()
        self.random.seed(random_seed)
        self.board = np.zeros((nrows, ncols))
        self.score = 0
        self.__add_new_numbers()

    # Initialize with 1/8 of the board filled, with 90% chance of filling
    # with 2, and 10% chance of filling with 4
    def __add_new_numbers(self):
        num_zeros = (self.board == 0).sum()
        for i in range(min((self.nrows * self.ncols) // 8, num_zeros)):
            random_row = self.random.randint(0, self.nrows - 1)
            random_col = self.random.randint(0, self.ncols - 1)
            while self.board[random_row, random_col] != 0:
                random_row = self.random.randint(0, self.nrows - 1)
                random_col = self.random.randint(0, self.ncols - 1)
            if self.random.random() < 0.9:
                self.board[random_row, random_col] = 2
            else:
                self.board[random_row, random_col] = 4

    def __swap_on_board(self, pos1, pos2):
        val = self.board[pos1]
        self.board[pos1] = self.board[pos2]
        self.board[pos2] = val

    def __left_swipe_on_row(self, row_index):
        left_index = 0
        collapsed = False
        for i in range(self.ncols):
            if self.board[row_index, i] != 0:
                if left_index != i:
                    collapsed = True
                self.__swap_on_board((row_index, left_index), (row_index, i))
                left_index += 1
        for i in range(1, self.ncols):
            if self.board[row_index, i] == self.board[row_index, i - 1]:
                self.board[row_index, i - 1] *= 2
                self.board[row_index, i] = 0
                collapsed = True
                self.score += self.board[row_index, i - 1]
        left_index = 0
        for i in range(self.ncols):
            if self.board[row_index, i] != 0:
                self.__swap_on_board((row_index, left_index), (row_index, i))
                left_index += 1
        return collapsed

    def __up_swipe_on_col(self, col_index):
        top_index = 0
        collapsed = False
        for i in range(self.nrows):
            if self.board[i, col_index] != 0:
                if top_index != i:
                    collapsed = True
                self.__swap_on_board((top_index, col_index), (i, col_index))
                top_index += 1
        for i in range(1, self.nrows):
            if self.board[i, col_index] == self.board[i - 1, col_index]:
                self.board[i - 1, col_index] *= 2
                self.board[i, col_index] = 0
                collapsed = True
                self.score += self.board[i - 1, col_index]
        top_index = 0
        for i in range(self.nrows):
            if self.board[i, col_index] != 0:
                self.__swap_on_board((top_index, col_index), (i, col_index))
                top_index += 1
        return collapsed

    def __left_swipe(self):
        collapsed = False
        for i in range(self.nrows):
            if self.__left_swipe_on_row(i):
                collapsed = True
        return collapsed

    def __right_swipe(self):
        collapsed = False
        for i in range(self.nrows):
            for j in range(self.ncols // 2):
                self.__swap_on_board((i, j), (i, -j - 1))
            if self.__left_swipe_on_row(i):
                collapsed = True
            for j in range(self.ncols // 2):
                self.__swap_on_board((i, j), (i, -j - 1))
        return collapsed

    def __up_swipe(self):
        collapsed = False
        for i in range(self.ncols):
            if self.__up_swipe_on_col(i):
                collapsed = True
        return collapsed

    def __down_swipe(self):
        collapsed = False
        for i in range(self.ncols):
            for j in range(self.nrows // 2):
                self.__swap_on_board((j, i), (-j - 1, i))
            if self.__up_swipe_on_col(i):
                collapsed = True
            for j in range(self.nrows // 2):
                self.__swap_on_board((j, i), (-j - 1, i))
        return collapsed

    def __display(self):
        print(self.board)
        print(f"Current score: {self.score}")

    def reset(self):
        self.score = 0
        for i in range(self.nrows):
            for j in range(self.ncols):
                self.board[i, j] = 0

    def play(self):
        moves = [self.__up_swipe, self.__left_swipe, self.__down_swipe,
                 self.__right_swipe]
        movement_mapping = {char: moves[pos] for pos, char in enumerate('WASD')}
        board_copy = Board(self.nrows, self.ncols)
        lost = False
        while self.board.max() < 2048:
            self.__display()
            raw_input_value = input("Play with WASD: ").upper()
            while len(raw_input_value) == 0:
                raw_input_value = input("Play with WASD: ").upper()
            direction = raw_input_value[0]
            while direction not in movement_mapping:
                raw_input_value = input("Play with WASD: ").upper()
                while len(raw_input_value) == 0:
                    raw_input_value = input("Play with WASD: ").upper()
                direction = raw_input_value[0]
            did_move = movement_mapping[direction]()
            if did_move:
                self.__add_new_numbers()

                # TODO: Make this more efficient
                board_copy.board = self.board.copy()
                can_swipe_up = board_copy.__up_swipe()
                can_swipe_left = board_copy.__left_swipe()
                if not (can_swipe_left or can_swipe_up):
                    lost = True
                    break
            else:
                print(f"'{direction}'" + ' is an INVALID MOVE')

        if not lost:
            print('GAME WON')
        else:
            print('GAME LOST')
        self.__display()
        output = self.score
        self.reset()
        return output


if __name__ == "__main__":
    b = Board(4, 4)
    b.play()
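A deterministic check of the merge rule in the Board class above, driving a single row swipe directly; the private helper is reached through Python's name mangling, which is test-only usage, and this sketch assumes the class definition above is in scope:

b = Board(4, 4)
b.board[:] = 0                  # clear the randomly seeded cells
b.board[0] = [2, 2, 4, 0]
b._Board__left_swipe_on_row(0)  # one left swipe on row 0
print(b.board[0])               # [4. 4. 0. 0.], the pair of 2s merged once
print(b.score)                  # 4.0, score grows by the merged value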
normal
{ "blob_id": "cab45a823e319bd504b3db68cf70bff315f44fc6", "index": 7462, "step-1": "<mask token>\n\n\nclass Board:\n\n def __init__(self, nrows, ncols, random_seed=42):\n self.nrows = nrows\n self.ncols = ncols\n self.random = random.Random()\n self.random.seed(random_seed)\n self.board = np.zeros((nrows, ncols))\n self.score = 0\n self.__add_new_numbers()\n <mask token>\n\n def __swap_on_board(self, pos1, pos2):\n val = self.board[pos1]\n self.board[pos1] = self.board[pos2]\n self.board[pos2] = val\n <mask token>\n <mask token>\n\n def __left_swipe(self):\n collapsed = False\n for i in range(self.nrows):\n if self.__left_swipe_on_row(i):\n collapsed = True\n return collapsed\n\n def __right_swipe(self):\n collapsed = False\n for i in range(self.nrows):\n for j in range(self.ncols // 2):\n self.__swap_on_board((i, j), (i, -j - 1))\n if self.__left_swipe_on_row(i):\n collapsed = True\n for j in range(self.ncols // 2):\n self.__swap_on_board((i, j), (i, -j - 1))\n return collapsed\n\n def __up_swipe(self):\n collapsed = False\n for i in range(self.ncols):\n if self.__up_swipe_on_col(i):\n collapsed = True\n return collapsed\n\n def __down_swipe(self):\n collapsed = False\n for i in range(self.ncols):\n for j in range(self.nrows // 2):\n self.__swap_on_board((j, i), (-j - 1, i))\n if self.__up_swipe_on_col(i):\n collapsed = True\n for j in range(self.nrows // 2):\n self.__swap_on_board((j, i), (-j - 1, i))\n return collapsed\n\n def __display(self):\n print(self.board)\n print(f'Current score: {self.score}')\n\n def reset(self):\n self.score = 0\n for i in range(self.nrows):\n for j in range(self.ncols):\n self.board[i, j] = 0\n\n def play(self):\n moves = [self.__up_swipe, self.__left_swipe, self.__down_swipe,\n self.__right_swipe]\n movement_mapping = {char: moves[pos] for pos, char in enumerate('WASD')\n }\n board_copy = Board(self.nrows, self.ncols)\n lost = False\n while self.board.max() < 2048:\n self.__display()\n raw_input_value = input('Play with WASD: ').upper()\n while len(raw_input_value) == 0:\n raw_input_value = input('Play with WASD: ').upper()\n direction = raw_input_value[0]\n while direction not in movement_mapping:\n raw_input_value = input('Play with WASD: ').upper()\n while len(raw_input_value) == 0:\n raw_input_value = input('Play with WASD: ').upper()\n direction = raw_input_value[0]\n did_move = movement_mapping[direction]()\n if did_move:\n self.__add_new_numbers()\n board_copy.board = self.board.copy()\n can_swipe_up = board_copy.__up_swipe()\n can_swipe_left = board_copy.__left_swipe()\n if not (can_swipe_left or can_swipe_up):\n lost = True\n break\n else:\n print(f\"'{direction}'\" + ' is an INVALID MOVE')\n if not lost:\n print('GAME WON')\n else:\n print('GAME LOST')\n self.__display()\n output = self.score\n self.reset()\n return output\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Board:\n\n def __init__(self, nrows, ncols, random_seed=42):\n self.nrows = nrows\n self.ncols = ncols\n self.random = random.Random()\n self.random.seed(random_seed)\n self.board = np.zeros((nrows, ncols))\n self.score = 0\n self.__add_new_numbers()\n\n def __add_new_numbers(self):\n num_zeros = (self.board == 0).sum()\n for i in range(min(self.nrows * self.ncols // 8, num_zeros)):\n random_row = self.random.randint(0, self.nrows - 1)\n random_col = self.random.randint(0, self.ncols - 1)\n while self.board[random_row, random_col] != 0:\n random_row = self.random.randint(0, self.nrows - 1)\n random_col = self.random.randint(0, self.ncols - 1)\n if self.random.random() < 
0.9:\n self.board[random_row, random_col] = 2\n else:\n self.board[random_row, random_col] = 4\n\n def __swap_on_board(self, pos1, pos2):\n val = self.board[pos1]\n self.board[pos1] = self.board[pos2]\n self.board[pos2] = val\n\n def __left_swipe_on_row(self, row_index):\n left_index = 0\n collapsed = False\n for i in range(self.ncols):\n if self.board[row_index, i] != 0:\n if left_index != i:\n collapsed = True\n self.__swap_on_board((row_index, left_index), (row_index, i))\n left_index += 1\n for i in range(1, self.ncols):\n if self.board[row_index, i] == self.board[row_index, i - 1]:\n self.board[row_index, i - 1] *= 2\n self.board[row_index, i] = 0\n collapsed = True\n self.score += self.board[row_index, i - 1]\n left_index = 0\n for i in range(self.ncols):\n if self.board[row_index, i] != 0:\n self.__swap_on_board((row_index, left_index), (row_index, i))\n left_index += 1\n return collapsed\n <mask token>\n\n def __left_swipe(self):\n collapsed = False\n for i in range(self.nrows):\n if self.__left_swipe_on_row(i):\n collapsed = True\n return collapsed\n\n def __right_swipe(self):\n collapsed = False\n for i in range(self.nrows):\n for j in range(self.ncols // 2):\n self.__swap_on_board((i, j), (i, -j - 1))\n if self.__left_swipe_on_row(i):\n collapsed = True\n for j in range(self.ncols // 2):\n self.__swap_on_board((i, j), (i, -j - 1))\n return collapsed\n\n def __up_swipe(self):\n collapsed = False\n for i in range(self.ncols):\n if self.__up_swipe_on_col(i):\n collapsed = True\n return collapsed\n\n def __down_swipe(self):\n collapsed = False\n for i in range(self.ncols):\n for j in range(self.nrows // 2):\n self.__swap_on_board((j, i), (-j - 1, i))\n if self.__up_swipe_on_col(i):\n collapsed = True\n for j in range(self.nrows // 2):\n self.__swap_on_board((j, i), (-j - 1, i))\n return collapsed\n\n def __display(self):\n print(self.board)\n print(f'Current score: {self.score}')\n\n def reset(self):\n self.score = 0\n for i in range(self.nrows):\n for j in range(self.ncols):\n self.board[i, j] = 0\n\n def play(self):\n moves = [self.__up_swipe, self.__left_swipe, self.__down_swipe,\n self.__right_swipe]\n movement_mapping = {char: moves[pos] for pos, char in enumerate('WASD')\n }\n board_copy = Board(self.nrows, self.ncols)\n lost = False\n while self.board.max() < 2048:\n self.__display()\n raw_input_value = input('Play with WASD: ').upper()\n while len(raw_input_value) == 0:\n raw_input_value = input('Play with WASD: ').upper()\n direction = raw_input_value[0]\n while direction not in movement_mapping:\n raw_input_value = input('Play with WASD: ').upper()\n while len(raw_input_value) == 0:\n raw_input_value = input('Play with WASD: ').upper()\n direction = raw_input_value[0]\n did_move = movement_mapping[direction]()\n if did_move:\n self.__add_new_numbers()\n board_copy.board = self.board.copy()\n can_swipe_up = board_copy.__up_swipe()\n can_swipe_left = board_copy.__left_swipe()\n if not (can_swipe_left or can_swipe_up):\n lost = True\n break\n else:\n print(f\"'{direction}'\" + ' is an INVALID MOVE')\n if not lost:\n print('GAME WON')\n else:\n print('GAME LOST')\n self.__display()\n output = self.score\n self.reset()\n return output\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Board:\n\n def __init__(self, nrows, ncols, random_seed=42):\n self.nrows = nrows\n self.ncols = ncols\n self.random = random.Random()\n self.random.seed(random_seed)\n self.board = np.zeros((nrows, ncols))\n self.score = 0\n self.__add_new_numbers()\n\n def __add_new_numbers(self):\n 
num_zeros = (self.board == 0).sum()\n for i in range(min(self.nrows * self.ncols // 8, num_zeros)):\n random_row = self.random.randint(0, self.nrows - 1)\n random_col = self.random.randint(0, self.ncols - 1)\n while self.board[random_row, random_col] != 0:\n random_row = self.random.randint(0, self.nrows - 1)\n random_col = self.random.randint(0, self.ncols - 1)\n if self.random.random() < 0.9:\n self.board[random_row, random_col] = 2\n else:\n self.board[random_row, random_col] = 4\n\n def __swap_on_board(self, pos1, pos2):\n val = self.board[pos1]\n self.board[pos1] = self.board[pos2]\n self.board[pos2] = val\n\n def __left_swipe_on_row(self, row_index):\n left_index = 0\n collapsed = False\n for i in range(self.ncols):\n if self.board[row_index, i] != 0:\n if left_index != i:\n collapsed = True\n self.__swap_on_board((row_index, left_index), (row_index, i))\n left_index += 1\n for i in range(1, self.ncols):\n if self.board[row_index, i] == self.board[row_index, i - 1]:\n self.board[row_index, i - 1] *= 2\n self.board[row_index, i] = 0\n collapsed = True\n self.score += self.board[row_index, i - 1]\n left_index = 0\n for i in range(self.ncols):\n if self.board[row_index, i] != 0:\n self.__swap_on_board((row_index, left_index), (row_index, i))\n left_index += 1\n return collapsed\n\n def __up_swipe_on_col(self, col_index):\n top_index = 0\n collapsed = False\n for i in range(self.nrows):\n if self.board[i, col_index] != 0:\n if top_index != i:\n collapsed = True\n self.__swap_on_board((top_index, col_index), (i, col_index))\n top_index += 1\n for i in range(1, self.nrows):\n if self.board[i, col_index] == self.board[i - 1, col_index]:\n self.board[i - 1, col_index] *= 2\n self.board[i, col_index] = 0\n collapsed = True\n self.score += self.board[i - 1, col_index]\n top_index = 0\n for i in range(self.nrows):\n if self.board[i, col_index] != 0:\n self.__swap_on_board((top_index, col_index), (i, col_index))\n top_index += 1\n return collapsed\n\n def __left_swipe(self):\n collapsed = False\n for i in range(self.nrows):\n if self.__left_swipe_on_row(i):\n collapsed = True\n return collapsed\n\n def __right_swipe(self):\n collapsed = False\n for i in range(self.nrows):\n for j in range(self.ncols // 2):\n self.__swap_on_board((i, j), (i, -j - 1))\n if self.__left_swipe_on_row(i):\n collapsed = True\n for j in range(self.ncols // 2):\n self.__swap_on_board((i, j), (i, -j - 1))\n return collapsed\n\n def __up_swipe(self):\n collapsed = False\n for i in range(self.ncols):\n if self.__up_swipe_on_col(i):\n collapsed = True\n return collapsed\n\n def __down_swipe(self):\n collapsed = False\n for i in range(self.ncols):\n for j in range(self.nrows // 2):\n self.__swap_on_board((j, i), (-j - 1, i))\n if self.__up_swipe_on_col(i):\n collapsed = True\n for j in range(self.nrows // 2):\n self.__swap_on_board((j, i), (-j - 1, i))\n return collapsed\n\n def __display(self):\n print(self.board)\n print(f'Current score: {self.score}')\n\n def reset(self):\n self.score = 0\n for i in range(self.nrows):\n for j in range(self.ncols):\n self.board[i, j] = 0\n\n def play(self):\n moves = [self.__up_swipe, self.__left_swipe, self.__down_swipe,\n self.__right_swipe]\n movement_mapping = {char: moves[pos] for pos, char in enumerate('WASD')\n }\n board_copy = Board(self.nrows, self.ncols)\n lost = False\n while self.board.max() < 2048:\n self.__display()\n raw_input_value = input('Play with WASD: ').upper()\n while len(raw_input_value) == 0:\n raw_input_value = input('Play with WASD: ').upper()\n direction = 
raw_input_value[0]\n while direction not in movement_mapping:\n raw_input_value = input('Play with WASD: ').upper()\n while len(raw_input_value) == 0:\n raw_input_value = input('Play with WASD: ').upper()\n direction = raw_input_value[0]\n did_move = movement_mapping[direction]()\n if did_move:\n self.__add_new_numbers()\n board_copy.board = self.board.copy()\n can_swipe_up = board_copy.__up_swipe()\n can_swipe_left = board_copy.__left_swipe()\n if not (can_swipe_left or can_swipe_up):\n lost = True\n break\n else:\n print(f\"'{direction}'\" + ' is an INVALID MOVE')\n if not lost:\n print('GAME WON')\n else:\n print('GAME LOST')\n self.__display()\n output = self.score\n self.reset()\n return output\n\n\n<mask token>\n", "step-4": "<mask token>\n\n\nclass Board:\n\n def __init__(self, nrows, ncols, random_seed=42):\n self.nrows = nrows\n self.ncols = ncols\n self.random = random.Random()\n self.random.seed(random_seed)\n self.board = np.zeros((nrows, ncols))\n self.score = 0\n self.__add_new_numbers()\n\n def __add_new_numbers(self):\n num_zeros = (self.board == 0).sum()\n for i in range(min(self.nrows * self.ncols // 8, num_zeros)):\n random_row = self.random.randint(0, self.nrows - 1)\n random_col = self.random.randint(0, self.ncols - 1)\n while self.board[random_row, random_col] != 0:\n random_row = self.random.randint(0, self.nrows - 1)\n random_col = self.random.randint(0, self.ncols - 1)\n if self.random.random() < 0.9:\n self.board[random_row, random_col] = 2\n else:\n self.board[random_row, random_col] = 4\n\n def __swap_on_board(self, pos1, pos2):\n val = self.board[pos1]\n self.board[pos1] = self.board[pos2]\n self.board[pos2] = val\n\n def __left_swipe_on_row(self, row_index):\n left_index = 0\n collapsed = False\n for i in range(self.ncols):\n if self.board[row_index, i] != 0:\n if left_index != i:\n collapsed = True\n self.__swap_on_board((row_index, left_index), (row_index, i))\n left_index += 1\n for i in range(1, self.ncols):\n if self.board[row_index, i] == self.board[row_index, i - 1]:\n self.board[row_index, i - 1] *= 2\n self.board[row_index, i] = 0\n collapsed = True\n self.score += self.board[row_index, i - 1]\n left_index = 0\n for i in range(self.ncols):\n if self.board[row_index, i] != 0:\n self.__swap_on_board((row_index, left_index), (row_index, i))\n left_index += 1\n return collapsed\n\n def __up_swipe_on_col(self, col_index):\n top_index = 0\n collapsed = False\n for i in range(self.nrows):\n if self.board[i, col_index] != 0:\n if top_index != i:\n collapsed = True\n self.__swap_on_board((top_index, col_index), (i, col_index))\n top_index += 1\n for i in range(1, self.nrows):\n if self.board[i, col_index] == self.board[i - 1, col_index]:\n self.board[i - 1, col_index] *= 2\n self.board[i, col_index] = 0\n collapsed = True\n self.score += self.board[i - 1, col_index]\n top_index = 0\n for i in range(self.nrows):\n if self.board[i, col_index] != 0:\n self.__swap_on_board((top_index, col_index), (i, col_index))\n top_index += 1\n return collapsed\n\n def __left_swipe(self):\n collapsed = False\n for i in range(self.nrows):\n if self.__left_swipe_on_row(i):\n collapsed = True\n return collapsed\n\n def __right_swipe(self):\n collapsed = False\n for i in range(self.nrows):\n for j in range(self.ncols // 2):\n self.__swap_on_board((i, j), (i, -j - 1))\n if self.__left_swipe_on_row(i):\n collapsed = True\n for j in range(self.ncols // 2):\n self.__swap_on_board((i, j), (i, -j - 1))\n return collapsed\n\n def __up_swipe(self):\n collapsed = False\n for i in 
range(self.ncols):\n if self.__up_swipe_on_col(i):\n collapsed = True\n return collapsed\n\n def __down_swipe(self):\n collapsed = False\n for i in range(self.ncols):\n for j in range(self.nrows // 2):\n self.__swap_on_board((j, i), (-j - 1, i))\n if self.__up_swipe_on_col(i):\n collapsed = True\n for j in range(self.nrows // 2):\n self.__swap_on_board((j, i), (-j - 1, i))\n return collapsed\n\n def __display(self):\n print(self.board)\n print(f'Current score: {self.score}')\n\n def reset(self):\n self.score = 0\n for i in range(self.nrows):\n for j in range(self.ncols):\n self.board[i, j] = 0\n\n def play(self):\n moves = [self.__up_swipe, self.__left_swipe, self.__down_swipe,\n self.__right_swipe]\n movement_mapping = {char: moves[pos] for pos, char in enumerate('WASD')\n }\n board_copy = Board(self.nrows, self.ncols)\n lost = False\n while self.board.max() < 2048:\n self.__display()\n raw_input_value = input('Play with WASD: ').upper()\n while len(raw_input_value) == 0:\n raw_input_value = input('Play with WASD: ').upper()\n direction = raw_input_value[0]\n while direction not in movement_mapping:\n raw_input_value = input('Play with WASD: ').upper()\n while len(raw_input_value) == 0:\n raw_input_value = input('Play with WASD: ').upper()\n direction = raw_input_value[0]\n did_move = movement_mapping[direction]()\n if did_move:\n self.__add_new_numbers()\n board_copy.board = self.board.copy()\n can_swipe_up = board_copy.__up_swipe()\n can_swipe_left = board_copy.__left_swipe()\n if not (can_swipe_left or can_swipe_up):\n lost = True\n break\n else:\n print(f\"'{direction}'\" + ' is an INVALID MOVE')\n if not lost:\n print('GAME WON')\n else:\n print('GAME LOST')\n self.__display()\n output = self.score\n self.reset()\n return output\n\n\nif __name__ == '__main__':\n b = Board(4, 4)\n b.play()\n", "step-5": "import random\nimport numpy as np\n\nclass Board:\n\tdef __init__(self, nrows, ncols, random_seed=42):\n\t\tself.nrows = nrows\n\t\tself.ncols = ncols\n\t\tself.random = random.Random()\n\t\tself.random.seed(random_seed)\n\t\tself.board = np.zeros((nrows, ncols))\n\t\tself.score = 0\n\n\t\tself.__add_new_numbers()\n\n\t# Initialize with 1/8 of the board filled, with 90% chance of filling\n\t# with 2, and 10% chance of filling with 4\n\tdef __add_new_numbers(self):\n\n\t\tnum_zeros = (self.board == 0).sum()\n\n\t\tfor i in range(min((self.nrows*self.ncols)//8, num_zeros)):\n\t\t\trandom_row = self.random.randint(0,self.nrows-1)\n\t\t\trandom_col = self.random.randint(0,self.ncols-1)\n\t\t\twhile self.board[random_row, random_col] != 0:\n\t\t\t\trandom_row = self.random.randint(0,self.nrows-1)\n\t\t\t\trandom_col = self.random.randint(0,self.ncols-1)\n\t\t\tif self.random.random() < 0.9:\n\t\t\t\tself.board[random_row, random_col] = 2\n\t\t\telse:\n\t\t\t\tself.board[random_row, random_col] = 4\n\n\tdef __swap_on_board(self, pos1, pos2):\n\t\tval = self.board[pos1]\n\t\tself.board[pos1] = self.board[pos2]\n\t\tself.board[pos2] = val\n\n\tdef __left_swipe_on_row(self, row_index):\n\t\tleft_index = 0\n\t\tcollapsed = False\n\t\tfor i in range(self.ncols):\n\t\t\tif self.board[row_index, i] != 0:\n\t\t\t\tif left_index != i:\n\t\t\t\t\tcollapsed = True\n\t\t\t\tself.__swap_on_board((row_index, left_index), (row_index, i))\n\t\t\t\tleft_index += 1\n\n\t\tfor i in range(1, self.ncols):\n\t\t\tif self.board[row_index, i] == self.board[row_index, i-1]:\n\t\t\t\tself.board[row_index, i-1] *= 2\n\t\t\t\tself.board[row_index, i] = 0\n\t\t\t\tcollapsed = True\n\t\t\t\tself.score += 
self.board[row_index, i-1]\n\n\t\tleft_index = 0\n\t\tfor i in range(self.ncols):\n\t\t\tif self.board[row_index, i] != 0:\n\t\t\t\tself.__swap_on_board((row_index, left_index), (row_index, i))\n\t\t\t\tleft_index += 1\n\n\t\treturn collapsed\n\n\tdef __up_swipe_on_col(self, col_index):\n\t\ttop_index = 0\n\t\tcollapsed = False\n\t\tfor i in range(self.nrows):\n\t\t\tif self.board[i, col_index] != 0:\n\t\t\t\tif top_index != i:\n\t\t\t\t\tcollapsed = True\n\t\t\t\tself.__swap_on_board((top_index, col_index), (i, col_index))\n\t\t\t\ttop_index += 1\n\n\t\tfor i in range(1, self.nrows):\n\t\t\tif self.board[i, col_index] == self.board[i-1, col_index]:\n\t\t\t\tself.board[i-1, col_index] *= 2\n\t\t\t\tself.board[i, col_index] = 0\n\t\t\t\tcollapsed = True\n\t\t\t\tself.score += self.board[i-1, col_index]\n\n\t\ttop_index = 0\n\t\tfor i in range(self.nrows):\n\t\t\tif self.board[i, col_index] != 0:\n\t\t\t\tself.__swap_on_board((top_index, col_index), (i, col_index))\n\t\t\t\ttop_index += 1\n\n\t\treturn collapsed\n\n\tdef __left_swipe(self):\n\t\tcollapsed = False\n\t\tfor i in range(self.nrows):\n\t\t\tif self.__left_swipe_on_row(i):\n\t\t\t\tcollapsed = True\n\t\treturn collapsed\n\n\tdef __right_swipe(self):\n\t\tcollapsed = False\n\t\tfor i in range(self.nrows):\n\t\t\tfor j in range(self.ncols//2):\n\t\t\t\tself.__swap_on_board((i, j), (i, -j-1))\n\t\t\tif self.__left_swipe_on_row(i):\n\t\t\t\tcollapsed = True\n\t\t\tfor j in range(self.ncols//2):\n\t\t\t\tself.__swap_on_board((i, j), (i, -j-1))\n\t\treturn collapsed\n\n\tdef __up_swipe(self):\n\t\tcollapsed = False\n\t\tfor i in range(self.ncols):\n\t\t\tif self.__up_swipe_on_col(i):\n\t\t\t\tcollapsed = True\n\t\treturn collapsed\n\n\tdef __down_swipe(self):\n\t\tcollapsed = False\n\t\tfor i in range(self.ncols):\n\t\t\tfor j in range(self.nrows//2):\n\t\t\t\tself.__swap_on_board((j, i), (-j-1, i))\n\t\t\tif self.__up_swipe_on_col(i):\n\t\t\t\tcollapsed = True\n\t\t\tfor j in range(self.nrows//2):\n\t\t\t\tself.__swap_on_board((j, i), (-j-1, i))\n\t\treturn collapsed\n\n\tdef __display(self):\n\t\tprint(self.board)\n\t\tprint(f\"Current score: {self.score}\")\n\n\tdef reset(self):\n\t\tself.score = 0\n\t\tfor i in range(self.nrows):\n\t\t\tfor j in range(self.ncols):\n\t\t\t\tself.board[i, j] = 0\n\n\tdef play(self):\n\t\tmoves = [self.__up_swipe, self.__left_swipe, self.__down_swipe, self.__right_swipe]\n\t\tmovement_mapping = {char: moves[pos] for pos, char in enumerate('WASD')}\n\t\tboard_copy = Board(self.nrows, self.ncols)\n\t\tlost = False\n\t\twhile self.board.max() < 2048:\n\t\t\tself.__display()\n\t\t\traw_input_value = input(\"Play with WASD: \").upper()\n\t\t\twhile len(raw_input_value) == 0:\n\t\t\t\traw_input_value = input(\"Play with WASD: \").upper()\n\t\t\tdirection = raw_input_value[0]\n\t\t\twhile direction not in movement_mapping:\n\t\t\t\traw_input_value = input(\"Play with WASD: \").upper()\n\t\t\t\twhile len(raw_input_value) == 0:\n\t\t\t\t\traw_input_value = input(\"Play with WASD: \").upper()\n\t\t\t\tdirection = raw_input_value[0]\n\t\t\tdid_move = movement_mapping[direction]()\n\t\t\tif did_move:\n\t\t\t\tself.__add_new_numbers()\n\n\t\t\t\t# TODO: Make this more efficient\n\t\t\t\tboard_copy.board = self.board.copy()\n\t\t\t\tcan_swipe_up = board_copy.__up_swipe()\n\t\t\t\tcan_swipe_left = board_copy.__left_swipe()\n\t\t\t\tif not (can_swipe_left or can_swipe_up):\n\t\t\t\t\tlost = True\n\t\t\t\t\tbreak\n\t\t\telse:\n\t\t\t\tprint(f\"'{direction}'\" + ' is an INVALID MOVE')\n\n\t\tif not 
lost:\n\t\t\tprint('GAME WON')\n\t\telse:\n\t\t\tprint('GAME LOST')\n\t\tself.__display()\n\t\toutput = self.score\n\t\tself.reset()\n\t\treturn output\n\n\n\nif __name__ == \"__main__\":\n\tb = Board(4, 4)\n\tb.play()", "step-ids": [ 10, 12, 13, 14, 16 ] }
[ 10, 12, 13, 14, 16 ]
from random import randint

in_file = open("vocabulary.txt", "r")

voca_dic = {}

for line in in_file:
    data = line.strip().split(": ")
    eng_word = data[0]
    kor_word = data[1]

    voca_dic[eng_word] = kor_word

while True:
    keys = list(voca_dic.keys())

    index = randint(0, len(keys) - 1)

    input_val = input("%s: " % voca_dic[keys[index]])

    if input_val == "q":
        break

    if input_val == keys[index]:
        print("Correct!")
    else:
        print("Wrong. The answer is %s." % keys[index])
normal
{ "blob_id": "be64c981e7ea70bff840988a633b4a71a43783", "index": 9814, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor line in in_file:\n    data = line.strip().split(': ')\n    eng_word = data[0]\n    kor_word = data[1]\n    voca_dic[eng_word] = kor_word\nwhile True:\n    keys = list(voca_dic.keys())\n    index = randint(0, len(keys) - 1)\n    input_val = input('%s: ' % voca_dic[keys[index]])\n    if input_val == 'q':\n        break\n    if input_val == keys[index]:\n        print('Correct!')\n    else:\n        print('Wrong. The answer is %s.' % keys[index])\n", "step-3": "<mask token>\nin_file = open('vocabulary.txt', 'r')\nvoca_dic = {}\nfor line in in_file:\n    data = line.strip().split(': ')\n    eng_word = data[0]\n    kor_word = data[1]\n    voca_dic[eng_word] = kor_word\nwhile True:\n    keys = list(voca_dic.keys())\n    index = randint(0, len(keys) - 1)\n    input_val = input('%s: ' % voca_dic[keys[index]])\n    if input_val == 'q':\n        break\n    if input_val == keys[index]:\n        print('Correct!')\n    else:\n        print('Wrong. The answer is %s.' % keys[index])\n", "step-4": "from random import randint\nin_file = open('vocabulary.txt', 'r')\nvoca_dic = {}\nfor line in in_file:\n    data = line.strip().split(': ')\n    eng_word = data[0]\n    kor_word = data[1]\n    voca_dic[eng_word] = kor_word\nwhile True:\n    keys = list(voca_dic.keys())\n    index = randint(0, len(keys) - 1)\n    input_val = input('%s: ' % voca_dic[keys[index]])\n    if input_val == 'q':\n        break\n    if input_val == keys[index]:\n        print('Correct!')\n    else:\n        print('Wrong. The answer is %s.' % keys[index])\n", "step-5": "from random import randint\n\nin_file = open(\"vocabulary.txt\", \"r\")\n\nvoca_dic = {}\n\nfor line in in_file:\n    data = line.strip().split(\": \")\n    eng_word = data[0]\n    kor_word = data[1]\n\n    voca_dic[eng_word] = kor_word\n\nwhile True:\n    keys = list(voca_dic.keys())\n\n    index = randint(0, len(keys) - 1)\n\n    input_val = input(\"%s: \" % voca_dic[keys[index]])\n\n    if input_val == \"q\":\n        break\n\n    if input_val == keys[index]:\n        print(\"Correct!\")\n    else:\n        print(\"Wrong. The answer is %s.\" % keys[index])", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> def upgrade(): op.execute(' '.join([update.format('false'), where])) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def upgrade(): op.execute(' '.join([update.format('false'), where])) def downgrade(): op.execute(' '.join([update.format('true'), where])) <|reserved_special_token_1|> <|reserved_special_token_0|> revision = 'f37637c1bcf8' down_revision = '43c7ecf8ed02' branch_labels = None depends_on = None update = 'UPDATE post SET is_active = {}' where = 'WHERE media_type != {} AND is_shopping = false'.format(MediaType. gif.value) def upgrade(): op.execute(' '.join([update.format('false'), where])) def downgrade(): op.execute(' '.join([update.format('true'), where])) <|reserved_special_token_1|> <|reserved_special_token_0|> from alembic import op from pd.facebook.models import MediaType revision = 'f37637c1bcf8' down_revision = '43c7ecf8ed02' branch_labels = None depends_on = None update = 'UPDATE post SET is_active = {}' where = 'WHERE media_type != {} AND is_shopping = false'.format(MediaType. gif.value) def upgrade(): op.execute(' '.join([update.format('false'), where])) def downgrade(): op.execute(' '.join([update.format('true'), where])) <|reserved_special_token_1|> """inactivate fb posts Revision ID: f37637c1bcf8 Revises: 43c7ecf8ed02 Create Date: 2017-06-22 12:01:59.623040 """ from alembic import op from pd.facebook.models import MediaType # revision identifiers, used by Alembic. revision = 'f37637c1bcf8' down_revision = '43c7ecf8ed02' branch_labels = None depends_on = None # set active status of posts update = "UPDATE post SET is_active = {}" # filter those: not gif, and not shopping; 3 == gif where = "WHERE media_type != {} AND is_shopping = false".format( MediaType.gif.value) def upgrade(): op.execute(' '.join([update.format('false'), where])) def downgrade(): op.execute(' '.join([update.format('true'), where]))
flexible
{ "blob_id": "89ed30411c624e3d930db0bc0b5b716a10908727", "index": 8259, "step-1": "<mask token>\n\n\ndef upgrade():\n op.execute(' '.join([update.format('false'), where]))\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef upgrade():\n op.execute(' '.join([update.format('false'), where]))\n\n\ndef downgrade():\n op.execute(' '.join([update.format('true'), where]))\n", "step-3": "<mask token>\nrevision = 'f37637c1bcf8'\ndown_revision = '43c7ecf8ed02'\nbranch_labels = None\ndepends_on = None\nupdate = 'UPDATE post SET is_active = {}'\nwhere = 'WHERE media_type != {} AND is_shopping = false'.format(MediaType.\n gif.value)\n\n\ndef upgrade():\n op.execute(' '.join([update.format('false'), where]))\n\n\ndef downgrade():\n op.execute(' '.join([update.format('true'), where]))\n", "step-4": "<mask token>\nfrom alembic import op\nfrom pd.facebook.models import MediaType\nrevision = 'f37637c1bcf8'\ndown_revision = '43c7ecf8ed02'\nbranch_labels = None\ndepends_on = None\nupdate = 'UPDATE post SET is_active = {}'\nwhere = 'WHERE media_type != {} AND is_shopping = false'.format(MediaType.\n gif.value)\n\n\ndef upgrade():\n op.execute(' '.join([update.format('false'), where]))\n\n\ndef downgrade():\n op.execute(' '.join([update.format('true'), where]))\n", "step-5": "\"\"\"inactivate fb posts\n\nRevision ID: f37637c1bcf8\nRevises: 43c7ecf8ed02\nCreate Date: 2017-06-22 12:01:59.623040\n\n\"\"\"\nfrom alembic import op\nfrom pd.facebook.models import MediaType\n\n\n# revision identifiers, used by Alembic.\nrevision = 'f37637c1bcf8'\ndown_revision = '43c7ecf8ed02'\nbranch_labels = None\ndepends_on = None\n\n# set active status of posts\nupdate = \"UPDATE post SET is_active = {}\"\n# filter those: not gif, and not shopping; 3 == gif\nwhere = \"WHERE media_type != {} AND is_shopping = false\".format(\n MediaType.gif.value)\n\n\ndef upgrade():\n op.execute(' '.join([update.format('false'), where]))\n\n\ndef downgrade():\n op.execute(' '.join([update.format('true'), where]))\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> x *= 2 <|reserved_special_token_0|> <|reserved_special_token_1|> #!/usr/bin/env python x *= 2 """run = 0 while(run < 10): [TAB]x = (first number in sequence) [TAB](your code here) [TAB]run += 1"""
flexible
{ "blob_id": "3e84265b7c88fc45bc89868c4339fe37dcc7d738", "index": 1112, "step-1": "<mask token>\n", "step-2": "x *= 2\n<mask token>\n", "step-3": "#!/usr/bin/env python\r\n\r\nx *= 2\r\n\r\n\"\"\"run = 0\r\nwhile(run < 10):\r\n[TAB]x = (first number in sequence)\r\n[TAB](your code here)\r\n[TAB]run += 1\"\"\"\r\n\r\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
from django.db import models from django.contrib.auth.models import User from django.db.models.signals import post_save from django.dispatch import receiver from django.core.mail import EmailMultiAlternatives from django.template import loader from django.conf import settings from django.contrib.sites.shortcuts import get_current_site class Blog(models.Model): user = models.ForeignKey(User,on_delete=models.CASCADE,) class Post(models.Model): blog = models.ForeignKey(Blog, on_delete=models.DO_NOTHING) user = models.ForeignKey(User,on_delete=models.CASCADE,) header = models.CharField(max_length=50) text = models.CharField(max_length=2048) create_date = models.DateTimeField(auto_now=True) @receiver(post_save, sender=Post) def send_email(sender, **kwargs): post = Post.objects.get(id=kwargs.get('instance').id) template = loader.get_template('post2email.html') subject = "Post in blog " + post.blog.user.username context = { "header": post.header, "text": post.text, "id": post.id, "host": getattr(settings, 'MY_DJANGO_URL_PATH', ''), } html_content = template.render(context) msg = EmailMultiAlternatives(subject, "", "", [post.user.email]) msg.attach_alternative(html_content, "text/html") msg.send() class ReadPost(models.Model): user = models.ForeignKey(User,on_delete=models.CASCADE,) post = models.ForeignKey(Post, on_delete=models.DO_NOTHING) class Subscription(models.Model): user = models.ForeignKey(User,on_delete=models.CASCADE,) blog = models.ForeignKey(Blog, on_delete=models.DO_NOTHING)
normal
{ "blob_id": "de77edaccdaada785f41828135ad2da4ae2b403e", "index": 725, "step-1": "<mask token>\n\n\nclass Post(models.Model):\n blog = models.ForeignKey(Blog, on_delete=models.DO_NOTHING)\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n header = models.CharField(max_length=50)\n text = models.CharField(max_length=2048)\n create_date = models.DateTimeField(auto_now=True)\n\n\n<mask token>\n\n\nclass ReadPost(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n post = models.ForeignKey(Post, on_delete=models.DO_NOTHING)\n\n\nclass Subscription(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n blog = models.ForeignKey(Blog, on_delete=models.DO_NOTHING)\n", "step-2": "<mask token>\n\n\nclass Blog(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n\n\nclass Post(models.Model):\n blog = models.ForeignKey(Blog, on_delete=models.DO_NOTHING)\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n header = models.CharField(max_length=50)\n text = models.CharField(max_length=2048)\n create_date = models.DateTimeField(auto_now=True)\n\n\n<mask token>\n\n\nclass ReadPost(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n post = models.ForeignKey(Post, on_delete=models.DO_NOTHING)\n\n\nclass Subscription(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n blog = models.ForeignKey(Blog, on_delete=models.DO_NOTHING)\n", "step-3": "<mask token>\n\n\nclass Blog(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n\n\nclass Post(models.Model):\n blog = models.ForeignKey(Blog, on_delete=models.DO_NOTHING)\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n header = models.CharField(max_length=50)\n text = models.CharField(max_length=2048)\n create_date = models.DateTimeField(auto_now=True)\n\n\n@receiver(post_save, sender=Post)\ndef send_email(sender, **kwargs):\n post = Post.objects.get(id=kwargs.get('instance').id)\n template = loader.get_template('post2email.html')\n subject = 'Post in blog ' + post.blog.user.username\n context = {'header': post.header, 'text': post.text, 'id': post.id,\n 'host': getattr(settings, 'MY_DJANGO_URL_PATH', '')}\n html_content = template.render(context)\n msg = EmailMultiAlternatives(subject, '', '', [post.user.email])\n msg.attach_alternative(html_content, 'text/html')\n msg.send()\n\n\nclass ReadPost(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n post = models.ForeignKey(Post, on_delete=models.DO_NOTHING)\n\n\nclass Subscription(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n blog = models.ForeignKey(Blog, on_delete=models.DO_NOTHING)\n", "step-4": "from django.db import models\nfrom django.contrib.auth.models import User\nfrom django.db.models.signals import post_save\nfrom django.dispatch import receiver\nfrom django.core.mail import EmailMultiAlternatives\nfrom django.template import loader\nfrom django.conf import settings\nfrom django.contrib.sites.shortcuts import get_current_site\n\n\nclass Blog(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n\n\nclass Post(models.Model):\n blog = models.ForeignKey(Blog, on_delete=models.DO_NOTHING)\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n header = models.CharField(max_length=50)\n text = models.CharField(max_length=2048)\n create_date = models.DateTimeField(auto_now=True)\n\n\n@receiver(post_save, sender=Post)\ndef send_email(sender, **kwargs):\n post = 
Post.objects.get(id=kwargs.get('instance').id)\n template = loader.get_template('post2email.html')\n subject = 'Post in blog ' + post.blog.user.username\n context = {'header': post.header, 'text': post.text, 'id': post.id,\n 'host': getattr(settings, 'MY_DJANGO_URL_PATH', '')}\n html_content = template.render(context)\n msg = EmailMultiAlternatives(subject, '', '', [post.user.email])\n msg.attach_alternative(html_content, 'text/html')\n msg.send()\n\n\nclass ReadPost(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n post = models.ForeignKey(Post, on_delete=models.DO_NOTHING)\n\n\nclass Subscription(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n blog = models.ForeignKey(Blog, on_delete=models.DO_NOTHING)\n", "step-5": "from django.db import models\nfrom django.contrib.auth.models import User\nfrom django.db.models.signals import post_save\nfrom django.dispatch import receiver\nfrom django.core.mail import EmailMultiAlternatives\nfrom django.template import loader\nfrom django.conf import settings\nfrom django.contrib.sites.shortcuts import get_current_site\n\nclass Blog(models.Model):\n user = models.ForeignKey(User,on_delete=models.CASCADE,)\n\nclass Post(models.Model):\n blog = models.ForeignKey(Blog, on_delete=models.DO_NOTHING)\n user = models.ForeignKey(User,on_delete=models.CASCADE,)\n header = models.CharField(max_length=50)\n text = models.CharField(max_length=2048)\n create_date = models.DateTimeField(auto_now=True)\n\n@receiver(post_save, sender=Post)\ndef send_email(sender, **kwargs):\n post = Post.objects.get(id=kwargs.get('instance').id)\n template = loader.get_template('post2email.html')\n subject = \"Post in blog \" + post.blog.user.username\n context = { \"header\": post.header,\n \"text\": post.text,\n \"id\": post.id,\n \"host\": getattr(settings, 'MY_DJANGO_URL_PATH', ''),\n }\n html_content = template.render(context)\n msg = EmailMultiAlternatives(subject, \"\", \"\", [post.user.email])\n msg.attach_alternative(html_content, \"text/html\")\n msg.send() \n \nclass ReadPost(models.Model):\n user = models.ForeignKey(User,on_delete=models.CASCADE,)\n post = models.ForeignKey(Post, on_delete=models.DO_NOTHING)\n\nclass Subscription(models.Model):\n user = models.ForeignKey(User,on_delete=models.CASCADE,)\n blog = models.ForeignKey(Blog, on_delete=models.DO_NOTHING)", "step-ids": [ 6, 8, 9, 10, 11 ] }
[ 6, 8, 9, 10, 11 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> setup(name='greeker', version='0.3.2-git', description= 'scrambles nouns in an XML document to produce a specimen for layout testing' , author='Brian Tingle', author_email= '[email protected]', url= 'http://tingletech.github.com/greeker.py/', install_requires=[ 'inflect>=0.2.1', 'lxml>=2.3.2', 'nltk>=2.0.1rc2-git', 'numpy', 'argparse'], py_modules=['greeker'], scripts=['greeker.py']) <|reserved_special_token_1|> from distutils.core import setup setup(name='greeker', version='0.3.2-git', description= 'scrambles nouns in an XML document to produce a specimen for layout testing' , author='Brian Tingle', author_email= '[email protected]', url= 'http://tingletech.github.com/greeker.py/', install_requires=[ 'inflect>=0.2.1', 'lxml>=2.3.2', 'nltk>=2.0.1rc2-git', 'numpy', 'argparse'], py_modules=['greeker'], scripts=['greeker.py']) <|reserved_special_token_1|> from distutils.core import setup setup(name='greeker', version='0.3.2-git', description="scrambles nouns in an XML document to produce a specimen for layout testing", author="Brian Tingle", author_email="[email protected]", url="http://tingletech.github.com/greeker.py/", install_requires=["inflect>=0.2.1", "lxml>=2.3.2", "nltk>=2.0.1rc2-git", "numpy", "argparse"], py_modules=['greeker'], scripts=['greeker.py'], )
flexible
{ "blob_id": "1fda8274024bdf74e7fbd4ac4a27d6cfe6032a13", "index": 9790, "step-1": "<mask token>\n", "step-2": "<mask token>\nsetup(name='greeker', version='0.3.2-git', description=\n 'scrambles nouns in an XML document to produce a specimen for layout testing'\n , author='Brian Tingle', author_email=\n '[email protected]', url=\n 'http://tingletech.github.com/greeker.py/', install_requires=[\n 'inflect>=0.2.1', 'lxml>=2.3.2', 'nltk>=2.0.1rc2-git', 'numpy',\n 'argparse'], py_modules=['greeker'], scripts=['greeker.py'])\n", "step-3": "from distutils.core import setup\nsetup(name='greeker', version='0.3.2-git', description=\n 'scrambles nouns in an XML document to produce a specimen for layout testing'\n , author='Brian Tingle', author_email=\n '[email protected]', url=\n 'http://tingletech.github.com/greeker.py/', install_requires=[\n 'inflect>=0.2.1', 'lxml>=2.3.2', 'nltk>=2.0.1rc2-git', 'numpy',\n 'argparse'], py_modules=['greeker'], scripts=['greeker.py'])\n", "step-4": "from distutils.core import setup\nsetup(name='greeker',\n version='0.3.2-git',\n description=\"scrambles nouns in an XML document to produce a specimen for layout testing\",\n author=\"Brian Tingle\",\n author_email=\"[email protected]\",\n url=\"http://tingletech.github.com/greeker.py/\",\n install_requires=[\"inflect>=0.2.1\", \"lxml>=2.3.2\", \"nltk>=2.0.1rc2-git\", \"numpy\", \"argparse\"],\n py_modules=['greeker'],\n scripts=['greeker.py'],\n )\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> region = 'us-east-2' ec2 = boto3.resource('ec2', region) ImageId = 'ami-07efac79022b86107' KeyName = 'aws_keypair' InstanceType = 't2.micro' instances = ec2.create_instances(ImageId=ImageId, MinCount=1, MaxCount=5, KeyName=KeyName, InstanceType=InstanceType, IamInstanceProfile={'Name': 'Test-ec2-pro'}) <|reserved_special_token_1|> import boto3 import json region = 'us-east-2' ec2 = boto3.resource('ec2', region) ImageId = 'ami-07efac79022b86107' KeyName = 'aws_keypair' InstanceType = 't2.micro' instances = ec2.create_instances(ImageId=ImageId, MinCount=1, MaxCount=5, KeyName=KeyName, InstanceType=InstanceType, IamInstanceProfile={'Name': 'Test-ec2-pro'}) <|reserved_special_token_1|> import boto3 import json region = 'us-east-2' ec2 = boto3.resource('ec2',region) ImageId = 'ami-07efac79022b86107' KeyName = 'aws_keypair' InstanceType = 't2.micro' #IamInstanceProfile = instances = ec2.create_instances( ImageId =ImageId, MinCount = 1, MaxCount = 5, KeyName = KeyName, InstanceType = InstanceType, IamInstanceProfile = { 'Name' : 'Test-ec2-pro', } )
flexible
{ "blob_id": "b7606befe123c4fb6840a1bc62e43e6721edfcc3", "index": 5005, "step-1": "<mask token>\n", "step-2": "<mask token>\nregion = 'us-east-2'\nec2 = boto3.resource('ec2', region)\nImageId = 'ami-07efac79022b86107'\nKeyName = 'aws_keypair'\nInstanceType = 't2.micro'\ninstances = ec2.create_instances(ImageId=ImageId, MinCount=1, MaxCount=5,\n KeyName=KeyName, InstanceType=InstanceType, IamInstanceProfile={'Name':\n 'Test-ec2-pro'})\n", "step-3": "import boto3\nimport json\nregion = 'us-east-2'\nec2 = boto3.resource('ec2', region)\nImageId = 'ami-07efac79022b86107'\nKeyName = 'aws_keypair'\nInstanceType = 't2.micro'\ninstances = ec2.create_instances(ImageId=ImageId, MinCount=1, MaxCount=5,\n KeyName=KeyName, InstanceType=InstanceType, IamInstanceProfile={'Name':\n 'Test-ec2-pro'})\n", "step-4": "import boto3\nimport json\n\nregion = 'us-east-2'\n\nec2 = boto3.resource('ec2',region)\n\nImageId = 'ami-07efac79022b86107'\nKeyName = 'aws_keypair'\nInstanceType = 't2.micro'\n#IamInstanceProfile =\ninstances = ec2.create_instances(\n ImageId =ImageId,\n MinCount = 1,\n MaxCount = 5,\n KeyName = KeyName,\n InstanceType = InstanceType,\n IamInstanceProfile = {\n 'Name' : 'Test-ec2-pro',\n\n }\n)\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Migration(migrations.Migration): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Migration(migrations.Migration): dependencies = [('sub_adjuster', '0002_parameters')] operations = [migrations.AlterField(model_name='subtitles', name= 'line_A', field=models.CharField(max_length=255, default=None)), migrations.AlterField(model_name='subtitles', name='line_B', field= models.CharField(max_length=255, default=None)), migrations. AlterField(model_name='subtitles', name='line_C', field=models. CharField(max_length=255, default=None))] <|reserved_special_token_1|> from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [('sub_adjuster', '0002_parameters')] operations = [migrations.AlterField(model_name='subtitles', name= 'line_A', field=models.CharField(max_length=255, default=None)), migrations.AlterField(model_name='subtitles', name='line_B', field= models.CharField(max_length=255, default=None)), migrations. AlterField(model_name='subtitles', name='line_C', field=models. CharField(max_length=255, default=None))] <|reserved_special_token_1|> # -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('sub_adjuster', '0002_parameters'), ] operations = [ migrations.AlterField( model_name='subtitles', name='line_A', field=models.CharField(max_length=255, default=None), ), migrations.AlterField( model_name='subtitles', name='line_B', field=models.CharField(max_length=255, default=None), ), migrations.AlterField( model_name='subtitles', name='line_C', field=models.CharField(max_length=255, default=None), ), ]
flexible
{ "blob_id": "156203042ed8a9bde0e9d8587ea3d37de6bcfdf7", "index": 5155, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('sub_adjuster', '0002_parameters')]\n operations = [migrations.AlterField(model_name='subtitles', name=\n 'line_A', field=models.CharField(max_length=255, default=None)),\n migrations.AlterField(model_name='subtitles', name='line_B', field=\n models.CharField(max_length=255, default=None)), migrations.\n AlterField(model_name='subtitles', name='line_C', field=models.\n CharField(max_length=255, default=None))]\n", "step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('sub_adjuster', '0002_parameters')]\n operations = [migrations.AlterField(model_name='subtitles', name=\n 'line_A', field=models.CharField(max_length=255, default=None)),\n migrations.AlterField(model_name='subtitles', name='line_B', field=\n models.CharField(max_length=255, default=None)), migrations.\n AlterField(model_name='subtitles', name='line_C', field=models.\n CharField(max_length=255, default=None))]\n", "step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('sub_adjuster', '0002_parameters'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='subtitles',\n name='line_A',\n field=models.CharField(max_length=255, default=None),\n ),\n migrations.AlterField(\n model_name='subtitles',\n name='line_B',\n field=models.CharField(max_length=255, default=None),\n ),\n migrations.AlterField(\n model_name='subtitles',\n name='line_C',\n field=models.CharField(max_length=255, default=None),\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import requests from lxml import html headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36'} mail_ru_link = "http://mail.ru" lenta_link = "https://lenta.ru/" req = requests.get(mail_ru_link, headers=headers).text root = html.fromstring(req) news = [] links = root.xpath( "//div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/a[@name]/@href | " "//div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/*/a[contains(@href, 'https://')]/@href") titles = root.xpath("//div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/a[@name]/*/*/h3/text() | " "//div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/*/a[contains(@href, 'https://')]/text()") if len(links) > 0: for i, l in enumerate(links): article = {'link': l, 'title': titles[i], 'source': mail_ru_link} news.append(article) else: print("Error") req = requests.get(lenta_link, headers=headers).text root = html.fromstring(req) links = root.xpath( "//div[@class='item']/a/@href") titles = root.xpath("//div[@class='item']/a/text()") if len(links) > 0: for i, l in enumerate(links): article = {'link': lenta_link + l, 'title': titles[i], 'source': lenta_link} news.append(article) else: print("Error") print(news)
normal
{ "blob_id": "00d2a29774a4278b1b022571b3f16c88224f08fc", "index": 5207, "step-1": "<mask token>\n", "step-2": "<mask token>\nif len(links) > 0:\n for i, l in enumerate(links):\n article = {'link': l, 'title': titles[i], 'source': mail_ru_link}\n news.append(article)\nelse:\n print('Error')\n<mask token>\nif len(links) > 0:\n for i, l in enumerate(links):\n article = {'link': lenta_link + l, 'title': titles[i], 'source':\n lenta_link}\n news.append(article)\nelse:\n print('Error')\nprint(news)\n", "step-3": "<mask token>\nheaders = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36'\n }\nmail_ru_link = 'http://mail.ru'\nlenta_link = 'https://lenta.ru/'\nreq = requests.get(mail_ru_link, headers=headers).text\nroot = html.fromstring(req)\nnews = []\nlinks = root.xpath(\n \"//div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/a[@name]/@href | //div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/*/a[contains(@href, 'https://')]/@href\"\n )\ntitles = root.xpath(\n \"//div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/a[@name]/*/*/h3/text() | //div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/*/a[contains(@href, 'https://')]/text()\"\n )\nif len(links) > 0:\n for i, l in enumerate(links):\n article = {'link': l, 'title': titles[i], 'source': mail_ru_link}\n news.append(article)\nelse:\n print('Error')\nreq = requests.get(lenta_link, headers=headers).text\nroot = html.fromstring(req)\nlinks = root.xpath(\"//div[@class='item']/a/@href\")\ntitles = root.xpath(\"//div[@class='item']/a/text()\")\nif len(links) > 0:\n for i, l in enumerate(links):\n article = {'link': lenta_link + l, 'title': titles[i], 'source':\n lenta_link}\n news.append(article)\nelse:\n print('Error')\nprint(news)\n", "step-4": "import requests\nfrom lxml import html\nheaders = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36'\n }\nmail_ru_link = 'http://mail.ru'\nlenta_link = 'https://lenta.ru/'\nreq = requests.get(mail_ru_link, headers=headers).text\nroot = html.fromstring(req)\nnews = []\nlinks = root.xpath(\n \"//div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/a[@name]/@href | //div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/*/a[contains(@href, 'https://')]/@href\"\n )\ntitles = root.xpath(\n \"//div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/a[@name]/*/*/h3/text() | //div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/*/a[contains(@href, 'https://')]/text()\"\n )\nif len(links) > 0:\n for i, l in enumerate(links):\n article = {'link': l, 'title': titles[i], 'source': mail_ru_link}\n news.append(article)\nelse:\n print('Error')\nreq = requests.get(lenta_link, headers=headers).text\nroot = html.fromstring(req)\nlinks = root.xpath(\"//div[@class='item']/a/@href\")\ntitles = root.xpath(\"//div[@class='item']/a/text()\")\nif len(links) > 0:\n for i, l in enumerate(links):\n article = {'link': lenta_link + l, 'title': titles[i], 'source':\n lenta_link}\n news.append(article)\nelse:\n print('Error')\nprint(news)\n", "step-5": "import requests\nfrom lxml import html\n\nheaders = {\n 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36'}\nmail_ru_link = \"http://mail.ru\"\nlenta_link = \"https://lenta.ru/\"\n\nreq = requests.get(mail_ru_link, headers=headers).text\nroot = 
html.fromstring(req)\n\nnews = []\n\nlinks = root.xpath(\n \"//div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/a[@name]/@href | \"\n \"//div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/*/a[contains(@href, 'https://')]/@href\")\ntitles = root.xpath(\"//div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/a[@name]/*/*/h3/text() | \"\n \"//div[@class='tabs__content']/*/div[contains(@class, 'news-item')]/*/a[contains(@href, 'https://')]/text()\")\nif len(links) > 0:\n for i, l in enumerate(links):\n article = {'link': l, 'title': titles[i], 'source': mail_ru_link}\n news.append(article)\nelse:\n print(\"Error\")\n\n\nreq = requests.get(lenta_link, headers=headers).text\nroot = html.fromstring(req)\n\nlinks = root.xpath(\n \"//div[@class='item']/a/@href\")\ntitles = root.xpath(\"//div[@class='item']/a/text()\")\nif len(links) > 0:\n for i, l in enumerate(links):\n article = {'link': lenta_link + l, 'title': titles[i], 'source': lenta_link}\n news.append(article)\nelse:\n print(\"Error\")\nprint(news)", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> def item(request): if not request.session.get('is_login', None): return redirect('/item/item') else: item_list = Item.objects.all() return render(request, 'item/item.html', locals()) <|reserved_special_token_0|> def add_unit(request): if request.method == 'GET': last_unit_info = Unit.objects.last() return render(request, 'item/add_unit.html', locals()) else: unit_index = request.POST.get('unit_index') unit_name = request.POST.get('unit_name') new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name) new_unit.save() return redirect('/item/unit/') def edit_unit(request): if request.method == 'GET': nid = request.GET.get('nid') unit_info = Unit.objects.get(id=nid) return render(request, 'item/edit_unit.html', locals()) else: nid = request.GET.get('nid') unit_index = request.POST.get('unit_index') unit_name = request.POST.get('unit_name') unit_info = Unit.objects.get(id=nid) unit_info.unit_index = unit_index unit_info.unit_name = unit_name unit_info.save() return redirect('/item/unit/') def del_unit(request): nid = request.GET.get('nid') unit_info = Unit.objects.filter(id=nid) unit_info.delete() return redirect('/item/unit/') <|reserved_special_token_1|> <|reserved_special_token_0|> def item(request): if not request.session.get('is_login', None): return redirect('/item/item') else: item_list = Item.objects.all() return render(request, 'item/item.html', locals()) <|reserved_special_token_0|> def edit_item(request): if request.method == 'GET': nid = request.GET.get('nid') item_info = Item.objects.get(id=nid) unit_list = Unit.objects.all() return render(request, 'item/edit_item.html', locals()) else: nid = request.GET.get('nid') item_index = request.POST.get('item_index') item_chinese_name = request.POST.get('item_chinese_name') item_english_name = request.POST.get('item_english_name') item_method = request.POST.get('item_method') item_unit = request.POST.get('item_unit') is_calc = request.POST.get('is_calc') is_use = request.POST.get('is_use') unit_info = Unit.objects.get(id=item_unit) item_info = Item.objects.get(id=nid) item_info.item_index = item_index item_info.item_chinese_name = item_chinese_name item_info.item_english_name = item_english_name item_info.item_method = item_method item_info.item_unit = unit_info item_info.is_calc = str_to_bool(is_calc) item_info.is_use = str_to_bool(is_use) item_info.save() return redirect('/item/item/') <|reserved_special_token_0|> def add_unit(request): if request.method == 'GET': last_unit_info = Unit.objects.last() return render(request, 'item/add_unit.html', locals()) else: unit_index = request.POST.get('unit_index') unit_name = request.POST.get('unit_name') new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name) new_unit.save() return redirect('/item/unit/') def edit_unit(request): if request.method == 'GET': nid = request.GET.get('nid') unit_info = Unit.objects.get(id=nid) return render(request, 'item/edit_unit.html', locals()) else: nid = request.GET.get('nid') unit_index = request.POST.get('unit_index') unit_name = request.POST.get('unit_name') unit_info = Unit.objects.get(id=nid) unit_info.unit_index = unit_index unit_info.unit_name = unit_name unit_info.save() return redirect('/item/unit/') def del_unit(request): nid = request.GET.get('nid') unit_info = Unit.objects.filter(id=nid) unit_info.delete() return redirect('/item/unit/') <|reserved_special_token_1|> <|reserved_special_token_0|> def item(request): if not request.session.get('is_login', None): return redirect('/item/item') else: item_list = 
Item.objects.all() return render(request, 'item/item.html', locals()) <|reserved_special_token_0|> def edit_item(request): if request.method == 'GET': nid = request.GET.get('nid') item_info = Item.objects.get(id=nid) unit_list = Unit.objects.all() return render(request, 'item/edit_item.html', locals()) else: nid = request.GET.get('nid') item_index = request.POST.get('item_index') item_chinese_name = request.POST.get('item_chinese_name') item_english_name = request.POST.get('item_english_name') item_method = request.POST.get('item_method') item_unit = request.POST.get('item_unit') is_calc = request.POST.get('is_calc') is_use = request.POST.get('is_use') unit_info = Unit.objects.get(id=item_unit) item_info = Item.objects.get(id=nid) item_info.item_index = item_index item_info.item_chinese_name = item_chinese_name item_info.item_english_name = item_english_name item_info.item_method = item_method item_info.item_unit = unit_info item_info.is_calc = str_to_bool(is_calc) item_info.is_use = str_to_bool(is_use) item_info.save() return redirect('/item/item/') <|reserved_special_token_0|> def unit(request): if not request.session.get('is_login', None): return redirect('/item/unit') else: unit_list = Unit.objects.all() return render(request, 'item/unit.html', locals()) def add_unit(request): if request.method == 'GET': last_unit_info = Unit.objects.last() return render(request, 'item/add_unit.html', locals()) else: unit_index = request.POST.get('unit_index') unit_name = request.POST.get('unit_name') new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name) new_unit.save() return redirect('/item/unit/') def edit_unit(request): if request.method == 'GET': nid = request.GET.get('nid') unit_info = Unit.objects.get(id=nid) return render(request, 'item/edit_unit.html', locals()) else: nid = request.GET.get('nid') unit_index = request.POST.get('unit_index') unit_name = request.POST.get('unit_name') unit_info = Unit.objects.get(id=nid) unit_info.unit_index = unit_index unit_info.unit_name = unit_name unit_info.save() return redirect('/item/unit/') def del_unit(request): nid = request.GET.get('nid') unit_info = Unit.objects.filter(id=nid) unit_info.delete() return redirect('/item/unit/') <|reserved_special_token_1|> <|reserved_special_token_0|> def str_to_bool(s): return True if s.lower() == 'true' else False def item(request): if not request.session.get('is_login', None): return redirect('/item/item') else: item_list = Item.objects.all() return render(request, 'item/item.html', locals()) def add_item(request): if request.method == 'GET': last_item_info = Item.objects.last() unit_list = Unit.objects.all() return render(request, 'item/add_item.html', locals()) else: item_index = request.POST.get('item_index') item_chinese_name = request.POST.get('item_chinese_name') item_english_name = request.POST.get('item_english_name') item_method = request.POST.get('item_method') item_unit = request.POST.get('item_unit') is_calc = request.POST.get('is_calc') is_use = request.POST.get('is_use') unit_info = Unit.objects.get(id=item_unit) new_item = Item(item_index=int(item_index), item_chinese_name= item_chinese_name, item_english_name=item_english_name, item_method=item_method, item_unit=unit_info, is_calc= str_to_bool(is_calc), is_use=str_to_bool(is_use)) new_item.save() return redirect('/item/item/') def edit_item(request): if request.method == 'GET': nid = request.GET.get('nid') item_info = Item.objects.get(id=nid) unit_list = Unit.objects.all() return render(request, 'item/edit_item.html', locals()) else: nid = 
request.GET.get('nid') item_index = request.POST.get('item_index') item_chinese_name = request.POST.get('item_chinese_name') item_english_name = request.POST.get('item_english_name') item_method = request.POST.get('item_method') item_unit = request.POST.get('item_unit') is_calc = request.POST.get('is_calc') is_use = request.POST.get('is_use') unit_info = Unit.objects.get(id=item_unit) item_info = Item.objects.get(id=nid) item_info.item_index = item_index item_info.item_chinese_name = item_chinese_name item_info.item_english_name = item_english_name item_info.item_method = item_method item_info.item_unit = unit_info item_info.is_calc = str_to_bool(is_calc) item_info.is_use = str_to_bool(is_use) item_info.save() return redirect('/item/item/') <|reserved_special_token_0|> def unit(request): if not request.session.get('is_login', None): return redirect('/item/unit') else: unit_list = Unit.objects.all() return render(request, 'item/unit.html', locals()) def add_unit(request): if request.method == 'GET': last_unit_info = Unit.objects.last() return render(request, 'item/add_unit.html', locals()) else: unit_index = request.POST.get('unit_index') unit_name = request.POST.get('unit_name') new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name) new_unit.save() return redirect('/item/unit/') def edit_unit(request): if request.method == 'GET': nid = request.GET.get('nid') unit_info = Unit.objects.get(id=nid) return render(request, 'item/edit_unit.html', locals()) else: nid = request.GET.get('nid') unit_index = request.POST.get('unit_index') unit_name = request.POST.get('unit_name') unit_info = Unit.objects.get(id=nid) unit_info.unit_index = unit_index unit_info.unit_name = unit_name unit_info.save() return redirect('/item/unit/') def del_unit(request): nid = request.GET.get('nid') unit_info = Unit.objects.filter(id=nid) unit_info.delete() return redirect('/item/unit/') <|reserved_special_token_1|> from django.shortcuts import render, redirect # Create your views here. 
from item.models import Item, Unit def str_to_bool(s): return True if s.lower() == 'true' else False def item(request): if not request.session.get('is_login', None): return redirect('/item/item') else: item_list = Item.objects.all() return render(request, 'item/item.html', locals()) def add_item(request): if request.method == 'GET': last_item_info = Item.objects.last() unit_list=Unit.objects.all() return render(request, 'item/add_item.html', locals()) else: item_index = request.POST.get('item_index') item_chinese_name = request.POST.get('item_chinese_name') item_english_name = request.POST.get('item_english_name') item_method = request.POST.get('item_method') item_unit = request.POST.get('item_unit') is_calc = request.POST.get('is_calc') is_use = request.POST.get('is_use') unit_info=Unit.objects.get(id=item_unit) new_item = Item(item_index=int(item_index), item_chinese_name=item_chinese_name, item_english_name=item_english_name,item_method=item_method,item_unit=unit_info,is_calc=str_to_bool(is_calc), is_use=str_to_bool(is_use)) new_item.save() return redirect('/item/item/') def edit_item(request): if request.method == 'GET': nid = request.GET.get('nid') item_info = Item.objects.get(id=nid) unit_list = Unit.objects.all() return render(request, 'item/edit_item.html', locals()) else: nid = request.GET.get('nid') item_index = request.POST.get('item_index') item_chinese_name = request.POST.get('item_chinese_name') item_english_name = request.POST.get('item_english_name') item_method = request.POST.get('item_method') item_unit = request.POST.get('item_unit') is_calc = request.POST.get('is_calc') is_use = request.POST.get('is_use') unit_info = Unit.objects.get(id=item_unit) item_info = Item.objects.get(id=nid) item_info.item_index = item_index item_info.item_chinese_name = item_chinese_name item_info.item_english_name = item_english_name item_info.item_method = item_method item_info.item_unit = unit_info item_info.is_calc = str_to_bool(is_calc) item_info.is_use = str_to_bool(is_use) item_info.save() return redirect('/item/item/') def del_item(request): nid = request.GET.get('nid') item_info = Unit.objects.filter(id=nid) item_info.delete() return redirect('/item/item/') def unit(request): if not request.session.get('is_login', None): return redirect('/item/unit') else: unit_list = Unit.objects.all() return render(request, 'item/unit.html', locals()) def add_unit(request): if request.method == 'GET': last_unit_info = Unit.objects.last() return render(request, 'item/add_unit.html', locals()) else: unit_index = request.POST.get('unit_index') unit_name = request.POST.get('unit_name') new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name,) new_unit.save() return redirect('/item/unit/') def edit_unit(request): if request.method == 'GET': nid = request.GET.get('nid') unit_info = Unit.objects.get(id=nid) return render(request, 'item/edit_unit.html', locals()) else: nid = request.GET.get('nid') unit_index = request.POST.get('unit_index') unit_name = request.POST.get('unit_name') unit_info = Unit.objects.get(id=nid) unit_info.unit_index = unit_index unit_info.unit_name = unit_name unit_info.save() return redirect('/item/unit/') def del_unit(request): nid = request.GET.get('nid') unit_info = Unit.objects.filter(id=nid) unit_info.delete() return redirect('/item/unit/')
flexible
{ "blob_id": "22b2ebdbb48caa593bece030d238089a0aa27053", "index": 1983, "step-1": "<mask token>\n\n\ndef item(request):\n if not request.session.get('is_login', None):\n return redirect('/item/item')\n else:\n item_list = Item.objects.all()\n return render(request, 'item/item.html', locals())\n\n\n<mask token>\n\n\ndef add_unit(request):\n if request.method == 'GET':\n last_unit_info = Unit.objects.last()\n return render(request, 'item/add_unit.html', locals())\n else:\n unit_index = request.POST.get('unit_index')\n unit_name = request.POST.get('unit_name')\n new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name)\n new_unit.save()\n return redirect('/item/unit/')\n\n\ndef edit_unit(request):\n if request.method == 'GET':\n nid = request.GET.get('nid')\n unit_info = Unit.objects.get(id=nid)\n return render(request, 'item/edit_unit.html', locals())\n else:\n nid = request.GET.get('nid')\n unit_index = request.POST.get('unit_index')\n unit_name = request.POST.get('unit_name')\n unit_info = Unit.objects.get(id=nid)\n unit_info.unit_index = unit_index\n unit_info.unit_name = unit_name\n unit_info.save()\n return redirect('/item/unit/')\n\n\ndef del_unit(request):\n nid = request.GET.get('nid')\n unit_info = Unit.objects.filter(id=nid)\n unit_info.delete()\n return redirect('/item/unit/')\n", "step-2": "<mask token>\n\n\ndef item(request):\n if not request.session.get('is_login', None):\n return redirect('/item/item')\n else:\n item_list = Item.objects.all()\n return render(request, 'item/item.html', locals())\n\n\n<mask token>\n\n\ndef edit_item(request):\n if request.method == 'GET':\n nid = request.GET.get('nid')\n item_info = Item.objects.get(id=nid)\n unit_list = Unit.objects.all()\n return render(request, 'item/edit_item.html', locals())\n else:\n nid = request.GET.get('nid')\n item_index = request.POST.get('item_index')\n item_chinese_name = request.POST.get('item_chinese_name')\n item_english_name = request.POST.get('item_english_name')\n item_method = request.POST.get('item_method')\n item_unit = request.POST.get('item_unit')\n is_calc = request.POST.get('is_calc')\n is_use = request.POST.get('is_use')\n unit_info = Unit.objects.get(id=item_unit)\n item_info = Item.objects.get(id=nid)\n item_info.item_index = item_index\n item_info.item_chinese_name = item_chinese_name\n item_info.item_english_name = item_english_name\n item_info.item_method = item_method\n item_info.item_unit = unit_info\n item_info.is_calc = str_to_bool(is_calc)\n item_info.is_use = str_to_bool(is_use)\n item_info.save()\n return redirect('/item/item/')\n\n\n<mask token>\n\n\ndef add_unit(request):\n if request.method == 'GET':\n last_unit_info = Unit.objects.last()\n return render(request, 'item/add_unit.html', locals())\n else:\n unit_index = request.POST.get('unit_index')\n unit_name = request.POST.get('unit_name')\n new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name)\n new_unit.save()\n return redirect('/item/unit/')\n\n\ndef edit_unit(request):\n if request.method == 'GET':\n nid = request.GET.get('nid')\n unit_info = Unit.objects.get(id=nid)\n return render(request, 'item/edit_unit.html', locals())\n else:\n nid = request.GET.get('nid')\n unit_index = request.POST.get('unit_index')\n unit_name = request.POST.get('unit_name')\n unit_info = Unit.objects.get(id=nid)\n unit_info.unit_index = unit_index\n unit_info.unit_name = unit_name\n unit_info.save()\n return redirect('/item/unit/')\n\n\ndef del_unit(request):\n nid = request.GET.get('nid')\n unit_info = Unit.objects.filter(id=nid)\n 
unit_info.delete()\n return redirect('/item/unit/')\n", "step-3": "<mask token>\n\n\ndef item(request):\n if not request.session.get('is_login', None):\n return redirect('/item/item')\n else:\n item_list = Item.objects.all()\n return render(request, 'item/item.html', locals())\n\n\n<mask token>\n\n\ndef edit_item(request):\n if request.method == 'GET':\n nid = request.GET.get('nid')\n item_info = Item.objects.get(id=nid)\n unit_list = Unit.objects.all()\n return render(request, 'item/edit_item.html', locals())\n else:\n nid = request.GET.get('nid')\n item_index = request.POST.get('item_index')\n item_chinese_name = request.POST.get('item_chinese_name')\n item_english_name = request.POST.get('item_english_name')\n item_method = request.POST.get('item_method')\n item_unit = request.POST.get('item_unit')\n is_calc = request.POST.get('is_calc')\n is_use = request.POST.get('is_use')\n unit_info = Unit.objects.get(id=item_unit)\n item_info = Item.objects.get(id=nid)\n item_info.item_index = item_index\n item_info.item_chinese_name = item_chinese_name\n item_info.item_english_name = item_english_name\n item_info.item_method = item_method\n item_info.item_unit = unit_info\n item_info.is_calc = str_to_bool(is_calc)\n item_info.is_use = str_to_bool(is_use)\n item_info.save()\n return redirect('/item/item/')\n\n\n<mask token>\n\n\ndef unit(request):\n if not request.session.get('is_login', None):\n return redirect('/item/unit')\n else:\n unit_list = Unit.objects.all()\n return render(request, 'item/unit.html', locals())\n\n\ndef add_unit(request):\n if request.method == 'GET':\n last_unit_info = Unit.objects.last()\n return render(request, 'item/add_unit.html', locals())\n else:\n unit_index = request.POST.get('unit_index')\n unit_name = request.POST.get('unit_name')\n new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name)\n new_unit.save()\n return redirect('/item/unit/')\n\n\ndef edit_unit(request):\n if request.method == 'GET':\n nid = request.GET.get('nid')\n unit_info = Unit.objects.get(id=nid)\n return render(request, 'item/edit_unit.html', locals())\n else:\n nid = request.GET.get('nid')\n unit_index = request.POST.get('unit_index')\n unit_name = request.POST.get('unit_name')\n unit_info = Unit.objects.get(id=nid)\n unit_info.unit_index = unit_index\n unit_info.unit_name = unit_name\n unit_info.save()\n return redirect('/item/unit/')\n\n\ndef del_unit(request):\n nid = request.GET.get('nid')\n unit_info = Unit.objects.filter(id=nid)\n unit_info.delete()\n return redirect('/item/unit/')\n", "step-4": "<mask token>\n\n\ndef str_to_bool(s):\n return True if s.lower() == 'true' else False\n\n\ndef item(request):\n if not request.session.get('is_login', None):\n return redirect('/item/item')\n else:\n item_list = Item.objects.all()\n return render(request, 'item/item.html', locals())\n\n\ndef add_item(request):\n if request.method == 'GET':\n last_item_info = Item.objects.last()\n unit_list = Unit.objects.all()\n return render(request, 'item/add_item.html', locals())\n else:\n item_index = request.POST.get('item_index')\n item_chinese_name = request.POST.get('item_chinese_name')\n item_english_name = request.POST.get('item_english_name')\n item_method = request.POST.get('item_method')\n item_unit = request.POST.get('item_unit')\n is_calc = request.POST.get('is_calc')\n is_use = request.POST.get('is_use')\n unit_info = Unit.objects.get(id=item_unit)\n new_item = Item(item_index=int(item_index), item_chinese_name=\n item_chinese_name, item_english_name=item_english_name,\n 
item_method=item_method, item_unit=unit_info, is_calc=\n str_to_bool(is_calc), is_use=str_to_bool(is_use))\n new_item.save()\n return redirect('/item/item/')\n\n\ndef edit_item(request):\n if request.method == 'GET':\n nid = request.GET.get('nid')\n item_info = Item.objects.get(id=nid)\n unit_list = Unit.objects.all()\n return render(request, 'item/edit_item.html', locals())\n else:\n nid = request.GET.get('nid')\n item_index = request.POST.get('item_index')\n item_chinese_name = request.POST.get('item_chinese_name')\n item_english_name = request.POST.get('item_english_name')\n item_method = request.POST.get('item_method')\n item_unit = request.POST.get('item_unit')\n is_calc = request.POST.get('is_calc')\n is_use = request.POST.get('is_use')\n unit_info = Unit.objects.get(id=item_unit)\n item_info = Item.objects.get(id=nid)\n item_info.item_index = item_index\n item_info.item_chinese_name = item_chinese_name\n item_info.item_english_name = item_english_name\n item_info.item_method = item_method\n item_info.item_unit = unit_info\n item_info.is_calc = str_to_bool(is_calc)\n item_info.is_use = str_to_bool(is_use)\n item_info.save()\n return redirect('/item/item/')\n\n\n<mask token>\n\n\ndef unit(request):\n if not request.session.get('is_login', None):\n return redirect('/item/unit')\n else:\n unit_list = Unit.objects.all()\n return render(request, 'item/unit.html', locals())\n\n\ndef add_unit(request):\n if request.method == 'GET':\n last_unit_info = Unit.objects.last()\n return render(request, 'item/add_unit.html', locals())\n else:\n unit_index = request.POST.get('unit_index')\n unit_name = request.POST.get('unit_name')\n new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name)\n new_unit.save()\n return redirect('/item/unit/')\n\n\ndef edit_unit(request):\n if request.method == 'GET':\n nid = request.GET.get('nid')\n unit_info = Unit.objects.get(id=nid)\n return render(request, 'item/edit_unit.html', locals())\n else:\n nid = request.GET.get('nid')\n unit_index = request.POST.get('unit_index')\n unit_name = request.POST.get('unit_name')\n unit_info = Unit.objects.get(id=nid)\n unit_info.unit_index = unit_index\n unit_info.unit_name = unit_name\n unit_info.save()\n return redirect('/item/unit/')\n\n\ndef del_unit(request):\n nid = request.GET.get('nid')\n unit_info = Unit.objects.filter(id=nid)\n unit_info.delete()\n return redirect('/item/unit/')\n", "step-5": "from django.shortcuts import render, redirect\n\n\n# Create your views here.\nfrom item.models import Item, Unit\n\n\ndef str_to_bool(s):\n return True if s.lower() == 'true' else False\n\n\ndef item(request):\n if not request.session.get('is_login', None):\n return redirect('/item/item')\n else:\n item_list = Item.objects.all()\n return render(request, 'item/item.html', locals())\n\n\ndef add_item(request):\n if request.method == 'GET':\n last_item_info = Item.objects.last()\n unit_list=Unit.objects.all()\n return render(request, 'item/add_item.html', locals())\n else:\n item_index = request.POST.get('item_index')\n item_chinese_name = request.POST.get('item_chinese_name')\n item_english_name = request.POST.get('item_english_name')\n item_method = request.POST.get('item_method')\n item_unit = request.POST.get('item_unit')\n is_calc = request.POST.get('is_calc')\n is_use = request.POST.get('is_use')\n\n unit_info=Unit.objects.get(id=item_unit)\n new_item = Item(item_index=int(item_index), item_chinese_name=item_chinese_name,\n 
item_english_name=item_english_name,item_method=item_method,item_unit=unit_info,is_calc=str_to_bool(is_calc),\n is_use=str_to_bool(is_use))\n new_item.save()\n return redirect('/item/item/')\n\n\ndef edit_item(request):\n if request.method == 'GET':\n nid = request.GET.get('nid')\n item_info = Item.objects.get(id=nid)\n unit_list = Unit.objects.all()\n return render(request, 'item/edit_item.html', locals())\n else:\n nid = request.GET.get('nid')\n item_index = request.POST.get('item_index')\n item_chinese_name = request.POST.get('item_chinese_name')\n item_english_name = request.POST.get('item_english_name')\n item_method = request.POST.get('item_method')\n item_unit = request.POST.get('item_unit')\n is_calc = request.POST.get('is_calc')\n is_use = request.POST.get('is_use')\n\n unit_info = Unit.objects.get(id=item_unit)\n item_info = Item.objects.get(id=nid)\n item_info.item_index = item_index\n item_info.item_chinese_name = item_chinese_name\n item_info.item_english_name = item_english_name\n item_info.item_method = item_method\n item_info.item_unit = unit_info\n item_info.is_calc = str_to_bool(is_calc)\n\n item_info.is_use = str_to_bool(is_use)\n item_info.save()\n return redirect('/item/item/')\n\n\ndef del_item(request):\n nid = request.GET.get('nid')\n item_info = Unit.objects.filter(id=nid)\n item_info.delete()\n return redirect('/item/item/')\n\n\ndef unit(request):\n if not request.session.get('is_login', None):\n return redirect('/item/unit')\n else:\n unit_list = Unit.objects.all()\n return render(request, 'item/unit.html', locals())\n\n\ndef add_unit(request):\n if request.method == 'GET':\n last_unit_info = Unit.objects.last()\n return render(request, 'item/add_unit.html', locals())\n else:\n unit_index = request.POST.get('unit_index')\n unit_name = request.POST.get('unit_name')\n new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name,)\n new_unit.save()\n return redirect('/item/unit/')\n\n\ndef edit_unit(request):\n if request.method == 'GET':\n nid = request.GET.get('nid')\n unit_info = Unit.objects.get(id=nid)\n return render(request, 'item/edit_unit.html', locals())\n else:\n nid = request.GET.get('nid')\n unit_index = request.POST.get('unit_index')\n unit_name = request.POST.get('unit_name')\n\n unit_info = Unit.objects.get(id=nid)\n unit_info.unit_index = unit_index\n unit_info.unit_name = unit_name\n\n unit_info.save()\n return redirect('/item/unit/')\n\n\ndef del_unit(request):\n nid = request.GET.get('nid')\n unit_info = Unit.objects.filter(id=nid)\n unit_info.delete()\n return redirect('/item/unit/')", "step-ids": [ 4, 5, 6, 8, 11 ] }
[ 4, 5, 6, 8, 11 ]
<|reserved_special_token_0|>


def displayEmployees():
    fh = open('Employee.txt', 'r')
    for line in fh:
        emp = line.split(',')
        print('\nEmployee No:', emp[0], '\nEmployee Name:', emp[1],
            '\nBasic:', emp[2], '\nHRA:', emp[3], '\nDA:', emp[4],
            '\nGross Salary:', emp[5], '\nIncome Tax:', emp[6],
            '\nNet Salary:', emp[7])
    fh.close()


<|reserved_special_token_0|>


def deleteEmployee(eno):
    count = 0
    fh = open('Employee.txt', 'r')
    ftemp = open('Temp.txt', 'w')
    for line in fh:
        emp = line.split(',')
        if int(emp[0]) != eno:
            ftemp.write(line)
        else:
            count += 1
            continue
    fh.close()
    ftemp.close()
    if count == 0:
        print('Employee record not found')
        os.remove('Temp.txt')
    else:
        print('No of Employee records deleted:', count)
        os.remove('Employee.txt')
        os.rename('Temp.txt', 'Employee.txt')


def modifyEmployee(eno):
    count = 0
    fh = open('Employee.txt', 'r')
    ftemp = open('Temp.txt', 'w')
    for line in fh:
        emp = line.split(',')
        if int(emp[0]) == eno:
            print('Existing Employee record:')
            print('\nEmployee No:', emp[0], '\nEmployee Name:', emp[1],
                '\nBasic:', emp[2], '\nHRA:', emp[3], '\nDA:', emp[4],
                '\nGross Salary:', emp[5], '\nIncome Tax:', emp[6],
                '\nNet Salary:', emp[7])
            print('Enter New Employee details')
            emp[1] = input('Enter Employee Name:')
            emp[2] = int(input('Enter Employee Basic Salary:'))
            emp[3] = emp[2] * 0.1
            emp[4] = emp[2] * 0.73
            emp[5] = emp[2] + emp[3] + emp[4]
            emp[6] = emp[5] * 0.3
            emp[7] = emp[5] - emp[6]
            print(emp[0])
            line = emp[0] + ',' + emp[1] + ',' + str(emp[2]) + ',' + str(emp[3]
                ) + ',' + str(emp[4]) + ',' + str(emp[5]) + ',' + str(emp[6]
                ) + ',' + str(emp[7]) + '\n'
            count += 1
        ftemp.write(line)
    fh.close()
    ftemp.close()
    if count == 0:
        print('Employee record not found')
        os.remove('Temp.txt')
    else:
        print('No of Employee records modified:', count)
        os.remove('Employee.txt')
        os.rename('Temp.txt', 'Employee.txt')


<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


def appendEmployee(eno, name, basic):
    fh = open('Employee.txt', 'a')
    hra = basic * 0.1
    da = basic * 0.73
    gross = basic + hra + da
    tax = gross * 0.3
    net = gross - tax
    line = str(eno) + ',' + name + ',' + str(basic) + ',' + str(hra
        ) + ',' + str(da) + ',' + str(gross) + ',' + str(tax) + ',' + str(net
        ) + '\n'
    fh.write(line)
    fh.close()


def displayEmployees():
    fh = open('Employee.txt', 'r')
    for line in fh:
        emp = line.split(',')
        print('\nEmployee No:', emp[0], '\nEmployee Name:', emp[1],
            '\nBasic:', emp[2], '\nHRA:', emp[3], '\nDA:', emp[4],
            '\nGross Salary:', emp[5], '\nIncome Tax:', emp[6],
            '\nNet Salary:', emp[7])
    fh.close()


def searchEmployee(eno):
    fh = open('Employee.txt', 'r')
    flag = False
    for line in fh:
        emp = line.split(',')
        if int(emp[0]) == eno:
            print('\nEmployee No:', emp[0], '\nEmployee Name:', emp[1],
                '\nBasic:', emp[2], '\nHRA:', emp[3], '\nDA:', emp[4],
                '\nGross Salary:', emp[5], '\nIncome Tax:', emp[6],
                '\nNet Salary:', emp[7])
            flag = True
            break
    if flag == False:
        print('Employee record not found')
    fh.close()


def deleteEmployee(eno):
    count = 0
    fh = open('Employee.txt', 'r')
    ftemp = open('Temp.txt', 'w')
    for line in fh:
        emp = line.split(',')
        if int(emp[0]) != eno:
            ftemp.write(line)
        else:
            count += 1
            continue
    fh.close()
    ftemp.close()
    if count == 0:
        print('Employee record not found')
        os.remove('Temp.txt')
    else:
        print('No of Employee records deleted:', count)
        os.remove('Employee.txt')
        os.rename('Temp.txt', 'Employee.txt')


def modifyEmployee(eno):
    count = 0
    fh = open('Employee.txt', 'r')
    ftemp = open('Temp.txt', 'w')
    for line in fh:
        emp = line.split(',')
        if int(emp[0]) == eno:
            print('Existing Employee record:')
            print('\nEmployee No:', emp[0], '\nEmployee Name:', emp[1],
                '\nBasic:', emp[2], '\nHRA:', emp[3], '\nDA:', emp[4],
                '\nGross Salary:', emp[5], '\nIncome Tax:', emp[6],
                '\nNet Salary:', emp[7])
            print('Enter New Employee details')
            emp[1] = input('Enter Employee Name:')
            emp[2] = int(input('Enter Employee Basic Salary:'))
            emp[3] = emp[2] * 0.1
            emp[4] = emp[2] * 0.73
            emp[5] = emp[2] + emp[3] + emp[4]
            emp[6] = emp[5] * 0.3
            emp[7] = emp[5] - emp[6]
            print(emp[0])
            line = emp[0] + ',' + emp[1] + ',' + str(emp[2]) + ',' + str(emp[3]
                ) + ',' + str(emp[4]) + ',' + str(emp[5]) + ',' + str(emp[6]
                ) + ',' + str(emp[7]) + '\n'
            count += 1
        ftemp.write(line)
    fh.close()
    ftemp.close()
    if count == 0:
        print('Employee record not found')
        os.remove('Temp.txt')
    else:
        print('No of Employee records modified:', count)
        os.remove('Employee.txt')
        os.rename('Temp.txt', 'Employee.txt')


if __name__ == '__main__':
    while True:
        ch = int(input(
            """1->New Employee 2->Display Employee records 3->Search Employee 4->Delete Employee 5->Modify Employee 6->Exit
"""
            ))
        if ch == 1:
            eno = int(input('Enter Employee No:'))
            name = input('Enter Employee Name:')
            basic = int(input('Enter Employee Basic salary:'))
            appendEmployee(eno, name, basic)
        elif ch == 2:
            displayEmployees()
        elif ch == 3:
            eno = int(input('Enter Employee No to search:'))
            searchEmployee(eno)
        elif ch == 4:
            eno = int(input('Enter Employee No to delete:'))
            deleteEmployee(eno)
        elif ch == 5:
            eno = int(input('Enter Employee No to modify:'))
            modifyEmployee(eno)
        else:
            break
<|reserved_special_token_1|>
import os


def appendEmployee(eno, name, basic):
    fh = open('Employee.txt', 'a')
    hra = basic * 0.1
    da = basic * 0.73
    gross = basic + hra + da
    tax = gross * 0.3
    net = gross - tax
    line = str(eno) + ',' + name + ',' + str(basic) + ',' + str(hra
        ) + ',' + str(da) + ',' + str(gross) + ',' + str(tax) + ',' + str(net
        ) + '\n'
    fh.write(line)
    fh.close()


def displayEmployees():
    fh = open('Employee.txt', 'r')
    for line in fh:
        emp = line.split(',')
        print('\nEmployee No:', emp[0], '\nEmployee Name:', emp[1],
            '\nBasic:', emp[2], '\nHRA:', emp[3], '\nDA:', emp[4],
            '\nGross Salary:', emp[5], '\nIncome Tax:', emp[6],
            '\nNet Salary:', emp[7])
    fh.close()


def searchEmployee(eno):
    fh = open('Employee.txt', 'r')
    flag = False
    for line in fh:
        emp = line.split(',')
        if int(emp[0]) == eno:
            print('\nEmployee No:', emp[0], '\nEmployee Name:', emp[1],
                '\nBasic:', emp[2], '\nHRA:', emp[3], '\nDA:', emp[4],
                '\nGross Salary:', emp[5], '\nIncome Tax:', emp[6],
                '\nNet Salary:', emp[7])
            flag = True
            break
    if flag == False:
        print('Employee record not found')
    fh.close()


def deleteEmployee(eno):
    count = 0
    fh = open('Employee.txt', 'r')
    ftemp = open('Temp.txt', 'w')
    for line in fh:
        emp = line.split(',')
        if int(emp[0]) != eno:
            ftemp.write(line)
        else:
            count += 1
            continue
    fh.close()
    ftemp.close()
    if count == 0:
        print('Employee record not found')
        os.remove('Temp.txt')
    else:
        print('No of Employee records deleted:', count)
        os.remove('Employee.txt')
        os.rename('Temp.txt', 'Employee.txt')


def modifyEmployee(eno):
    count = 0
    fh = open('Employee.txt', 'r')
    ftemp = open('Temp.txt', 'w')
    for line in fh:
        emp = line.split(',')
        if int(emp[0]) == eno:
            print('Existing Employee record:')
            print('\nEmployee No:', emp[0], '\nEmployee Name:', emp[1],
                '\nBasic:', emp[2], '\nHRA:', emp[3], '\nDA:', emp[4],
                '\nGross Salary:', emp[5], '\nIncome Tax:', emp[6],
                '\nNet Salary:', emp[7])
            print('Enter New Employee details')
            emp[1] = input('Enter Employee Name:')
            emp[2] = int(input('Enter Employee Basic Salary:'))
            emp[3] = emp[2] * 0.1
            emp[4] = emp[2] * 0.73
            emp[5] = emp[2] + emp[3] + emp[4]
            emp[6] = emp[5] * 0.3
            emp[7] = emp[5] - emp[6]
            print(emp[0])
            line = emp[0] + ',' + emp[1] + ',' + str(emp[2]) + ',' + str(emp[3]
                ) + ',' + str(emp[4]) + ',' + str(emp[5]) + ',' + str(emp[6]
                ) + ',' + str(emp[7]) + '\n'
            count += 1
        ftemp.write(line)
    fh.close()
    ftemp.close()
    if count == 0:
        print('Employee record not found')
        os.remove('Temp.txt')
    else:
        print('No of Employee records modified:', count)
        os.remove('Employee.txt')
        os.rename('Temp.txt', 'Employee.txt')


if __name__ == '__main__':
    while True:
        ch = int(input(
            """1->New Employee 2->Display Employee records 3->Search Employee 4->Delete Employee 5->Modify Employee 6->Exit
"""
            ))
        if ch == 1:
            eno = int(input('Enter Employee No:'))
            name = input('Enter Employee Name:')
            basic = int(input('Enter Employee Basic salary:'))
            appendEmployee(eno, name, basic)
        elif ch == 2:
            displayEmployees()
        elif ch == 3:
            eno = int(input('Enter Employee No to search:'))
            searchEmployee(eno)
        elif ch == 4:
            eno = int(input('Enter Employee No to delete:'))
            deleteEmployee(eno)
        elif ch == 5:
            eno = int(input('Enter Employee No to modify:'))
            modifyEmployee(eno)
        else:
            break
<|reserved_special_token_1|>
flexible
{ "blob_id": "5b6241907cc97f82d6c6e0a461f4f71a9a567204", "index": 5395, "step-1": "<mask token>\n\n\ndef displayEmployees():\n fh = open('Employee.txt', 'r')\n for line in fh:\n emp = line.split(',')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n fh.close()\n\n\n<mask token>\n\n\ndef deleteEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) != eno:\n ftemp.write(line)\n else:\n count += 1\n continue\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records deleted:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\ndef modifyEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('Existing Employee record:')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n print('Enter New Employee details')\n emp[1] = input('Enter Employee Name:')\n emp[2] = int(input('Enter Employee Basic Salary:'))\n emp[3] = emp[2] * 0.1\n emp[4] = emp[2] * 0.73\n emp[5] = emp[2] + emp[3] + emp[4]\n emp[6] = emp[5] * 0.3\n emp[7] = emp[5] - emp[6]\n print(emp[0])\n line = emp[0] + ',' + emp[1] + ',' + str(emp[2]) + ',' + str(emp[3]\n ) + ',' + str(emp[4]) + ',' + str(emp[5]) + ',' + str(emp[6]\n ) + ',' + str(emp[7]) + '\\n'\n count += 1\n ftemp.write(line)\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records modified:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef appendEmployee(eno, name, basic):\n fh = open('Employee.txt', 'a')\n hra = basic * 0.1\n da = basic * 0.73\n gross = basic + hra + da\n tax = gross * 0.3\n net = gross - tax\n line = str(eno) + ',' + name + ',' + str(basic) + ',' + str(hra\n ) + ',' + str(da) + ',' + str(gross) + ',' + str(tax) + ',' + str(net\n ) + '\\n'\n fh.write(line)\n fh.close()\n\n\ndef displayEmployees():\n fh = open('Employee.txt', 'r')\n for line in fh:\n emp = line.split(',')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n fh.close()\n\n\ndef searchEmployee(eno):\n fh = open('Employee.txt', 'r')\n flag = False\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n flag = True\n break\n if flag == False:\n print('Employee record not found')\n fh.close()\n\n\ndef deleteEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) != eno:\n ftemp.write(line)\n else:\n count += 1\n continue\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n 
else:\n print('No of Employee records deleted:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\ndef modifyEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('Existing Employee record:')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n print('Enter New Employee details')\n emp[1] = input('Enter Employee Name:')\n emp[2] = int(input('Enter Employee Basic Salary:'))\n emp[3] = emp[2] * 0.1\n emp[4] = emp[2] * 0.73\n emp[5] = emp[2] + emp[3] + emp[4]\n emp[6] = emp[5] * 0.3\n emp[7] = emp[5] - emp[6]\n print(emp[0])\n line = emp[0] + ',' + emp[1] + ',' + str(emp[2]) + ',' + str(emp[3]\n ) + ',' + str(emp[4]) + ',' + str(emp[5]) + ',' + str(emp[6]\n ) + ',' + str(emp[7]) + '\\n'\n count += 1\n ftemp.write(line)\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records modified:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef appendEmployee(eno, name, basic):\n fh = open('Employee.txt', 'a')\n hra = basic * 0.1\n da = basic * 0.73\n gross = basic + hra + da\n tax = gross * 0.3\n net = gross - tax\n line = str(eno) + ',' + name + ',' + str(basic) + ',' + str(hra\n ) + ',' + str(da) + ',' + str(gross) + ',' + str(tax) + ',' + str(net\n ) + '\\n'\n fh.write(line)\n fh.close()\n\n\ndef displayEmployees():\n fh = open('Employee.txt', 'r')\n for line in fh:\n emp = line.split(',')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n fh.close()\n\n\ndef searchEmployee(eno):\n fh = open('Employee.txt', 'r')\n flag = False\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n flag = True\n break\n if flag == False:\n print('Employee record not found')\n fh.close()\n\n\ndef deleteEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) != eno:\n ftemp.write(line)\n else:\n count += 1\n continue\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records deleted:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\ndef modifyEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('Existing Employee record:')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n print('Enter New Employee details')\n emp[1] = input('Enter Employee Name:')\n emp[2] = int(input('Enter Employee Basic Salary:'))\n emp[3] = emp[2] * 0.1\n emp[4] = emp[2] * 0.73\n emp[5] = emp[2] + emp[3] + emp[4]\n emp[6] = emp[5] * 
0.3\n emp[7] = emp[5] - emp[6]\n print(emp[0])\n line = emp[0] + ',' + emp[1] + ',' + str(emp[2]) + ',' + str(emp[3]\n ) + ',' + str(emp[4]) + ',' + str(emp[5]) + ',' + str(emp[6]\n ) + ',' + str(emp[7]) + '\\n'\n count += 1\n ftemp.write(line)\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records modified:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\nif __name__ == '__main__':\n while True:\n ch = int(input(\n \"\"\"1->New Employee 2->Display Employee records 3->Search Employee 4->Delete Employee 5->Modify Employee 6->Exit\n\"\"\"\n ))\n if ch == 1:\n eno = int(input('Enter Employee No:'))\n name = input('Enter Employee Name:')\n basic = int(input('Enter Employee Basic salary:'))\n appendEmployee(eno, name, basic)\n elif ch == 2:\n displayEmployees()\n elif ch == 3:\n eno = int(input('Enter Employee No to search:'))\n searchEmployee(eno)\n elif ch == 4:\n eno = int(input('Enter Employee No to delete:'))\n deleteEmployee(eno)\n elif ch == 5:\n eno = int(input('Enter Employee No to modify:'))\n modifyEmployee(eno)\n else:\n break\n", "step-4": "import os\n\n\ndef appendEmployee(eno, name, basic):\n fh = open('Employee.txt', 'a')\n hra = basic * 0.1\n da = basic * 0.73\n gross = basic + hra + da\n tax = gross * 0.3\n net = gross - tax\n line = str(eno) + ',' + name + ',' + str(basic) + ',' + str(hra\n ) + ',' + str(da) + ',' + str(gross) + ',' + str(tax) + ',' + str(net\n ) + '\\n'\n fh.write(line)\n fh.close()\n\n\ndef displayEmployees():\n fh = open('Employee.txt', 'r')\n for line in fh:\n emp = line.split(',')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n fh.close()\n\n\ndef searchEmployee(eno):\n fh = open('Employee.txt', 'r')\n flag = False\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n flag = True\n break\n if flag == False:\n print('Employee record not found')\n fh.close()\n\n\ndef deleteEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) != eno:\n ftemp.write(line)\n else:\n count += 1\n continue\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records deleted:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\ndef modifyEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('Existing Employee record:')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n print('Enter New Employee details')\n emp[1] = input('Enter Employee Name:')\n emp[2] = int(input('Enter Employee Basic Salary:'))\n emp[3] = emp[2] * 0.1\n emp[4] = emp[2] * 0.73\n emp[5] = emp[2] + emp[3] + emp[4]\n emp[6] = emp[5] * 0.3\n emp[7] = emp[5] - emp[6]\n print(emp[0])\n line = emp[0] + ',' + emp[1] + ',' + str(emp[2]) + ',' + 
str(emp[3]\n ) + ',' + str(emp[4]) + ',' + str(emp[5]) + ',' + str(emp[6]\n ) + ',' + str(emp[7]) + '\\n'\n count += 1\n ftemp.write(line)\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records modified:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\nif __name__ == '__main__':\n while True:\n ch = int(input(\n \"\"\"1->New Employee 2->Display Employee records 3->Search Employee 4->Delete Employee 5->Modify Employee 6->Exit\n\"\"\"\n ))\n if ch == 1:\n eno = int(input('Enter Employee No:'))\n name = input('Enter Employee Name:')\n basic = int(input('Enter Employee Basic salary:'))\n appendEmployee(eno, name, basic)\n elif ch == 2:\n displayEmployees()\n elif ch == 3:\n eno = int(input('Enter Employee No to search:'))\n searchEmployee(eno)\n elif ch == 4:\n eno = int(input('Enter Employee No to delete:'))\n deleteEmployee(eno)\n elif ch == 5:\n eno = int(input('Enter Employee No to modify:'))\n modifyEmployee(eno)\n else:\n break\n", "step-5": "#Program to create and store Employee Salary Records in a file\n\nimport os\n\ndef appendEmployee(eno,name,basic):\n\tfh=open(\"Employee.txt\",\"a\")\n\thra=basic*0.10\n\tda=basic*0.73\n\tgross=basic+hra+da\n\ttax=gross*0.3\n\tnet=gross-tax\n\tline=str(eno)+\",\"+name+\",\"+str(basic)+\",\"+str(hra)+\",\"+str(da)+\",\"+str(gross)+\",\"+str(tax)+\",\"+str(net)+\"\\n\"\n\tfh.write(line)\n\tfh.close()\n\n\n\ndef displayEmployees():\n\tfh=open(\"Employee.txt\",\"r\")\n\tfor line in fh:\n\t\temp=line.split(\",\")\n\t\tprint(\"\\nEmployee No:\",emp[0],\"\\nEmployee Name:\",emp[1],\"\\nBasic:\",emp[2],\"\\nHRA:\",emp[3],\"\\nDA:\",emp[4],\"\\nGross Salary:\",emp[5],\"\\nIncome Tax:\",emp[6],\"\\nNet Salary:\",emp[7])\n\tfh.close()\n\n\n\ndef searchEmployee(eno):\n\tfh=open(\"Employee.txt\",\"r\")\n\tflag=False\n\tfor line in fh:\n\t\temp=line.split(\",\")\n\t\tif(int(emp[0])==eno):\n\t\t\tprint(\"\\nEmployee No:\",emp[0],\"\\nEmployee Name:\",emp[1],\"\\nBasic:\",emp[2],\"\\nHRA:\",emp[3],\"\\nDA:\",emp[4],\"\\nGross Salary:\",emp[5],\"\\nIncome Tax:\",emp[6],\"\\nNet Salary:\",emp[7])\n\t\t\tflag=True\n\t\t\tbreak\n\tif flag==False:\n\t\tprint(\"Employee record not found\")\n\tfh.close()\n\n\n\ndef deleteEmployee(eno):\n\tcount=0\n\tfh=open(\"Employee.txt\",\"r\")\n\tftemp=open(\"Temp.txt\",\"w\")\n\tfor line in fh:\n\t\temp=line.split(\",\")\n\t\tif(int(emp[0])!=eno):\n\t\t\tftemp.write(line)\n\t\telse:\n\t\t\tcount+=1\n\t\t\tcontinue\n\tfh.close()\n\tftemp.close()\n\n\tif count==0:\n\t\tprint(\"Employee record not found\")\n\t\tos.remove(\"Temp.txt\")\n\telse:\n\t\tprint(\"No of Employee records deleted:\",count)\n\t\tos.remove(\"Employee.txt\")\n\t\tos.rename(\"Temp.txt\",\"Employee.txt\")\n\n\n\n\ndef modifyEmployee(eno):\n\tcount=0\n\tfh=open(\"Employee.txt\",\"r\")\n\tftemp=open(\"Temp.txt\",\"w\")\n\tfor line in fh:\n\t\temp=line.split(\",\")\n\t\tif(int(emp[0])==eno):\n\t\t\tprint(\"Existing Employee record:\")\n\t\t\tprint(\"\\nEmployee No:\",emp[0],\"\\nEmployee Name:\",emp[1],\"\\nBasic:\",emp[2],\"\\nHRA:\",emp[3],\"\\nDA:\",emp[4],\"\\nGross Salary:\",emp[5],\"\\nIncome Tax:\",emp[6],\"\\nNet Salary:\",emp[7])\n\t\t\tprint(\"Enter New Employee details\")\n\t\t\temp[1]=input(\"Enter Employee Name:\")\n\t\t\temp[2]=int(input(\"Enter Employee Basic 
Salary:\"))\n\t\t\temp[3]=emp[2]*0.10\t\t\t\t\t\t\t\t\t\t#HRA\n\t\t\temp[4]=emp[2]*0.73\t\t\t\t\t\t\t\t\t\t#DA\n\t\t\temp[5]=emp[2]+emp[3]+emp[4]\t\t\t\t\t\t\t\t#Gross\n\t\t\temp[6]=emp[5]*0.3\t\t\t\t\t\t\t\t\t\t#Tax\n\t\t\temp[7]=emp[5]-emp[6]\n\t\t\tprint(emp[0])\t\t\t\t\t\t\t\t\t#Net\n\t\t\tline=emp[0]+\",\"+emp[1]+\",\"+str(emp[2])+\",\"+str(emp[3])+\",\"+str(emp[4])+\",\"+str(emp[5])+\",\"+str(emp[6])+\",\"+str(emp[7])+\"\\n\"\n\t\t\tcount+=1\n\t\tftemp.write(line)\n\n\tfh.close()\n\tftemp.close()\n\n\tif count==0:\n\t\tprint(\"Employee record not found\")\n\t\tos.remove(\"Temp.txt\")\n\telse:\n\t\tprint(\"No of Employee records modified:\",count)\n\t\tos.remove(\"Employee.txt\")\n\t\tos.rename(\"Temp.txt\",\"Employee.txt\")\n\n\n\nif __name__==\"__main__\":\n\twhile True:\n\t\tch=int(input(\"1->New Employee 2->Display Employee records 3->Search Employee 4->Delete Employee 5->Modify Employee 6->Exit\\n\"))\n\t\tif ch==1:\n\t\t\teno=int(input(\"Enter Employee No:\"))\n\t\t\tname=input(\"Enter Employee Name:\")\n\t\t\tbasic=int(input(\"Enter Employee Basic salary:\"))\n\t\t\tappendEmployee(eno,name,basic)\n\n\t\telif ch==2:\n\t\t\tdisplayEmployees()\n\n\t\telif ch==3:\n\t\t\teno=int(input(\"Enter Employee No to search:\"))\n\t\t\tsearchEmployee(eno)\n\n\t\telif ch==4:\n\t\t\teno=int(input(\"Enter Employee No to delete:\"))\n\t\t\tdeleteEmployee(eno)\n\n\t\telif ch==5:\n\t\t\teno=int(input(\"Enter Employee No to modify:\"))\n\t\t\tmodifyEmployee(eno)\n\n\t\telse:\n\t\t\tbreak\n\n", "step-ids": [ 3, 5, 6, 7, 8 ] }
[ 3, 5, 6, 7, 8 ]
<|reserved_special_token_0|>


class Connector:
    <|reserved_special_token_0|>

    @abc.abstractmethod
    def save(self, item):
        pass

    @abc.abstractmethod
    def load_all(self):
        pass

    @abc.abstractmethod
    def load_by_id(self, id):
        pass
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class Connector:
    <|reserved_special_token_0|>

    @abc.abstractmethod
    def save(self, item):
        pass

    @abc.abstractmethod
    def load_all(self):
        pass

    @abc.abstractmethod
    def load_by_id(self, id):
        pass
    <|reserved_special_token_0|>

    @abc.abstractmethod
    def get_last_id(self):
        pass
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class Connector:
    <|reserved_special_token_0|>

    @abc.abstractmethod
    def save(self, item):
        pass

    @abc.abstractmethod
    def load_all(self):
        pass

    @abc.abstractmethod
    def load_by_id(self, id):
        pass

    @abc.abstractmethod
    def update_item(self, item):
        pass

    @abc.abstractmethod
    def get_last_id(self):
        pass

    @abc.abstractmethod
    def get_done_items(self):
        pass
    <|reserved_special_token_0|>
<|reserved_special_token_1|>
flexible
{ "blob_id": "ac46aa6f8f4f01b6f3c48532533b9dd41a8a1c1c", "index": 7007, "step-1": "<mask token>\n\n\nclass Connector:\n <mask token>\n\n @abc.abstractmethod\n def save(self, item):\n pass\n\n @abc.abstractmethod\n def load_all(self):\n pass\n\n @abc.abstractmethod\n def load_by_id(self, id):\n pass\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass Connector:\n <mask token>\n\n @abc.abstractmethod\n def save(self, item):\n pass\n\n @abc.abstractmethod\n def load_all(self):\n pass\n\n @abc.abstractmethod\n def load_by_id(self, id):\n pass\n <mask token>\n\n @abc.abstractmethod\n def get_last_id(self):\n pass\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Connector:\n <mask token>\n\n @abc.abstractmethod\n def save(self, item):\n pass\n\n @abc.abstractmethod\n def load_all(self):\n pass\n\n @abc.abstractmethod\n def load_by_id(self, id):\n pass\n\n @abc.abstractmethod\n def update_item(self, item):\n pass\n\n @abc.abstractmethod\n def get_last_id(self):\n pass\n\n @abc.abstractmethod\n def get_done_items(self):\n pass\n <mask token>\n", "step-4": "<mask token>\n\n\nclass Connector:\n \"\"\"@abc.abstractmethod\n def connect(self):\n pass\n \"\"\"\n\n @abc.abstractmethod\n def save(self, item):\n pass\n\n @abc.abstractmethod\n def load_all(self):\n pass\n\n @abc.abstractmethod\n def load_by_id(self, id):\n pass\n\n @abc.abstractmethod\n def update_item(self, item):\n pass\n\n @abc.abstractmethod\n def get_last_id(self):\n pass\n\n @abc.abstractmethod\n def get_done_items(self):\n pass\n \"\"\"@abc.abstractmethod\n def close(self):\n pass\"\"\"\n", "step-5": "import abc\r\n\r\n\r\nclass Connector:\r\n\r\n\r\n \"\"\"@abc.abstractmethod\r\n def connect(self):\r\n pass\r\n \"\"\"\r\n \r\n @abc.abstractmethod\r\n def save(self, item):\r\n pass\r\n \r\n\r\n @abc.abstractmethod\r\n def load_all(self):\r\n pass\r\n \r\n \r\n @abc.abstractmethod\r\n def load_by_id(self, id):\r\n pass\r\n\r\n \r\n @abc.abstractmethod\r\n def update_item(self, item):\r\n pass\r\n\r\n\r\n @abc.abstractmethod\r\n def get_last_id(self):\r\n pass\r\n\r\n\r\n @abc.abstractmethod\r\n def get_done_items(self):\r\n pass \r\n \r\n \r\n \"\"\"@abc.abstractmethod\r\n def close(self):\r\n pass\"\"\"\r\n\r\n \r\n\r\n", "step-ids": [ 4, 5, 7, 8, 10 ] }
[ 4, 5, 7, 8, 10 ]
<|reserved_special_token_0|>


class UserData(models.Model):
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>


    class Meta:
        verbose_name = 'Данные пользователя'
        verbose_name_plural = 'Данные пользователей'


class UserProfile(models.Model):
    user_data = models.OneToOneField(UserData, on_delete=models.CASCADE)
    first_name = models.CharField(max_length=24)
    last_name = models.CharField(max_length=24)

    def __str__(self):
        return 'Login: {}, Email: {}.'.format(self.user_data.login, self.
            user_data.email)


    class Meta:
        verbose_name = 'Профиль пользователя'
        verbose_name_plural = 'Профили пользователей'
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class UserData(models.Model):
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>

    def __str__(self):
        return 'Login: {}, Email: {}, Data: {}.'.format(self.login, self.
            email, self.created_data)


    class Meta:
        verbose_name = 'Данные пользователя'
        verbose_name_plural = 'Данные пользователей'


class UserProfile(models.Model):
    user_data = models.OneToOneField(UserData, on_delete=models.CASCADE)
    first_name = models.CharField(max_length=24)
    last_name = models.CharField(max_length=24)

    def __str__(self):
        return 'Login: {}, Email: {}.'.format(self.user_data.login, self.
            user_data.email)


    class Meta:
        verbose_name = 'Профиль пользователя'
        verbose_name_plural = 'Профили пользователей'
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class UserData(models.Model):
    username = models.CharField(max_length=24)
    email = models.EmailField(max_length=32, blank=True, null=True)
    password = models.CharField(max_length=32)
    created_data = models.DateTimeField()
    email_is_confirm = models.CharField(max_length=20, blank=True, null=True)

    def __str__(self):
        return 'Login: {}, Email: {}, Data: {}.'.format(self.login, self.
            email, self.created_data)


    class Meta:
        verbose_name = 'Данные пользователя'
        verbose_name_plural = 'Данные пользователей'


class UserProfile(models.Model):
    user_data = models.OneToOneField(UserData, on_delete=models.CASCADE)
    first_name = models.CharField(max_length=24)
    last_name = models.CharField(max_length=24)

    def __str__(self):
        return 'Login: {}, Email: {}.'.format(self.user_data.login, self.
            user_data.email)


    class Meta:
        verbose_name = 'Профиль пользователя'
        verbose_name_plural = 'Профили пользователей'
<|reserved_special_token_1|>
from django.db import models


class UserData(models.Model):
    username = models.CharField(max_length=24)
    email = models.EmailField(max_length=32, blank=True, null=True)
    password = models.CharField(max_length=32)
    created_data = models.DateTimeField()
    email_is_confirm = models.CharField(max_length=20, blank=True, null=True)

    def __str__(self):
        return 'Login: {}, Email: {}, Data: {}.'.format(self.login, self.
            email, self.created_data)


    class Meta:
        verbose_name = 'Данные пользователя'
        verbose_name_plural = 'Данные пользователей'


class UserProfile(models.Model):
    user_data = models.OneToOneField(UserData, on_delete=models.CASCADE)
    first_name = models.CharField(max_length=24)
    last_name = models.CharField(max_length=24)

    def __str__(self):
        return 'Login: {}, Email: {}.'.format(self.user_data.login, self.
            user_data.email)


    class Meta:
        verbose_name = 'Профиль пользователя'
        verbose_name_plural = 'Профили пользователей'
<|reserved_special_token_1|>
from django.db import models


class UserData(models.Model):
    username = models.CharField(max_length=24)
    email = models.EmailField(max_length=32, blank=True, null=True)
    password = models.CharField(max_length=32)
    created_data = models.DateTimeField()
    email_is_confirm = models.CharField(max_length=20, blank=True, null=True)

    def __str__(self):
        return "Login: {}, Email: {}, Data: {}.".format(self.login, self.email, self.created_data)

    class Meta:
        verbose_name = "Данные пользователя"
        verbose_name_plural = "Данные пользователей"


class UserProfile(models.Model):
    user_data = models.OneToOneField(UserData, on_delete=models.CASCADE)
    first_name = models.CharField(max_length=24)
    last_name = models.CharField(max_length=24)

    def __str__(self):
        return "Login: {}, Email: {}.".format(self.user_data.login, self.user_data.email)

    class Meta:
        verbose_name = "Профиль пользователя"
        verbose_name_plural = "Профили пользователей"
flexible
{ "blob_id": "8f57e120a1a84eb0b9918128580c152aabc6a724", "index": 6778, "step-1": "<mask token>\n\n\nclass UserData(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n verbose_name = 'Данные пользователя'\n verbose_name_plural = 'Данные пользователей'\n\n\nclass UserProfile(models.Model):\n user_data = models.OneToOneField(UserData, on_delete=models.CASCADE)\n first_name = models.CharField(max_length=24)\n last_name = models.CharField(max_length=24)\n\n def __str__(self):\n return 'Login: {}, Email: {}.'.format(self.user_data.login, self.\n user_data.email)\n\n\n class Meta:\n verbose_name = 'Профиль пользователя'\n verbose_name_plural = 'Профили пользователей'\n", "step-2": "<mask token>\n\n\nclass UserData(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return 'Login: {}, Email: {}, Data: {}.'.format(self.login, self.\n email, self.created_data)\n\n\n class Meta:\n verbose_name = 'Данные пользователя'\n verbose_name_plural = 'Данные пользователей'\n\n\nclass UserProfile(models.Model):\n user_data = models.OneToOneField(UserData, on_delete=models.CASCADE)\n first_name = models.CharField(max_length=24)\n last_name = models.CharField(max_length=24)\n\n def __str__(self):\n return 'Login: {}, Email: {}.'.format(self.user_data.login, self.\n user_data.email)\n\n\n class Meta:\n verbose_name = 'Профиль пользователя'\n verbose_name_plural = 'Профили пользователей'\n", "step-3": "<mask token>\n\n\nclass UserData(models.Model):\n username = models.CharField(max_length=24)\n email = models.EmailField(max_length=32, blank=True, null=True)\n password = models.CharField(max_length=32)\n created_data = models.DateTimeField()\n email_is_confirm = models.CharField(max_length=20, blank=True, null=True)\n\n def __str__(self):\n return 'Login: {}, Email: {}, Data: {}.'.format(self.login, self.\n email, self.created_data)\n\n\n class Meta:\n verbose_name = 'Данные пользователя'\n verbose_name_plural = 'Данные пользователей'\n\n\nclass UserProfile(models.Model):\n user_data = models.OneToOneField(UserData, on_delete=models.CASCADE)\n first_name = models.CharField(max_length=24)\n last_name = models.CharField(max_length=24)\n\n def __str__(self):\n return 'Login: {}, Email: {}.'.format(self.user_data.login, self.\n user_data.email)\n\n\n class Meta:\n verbose_name = 'Профиль пользователя'\n verbose_name_plural = 'Профили пользователей'\n", "step-4": "from django.db import models\n\n\nclass UserData(models.Model):\n username = models.CharField(max_length=24)\n email = models.EmailField(max_length=32, blank=True, null=True)\n password = models.CharField(max_length=32)\n created_data = models.DateTimeField()\n email_is_confirm = models.CharField(max_length=20, blank=True, null=True)\n\n def __str__(self):\n return 'Login: {}, Email: {}, Data: {}.'.format(self.login, self.\n email, self.created_data)\n\n\n class Meta:\n verbose_name = 'Данные пользователя'\n verbose_name_plural = 'Данные пользователей'\n\n\nclass UserProfile(models.Model):\n user_data = models.OneToOneField(UserData, on_delete=models.CASCADE)\n first_name = models.CharField(max_length=24)\n last_name = models.CharField(max_length=24)\n\n def __str__(self):\n return 'Login: {}, Email: {}.'.format(self.user_data.login, self.\n user_data.email)\n\n\n class Meta:\n verbose_name = 'Профиль пользователя'\n verbose_name_plural = 'Профили пользователей'\n", "step-5": "from django.db import models\n\n\nclass 
UserData(models.Model):\n username = models.CharField(max_length=24)\n email = models.EmailField(max_length=32, blank=True, null=True)\n password = models.CharField(max_length=32)\n created_data = models.DateTimeField()\n email_is_confirm = models.CharField(max_length=20, blank=True, null=True)\n\n def __str__(self):\n return \"Login: {}, Email: {}, Data: {}.\".format(self.login, self.email, self.created_data)\n\n class Meta:\n verbose_name = \"Данные пользователя\"\n verbose_name_plural = \"Данные пользователей\"\n\n\nclass UserProfile(models.Model):\n user_data = models.OneToOneField(UserData, on_delete=models.CASCADE)\n first_name = models.CharField(max_length=24)\n last_name = models.CharField(max_length=24)\n\n def __str__(self):\n return \"Login: {}, Email: {}.\".format(self.user_data.login, self.user_data.email)\n\n class Meta:\n verbose_name = \"Профиль пользователя\"\n verbose_name_plural = \"Профили пользователей\"\n", "step-ids": [ 4, 5, 6, 7, 8 ] }
[ 4, 5, 6, 7, 8 ]
#!/usr/bin/env python3
import argparse
import os
import sys,shutil
from shutil import make_archive
import pathlib
from phpManager import execute,execute_outputfile
from datetime import date,datetime
import re
import pymysql
import tarfile

def append_log(log,message):
    f = open(log, "a+")
    today = datetime.now()
    f.write("%s %s \n" % (today.strftime("%Y-%m-%d %H:%M:%S"), message))
    f.close()

def get_root_pass():
    with open("/root/.my.cnf") as fp:
        lines = fp.read().splitlines()
    for line in lines:
        grep = re.findall(r'password', line)
        if grep:
            pwrd = line.split('"')[1]
    return pwrd

def get_db_name(argv):
    try:
        pwrd = get_root_pass()
        db = pymysql.connect("localhost","root",pwrd,"secure_vps")
        cursor = db.cursor()
        cursor.execute("select id,db_name from provision where provision_name='%s'" % argv)
        data = cursor.fetchone()
        db.close()
        return data
    except pymysql.err.OperationalError as err:
        print (' An error has occurred \n', err)
    except pymysql.err.InternalError as err:
        print (' An error has occurred \n', err)

def backup_db(argv):
    data = get_db_name(argv)
    db_name = data[1]
    try:
        sqldir = '/home/kusanagi/'+argv+'/sql_backup/'
        p = pathlib.Path(sqldir)
        if not p.exists():
            p.mkdir(mode=0o755, parents=True, exist_ok=True)
            shutil.chown(sqldir,'kusanagi','kusanagi')
    except BaseException as error:
        print(error)
    pwrd = get_root_pass()
    log = '/home/kusanagi/'+argv+'/log/backup.log'
    mess = 'Backed up database '+db_name
    append_log(log,mess)
    cmd = 'mysqldump --single-transaction -p'+pwrd+' --databases '+db_name+' | gzip > '+sqldir+db_name+'.sql.gz'
    execute_outputfile(cmd,log)

def update_backup_record(argv,backup_type,result):
    pwrd = get_root_pass()
    data = get_db_name(argv)
    provi_id = data[0]
    log = '/home/kusanagi/'+argv+'/log/backup.log'
    db = pymysql.connect("localhost","root",pwrd,"secure_vps")
    cursor = db.cursor()
    cursor.execute("select id from logs where provision_id=%d and status=0 and backup_type=%d" % (provi_id,backup_type))
    res = cursor.fetchone()
    record_id = res[0]
    if result:
        cursor.execute("update logs set status=1,message='Done' where provision_id=%d and id=%d" % (provi_id,record_id))
    else:
        cursor.execute("update logs set status=-1,message='Failed. See %s' where provision_id=%d and id=%d" % (log,provi_id,record_id))
    db.commit()
    db.close()

def compress_provision_dir(argv,chdir=''):
    date = datetime.now()
    today = date.strftime("%Y-%m-%d")
    if chdir:
        tarname = chdir+argv+'.'+today
    else:
        tarname = '/home/kusanagi/backup/'+argv+'.'+today
    source_dir = '/home/kusanagi/'+argv
    shutil.make_archive(tarname,"gztar",source_dir)
    return tarname

def local_backup(argv):
    append_log('/home/kusanagi/'+argv+'/log/backup.log', '--- Local backup')
    backup_db(argv)
    tarname = compress_provision_dir(argv)
    tar_file=pathlib.Path(tarname+'.tar.gz')
    if tar_file.exists():
        update_backup_record(argv,0,1)
    else:
        update_backup_record(argv,0,0)

def check_ssh_conn(argv,remote_user,remote_host,remote_port,remote_pass):
    cmd = 'sshpass -p "'+remote_pass+'" ssh -o StrictHostKeyChecking=no -p '+remote_port+' -q '+remote_user+'@'+remote_host+' exit;echo $?'
    res = execute(cmd)
    log = '/home/kusanagi/'+argv+'/log/backup.log'
    if int(res) == 0:
        #print('Connect OK \n')
        pass
    else:
        append_log(log, 'Remote connection failed. Can not issue remote backup')
        update_backup_record(argv,1,0)
        sys.exit(1)

def remote_backup(argv, remote_user, remote_host, remote_port, remote_pass, remote_dest):
    log = '/home/kusanagi/'+argv+'/log/backup.log'
    append_log(log, '--- Remote backup')
    check_ssh_conn(argv, remote_user, remote_host, remote_port, remote_pass)
    backup_db(argv)
    tarname = compress_provision_dir(argv,'/home/kusanagi/')
    conf_ssh = '/etc/ssh/ssh_config'
    with open(conf_ssh) as fp:
        lines = fp.read().splitlines()
    for line in lines:
        grep = re.findall(remote_host, line)
        if grep:
            break
    if not grep:
        #configure stricthostkey ssh
        f = open(conf_ssh,"a+")
        f.write('Host %s\n\tStrictHostKeyChecking no\n' % remote_host)
        f.close()
    cmd = 'sshpass -p "'+remote_pass+'" rsync --remove-source-files -azhe \'ssh -p'+remote_port+'\' '+tarname+'.tar.gz '+remote_user+'@'+remote_host+':'+remote_dest+' 2>> '+log+' ; echo $?'
    res = execute(cmd)
    if int(res) == 0:
        update_backup_record(argv,1,1)
    else:
        update_backup_record(argv,1,0)

def drive_backup(argv,drive_dir):
    log = '/home/kusanagi/'+argv+'/log/backup.log'
    append_log(log,'--- Backup to Google Drive')
    backup_db(argv)
    tarname = compress_provision_dir(argv,'/home/kusanagi/')
    cmd = 'rclone copy '+tarname+'.tar.gz GGD1:'+drive_dir+ ' 2>> '+log+' ; echo $?'
    res = execute(cmd)
    if int(res) == 0:
        update_backup_record(argv,2,1)
    else:
        update_backup_record(argv,2,0)
    os.remove(tarname+'.tar.gz')

def get_options(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument('mode', type=str, choices=['local', 'remote', 'drive'])
    parser.add_argument('options', nargs=argparse.REMAINDER)
    return parser.parse_args(argv)

def main():
    args=get_options(sys.argv[1:])
    #pwrd = get_root_pass()
    options = ' '.join(map(str, args.options))
    if args.mode == 'local':
        local_backup(*args.options)
    elif args.mode == 'remote':
        remote_backup(*args.options)
    else:
        drive_backup(*args.options)

if __name__ == '__main__':
    main()
normal
{ "blob_id": "e09af436f2fb37d16427aa0b1416d6f2d59ad6c4", "index": 214, "step-1": "<mask token>\n\n\ndef append_log(log, message):\n f = open(log, 'a+')\n today = datetime.now()\n f.write('%s %s \\n' % (today.strftime('%Y-%m-%d %H:%M:%S'), message))\n f.close()\n\n\ndef get_root_pass():\n with open('/root/.my.cnf') as fp:\n lines = fp.read().splitlines()\n for line in lines:\n grep = re.findall('password', line)\n if grep:\n pwrd = line.split('\"')[1]\n return pwrd\n\n\n<mask token>\n\n\ndef backup_db(argv):\n data = get_db_name(argv)\n db_name = data[1]\n try:\n sqldir = '/home/kusanagi/' + argv + '/sql_backup/'\n p = pathlib.Path(sqldir)\n if not p.exists():\n p.mkdir(mode=493, parents=True, exist_ok=True)\n shutil.chown(sqldir, 'kusanagi', 'kusanagi')\n except BaseException as error:\n print(error)\n pwrd = get_root_pass()\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n mess = 'Backed up database ' + db_name\n append_log(log, mess)\n cmd = ('mysqldump --single-transaction -p' + pwrd + ' --databases ' +\n db_name + ' | gzip > ' + sqldir + db_name + '.sql.gz')\n execute_outputfile(cmd, log)\n\n\n<mask token>\n\n\ndef compress_provision_dir(argv, chdir=''):\n date = datetime.now()\n today = date.strftime('%Y-%m-%d')\n if chdir:\n tarname = chdir + argv + '.' + today\n else:\n tarname = '/home/kusanagi/backup/' + argv + '.' + today\n source_dir = '/home/kusanagi/' + argv\n shutil.make_archive(tarname, 'gztar', source_dir)\n return tarname\n\n\ndef local_backup(argv):\n append_log('/home/kusanagi/' + argv + '/log/backup.log', '--- Local backup'\n )\n backup_db(argv)\n tarname = compress_provision_dir(argv)\n tar_file = pathlib.Path(tarname + '.tar.gz')\n if tar_file.exists():\n update_backup_record(argv, 0, 1)\n else:\n update_backup_record(argv, 0, 0)\n\n\ndef check_ssh_conn(argv, remote_user, remote_host, remote_port, remote_pass):\n cmd = ('sshpass -p \"' + remote_pass +\n '\" ssh -o StrictHostKeyChecking=no -p ' + remote_port + ' -q ' +\n remote_user + '@' + remote_host + ' exit;echo $?')\n res = execute(cmd)\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n if int(res) == 0:\n pass\n else:\n append_log(log, 'Remote connection failed. 
Can not issue remote backup'\n )\n update_backup_record(argv, 1, 0)\n sys.exit(1)\n\n\ndef remote_backup(argv, remote_user, remote_host, remote_port, remote_pass,\n remote_dest):\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n append_log(log, '--- Remote backup')\n check_ssh_conn(argv, remote_user, remote_host, remote_port, remote_pass)\n backup_db(argv)\n tarname = compress_provision_dir(argv, '/home/kusanagi/')\n conf_ssh = '/etc/ssh/ssh_config'\n with open(conf_ssh) as fp:\n lines = fp.read().splitlines()\n for line in lines:\n grep = re.findall(remote_host, line)\n if grep:\n break\n if not grep:\n f = open(conf_ssh, 'a+')\n f.write('Host %s\\n\\tStrictHostKeyChecking no\\n' % remote_host)\n f.close()\n cmd = ('sshpass -p \"' + remote_pass +\n '\" rsync --remove-source-files -azhe \\'ssh -p' + remote_port + \"' \" +\n tarname + '.tar.gz ' + remote_user + '@' + remote_host + ':' +\n remote_dest + ' 2>> ' + log + ' ; echo $?')\n res = execute(cmd)\n if int(res) == 0:\n update_backup_record(argv, 1, 1)\n else:\n update_backup_record(argv, 1, 0)\n\n\ndef drive_backup(argv, drive_dir):\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n append_log(log, '--- Backup to Google Drive')\n backup_db(argv)\n tarname = compress_provision_dir(argv, '/home/kusanagi/')\n cmd = ('rclone copy ' + tarname + '.tar.gz GGD1:' + drive_dir + ' 2>> ' +\n log + ' ; echo $?')\n res = execute(cmd)\n if int(res) == 0:\n update_backup_record(argv, 2, 1)\n else:\n update_backup_record(argv, 2, 0)\n os.remove(tarname + '.tar.gz')\n\n\ndef get_options(argv):\n parser = argparse.ArgumentParser()\n parser.add_argument('mode', type=str, choices=['local', 'remote', 'drive'])\n parser.add_argument('options', nargs=argparse.REMAINDER)\n return parser.parse_args(argv)\n\n\ndef main():\n args = get_options(sys.argv[1:])\n options = ' '.join(map(str, args.options))\n if args.mode == 'local':\n local_backup(*args.options)\n elif args.mode == 'remote':\n remote_backup(*args.options)\n else:\n drive_backup(*args.options)\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef append_log(log, message):\n f = open(log, 'a+')\n today = datetime.now()\n f.write('%s %s \\n' % (today.strftime('%Y-%m-%d %H:%M:%S'), message))\n f.close()\n\n\ndef get_root_pass():\n with open('/root/.my.cnf') as fp:\n lines = fp.read().splitlines()\n for line in lines:\n grep = re.findall('password', line)\n if grep:\n pwrd = line.split('\"')[1]\n return pwrd\n\n\ndef get_db_name(argv):\n try:\n pwrd = get_root_pass()\n db = pymysql.connect('localhost', 'root', pwrd, 'secure_vps')\n cursor = db.cursor()\n cursor.execute(\n \"select id,db_name from provision where provision_name='%s'\" % argv\n )\n data = cursor.fetchone()\n db.close()\n return data\n except pymysql.err.OperationalError as err:\n print(' An error has occurred \\n', err)\n except pymysql.err.InternalError as err:\n print(' An error has occurred \\n', err)\n\n\ndef backup_db(argv):\n data = get_db_name(argv)\n db_name = data[1]\n try:\n sqldir = '/home/kusanagi/' + argv + '/sql_backup/'\n p = pathlib.Path(sqldir)\n if not p.exists():\n p.mkdir(mode=493, parents=True, exist_ok=True)\n shutil.chown(sqldir, 'kusanagi', 'kusanagi')\n except BaseException as error:\n print(error)\n pwrd = get_root_pass()\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n mess = 'Backed up database ' + db_name\n append_log(log, mess)\n cmd = ('mysqldump --single-transaction -p' + pwrd + ' --databases ' +\n db_name + ' | gzip > ' + sqldir + db_name + '.sql.gz')\n execute_outputfile(cmd, log)\n\n\n<mask 
token>\n\n\ndef compress_provision_dir(argv, chdir=''):\n date = datetime.now()\n today = date.strftime('%Y-%m-%d')\n if chdir:\n tarname = chdir + argv + '.' + today\n else:\n tarname = '/home/kusanagi/backup/' + argv + '.' + today\n source_dir = '/home/kusanagi/' + argv\n shutil.make_archive(tarname, 'gztar', source_dir)\n return tarname\n\n\ndef local_backup(argv):\n append_log('/home/kusanagi/' + argv + '/log/backup.log', '--- Local backup'\n )\n backup_db(argv)\n tarname = compress_provision_dir(argv)\n tar_file = pathlib.Path(tarname + '.tar.gz')\n if tar_file.exists():\n update_backup_record(argv, 0, 1)\n else:\n update_backup_record(argv, 0, 0)\n\n\ndef check_ssh_conn(argv, remote_user, remote_host, remote_port, remote_pass):\n cmd = ('sshpass -p \"' + remote_pass +\n '\" ssh -o StrictHostKeyChecking=no -p ' + remote_port + ' -q ' +\n remote_user + '@' + remote_host + ' exit;echo $?')\n res = execute(cmd)\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n if int(res) == 0:\n pass\n else:\n append_log(log, 'Remote connection failed. Can not issue remote backup'\n )\n update_backup_record(argv, 1, 0)\n sys.exit(1)\n\n\ndef remote_backup(argv, remote_user, remote_host, remote_port, remote_pass,\n remote_dest):\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n append_log(log, '--- Remote backup')\n check_ssh_conn(argv, remote_user, remote_host, remote_port, remote_pass)\n backup_db(argv)\n tarname = compress_provision_dir(argv, '/home/kusanagi/')\n conf_ssh = '/etc/ssh/ssh_config'\n with open(conf_ssh) as fp:\n lines = fp.read().splitlines()\n for line in lines:\n grep = re.findall(remote_host, line)\n if grep:\n break\n if not grep:\n f = open(conf_ssh, 'a+')\n f.write('Host %s\\n\\tStrictHostKeyChecking no\\n' % remote_host)\n f.close()\n cmd = ('sshpass -p \"' + remote_pass +\n '\" rsync --remove-source-files -azhe \\'ssh -p' + remote_port + \"' \" +\n tarname + '.tar.gz ' + remote_user + '@' + remote_host + ':' +\n remote_dest + ' 2>> ' + log + ' ; echo $?')\n res = execute(cmd)\n if int(res) == 0:\n update_backup_record(argv, 1, 1)\n else:\n update_backup_record(argv, 1, 0)\n\n\ndef drive_backup(argv, drive_dir):\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n append_log(log, '--- Backup to Google Drive')\n backup_db(argv)\n tarname = compress_provision_dir(argv, '/home/kusanagi/')\n cmd = ('rclone copy ' + tarname + '.tar.gz GGD1:' + drive_dir + ' 2>> ' +\n log + ' ; echo $?')\n res = execute(cmd)\n if int(res) == 0:\n update_backup_record(argv, 2, 1)\n else:\n update_backup_record(argv, 2, 0)\n os.remove(tarname + '.tar.gz')\n\n\ndef get_options(argv):\n parser = argparse.ArgumentParser()\n parser.add_argument('mode', type=str, choices=['local', 'remote', 'drive'])\n parser.add_argument('options', nargs=argparse.REMAINDER)\n return parser.parse_args(argv)\n\n\ndef main():\n args = get_options(sys.argv[1:])\n options = ' '.join(map(str, args.options))\n if args.mode == 'local':\n local_backup(*args.options)\n elif args.mode == 'remote':\n remote_backup(*args.options)\n else:\n drive_backup(*args.options)\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef append_log(log, message):\n f = open(log, 'a+')\n today = datetime.now()\n f.write('%s %s \\n' % (today.strftime('%Y-%m-%d %H:%M:%S'), message))\n f.close()\n\n\ndef get_root_pass():\n with open('/root/.my.cnf') as fp:\n lines = fp.read().splitlines()\n for line in lines:\n grep = re.findall('password', line)\n if grep:\n pwrd = line.split('\"')[1]\n return pwrd\n\n\ndef get_db_name(argv):\n try:\n pwrd = 
get_root_pass()\n db = pymysql.connect('localhost', 'root', pwrd, 'secure_vps')\n cursor = db.cursor()\n cursor.execute(\n \"select id,db_name from provision where provision_name='%s'\" % argv\n )\n data = cursor.fetchone()\n db.close()\n return data\n except pymysql.err.OperationalError as err:\n print(' An error has occurred \\n', err)\n except pymysql.err.InternalError as err:\n print(' An error has occurred \\n', err)\n\n\ndef backup_db(argv):\n data = get_db_name(argv)\n db_name = data[1]\n try:\n sqldir = '/home/kusanagi/' + argv + '/sql_backup/'\n p = pathlib.Path(sqldir)\n if not p.exists():\n p.mkdir(mode=493, parents=True, exist_ok=True)\n shutil.chown(sqldir, 'kusanagi', 'kusanagi')\n except BaseException as error:\n print(error)\n pwrd = get_root_pass()\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n mess = 'Backed up database ' + db_name\n append_log(log, mess)\n cmd = ('mysqldump --single-transaction -p' + pwrd + ' --databases ' +\n db_name + ' | gzip > ' + sqldir + db_name + '.sql.gz')\n execute_outputfile(cmd, log)\n\n\ndef update_backup_record(argv, backup_type, result):\n pwrd = get_root_pass()\n data = get_db_name(argv)\n provi_id = data[0]\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n db = pymysql.connect('localhost', 'root', pwrd, 'secure_vps')\n cursor = db.cursor()\n cursor.execute(\n 'select id from logs where provision_id=%d and status=0 and backup_type=%d'\n % (provi_id, backup_type))\n res = cursor.fetchone()\n record_id = res[0]\n if result:\n cursor.execute(\n \"update logs set status=1,message='Done' where provision_id=%d and id=%d\"\n % (provi_id, record_id))\n else:\n cursor.execute(\n \"update logs set status=-1,message='Failed. See %s' where provision_id=%d and id=%d\"\n % (log, provi_id, record_id))\n db.commit()\n db.close()\n\n\ndef compress_provision_dir(argv, chdir=''):\n date = datetime.now()\n today = date.strftime('%Y-%m-%d')\n if chdir:\n tarname = chdir + argv + '.' + today\n else:\n tarname = '/home/kusanagi/backup/' + argv + '.' + today\n source_dir = '/home/kusanagi/' + argv\n shutil.make_archive(tarname, 'gztar', source_dir)\n return tarname\n\n\ndef local_backup(argv):\n append_log('/home/kusanagi/' + argv + '/log/backup.log', '--- Local backup'\n )\n backup_db(argv)\n tarname = compress_provision_dir(argv)\n tar_file = pathlib.Path(tarname + '.tar.gz')\n if tar_file.exists():\n update_backup_record(argv, 0, 1)\n else:\n update_backup_record(argv, 0, 0)\n\n\ndef check_ssh_conn(argv, remote_user, remote_host, remote_port, remote_pass):\n cmd = ('sshpass -p \"' + remote_pass +\n '\" ssh -o StrictHostKeyChecking=no -p ' + remote_port + ' -q ' +\n remote_user + '@' + remote_host + ' exit;echo $?')\n res = execute(cmd)\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n if int(res) == 0:\n pass\n else:\n append_log(log, 'Remote connection failed. 
Can not issue remote backup'\n )\n update_backup_record(argv, 1, 0)\n sys.exit(1)\n\n\ndef remote_backup(argv, remote_user, remote_host, remote_port, remote_pass,\n remote_dest):\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n append_log(log, '--- Remote backup')\n check_ssh_conn(argv, remote_user, remote_host, remote_port, remote_pass)\n backup_db(argv)\n tarname = compress_provision_dir(argv, '/home/kusanagi/')\n conf_ssh = '/etc/ssh/ssh_config'\n with open(conf_ssh) as fp:\n lines = fp.read().splitlines()\n for line in lines:\n grep = re.findall(remote_host, line)\n if grep:\n break\n if not grep:\n f = open(conf_ssh, 'a+')\n f.write('Host %s\\n\\tStrictHostKeyChecking no\\n' % remote_host)\n f.close()\n cmd = ('sshpass -p \"' + remote_pass +\n '\" rsync --remove-source-files -azhe \\'ssh -p' + remote_port + \"' \" +\n tarname + '.tar.gz ' + remote_user + '@' + remote_host + ':' +\n remote_dest + ' 2>> ' + log + ' ; echo $?')\n res = execute(cmd)\n if int(res) == 0:\n update_backup_record(argv, 1, 1)\n else:\n update_backup_record(argv, 1, 0)\n\n\ndef drive_backup(argv, drive_dir):\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n append_log(log, '--- Backup to Google Drive')\n backup_db(argv)\n tarname = compress_provision_dir(argv, '/home/kusanagi/')\n cmd = ('rclone copy ' + tarname + '.tar.gz GGD1:' + drive_dir + ' 2>> ' +\n log + ' ; echo $?')\n res = execute(cmd)\n if int(res) == 0:\n update_backup_record(argv, 2, 1)\n else:\n update_backup_record(argv, 2, 0)\n os.remove(tarname + '.tar.gz')\n\n\ndef get_options(argv):\n parser = argparse.ArgumentParser()\n parser.add_argument('mode', type=str, choices=['local', 'remote', 'drive'])\n parser.add_argument('options', nargs=argparse.REMAINDER)\n return parser.parse_args(argv)\n\n\ndef main():\n args = get_options(sys.argv[1:])\n options = ' '.join(map(str, args.options))\n if args.mode == 'local':\n local_backup(*args.options)\n elif args.mode == 'remote':\n remote_backup(*args.options)\n else:\n drive_backup(*args.options)\n\n\nif __name__ == '__main__':\n main()\n", "step-4": "import argparse\nimport os\nimport sys, shutil\nfrom shutil import make_archive\nimport pathlib\nfrom phpManager import execute, execute_outputfile\nfrom datetime import date, datetime\nimport re\nimport pymysql\nimport tarfile\n\n\ndef append_log(log, message):\n f = open(log, 'a+')\n today = datetime.now()\n f.write('%s %s \\n' % (today.strftime('%Y-%m-%d %H:%M:%S'), message))\n f.close()\n\n\ndef get_root_pass():\n with open('/root/.my.cnf') as fp:\n lines = fp.read().splitlines()\n for line in lines:\n grep = re.findall('password', line)\n if grep:\n pwrd = line.split('\"')[1]\n return pwrd\n\n\ndef get_db_name(argv):\n try:\n pwrd = get_root_pass()\n db = pymysql.connect('localhost', 'root', pwrd, 'secure_vps')\n cursor = db.cursor()\n cursor.execute(\n \"select id,db_name from provision where provision_name='%s'\" % argv\n )\n data = cursor.fetchone()\n db.close()\n return data\n except pymysql.err.OperationalError as err:\n print(' An error has occurred \\n', err)\n except pymysql.err.InternalError as err:\n print(' An error has occurred \\n', err)\n\n\ndef backup_db(argv):\n data = get_db_name(argv)\n db_name = data[1]\n try:\n sqldir = '/home/kusanagi/' + argv + '/sql_backup/'\n p = pathlib.Path(sqldir)\n if not p.exists():\n p.mkdir(mode=493, parents=True, exist_ok=True)\n shutil.chown(sqldir, 'kusanagi', 'kusanagi')\n except BaseException as error:\n print(error)\n pwrd = get_root_pass()\n log = '/home/kusanagi/' + argv + 
'/log/backup.log'\n mess = 'Backed up database ' + db_name\n append_log(log, mess)\n cmd = ('mysqldump --single-transaction -p' + pwrd + ' --databases ' +\n db_name + ' | gzip > ' + sqldir + db_name + '.sql.gz')\n execute_outputfile(cmd, log)\n\n\ndef update_backup_record(argv, backup_type, result):\n pwrd = get_root_pass()\n data = get_db_name(argv)\n provi_id = data[0]\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n db = pymysql.connect('localhost', 'root', pwrd, 'secure_vps')\n cursor = db.cursor()\n cursor.execute(\n 'select id from logs where provision_id=%d and status=0 and backup_type=%d'\n % (provi_id, backup_type))\n res = cursor.fetchone()\n record_id = res[0]\n if result:\n cursor.execute(\n \"update logs set status=1,message='Done' where provision_id=%d and id=%d\"\n % (provi_id, record_id))\n else:\n cursor.execute(\n \"update logs set status=-1,message='Failed. See %s' where provision_id=%d and id=%d\"\n % (log, provi_id, record_id))\n db.commit()\n db.close()\n\n\ndef compress_provision_dir(argv, chdir=''):\n date = datetime.now()\n today = date.strftime('%Y-%m-%d')\n if chdir:\n tarname = chdir + argv + '.' + today\n else:\n tarname = '/home/kusanagi/backup/' + argv + '.' + today\n source_dir = '/home/kusanagi/' + argv\n shutil.make_archive(tarname, 'gztar', source_dir)\n return tarname\n\n\ndef local_backup(argv):\n append_log('/home/kusanagi/' + argv + '/log/backup.log', '--- Local backup'\n )\n backup_db(argv)\n tarname = compress_provision_dir(argv)\n tar_file = pathlib.Path(tarname + '.tar.gz')\n if tar_file.exists():\n update_backup_record(argv, 0, 1)\n else:\n update_backup_record(argv, 0, 0)\n\n\ndef check_ssh_conn(argv, remote_user, remote_host, remote_port, remote_pass):\n cmd = ('sshpass -p \"' + remote_pass +\n '\" ssh -o StrictHostKeyChecking=no -p ' + remote_port + ' -q ' +\n remote_user + '@' + remote_host + ' exit;echo $?')\n res = execute(cmd)\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n if int(res) == 0:\n pass\n else:\n append_log(log, 'Remote connection failed. 
Can not issue remote backup'\n )\n update_backup_record(argv, 1, 0)\n sys.exit(1)\n\n\ndef remote_backup(argv, remote_user, remote_host, remote_port, remote_pass,\n remote_dest):\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n append_log(log, '--- Remote backup')\n check_ssh_conn(argv, remote_user, remote_host, remote_port, remote_pass)\n backup_db(argv)\n tarname = compress_provision_dir(argv, '/home/kusanagi/')\n conf_ssh = '/etc/ssh/ssh_config'\n with open(conf_ssh) as fp:\n lines = fp.read().splitlines()\n for line in lines:\n grep = re.findall(remote_host, line)\n if grep:\n break\n if not grep:\n f = open(conf_ssh, 'a+')\n f.write('Host %s\\n\\tStrictHostKeyChecking no\\n' % remote_host)\n f.close()\n cmd = ('sshpass -p \"' + remote_pass +\n '\" rsync --remove-source-files -azhe \\'ssh -p' + remote_port + \"' \" +\n tarname + '.tar.gz ' + remote_user + '@' + remote_host + ':' +\n remote_dest + ' 2>> ' + log + ' ; echo $?')\n res = execute(cmd)\n if int(res) == 0:\n update_backup_record(argv, 1, 1)\n else:\n update_backup_record(argv, 1, 0)\n\n\ndef drive_backup(argv, drive_dir):\n log = '/home/kusanagi/' + argv + '/log/backup.log'\n append_log(log, '--- Backup to Google Drive')\n backup_db(argv)\n tarname = compress_provision_dir(argv, '/home/kusanagi/')\n cmd = ('rclone copy ' + tarname + '.tar.gz GGD1:' + drive_dir + ' 2>> ' +\n log + ' ; echo $?')\n res = execute(cmd)\n if int(res) == 0:\n update_backup_record(argv, 2, 1)\n else:\n update_backup_record(argv, 2, 0)\n os.remove(tarname + '.tar.gz')\n\n\ndef get_options(argv):\n parser = argparse.ArgumentParser()\n parser.add_argument('mode', type=str, choices=['local', 'remote', 'drive'])\n parser.add_argument('options', nargs=argparse.REMAINDER)\n return parser.parse_args(argv)\n\n\ndef main():\n args = get_options(sys.argv[1:])\n options = ' '.join(map(str, args.options))\n if args.mode == 'local':\n local_backup(*args.options)\n elif args.mode == 'remote':\n remote_backup(*args.options)\n else:\n drive_backup(*args.options)\n\n\nif __name__ == '__main__':\n main()\n", "step-5": "#!/usr/bin/env python3\nimport argparse\nimport os\nimport sys,shutil\nfrom shutil import make_archive\nimport pathlib\nfrom phpManager import execute,execute_outputfile\nfrom datetime import date,datetime\nimport re\nimport pymysql\nimport tarfile\n\n\ndef append_log(log,message):\n f = open(log, \"a+\")\n today = datetime.now()\n f.write(\"%s %s \\n\" % (today.strftime(\"%Y-%m-%d %H:%M:%S\"), message))\n f.close()\n\ndef get_root_pass():\n with open(\"/root/.my.cnf\") as fp: lines = fp.read().splitlines()\n for line in lines:\n grep = re.findall(r'password', line)\n if grep:\n pwrd = line.split('\"')[1]\n return pwrd\n\ndef get_db_name(argv):\n try:\n pwrd = get_root_pass()\n db = pymysql.connect(\"localhost\",\"root\",pwrd,\"secure_vps\")\n cursor = db.cursor()\n cursor.execute(\"select id,db_name from provision where provision_name='%s'\" % argv)\n data = cursor.fetchone()\n db.close()\n return data\n except pymysql.err.OperationalError as err:\n print (' An error has occurred \\n', err)\n except pymysql.err.InternalError as err:\n print (' An error has occurred \\n', err)\n\ndef backup_db(argv):\n\n data = get_db_name(argv)\n db_name = data[1]\n try:\n sqldir = '/home/kusanagi/'+argv+'/sql_backup/'\n p = pathlib.Path(sqldir)\n if not p.exists():\n p.mkdir(mode=0o755, parents=True, exist_ok=True)\n shutil.chown(sqldir,'kusanagi','kusanagi')\n except BaseException as error:\n print(error)\n pwrd = get_root_pass()\n\n log = 
'/home/kusanagi/'+argv+'/log/backup.log'\n mess = 'Backed up database '+db_name\n append_log(log,mess)\n\n cmd = 'mysqldump --single-transaction -p'+pwrd+' --databases '+db_name+' | gzip > '+sqldir+db_name+'.sql.gz'\n execute_outputfile(cmd,log)\n\ndef update_backup_record(argv,backup_type,result):\n\n pwrd = get_root_pass()\n data = get_db_name(argv)\n provi_id = data[0]\n log = '/home/kusanagi/'+argv+'/log/backup.log'\n\n db = pymysql.connect(\"localhost\",\"root\",pwrd,\"secure_vps\")\n cursor = db.cursor()\n cursor.execute(\"select id from logs where provision_id=%d and status=0 and backup_type=%d\" % (provi_id,backup_type))\n res = cursor.fetchone()\n record_id = res[0]\n\n if result:\n cursor.execute(\"update logs set status=1,message='Done' where provision_id=%d and id=%d\" % (provi_id,record_id))\n else:\n cursor.execute(\"update logs set status=-1,message='Failed. See %s' where provision_id=%d and id=%d\" % (log,provi_id,record_id))\n\n db.commit()\n db.close()\n\ndef compress_provision_dir(argv,chdir=''):\n date = datetime.now()\n today = date.strftime(\"%Y-%m-%d\")\n if chdir:\n tarname = chdir+argv+'.'+today\n else:\n tarname = '/home/kusanagi/backup/'+argv+'.'+today\n source_dir = '/home/kusanagi/'+argv\n shutil.make_archive(tarname,\"gztar\",source_dir)\n return tarname\n\ndef local_backup(argv):\n \n append_log('/home/kusanagi/'+argv+'/log/backup.log', '--- Local backup')\n backup_db(argv)\n tarname = compress_provision_dir(argv)\n\n tar_file=pathlib.Path(tarname+'.tar.gz')\n if tar_file.exists():\n update_backup_record(argv,0,1)\n else:\n update_backup_record(argv,0,0)\n\ndef check_ssh_conn(argv,remote_user,remote_host,remote_port,remote_pass):\n cmd = 'sshpass -p \"'+remote_pass+'\" ssh -o StrictHostKeyChecking=no -p '+remote_port+' -q '+remote_user+'@'+remote_host+' exit;echo $?'\n res = execute(cmd)\n log = '/home/kusanagi/'+argv+'/log/backup.log'\n if int(res) == 0:\n #print('Connect OK \\n')\n pass\n else:\n append_log(log, 'Remote connection failed. 
Can not issue remote backup')\n update_backup_record(argv,1,0)\n sys.exit(1)\n\ndef remote_backup(argv, remote_user, remote_host, remote_port, remote_pass, remote_dest):\n\n log = '/home/kusanagi/'+argv+'/log/backup.log'\n append_log(log, '--- Remote backup')\n check_ssh_conn(argv, remote_user, remote_host, remote_port, remote_pass)\n backup_db(argv)\n tarname = compress_provision_dir(argv,'/home/kusanagi/')\n \n conf_ssh = '/etc/ssh/ssh_config'\n with open(conf_ssh) as fp: lines = fp.read().splitlines()\n for line in lines:\n grep = re.findall(remote_host, line)\n if grep:\n break\n if not grep:\n #configure stricthostkey ssh\n f = open(conf_ssh,\"a+\")\n f.write('Host %s\\n\\tStrictHostKeyChecking no\\n' % remote_host)\n f.close()\n \n cmd = 'sshpass -p \"'+remote_pass+'\" rsync --remove-source-files -azhe \\'ssh -p'+remote_port+'\\' '+tarname+'.tar.gz '+remote_user+'@'+remote_host+':'+remote_dest+' 2>> '+log+' ; echo $?'\n res = execute(cmd)\n if int(res) == 0:\n update_backup_record(argv,1,1)\n else:\n update_backup_record(argv,1,0)\n\n\ndef drive_backup(argv,drive_dir):\n \n log = '/home/kusanagi/'+argv+'/log/backup.log'\n append_log(log,'--- Backup to Google Drive')\n backup_db(argv)\n tarname = compress_provision_dir(argv,'/home/kusanagi/')\n cmd = 'rclone copy '+tarname+'.tar.gz GGD1:'+drive_dir+ ' 2>> '+log+' ; echo $?'\n res = execute(cmd)\n if int(res) == 0:\n update_backup_record(argv,2,1)\n else:\n update_backup_record(argv,2,0)\n os.remove(tarname+'.tar.gz')\n \ndef get_options(argv):\n parser = argparse.ArgumentParser()\n parser.add_argument('mode', type=str, choices=['local', 'remote', 'drive'])\n parser.add_argument('options', nargs=argparse.REMAINDER)\n return parser.parse_args(argv)\n\ndef main():\n \n args=get_options(sys.argv[1:])\n #pwrd = get_root_pass()\n options = ' '.join(map(str, args.options))\n if args.mode == 'local':\n local_backup(*args.options)\n elif args.mode == 'remote':\n remote_backup(*args.options)\n else:\n drive_backup(*args.options)\n\nif __name__ == '__main__':\n main()\n\n", "step-ids": [ 10, 11, 13, 14, 15 ] }
[ 10, 11, 13, 14, 15 ]
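The record above routes everything after the mode through argparse.REMAINDER, so the positional values must arrive in exactly the order the backup functions unpack them. A minimal sketch of that contract, assuming the script is saved as backup.py; the provision name and connection details are made-up placeholders:

# Hypothetical smoke test; 'backup' refers to the script above saved as backup.py.
from backup import get_options

args = get_options(['remote', 'site01', 'kusanagi', '203.0.113.7', '22', 's3cret', '/backups/'])
assert args.mode == 'remote'
# main() unpacks args.options positionally into
# remote_backup(argv, remote_user, remote_host, remote_port, remote_pass, remote_dest),
# so the order above is the only one that works.
print(args.options)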
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
try:
    r = sr.Recognizer()
    with sr.Microphone() as source:
        system('clear')
        print(color(BOLD + 'Hola!\nAsk me anything.' + END, 'green'))
        while True:
            audio = r.listen(source)
            try:
                query = r.recognize_google(audio)
                print(query)
            except sr.UnknownValueError:
                print(color('Listening', 'blue'))
except KeyboardInterrupt:
    print(color(BOLD + ' Bye!' + END, 'cyan'))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
BOLD = '\x1b[1m'
END = '\x1b[0m'
CLIENT_ACCESS_TOKEN = '2245d4ab7c99466e806c8986a18234c4'
ai = apiai.ApiAI(CLIENT_ACCESS_TOKEN)
google_search = 'https://www.google.com/search?q='
youtube_search = 'https://www.youtube.com/results?search_query='
google_drive = 'https://drive.google.com'
gmail = 'https://mail.google.com'
try:
    r = sr.Recognizer()
    with sr.Microphone() as source:
        system('clear')
        print(color(BOLD + 'Hola!\nAsk me anything.' + END, 'green'))
        while True:
            audio = r.listen(source)
            try:
                query = r.recognize_google(audio)
                print(query)
            except sr.UnknownValueError:
                print(color('Listening', 'blue'))
except KeyboardInterrupt:
    print(color(BOLD + ' Bye!' + END, 'cyan'))
<|reserved_special_token_1|>
import speech_recognition as sr
from termcolor import colored as color
import apiai
import json
from os import system
import wikipedia as wiki
from time import sleep
import webbrowser as wb
BOLD = '\x1b[1m'
END = '\x1b[0m'
CLIENT_ACCESS_TOKEN = '2245d4ab7c99466e806c8986a18234c4'
ai = apiai.ApiAI(CLIENT_ACCESS_TOKEN)
google_search = 'https://www.google.com/search?q='
youtube_search = 'https://www.youtube.com/results?search_query='
google_drive = 'https://drive.google.com'
gmail = 'https://mail.google.com'
try:
    r = sr.Recognizer()
    with sr.Microphone() as source:
        system('clear')
        print(color(BOLD + 'Hola!\nAsk me anything.' + END, 'green'))
        while True:
            audio = r.listen(source)
            try:
                query = r.recognize_google(audio)
                print(query)
            except sr.UnknownValueError:
                print(color('Listening', 'blue'))
except KeyboardInterrupt:
    print(color(BOLD + ' Bye!' + END, 'cyan'))
<|reserved_special_token_1|>
#!/usr/bin/env python

import speech_recognition as sr
from termcolor import colored as color
import apiai
import json
from os import system
import wikipedia as wiki
from time import sleep
import webbrowser as wb


BOLD = "\033[1m" #use to bold the text
END = "\033[0m" #use to close the bold text
CLIENT_ACCESS_TOKEN = "2245d4ab7c99466e806c8986a18234c4"
ai = apiai.ApiAI(CLIENT_ACCESS_TOKEN)

google_search = "https://www.google.com/search?q="
youtube_search = "https://www.youtube.com/results?search_query="
google_drive = "https://drive.google.com"
gmail = "https://mail.google.com"
try:
    r = sr.Recognizer()
    with sr.Microphone() as source:
        system("clear")
        print(color(BOLD+"Hola!\nAsk me anything."+END,"green"))
        while True:
            audio = r.listen(source)

#        while True:
            try:
                query = r.recognize_google(audio)
                print(query)
            except sr.UnknownValueError:
                print (color("Listening","blue"))


except KeyboardInterrupt:
    print (color(BOLD+" Bye!"+END, "cyan"))
flexible
{ "blob_id": "d3e728bda85d2e72b8e477ab439d4dcffa23d63a", "index": 5448, "step-1": "<mask token>\n", "step-2": "<mask token>\ntry:\n r = sr.Recognizer()\n with sr.Microphone() as source:\n system('clear')\n print(color(BOLD + 'Hola!\\nAsk me anything.' + END, 'green'))\n while True:\n audio = r.listen(source)\n try:\n query = r.recognize_google(audio)\n print(query)\n except sr.UnknownValueError:\n print(color('Listening', 'blue'))\nexcept KeyboardInterrupt:\n print(color(BOLD + ' Bye!' + END, 'cyan'))\n", "step-3": "<mask token>\nBOLD = '\\x1b[1m'\nEND = '\\x1b[0m'\nCLIENT_ACCESS_TOKEN = '2245d4ab7c99466e806c8986a18234c4'\nai = apiai.ApiAI(CLIENT_ACCESS_TOKEN)\ngoogle_search = 'https://www.google.com/search?q='\nyoutube_search = 'https://www.youtube.com/results?search_query='\ngoogle_drive = 'https://drive.google.com'\ngmail = 'https://mail.google.com'\ntry:\n r = sr.Recognizer()\n with sr.Microphone() as source:\n system('clear')\n print(color(BOLD + 'Hola!\\nAsk me anything.' + END, 'green'))\n while True:\n audio = r.listen(source)\n try:\n query = r.recognize_google(audio)\n print(query)\n except sr.UnknownValueError:\n print(color('Listening', 'blue'))\nexcept KeyboardInterrupt:\n print(color(BOLD + ' Bye!' + END, 'cyan'))\n", "step-4": "import speech_recognition as sr\nfrom termcolor import colored as color\nimport apiai\nimport json\nfrom os import system\nimport wikipedia as wiki\nfrom time import sleep\nimport webbrowser as wb\nBOLD = '\\x1b[1m'\nEND = '\\x1b[0m'\nCLIENT_ACCESS_TOKEN = '2245d4ab7c99466e806c8986a18234c4'\nai = apiai.ApiAI(CLIENT_ACCESS_TOKEN)\ngoogle_search = 'https://www.google.com/search?q='\nyoutube_search = 'https://www.youtube.com/results?search_query='\ngoogle_drive = 'https://drive.google.com'\ngmail = 'https://mail.google.com'\ntry:\n r = sr.Recognizer()\n with sr.Microphone() as source:\n system('clear')\n print(color(BOLD + 'Hola!\\nAsk me anything.' + END, 'green'))\n while True:\n audio = r.listen(source)\n try:\n query = r.recognize_google(audio)\n print(query)\n except sr.UnknownValueError:\n print(color('Listening', 'blue'))\nexcept KeyboardInterrupt:\n print(color(BOLD + ' Bye!' + END, 'cyan'))\n", "step-5": "#!/usr/bin/env python\n\nimport speech_recognition as sr\nfrom termcolor import colored as color\nimport apiai\nimport json\nfrom os import system\nimport wikipedia as wiki\nfrom time import sleep\nimport webbrowser as wb\n\n\nBOLD = \"\\033[1m\" #use to bold the text\nEND = \"\\033[0m\" #use to close the bold text\nCLIENT_ACCESS_TOKEN = \"2245d4ab7c99466e806c8986a18234c4\"\nai = apiai.ApiAI(CLIENT_ACCESS_TOKEN)\n\ngoogle_search = \"https://www.google.com/search?q=\"\nyoutube_search = \"https://www.youtube.com/results?search_query=\"\ngoogle_drive = \"https://drive.google.com\"\ngmail = \"https://mail.google.com\"\ntry:\n r = sr.Recognizer()\n with sr.Microphone() as source:\n system(\"clear\")\n print(color(BOLD+\"Hola!\\nAsk me anything.\"+END,\"green\"))\n while True:\n audio = r.listen(source)\n\n# while True: \n try:\n query = r.recognize_google(audio)\n print(query)\n except sr.UnknownValueError:\n print (color(\"Listening\",\"blue\"))\n\n\n \n\nexcept KeyboardInterrupt:\n print (color(BOLD+\" Bye!\"+END, \"cyan\"))\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
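A small companion sketch for the record above: the loop prints 'Listening' on every sr.UnknownValueError, and calibrating the recognizer against ambient noise first usually reduces those misses. This is not part of the original sample; it assumes the speech_recognition package and a working microphone:

import speech_recognition as sr

r = sr.Recognizer()
with sr.Microphone() as source:
    r.adjust_for_ambient_noise(source, duration=1)  # sample ~1s of background noise
    audio = r.listen(source)
print(r.recognize_google(audio))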
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def slope_distance(baseElev, elv2, dist_betwn_baseElev_elv2, projectedDistance):
    import math
    numer = elv2 - baseElev
    denom = dist_betwn_baseElev_elv2
    print(numer, denom)
    distance = math.sqrt(numer ** 2 + denom ** 2)
    if denom == 0:
        print('Denominator is zero')
        b = 0
        if elv2 > baseElev:
            print(' and elv2 > baseElev')
            p = 1
            theta = math.pi / 2
        elif elv2 < baseElev:
            print(' and elv2 < baseElev')
            p = -1
            theta = -math.pi / 2
        else:
            print(' and elv2 = baseElev. Both of them are the same points !')
            p = 0
            b = 0
            theta = 0
    else:
        print('Denominator is NOT zero')
        theta = math.atan(numer / denom)
        p = math.sin(theta)
        b = math.cos(theta)
    slope = theta
    if projectedDistance != 0 and projectedDistance <= dist_betwn_baseElev_elv2:
        b = abs(projectedDistance)
        newElev = baseElev + b * math.tan(slope)
        distance = projectedDistance / math.cos(slope)
    else:
        newElev = elv2
    return slope, distance, newElev
<|reserved_special_token_1|>
def slope_distance(baseElev, elv2, dist_betwn_baseElev_elv2, projectedDistance):

    # Calculate the slope and distance between two Cartesian points.
    #
    # Input:
    #   For 2-D graphs,
    #   dist_betwn_baseElev_elv2, Distance between two elevation points (FLOAT)
    #   baseElev,  Elevation of first cartesian point (FLOAT)
    #   elv2,      Elevation of second cartesian point (FLOAT)
    #
    # Output:
    #   For 2-D graphs/profiles,
    #   slope,     Slope betweewn two points. The horizontal plane is the
    #              plane of origin. Slope above and below the plane are
    #              positive and negative, respectively. This variable is
    #              needed for creating 2-D profiles/graphs.
    #   distance,  Cartesian length between two points on a graph/profile.
    #              Used as 3-D Chainage distance (may differ from survey
    #              chainage data)
    #
    # Created: April 24, 2019 (moostang)

    import math

    numer = elv2 - baseElev # Numerator
    denom = dist_betwn_baseElev_elv2

    print(numer,denom)

    distance = math.sqrt( numer**2 + denom**2)

    # Check if denominator is zero, i.e. both points lies on the same
    # y-axis plane.
    # a. If denominator is zero, then determine if it lies on the
    #    upper (positive) or bottom (negative) y-axis plane.
    # b. If denominator is not zero, then proceed with normal pythagorean
    #    trigonometric calculations
    #

    if denom == 0:
        print("Denominator is zero")
        b = 0
        if elv2 > baseElev:
            print(" and elv2 > baseElev")
            p = 1 # Second point is above first point
            theta = math.pi/2
        elif elv2 < baseElev:
            print(" and elv2 < baseElev")
            p = -1 # Second point is below first point
            theta = - math.pi/2
        else:
            print(" and elv2 = baseElev. Both of them are the same points !")
            p = 0
            b = 0
            theta = 0
    else:
        print("Denominator is NOT zero")
        theta = math.atan(numer/denom)
        p = math.sin(theta)
        b = math.cos(theta)

    slope = theta

    if projectedDistance != 0 and projectedDistance <= dist_betwn_baseElev_elv2:
        b = abs(projectedDistance) # Tackle negative distances (may occur)
        newElev = baseElev + b*math.tan(slope)
        distance = projectedDistance/math.cos(slope)
    else:
        newElev = elv2

    return slope, distance, newElev
flexible
{ "blob_id": "65b30bbe737b331447235b5c640e9c3f7f6d6f8c", "index": 5851, "step-1": "<mask token>\n", "step-2": "def slope_distance(baseElev, elv2, dist_betwn_baseElev_elv2, projectedDistance\n ):\n import math\n numer = elv2 - baseElev\n denom = dist_betwn_baseElev_elv2\n print(numer, denom)\n distance = math.sqrt(numer ** 2 + denom ** 2)\n if denom == 0:\n print('Denominator is zero')\n b = 0\n if elv2 > baseElev:\n print(' and elv2 > baseElev')\n p = 1\n theta = math.pi / 2\n elif elv2 < baseElev:\n print(' and elv2 < baseElev')\n p = -1\n theta = -math.pi / 2\n else:\n print(' and elv2 = baseElev. Both of them are the same points !'\n )\n p = 0\n b = 0\n theta = 0\n else:\n print('Denominator is NOT zero')\n theta = math.atan(numer / denom)\n p = math.sin(theta)\n b = math.cos(theta)\n slope = theta\n if (projectedDistance != 0 and projectedDistance <=\n dist_betwn_baseElev_elv2):\n b = abs(projectedDistance)\n newElev = baseElev + b * math.tan(slope)\n distance = projectedDistance / math.cos(slope)\n else:\n newElev = elv2\n return slope, distance, newElev\n", "step-3": "def slope_distance(baseElev, elv2, dist_betwn_baseElev_elv2, projectedDistance):\n\n # Calculate the slope and distance between two Cartesian points.\n #\n # Input:\n # For 2-D graphs,\n # dist_betwn_baseElev_elv2, Distance between two elevation points (FLOAT)\n # baseElev, Elevation of first cartesian point (FLOAT)\n # elv2, Elevation of second cartesian point (FLOAT)\n #\n # Output:\n # For 2-D graphs/profiles,\n # slope, Slope betweewn two points. The horizontal plane is the\n # plane of origin. Slope above and below the plane are\n # positive and negative, respectively. This variable is\n # needed for creating 2-D profiles/graphs.\n # distance, Cartesian length between two points on a graph/profile.\n # Used as 3-D Chainage distance (may differ from survey\n # chainage data)\n #\n # Created: April 24, 2019 (moostang)\n\n import math\n\n numer = elv2 - baseElev # Numerator\n denom = dist_betwn_baseElev_elv2\n\n print(numer,denom)\n\n distance = math.sqrt( numer**2 + denom**2)\n\n # Check if denominator is zero, i.e. both points lies on the same\n # y-axis plane.\n # a. If denominator is zero, then determine if it lies on the\n # upper (positive) or bottom (negative) y-axis plane.\n # b. If denominator is not zero, then proceed with normal pythagorean\n # trigonometric calculations\n #\n\n if denom == 0:\n print(\"Denominator is zero\")\n b = 0\n if elv2 > baseElev:\n print(\" and elv2 > baseElev\")\n p = 1 # Second point is above first point\n theta = math.pi/2\n elif elv2 < baseElev:\n print(\" and elv2 < baseElev\")\n p = -1 # Second point is below first point\n theta = - math.pi/2\n else:\n print(\" and elv2 = baseElev. Both of them are the same points !\")\n p = 0\n b = 0\n theta = 0\n else:\n print(\"Denominator is NOT zero\")\n theta = math.atan(numer/denom)\n p = math.sin(theta)\n b = math.cos(theta)\n\n slope = theta\n\n if projectedDistance != 0 and projectedDistance <= dist_betwn_baseElev_elv2:\n b = abs(projectedDistance) # Tackle negative distances (may occur)\n newElev = baseElev + b*math.tan(slope)\n distance = projectedDistance/math.cos(slope)\n else:\n newElev = elv2\n\n return slope, distance, newElev\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
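A worked example for slope_distance above, chosen so the arithmetic can be checked by hand (a 3-4-5 right triangle; passing projectedDistance=0 takes the fall-through branch where newElev is simply elv2):

import math

slope, distance, newElev = slope_distance(100.0, 103.0, 4.0, 0)
# numer = 3.0, denom = 4.0 -> distance = sqrt(9 + 16) = 5.0
# slope = atan(3/4) ~ 0.6435 rad (~36.87 degrees)
assert distance == 5.0 and newElev == 103.0
print(math.degrees(slope))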
<|reserved_special_token_0|>


class Solution:
    <|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class TreeNode:
    <|reserved_special_token_0|>


class Solution:

    def isSymmetric(self, root: TreeNode) ->bool:
        if not root:
            return True
        queue = collections.deque()
        queue.append((root.left, root.right))
        while queue:
            left, right = queue.popleft()
            if not left and not right:
                continue
            if not left or not right or left.val != right.val:
                return False
            queue.append((left.left, right.right))
            queue.append((left.right, right.left))
        return True
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class TreeNode:

    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right


class Solution:

    def isSymmetric(self, root: TreeNode) ->bool:
        if not root:
            return True
        queue = collections.deque()
        queue.append((root.left, root.right))
        while queue:
            left, right = queue.popleft()
            if not left and not right:
                continue
            if not left or not right or left.val != right.val:
                return False
            queue.append((left.left, right.right))
            queue.append((left.right, right.left))
        return True
<|reserved_special_token_1|>
import collections


class TreeNode:

    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right


class Solution:

    def isSymmetric(self, root: TreeNode) ->bool:
        if not root:
            return True
        queue = collections.deque()
        queue.append((root.left, root.right))
        while queue:
            left, right = queue.popleft()
            if not left and not right:
                continue
            if not left or not right or left.val != right.val:
                return False
            queue.append((left.left, right.right))
            queue.append((left.right, right.left))
        return True
<|reserved_special_token_1|>
import collections

# Definition for a binary tree node.
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right

class Solution:
    def isSymmetric(self, root: TreeNode) -> bool:
        if not root:
            return True

        queue = collections.deque()

        queue.append((root.left, root.right))

        while queue:
            left, right = queue.popleft()

            if not left and not right:
                continue

            if not left or not right or left.val != right.val:
                return False

            queue.append((left.left, right.right))
            queue.append((left.right, right.left))

        return True
flexible
{ "blob_id": "24a4b9246a9b15334bebc45c532a25bd81266918", "index": 9650, "step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n", "step-2": "<mask token>\n\n\nclass TreeNode:\n <mask token>\n\n\nclass Solution:\n\n def isSymmetric(self, root: TreeNode) ->bool:\n if not root:\n return True\n queue = collections.deque()\n queue.append((root.left, root.right))\n while queue:\n left, right = queue.popleft()\n if not left and not right:\n continue\n if not left or not right or left.val != right.val:\n return False\n queue.append((left.left, right.right))\n queue.append((left.right, right.left))\n return True\n", "step-3": "<mask token>\n\n\nclass TreeNode:\n\n def __init__(self, val=0, left=None, right=None):\n self.val = val\n self.left = left\n self.right = right\n\n\nclass Solution:\n\n def isSymmetric(self, root: TreeNode) ->bool:\n if not root:\n return True\n queue = collections.deque()\n queue.append((root.left, root.right))\n while queue:\n left, right = queue.popleft()\n if not left and not right:\n continue\n if not left or not right or left.val != right.val:\n return False\n queue.append((left.left, right.right))\n queue.append((left.right, right.left))\n return True\n", "step-4": "import collections\n\n\nclass TreeNode:\n\n def __init__(self, val=0, left=None, right=None):\n self.val = val\n self.left = left\n self.right = right\n\n\nclass Solution:\n\n def isSymmetric(self, root: TreeNode) ->bool:\n if not root:\n return True\n queue = collections.deque()\n queue.append((root.left, root.right))\n while queue:\n left, right = queue.popleft()\n if not left and not right:\n continue\n if not left or not right or left.val != right.val:\n return False\n queue.append((left.left, right.right))\n queue.append((left.right, right.left))\n return True\n", "step-5": "import collections\n\n# Definition for a binary tree node.\nclass TreeNode:\n def __init__(self, val=0, left=None, right=None):\n self.val = val\n self.left = left\n self.right = right\n\nclass Solution:\n def isSymmetric(self, root: TreeNode) -> bool:\n if not root:\n return True\n\n queue = collections.deque()\n \n queue.append((root.left, root.right))\n \n while queue:\n left, right = queue.popleft()\n \n if not left and not right:\n continue\n \n if not left or not right or left.val != right.val:\n return False\n \n queue.append((left.left, right.right))\n queue.append((left.right, right.left))\n \n return True\n", "step-ids": [ 1, 3, 4, 5, 6 ] }
[ 1, 3, 4, 5, 6 ]
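A quick sanity check for the Solution class above, assuming the final step's definitions (including the collections import and the three-argument TreeNode constructor) are in scope; the first tree mirrors around its root, the second does not:

sym = TreeNode(1, TreeNode(2), TreeNode(2))
lop = TreeNode(1, TreeNode(2), TreeNode(3))
print(Solution().isSymmetric(sym))  # True
print(Solution().isSymmetric(lop))  # False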
<|reserved_special_token_0|>


class CutoffStrategy:
    <|reserved_special_token_0|>

    def __init__(self, generate_fn, description='undescribed cutoff strategy'):
        self.generate_fn = generate_fn
        self.description = description


class FixWindowCutoffStrategy(CutoffStrategy):

    def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window):
        self.description = 'in next {} days'.format(cutoff_window)
        self.cutoff_base = cutoff_base
        self.cutoff_end = cutoff_end
        self.cutoff_window = cutoff_window
        self.entity_col = entity_col

    def generate_cutoffs(self, df):
        cutoff_st_ed_pairs = []
        current = self.cutoff_base
        while True:
            current_end = current + timedelta(days=self.cutoff_window)
            if current_end > self.cutoff_end:
                break
            cutoff_st_ed_pairs.append((current, current_end))
            current = current_end
        entity_cutoffs = []
        for entity_name in set(df[self.entity_col]):
            for cutoff_st, cutoff_ed in cutoff_st_ed_pairs:
                entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed))
        return pd.DataFrame(entity_cutoffs, columns=[self.entity_col,
            'cutoff_st', 'cutoff_ed'])
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class CutoffStrategy:
    """
    Class that holds a CutoffStrategy. This is a measure to prevent leakage

    Parameters
    ----------
    generate_fn: a function that generates a cutoff time for a given entity.
input: entity rows output: a training cutoff in np.datetime64 format Returns ------- CutoffStrategy Instance """ def __init__(self, generate_fn, description='undescribed cutoff strategy'): self.generate_fn = generate_fn self.description = description class FixWindowCutoffStrategy(CutoffStrategy): def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window): self.description = 'in next {} days'.format(cutoff_window) self.cutoff_base = cutoff_base self.cutoff_end = cutoff_end self.cutoff_window = cutoff_window self.entity_col = entity_col def generate_cutoffs(self, df): cutoff_st_ed_pairs = [] current = self.cutoff_base while True: current_end = current + timedelta(days=self.cutoff_window) if current_end > self.cutoff_end: break cutoff_st_ed_pairs.append((current, current_end)) current = current_end entity_cutoffs = [] for entity_name in set(df[self.entity_col]): for cutoff_st, cutoff_ed in cutoff_st_ed_pairs: entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed)) return pd.DataFrame(entity_cutoffs, columns=[self.entity_col, 'cutoff_st', 'cutoff_ed']) <|reserved_special_token_1|> from datetime import timedelta import pandas as pd __all__ = ['FixWindowCutoffStrategy'] class CutoffStrategy: """ Class that holds a CutoffStrategy. This is a measure to prevent leakage Parameters ---------- generate_fn: a function that generates a cutoff time for a given entity. input: entity rows output: a training cutoff in np.datetime64 format Returns ------- CutoffStrategy Instance """ def __init__(self, generate_fn, description='undescribed cutoff strategy'): self.generate_fn = generate_fn self.description = description class FixWindowCutoffStrategy(CutoffStrategy): def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window): self.description = 'in next {} days'.format(cutoff_window) self.cutoff_base = cutoff_base self.cutoff_end = cutoff_end self.cutoff_window = cutoff_window self.entity_col = entity_col def generate_cutoffs(self, df): cutoff_st_ed_pairs = [] current = self.cutoff_base while True: current_end = current + timedelta(days=self.cutoff_window) if current_end > self.cutoff_end: break cutoff_st_ed_pairs.append((current, current_end)) current = current_end entity_cutoffs = [] for entity_name in set(df[self.entity_col]): for cutoff_st, cutoff_ed in cutoff_st_ed_pairs: entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed)) return pd.DataFrame(entity_cutoffs, columns=[self.entity_col, 'cutoff_st', 'cutoff_ed']) <|reserved_special_token_1|> from datetime import timedelta import pandas as pd __all__ = ["FixWindowCutoffStrategy"] class CutoffStrategy: """ Class that holds a CutoffStrategy. This is a measure to prevent leakage Parameters ---------- generate_fn: a function that generates a cutoff time for a given entity. 
input: entity rows output: a training cutoff in np.datetime64 format Returns ------- CutoffStrategy Instance """ def __init__(self, generate_fn, description='undescribed cutoff strategy'): self.generate_fn = generate_fn self.description = description class FixWindowCutoffStrategy(CutoffStrategy): def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window): self.description = "in next {} days".format(cutoff_window) self.cutoff_base = cutoff_base self.cutoff_end = cutoff_end self.cutoff_window = cutoff_window self.entity_col = entity_col def generate_cutoffs(self, df): cutoff_st_ed_pairs = [] current = self.cutoff_base while True: current_end = current + timedelta(days=self.cutoff_window) if current_end > self.cutoff_end: break cutoff_st_ed_pairs.append((current, current_end)) current = current_end entity_cutoffs = [] for entity_name in set(df[self.entity_col]): for cutoff_st, cutoff_ed in cutoff_st_ed_pairs: entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed)) return pd.DataFrame(entity_cutoffs, columns=[self.entity_col, "cutoff_st", "cutoff_ed"])
flexible
{ "blob_id": "30f030d48368e1b103f926ee7a15b4b75c4459c7", "index": 7030, "step-1": "<mask token>\n\n\nclass CutoffStrategy:\n <mask token>\n\n def __init__(self, generate_fn, description='undescribed cutoff strategy'):\n self.generate_fn = generate_fn\n self.description = description\n\n\nclass FixWindowCutoffStrategy(CutoffStrategy):\n\n def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window):\n self.description = 'in next {} days'.format(cutoff_window)\n self.cutoff_base = cutoff_base\n self.cutoff_end = cutoff_end\n self.cutoff_window = cutoff_window\n self.entity_col = entity_col\n\n def generate_cutoffs(self, df):\n cutoff_st_ed_pairs = []\n current = self.cutoff_base\n while True:\n current_end = current + timedelta(days=self.cutoff_window)\n if current_end > self.cutoff_end:\n break\n cutoff_st_ed_pairs.append((current, current_end))\n current = current_end\n entity_cutoffs = []\n for entity_name in set(df[self.entity_col]):\n for cutoff_st, cutoff_ed in cutoff_st_ed_pairs:\n entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed))\n return pd.DataFrame(entity_cutoffs, columns=[self.entity_col,\n 'cutoff_st', 'cutoff_ed'])\n", "step-2": "<mask token>\n\n\nclass CutoffStrategy:\n \"\"\"\n Class that holds a CutoffStrategy. This is a measure to prevent leakage\n\n Parameters\n ----------\n generate_fn: a function that generates a cutoff time for a given entity.\n input: entity rows\n output: a training cutoff in np.datetime64 format\n\n Returns\n -------\n CutoffStrategy Instance\n \"\"\"\n\n def __init__(self, generate_fn, description='undescribed cutoff strategy'):\n self.generate_fn = generate_fn\n self.description = description\n\n\nclass FixWindowCutoffStrategy(CutoffStrategy):\n\n def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window):\n self.description = 'in next {} days'.format(cutoff_window)\n self.cutoff_base = cutoff_base\n self.cutoff_end = cutoff_end\n self.cutoff_window = cutoff_window\n self.entity_col = entity_col\n\n def generate_cutoffs(self, df):\n cutoff_st_ed_pairs = []\n current = self.cutoff_base\n while True:\n current_end = current + timedelta(days=self.cutoff_window)\n if current_end > self.cutoff_end:\n break\n cutoff_st_ed_pairs.append((current, current_end))\n current = current_end\n entity_cutoffs = []\n for entity_name in set(df[self.entity_col]):\n for cutoff_st, cutoff_ed in cutoff_st_ed_pairs:\n entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed))\n return pd.DataFrame(entity_cutoffs, columns=[self.entity_col,\n 'cutoff_st', 'cutoff_ed'])\n", "step-3": "<mask token>\n__all__ = ['FixWindowCutoffStrategy']\n\n\nclass CutoffStrategy:\n \"\"\"\n Class that holds a CutoffStrategy. 
This is a measure to prevent leakage\n\n Parameters\n ----------\n generate_fn: a function that generates a cutoff time for a given entity.\n input: entity rows\n output: a training cutoff in np.datetime64 format\n\n Returns\n -------\n CutoffStrategy Instance\n \"\"\"\n\n def __init__(self, generate_fn, description='undescribed cutoff strategy'):\n self.generate_fn = generate_fn\n self.description = description\n\n\nclass FixWindowCutoffStrategy(CutoffStrategy):\n\n def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window):\n self.description = 'in next {} days'.format(cutoff_window)\n self.cutoff_base = cutoff_base\n self.cutoff_end = cutoff_end\n self.cutoff_window = cutoff_window\n self.entity_col = entity_col\n\n def generate_cutoffs(self, df):\n cutoff_st_ed_pairs = []\n current = self.cutoff_base\n while True:\n current_end = current + timedelta(days=self.cutoff_window)\n if current_end > self.cutoff_end:\n break\n cutoff_st_ed_pairs.append((current, current_end))\n current = current_end\n entity_cutoffs = []\n for entity_name in set(df[self.entity_col]):\n for cutoff_st, cutoff_ed in cutoff_st_ed_pairs:\n entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed))\n return pd.DataFrame(entity_cutoffs, columns=[self.entity_col,\n 'cutoff_st', 'cutoff_ed'])\n", "step-4": "from datetime import timedelta\nimport pandas as pd\n__all__ = ['FixWindowCutoffStrategy']\n\n\nclass CutoffStrategy:\n \"\"\"\n Class that holds a CutoffStrategy. This is a measure to prevent leakage\n\n Parameters\n ----------\n generate_fn: a function that generates a cutoff time for a given entity.\n input: entity rows\n output: a training cutoff in np.datetime64 format\n\n Returns\n -------\n CutoffStrategy Instance\n \"\"\"\n\n def __init__(self, generate_fn, description='undescribed cutoff strategy'):\n self.generate_fn = generate_fn\n self.description = description\n\n\nclass FixWindowCutoffStrategy(CutoffStrategy):\n\n def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window):\n self.description = 'in next {} days'.format(cutoff_window)\n self.cutoff_base = cutoff_base\n self.cutoff_end = cutoff_end\n self.cutoff_window = cutoff_window\n self.entity_col = entity_col\n\n def generate_cutoffs(self, df):\n cutoff_st_ed_pairs = []\n current = self.cutoff_base\n while True:\n current_end = current + timedelta(days=self.cutoff_window)\n if current_end > self.cutoff_end:\n break\n cutoff_st_ed_pairs.append((current, current_end))\n current = current_end\n entity_cutoffs = []\n for entity_name in set(df[self.entity_col]):\n for cutoff_st, cutoff_ed in cutoff_st_ed_pairs:\n entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed))\n return pd.DataFrame(entity_cutoffs, columns=[self.entity_col,\n 'cutoff_st', 'cutoff_ed'])\n", "step-5": "from datetime import timedelta\n\nimport pandas as pd\n\n__all__ = [\"FixWindowCutoffStrategy\"]\n\n\nclass CutoffStrategy:\n \"\"\"\n Class that holds a CutoffStrategy. 
This is a measure to prevent leakage\n\n Parameters\n ----------\n generate_fn: a function that generates a cutoff time for a given entity.\n input: entity rows\n output: a training cutoff in np.datetime64 format\n\n Returns\n -------\n CutoffStrategy Instance\n \"\"\"\n\n def __init__(self, generate_fn, description='undescribed cutoff strategy'):\n self.generate_fn = generate_fn\n self.description = description\n\n\nclass FixWindowCutoffStrategy(CutoffStrategy):\n def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window):\n self.description = \"in next {} days\".format(cutoff_window)\n self.cutoff_base = cutoff_base\n self.cutoff_end = cutoff_end\n self.cutoff_window = cutoff_window\n self.entity_col = entity_col\n\n def generate_cutoffs(self, df):\n cutoff_st_ed_pairs = []\n\n current = self.cutoff_base\n while True:\n current_end = current + timedelta(days=self.cutoff_window)\n if current_end > self.cutoff_end:\n break\n cutoff_st_ed_pairs.append((current, current_end))\n current = current_end\n\n entity_cutoffs = []\n for entity_name in set(df[self.entity_col]):\n for cutoff_st, cutoff_ed in cutoff_st_ed_pairs:\n entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed))\n\n return pd.DataFrame(entity_cutoffs, columns=[self.entity_col, \"cutoff_st\", \"cutoff_ed\"])\n", "step-ids": [ 5, 6, 7, 8, 9 ] }
[ 5, 6, 7, 8, 9 ]
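Hypothetical usage of FixWindowCutoffStrategy from the record above (the column name, entity values, and dates are invented): a 14-day span with cutoff_window=7 yields two windows, emitted once per distinct entity.

from datetime import datetime
import pandas as pd

df = pd.DataFrame({'customer_id': ['a', 'a', 'b']})
strat = FixWindowCutoffStrategy('customer_id', datetime(2020, 1, 1),
                                datetime(2020, 1, 15), 7)
print(strat.generate_cutoffs(df))
# 4 rows: ('a'/'b') x [(Jan 1, Jan 8), (Jan 8, Jan 15)]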
import xml.parsers.expat
import urllib2
import threading


def check_url(checkurl, checkstring, checkname):
    try:
        opener = urllib2.urlopen(checkurl, timeout = 5)
        if checkstring[0] == "!":
            if checkstring.encode('utf-8')[1:] not in opener.read():
                print "Open",checkname
            else:
                #print "Closed",checkname
                pass
        else:
            if checkstring.encode('utf-8') in opener.read():
                print "Open",checkname
            else:
                #print "Closed",checkname
                pass
    except IOError:
        #print "Broken",checkname
        pass
p = xml.parsers.expat.ParserCreate()

tname = ""
url = ""
check = ""
mode = ""
enabled = ""

def char_data(data):
    global tname, url, check, mode, enabled
    if mode == "name":
        tname += data
    elif mode == "check":
        check += data
    elif mode == "signup":
        url += data
    elif mode == "type":
        enabled += data

def end_element(name):
    global tname, url, check, mode, enabled
    mode = ""
    if name == "tracker" and enabled[0] == "T":
        threading.Thread(target=check_url, args=(url, check, tname)).start()
        tname = ""
        url = ""
        enabled = ""
        check = ""


def start_element(name, attrs):
    global tname, url, check, mode, enabled
    if name == "name":
        mode = "name"
    elif name == "signup":
        mode = "signup"
    elif name == "check":
        mode = "check"
    elif name == "type":
        mode = "type"
p.StartElementHandler = start_element
p.EndElementHandler = end_element
p.CharacterDataHandler = char_data

f = open("trackers.xml")
p.Parse(f.read(),1)
normal
{ "blob_id": "9d3d7000ed13a2623a53705d55b5dbb42662ce2f", "index": 4296, "step-1": "import xml.parsers.expat\nimport urllib2\nimport threading\n\n\n\ndef check_url(checkurl, checkstring, checkname):\n try:\n opener = urllib2.urlopen(checkurl, timeout = 5)\n if checkstring[0] == \"!\":\n if checkstring.encode('utf-8')[1:] not in opener.read():\n print \"Open\",checkname\n else:\n #print \"Closed\",checkname\n pass\n else:\n if checkstring.encode('utf-8') in opener.read():\n print \"Open\",checkname\n else:\n #print \"Closed\",checkname\n pass\n except IOError:\n #print \"Broken\",checkname\n pass\np = xml.parsers.expat.ParserCreate()\n\ntname = \"\"\nurl = \"\"\ncheck = \"\"\nmode = \"\"\nenabled = \"\"\n\ndef char_data(data):\n global tname, url, check, mode, enabled\n if mode == \"name\":\n tname += data\n elif mode == \"check\":\n check += data\n elif mode == \"signup\":\n url += data\n elif mode == \"type\":\n enabled += data\n \ndef end_element(name):\n global tname, url, check, mode, enabled\n mode = \"\"\n if name == \"tracker\" and enabled[0] == \"T\":\n threading.Thread(target=check_url, args=(url, check, tname)).start()\n tname = \"\"\n url = \"\"\n enabled = \"\"\n check = \"\"\n \n \ndef start_element(name, attrs):\n global tname, url, check, mode, enabled\n if name == \"name\":\n mode = \"name\"\n elif name == \"signup\":\n mode = \"signup\"\n elif name == \"check\":\n mode = \"check\"\n elif name == \"type\":\n mode = \"type\"\np.StartElementHandler = start_element\np.EndElementHandler = end_element\np.CharacterDataHandler = char_data\n\nf = open(\"trackers.xml\")\np.Parse(f.read(),1)\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
<|reserved_special_token_0|>


class TabPaneRepartitor(editor.PaneRepartitor):

    def __init__(self, instance_editor, tab_edited_class):
        self.instance_editor = ...
        self.tab_edited_class = ...

    def is_displayed_in_other_tab(self, attribute, o_Class):
        ...
    <|reserved_special_token_0|>

    def is_displayed_in_hierarchy_pane(self, attribute, o, field_class:
        Optional[Any]=...):
        ...

    def is_displayed_in_attribute_pane(self, attribute, o, field_class:
        Optional[Any]=...):
        ...


class OntologyInstanceEditor(editor.EditorTabbedDialog):
    _Qt_MODULE = ...
    _HTML_MODULE = ...

    def __init__(self, gui: Optional[Any]=..., master: Optional[Any]=...,
        direction=..., on_validate: Optional[Any]=..., edit_child_in_self=
        ..., undo_stack: Optional[Any]=..., on_close: Optional[Any]=...,
        menubar: bool=...):
        self.ontology = ...
        self.edited_classes = ...
        self.last_undoables = ...
        self.edited_instancess = ...

    def on_dialog_closed(self, *args):
        ...

    def set_ontology(self, ontology, edited_classes: Optional[Any]=...):
        self.ontology = ...
        self.edited_classes = ...
        self.last_undoables = ...
        self.edited_instancess = ...

    def add_tab_for_class(self, Class):
        ...

    def on_save(self, *args):
        self.last_undoables = ...

    def on_save_as(self, *args):
        ...
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class EditedInstances(object):
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>

    def details(self):
        ...
    <|reserved_special_token_0|>


<|reserved_special_token_0|>


class TabPaneRepartitor(editor.PaneRepartitor):

    def __init__(self, instance_editor, tab_edited_class):
        self.instance_editor = ...
        self.tab_edited_class = ...

    def is_displayed_in_other_tab(self, attribute, o_Class):
        ...

    def _compute(self, o, attribute, field_class: Optional[Any]=...):
        ...

    def is_displayed_in_hierarchy_pane(self, attribute, o, field_class:
        Optional[Any]=...):
        ...

    def is_displayed_in_attribute_pane(self, attribute, o, field_class:
        Optional[Any]=...):
        ...


class OntologyInstanceEditor(editor.EditorTabbedDialog):
    _Qt_MODULE = ...
    _HTML_MODULE = ...

    def __init__(self, gui: Optional[Any]=..., master: Optional[Any]=...,
        direction=..., on_validate: Optional[Any]=..., edit_child_in_self=
        ..., undo_stack: Optional[Any]=..., on_close: Optional[Any]=...,
        menubar: bool=...):
        self.ontology = ...
        self.edited_classes = ...
        self.last_undoables = ...
        self.edited_instancess = ...

    def on_dialog_closed(self, *args):
        ...

    def set_ontology(self, ontology, edited_classes: Optional[Any]=...):
        self.ontology = ...
        self.edited_classes = ...
        self.last_undoables = ...
        self.edited_instancess = ...

    def add_tab_for_class(self, Class):
        ...

    def on_save(self, *args):
        self.last_undoables = ...

    def on_save_as(self, *args):
        ...
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class EditedInstances(object):
    <|reserved_special_token_0|>

    def get_instances(self):
        ...
    <|reserved_special_token_0|>

    def remove_instance(self, instance):
        ...

    def __str__(self):
        ...

    def details(self):
        ...

    def addable_values(self):
        ...


<|reserved_special_token_0|>


class OntologyInstanceEditor(editor.EditorTabbedDialog):
    _Qt_MODULE = ...
    _HTML_MODULE = ...

    def __init__(self, gui: Optional[Any]=..., master: Optional[Any]=...,
        direction=..., on_validate: Optional[Any]=..., edit_child_in_self=
        ..., undo_stack: Optional[Any]=..., on_close: Optional[Any]=...,
        menubar: bool=...):
        self.ontology = ...
        self.edited_classes = ...
        self.last_undoables = ...
        self.edited_instancess = ...

    def on_dialog_closed(self, *args):
        ...

    def set_ontology(self, ontology, edited_classes: Optional[Any]=...):
        self.ontology = ...
        self.edited_classes = ...
        self.last_undoables = ...
        self.edited_instancess = ...

    def add_tab_for_class(self, Class):
        ...

    def on_save(self, *args):
        self.last_undoables = ...

    def on_save_as(self, *args):
        ...
flexible
{ "blob_id": "440c116327ee587b5a305953772523011ece5dda", "index": 9641, "step-1": "<mask token>\n\n\nclass TabPaneRepartitor(editor.PaneRepartitor):\n\n def __init__(self, instance_editor, tab_edited_class):\n self.instance_editor = ...\n self.tab_edited_class = ...\n\n def is_displayed_in_other_tab(self, attribute, o_Class):\n ...\n <mask token>\n\n def is_displayed_in_hierarchy_pane(self, attribute, o, field_class:\n Optional[Any]=...):\n ...\n\n def is_displayed_in_attribute_pane(self, attribute, o, field_class:\n Optional[Any]=...):\n ...\n\n\nclass OntologyInstanceEditor(editor.EditorTabbedDialog):\n _Qt_MODULE = ...\n _HTML_MODULE = ...\n\n def __init__(self, gui: Optional[Any]=..., master: Optional[Any]=...,\n direction=..., on_validate: Optional[Any]=..., edit_child_in_self=\n ..., undo_stack: Optional[Any]=..., on_close: Optional[Any]=...,\n menubar: bool=...):\n self.ontology = ...\n self.edited_classes = ...\n self.last_undoables = ...\n self.edited_instancess = ...\n\n def on_dialog_closed(self, *args):\n ...\n\n def set_ontology(self, ontology, edited_classes: Optional[Any]=...):\n self.ontology = ...\n self.edited_classes = ...\n self.last_undoables = ...\n self.edited_instancess = ...\n\n def add_tab_for_class(self, Class):\n ...\n\n def on_save(self, *args):\n self.last_undoables = ...\n\n def on_save_as(self, *args):\n ...\n", "step-2": "<mask token>\n\n\nclass EditedInstances(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def details(self):\n ...\n <mask token>\n\n\n<mask token>\n\n\nclass TabPaneRepartitor(editor.PaneRepartitor):\n\n def __init__(self, instance_editor, tab_edited_class):\n self.instance_editor = ...\n self.tab_edited_class = ...\n\n def is_displayed_in_other_tab(self, attribute, o_Class):\n ...\n\n def _compute(self, o, attribute, field_class: Optional[Any]=...):\n ...\n\n def is_displayed_in_hierarchy_pane(self, attribute, o, field_class:\n Optional[Any]=...):\n ...\n\n def is_displayed_in_attribute_pane(self, attribute, o, field_class:\n Optional[Any]=...):\n ...\n\n\nclass OntologyInstanceEditor(editor.EditorTabbedDialog):\n _Qt_MODULE = ...\n _HTML_MODULE = ...\n\n def __init__(self, gui: Optional[Any]=..., master: Optional[Any]=...,\n direction=..., on_validate: Optional[Any]=..., edit_child_in_self=\n ..., undo_stack: Optional[Any]=..., on_close: Optional[Any]=...,\n menubar: bool=...):\n self.ontology = ...\n self.edited_classes = ...\n self.last_undoables = ...\n self.edited_instancess = ...\n\n def on_dialog_closed(self, *args):\n ...\n\n def set_ontology(self, ontology, edited_classes: Optional[Any]=...):\n self.ontology = ...\n self.edited_classes = ...\n self.last_undoables = ...\n self.edited_instancess = ...\n\n def add_tab_for_class(self, Class):\n ...\n\n def on_save(self, *args):\n self.last_undoables = ...\n\n def on_save_as(self, *args):\n ...\n", "step-3": "<mask token>\n\n\nclass EditedInstances(object):\n <mask token>\n\n def get_instances(self):\n ...\n <mask token>\n\n def remove_instance(self, instance):\n ...\n\n def __str__(self):\n ...\n\n def details(self):\n ...\n\n def addable_values(self):\n ...\n\n\n<mask token>\n\n\nclass TabPaneRepartitor(editor.PaneRepartitor):\n\n def __init__(self, instance_editor, tab_edited_class):\n self.instance_editor = ...\n self.tab_edited_class = ...\n\n def is_displayed_in_other_tab(self, attribute, o_Class):\n ...\n\n def _compute(self, o, attribute, field_class: Optional[Any]=...):\n ...\n\n def is_displayed_in_hierarchy_pane(self, attribute, o, 
field_class:\n Optional[Any]=...):\n ...\n\n def is_displayed_in_attribute_pane(self, attribute, o, field_class:\n Optional[Any]=...):\n ...\n\n\nclass OntologyInstanceEditor(editor.EditorTabbedDialog):\n _Qt_MODULE = ...\n _HTML_MODULE = ...\n\n def __init__(self, gui: Optional[Any]=..., master: Optional[Any]=...,\n direction=..., on_validate: Optional[Any]=..., edit_child_in_self=\n ..., undo_stack: Optional[Any]=..., on_close: Optional[Any]=...,\n menubar: bool=...):\n self.ontology = ...\n self.edited_classes = ...\n self.last_undoables = ...\n self.edited_instancess = ...\n\n def on_dialog_closed(self, *args):\n ...\n\n def set_ontology(self, ontology, edited_classes: Optional[Any]=...):\n self.ontology = ...\n self.edited_classes = ...\n self.last_undoables = ...\n self.edited_instancess = ...\n\n def add_tab_for_class(self, Class):\n ...\n\n def on_save(self, *args):\n self.last_undoables = ...\n\n def on_save_as(self, *args):\n ...\n", "step-4": "<mask token>\n__all__ = ['EditedInstances', 'OntologyInstanceEditor']\n\n\nclass EditedInstances(object):\n\n def __init__(self, ontology, Class):\n self.ontology = ...\n self.namespace = ...\n self.Class = ...\n self.name = ...\n\n def get_instances(self):\n ...\n instances = ...\n\n def remove_instance(self, instance):\n ...\n\n def __str__(self):\n ...\n\n def details(self):\n ...\n\n def addable_values(self):\n ...\n\n\ndescr = introsp.description(EditedInstances)\n\n\nclass TabPaneRepartitor(editor.PaneRepartitor):\n\n def __init__(self, instance_editor, tab_edited_class):\n self.instance_editor = ...\n self.tab_edited_class = ...\n\n def is_displayed_in_other_tab(self, attribute, o_Class):\n ...\n\n def _compute(self, o, attribute, field_class: Optional[Any]=...):\n ...\n\n def is_displayed_in_hierarchy_pane(self, attribute, o, field_class:\n Optional[Any]=...):\n ...\n\n def is_displayed_in_attribute_pane(self, attribute, o, field_class:\n Optional[Any]=...):\n ...\n\n\nclass OntologyInstanceEditor(editor.EditorTabbedDialog):\n _Qt_MODULE = ...\n _HTML_MODULE = ...\n\n def __init__(self, gui: Optional[Any]=..., master: Optional[Any]=...,\n direction=..., on_validate: Optional[Any]=..., edit_child_in_self=\n ..., undo_stack: Optional[Any]=..., on_close: Optional[Any]=...,\n menubar: bool=...):\n self.ontology = ...\n self.edited_classes = ...\n self.last_undoables = ...\n self.edited_instancess = ...\n\n def on_dialog_closed(self, *args):\n ...\n\n def set_ontology(self, ontology, edited_classes: Optional[Any]=...):\n self.ontology = ...\n self.edited_classes = ...\n self.last_undoables = ...\n self.edited_instancess = ...\n\n def add_tab_for_class(self, Class):\n ...\n\n def on_save(self, *args):\n self.last_undoables = ...\n\n def on_save_as(self, *args):\n ...\n", "step-5": "\"\"\"\nThis type stub file was generated by pyright.\n\"\"\"\n\nimport editobj3.introsp as introsp\nimport editobj3.editor as editor\nfrom owlready2 import *\nfrom editobj3.observe import *\nfrom typing import Any, Optional\n\n__all__ = [\"EditedInstances\", \"OntologyInstanceEditor\"]\nclass EditedInstances(object):\n def __init__(self, ontology, Class):\n self.ontology = ...\n self.namespace = ...\n self.Class = ...\n self.name = ...\n \n def get_instances(self):\n ...\n \n instances = ...\n def remove_instance(self, instance):\n ...\n \n def __str__(self):\n ...\n \n def details(self):\n ...\n \n def addable_values(self):\n ...\n \n\n\ndescr = introsp.description(EditedInstances)\nclass TabPaneRepartitor(editor.PaneRepartitor):\n def __init__(self, 
instance_editor, tab_edited_class):\n self.instance_editor = ...\n self.tab_edited_class = ...\n \n def is_displayed_in_other_tab(self, attribute, o_Class):\n ...\n \n def _compute(self, o, attribute, field_class: Optional[Any] = ...):\n ...\n \n def is_displayed_in_hierarchy_pane(self, attribute, o, field_class: Optional[Any] = ...):\n ...\n \n def is_displayed_in_attribute_pane(self, attribute, o, field_class: Optional[Any] = ...):\n ...\n \n\n\nclass OntologyInstanceEditor(editor.EditorTabbedDialog):\n _Qt_MODULE = ...\n _HTML_MODULE = ...\n def __init__(self, gui: Optional[Any] = ..., master: Optional[Any] = ..., direction=..., on_validate: Optional[Any] = ..., edit_child_in_self=..., undo_stack: Optional[Any] = ..., on_close: Optional[Any] = ..., menubar: bool = ...):\n self.ontology = ...\n self.edited_classes = ...\n self.last_undoables = ...\n self.edited_instancess = ...\n \n def on_dialog_closed(self, *args):\n ...\n \n def set_ontology(self, ontology, edited_classes: Optional[Any] = ...):\n self.ontology = ...\n self.edited_classes = ...\n self.last_undoables = ...\n self.edited_instancess = ...\n \n def add_tab_for_class(self, Class):\n ...\n \n def on_save(self, *args):\n self.last_undoables = ...\n \n def on_save_as(self, *args):\n ...\n \n\n\n", "step-ids": [ 13, 16, 20, 23, 25 ] }
[ 13, 16, 20, 23, 25 ]
from tqdm import tqdm
import os
import pandas as pd
import pickle
import numpy as np

def inv_list(l, start=0):
    d = {}
    for i in range(len(l)):
        d[l[i]] = i+start
    return d

raw_data_path = '/home/reddy/sindhu/datasets/physionet_2012/'
def read_dataset(d):
    ts = []
    pbar = tqdm(os.listdir(raw_data_path+'/set-'+d), desc='Reading time series set '+d)
    for f in pbar:
        data = pd.read_csv(raw_data_path+'/set-'+d+'/'+f).iloc[1:]
        data = data.loc[data.Parameter.notna()]
        if len(data)<=5:
            continue
        data = data.loc[data.Value>=0] # neg Value indicates missingness.
        data['RecordID'] = f[:-4]
        ts.append(data)
    ts = pd.concat(ts)
    return ts

ts = pd.concat((read_dataset('a'), read_dataset('b'), read_dataset('c')))
ts.Time = ts.Time.apply(lambda x:int(x[:2])+int(x[3:])/60) # No. of hours since admission.
ts.rename(columns={'Time':'hour', 'Parameter':'variable', 'Value':'value'}, inplace=True)
oc_a = pd.read_csv(raw_data_path+'/Outcomes-a.txt', usecols=['RecordID', 'Length_of_stay', 'In-hospital_death'])
oc_a['subset'] = 'a'
oc_b = pd.read_csv(raw_data_path+'/Outcomes-b.txt', usecols=['RecordID', 'Length_of_stay', 'In-hospital_death'])
oc_b['subset'] = 'b'
oc_c = pd.read_csv(raw_data_path+'/Outcomes-c.txt', usecols=['RecordID', 'Length_of_stay', 'In-hospital_death'])
oc_c['subset'] = 'c'
oc = pd.concat((oc_a,oc_b,oc_c))
oc.RecordID = oc.RecordID.astype(str)
oc.rename(columns={'Length_of_stay':'length_of_stay', 'In-hospital_death':'in_hospital_mortality'}, inplace=True)
rec_ids = sorted(list(ts.RecordID.unique()))
rid_to_ind = inv_list(rec_ids)
oc = oc.loc[oc.RecordID.isin(rec_ids)]
ts['ts_ind'] = ts.RecordID.map(rid_to_ind)
oc['ts_ind'] = oc.RecordID.map(rid_to_ind)
ts.drop(columns='RecordID', inplace=True)
oc.drop(columns='RecordID', inplace=True)

# Drop duplicates.
ts = ts.drop_duplicates()

# Convert categorical to numeric.
ii = (ts.variable=='ICUType')
for val in [4,3,2,1]:
    kk = ii&(ts.value==val)
    ts.loc[kk, 'variable'] = 'ICUType_'+str(val)
ts.loc[ii, 'value'] = 1

# Normalize data except Age, Gender, Height, ICUType.
means_stds = ts.groupby('variable').agg({'value':['mean', 'std']})
means_stds.columns = [col[1] for col in means_stds.columns]
means_stds.loc[means_stds['std']==0, 'std'] = 1
ts = ts.merge(means_stds.reset_index(), on='variable', how='left')
ii = ts.variable.apply(lambda x:not(x.startswith('ICUType')))&(~ts.variable.isin(['Age', 'Gender', 'Height']))
ts.loc[ii, 'value'] = (ts.loc[ii, 'value']-ts.loc[ii, 'mean'])/ts.loc[ii, 'std']

# Generate split.
train_valid_ind = np.array(oc.loc[oc.subset!='a'].ts_ind)
np.random.seed(123)
np.random.shuffle(train_valid_ind)
bp = int(0.8*len(train_valid_ind))
train_ind = train_valid_ind[:bp]
valid_ind = train_valid_ind[bp:]
test_ind = np.array(oc.loc[oc.subset=='a'].ts_ind)
oc.drop(columns='subset', inplace=True)

# Store data.
pickle.dump([ts, oc, train_ind, valid_ind, test_ind], open('physionet_2012_preprocessed.pkl','wb'))
normal
{ "blob_id": "3e07a2a2d0a810c016720fa41d71d0771cbccfef", "index": 626, "step-1": "<mask token>\n\n\ndef inv_list(l, start=0):\n d = {}\n for i in range(len(l)):\n d[l[i]] = i + start\n return d\n\n\n<mask token>\n\n\ndef read_dataset(d):\n ts = []\n pbar = tqdm(os.listdir(raw_data_path + '/set-' + d), desc=\n 'Reading time series set ' + d)\n for f in pbar:\n data = pd.read_csv(raw_data_path + '/set-' + d + '/' + f).iloc[1:]\n data = data.loc[data.Parameter.notna()]\n if len(data) <= 5:\n continue\n data = data.loc[data.Value >= 0]\n data['RecordID'] = f[:-4]\n ts.append(data)\n ts = pd.concat(ts)\n return ts\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef inv_list(l, start=0):\n d = {}\n for i in range(len(l)):\n d[l[i]] = i + start\n return d\n\n\n<mask token>\n\n\ndef read_dataset(d):\n ts = []\n pbar = tqdm(os.listdir(raw_data_path + '/set-' + d), desc=\n 'Reading time series set ' + d)\n for f in pbar:\n data = pd.read_csv(raw_data_path + '/set-' + d + '/' + f).iloc[1:]\n data = data.loc[data.Parameter.notna()]\n if len(data) <= 5:\n continue\n data = data.loc[data.Value >= 0]\n data['RecordID'] = f[:-4]\n ts.append(data)\n ts = pd.concat(ts)\n return ts\n\n\n<mask token>\nts.rename(columns={'Time': 'hour', 'Parameter': 'variable', 'Value':\n 'value'}, inplace=True)\n<mask token>\noc.rename(columns={'Length_of_stay': 'length_of_stay', 'In-hospital_death':\n 'in_hospital_mortality'}, inplace=True)\n<mask token>\nts.drop(columns='RecordID', inplace=True)\noc.drop(columns='RecordID', inplace=True)\n<mask token>\nfor val in [4, 3, 2, 1]:\n kk = ii & (ts.value == val)\n ts.loc[kk, 'variable'] = 'ICUType_' + str(val)\n<mask token>\nnp.random.seed(123)\nnp.random.shuffle(train_valid_ind)\n<mask token>\noc.drop(columns='subset', inplace=True)\npickle.dump([ts, oc, train_ind, valid_ind, test_ind], open(\n 'physionet_2012_preprocessed.pkl', 'wb'))\n", "step-3": "<mask token>\n\n\ndef inv_list(l, start=0):\n d = {}\n for i in range(len(l)):\n d[l[i]] = i + start\n return d\n\n\nraw_data_path = '/home/reddy/sindhu/datasets/physionet_2012/'\n\n\ndef read_dataset(d):\n ts = []\n pbar = tqdm(os.listdir(raw_data_path + '/set-' + d), desc=\n 'Reading time series set ' + d)\n for f in pbar:\n data = pd.read_csv(raw_data_path + '/set-' + d + '/' + f).iloc[1:]\n data = data.loc[data.Parameter.notna()]\n if len(data) <= 5:\n continue\n data = data.loc[data.Value >= 0]\n data['RecordID'] = f[:-4]\n ts.append(data)\n ts = pd.concat(ts)\n return ts\n\n\nts = pd.concat((read_dataset('a'), read_dataset('b'), read_dataset('c')))\nts.Time = ts.Time.apply(lambda x: int(x[:2]) + int(x[3:]) / 60)\nts.rename(columns={'Time': 'hour', 'Parameter': 'variable', 'Value':\n 'value'}, inplace=True)\noc_a = pd.read_csv(raw_data_path + '/Outcomes-a.txt', usecols=['RecordID',\n 'Length_of_stay', 'In-hospital_death'])\noc_a['subset'] = 'a'\noc_b = pd.read_csv(raw_data_path + '/Outcomes-b.txt', usecols=['RecordID',\n 'Length_of_stay', 'In-hospital_death'])\noc_b['subset'] = 'b'\noc_c = pd.read_csv(raw_data_path + '/Outcomes-c.txt', usecols=['RecordID',\n 'Length_of_stay', 'In-hospital_death'])\noc_c['subset'] = 'c'\noc = pd.concat((oc_a, oc_b, oc_c))\noc.RecordID = oc.RecordID.astype(str)\noc.rename(columns={'Length_of_stay': 'length_of_stay', 'In-hospital_death':\n 'in_hospital_mortality'}, inplace=True)\nrec_ids = sorted(list(ts.RecordID.unique()))\nrid_to_ind = inv_list(rec_ids)\noc = oc.loc[oc.RecordID.isin(rec_ids)]\nts['ts_ind'] = ts.RecordID.map(rid_to_ind)\noc['ts_ind'] = 
oc.RecordID.map(rid_to_ind)\nts.drop(columns='RecordID', inplace=True)\noc.drop(columns='RecordID', inplace=True)\nts = ts.drop_duplicates()\nii = ts.variable == 'ICUType'\nfor val in [4, 3, 2, 1]:\n kk = ii & (ts.value == val)\n ts.loc[kk, 'variable'] = 'ICUType_' + str(val)\nts.loc[ii, 'value'] = 1\nmeans_stds = ts.groupby('variable').agg({'value': ['mean', 'std']})\nmeans_stds.columns = [col[1] for col in means_stds.columns]\nmeans_stds.loc[means_stds['std'] == 0, 'std'] = 1\nts = ts.merge(means_stds.reset_index(), on='variable', how='left')\nii = ts.variable.apply(lambda x: not x.startswith('ICUType')\n ) & ~ts.variable.isin(['Age', 'Gender', 'Height'])\nts.loc[ii, 'value'] = (ts.loc[ii, 'value'] - ts.loc[ii, 'mean']) / ts.loc[\n ii, 'std']\ntrain_valid_ind = np.array(oc.loc[oc.subset != 'a'].ts_ind)\nnp.random.seed(123)\nnp.random.shuffle(train_valid_ind)\nbp = int(0.8 * len(train_valid_ind))\ntrain_ind = train_valid_ind[:bp]\nvalid_ind = train_valid_ind[bp:]\ntest_ind = np.array(oc.loc[oc.subset == 'a'].ts_ind)\noc.drop(columns='subset', inplace=True)\npickle.dump([ts, oc, train_ind, valid_ind, test_ind], open(\n 'physionet_2012_preprocessed.pkl', 'wb'))\n", "step-4": "from tqdm import tqdm\nimport os\nimport pandas as pd\nimport pickle\nimport numpy as np\n\n\ndef inv_list(l, start=0):\n d = {}\n for i in range(len(l)):\n d[l[i]] = i + start\n return d\n\n\nraw_data_path = '/home/reddy/sindhu/datasets/physionet_2012/'\n\n\ndef read_dataset(d):\n ts = []\n pbar = tqdm(os.listdir(raw_data_path + '/set-' + d), desc=\n 'Reading time series set ' + d)\n for f in pbar:\n data = pd.read_csv(raw_data_path + '/set-' + d + '/' + f).iloc[1:]\n data = data.loc[data.Parameter.notna()]\n if len(data) <= 5:\n continue\n data = data.loc[data.Value >= 0]\n data['RecordID'] = f[:-4]\n ts.append(data)\n ts = pd.concat(ts)\n return ts\n\n\nts = pd.concat((read_dataset('a'), read_dataset('b'), read_dataset('c')))\nts.Time = ts.Time.apply(lambda x: int(x[:2]) + int(x[3:]) / 60)\nts.rename(columns={'Time': 'hour', 'Parameter': 'variable', 'Value':\n 'value'}, inplace=True)\noc_a = pd.read_csv(raw_data_path + '/Outcomes-a.txt', usecols=['RecordID',\n 'Length_of_stay', 'In-hospital_death'])\noc_a['subset'] = 'a'\noc_b = pd.read_csv(raw_data_path + '/Outcomes-b.txt', usecols=['RecordID',\n 'Length_of_stay', 'In-hospital_death'])\noc_b['subset'] = 'b'\noc_c = pd.read_csv(raw_data_path + '/Outcomes-c.txt', usecols=['RecordID',\n 'Length_of_stay', 'In-hospital_death'])\noc_c['subset'] = 'c'\noc = pd.concat((oc_a, oc_b, oc_c))\noc.RecordID = oc.RecordID.astype(str)\noc.rename(columns={'Length_of_stay': 'length_of_stay', 'In-hospital_death':\n 'in_hospital_mortality'}, inplace=True)\nrec_ids = sorted(list(ts.RecordID.unique()))\nrid_to_ind = inv_list(rec_ids)\noc = oc.loc[oc.RecordID.isin(rec_ids)]\nts['ts_ind'] = ts.RecordID.map(rid_to_ind)\noc['ts_ind'] = oc.RecordID.map(rid_to_ind)\nts.drop(columns='RecordID', inplace=True)\noc.drop(columns='RecordID', inplace=True)\nts = ts.drop_duplicates()\nii = ts.variable == 'ICUType'\nfor val in [4, 3, 2, 1]:\n kk = ii & (ts.value == val)\n ts.loc[kk, 'variable'] = 'ICUType_' + str(val)\nts.loc[ii, 'value'] = 1\nmeans_stds = ts.groupby('variable').agg({'value': ['mean', 'std']})\nmeans_stds.columns = [col[1] for col in means_stds.columns]\nmeans_stds.loc[means_stds['std'] == 0, 'std'] = 1\nts = ts.merge(means_stds.reset_index(), on='variable', how='left')\nii = ts.variable.apply(lambda x: not x.startswith('ICUType')\n ) & ~ts.variable.isin(['Age', 'Gender', 
'Height'])\nts.loc[ii, 'value'] = (ts.loc[ii, 'value'] - ts.loc[ii, 'mean']) / ts.loc[\n ii, 'std']\ntrain_valid_ind = np.array(oc.loc[oc.subset != 'a'].ts_ind)\nnp.random.seed(123)\nnp.random.shuffle(train_valid_ind)\nbp = int(0.8 * len(train_valid_ind))\ntrain_ind = train_valid_ind[:bp]\nvalid_ind = train_valid_ind[bp:]\ntest_ind = np.array(oc.loc[oc.subset == 'a'].ts_ind)\noc.drop(columns='subset', inplace=True)\npickle.dump([ts, oc, train_ind, valid_ind, test_ind], open(\n 'physionet_2012_preprocessed.pkl', 'wb'))\n", "step-5": "from tqdm import tqdm\nimport os\nimport pandas as pd\nimport pickle\nimport numpy as np\n\ndef inv_list(l, start=0):\n d = {}\n for i in range(len(l)):\n d[l[i]] = i+start\n return d\n\nraw_data_path = '/home/reddy/sindhu/datasets/physionet_2012/'\ndef read_dataset(d):\n ts = []\n pbar = tqdm(os.listdir(raw_data_path+'/set-'+d), desc='Reading time series set '+d)\n for f in pbar:\n data = pd.read_csv(raw_data_path+'/set-'+d+'/'+f).iloc[1:]\n data = data.loc[data.Parameter.notna()]\n if len(data)<=5:\n continue\n data = data.loc[data.Value>=0] # neg Value indicates missingness.\n data['RecordID'] = f[:-4]\n ts.append(data)\n ts = pd.concat(ts)\n return ts\n\nts = pd.concat((read_dataset('a'), read_dataset('b'), read_dataset('c')))\nts.Time = ts.Time.apply(lambda x:int(x[:2])+int(x[3:])/60) # No. of hours since admission.\nts.rename(columns={'Time':'hour', 'Parameter':'variable', 'Value':'value'}, inplace=True)\noc_a = pd.read_csv(raw_data_path+'/Outcomes-a.txt', usecols=['RecordID', 'Length_of_stay', 'In-hospital_death'])\noc_a['subset'] = 'a'\noc_b = pd.read_csv(raw_data_path+'/Outcomes-b.txt', usecols=['RecordID', 'Length_of_stay', 'In-hospital_death'])\noc_b['subset'] = 'b'\noc_c = pd.read_csv(raw_data_path+'/Outcomes-c.txt', usecols=['RecordID', 'Length_of_stay', 'In-hospital_death'])\noc_c['subset'] = 'c'\noc = pd.concat((oc_a,oc_b,oc_c))\noc.RecordID = oc.RecordID.astype(str)\noc.rename(columns={'Length_of_stay':'length_of_stay', 'In-hospital_death':'in_hospital_mortality'}, inplace=True)\nrec_ids = sorted(list(ts.RecordID.unique()))\nrid_to_ind = inv_list(rec_ids)\noc = oc.loc[oc.RecordID.isin(rec_ids)]\nts['ts_ind'] = ts.RecordID.map(rid_to_ind)\noc['ts_ind'] = oc.RecordID.map(rid_to_ind)\nts.drop(columns='RecordID', inplace=True)\noc.drop(columns='RecordID', inplace=True)\n\n# Drop duplicates.\nts = ts.drop_duplicates()\n\n# Convert categorical to numeric.\nii = (ts.variable=='ICUType')\nfor val in [4,3,2,1]:\n kk = ii&(ts.value==val)\n ts.loc[kk, 'variable'] = 'ICUType_'+str(val)\nts.loc[ii, 'value'] = 1\n \n# Normalize data except Age, Gender, Height, ICUType.\nmeans_stds = ts.groupby('variable').agg({'value':['mean', 'std']})\nmeans_stds.columns = [col[1] for col in means_stds.columns]\nmeans_stds.loc[means_stds['std']==0, 'std'] = 1\nts = ts.merge(means_stds.reset_index(), on='variable', how='left')\nii = ts.variable.apply(lambda x:not(x.startswith('ICUType')))&(~ts.variable.isin(['Age', 'Gender', 'Height']))\nts.loc[ii, 'value'] = (ts.loc[ii, 'value']-ts.loc[ii, 'mean'])/ts.loc[ii, 'std']\n\n# Generate split.\ntrain_valid_ind = np.array(oc.loc[oc.subset!='a'].ts_ind)\nnp.random.seed(123)\nnp.random.shuffle(train_valid_ind)\nbp = int(0.8*len(train_valid_ind))\ntrain_ind = train_valid_ind[:bp]\nvalid_ind = train_valid_ind[bp:]\ntest_ind = np.array(oc.loc[oc.subset=='a'].ts_ind)\noc.drop(columns='subset', inplace=True)\n\n# Store data.\npickle.dump([ts, oc, train_ind, valid_ind, test_ind], open('physionet_2012_preprocessed.pkl','wb'))\n", "step-ids": 
[ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
from unittest.case import TestCase
from datetime import datetime
from src.main.domain.Cohort import Cohort
from src.main.domain.Group import Group
from src.main.util.TimeFormatter import TimeFormatter

__author__ = 'continueing'


class CohortTest(TestCase):

    def testAnalyzeNewGroups(self):
        cohort = Cohort(aStartDate=TimeFormatter.toDatetime('2014-05-05 00:00:00'), aEndDate=TimeFormatter.toDatetime('2014-06-01 23:59:59'), aInterval = 7)
        groups = cohort.groups

        group = Group(anId=1, aStartDate=TimeFormatter.toDatetime('2014-05-05 00:00:00'), anEndDate=TimeFormatter.toDatetime('2014-05-11 23:59:59'), aNickname="5월 1째 주")
        self.assertEqual(groups[0].period, group.period)
        group = Group(anId=2, aStartDate=TimeFormatter.toDatetime('2014-05-12 00:00:00'), anEndDate=TimeFormatter.toDatetime('2014-05-18 23:59:59'), aNickname="5월 2째 주")
        self.assertEqual(groups[1].period, group.period)
        group = Group(anId=3, aStartDate=TimeFormatter.toDatetime('2014-05-19 00:00:00'), anEndDate=TimeFormatter.toDatetime('2014-05-25 23:59:59'), aNickname="5월 3째 주")
        self.assertEqual(groups[2].period, group.period)
        group = Group(anId=3, aStartDate=TimeFormatter.toDatetime('2014-05-26 00:00:00'), anEndDate=TimeFormatter.toDatetime('2014-06-01 23:59:59'), aNickname="5월 4째 주")
        self.assertEqual(groups[3].period, group.period)
        self.assertEqual(groups.__len__(),4)

    def testSnapshots(self):
        self.fail("should test this! but take too long network time")
normal
{ "blob_id": "f12bdfc054e62dc244a95daad9682790c880f20d", "index": 5367, "step-1": "<mask token>\n\n\nclass CohortTest(TestCase):\n\n def testAnalyzeNewGroups(self):\n cohort = Cohort(aStartDate=TimeFormatter.toDatetime(\n '2014-05-05 00:00:00'), aEndDate=TimeFormatter.toDatetime(\n '2014-06-01 23:59:59'), aInterval=7)\n groups = cohort.groups\n group = Group(anId=1, aStartDate=TimeFormatter.toDatetime(\n '2014-05-05 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-05-11 23:59:59'), aNickname='5월 1째 주')\n self.assertEqual(groups[0].period, group.period)\n group = Group(anId=2, aStartDate=TimeFormatter.toDatetime(\n '2014-05-12 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-05-18 23:59:59'), aNickname='5월 2째 주')\n self.assertEqual(groups[1].period, group.period)\n group = Group(anId=3, aStartDate=TimeFormatter.toDatetime(\n '2014-05-19 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-05-25 23:59:59'), aNickname='5월 3째 주')\n self.assertEqual(groups[2].period, group.period)\n group = Group(anId=3, aStartDate=TimeFormatter.toDatetime(\n '2014-05-26 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-06-01 23:59:59'), aNickname='5월 4째 주')\n self.assertEqual(groups[3].period, group.period)\n self.assertEqual(groups.__len__(), 4)\n <mask token>\n", "step-2": "<mask token>\n\n\nclass CohortTest(TestCase):\n\n def testAnalyzeNewGroups(self):\n cohort = Cohort(aStartDate=TimeFormatter.toDatetime(\n '2014-05-05 00:00:00'), aEndDate=TimeFormatter.toDatetime(\n '2014-06-01 23:59:59'), aInterval=7)\n groups = cohort.groups\n group = Group(anId=1, aStartDate=TimeFormatter.toDatetime(\n '2014-05-05 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-05-11 23:59:59'), aNickname='5월 1째 주')\n self.assertEqual(groups[0].period, group.period)\n group = Group(anId=2, aStartDate=TimeFormatter.toDatetime(\n '2014-05-12 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-05-18 23:59:59'), aNickname='5월 2째 주')\n self.assertEqual(groups[1].period, group.period)\n group = Group(anId=3, aStartDate=TimeFormatter.toDatetime(\n '2014-05-19 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-05-25 23:59:59'), aNickname='5월 3째 주')\n self.assertEqual(groups[2].period, group.period)\n group = Group(anId=3, aStartDate=TimeFormatter.toDatetime(\n '2014-05-26 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-06-01 23:59:59'), aNickname='5월 4째 주')\n self.assertEqual(groups[3].period, group.period)\n self.assertEqual(groups.__len__(), 4)\n\n def testSnapshots(self):\n self.fail('should test this! 
but take too long network time')\n", "step-3": "<mask token>\n__author__ = 'continueing'\n\n\nclass CohortTest(TestCase):\n\n def testAnalyzeNewGroups(self):\n cohort = Cohort(aStartDate=TimeFormatter.toDatetime(\n '2014-05-05 00:00:00'), aEndDate=TimeFormatter.toDatetime(\n '2014-06-01 23:59:59'), aInterval=7)\n groups = cohort.groups\n group = Group(anId=1, aStartDate=TimeFormatter.toDatetime(\n '2014-05-05 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-05-11 23:59:59'), aNickname='5월 1째 주')\n self.assertEqual(groups[0].period, group.period)\n group = Group(anId=2, aStartDate=TimeFormatter.toDatetime(\n '2014-05-12 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-05-18 23:59:59'), aNickname='5월 2째 주')\n self.assertEqual(groups[1].period, group.period)\n group = Group(anId=3, aStartDate=TimeFormatter.toDatetime(\n '2014-05-19 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-05-25 23:59:59'), aNickname='5월 3째 주')\n self.assertEqual(groups[2].period, group.period)\n group = Group(anId=3, aStartDate=TimeFormatter.toDatetime(\n '2014-05-26 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-06-01 23:59:59'), aNickname='5월 4째 주')\n self.assertEqual(groups[3].period, group.period)\n self.assertEqual(groups.__len__(), 4)\n\n def testSnapshots(self):\n self.fail('should test this! but take too long network time')\n", "step-4": "from unittest.case import TestCase\nfrom datetime import datetime\nfrom src.main.domain.Cohort import Cohort\nfrom src.main.domain.Group import Group\nfrom src.main.util.TimeFormatter import TimeFormatter\n__author__ = 'continueing'\n\n\nclass CohortTest(TestCase):\n\n def testAnalyzeNewGroups(self):\n cohort = Cohort(aStartDate=TimeFormatter.toDatetime(\n '2014-05-05 00:00:00'), aEndDate=TimeFormatter.toDatetime(\n '2014-06-01 23:59:59'), aInterval=7)\n groups = cohort.groups\n group = Group(anId=1, aStartDate=TimeFormatter.toDatetime(\n '2014-05-05 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-05-11 23:59:59'), aNickname='5월 1째 주')\n self.assertEqual(groups[0].period, group.period)\n group = Group(anId=2, aStartDate=TimeFormatter.toDatetime(\n '2014-05-12 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-05-18 23:59:59'), aNickname='5월 2째 주')\n self.assertEqual(groups[1].period, group.period)\n group = Group(anId=3, aStartDate=TimeFormatter.toDatetime(\n '2014-05-19 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-05-25 23:59:59'), aNickname='5월 3째 주')\n self.assertEqual(groups[2].period, group.period)\n group = Group(anId=3, aStartDate=TimeFormatter.toDatetime(\n '2014-05-26 00:00:00'), anEndDate=TimeFormatter.toDatetime(\n '2014-06-01 23:59:59'), aNickname='5월 4째 주')\n self.assertEqual(groups[3].period, group.period)\n self.assertEqual(groups.__len__(), 4)\n\n def testSnapshots(self):\n self.fail('should test this! 
but take too long network time')\n", "step-5": "from unittest.case import TestCase\nfrom datetime import datetime\nfrom src.main.domain.Cohort import Cohort\nfrom src.main.domain.Group import Group\nfrom src.main.util.TimeFormatter import TimeFormatter\n\n__author__ = 'continueing'\n\n\nclass CohortTest(TestCase):\n\n def testAnalyzeNewGroups(self):\n cohort = Cohort(aStartDate=TimeFormatter.toDatetime('2014-05-05 00:00:00'), aEndDate=TimeFormatter.toDatetime('2014-06-01 23:59:59'), aInterval = 7)\n groups = cohort.groups\n\n group = Group(anId=1, aStartDate=TimeFormatter.toDatetime('2014-05-05 00:00:00'), anEndDate=TimeFormatter.toDatetime('2014-05-11 23:59:59'), aNickname=\"5월 1째 주\")\n self.assertEqual(groups[0].period, group.period)\n group = Group(anId=2, aStartDate=TimeFormatter.toDatetime('2014-05-12 00:00:00'), anEndDate=TimeFormatter.toDatetime('2014-05-18 23:59:59'), aNickname=\"5월 2째 주\")\n self.assertEqual(groups[1].period, group.period)\n group = Group(anId=3, aStartDate=TimeFormatter.toDatetime('2014-05-19 00:00:00'), anEndDate=TimeFormatter.toDatetime('2014-05-25 23:59:59'), aNickname=\"5월 3째 주\")\n self.assertEqual(groups[2].period, group.period)\n group = Group(anId=3, aStartDate=TimeFormatter.toDatetime('2014-05-26 00:00:00'), anEndDate=TimeFormatter.toDatetime('2014-06-01 23:59:59'), aNickname=\"5월 4째 주\")\n self.assertEqual(groups[3].period, group.period)\n self.assertEqual(groups.__len__(),4)\n\n def testSnapshots(self):\n self.fail(\"should test this! but take too long network time\")\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
# Generated by Django 3.0.8 on 2021-03-25 13:47

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('Asha', '0005_baby'),
    ]

    operations = [
        migrations.AlterField(
            model_name='baby',
            name='Auth_Id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Asha.BasicDetails'),
        ),
    ]
normal
{ "blob_id": "e14b8d0f85042ceda955022bee08b3b3b4c2361d", "index": 7367, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Asha', '0005_baby')]\n operations = [migrations.AlterField(model_name='baby', name='Auth_Id',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,\n to='Asha.BasicDetails'))]\n", "step-4": "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Asha', '0005_baby')]\n operations = [migrations.AlterField(model_name='baby', name='Auth_Id',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,\n to='Asha.BasicDetails'))]\n", "step-5": "# Generated by Django 3.0.8 on 2021-03-25 13:47\r\n\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('Asha', '0005_baby'),\r\n ]\r\n\r\n operations = [\r\n migrations.AlterField(\r\n model_name='baby',\r\n name='Auth_Id',\r\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Asha.BasicDetails'),\r\n ),\r\n ]\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|>


@six.add_metaclass(abc.ABCMeta)
class Hal:

    def __init__(self, configpath):
        self.configpath = configpath
        dir_path = os.path.join(os.path.dirname(__file__), 'libraries')
        lib_files = [f for f in os.listdir(dir_path) if os.path.isfile(os.
            path.join(dir_path, f)) and f.lower().endswith('.py')]
        self.responses = []
        self.libraries = []
        for f in lib_files:
            try:
                module_name = 'hal.libraries.' + f[:-3]
                module = importlib.import_module(module_name)
                for name, obj in inspect.getmembers(module):
                    if inspect.isclass(obj) and issubclass(obj, HalLibrary
                        ) and name != 'HalLibrary' and not inspect.isabstract(
                        obj):
                        self.libraries.append(obj)
            except:
                self.add_response('Error loading library {}'.format(f))
                raise
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>

    @abc.abstractmethod
    def display_help(self):
        """ Present some information to the user """
        pass
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


@six.add_metaclass(abc.ABCMeta)
class Hal:

    def __init__(self, configpath):
        self.configpath = configpath
        dir_path = os.path.join(os.path.dirname(__file__), 'libraries')
        lib_files = [f for f in os.listdir(dir_path) if os.path.isfile(os.
            path.join(dir_path, f)) and f.lower().endswith('.py')]
        self.responses = []
        self.libraries = []
        for f in lib_files:
            try:
                module_name = 'hal.libraries.' + f[:-3]
                module = importlib.import_module(module_name)
                for name, obj in inspect.getmembers(module):
                    if inspect.isclass(obj) and issubclass(obj, HalLibrary
                        ) and name != 'HalLibrary' and not inspect.isabstract(
                        obj):
                        self.libraries.append(obj)
            except:
                self.add_response('Error loading library {}'.format(f))
                raise
    <|reserved_special_token_0|>

    def say_all(self):
        response = '\n'.join(self.responses)
        return response

    @abc.abstractmethod
    def display_help(self):
        """ Present some information to the user """
        pass

    def greet(self):
        hour = datetime.datetime.now().hour
        greeting = 'Good Evening'
        if hour < 12:
            greeting = 'Good morning'
        elif 12 <= hour < 18:
            greeting = 'Good afternoon'
        self.add_response('{}. What can I help you with?'.format(greeting))
    <|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


@six.add_metaclass(abc.ABCMeta)
class Hal:

    def __init__(self, configpath):
        self.configpath = configpath
        dir_path = os.path.join(os.path.dirname(__file__), 'libraries')
        lib_files = [f for f in os.listdir(dir_path) if os.path.isfile(os.
            path.join(dir_path, f)) and f.lower().endswith('.py')]
        self.responses = []
        self.libraries = []
        for f in lib_files:
            try:
                module_name = 'hal.libraries.' + f[:-3]
                module = importlib.import_module(module_name)
                for name, obj in inspect.getmembers(module):
                    if inspect.isclass(obj) and issubclass(obj, HalLibrary
                        ) and name != 'HalLibrary' and not inspect.isabstract(
                        obj):
                        self.libraries.append(obj)
            except:
                self.add_response('Error loading library {}'.format(f))
                raise

    def add_response(self, text):
        self.responses.append(text)

    def say_all(self):
        response = '\n'.join(self.responses)
        return response

    @abc.abstractmethod
    def display_help(self):
        """ Present some information to the user """
        pass

    def greet(self):
        hour = datetime.datetime.now().hour
        greeting = 'Good Evening'
        if hour < 12:
            greeting = 'Good morning'
        elif 12 <= hour < 18:
            greeting = 'Good afternoon'
        self.add_response('{}. What can I help you with?'.format(greeting))
    <|reserved_special_token_0|>
flexible
{ "blob_id": "81dec10686b521dc9400a209caabc1601efd2a88", "index": 540, "step-1": "<mask token>\n\n\[email protected]_metaclass(abc.ABCMeta)\nclass Hal:\n\n def __init__(self, configpath):\n self.configpath = configpath\n dir_path = os.path.join(os.path.dirname(__file__), 'libraries')\n lib_files = [f for f in os.listdir(dir_path) if os.path.isfile(os.\n path.join(dir_path, f)) and f.lower().endswith('.py')]\n self.responses = []\n self.libraries = []\n for f in lib_files:\n try:\n module_name = 'hal.libraries.' + f[:-3]\n module = importlib.import_module(module_name)\n for name, obj in inspect.getmembers(module):\n if inspect.isclass(obj) and issubclass(obj, HalLibrary\n ) and name != 'HalLibrary' and not inspect.isabstract(\n obj):\n self.libraries.append(obj)\n except:\n self.add_response('Error loading library {}'.format(f))\n raise\n <mask token>\n <mask token>\n\n @abc.abstractmethod\n def display_help(self):\n \"\"\" Present some information to the user \"\"\"\n pass\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\[email protected]_metaclass(abc.ABCMeta)\nclass Hal:\n\n def __init__(self, configpath):\n self.configpath = configpath\n dir_path = os.path.join(os.path.dirname(__file__), 'libraries')\n lib_files = [f for f in os.listdir(dir_path) if os.path.isfile(os.\n path.join(dir_path, f)) and f.lower().endswith('.py')]\n self.responses = []\n self.libraries = []\n for f in lib_files:\n try:\n module_name = 'hal.libraries.' + f[:-3]\n module = importlib.import_module(module_name)\n for name, obj in inspect.getmembers(module):\n if inspect.isclass(obj) and issubclass(obj, HalLibrary\n ) and name != 'HalLibrary' and not inspect.isabstract(\n obj):\n self.libraries.append(obj)\n except:\n self.add_response('Error loading library {}'.format(f))\n raise\n <mask token>\n\n def say_all(self):\n response = '\\n'.join(self.responses)\n return response\n\n @abc.abstractmethod\n def display_help(self):\n \"\"\" Present some information to the user \"\"\"\n pass\n\n def greet(self):\n hour = datetime.datetime.now().hour\n greeting = 'Good Evening'\n if hour < 12:\n greeting = 'Good morning'\n elif 12 <= hour < 18:\n greeting = 'Good afternoon'\n self.add_response('{}. What can I help you with?'.format(greeting))\n <mask token>\n", "step-3": "<mask token>\n\n\[email protected]_metaclass(abc.ABCMeta)\nclass Hal:\n\n def __init__(self, configpath):\n self.configpath = configpath\n dir_path = os.path.join(os.path.dirname(__file__), 'libraries')\n lib_files = [f for f in os.listdir(dir_path) if os.path.isfile(os.\n path.join(dir_path, f)) and f.lower().endswith('.py')]\n self.responses = []\n self.libraries = []\n for f in lib_files:\n try:\n module_name = 'hal.libraries.' + f[:-3]\n module = importlib.import_module(module_name)\n for name, obj in inspect.getmembers(module):\n if inspect.isclass(obj) and issubclass(obj, HalLibrary\n ) and name != 'HalLibrary' and not inspect.isabstract(\n obj):\n self.libraries.append(obj)\n except:\n self.add_response('Error loading library {}'.format(f))\n raise\n\n def add_response(self, text):\n self.responses.append(text)\n\n def say_all(self):\n response = '\\n'.join(self.responses)\n return response\n\n @abc.abstractmethod\n def display_help(self):\n \"\"\" Present some information to the user \"\"\"\n pass\n\n def greet(self):\n hour = datetime.datetime.now().hour\n greeting = 'Good Evening'\n if hour < 12:\n greeting = 'Good morning'\n elif 12 <= hour < 18:\n greeting = 'Good afternoon'\n self.add_response('{}. 
What can I help you with?'.format(greeting))\n <mask token>\n", "step-4": "import abc\nimport datetime\nimport importlib\nimport inspect\nimport os\nimport re\nimport six\nfrom .library import HalLibrary\n\n\[email protected]_metaclass(abc.ABCMeta)\nclass Hal:\n\n def __init__(self, configpath):\n self.configpath = configpath\n dir_path = os.path.join(os.path.dirname(__file__), 'libraries')\n lib_files = [f for f in os.listdir(dir_path) if os.path.isfile(os.\n path.join(dir_path, f)) and f.lower().endswith('.py')]\n self.responses = []\n self.libraries = []\n for f in lib_files:\n try:\n module_name = 'hal.libraries.' + f[:-3]\n module = importlib.import_module(module_name)\n for name, obj in inspect.getmembers(module):\n if inspect.isclass(obj) and issubclass(obj, HalLibrary\n ) and name != 'HalLibrary' and not inspect.isabstract(\n obj):\n self.libraries.append(obj)\n except:\n self.add_response('Error loading library {}'.format(f))\n raise\n\n def add_response(self, text):\n self.responses.append(text)\n\n def say_all(self):\n response = '\\n'.join(self.responses)\n return response\n\n @abc.abstractmethod\n def display_help(self):\n \"\"\" Present some information to the user \"\"\"\n pass\n\n def greet(self):\n hour = datetime.datetime.now().hour\n greeting = 'Good Evening'\n if hour < 12:\n greeting = 'Good morning'\n elif 12 <= hour < 18:\n greeting = 'Good afternoon'\n self.add_response('{}. What can I help you with?'.format(greeting))\n\n def process(self, command):\n \"\"\"\n Process the command and get response by querying each plugin if required.\n \"\"\"\n self.responses = []\n if len(command) == 0:\n self.greet()\n return self.say_all()\n command = command.strip()\n help_regex = re.compile('help\\\\s+([^\\\\s]+)')\n help_match = help_regex.match(command)\n if help_match:\n keyword = help_match.group(1).lower()\n for lib in self.libraries:\n if keyword in lib.keywords:\n help_content = lib.help()\n self.display_help(help_content)\n return\n matched = False\n for lib in self.libraries:\n lib_obj = lib(command)\n lib_obj.process_input()\n if (lib_obj.status == HalLibrary.SUCCESS or lib_obj.status ==\n HalLibrary.INCOMPLETE):\n matched = True\n lib_obj.process()\n resp = lib_obj.get_response()\n for r in resp:\n self.add_response(r)\n elif lib_obj.status == HalLibrary.ERROR:\n matched = True\n self.add_response('ERROR: ' + lib_obj.get_error())\n else:\n pass\n if not matched:\n self.add_response(\"I don't understand what you're saying.\")\n return self.say_all()\n", "step-5": "# -*- coding: utf-8 -*-\n\nimport abc\nimport datetime\nimport importlib\nimport inspect\nimport os\nimport re\nimport six\n\nfrom .library import HalLibrary\n\n\[email protected]_metaclass(abc.ABCMeta)\nclass Hal():\n\n def __init__(self, configpath):\n self.configpath = configpath\n # Find libraries inside the lib directory\n\n dir_path = os.path.join(os.path.dirname(__file__), \"libraries\")\n lib_files = [f for f in os.listdir(dir_path) if\n os.path.isfile(os.path.join(dir_path, f)) and\n f.lower().endswith(\".py\")\n ]\n\n self.responses = []\n self.libraries = []\n for f in lib_files:\n # Try to load the module\n try:\n module_name = \"hal.libraries.\" + f[:-3]\n module = importlib.import_module(module_name)\n for name, obj in inspect.getmembers(module):\n # Find classes that inherit from HalLibrary\n if inspect.isclass(obj) and issubclass(obj, HalLibrary) and \\\n name != \"HalLibrary\" and not inspect.isabstract(obj):\n self.libraries.append(obj)\n except:\n self.add_response(\"Error loading library 
{}\".format(f))\n raise\n\n def add_response(self, text):\n self.responses.append(text)\n\n def say_all(self):\n response = \"\\n\".join(self.responses)\n return response\n\n @abc.abstractmethod\n def display_help(self):\n \"\"\" Present some information to the user \"\"\"\n pass\n\n\n def greet(self):\n hour = datetime.datetime.now().hour\n\n greeting = \"Good Evening\"\n if hour < 12:\n greeting = 'Good morning'\n elif 12 <= hour < 18:\n greeting = 'Good afternoon'\n\n self.add_response(\"{}. What can I help you with?\".format(greeting))\n\n def process(self, command):\n \"\"\"\n Process the command and get response by querying each plugin if required.\n \"\"\"\n self.responses = []\n if(len(command) == 0):\n self.greet()\n return self.say_all()\n\n # prepare the command\n command = command.strip()\n\n # Some hard coded patterns: If first word is help, activate help\n # moudule\n help_regex = re.compile(\"help\\s+([^\\s]+)\")\n help_match = help_regex.match(command)\n if help_match:\n keyword = help_match.group(1).lower()\n # Try to find libraries with the keyword and print their help\n\n for lib in self.libraries:\n if keyword in lib.keywords:\n # Print the help text\n help_content = lib.help()\n self.display_help(help_content)\n return\n\n matched = False\n\n for lib in self.libraries:\n\n lib_obj = lib(command)\n\n # try to match the command with the library\n lib_obj.process_input()\n\n if lib_obj.status == HalLibrary.SUCCESS or lib_obj.status == HalLibrary.INCOMPLETE:\n\n matched = True\n\n lib_obj.process()\n\n resp = lib_obj.get_response()\n\n for r in resp:\n self.add_response(r)\n\n elif lib_obj.status == HalLibrary.ERROR:\n matched = True\n self.add_response(\"ERROR: \" + lib_obj.get_error())\n else:\n # Failure to match\n pass\n\n if not matched:\n self.add_response(\"I don't understand what you're saying.\")\n\n return self.say_all()\n", "step-ids": [ 3, 5, 6, 8, 9 ] }
[ 3, 5, 6, 8, 9 ]
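A hedged usage sketch for the record above: the Hal class leaves display_help abstract, so a front end has to subclass it. ConsoleHal, the import path and the config location below are assumptions, not part of the record, and the override follows the display_help(help_content) call site in process() rather than the parameterless abstract stub.

from hal import Hal  # assumed import path for the class in the record above


class ConsoleHal(Hal):

    def display_help(self, help_content):
        # process() passes a library's help text here; print it verbatim.
        print(help_content)


bot = ConsoleHal(configpath='~/.hal')  # hypothetical config location
print(bot.process(''))  # an empty command triggers the time-of-day greeting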
HORIZONTAL_TABLE = '\x09'  # tab as str, so it concatenates with str in __repr__ (bytes would raise TypeError on Python 3)


class ReagentInfoItem():
    '''
    This class is defined for a single reagent info unit; from the table's view, it's a cell of the table.
    '''
    def __init__(self, reagent_name, reagent_count):
        self.reagent_name = reagent_name
        self.reagent_count = reagent_count

    def __repr__(self):
        return 'reagent name: ' + self.reagent_name + HORIZONTAL_TABLE +\
               'reagent count: ' + str(self.reagent_count)


class InstrumentReagentInfo():
    '''
    This class is defined for a single instrument; from the table's view, it's a column of the reagent info table.
    '''
    def __init__(self, instr_id, instr_type, time_stamp=None, reagent_info_list=None):
        '''
        Instrument_Id: str
        Instrument_Type: str
        Reagent_Info_List: ReagentInfoItem[]
        '''
        self.instrument_id = instr_id
        self.instrument_type = instr_type
        self.time_stamp = time_stamp
        # avoid a shared mutable default argument
        self.reagent_info_list = reagent_info_list if reagent_info_list is not None else []

    def __repr__(self):
        return 'instrument id: ' + self.instrument_id + HORIZONTAL_TABLE +\
               'instrument type: ' + self.instrument_type + HORIZONTAL_TABLE +\
               'updated timestamp: ' + str(self.time_stamp) + HORIZONTAL_TABLE +\
               '\nreagent inventory info:\n' + '\n'.join(str(item) for item in self.reagent_info_list)


class SystemReagentInfo():
    '''
    Reagent information of the whole system
    '''
    def __init__(self):
        self.system_reagent = []

    def update_instrument_reagent_inventory(self, instrument_reagent_inventory):
        if isinstance(instrument_reagent_inventory, InstrumentReagentInfo):
            last_update = self.get_last_update_timestamp_per_instrument(instrument_reagent_inventory.instrument_id)
            if not last_update or last_update < instrument_reagent_inventory.time_stamp:
                old_record = self.get_instrument_reagent_inventory_item_by_id(instrument_reagent_inventory.instrument_id)
                if old_record:
                    # replace the stale record in place; rebinding the local
                    # name (as the original did) had no effect on the list
                    index = self.system_reagent.index(old_record)
                    self.system_reagent[index] = instrument_reagent_inventory
                else:
                    self.system_reagent.append(instrument_reagent_inventory)

    def get_instrument_reagent_inventory_item_by_id(self, instr_id):
        for item in self.system_reagent:
            if isinstance(item, InstrumentReagentInfo):
                if item.instrument_id == instr_id:
                    return item

    def get_last_update_timestamp_per_instrument(self, instr_id):
        for item in self.system_reagent:
            if isinstance(item, InstrumentReagentInfo):
                if item.instrument_id == instr_id:
                    return item.time_stamp

    def __repr__(self):
        return 'system reagent info:\n' + '\n'.join(str(item) for item in self.system_reagent)


def test01():
    ReagentInfoItem11 = ReagentInfoItem('dai', 12)
    ReagentInfoItem12 = ReagentInfoItem('han', 13)
    ReagentInfoItem13 = ReagentInfoItem('peng', 14)
    ReagentInfoList1 = [ReagentInfoItem11, ReagentInfoItem12, ReagentInfoItem13]

    ReagentInfoItem21 = ReagentInfoItem('I', 32)
    ReagentInfoItem22 = ReagentInfoItem('love', 33)
    ReagentInfoItem23 = ReagentInfoItem('python', 34)
    ReagentInfoList2 = [ReagentInfoItem21, ReagentInfoItem22, ReagentInfoItem23]

    # normal testing: the info below should be updated
    InstrumentInfo1 = InstrumentReagentInfo('5', 'A24', '20160101110909', ReagentInfoList1)
    InstrumentInfo2 = InstrumentReagentInfo('7', 'CEN', '20151212090923', ReagentInfoList2)
    # abnormal testing: the info below should not be updated (older timestamp)
    InstrumentInfo3 = InstrumentReagentInfo('5', 'A24', '20150101110909', ReagentInfoList2)

    aptioReagentInfo = SystemReagentInfo()
    aptioReagentInfo.update_instrument_reagent_inventory(InstrumentInfo1)
    aptioReagentInfo.update_instrument_reagent_inventory(InstrumentInfo2)
    aptioReagentInfo.update_instrument_reagent_inventory(InstrumentInfo3)
    print(aptioReagentInfo)


def test02():
    from datetime import datetime
    dt1 = '20141117100340'
    dt = datetime.strptime(dt1, '%Y%m%d%H%M%S')
    # datetime objects cannot be ordered against None on Python 3,
    # so test for None explicitly instead of `dt < None`
    print(dt is not None)


if __name__ == '__main__':
    test02()
normal
{ "blob_id": "994210b3de82af02ec7b1b7bee75ceb88ffb2bd5", "index": 2491, "step-1": "\nHORIZONTAL_TABLE = b'\\x09'\n\nclass ReagentInfoItem():\n '''\n This class if defined for a single reagent info unit, from the table's view, its a cell of the table.\n '''\n def __init__(self, reagent_name, reagent_count):\n self.reagent_name = reagent_name\n self.reagent_count = reagent_count\n\n def __repr__(self):\n return 'reagent name: ' + self.reagent_name + HORIZONTAL_TABLE +\\\n 'reagent count: ' + str(self.reagent_count)\n\n\nclass InstrumentReagentInfo():\n '''\n This class is defined for single instrument,from the table's view, its a column of the reagent info table.\n '''\n def __init__(self, instr_id, instr_type, time_stamp=None, reagent_info_list=[]):\n '''\n Instrument_Id: str\n Instrument_Type: str\n Reagent_Info_List: ReagentInfoItem[]\n '''\n self.instrument_id = instr_id\n self.instrument_type = instr_type\n self.time_stamp = time_stamp\n self.reagent_info_list = reagent_info_list\n\n def __repr__(self):\n return 'instrument id: '+ self.instrument_id + HORIZONTAL_TABLE +\\\n 'instrument type: ' + self.instrument_type + HORIZONTAL_TABLE+\\\n 'updated timestamp: ' + str(self.time_stamp) + HORIZONTAL_TABLE+\\\n '\\nreagent inventory info:\\n' + '\\n'.join(str(item) for item in self.reagent_info_list)\n\n\nclass SystemReagentInfo():\n '''\n Reagent information of the whole system\n '''\n def __init__(self):\n self.system_reagent = []\n\n def update_instrument_reagent_inventory(self,instrument_reagent_invemtory):\n if isinstance(instrument_reagent_invemtory,InstrumentReagentInfo):\n if not self.get_last_update_timestamp_per_instrument(instrument_reagent_invemtory.instrument_id) or \\\n self.get_last_update_timestamp_per_instrument(instrument_reagent_invemtory.instrument_id)<instrument_reagent_invemtory.time_stamp:\n old_record = self.get_instrument_reagent_inventory_item_by_id(instrument_reagent_invemtory.instrument_id)\n if old_record:\n old_record = instrument_reagent_invemtory\n else:\n self.system_reagent.append(instrument_reagent_invemtory)\n\n def get_instrument_reagent_inventory_item_by_id(self,instr_id):\n for item in self.system_reagent:\n if isinstance(item,InstrumentReagentInfo):\n if item.instrument_id == instr_id:\n return item\n\n def get_last_update_timestamp_per_instrument(self,instr_id):\n for item in self.system_reagent:\n if isinstance(item,InstrumentReagentInfo):\n if item.instrument_id == instr_id:\n return item.time_stamp\n\n def __repr__(self):\n return 'system reagent info:\\n' +'\\n'.join(str(item) for item in self.system_reagent)\n\n\ndef test01():\n ReagentInfoItem11 = ReagentInfoItem('dai', 12)\n ReagentInfoItem12 = ReagentInfoItem('han', 13)\n ReagentInfoItem13 = ReagentInfoItem('peng', 14)\n ReagentInfoList1 = [ReagentInfoItem11, ReagentInfoItem12, ReagentInfoItem13]\n\n ReagentInfoItem21 = ReagentInfoItem('I', 32)\n ReagentInfoItem22 = ReagentInfoItem('love', 33)\n ReagentInfoItem23 = ReagentInfoItem('python', 34)\n ReagentInfoList2 = [ReagentInfoItem21, ReagentInfoItem22, ReagentInfoItem23]\n\n # 'normal testing, below info should be updated:'\n InstrumentInfo1 = InstrumentReagentInfo('5', 'A24', '20160101110909', ReagentInfoList1)\n InstrumentInfo2 = InstrumentReagentInfo('7', 'CEN', '20151212090923', ReagentInfoList2)\n # 'abnormal testing, below info should not be updated:'\n InstrumentInfo3 = InstrumentReagentInfo('5', 'A24', '20150101110909', ReagentInfoList2)\n\n aptioReagentInfo = SystemReagentInfo()\n\n 
aptioReagentInfo.update_instrument_reagent_inventory(InstrumentInfo1)\n aptioReagentInfo.update_instrument_reagent_inventory(InstrumentInfo2)\n\n aptioReagentInfo.update_instrument_reagent_inventory(InstrumentInfo3)\n\n print aptioReagentInfo\n\ndef test02():\n from datetime import datetime\n dt1 = '20141117100340'\n dt = datetime.strptime(dt1,'%Y%m%d%H%M%S')\n print dt < None\n\nif __name__ == '__main__':\n test02()", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
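The record above orders instrument updates by comparing raw '%Y%m%d%H%M%S' strings, which works only because that format is fixed-width. A small sketch of a safer comparison helper (parse_ts is an illustrative name, not part of the record):

from datetime import datetime


def parse_ts(stamp):
    # The records use fixed-width '%Y%m%d%H%M%S' stamps; None means "never updated".
    return datetime.strptime(stamp, '%Y%m%d%H%M%S') if stamp else None


assert parse_ts('20160101110909') > parse_ts('20151212090923')
assert parse_ts(None) is None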
<|reserved_special_token_0|> class HandleConfig(ConfigParser): def __init__(self, ini_file_neme): super().__init__() self.ini_file_neme = ini_file_neme def red_conf__(self): file_path = os.path.join(conf_dir, self.ini_file_neme) self.read(file_path, encoding='utf-8') <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class HandleConfig(ConfigParser): def __init__(self, ini_file_neme): super().__init__() self.ini_file_neme = ini_file_neme def red_conf__(self): file_path = os.path.join(conf_dir, self.ini_file_neme) self.read(file_path, encoding='utf-8') <|reserved_special_token_0|> red_conf.red_conf__() <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class HandleConfig(ConfigParser): def __init__(self, ini_file_neme): super().__init__() self.ini_file_neme = ini_file_neme def red_conf__(self): file_path = os.path.join(conf_dir, self.ini_file_neme) self.read(file_path, encoding='utf-8') red_conf = HandleConfig('xiaoyin.ini') red_conf.red_conf__() log_data_list = [red_conf.get('log', 'log_name'), red_conf.get('log', 'log_level'), red_conf.getboolean('log', 'file')] <|reserved_special_token_1|> <|reserved_special_token_0|> from configparser import ConfigParser from Common.handle_path import conf_dir import os class HandleConfig(ConfigParser): def __init__(self, ini_file_neme): super().__init__() self.ini_file_neme = ini_file_neme def red_conf__(self): file_path = os.path.join(conf_dir, self.ini_file_neme) self.read(file_path, encoding='utf-8') red_conf = HandleConfig('xiaoyin.ini') red_conf.red_conf__() log_data_list = [red_conf.get('log', 'log_name'), red_conf.get('log', 'log_level'), red_conf.getboolean('log', 'file')] <|reserved_special_token_1|> """ -*- coding:utf-8 -*- @ Time : 14:05 @ Name : handle_ini_file.py @ Author : xiaoyin_ing @ Email : [email protected] @ Software : PyCharm ... """ from configparser import ConfigParser from Common.handle_path import conf_dir import os class HandleConfig(ConfigParser): def __init__(self, ini_file_neme): super().__init__() self.ini_file_neme = ini_file_neme def red_conf__(self): file_path = os.path.join(conf_dir, self.ini_file_neme) self.read(file_path, encoding="utf-8") red_conf = HandleConfig("xiaoyin.ini") red_conf.red_conf__() # 日志模块用到的属性 log_data_list = [red_conf.get("log", "log_name"), red_conf.get("log", "log_level"), red_conf.getboolean("log", "file")] # print(log_data_list)
flexible
{ "blob_id": "01e60123ad87d9ff49812fe3a6f5d55bc85921c5", "index": 4071, "step-1": "<mask token>\n\n\nclass HandleConfig(ConfigParser):\n\n def __init__(self, ini_file_neme):\n super().__init__()\n self.ini_file_neme = ini_file_neme\n\n def red_conf__(self):\n file_path = os.path.join(conf_dir, self.ini_file_neme)\n self.read(file_path, encoding='utf-8')\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass HandleConfig(ConfigParser):\n\n def __init__(self, ini_file_neme):\n super().__init__()\n self.ini_file_neme = ini_file_neme\n\n def red_conf__(self):\n file_path = os.path.join(conf_dir, self.ini_file_neme)\n self.read(file_path, encoding='utf-8')\n\n\n<mask token>\nred_conf.red_conf__()\n<mask token>\n", "step-3": "<mask token>\n\n\nclass HandleConfig(ConfigParser):\n\n def __init__(self, ini_file_neme):\n super().__init__()\n self.ini_file_neme = ini_file_neme\n\n def red_conf__(self):\n file_path = os.path.join(conf_dir, self.ini_file_neme)\n self.read(file_path, encoding='utf-8')\n\n\nred_conf = HandleConfig('xiaoyin.ini')\nred_conf.red_conf__()\nlog_data_list = [red_conf.get('log', 'log_name'), red_conf.get('log',\n 'log_level'), red_conf.getboolean('log', 'file')]\n", "step-4": "<mask token>\nfrom configparser import ConfigParser\nfrom Common.handle_path import conf_dir\nimport os\n\n\nclass HandleConfig(ConfigParser):\n\n def __init__(self, ini_file_neme):\n super().__init__()\n self.ini_file_neme = ini_file_neme\n\n def red_conf__(self):\n file_path = os.path.join(conf_dir, self.ini_file_neme)\n self.read(file_path, encoding='utf-8')\n\n\nred_conf = HandleConfig('xiaoyin.ini')\nred_conf.red_conf__()\nlog_data_list = [red_conf.get('log', 'log_name'), red_conf.get('log',\n 'log_level'), red_conf.getboolean('log', 'file')]\n", "step-5": "\"\"\"\n-*- coding:utf-8 -*-\n@ Time : 14:05\n@ Name : handle_ini_file.py\n@ Author : xiaoyin_ing\n@ Email : [email protected]\n@ Software : PyCharm\n ...\n \n\"\"\"\nfrom configparser import ConfigParser\nfrom Common.handle_path import conf_dir\nimport os\n\n\nclass HandleConfig(ConfigParser):\n def __init__(self, ini_file_neme):\n super().__init__()\n self.ini_file_neme = ini_file_neme\n\n def red_conf__(self):\n file_path = os.path.join(conf_dir, self.ini_file_neme)\n self.read(file_path, encoding=\"utf-8\")\n\n\nred_conf = HandleConfig(\"xiaoyin.ini\")\nred_conf.red_conf__()\n\n# 日志模块用到的属性\nlog_data_list = [red_conf.get(\"log\", \"log_name\"), red_conf.get(\"log\", \"log_level\"), red_conf.getboolean(\"log\", \"file\")]\n# print(log_data_list)\n", "step-ids": [ 3, 4, 5, 6, 7 ] }
[ 3, 4, 5, 6, 7 ]
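For reference, a self-contained sketch of what HandleConfig in the record above reads: the [log] section and its three keys come from the record, while the inline string and its values are illustrative stand-ins for conf_dir/xiaoyin.ini.

from configparser import ConfigParser

sample = """
[log]
log_name = xiaoyin
log_level = DEBUG
file = true
"""

parser = ConfigParser()
parser.read_string(sample)
assert parser.get('log', 'log_name') == 'xiaoyin'
assert parser.getboolean('log', 'file') is True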
from django.apps import AppConfig class SmashbotspainConfig(AppConfig): name = 'smashbotspain'
normal
{ "blob_id": "e714755d660ba809f7958cad4f0b9f95b0a0ffdc", "index": 9320, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass SmashbotspainConfig(AppConfig):\n <mask token>\n", "step-3": "<mask token>\n\n\nclass SmashbotspainConfig(AppConfig):\n name = 'smashbotspain'\n", "step-4": "from django.apps import AppConfig\n\n\nclass SmashbotspainConfig(AppConfig):\n name = 'smashbotspain'\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
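The AppConfig in the record above only takes effect once Django's settings point at it; a minimal settings.py fragment (the contrib apps are illustrative):

INSTALLED_APPS = [
    'django.contrib.contenttypes',
    'django.contrib.auth',
    'smashbotspain.apps.SmashbotspainConfig',  # the record's AppConfig
]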
#
# ------------------------------------------------------------------------------------------------------------------------------
#
# This program has been developed by Hamed Noori and, with citation of the related publications,
# can be used without permission.
# This program is for a novel architecture for a traffic light control system which can form and manipulate
# vehicular platoons using clusters of traffic lights. This method, called Platoon-Based Intelligent Traffic Lights (PB-ITL),
# is based on coordinated traffic lights that are connected and are also able to communicate with vehicles wirelessly. PB-ITL
# groups traffic lights in clusters and each cluster tries to provide proper green status for the platoon of vehicles, using the
# Platooning concept, which seeks to utilize close to the full capacity of roads.
# This lib is a Python-based program which can simulate a city with dynamic intelligent traffic lights.
# The author can be reached at [email protected]
#
# ------------------------------------------------------------------------------------------------------------------------------
#
import logging
import subprocess
import sys

import traci  # SUMO's TraCI client library

# UnusedPortLock, find_unused_port, run and terminate_sumo are project-level
# helpers defined elsewhere in this codebase.


def start_simulation(sumo, scenario, network, begin, end, interval, output):
    logging.debug("Finding unused port")

    unused_port_lock = UnusedPortLock()
    unused_port_lock.__enter__()
    remote_port = find_unused_port()

    logging.debug("Port %d was found" % remote_port)

    logging.debug("Starting SUMO as a server")

    sumo = subprocess.Popen(["D:\\PATH\\sumo-gui.exe", "-c", "D:\\PATH\\Your.sumo.cfg", "--tripinfo-output", output, "--device.emissions.probability", "1.0", "--remote-port", str(remote_port)], stdout=sys.stdout, stderr=sys.stderr)
    unused_port_lock.release()

    try:
        traci.init(remote_port)
        run(network, begin, end, interval)
    except Exception:
        logging.exception("Something bad happened")
    finally:
        logging.info("Terminating SUMO")  # plain info: there is not always an active exception here
        terminate_sumo(sumo)
        unused_port_lock.__exit__()
normal
{ "blob_id": "4775bef3623497e9bbe79ca2d4e9e9da0422c450", "index": 401, "step-1": "<mask token>\n", "step-2": "def start_simulation(sumo, scenario, network, begin, end, interval, output):\n logging.debug('Finding unused port')\n unused_port_lock = UnusedPortLock()\n unused_port_lock.__enter__()\n remote_port = find_unused_port()\n logging.debug('Port %d was found' % remote_port)\n logging.debug('Starting SUMO as a server')\n sumo = subprocess.Popen(['D:\\\\PATH\\\\sumo-gui.exe', '-c',\n 'D:\\\\\\\\PATH\\\\Your.sumo.cfg', '--tripinfo-output', output,\n '--device.emissions.probability', '1.0', '--remote-port', str(\n remote_port)], stdout=sys.stdout, stderr=sys.stderr)\n unused_port_lock.release()\n try:\n traci.init(remote_port)\n run(network, begin, end, interval)\n except Exception:\n logging.exception('Something bad happened')\n finally:\n logging.exception('Terminating SUMO')\n terminate_sumo(sumo)\n unused_port_lock.__exit__()\n", "step-3": "# \r\n#\r\n#\r\n# ------------------------------------------------------------------------------------------------------------------------------\r\n#\r\n# This program have been developed by Hamed Noori and with citiation of the related publicaitons\r\n# can be used without permission.\r\n# This program is for a novel architecture for traffic light control system which can form and manipulate \r\n# vehicular platoons using clusters of traffic lights. This method, called Platoon-Based Intelligent Traffic Lights (PB-ITL) \r\n# is based on coordinated traffic lights that are connected and are also able to communicate with vehicles wirelessly. PB-ITL \r\n# groups traffic lights in clusters and each cluster tries to provide proper green status for the platoon of vehicles, using the\r\n# Platooning concept which is seeking to utilize close to full capacity of roads. \r\n# This lib is a Python-based program which can simulate a city with dynamic intelligent traffic lights. \r\n# The author can be reach at [email protected]\r\n#\r\n# ------------------------------------------------------------------------------------------------------------------------------\r\n#\r\n# \r\ndef start_simulation(sumo, scenario, network, begin, end, interval, output):\r\n logging.debug(\"Finding unused port\")\r\n \r\n unused_port_lock = UnusedPortLock()\r\n unused_port_lock.__enter__()\r\n remote_port = find_unused_port()\r\n \r\n logging.debug(\"Port %d was found\" % remote_port)\r\n \r\n logging.debug(\"Starting SUMO as a server\")\r\n \r\n sumo = subprocess.Popen([\"D:\\\\PATH\\\\sumo-gui.exe\", \"-c\", \"D:\\\\\\PATH\\\\Your.sumo.cfg\", \"--tripinfo-output\", output,\"--device.emissions.probability\", \"1.0\" , \"--remote-port\", str(remote_port)], stdout=sys.stdout, stderr=sys.stderr) \r\n unused_port_lock.release()\r\n \r\n try: \r\n traci.init(remote_port) \r\n run(network, begin, end, interval)\r\n except Exception:\r\n logging.exception(\"Something bad happened\")\r\n finally:\r\n logging.exception(\"Terminating SUMO\") \r\n terminate_sumo(sumo)\r\n unused_port_lock.__exit__()\r\n \r\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
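start_simulation in the record above calls find_unused_port and UnusedPortLock without defining them. A plausible sketch of the port-probing helper, under the assumption that it simply asks the OS for a free TCP port (the lock class is omitted):

import socket


def find_unused_port():
    # Binding to port 0 lets the kernel pick a currently free ephemeral port.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.bind(('127.0.0.1', 0))
        return sock.getsockname()[1]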
import base64 import bleach import errno import fcntl import gzip import hashlib import importlib import inspect import magic import mimetypes import morepath import operator import os.path import re import shutil import sqlalchemy import urllib.request from markupsafe import Markup from collections.abc import Iterable from contextlib import contextmanager from cProfile import Profile from functools import reduce from importlib import import_module from io import BytesIO, StringIO from itertools import groupby, islice from onegov.core import log from onegov.core.cache import lru_cache from onegov.core.custom import json from onegov.core.errors import AlreadyLockedError from purl import URL from threading import Thread from time import perf_counter from unidecode import unidecode from uuid import UUID, uuid4 from webob import static from yubico_client import Yubico from yubico_client.yubico_exceptions import SignatureVerificationError from yubico_client.yubico_exceptions import StatusCodeError from typing import overload, Any, TypeVar, TYPE_CHECKING if TYPE_CHECKING: from _typeshed import SupportsRichComparison from collections.abc import Callable, Collection, Iterator from fs.base import FS, SubFS from re import Match from sqlalchemy import Column from sqlalchemy.orm import Session from types import ModuleType from webob import Response from .request import CoreRequest from .types import FileDict, LaxFileDict _T = TypeVar('_T') _KT = TypeVar('_KT') # http://stackoverflow.com/a/13500078 _unwanted_url_chars = re.compile(r'[\.\(\)\\/\s<>\[\]{},:;?!@&=+$#@%|\*"\'`]+') _double_dash = re.compile(r'[-]+') _number_suffix = re.compile(r'-([0-9]+)$') _repeated_spaces = re.compile(r'\s\s+') _uuid = re.compile( r'^[a-f0-9]{8}-?[a-f0-9]{4}-?[a-f0-9]{4}-?[a-f0-9]{4}-?[a-f0-9]{12}$') # only temporary until bleach has a release > 1.4.1 - _email_regex = re.compile(( r"([a-z0-9!#$%&'*+\/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+\/=?^_`" r"{|}~-]+)*(@|\sat\s)(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?(\.|" r"\sdot\s))+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?)" )) # detects multiple successive newlines _multiple_newlines = re.compile(r'\n{2,}', re.MULTILINE) # detect starting strings of phone inside a link _phone_inside_a_tags = r'(\">|href=\"tel:)?' # regex pattern for swiss phone numbers _phone_ch_country_code = r"(\+41|0041|0[0-9]{2})" _phone_ch = re.compile(_phone_ch_country_code + r'([ \r\f\t\d]+)') # Adds a regex group to capture if a leading a tag is present or if the # number is part of the href attributes _phone_ch_html_safe = re.compile( _phone_inside_a_tags + _phone_ch_country_code + r'([ \r\f\t\d]+)') # for yubikeys ALPHABET = 'cbdefghijklnrtuv' ALPHABET_RE = re.compile(r'^[cbdefghijklnrtuv]{12,44}$') @contextmanager def local_lock(namespace: str, key: str) -> 'Iterator[None]': """ Locks the given namespace/key combination on the current system, automatically freeing it after the with statement has been completed or once the process is killed. Usage:: with lock('namespace', 'key'): pass """ name = f'{namespace}-{key}'.replace('/', '-') with open(f'/tmp/{name}', 'w+') as f: try: fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB) yield fcntl.flock(f, fcntl.LOCK_UN) except BlockingIOError as exception: raise AlreadyLockedError from exception def normalize_for_url(text: str) -> str: """ Takes the given text and makes it fit to be used for an url. That means replacing spaces and other unwanted characters with '-', lowercasing everything and turning unicode characters into their closest ascii equivalent using Unidecode. 
See https://pypi.python.org/pypi/Unidecode """ # German is our main language, so we are extra considerate about it # (unidecode turns ü into u) text = text.replace("ü", "ue") text = text.replace("ä", "ae") text = text.replace("ö", "oe") clean = _unwanted_url_chars.sub('-', unidecode(text).strip(' ').lower()) clean = _double_dash.sub('-', clean) clean = clean.strip('-') return clean def increment_name(name: str) -> str: """ Takes the given name and adds a numbered suffix beginning at 1. For example:: foo => foo-1 foo-1 => foo-2 """ match = _number_suffix.search(name) if match: number_str = match.group(1) next_number = int(number_str) + 1 return f'{name[:-len(number_str)]}{next_number}' else: return f'{name}-1' def remove_repeated_spaces(text: str) -> str: """ Removes repeated spaces in the text ('a b' -> 'a b'). """ return _repeated_spaces.sub(' ', text) @contextmanager def profile(filename: str) -> 'Iterator[None]': """ Profiles the wrapped code and stores the result in the profiles folder with the given filename. """ profiler = Profile() profiler.enable() yield profiler.disable() profiler.create_stats() profiler.dump_stats('profiles/{}'.format(filename)) @contextmanager def timing(name: str | None = None) -> 'Iterator[None]': """ Runs the wrapped code and prints the time in ms it took to run it. The name is printed in front of the time, if given. """ start = perf_counter() yield duration_ms = 1000.0 * (perf_counter() - start) if name: print(f'{name}: {duration_ms:.0f} ms') else: print(f'{duration_ms:.0f} ms') @lru_cache(maxsize=32) def module_path_root(module: 'ModuleType | str') -> str: if isinstance(module, str): module = importlib.import_module(module) assert module is not None return os.path.dirname(inspect.getfile(module)) def module_path(module: 'ModuleType | str', subpath: str) -> str: """ Returns a subdirectory in the given python module. :mod: A python module (actual module or string) :subpath: Subpath below that python module. Leading slashes ('/') are ignored. """ parent = module_path_root(module) path = os.path.join(parent, subpath.strip('/')) # always be paranoid with path manipulation assert is_subpath(parent, path) return path def touch(file_path: str) -> None: """ Touches the file on the given path. """ try: os.utime(file_path, None) except Exception: open(file_path, 'a').close() class Bunch: """ A simple but handy "collector of a bunch of named stuff" class. See `<https://code.activestate.com/recipes/\ 52308-the-simple-but-handy-collector-of-a-bunch-of-named/>`_. For example:: point = Bunch(x=1, y=2) assert point.x == 1 assert point.y == 2 point.z = 3 assert point.z == 3 Allows the creation of simple nested bunches, for example:: request = Bunch(**{'app.settings.org.my_setting': True}) assert request.app.settings.org.my_setting is True """ def __init__(self, **kwargs: Any): self.__dict__.update( (key, value) for key, value in kwargs.items() if '.' not in key ) for key, value in kwargs.items(): if '.' in key: name, _, key = key.partition('.') setattr(self, name, Bunch(**{key: value})) if TYPE_CHECKING: # let mypy know that any attribute access could be valid def __getattr__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def __delattr__(self, name: str) -> None: ... 
    def __eq__(self, other: object) -> bool:
        if type(other) is type(self):
            return self.__dict__ == other.__dict__
        return False

    def __ne__(self, other: object) -> bool:
        return not self.__eq__(other)


def render_file(file_path: str, request: 'CoreRequest') -> 'Response':
    """ Takes the given file_path (content) and renders it to the browser.
    The file must exist on the local system and be readable by the current
    process.

    """

    def hash_path(path: str) -> str:
        return hashlib.new(  # nosec:B324
            'sha1',
            path.encode('utf-8'),
            usedforsecurity=False
        ).hexdigest()

    # this is a very cachable result - though it's possible that a file
    # changes its content type, it should usually not, especially since
    # we emphasize the use of random filenames
    @request.app.cache.cache_on_arguments(to_str=hash_path)
    def get_content_type(file_path: str) -> str:
        content_type = mimetypes.guess_type(file_path)[0]

        if not content_type:
            content_type = magic.from_file(file_path, mime=True)

        return content_type

    return request.get_response(
        static.FileApp(file_path, content_type=get_content_type(file_path)))


def hash_dictionary(dictionary: dict[str, Any]) -> str:
    """ Computes a sha1 hash for the given dictionary. The dictionary
    is expected to only contain values that can be serialized by json.

    That includes int, decimal, string, boolean.

    Note that this function is not meant to be used for hashing secrets. Do
    not include data in this dictionary that is secret!

    """
    dict_as_string = json.dumps(dictionary, sort_keys=True).encode('utf-8')
    return hashlib.new(  # nosec:B324
        'sha1',
        dict_as_string,
        usedforsecurity=False
    ).hexdigest()


@overload
def groupbylist(
    iterable: Iterable[_T],
    key: None = ...
) -> list[tuple[_T, list[_T]]]: ...


@overload
def groupbylist(
    iterable: Iterable[_T],
    key: 'Callable[[_T], _KT]'
) -> list[tuple[_KT, list[_T]]]: ...


def groupbylist(
    iterable: Iterable[_T],
    key: 'Callable[[_T], Any] | None' = None
) -> list[tuple[Any, list[_T]]]:
    """ Works just like Python's ``itertools.groupby`` function, but instead
    of returning generators, it returns lists.

    """
    return [(k, list(g)) for k, g in groupby(iterable, key=key)]


def linkify_phone(text: str) -> str:
    """ Takes a string and replaces valid phone numbers with html links. If a
    phone number is matched, it will be replaced by the result of a callback
    function that does further checks on the regex match. If these checks do
    not pass, the matched number will remain unchanged.

    """

    def strip_whitespace(number: str) -> str:
        return re.sub(r'\s', '', number)

    def is_valid_length(number: str) -> bool:
        if number.startswith('+00'):
            return False
        if number.startswith('00'):
            return len(number) == 13
        elif number.startswith('0'):
            return len(number) == 10
        elif number.startswith('+'):
            return len(number) == 12

        return False

    def handle_match(match: 'Match[str]') -> str:
        inside_html = match.group(1)
        number = f'{match.group(2)}{match.group(3)}'
        assert not number.endswith('\n')
        if inside_html:
            return match.group(0)
        if is_valid_length(strip_whitespace(number)):
            number = remove_repeated_spaces(number).strip()
            return f'<a href="tel:{number}">{number}</a> '

        return match.group(0)

    return _phone_ch_html_safe.sub(handle_match, text)


# FIXME: A lot of these methods should be using MarkupSafe
def linkify(text: str, escape: bool = True) -> str:
    """ Takes plain text and injects html links for urls and email addresses.

    By default the text is html escaped before it is linkified. This accounts
    for the fact that we usually use this for text blocks that we mean to
    extend with email addresses and urls.

If html is already possible, why linkify it? Note: We need to clean the html after we've created it (linkify parses escaped html and turns it into real html). As a consequence it is possible to have html urls in the text that won't be escaped. """ if not text: return text long_top_level_domains = ['.agency'] # bleach.linkify supports only a fairly limited amount of tlds if any(domain in text for domain in long_top_level_domains): if '@' in text: linkified = str( Markup('<a href="mailto:{text}">{text}</a>').format( text=text ) ) else: linkified = str( Markup('<a href="{text}">{text}</a>').format(text=text) ) else: linkified = linkify_phone(bleach.linkify(text, parse_email=True)) if not escape: return linkified return bleach.clean( linkified, tags=['a'], attributes={'a': ['href', 'rel']}, protocols=['http', 'https', 'mailto', 'tel'] ) def paragraphify(text: str) -> str: """ Takes a text with newlines groups them into paragraphs according to the following rules: If there's a single newline between two lines, a <br> will replace that newline. If there are multiple newlines between two lines, each line will become a paragraph and the extra newlines are discarded. """ text = text and text.replace('\r', '').strip('\n') if not text: return '' return ''.join(f'<p>{p}</p>' for p in ( p.replace('\n', '<br>') for p in _multiple_newlines.split(text) )) def to_html_ul( value: str, convert_dashes: bool = True, with_title: bool = False ) -> str: """ Linkify and convert to text to one or multiple ul's or paragraphs. """ if not value: return '' value = value.replace('\r', '').strip('\n') value = value.replace('\n\n', '\n \n') if not convert_dashes: return '<p>{}</p>'.format( '<br>'.join(linkify(value).splitlines()) ) elements = [] temp: list[str] = [] def ul(inner: str) -> str: return f'<ul class="bulleted">{inner}</ul>' def li(inner: str) -> str: return f'<li>{inner}</li>' def p(inner: str) -> str: return f'<p>{inner}</p>' was_list = False for i, line in enumerate(value.splitlines()): if not line: continue line = linkify(line) is_list = line.startswith('-') new_p_or_ul = True if line == ' ' else False line = line.lstrip('-').strip() if with_title: elements.append(p(f'<span class="title">{line}</span>')) with_title = False else: if new_p_or_ul or (was_list != is_list and i > 0): elements.append( ul(''.join(temp)) if was_list else p('<br>'.join(temp)) ) temp = [] was_list = False if not new_p_or_ul: temp.append((li(line) if is_list else line)) new_p_or_ul = False was_list = is_list if temp: elements.append( ul(''.join(temp)) if was_list else p('<br>'.join(temp)) ) return ''.join(elements) def ensure_scheme(url: str, default: str = 'http') -> str: """ Makes sure that the given url has a scheme in front, if none was provided. """ if not url: return url # purl (or to be precise urlparse) will parse empty host names ('abc.xyz') # wrongly, assuming the abc.xyz is a path. by adding a double slash if # there isn't one already, we can circumvent that problem if '//' not in url: url = '//' + url _url = URL(url) if _url.scheme(): return url return _url.scheme(default).as_string() def is_uuid(value: str | UUID) -> bool: """ Returns true if the given value is a uuid. The value may be a string or of type UUID. If it's a string, the uuid is checked with a regex. """ if isinstance(value, str): return _uuid.match(str(value)) and True or False return isinstance(value, UUID) def is_non_string_iterable(obj: object) -> bool: """ Returns true if the given obj is an iterable, but not a string. 
""" return not (isinstance(obj, str) or isinstance(obj, bytes))\ and isinstance(obj, Iterable) def relative_url(absolute_url: str | None) -> str: """ Removes everything in front of the path, including scheme, host, username, password and port. """ url = URL._mutate( URL(absolute_url), scheme=None, username=None, password=None, host=None, port=None ) return url.as_string() def is_subpath(directory: str, path: str) -> bool: """ Returns true if the given path is inside the given directory. """ directory = os.path.join(os.path.realpath(directory), '') path = os.path.realpath(path) # return true, if the common prefix of both is equal to directory # e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b return os.path.commonprefix([path, directory]) == directory @overload def is_sorted( iterable: 'Iterable[SupportsRichComparison]', key: 'Callable[[SupportsRichComparison], SupportsRichComparison]' = ..., reverse: bool = ... ) -> bool: ... @overload def is_sorted( iterable: 'Iterable[_T]', key: 'Callable[[_T], SupportsRichComparison]', reverse: bool = ... ) -> bool: ... # FIXME: Do we really want to allow any Iterable? This seems like a bad # idea to me... Iterators will be consumed and the Iterable might # be infinite. This seems like it should be a Container instead, # then we also don't need to use tee or list to make a copy def is_sorted( iterable: 'Iterable[Any]', key: 'Callable[[Any], SupportsRichComparison]' = lambda i: i, reverse: bool = False ) -> bool: """ Returns True if the iterable is sorted. """ # NOTE: we previously used `tee` here, but since `sorted` consumes # the entire iterator, this is the exact case where tee is # slower than just pulling the entire sequence into a list seq = list(iterable) for a, b in zip(seq, sorted(seq, key=key, reverse=reverse)): if a is not b: return False return True def morepath_modules(cls: type[morepath.App]) -> 'Iterator[str]': """ Returns all morepath modules which should be scanned for the given morepath application class. We can't reliably know the actual morepath modules that need to be scanned, which is why we assume that each module has one namespace (like 'more.transaction' or 'onegov.core'). """ for base in cls.__mro__: if not issubclass(base, morepath.App): continue if base is morepath.App: continue module = '.'.join(base.__module__.split('.')[:2]) if module.startswith('test'): continue yield module def scan_morepath_modules(cls: type[morepath.App]) -> None: """ Tries to scan all the morepath modules required for the given application class. This is not guaranteed to stay reliable as there is no sure way to discover all modules required by the application class. """ for module in sorted(morepath_modules(cls)): morepath.scan(import_module(module)) def get_unique_hstore_keys( session: 'Session', column: 'Column[dict[str, Any]]' ) -> set[str]: """ Returns a set of keys found in an hstore column over all records of its table. """ base = session.query(column.keys()).with_entities( # type:ignore sqlalchemy.func.skeys(column).label('keys')) query = sqlalchemy.select( [sqlalchemy.func.array_agg(sqlalchemy.column('keys'))], distinct=True ).select_from(base.subquery()) keys = session.execute(query).scalar() return set(keys) if keys else set() def makeopendir(fs: 'FS', directory: str) -> 'SubFS[FS]': """ Creates and opens the given directory in the given PyFilesystem. 
""" if not fs.isdir(directory): fs.makedir(directory) return fs.opendir(directory) def append_query_param(url: str, key: str, value: str) -> str: """ Appends a single query parameter to an url. This is faster than using Purl, if and only if we only add one query param. Also this function assumes that the value is already url encoded. """ template = '?' in url and '{}&{}={}' or '{}?{}={}' return template.format(url, key, value) class PostThread(Thread): """ POSTs the given data with the headers to the URL. Example:: data = {'a': 1, 'b': 2} data = json.dumps(data).encode('utf-8') PostThread( 'https://example.com/post', data, ( ('Content-Type', 'application/json; charset=utf-8'), ('Content-Length', len(data)) ) ).start() This only works for external URLs! If posting to server itself is needed, use a process instead of the thread! """ def __init__( self, url: str, data: bytes, headers: 'Collection[tuple[str, str]]', timeout: float = 30 ): Thread.__init__(self) self.url = url self.data = data self.headers = headers self.timeout = timeout def run(self) -> None: try: # Validate URL protocol before opening it, since it's possible to # open ftp:// and file:// as well. if not self.url.lower().startswith('http'): raise ValueError from None request = urllib.request.Request(self.url) for header in self.headers: request.add_header(header[0], header[1]) urllib.request.urlopen( # nosec B310 request, self.data, self.timeout ) except Exception as e: log.error( 'Error while sending a POST request to {}: {}'.format( self.url, str(e) ) ) def toggle(collection: set[_T], item: _T | None) -> set[_T]: """ Returns a new set where the item has been toggled. """ if item is None: return collection if item in collection: return collection - {item} else: return collection | {item} def binary_to_dictionary( binary: bytes, filename: str | None = None ) -> 'FileDict': """ Takes raw binary filedata and stores it in a dictionary together with metadata information. The data is compressed before it is stored int he dictionary. Use :func:`dictionary_to_binary` to get the original binary data back. """ assert isinstance(binary, bytes) mimetype = magic.from_buffer(binary, mime=True) # according to https://tools.ietf.org/html/rfc7111, text/csv should be used if mimetype == 'application/csv': mimetype = 'text/csv' gzipdata = BytesIO() with gzip.GzipFile(fileobj=gzipdata, mode='wb') as f: f.write(binary) return { 'data': base64.b64encode(gzipdata.getvalue()).decode('ascii'), 'filename': filename, 'mimetype': mimetype, 'size': len(binary) } def dictionary_to_binary(dictionary: 'LaxFileDict') -> bytes: """ Takes a dictionary created by :func:`binary_to_dictionary` and returns the original binary data. """ data = base64.b64decode(dictionary['data']) with gzip.GzipFile(fileobj=BytesIO(data), mode='r') as f: return f.read() @overload def safe_format( format: str, dictionary: dict[str, str | int | float], types: None = ..., adapt: 'Callable[[str], str] | None' = ..., raise_on_missing: bool = ... ) -> str: ... @overload def safe_format( format: str, dictionary: dict[str, _T], types: set[type[_T]] = ..., adapt: 'Callable[[str], str] | None' = ..., raise_on_missing: bool = ... ) -> str: ... def safe_format( format: str, dictionary: dict[str, Any], types: set[type[Any]] | None = None, adapt: 'Callable[[str], str] | None' = None, raise_on_missing: bool = False ) -> str: """ Takes a user-supplied string with format blocks and returns a string where those blocks are replaced by values in a dictionary. 
    For example::

        >>> safe_format('[user] has logged in', {'user': 'admin'})
        'admin has logged in'

    :param format: The format to use. Square brackets denote dictionary keys.
        To literally print square brackets, mask them by doubling ('[[' -> '[')

    :param dictionary: The dictionary holding the variables to use. If the key
        is not found in the dictionary, the bracket is replaced with an empty
        string.

    :param types: A set of types supported by the dictionary. Limiting this to
        safe types like builtins (str, int, float) ensures that no values are
        accidentally leaked through faulty __str__ representations.

        Note that inheritance is ignored. Supported types need to be
        whitelisted explicitly.

    :param adapt: An optional callable that receives the key before it is
        used. Returns the same key or an altered version.

    :param raise_on_missing: True if missing keys should result in a runtime
        error (defaults to False).

    This is strictly meant for formats provided by users. Python's string
    formatting options are clearly superior to this, however it is less
    secure!

    """
    types = types or {int, str, float}
    output = StringIO()
    buffer = StringIO()
    opened = 0

    for ix, char in enumerate(format):
        if char == '[':
            opened += 1

        if char == ']':
            opened -= 1

        if opened == 1 and char != '[' and char != ']':
            print(char, file=buffer, end='')
            continue

        if opened == 2 or opened == -2:
            if buffer.tell():
                raise RuntimeError("Unexpected bracket inside bracket found")

            print(char, file=output, end='')
            opened = 0
            continue

        if buffer.tell():
            k = adapt(buffer.getvalue()) if adapt else buffer.getvalue()

            if raise_on_missing and k not in dictionary:
                raise RuntimeError("Key '{}' is unknown".format(k))

            v = dictionary.get(k, '')
            t = type(v)

            if t not in types:
                raise RuntimeError("Invalid type for '{}': {}".format(k, t))

            print(v, file=output, end='')
            buffer = StringIO()

        if char != '[' and char != ']':
            print(char, file=output, end='')

    if opened != 0:
        raise RuntimeError("Uneven number of brackets in '{}'".format(format))

    return output.getvalue()


def safe_format_keys(
    format: str,
    adapt: 'Callable[[str], str] | None' = None
) -> list[str]:
    """ Takes a :func:`safe_format` string and returns the found keys. """
    keys = []

    def adapt_and_record(key: str) -> str:
        key = adapt(key) if adapt else key
        keys.append(key)

        return key

    safe_format(format, {}, adapt=adapt_and_record)

    return keys


def is_valid_yubikey(
    client_id: str,
    secret_key: str,
    expected_yubikey_id: str,
    yubikey: str
) -> bool:
    """ Asks the yubico validation servers if the given yubikey OTP is valid.

    :client_id:
        The yubico API client id.

    :secret_key:
        The yubico API secret key.

    :expected_yubikey_id:
        The expected yubikey id. The yubikey id is defined as the first twelve
        characters of any yubikey value. Each user should have a yubikey
        associated with its account. If the yubikey value comes from a
        different key, the key is invalid.

    :yubikey:
        The actual yubikey value that should be verified.

    :return: True if yubico confirmed the validity of the key.

    """
    assert client_id and secret_key and expected_yubikey_id and yubikey
    assert len(expected_yubikey_id) == 12

    # if the yubikey doesn't start with the expected yubikey id we do not
    # need to make a roundtrip to the validation server
    if not yubikey.startswith(expected_yubikey_id):
        # FIXME: Are we leaking information with this early out?
        return False

    try:
        return Yubico(client_id, secret_key).verify(yubikey)
    except StatusCodeError as e:
        if e.status_code != 'REPLAYED_OTP':
            raise e

        return False
    except SignatureVerificationError:
        return False


def is_valid_yubikey_format(otp: str) -> bool:
    """ Returns True if the given OTP has the correct format. Does not
    actually contact Yubico, so this function may return true for some
    invalid keys.

    """
    return ALPHABET_RE.match(otp) and True or False


def yubikey_otp_to_serial(otp: str) -> int | None:
    """ Takes a Yubikey OTP and calculates the serial number of the key.

    The serial number is printed on the yubikey, in decimal and as a QR code.

    Example:

        >>> yubikey_otp_to_serial(
            'ccccccdefghdefghdefghdefghdefghdefghdefghklv')
        2311522

    Adapted from Java:

        https://github.com/Yubico/yubikey-salesforce-client/blob/
        e38e46ee90296a852374a8b744555e99d16b6ca7/src/classes/Modhex.cls

    If the key cannot be calculated, None is returned. This can happen if
    the key is malformed.

    """

    if not is_valid_yubikey_format(otp):
        return None

    token = 'cccc' + otp[:12]

    toggle = False
    keep = 0

    bytesarray = []

    for char in token:
        n = ALPHABET.index(char)

        toggle = not toggle

        if toggle:
            keep = n
        else:
            bytesarray.append((keep << 4) | n)

    value = 0

    # in Java, shifts on integers are masked with 0x1f using AND
    # https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.19
    mask_value = 0x1f

    for i in range(0, 8):
        shift = (4 - 1 - i) * 8
        value += (bytesarray[i] & 255) << (shift & mask_value)

    return value


def yubikey_public_id(otp: str) -> str:
    """ Returns the yubikey identity given a token. """
    return otp[:12]


def dict_path(dictionary: dict[str, _T], path: str) -> _T:
    """ Gets the value of the given dictionary at the given path.

    For example:

        >>> data = {'foo': {'bar': True}}
        >>> dict_path(data, 'foo.bar')
        True

    """
    if not dictionary:
        raise KeyError()

    return reduce(operator.getitem, path.split('.'), dictionary)  # type:ignore


def safe_move(src: str, dst: str) -> None:
    """ Rename a file from ``src`` to ``dst``.

    * Moves must be atomic. ``shutil.move()`` is not atomic.

    * Moves must work across filesystems. Often temp directories and the
      cache directories live on different filesystems. ``os.rename()`` can
      throw errors if run across filesystems.

    So we try ``os.rename()``, but if we detect a cross-filesystem copy, we
    switch to ``shutil.move()`` with some wrappers to make it atomic.

    Via https://alexwlchan.net/2019/03/atomic-cross-filesystem-moves-in-python

    """
    try:
        os.rename(src, dst)
    except OSError as err:

        if err.errno == errno.EXDEV:
            # Generate a unique ID, and copy `<src>` to the target directory
            # with a temporary name `<dst>.<ID>.tmp`. Because we're copying
            # across a filesystem boundary, this initial copy may not be
            # atomic. We intersperse a random UUID so if different processes
            # are copying into `<dst>`, they don't overlap in their tmp copies.
            copy_id = uuid4()
            tmp_dst = "%s.%s.tmp" % (dst, copy_id)
            shutil.copyfile(src, tmp_dst)

            # Then do an atomic rename onto the new name, and clean up the
            # source image.
            os.rename(tmp_dst, dst)
            os.unlink(src)

        else:
            raise


@overload
def batched(
    iterable: Iterable[_T],
    batch_size: int,
    container_factory: 'type[tuple]' = ...  # type:ignore[type-arg]
) -> 'Iterator[tuple[_T, ...]]': ...


@overload
def batched(
    iterable: Iterable[_T],
    batch_size: int,
    container_factory: 'type[list]'  # type:ignore[type-arg]
) -> 'Iterator[list[_T]]': ...
# NOTE: If there were higher order TypeVars, we could properly infer # the type of the Container, for now we just add overloads for # two of the most common container_factories @overload def batched( iterable: Iterable[_T], batch_size: int, container_factory: 'Callable[[Iterator[_T]], Collection[_T]]' ) -> 'Iterator[Collection[_T]]': ... def batched( iterable: Iterable[_T], batch_size: int, container_factory: 'Callable[[Iterator[_T]], Collection[_T]]' = tuple ) -> 'Iterator[Collection[_T]]': """ Splits an iterable into batches of batch_size and puts them inside a given collection (tuple by default). The container_factory is necessary in order to consume the iterator returned by islice. Otherwise this function would never return. """ iterator = iter(iterable) while True: batch = container_factory(islice(iterator, batch_size)) if len(batch) == 0: return yield batch
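# Hedged usage sketch (not part of the original module): batched() lazily
# yields fixed-size chunks, and container_factory controls the chunk type.
if __name__ == '__main__':
    assert list(batched(range(7), 3)) == [(0, 1, 2), (3, 4, 5), (6,)]
    assert list(batched(range(7), 3, container_factory=list)) == [
        [0, 1, 2], [3, 4, 5], [6]]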
normal
{ "blob_id": "084c9ad83091f6f96d19c0f0c28520ccda93bbaf", "index": 7778, "step-1": "<mask token>\n\n\ndef normalize_for_url(text: str) ->str:\n \"\"\" Takes the given text and makes it fit to be used for an url.\n\n That means replacing spaces and other unwanted characters with '-',\n lowercasing everything and turning unicode characters into their closest\n ascii equivalent using Unidecode.\n\n See https://pypi.python.org/pypi/Unidecode\n\n \"\"\"\n text = text.replace('ü', 'ue')\n text = text.replace('ä', 'ae')\n text = text.replace('ö', 'oe')\n clean = _unwanted_url_chars.sub('-', unidecode(text).strip(' ').lower())\n clean = _double_dash.sub('-', clean)\n clean = clean.strip('-')\n return clean\n\n\ndef increment_name(name: str) ->str:\n \"\"\" Takes the given name and adds a numbered suffix beginning at 1.\n\n For example::\n\n foo => foo-1\n foo-1 => foo-2\n\n \"\"\"\n match = _number_suffix.search(name)\n if match:\n number_str = match.group(1)\n next_number = int(number_str) + 1\n return f'{name[:-len(number_str)]}{next_number}'\n else:\n return f'{name}-1'\n\n\ndef remove_repeated_spaces(text: str) ->str:\n \"\"\" Removes repeated spaces in the text ('a b' -> 'a b'). \"\"\"\n return _repeated_spaces.sub(' ', text)\n\n\n@contextmanager\ndef profile(filename: str) ->'Iterator[None]':\n \"\"\" Profiles the wrapped code and stores the result in the profiles folder\n with the given filename.\n\n \"\"\"\n profiler = Profile()\n profiler.enable()\n yield\n profiler.disable()\n profiler.create_stats()\n profiler.dump_stats('profiles/{}'.format(filename))\n\n\n@contextmanager\ndef timing(name: (str | None)=None) ->'Iterator[None]':\n \"\"\" Runs the wrapped code and prints the time in ms it took to run it.\n The name is printed in front of the time, if given.\n\n \"\"\"\n start = perf_counter()\n yield\n duration_ms = 1000.0 * (perf_counter() - start)\n if name:\n print(f'{name}: {duration_ms:.0f} ms')\n else:\n print(f'{duration_ms:.0f} ms')\n\n\n<mask token>\n\n\nclass Bunch:\n \"\"\" A simple but handy \"collector of a bunch of named stuff\" class.\n\n See `<https://code.activestate.com/recipes/ 52308-the-simple-but-handy-collector-of-a-bunch-of-named/>`_.\n\n For example::\n\n point = Bunch(x=1, y=2)\n assert point.x == 1\n assert point.y == 2\n\n point.z = 3\n assert point.z == 3\n\n Allows the creation of simple nested bunches, for example::\n\n request = Bunch(**{'app.settings.org.my_setting': True})\n assert request.app.settings.org.my_setting is True\n\n \"\"\"\n\n def __init__(self, **kwargs: Any):\n self.__dict__.update((key, value) for key, value in kwargs.items() if\n '.' not in key)\n for key, value in kwargs.items():\n if '.' in key:\n name, _, key = key.partition('.')\n setattr(self, name, Bunch(**{key: value}))\n if TYPE_CHECKING:\n\n def __getattr__(self, name: str) ->Any:\n ...\n\n def __setattr__(self, name: str, value: Any) ->None:\n ...\n\n def __delattr__(self, name: str) ->None:\n ...\n\n def __eq__(self, other: object) ->bool:\n if type(other) is type(self):\n return self.__dict__ == other.__dict__\n return False\n\n def __ne__(self, other: object) ->bool:\n return not self.__eq__(other)\n\n\n<mask token>\n\n\ndef hash_dictionary(dictionary: dict[str, Any]) ->str:\n \"\"\" Computes a sha256 hash for the given dictionary. The dictionary\n is expected to only contain values that can be serialized by json.\n\n That includes int, decimal, string, boolean.\n\n Note that this function is not meant to be used for hashing secrets. 
Do\n not include data in this dictionary that is secret!\n\n \"\"\"\n dict_as_string = json.dumps(dictionary, sort_keys=True).encode('utf-8')\n return hashlib.new('sha1', dict_as_string, usedforsecurity=False\n ).hexdigest()\n\n\n<mask token>\n\n\ndef linkify_phone(text: str) ->str:\n \"\"\" Takes a string and replaces valid phone numbers with html links. If a\n phone number is matched, it will be replaced by the result of a callback\n function, that does further checks on the regex match. If these checks do\n not pass, the matched number will remain unchanged.\n\n \"\"\"\n\n def strip_whitespace(number: str) ->str:\n return re.sub('\\\\s', '', number)\n\n def is_valid_length(number: str) ->bool:\n if number.startswith('+00'):\n return False\n if number.startswith('00'):\n return len(number) == 13\n elif number.startswith('0'):\n return len(number) == 10\n elif number.startswith('+'):\n return len(number) == 12\n return False\n\n def handle_match(match: 'Match[str]') ->str:\n inside_html = match.group(1)\n number = f'{match.group(2)}{match.group(3)}'\n assert not number.endswith('\\n')\n if inside_html:\n return match.group(0)\n if is_valid_length(strip_whitespace(number)):\n number = remove_repeated_spaces(number).strip()\n return f'<a href=\"tel:{number}\">{number}</a> '\n return match.group(0)\n return _phone_ch_html_safe.sub(handle_match, text)\n\n\ndef linkify(text: str, escape: bool=True) ->str:\n \"\"\" Takes plain text and injects html links for urls and email addresses.\n\n By default the text is html escaped before it is linkified. This accounts\n for the fact that we usually use this for text blocks that we mean to\n extend with email addresses and urls.\n\n If html is already possible, why linkify it?\n\n Note: We need to clean the html after we've created it (linkify\n parses escaped html and turns it into real html). 
As a consequence it\n is possible to have html urls in the text that won't be escaped.\n\n \"\"\"\n if not text:\n return text\n long_top_level_domains = ['.agency']\n if any(domain in text for domain in long_top_level_domains):\n if '@' in text:\n linkified = str(Markup('<a href=\"mailto:{text}\">{text}</a>').\n format(text=text))\n else:\n linkified = str(Markup('<a href=\"{text}\">{text}</a>').format(\n text=text))\n else:\n linkified = linkify_phone(bleach.linkify(text, parse_email=True))\n if not escape:\n return linkified\n return bleach.clean(linkified, tags=['a'], attributes={'a': ['href',\n 'rel']}, protocols=['http', 'https', 'mailto', 'tel'])\n\n\ndef paragraphify(text: str) ->str:\n \"\"\" Takes a text with newlines groups them into paragraphs according to the\n following rules:\n\n If there's a single newline between two lines, a <br> will replace that\n newline.\n\n If there are multiple newlines between two lines, each line will become\n a paragraph and the extra newlines are discarded.\n\n \"\"\"\n text = text and text.replace('\\r', '').strip('\\n')\n if not text:\n return ''\n return ''.join(f'<p>{p}</p>' for p in (p.replace('\\n', '<br>') for p in\n _multiple_newlines.split(text)))\n\n\ndef to_html_ul(value: str, convert_dashes: bool=True, with_title: bool=False\n ) ->str:\n \"\"\" Linkify and convert to text to one or multiple ul's or paragraphs.\n \"\"\"\n if not value:\n return ''\n value = value.replace('\\r', '').strip('\\n')\n value = value.replace('\\n\\n', '\\n \\n')\n if not convert_dashes:\n return '<p>{}</p>'.format('<br>'.join(linkify(value).splitlines()))\n elements = []\n temp: list[str] = []\n\n def ul(inner: str) ->str:\n return f'<ul class=\"bulleted\">{inner}</ul>'\n\n def li(inner: str) ->str:\n return f'<li>{inner}</li>'\n\n def p(inner: str) ->str:\n return f'<p>{inner}</p>'\n was_list = False\n for i, line in enumerate(value.splitlines()):\n if not line:\n continue\n line = linkify(line)\n is_list = line.startswith('-')\n new_p_or_ul = True if line == ' ' else False\n line = line.lstrip('-').strip()\n if with_title:\n elements.append(p(f'<span class=\"title\">{line}</span>'))\n with_title = False\n else:\n if new_p_or_ul or was_list != is_list and i > 0:\n elements.append(ul(''.join(temp)) if was_list else p('<br>'\n .join(temp)))\n temp = []\n was_list = False\n if not new_p_or_ul:\n temp.append(li(line) if is_list else line)\n new_p_or_ul = False\n was_list = is_list\n if temp:\n elements.append(ul(''.join(temp)) if was_list else p('<br>'.join(temp))\n )\n return ''.join(elements)\n\n\ndef ensure_scheme(url: str, default: str='http') ->str:\n \"\"\" Makes sure that the given url has a scheme in front, if none\n was provided.\n\n \"\"\"\n if not url:\n return url\n if '//' not in url:\n url = '//' + url\n _url = URL(url)\n if _url.scheme():\n return url\n return _url.scheme(default).as_string()\n\n\ndef is_uuid(value: (str | UUID)) ->bool:\n \"\"\" Returns true if the given value is a uuid. The value may be a string\n or of type UUID. If it's a string, the uuid is checked with a regex.\n \"\"\"\n if isinstance(value, str):\n return _uuid.match(str(value)) and True or False\n return isinstance(value, UUID)\n\n\n<mask token>\n\n\ndef is_subpath(directory: str, path: str) ->bool:\n \"\"\" Returns true if the given path is inside the given directory. 
\"\"\"\n directory = os.path.join(os.path.realpath(directory), '')\n path = os.path.realpath(path)\n return os.path.commonprefix([path, directory]) == directory\n\n\n@overload\ndef is_sorted(iterable: 'Iterable[SupportsRichComparison]', key:\n 'Callable[[SupportsRichComparison], SupportsRichComparison]'=...,\n reverse: bool=...) ->bool:\n ...\n\n\n@overload\ndef is_sorted(iterable: 'Iterable[_T]', key:\n 'Callable[[_T], SupportsRichComparison]', reverse: bool=...) ->bool:\n ...\n\n\n<mask token>\n\n\ndef morepath_modules(cls: type[morepath.App]) ->'Iterator[str]':\n \"\"\" Returns all morepath modules which should be scanned for the given\n morepath application class.\n\n We can't reliably know the actual morepath modules that\n need to be scanned, which is why we assume that each module has\n one namespace (like 'more.transaction' or 'onegov.core').\n\n \"\"\"\n for base in cls.__mro__:\n if not issubclass(base, morepath.App):\n continue\n if base is morepath.App:\n continue\n module = '.'.join(base.__module__.split('.')[:2])\n if module.startswith('test'):\n continue\n yield module\n\n\n<mask token>\n\n\ndef get_unique_hstore_keys(session: 'Session', column: 'Column[dict[str, Any]]'\n ) ->set[str]:\n \"\"\" Returns a set of keys found in an hstore column over all records\n of its table.\n\n \"\"\"\n base = session.query(column.keys()).with_entities(sqlalchemy.func.skeys\n (column).label('keys'))\n query = sqlalchemy.select([sqlalchemy.func.array_agg(sqlalchemy.column(\n 'keys'))], distinct=True).select_from(base.subquery())\n keys = session.execute(query).scalar()\n return set(keys) if keys else set()\n\n\ndef makeopendir(fs: 'FS', directory: str) ->'SubFS[FS]':\n \"\"\" Creates and opens the given directory in the given PyFilesystem. \"\"\"\n if not fs.isdir(directory):\n fs.makedir(directory)\n return fs.opendir(directory)\n\n\ndef append_query_param(url: str, key: str, value: str) ->str:\n \"\"\" Appends a single query parameter to an url. This is faster than\n using Purl, if and only if we only add one query param.\n\n Also this function assumes that the value is already url encoded.\n\n \"\"\"\n template = '?' in url and '{}&{}={}' or '{}?{}={}'\n return template.format(url, key, value)\n\n\nclass PostThread(Thread):\n \"\"\" POSTs the given data with the headers to the URL.\n\n Example::\n\n data = {'a': 1, 'b': 2}\n data = json.dumps(data).encode('utf-8')\n PostThread(\n 'https://example.com/post',\n data,\n (\n ('Content-Type', 'application/json; charset=utf-8'),\n ('Content-Length', len(data))\n )\n ).start()\n\n This only works for external URLs! 
If posting to the server itself is\n needed, use a process instead of the thread!\n\n \"\"\"\n\n def __init__(self, url: str, data: bytes, headers:\n 'Collection[tuple[str, str]]', timeout: float=30):\n Thread.__init__(self)\n self.url = url\n self.data = data\n self.headers = headers\n self.timeout = timeout\n\n def run(self) ->None:\n try:\n if not self.url.lower().startswith('http'):\n raise ValueError from None\n request = urllib.request.Request(self.url)\n for header in self.headers:\n request.add_header(header[0], header[1])\n urllib.request.urlopen(request, self.data, self.timeout)\n except Exception as e:\n log.error('Error while sending a POST request to {}: {}'.format\n (self.url, str(e)))\n\n\n<mask token>\n\n\ndef dictionary_to_binary(dictionary: 'LaxFileDict') ->bytes:\n \"\"\" Takes a dictionary created by :func:`binary_to_dictionary` and returns\n the original binary data.\n\n \"\"\"\n data = base64.b64decode(dictionary['data'])\n with gzip.GzipFile(fileobj=BytesIO(data), mode='r') as f:\n return f.read()\n\n\n@overload\ndef safe_format(format: str, dictionary: dict[str, str | int | float],\n types: None=..., adapt: 'Callable[[str], str] | None'=...,\n raise_on_missing: bool=...) ->str:\n ...\n\n\n<mask token>\n\n\ndef is_valid_yubikey(client_id: str, secret_key: str, expected_yubikey_id:\n str, yubikey: str) ->bool:\n \"\"\" Asks the yubico validation servers if the given yubikey OTP is valid.\n\n :client_id:\n The yubico API client id.\n\n :secret_key:\n The yubico API secret key.\n\n :expected_yubikey_id:\n The expected yubikey id. The yubikey id is defined as the first twelve\n characters of any yubikey value. Each user should have a yubikey\n associated with its account. If the yubikey value comes from a\n different key, the key is invalid.\n\n :yubikey:\n The actual yubikey value that should be verified.\n\n :return: True if yubico confirmed the validity of the key.\n\n \"\"\"\n assert client_id and secret_key and expected_yubikey_id and yubikey\n assert len(expected_yubikey_id) == 12\n if not yubikey.startswith(expected_yubikey_id):\n return False\n try:\n return Yubico(client_id, secret_key).verify(yubikey)\n except StatusCodeError as e:\n if e.status_code != 'REPLAYED_OTP':\n raise e\n return False\n except SignatureVerificationError:\n return False\n\n\n<mask token>\n\n\ndef yubikey_otp_to_serial(otp: str) ->(int | None):\n \"\"\" Takes a Yubikey OTP and calculates the serial number of the key.\n\n The serial number is printed on the yubikey, in decimal and as a QR code.\n\n Example:\n\n >>> yubikey_otp_to_serial(\n 'ccccccdefghdefghdefghdefghdefghdefghdefghklv')\n 2311522\n\n Adapted from Java:\n\n https://github.com/Yubico/yubikey-salesforce-client/blob/\n e38e46ee90296a852374a8b744555e99d16b6ca7/src/classes/Modhex.cls\n\n If the key cannot be calculated, None is returned. This can happen if\n the key is malformed.\n\n \"\"\"\n if not is_valid_yubikey_format(otp):\n return None\n token = 'cccc' + otp[:12]\n toggle = False\n keep = 0\n bytesarray = []\n for char in token:\n n = ALPHABET.index(char)\n toggle = not toggle\n if toggle:\n keep = n\n else:\n bytesarray.append(keep << 4 | n)\n value = 0\n mask_value = 31\n for i in range(0, 8):\n shift = (4 - 1 - i) * 8\n value += (bytesarray[i] & 255) << (shift & mask_value)\n return value\n\n\n<mask token>\n\n\ndef dict_path(dictionary: dict[str, _T], path: str) ->_T:\n \"\"\" Gets the value of the given dictionary at the given path. 
For example:\n\n >>> data = {'foo': {'bar': True}}\n >>> dict_path(data, 'foo.bar')\n True\n\n \"\"\"\n if not dictionary:\n raise KeyError()\n return reduce(operator.getitem, path.split('.'), dictionary)\n\n\n<mask token>\n\n\n@overload\ndef batched(iterable: Iterable[_T], batch_size: int, container_factory:\n 'type[list]') ->'Iterator[list[_T]]':\n ...\n\n\n@overload\ndef batched(iterable: Iterable[_T], batch_size: int, container_factory:\n 'Callable[[Iterator[_T]], Collection[_T]]') ->'Iterator[Collection[_T]]':\n ...\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\n@contextmanager\ndef local_lock(namespace: str, key: str) ->'Iterator[None]':\n \"\"\" Locks the given namespace/key combination on the current system,\n automatically freeing it after the with statement has been completed or\n once the process is killed.\n\n Usage::\n\n with lock('namespace', 'key'):\n pass\n\n \"\"\"\n name = f'{namespace}-{key}'.replace('/', '-')\n with open(f'/tmp/{name}', 'w+') as f:\n try:\n fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)\n yield\n fcntl.flock(f, fcntl.LOCK_UN)\n except BlockingIOError as exception:\n raise AlreadyLockedError from exception\n\n\ndef normalize_for_url(text: str) ->str:\n \"\"\" Takes the given text and makes it fit to be used for an url.\n\n That means replacing spaces and other unwanted characters with '-',\n lowercasing everything and turning unicode characters into their closest\n ascii equivalent using Unidecode.\n\n See https://pypi.python.org/pypi/Unidecode\n\n \"\"\"\n text = text.replace('ü', 'ue')\n text = text.replace('ä', 'ae')\n text = text.replace('ö', 'oe')\n clean = _unwanted_url_chars.sub('-', unidecode(text).strip(' ').lower())\n clean = _double_dash.sub('-', clean)\n clean = clean.strip('-')\n return clean\n\n\ndef increment_name(name: str) ->str:\n \"\"\" Takes the given name and adds a numbered suffix beginning at 1.\n\n For example::\n\n foo => foo-1\n foo-1 => foo-2\n\n \"\"\"\n match = _number_suffix.search(name)\n if match:\n number_str = match.group(1)\n next_number = int(number_str) + 1\n return f'{name[:-len(number_str)]}{next_number}'\n else:\n return f'{name}-1'\n\n\ndef remove_repeated_spaces(text: str) ->str:\n \"\"\" Removes repeated spaces in the text ('a b' -> 'a b'). 
\"\"\"\n return _repeated_spaces.sub(' ', text)\n\n\n@contextmanager\ndef profile(filename: str) ->'Iterator[None]':\n \"\"\" Profiles the wrapped code and stores the result in the profiles folder\n with the given filename.\n\n \"\"\"\n profiler = Profile()\n profiler.enable()\n yield\n profiler.disable()\n profiler.create_stats()\n profiler.dump_stats('profiles/{}'.format(filename))\n\n\n@contextmanager\ndef timing(name: (str | None)=None) ->'Iterator[None]':\n \"\"\" Runs the wrapped code and prints the time in ms it took to run it.\n The name is printed in front of the time, if given.\n\n \"\"\"\n start = perf_counter()\n yield\n duration_ms = 1000.0 * (perf_counter() - start)\n if name:\n print(f'{name}: {duration_ms:.0f} ms')\n else:\n print(f'{duration_ms:.0f} ms')\n\n\n<mask token>\n\n\nclass Bunch:\n \"\"\" A simple but handy \"collector of a bunch of named stuff\" class.\n\n See `<https://code.activestate.com/recipes/52308-the-simple-but-handy-collector-of-a-bunch-of-named/>`_.\n\n For example::\n\n point = Bunch(x=1, y=2)\n assert point.x == 1\n assert point.y == 2\n\n point.z = 3\n assert point.z == 3\n\n Allows the creation of simple nested bunches, for example::\n\n request = Bunch(**{'app.settings.org.my_setting': True})\n assert request.app.settings.org.my_setting is True\n\n \"\"\"\n\n def __init__(self, **kwargs: Any):\n self.__dict__.update((key, value) for key, value in kwargs.items() if\n '.' not in key)\n for key, value in kwargs.items():\n if '.' in key:\n name, _, key = key.partition('.')\n setattr(self, name, Bunch(**{key: value}))\n if TYPE_CHECKING:\n\n def __getattr__(self, name: str) ->Any:\n ...\n\n def __setattr__(self, name: str, value: Any) ->None:\n ...\n\n def __delattr__(self, name: str) ->None:\n ...\n\n def __eq__(self, other: object) ->bool:\n if type(other) is type(self):\n return self.__dict__ == other.__dict__\n return False\n\n def __ne__(self, other: object) ->bool:\n return not self.__eq__(other)\n\n\n<mask token>\n\n\ndef hash_dictionary(dictionary: dict[str, Any]) ->str:\n \"\"\" Computes a sha1 hash for the given dictionary. The dictionary\n is expected to only contain values that can be serialized by json.\n\n That includes int, decimal, string, boolean.\n\n Note that this function is not meant to be used for hashing secrets. Do\n not include data in this dictionary that is secret!\n\n \"\"\"\n dict_as_string = json.dumps(dictionary, sort_keys=True).encode('utf-8')\n return hashlib.new('sha1', dict_as_string, usedforsecurity=False\n ).hexdigest()\n\n\n<mask token>\n\n\ndef linkify_phone(text: str) ->str:\n \"\"\" Takes a string and replaces valid phone numbers with html links. If a\n phone number is matched, it will be replaced by the result of a callback\n function that does further checks on the regex match. 
If these checks do\n not pass, the matched number will remain unchanged.\n\n \"\"\"\n\n def strip_whitespace(number: str) ->str:\n return re.sub('\\\\s', '', number)\n\n def is_valid_length(number: str) ->bool:\n if number.startswith('+00'):\n return False\n if number.startswith('00'):\n return len(number) == 13\n elif number.startswith('0'):\n return len(number) == 10\n elif number.startswith('+'):\n return len(number) == 12\n return False\n\n def handle_match(match: 'Match[str]') ->str:\n inside_html = match.group(1)\n number = f'{match.group(2)}{match.group(3)}'\n assert not number.endswith('\\n')\n if inside_html:\n return match.group(0)\n if is_valid_length(strip_whitespace(number)):\n number = remove_repeated_spaces(number).strip()\n return f'<a href=\"tel:{number}\">{number}</a> '\n return match.group(0)\n return _phone_ch_html_safe.sub(handle_match, text)\n\n\ndef linkify(text: str, escape: bool=True) ->str:\n \"\"\" Takes plain text and injects html links for urls and email addresses.\n\n By default the text is html escaped before it is linkified. This accounts\n for the fact that we usually use this for text blocks that we mean to\n extend with email addresses and urls.\n\n If html is already possible, why linkify it?\n\n Note: We need to clean the html after we've created it (linkify\n parses escaped html and turns it into real html). As a consequence it\n is possible to have html urls in the text that won't be escaped.\n\n \"\"\"\n if not text:\n return text\n long_top_level_domains = ['.agency']\n if any(domain in text for domain in long_top_level_domains):\n if '@' in text:\n linkified = str(Markup('<a href=\"mailto:{text}\">{text}</a>').\n format(text=text))\n else:\n linkified = str(Markup('<a href=\"{text}\">{text}</a>').format(\n text=text))\n else:\n linkified = linkify_phone(bleach.linkify(text, parse_email=True))\n if not escape:\n return linkified\n return bleach.clean(linkified, tags=['a'], attributes={'a': ['href',\n 'rel']}, protocols=['http', 'https', 'mailto', 'tel'])\n\n\ndef paragraphify(text: str) ->str:\n \"\"\" Takes a text with newlines groups them into paragraphs according to the\n following rules:\n\n If there's a single newline between two lines, a <br> will replace that\n newline.\n\n If there are multiple newlines between two lines, each line will become\n a paragraph and the extra newlines are discarded.\n\n \"\"\"\n text = text and text.replace('\\r', '').strip('\\n')\n if not text:\n return ''\n return ''.join(f'<p>{p}</p>' for p in (p.replace('\\n', '<br>') for p in\n _multiple_newlines.split(text)))\n\n\ndef to_html_ul(value: str, convert_dashes: bool=True, with_title: bool=False\n ) ->str:\n \"\"\" Linkify and convert to text to one or multiple ul's or paragraphs.\n \"\"\"\n if not value:\n return ''\n value = value.replace('\\r', '').strip('\\n')\n value = value.replace('\\n\\n', '\\n \\n')\n if not convert_dashes:\n return '<p>{}</p>'.format('<br>'.join(linkify(value).splitlines()))\n elements = []\n temp: list[str] = []\n\n def ul(inner: str) ->str:\n return f'<ul class=\"bulleted\">{inner}</ul>'\n\n def li(inner: str) ->str:\n return f'<li>{inner}</li>'\n\n def p(inner: str) ->str:\n return f'<p>{inner}</p>'\n was_list = False\n for i, line in enumerate(value.splitlines()):\n if not line:\n continue\n line = linkify(line)\n is_list = line.startswith('-')\n new_p_or_ul = True if line == ' ' else False\n line = line.lstrip('-').strip()\n if with_title:\n elements.append(p(f'<span class=\"title\">{line}</span>'))\n with_title = False\n else:\n 
if new_p_or_ul or was_list != is_list and i > 0:\n elements.append(ul(''.join(temp)) if was_list else p('<br>'\n .join(temp)))\n temp = []\n was_list = False\n if not new_p_or_ul:\n temp.append(li(line) if is_list else line)\n new_p_or_ul = False\n was_list = is_list\n if temp:\n elements.append(ul(''.join(temp)) if was_list else p('<br>'.join(temp))\n )\n return ''.join(elements)\n\n\ndef ensure_scheme(url: str, default: str='http') ->str:\n \"\"\" Makes sure that the given url has a scheme in front, if none\n was provided.\n\n \"\"\"\n if not url:\n return url\n if '//' not in url:\n url = '//' + url\n _url = URL(url)\n if _url.scheme():\n return url\n return _url.scheme(default).as_string()\n\n\ndef is_uuid(value: (str | UUID)) ->bool:\n \"\"\" Returns true if the given value is a uuid. The value may be a string\n or of type UUID. If it's a string, the uuid is checked with a regex.\n \"\"\"\n if isinstance(value, str):\n return _uuid.match(str(value)) and True or False\n return isinstance(value, UUID)\n\n\n<mask token>\n\n\ndef relative_url(absolute_url: (str | None)) ->str:\n \"\"\" Removes everything in front of the path, including scheme, host,\n username, password and port.\n\n \"\"\"\n url = URL._mutate(URL(absolute_url), scheme=None, username=None,\n password=None, host=None, port=None)\n return url.as_string()\n\n\ndef is_subpath(directory: str, path: str) ->bool:\n \"\"\" Returns true if the given path is inside the given directory. \"\"\"\n directory = os.path.join(os.path.realpath(directory), '')\n path = os.path.realpath(path)\n return os.path.commonprefix([path, directory]) == directory\n\n\n@overload\ndef is_sorted(iterable: 'Iterable[SupportsRichComparison]', key:\n 'Callable[[SupportsRichComparison], SupportsRichComparison]'=...,\n reverse: bool=...) ->bool:\n ...\n\n\n@overload\ndef is_sorted(iterable: 'Iterable[_T]', key:\n 'Callable[[_T], SupportsRichComparison]', reverse: bool=...) ->bool:\n ...\n\n\n<mask token>\n\n\ndef morepath_modules(cls: type[morepath.App]) ->'Iterator[str]':\n \"\"\" Returns all morepath modules which should be scanned for the given\n morepath application class.\n\n We can't reliably know the actual morepath modules that\n need to be scanned, which is why we assume that each module has\n one namespace (like 'more.transaction' or 'onegov.core').\n\n \"\"\"\n for base in cls.__mro__:\n if not issubclass(base, morepath.App):\n continue\n if base is morepath.App:\n continue\n module = '.'.join(base.__module__.split('.')[:2])\n if module.startswith('test'):\n continue\n yield module\n\n\n<mask token>\n\n\ndef get_unique_hstore_keys(session: 'Session', column: 'Column[dict[str, Any]]'\n ) ->set[str]:\n \"\"\" Returns a set of keys found in an hstore column over all records\n of its table.\n\n \"\"\"\n base = session.query(column.keys()).with_entities(sqlalchemy.func.skeys\n (column).label('keys'))\n query = sqlalchemy.select([sqlalchemy.func.array_agg(sqlalchemy.column(\n 'keys'))], distinct=True).select_from(base.subquery())\n keys = session.execute(query).scalar()\n return set(keys) if keys else set()\n\n\ndef makeopendir(fs: 'FS', directory: str) ->'SubFS[FS]':\n \"\"\" Creates and opens the given directory in the given PyFilesystem. \"\"\"\n if not fs.isdir(directory):\n fs.makedir(directory)\n return fs.opendir(directory)\n\n\ndef append_query_param(url: str, key: str, value: str) ->str:\n \"\"\" Appends a single query parameter to an url. 
This is faster than\n using Purl, if and only if we only add one query param.\n\n Also this function assumes that the value is already url encoded.\n\n \"\"\"\n template = '?' in url and '{}&{}={}' or '{}?{}={}'\n return template.format(url, key, value)\n\n\nclass PostThread(Thread):\n \"\"\" POSTs the given data with the headers to the URL.\n\n Example::\n\n data = {'a': 1, 'b': 2}\n data = json.dumps(data).encode('utf-8')\n PostThread(\n 'https://example.com/post',\n data,\n (\n ('Content-Type', 'application/json; charset=utf-8'),\n ('Content-Length', len(data))\n )\n ).start()\n\n This only works for external URLs! If posting to the server itself is\n needed, use a process instead of the thread!\n\n \"\"\"\n\n def __init__(self, url: str, data: bytes, headers:\n 'Collection[tuple[str, str]]', timeout: float=30):\n Thread.__init__(self)\n self.url = url\n self.data = data\n self.headers = headers\n self.timeout = timeout\n\n def run(self) ->None:\n try:\n if not self.url.lower().startswith('http'):\n raise ValueError from None\n request = urllib.request.Request(self.url)\n for header in self.headers:\n request.add_header(header[0], header[1])\n urllib.request.urlopen(request, self.data, self.timeout)\n except Exception as e:\n log.error('Error while sending a POST request to {}: {}'.format\n (self.url, str(e)))\n\n\n<mask token>\n\n\ndef dictionary_to_binary(dictionary: 'LaxFileDict') ->bytes:\n \"\"\" Takes a dictionary created by :func:`binary_to_dictionary` and returns\n the original binary data.\n\n \"\"\"\n data = base64.b64decode(dictionary['data'])\n with gzip.GzipFile(fileobj=BytesIO(data), mode='r') as f:\n return f.read()\n\n\n@overload\ndef safe_format(format: str, dictionary: dict[str, str | int | float],\n types: None=..., adapt: 'Callable[[str], str] | None'=...,\n raise_on_missing: bool=...) ->str:\n ...\n\n\n<mask token>\n\n\ndef is_valid_yubikey(client_id: str, secret_key: str, expected_yubikey_id:\n str, yubikey: str) ->bool:\n \"\"\" Asks the yubico validation servers if the given yubikey OTP is valid.\n\n :client_id:\n The yubico API client id.\n\n :secret_key:\n The yubico API secret key.\n\n :expected_yubikey_id:\n The expected yubikey id. The yubikey id is defined as the first twelve\n characters of any yubikey value. Each user should have a yubikey\n associated with its account. If the yubikey value comes from a\n different key, the key is invalid.\n\n :yubikey:\n The actual yubikey value that should be verified.\n\n :return: True if yubico confirmed the validity of the key.\n\n \"\"\"\n assert client_id and secret_key and expected_yubikey_id and yubikey\n assert len(expected_yubikey_id) == 12\n if not yubikey.startswith(expected_yubikey_id):\n return False\n try:\n return Yubico(client_id, secret_key).verify(yubikey)\n except StatusCodeError as e:\n if e.status_code != 'REPLAYED_OTP':\n raise e\n return False\n except SignatureVerificationError:\n return False\n\n\n<mask token>\n\n\ndef yubikey_otp_to_serial(otp: str) ->(int | None):\n \"\"\" Takes a Yubikey OTP and calculates the serial number of the key.\n\n The serial number is printed on the yubikey, in decimal and as a QR code.\n\n Example:\n\n >>> yubikey_otp_to_serial(\n 'ccccccdefghdefghdefghdefghdefghdefghdefghklv')\n 2311522\n\n Adapted from Java:\n\n https://github.com/Yubico/yubikey-salesforce-client/blob/\n e38e46ee90296a852374a8b744555e99d16b6ca7/src/classes/Modhex.cls\n\n If the key cannot be calculated, None is returned. 
This can happen if\n they key is malformed.\n\n \"\"\"\n if not is_valid_yubikey_format(otp):\n return None\n token = 'cccc' + otp[:12]\n toggle = False\n keep = 0\n bytesarray = []\n for char in token:\n n = ALPHABET.index(char)\n toggle = not toggle\n if toggle:\n keep = n\n else:\n bytesarray.append(keep << 4 | n)\n value = 0\n mask_value = 31\n for i in range(0, 8):\n shift = (4 - 1 - i) * 8\n value += (bytesarray[i] & 255) << (shift & mask_value)\n return value\n\n\n<mask token>\n\n\ndef dict_path(dictionary: dict[str, _T], path: str) ->_T:\n \"\"\" Gets the value of the given dictionary at the given path. For example:\n\n >>> data = {'foo': {'bar': True}}\n >>> dict_path(data, 'foo.bar')\n True\n\n \"\"\"\n if not dictionary:\n raise KeyError()\n return reduce(operator.getitem, path.split('.'), dictionary)\n\n\n<mask token>\n\n\n@overload\ndef batched(iterable: Iterable[_T], batch_size: int, container_factory:\n 'type[list]') ->'Iterator[list[_T]]':\n ...\n\n\n@overload\ndef batched(iterable: Iterable[_T], batch_size: int, container_factory:\n 'Callable[[Iterator[_T]], Collection[_T]]') ->'Iterator[Collection[_T]]':\n ...\n\n\ndef batched(iterable: Iterable[_T], batch_size: int, container_factory:\n 'Callable[[Iterator[_T]], Collection[_T]]'=tuple\n ) ->'Iterator[Collection[_T]]':\n \"\"\" Splits an iterable into batches of batch_size and puts them\n inside a given collection (tuple by default).\n\n The container_factory is necessary in order to consume the iterator\n returned by islice. Otherwise this function would never return.\n\n \"\"\"\n iterator = iter(iterable)\n while True:\n batch = container_factory(islice(iterator, batch_size))\n if len(batch) == 0:\n return\n yield batch\n", "step-3": "<mask token>\n\n\n@contextmanager\ndef local_lock(namespace: str, key: str) ->'Iterator[None]':\n \"\"\" Locks the given namespace/key combination on the current system,\n automatically freeing it after the with statement has been completed or\n once the process is killed.\n\n Usage::\n\n with lock('namespace', 'key'):\n pass\n\n \"\"\"\n name = f'{namespace}-{key}'.replace('/', '-')\n with open(f'/tmp/{name}', 'w+') as f:\n try:\n fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)\n yield\n fcntl.flock(f, fcntl.LOCK_UN)\n except BlockingIOError as exception:\n raise AlreadyLockedError from exception\n\n\ndef normalize_for_url(text: str) ->str:\n \"\"\" Takes the given text and makes it fit to be used for an url.\n\n That means replacing spaces and other unwanted characters with '-',\n lowercasing everything and turning unicode characters into their closest\n ascii equivalent using Unidecode.\n\n See https://pypi.python.org/pypi/Unidecode\n\n \"\"\"\n text = text.replace('ü', 'ue')\n text = text.replace('ä', 'ae')\n text = text.replace('ö', 'oe')\n clean = _unwanted_url_chars.sub('-', unidecode(text).strip(' ').lower())\n clean = _double_dash.sub('-', clean)\n clean = clean.strip('-')\n return clean\n\n\ndef increment_name(name: str) ->str:\n \"\"\" Takes the given name and adds a numbered suffix beginning at 1.\n\n For example::\n\n foo => foo-1\n foo-1 => foo-2\n\n \"\"\"\n match = _number_suffix.search(name)\n if match:\n number_str = match.group(1)\n next_number = int(number_str) + 1\n return f'{name[:-len(number_str)]}{next_number}'\n else:\n return f'{name}-1'\n\n\ndef remove_repeated_spaces(text: str) ->str:\n \"\"\" Removes repeated spaces in the text ('a b' -> 'a b'). 
\"\"\"\n return _repeated_spaces.sub(' ', text)\n\n\n@contextmanager\ndef profile(filename: str) ->'Iterator[None]':\n \"\"\" Profiles the wrapped code and stores the result in the profiles folder\n with the given filename.\n\n \"\"\"\n profiler = Profile()\n profiler.enable()\n yield\n profiler.disable()\n profiler.create_stats()\n profiler.dump_stats('profiles/{}'.format(filename))\n\n\n@contextmanager\ndef timing(name: (str | None)=None) ->'Iterator[None]':\n \"\"\" Runs the wrapped code and prints the time in ms it took to run it.\n The name is printed in front of the time, if given.\n\n \"\"\"\n start = perf_counter()\n yield\n duration_ms = 1000.0 * (perf_counter() - start)\n if name:\n print(f'{name}: {duration_ms:.0f} ms')\n else:\n print(f'{duration_ms:.0f} ms')\n\n\n<mask token>\n\n\ndef module_path(module: 'ModuleType | str', subpath: str) ->str:\n \"\"\" Returns a subdirectory in the given python module.\n\n :mod:\n A python module (actual module or string)\n\n :subpath:\n Subpath below that python module. Leading slashes ('/') are ignored.\n \"\"\"\n parent = module_path_root(module)\n path = os.path.join(parent, subpath.strip('/'))\n assert is_subpath(parent, path)\n return path\n\n\n<mask token>\n\n\nclass Bunch:\n \"\"\" A simple but handy \"collector of a bunch of named stuff\" class.\n\n See `<https://code.activestate.com/recipes/52308-the-simple-but-handy-collector-of-a-bunch-of-named/>`_.\n\n For example::\n\n point = Bunch(x=1, y=2)\n assert point.x == 1\n assert point.y == 2\n\n point.z = 3\n assert point.z == 3\n\n Allows the creation of simple nested bunches, for example::\n\n request = Bunch(**{'app.settings.org.my_setting': True})\n assert request.app.settings.org.my_setting is True\n\n \"\"\"\n\n def __init__(self, **kwargs: Any):\n self.__dict__.update((key, value) for key, value in kwargs.items() if\n '.' not in key)\n for key, value in kwargs.items():\n if '.' in key:\n name, _, key = key.partition('.')\n setattr(self, name, Bunch(**{key: value}))\n if TYPE_CHECKING:\n\n def __getattr__(self, name: str) ->Any:\n ...\n\n def __setattr__(self, name: str, value: Any) ->None:\n ...\n\n def __delattr__(self, name: str) ->None:\n ...\n\n def __eq__(self, other: object) ->bool:\n if type(other) is type(self):\n return self.__dict__ == other.__dict__\n return False\n\n def __ne__(self, other: object) ->bool:\n return not self.__eq__(other)\n\n\n<mask token>\n\n\ndef hash_dictionary(dictionary: dict[str, Any]) ->str:\n \"\"\" Computes a sha1 hash for the given dictionary. The dictionary\n is expected to only contain values that can be serialized by json.\n\n That includes int, decimal, string, boolean.\n\n Note that this function is not meant to be used for hashing secrets. Do\n not include data in this dictionary that is secret!\n\n \"\"\"\n dict_as_string = json.dumps(dictionary, sort_keys=True).encode('utf-8')\n return hashlib.new('sha1', dict_as_string, usedforsecurity=False\n ).hexdigest()\n\n\n@overload\ndef groupbylist(iterable: Iterable[_T], key: None=...) ->list[tuple[_T,\n list[_T]]]:\n ...\n\n\n<mask token>\n\n\ndef groupbylist(iterable: Iterable[_T], key: 'Callable[[_T], Any] | None'=None\n ) ->list[tuple[Any, list[_T]]]:\n \"\"\" Works just like Python's ``itertools.groupby`` function, but instead\n of returning generators, it returns lists.\n\n \"\"\"\n return [(k, list(g)) for k, g in groupby(iterable, key=key)]\n\n\ndef linkify_phone(text: str) ->str:\n \"\"\" Takes a string and replaces valid phone numbers with html links. 
If a\n phone number is matched, it will be replaced by the result of a callback\n function, that does further checks on the regex match. If these checks do\n not pass, the matched number will remain unchanged.\n\n \"\"\"\n\n def strip_whitespace(number: str) ->str:\n return re.sub('\\\\s', '', number)\n\n def is_valid_length(number: str) ->bool:\n if number.startswith('+00'):\n return False\n if number.startswith('00'):\n return len(number) == 13\n elif number.startswith('0'):\n return len(number) == 10\n elif number.startswith('+'):\n return len(number) == 12\n return False\n\n def handle_match(match: 'Match[str]') ->str:\n inside_html = match.group(1)\n number = f'{match.group(2)}{match.group(3)}'\n assert not number.endswith('\\n')\n if inside_html:\n return match.group(0)\n if is_valid_length(strip_whitespace(number)):\n number = remove_repeated_spaces(number).strip()\n return f'<a href=\"tel:{number}\">{number}</a> '\n return match.group(0)\n return _phone_ch_html_safe.sub(handle_match, text)\n\n\ndef linkify(text: str, escape: bool=True) ->str:\n \"\"\" Takes plain text and injects html links for urls and email addresses.\n\n By default the text is html escaped before it is linkified. This accounts\n for the fact that we usually use this for text blocks that we mean to\n extend with email addresses and urls.\n\n If html is already possible, why linkify it?\n\n Note: We need to clean the html after we've created it (linkify\n parses escaped html and turns it into real html). As a consequence it\n is possible to have html urls in the text that won't be escaped.\n\n \"\"\"\n if not text:\n return text\n long_top_level_domains = ['.agency']\n if any(domain in text for domain in long_top_level_domains):\n if '@' in text:\n linkified = str(Markup('<a href=\"mailto:{text}\">{text}</a>').\n format(text=text))\n else:\n linkified = str(Markup('<a href=\"{text}\">{text}</a>').format(\n text=text))\n else:\n linkified = linkify_phone(bleach.linkify(text, parse_email=True))\n if not escape:\n return linkified\n return bleach.clean(linkified, tags=['a'], attributes={'a': ['href',\n 'rel']}, protocols=['http', 'https', 'mailto', 'tel'])\n\n\ndef paragraphify(text: str) ->str:\n \"\"\" Takes a text with newlines groups them into paragraphs according to the\n following rules:\n\n If there's a single newline between two lines, a <br> will replace that\n newline.\n\n If there are multiple newlines between two lines, each line will become\n a paragraph and the extra newlines are discarded.\n\n \"\"\"\n text = text and text.replace('\\r', '').strip('\\n')\n if not text:\n return ''\n return ''.join(f'<p>{p}</p>' for p in (p.replace('\\n', '<br>') for p in\n _multiple_newlines.split(text)))\n\n\ndef to_html_ul(value: str, convert_dashes: bool=True, with_title: bool=False\n ) ->str:\n \"\"\" Linkify and convert to text to one or multiple ul's or paragraphs.\n \"\"\"\n if not value:\n return ''\n value = value.replace('\\r', '').strip('\\n')\n value = value.replace('\\n\\n', '\\n \\n')\n if not convert_dashes:\n return '<p>{}</p>'.format('<br>'.join(linkify(value).splitlines()))\n elements = []\n temp: list[str] = []\n\n def ul(inner: str) ->str:\n return f'<ul class=\"bulleted\">{inner}</ul>'\n\n def li(inner: str) ->str:\n return f'<li>{inner}</li>'\n\n def p(inner: str) ->str:\n return f'<p>{inner}</p>'\n was_list = False\n for i, line in enumerate(value.splitlines()):\n if not line:\n continue\n line = linkify(line)\n is_list = line.startswith('-')\n new_p_or_ul = True if line == ' ' else False\n line 
= line.lstrip('-').strip()\n if with_title:\n elements.append(p(f'<span class=\"title\">{line}</span>'))\n with_title = False\n else:\n if new_p_or_ul or was_list != is_list and i > 0:\n elements.append(ul(''.join(temp)) if was_list else p('<br>'\n .join(temp)))\n temp = []\n was_list = False\n if not new_p_or_ul:\n temp.append(li(line) if is_list else line)\n new_p_or_ul = False\n was_list = is_list\n if temp:\n elements.append(ul(''.join(temp)) if was_list else p('<br>'.join(temp))\n )\n return ''.join(elements)\n\n\ndef ensure_scheme(url: str, default: str='http') ->str:\n \"\"\" Makes sure that the given url has a scheme in front, if none\n was provided.\n\n \"\"\"\n if not url:\n return url\n if '//' not in url:\n url = '//' + url\n _url = URL(url)\n if _url.scheme():\n return url\n return _url.scheme(default).as_string()\n\n\ndef is_uuid(value: (str | UUID)) ->bool:\n \"\"\" Returns true if the given value is a uuid. The value may be a string\n or of type UUID. If it's a string, the uuid is checked with a regex.\n \"\"\"\n if isinstance(value, str):\n return _uuid.match(str(value)) and True or False\n return isinstance(value, UUID)\n\n\n<mask token>\n\n\ndef relative_url(absolute_url: (str | None)) ->str:\n \"\"\" Removes everything in front of the path, including scheme, host,\n username, password and port.\n\n \"\"\"\n url = URL._mutate(URL(absolute_url), scheme=None, username=None,\n password=None, host=None, port=None)\n return url.as_string()\n\n\ndef is_subpath(directory: str, path: str) ->bool:\n \"\"\" Returns true if the given path is inside the given directory. \"\"\"\n directory = os.path.join(os.path.realpath(directory), '')\n path = os.path.realpath(path)\n return os.path.commonprefix([path, directory]) == directory\n\n\n@overload\ndef is_sorted(iterable: 'Iterable[SupportsRichComparison]', key:\n 'Callable[[SupportsRichComparison], SupportsRichComparison]'=...,\n reverse: bool=...) ->bool:\n ...\n\n\n@overload\ndef is_sorted(iterable: 'Iterable[_T]', key:\n 'Callable[[_T], SupportsRichComparison]', reverse: bool=...) ->bool:\n ...\n\n\n<mask token>\n\n\ndef morepath_modules(cls: type[morepath.App]) ->'Iterator[str]':\n \"\"\" Returns all morepath modules which should be scanned for the given\n morepath application class.\n\n We can't reliably know the actual morepath modules that\n need to be scanned, which is why we assume that each module has\n one namespace (like 'more.transaction' or 'onegov.core').\n\n \"\"\"\n for base in cls.__mro__:\n if not issubclass(base, morepath.App):\n continue\n if base is morepath.App:\n continue\n module = '.'.join(base.__module__.split('.')[:2])\n if module.startswith('test'):\n continue\n yield module\n\n\ndef scan_morepath_modules(cls: type[morepath.App]) ->None:\n \"\"\" Tries to scan all the morepath modules required for the given\n application class. 
This is not guaranteed to stay reliable as there is\n no sure way to discover all modules required by the application class.\n\n \"\"\"\n for module in sorted(morepath_modules(cls)):\n morepath.scan(import_module(module))\n\n\ndef get_unique_hstore_keys(session: 'Session', column: 'Column[dict[str, Any]]'\n ) ->set[str]:\n \"\"\" Returns a set of keys found in an hstore column over all records\n of its table.\n\n \"\"\"\n base = session.query(column.keys()).with_entities(sqlalchemy.func.skeys\n (column).label('keys'))\n query = sqlalchemy.select([sqlalchemy.func.array_agg(sqlalchemy.column(\n 'keys'))], distinct=True).select_from(base.subquery())\n keys = session.execute(query).scalar()\n return set(keys) if keys else set()\n\n\ndef makeopendir(fs: 'FS', directory: str) ->'SubFS[FS]':\n \"\"\" Creates and opens the given directory in the given PyFilesystem. \"\"\"\n if not fs.isdir(directory):\n fs.makedir(directory)\n return fs.opendir(directory)\n\n\ndef append_query_param(url: str, key: str, value: str) ->str:\n \"\"\" Appends a single query parameter to an url. This is faster than\n using Purl, if and only if we only add one query param.\n\n Also this function assumes that the value is already url encoded.\n\n \"\"\"\n template = '?' in url and '{}&{}={}' or '{}?{}={}'\n return template.format(url, key, value)\n\n\nclass PostThread(Thread):\n \"\"\" POSTs the given data with the headers to the URL.\n\n Example::\n\n data = {'a': 1, 'b': 2}\n data = json.dumps(data).encode('utf-8')\n PostThread(\n 'https://example.com/post',\n data,\n (\n ('Content-Type', 'application/json; charset=utf-8'),\n ('Content-Length', len(data))\n )\n ).start()\n\n This only works for external URLs! If posting to server itself is\n needed, use a process instead of the thread!\n\n \"\"\"\n\n def __init__(self, url: str, data: bytes, headers:\n 'Collection[tuple[str, str]]', timeout: float=30):\n Thread.__init__(self)\n self.url = url\n self.data = data\n self.headers = headers\n self.timeout = timeout\n\n def run(self) ->None:\n try:\n if not self.url.lower().startswith('http'):\n raise ValueError from None\n request = urllib.request.Request(self.url)\n for header in self.headers:\n request.add_header(header[0], header[1])\n urllib.request.urlopen(request, self.data, self.timeout)\n except Exception as e:\n log.error('Error while sending a POST request to {}: {}'.format\n (self.url, str(e)))\n\n\n<mask token>\n\n\ndef dictionary_to_binary(dictionary: 'LaxFileDict') ->bytes:\n \"\"\" Takes a dictionary created by :func:`binary_to_dictionary` and returns\n the original binary data.\n\n \"\"\"\n data = base64.b64decode(dictionary['data'])\n with gzip.GzipFile(fileobj=BytesIO(data), mode='r') as f:\n return f.read()\n\n\n@overload\ndef safe_format(format: str, dictionary: dict[str, str | int | float],\n types: None=..., adapt: 'Callable[[str], str] | None'=...,\n raise_on_missing: bool=...) ->str:\n ...\n\n\n@overload\ndef safe_format(format: str, dictionary: dict[str, _T], types: set[type[_T]\n ]=..., adapt: 'Callable[[str], str] | None'=..., raise_on_missing: bool=...\n ) ->str:\n ...\n\n\n<mask token>\n\n\ndef safe_format_keys(format: str, adapt: 'Callable[[str], str] | None'=None\n ) ->list[str]:\n \"\"\" Takes a :func:`safe_format` string and returns the found keys. 
\"\"\"\n keys = []\n\n def adapt_and_record(key: str) ->str:\n key = adapt(key) if adapt else key\n keys.append(key)\n return key\n safe_format(format, {}, adapt=adapt_and_record)\n return keys\n\n\ndef is_valid_yubikey(client_id: str, secret_key: str, expected_yubikey_id:\n str, yubikey: str) ->bool:\n \"\"\" Asks the yubico validation servers if the given yubikey OTP is valid.\n\n :client_id:\n The yubico API client id.\n\n :secret_key:\n The yubico API secret key.\n\n :expected_yubikey_id:\n The expected yubikey id. The yubikey id is defined as the first twelve\n characters of any yubikey value. Each user should have a yubikey\n associated with its account. If the yubikey value comes from a\n different key, the key is invalid.\n\n :yubikey:\n The actual yubikey value that should be verified.\n\n :return: True if yubico confirmed the validity of the key.\n\n \"\"\"\n assert client_id and secret_key and expected_yubikey_id and yubikey\n assert len(expected_yubikey_id) == 12\n if not yubikey.startswith(expected_yubikey_id):\n return False\n try:\n return Yubico(client_id, secret_key).verify(yubikey)\n except StatusCodeError as e:\n if e.status_code != 'REPLAYED_OTP':\n raise e\n return False\n except SignatureVerificationError:\n return False\n\n\ndef is_valid_yubikey_format(otp: str) ->bool:\n \"\"\" Returns True if the given OTP has the correct format. Does not actually\n contact Yubico, so this function may return true for some invalid keys.\n\n \"\"\"\n return ALPHABET_RE.match(otp) and True or False\n\n\ndef yubikey_otp_to_serial(otp: str) ->(int | None):\n \"\"\" Takes a Yubikey OTP and calculates the serial number of the key.\n\n The serial number is printed on the yubikey, in decimal and as a QR code.\n\n Example:\n\n >>> yubikey_otp_to_serial(\n 'ccccccdefghdefghdefghdefghdefghdefghdefghklv')\n 2311522\n\n Adapted from Java:\n\n https://github.com/Yubico/yubikey-salesforce-client/blob/\n e38e46ee90296a852374a8b744555e99d16b6ca7/src/classes/Modhex.cls\n\n If the key cannot be calculated, None is returned. This can happen if\n the key is malformed.\n\n \"\"\"\n if not is_valid_yubikey_format(otp):\n return None\n token = 'cccc' + otp[:12]\n toggle = False\n keep = 0\n bytesarray = []\n for char in token:\n n = ALPHABET.index(char)\n toggle = not toggle\n if toggle:\n keep = n\n else:\n bytesarray.append(keep << 4 | n)\n value = 0\n mask_value = 31\n for i in range(0, 8):\n shift = (4 - 1 - i) * 8\n value += (bytesarray[i] & 255) << (shift & mask_value)\n return value\n\n\n<mask token>\n\n\ndef dict_path(dictionary: dict[str, _T], path: str) ->_T:\n \"\"\" Gets the value of the given dictionary at the given path. For example:\n\n >>> data = {'foo': {'bar': True}}\n >>> dict_path(data, 'foo.bar')\n True\n\n \"\"\"\n if not dictionary:\n raise KeyError()\n return reduce(operator.getitem, path.split('.'), dictionary)\n\n\ndef safe_move(src: str, dst: str) ->None:\n \"\"\" Rename a file from ``src`` to ``dst``.\n\n * Moves must be atomic. ``shutil.move()`` is not atomic.\n\n * Moves must work across filesystems. Often temp directories and the\n cache directories live on different filesystems. 
``os.rename()`` can\n throw errors if run across filesystems.\n\n So we try ``os.rename()``, but if we detect a cross-filesystem copy, we\n switch to ``shutil.move()`` with some wrappers to make it atomic.\n\n Via https://alexwlchan.net/2019/03/atomic-cross-filesystem-moves-in-python\n\n \"\"\"\n try:\n os.rename(src, dst)\n except OSError as err:\n if err.errno == errno.EXDEV:\n copy_id = uuid4()\n tmp_dst = '%s.%s.tmp' % (dst, copy_id)\n shutil.copyfile(src, tmp_dst)\n os.rename(tmp_dst, dst)\n os.unlink(src)\n else:\n raise\n\n\n<mask token>\n\n\n@overload\ndef batched(iterable: Iterable[_T], batch_size: int, container_factory:\n 'type[list]') ->'Iterator[list[_T]]':\n ...\n\n\n@overload\ndef batched(iterable: Iterable[_T], batch_size: int, container_factory:\n 'Callable[[Iterator[_T]], Collection[_T]]') ->'Iterator[Collection[_T]]':\n ...\n\n\ndef batched(iterable: Iterable[_T], batch_size: int, container_factory:\n 'Callable[[Iterator[_T]], Collection[_T]]'=tuple\n ) ->'Iterator[Collection[_T]]':\n \"\"\" Splits an iterable into batches of batch_size and puts them\n inside a given collection (tuple by default).\n\n The container_factory is necessary in order to consume the iterator\n returned by islice. Otherwise this function would never return.\n\n \"\"\"\n iterator = iter(iterable)\n while True:\n batch = container_factory(islice(iterator, batch_size))\n if len(batch) == 0:\n return\n yield batch\n", "step-4": "<mask token>\n\n\n@contextmanager\ndef local_lock(namespace: str, key: str) ->'Iterator[None]':\n \"\"\" Locks the given namespace/key combination on the current system,\n automatically freeing it after the with statement has been completed or\n once the process is killed.\n\n Usage::\n\n with lock('namespace', 'key'):\n pass\n\n \"\"\"\n name = f'{namespace}-{key}'.replace('/', '-')\n with open(f'/tmp/{name}', 'w+') as f:\n try:\n fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)\n yield\n fcntl.flock(f, fcntl.LOCK_UN)\n except BlockingIOError as exception:\n raise AlreadyLockedError from exception\n\n\ndef normalize_for_url(text: str) ->str:\n \"\"\" Takes the given text and makes it fit to be used for an url.\n\n That means replacing spaces and other unwanted characters with '-',\n lowercasing everything and turning unicode characters into their closest\n ascii equivalent using Unidecode.\n\n See https://pypi.python.org/pypi/Unidecode\n\n \"\"\"\n text = text.replace('ü', 'ue')\n text = text.replace('ä', 'ae')\n text = text.replace('ö', 'oe')\n clean = _unwanted_url_chars.sub('-', unidecode(text).strip(' ').lower())\n clean = _double_dash.sub('-', clean)\n clean = clean.strip('-')\n return clean\n\n\ndef increment_name(name: str) ->str:\n \"\"\" Takes the given name and adds a numbered suffix beginning at 1.\n\n For example::\n\n foo => foo-1\n foo-1 => foo-2\n\n \"\"\"\n match = _number_suffix.search(name)\n if match:\n number_str = match.group(1)\n next_number = int(number_str) + 1\n return f'{name[:-len(number_str)]}{next_number}'\n else:\n return f'{name}-1'\n\n\ndef remove_repeated_spaces(text: str) ->str:\n \"\"\" Removes repeated spaces in the text ('a b' -> 'a b'). 
\"\"\"\n return _repeated_spaces.sub(' ', text)\n\n\n@contextmanager\ndef profile(filename: str) ->'Iterator[None]':\n \"\"\" Profiles the wrapped code and stores the result in the profiles folder\n with the given filename.\n\n \"\"\"\n profiler = Profile()\n profiler.enable()\n yield\n profiler.disable()\n profiler.create_stats()\n profiler.dump_stats('profiles/{}'.format(filename))\n\n\n@contextmanager\ndef timing(name: (str | None)=None) ->'Iterator[None]':\n \"\"\" Runs the wrapped code and prints the time in ms it took to run it.\n The name is printed in front of the time, if given.\n\n \"\"\"\n start = perf_counter()\n yield\n duration_ms = 1000.0 * (perf_counter() - start)\n if name:\n print(f'{name}: {duration_ms:.0f} ms')\n else:\n print(f'{duration_ms:.0f} ms')\n\n\n<mask token>\n\n\ndef module_path(module: 'ModuleType | str', subpath: str) ->str:\n \"\"\" Returns a subdirectory in the given python module.\n\n :mod:\n A python module (actual module or string)\n\n :subpath:\n Subpath below that python module. Leading slashes ('/') are ignored.\n \"\"\"\n parent = module_path_root(module)\n path = os.path.join(parent, subpath.strip('/'))\n assert is_subpath(parent, path)\n return path\n\n\ndef touch(file_path: str) ->None:\n \"\"\" Touches the file on the given path. \"\"\"\n try:\n os.utime(file_path, None)\n except Exception:\n open(file_path, 'a').close()\n\n\nclass Bunch:\n \"\"\" A simple but handy \"collector of a bunch of named stuff\" class.\n\n See `<https://code.activestate.com/recipes/52308-the-simple-but-handy-collector-of-a-bunch-of-named/>`_.\n\n For example::\n\n point = Bunch(x=1, y=2)\n assert point.x == 1\n assert point.y == 2\n\n point.z = 3\n assert point.z == 3\n\n Allows the creation of simple nested bunches, for example::\n\n request = Bunch(**{'app.settings.org.my_setting': True})\n assert request.app.settings.org.my_setting is True\n\n \"\"\"\n\n def __init__(self, **kwargs: Any):\n self.__dict__.update((key, value) for key, value in kwargs.items() if\n '.' not in key)\n for key, value in kwargs.items():\n if '.' in key:\n name, _, key = key.partition('.')\n setattr(self, name, Bunch(**{key: value}))\n if TYPE_CHECKING:\n\n def __getattr__(self, name: str) ->Any:\n ...\n\n def __setattr__(self, name: str, value: Any) ->None:\n ...\n\n def __delattr__(self, name: str) ->None:\n ...\n\n def __eq__(self, other: object) ->bool:\n if type(other) is type(self):\n return self.__dict__ == other.__dict__\n return False\n\n def __ne__(self, other: object) ->bool:\n return not self.__eq__(other)\n\n\n<mask token>\n\n\ndef hash_dictionary(dictionary: dict[str, Any]) ->str:\n \"\"\" Computes a sha1 hash for the given dictionary. The dictionary\n is expected to only contain values that can be serialized by json.\n\n That includes int, decimal, string, boolean.\n\n Note that this function is not meant to be used for hashing secrets. Do\n not include data in this dictionary that is secret!\n\n \"\"\"\n dict_as_string = json.dumps(dictionary, sort_keys=True).encode('utf-8')\n return hashlib.new('sha1', dict_as_string, usedforsecurity=False\n ).hexdigest()\n\n\n@overload\ndef groupbylist(iterable: Iterable[_T], key: None=...) 
->list[tuple[_T,\n list[_T]]]:\n ...\n\n\n@overload\ndef groupbylist(iterable: Iterable[_T], key: 'Callable[[_T], _KT]') ->list[\n tuple[_KT, list[_T]]]:\n ...\n\n\ndef groupbylist(iterable: Iterable[_T], key: 'Callable[[_T], Any] | None'=None\n ) ->list[tuple[Any, list[_T]]]:\n \"\"\" Works just like Python's ``itertools.groupby`` function, but instead\n of returning generators, it returns lists.\n\n \"\"\"\n return [(k, list(g)) for k, g in groupby(iterable, key=key)]\n\n\ndef linkify_phone(text: str) ->str:\n \"\"\" Takes a string and replaces valid phone numbers with html links. If a\n phone number is matched, it will be replaced by the result of a callback\n function, that does further checks on the regex match. If these checks do\n not pass, the matched number will remain unchanged.\n\n \"\"\"\n\n def strip_whitespace(number: str) ->str:\n return re.sub('\\\\s', '', number)\n\n def is_valid_length(number: str) ->bool:\n if number.startswith('+00'):\n return False\n if number.startswith('00'):\n return len(number) == 13\n elif number.startswith('0'):\n return len(number) == 10\n elif number.startswith('+'):\n return len(number) == 12\n return False\n\n def handle_match(match: 'Match[str]') ->str:\n inside_html = match.group(1)\n number = f'{match.group(2)}{match.group(3)}'\n assert not number.endswith('\\n')\n if inside_html:\n return match.group(0)\n if is_valid_length(strip_whitespace(number)):\n number = remove_repeated_spaces(number).strip()\n return f'<a href=\"tel:{number}\">{number}</a> '\n return match.group(0)\n return _phone_ch_html_safe.sub(handle_match, text)\n\n\ndef linkify(text: str, escape: bool=True) ->str:\n \"\"\" Takes plain text and injects html links for urls and email addresses.\n\n By default the text is html escaped before it is linkified. This accounts\n for the fact that we usually use this for text blocks that we mean to\n extend with email addresses and urls.\n\n If html is already possible, why linkify it?\n\n Note: We need to clean the html after we've created it (linkify\n parses escaped html and turns it into real html). 
As a consequence it\n is possible to have html urls in the text that won't be escaped.\n\n \"\"\"\n if not text:\n return text\n long_top_level_domains = ['.agency']\n if any(domain in text for domain in long_top_level_domains):\n if '@' in text:\n linkified = str(Markup('<a href=\"mailto:{text}\">{text}</a>').\n format(text=text))\n else:\n linkified = str(Markup('<a href=\"{text}\">{text}</a>').format(\n text=text))\n else:\n linkified = linkify_phone(bleach.linkify(text, parse_email=True))\n if not escape:\n return linkified\n return bleach.clean(linkified, tags=['a'], attributes={'a': ['href',\n 'rel']}, protocols=['http', 'https', 'mailto', 'tel'])\n\n\ndef paragraphify(text: str) ->str:\n \"\"\" Takes a text with newlines groups them into paragraphs according to the\n following rules:\n\n If there's a single newline between two lines, a <br> will replace that\n newline.\n\n If there are multiple newlines between two lines, each line will become\n a paragraph and the extra newlines are discarded.\n\n \"\"\"\n text = text and text.replace('\\r', '').strip('\\n')\n if not text:\n return ''\n return ''.join(f'<p>{p}</p>' for p in (p.replace('\\n', '<br>') for p in\n _multiple_newlines.split(text)))\n\n\ndef to_html_ul(value: str, convert_dashes: bool=True, with_title: bool=False\n ) ->str:\n \"\"\" Linkify and convert to text to one or multiple ul's or paragraphs.\n \"\"\"\n if not value:\n return ''\n value = value.replace('\\r', '').strip('\\n')\n value = value.replace('\\n\\n', '\\n \\n')\n if not convert_dashes:\n return '<p>{}</p>'.format('<br>'.join(linkify(value).splitlines()))\n elements = []\n temp: list[str] = []\n\n def ul(inner: str) ->str:\n return f'<ul class=\"bulleted\">{inner}</ul>'\n\n def li(inner: str) ->str:\n return f'<li>{inner}</li>'\n\n def p(inner: str) ->str:\n return f'<p>{inner}</p>'\n was_list = False\n for i, line in enumerate(value.splitlines()):\n if not line:\n continue\n line = linkify(line)\n is_list = line.startswith('-')\n new_p_or_ul = True if line == ' ' else False\n line = line.lstrip('-').strip()\n if with_title:\n elements.append(p(f'<span class=\"title\">{line}</span>'))\n with_title = False\n else:\n if new_p_or_ul or was_list != is_list and i > 0:\n elements.append(ul(''.join(temp)) if was_list else p('<br>'\n .join(temp)))\n temp = []\n was_list = False\n if not new_p_or_ul:\n temp.append(li(line) if is_list else line)\n new_p_or_ul = False\n was_list = is_list\n if temp:\n elements.append(ul(''.join(temp)) if was_list else p('<br>'.join(temp))\n )\n return ''.join(elements)\n\n\ndef ensure_scheme(url: str, default: str='http') ->str:\n \"\"\" Makes sure that the given url has a scheme in front, if none\n was provided.\n\n \"\"\"\n if not url:\n return url\n if '//' not in url:\n url = '//' + url\n _url = URL(url)\n if _url.scheme():\n return url\n return _url.scheme(default).as_string()\n\n\ndef is_uuid(value: (str | UUID)) ->bool:\n \"\"\" Returns true if the given value is a uuid. The value may be a string\n or of type UUID. 
If it's a string, the uuid is checked with a regex.\n \"\"\"\n if isinstance(value, str):\n return _uuid.match(str(value)) and True or False\n return isinstance(value, UUID)\n\n\n<mask token>\n\n\ndef relative_url(absolute_url: (str | None)) ->str:\n \"\"\" Removes everything in front of the path, including scheme, host,\n username, password and port.\n\n \"\"\"\n url = URL._mutate(URL(absolute_url), scheme=None, username=None,\n password=None, host=None, port=None)\n return url.as_string()\n\n\ndef is_subpath(directory: str, path: str) ->bool:\n \"\"\" Returns true if the given path is inside the given directory. \"\"\"\n directory = os.path.join(os.path.realpath(directory), '')\n path = os.path.realpath(path)\n return os.path.commonprefix([path, directory]) == directory\n\n\n@overload\ndef is_sorted(iterable: 'Iterable[SupportsRichComparison]', key:\n 'Callable[[SupportsRichComparison], SupportsRichComparison]'=...,\n reverse: bool=...) ->bool:\n ...\n\n\n@overload\ndef is_sorted(iterable: 'Iterable[_T]', key:\n 'Callable[[_T], SupportsRichComparison]', reverse: bool=...) ->bool:\n ...\n\n\ndef is_sorted(iterable: 'Iterable[Any]', key:\n 'Callable[[Any], SupportsRichComparison]'=lambda i: i, reverse: bool=False\n ) ->bool:\n \"\"\" Returns True if the iterable is sorted. \"\"\"\n seq = list(iterable)\n for a, b in zip(seq, sorted(seq, key=key, reverse=reverse)):\n if a is not b:\n return False\n return True\n\n\ndef morepath_modules(cls: type[morepath.App]) ->'Iterator[str]':\n \"\"\" Returns all morepath modules which should be scanned for the given\n morepath application class.\n\n We can't reliably know the actual morepath modules that\n need to be scanned, which is why we assume that each module has\n one namespace (like 'more.transaction' or 'onegov.core').\n\n \"\"\"\n for base in cls.__mro__:\n if not issubclass(base, morepath.App):\n continue\n if base is morepath.App:\n continue\n module = '.'.join(base.__module__.split('.')[:2])\n if module.startswith('test'):\n continue\n yield module\n\n\ndef scan_morepath_modules(cls: type[morepath.App]) ->None:\n \"\"\" Tries to scan all the morepath modules required for the given\n application class. This is not guaranteed to stay reliable as there is\n no sure way to discover all modules required by the application class.\n\n \"\"\"\n for module in sorted(morepath_modules(cls)):\n morepath.scan(import_module(module))\n\n\ndef get_unique_hstore_keys(session: 'Session', column: 'Column[dict[str, Any]]'\n ) ->set[str]:\n \"\"\" Returns a set of keys found in an hstore column over all records\n of its table.\n\n \"\"\"\n base = session.query(column.keys()).with_entities(sqlalchemy.func.skeys\n (column).label('keys'))\n query = sqlalchemy.select([sqlalchemy.func.array_agg(sqlalchemy.column(\n 'keys'))], distinct=True).select_from(base.subquery())\n keys = session.execute(query).scalar()\n return set(keys) if keys else set()\n\n\ndef makeopendir(fs: 'FS', directory: str) ->'SubFS[FS]':\n \"\"\" Creates and opens the given directory in the given PyFilesystem. \"\"\"\n if not fs.isdir(directory):\n fs.makedir(directory)\n return fs.opendir(directory)\n\n\ndef append_query_param(url: str, key: str, value: str) ->str:\n \"\"\" Appends a single query parameter to an url. This is faster than\n using Purl, if and only if we only add one query param.\n\n Also this function assumes that the value is already url encoded.\n\n \"\"\"\n template = '?' 
in url and '{}&{}={}' or '{}?{}={}'\n return template.format(url, key, value)\n\n\nclass PostThread(Thread):\n \"\"\" POSTs the given data with the headers to the URL.\n\n Example::\n\n data = {'a': 1, 'b': 2}\n data = json.dumps(data).encode('utf-8')\n PostThread(\n 'https://example.com/post',\n data,\n (\n ('Content-Type', 'application/json; charset=utf-8'),\n ('Content-Length', len(data))\n )\n ).start()\n\n This only works for external URLs! If posting to the server itself is\n needed, use a process instead of the thread!\n\n \"\"\"\n\n def __init__(self, url: str, data: bytes, headers:\n 'Collection[tuple[str, str]]', timeout: float=30):\n Thread.__init__(self)\n self.url = url\n self.data = data\n self.headers = headers\n self.timeout = timeout\n\n def run(self) ->None:\n try:\n if not self.url.lower().startswith('http'):\n raise ValueError from None\n request = urllib.request.Request(self.url)\n for header in self.headers:\n request.add_header(header[0], header[1])\n urllib.request.urlopen(request, self.data, self.timeout)\n except Exception as e:\n log.error('Error while sending a POST request to {}: {}'.format\n (self.url, str(e)))\n\n\n<mask token>\n\n\ndef dictionary_to_binary(dictionary: 'LaxFileDict') ->bytes:\n \"\"\" Takes a dictionary created by :func:`binary_to_dictionary` and returns\n the original binary data.\n\n \"\"\"\n data = base64.b64decode(dictionary['data'])\n with gzip.GzipFile(fileobj=BytesIO(data), mode='r') as f:\n return f.read()\n\n\n@overload\ndef safe_format(format: str, dictionary: dict[str, str | int | float],\n types: None=..., adapt: 'Callable[[str], str] | None'=...,\n raise_on_missing: bool=...) ->str:\n ...\n\n\n@overload\ndef safe_format(format: str, dictionary: dict[str, _T], types: set[type[_T]\n ]=..., adapt: 'Callable[[str], str] | None'=..., raise_on_missing: bool=...\n ) ->str:\n ...\n\n\n<mask token>\n\n\ndef safe_format_keys(format: str, adapt: 'Callable[[str], str] | None'=None\n ) ->list[str]:\n \"\"\" Takes a :func:`safe_format` string and returns the found keys. \"\"\"\n keys = []\n\n def adapt_and_record(key: str) ->str:\n key = adapt(key) if adapt else key\n keys.append(key)\n return key\n safe_format(format, {}, adapt=adapt_and_record)\n return keys\n\n\ndef is_valid_yubikey(client_id: str, secret_key: str, expected_yubikey_id:\n str, yubikey: str) ->bool:\n \"\"\" Asks the yubico validation servers if the given yubikey OTP is valid.\n\n :client_id:\n The yubico API client id.\n\n :secret_key:\n The yubico API secret key.\n\n :expected_yubikey_id:\n The expected yubikey id. The yubikey id is defined as the first twelve\n characters of any yubikey value. Each user should have a yubikey\n associated with its account. If the yubikey value comes from a\n different key, the key is invalid.\n\n :yubikey:\n The actual yubikey value that should be verified.\n\n :return: True if yubico confirmed the validity of the key.\n\n \"\"\"\n assert client_id and secret_key and expected_yubikey_id and yubikey\n assert len(expected_yubikey_id) == 12\n if not yubikey.startswith(expected_yubikey_id):\n return False\n try:\n return Yubico(client_id, secret_key).verify(yubikey)\n except StatusCodeError as e:\n if e.status_code != 'REPLAYED_OTP':\n raise e\n return False\n except SignatureVerificationError:\n return False\n\n\ndef is_valid_yubikey_format(otp: str) ->bool:\n \"\"\" Returns True if the given OTP has the correct format. 
Does not actually\n    contact Yubico, so this function may return true for some invalid keys.\n\n    \"\"\"\n    return ALPHABET_RE.match(otp) and True or False\n\n\ndef yubikey_otp_to_serial(otp: str) ->(int | None):\n    \"\"\" Takes a Yubikey OTP and calculates the serial number of the key.\n\n    The serial key is printed on the yubikey, in decimal and as a QR code.\n\n    Example:\n\n        >>> yubikey_otp_to_serial(\n            'ccccccdefghdefghdefghdefghdefghdefghdefghklv')\n        2311522\n\n    Adapted from Java:\n\n    https://github.com/Yubico/yubikey-salesforce-client/blob/\n    e38e46ee90296a852374a8b744555e99d16b6ca7/src/classes/Modhex.cls\n\n    If the key cannot be calculated, None is returned. This can happen if\n    the key is malformed.\n\n    \"\"\"\n    if not is_valid_yubikey_format(otp):\n        return None\n    token = 'cccc' + otp[:12]\n    toggle = False\n    keep = 0\n    bytesarray = []\n    for char in token:\n        n = ALPHABET.index(char)\n        toggle = not toggle\n        if toggle:\n            keep = n\n        else:\n            bytesarray.append(keep << 4 | n)\n    value = 0\n    mask_value = 31\n    for i in range(0, 8):\n        shift = (4 - 1 - i) * 8\n        value += (bytesarray[i] & 255) << (shift & mask_value)\n    return value\n\n\n<mask token>\n\n\ndef dict_path(dictionary: dict[str, _T], path: str) ->_T:\n    \"\"\" Gets the value of the given dictionary at the given path. For example:\n\n    >>> data = {'foo': {'bar': True}}\n    >>> dict_path(data, 'foo.bar')\n    True\n\n    \"\"\"\n    if not dictionary:\n        raise KeyError()\n    return reduce(operator.getitem, path.split('.'), dictionary)\n\n\ndef safe_move(src: str, dst: str) ->None:\n    \"\"\" Rename a file from ``src`` to ``dst``.\n\n    * Moves must be atomic. ``shutil.move()`` is not atomic.\n\n    * Moves must work across filesystems. Often temp directories and the\n      cache directories live on different filesystems. ``os.rename()`` can\n      throw errors if run across filesystems.\n\n    So we try ``os.rename()``, but if we detect a cross-filesystem copy, we\n    switch to ``shutil.move()`` with some wrappers to make it atomic.\n\n    Via https://alexwlchan.net/2019/03/atomic-cross-filesystem-moves-in-python\n\n    \"\"\"\n    try:\n        os.rename(src, dst)\n    except OSError as err:\n        if err.errno == errno.EXDEV:\n            copy_id = uuid4()\n            tmp_dst = '%s.%s.tmp' % (dst, copy_id)\n            shutil.copyfile(src, tmp_dst)\n            os.rename(tmp_dst, dst)\n            os.unlink(src)\n        else:\n            raise\n\n\n<mask token>\n\n\n@overload\ndef batched(iterable: Iterable[_T], batch_size: int, container_factory:\n    'type[list]') ->'Iterator[list[_T]]':\n    ...\n\n\n@overload\ndef batched(iterable: Iterable[_T], batch_size: int, container_factory:\n    'Callable[[Iterator[_T]], Collection[_T]]') ->'Iterator[Collection[_T]]':\n    ...\n\n\ndef batched(iterable: Iterable[_T], batch_size: int, container_factory:\n    'Callable[[Iterator[_T]], Collection[_T]]'=tuple\n    ) ->'Iterator[Collection[_T]]':\n    \"\"\" Splits an iterable into batches of batch_size and puts them\n    inside a given collection (tuple by default).\n\n    The container_factory is necessary in order to consume the iterator\n    returned by islice. 
Otherwise this function would never return.\n\n \"\"\"\n iterator = iter(iterable)\n while True:\n batch = container_factory(islice(iterator, batch_size))\n if len(batch) == 0:\n return\n yield batch\n", "step-5": "import base64\nimport bleach\nimport errno\nimport fcntl\nimport gzip\nimport hashlib\nimport importlib\nimport inspect\nimport magic\nimport mimetypes\nimport morepath\nimport operator\nimport os.path\nimport re\nimport shutil\nimport sqlalchemy\nimport urllib.request\n\nfrom markupsafe import Markup\nfrom collections.abc import Iterable\nfrom contextlib import contextmanager\nfrom cProfile import Profile\nfrom functools import reduce\nfrom importlib import import_module\nfrom io import BytesIO, StringIO\nfrom itertools import groupby, islice\nfrom onegov.core import log\nfrom onegov.core.cache import lru_cache\nfrom onegov.core.custom import json\nfrom onegov.core.errors import AlreadyLockedError\nfrom purl import URL\nfrom threading import Thread\nfrom time import perf_counter\nfrom unidecode import unidecode\nfrom uuid import UUID, uuid4\nfrom webob import static\nfrom yubico_client import Yubico\nfrom yubico_client.yubico_exceptions import SignatureVerificationError\nfrom yubico_client.yubico_exceptions import StatusCodeError\n\n\nfrom typing import overload, Any, TypeVar, TYPE_CHECKING\nif TYPE_CHECKING:\n from _typeshed import SupportsRichComparison\n from collections.abc import Callable, Collection, Iterator\n from fs.base import FS, SubFS\n from re import Match\n from sqlalchemy import Column\n from sqlalchemy.orm import Session\n from types import ModuleType\n from webob import Response\n from .request import CoreRequest\n from .types import FileDict, LaxFileDict\n\n\n_T = TypeVar('_T')\n_KT = TypeVar('_KT')\n\n\n# http://stackoverflow.com/a/13500078\n_unwanted_url_chars = re.compile(r'[\\.\\(\\)\\\\/\\s<>\\[\\]{},:;?!@&=+$#@%|\\*\"\\'`]+')\n_double_dash = re.compile(r'[-]+')\n_number_suffix = re.compile(r'-([0-9]+)$')\n_repeated_spaces = re.compile(r'\\s\\s+')\n_uuid = re.compile(\n r'^[a-f0-9]{8}-?[a-f0-9]{4}-?[a-f0-9]{4}-?[a-f0-9]{4}-?[a-f0-9]{12}$')\n\n# only temporary until bleach has a release > 1.4.1 -\n_email_regex = re.compile((\n r\"([a-z0-9!#$%&'*+\\/=?^_`{|}~-]+(?:\\.[a-z0-9!#$%&'*+\\/=?^_`\"\n r\"{|}~-]+)*(@|\\sat\\s)(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?(\\.|\"\n r\"\\sdot\\s))+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?)\"\n))\n\n# detects multiple successive newlines\n_multiple_newlines = re.compile(r'\\n{2,}', re.MULTILINE)\n\n# detect starting strings of phone inside a link\n_phone_inside_a_tags = r'(\\\">|href=\\\"tel:)?'\n\n# regex pattern for swiss phone numbers\n_phone_ch_country_code = r\"(\\+41|0041|0[0-9]{2})\"\n_phone_ch = re.compile(_phone_ch_country_code + r'([ \\r\\f\\t\\d]+)')\n\n# Adds a regex group to capture if a leading a tag is present or if the\n# number is part of the href attributes\n_phone_ch_html_safe = re.compile(\n _phone_inside_a_tags + _phone_ch_country_code + r'([ \\r\\f\\t\\d]+)')\n\n# for yubikeys\nALPHABET = 'cbdefghijklnrtuv'\nALPHABET_RE = re.compile(r'^[cbdefghijklnrtuv]{12,44}$')\n\n\n@contextmanager\ndef local_lock(namespace: str, key: str) -> 'Iterator[None]':\n \"\"\" Locks the given namespace/key combination on the current system,\n automatically freeing it after the with statement has been completed or\n once the process is killed.\n\n Usage::\n\n with lock('namespace', 'key'):\n pass\n\n \"\"\"\n name = f'{namespace}-{key}'.replace('/', '-')\n\n with open(f'/tmp/{name}', 'w+') as f:\n try:\n fcntl.flock(f, fcntl.LOCK_EX | 
fcntl.LOCK_NB)\n yield\n fcntl.flock(f, fcntl.LOCK_UN)\n except BlockingIOError as exception:\n raise AlreadyLockedError from exception\n\n\ndef normalize_for_url(text: str) -> str:\n \"\"\" Takes the given text and makes it fit to be used for an url.\n\n That means replacing spaces and other unwanted characters with '-',\n lowercasing everything and turning unicode characters into their closest\n ascii equivalent using Unidecode.\n\n See https://pypi.python.org/pypi/Unidecode\n\n \"\"\"\n\n # German is our main language, so we are extra considerate about it\n # (unidecode turns ü into u)\n text = text.replace(\"ü\", \"ue\")\n text = text.replace(\"ä\", \"ae\")\n text = text.replace(\"ö\", \"oe\")\n clean = _unwanted_url_chars.sub('-', unidecode(text).strip(' ').lower())\n clean = _double_dash.sub('-', clean)\n clean = clean.strip('-')\n\n return clean\n\n\ndef increment_name(name: str) -> str:\n \"\"\" Takes the given name and adds a numbered suffix beginning at 1.\n\n For example::\n\n foo => foo-1\n foo-1 => foo-2\n\n \"\"\"\n\n match = _number_suffix.search(name)\n if match:\n number_str = match.group(1)\n next_number = int(number_str) + 1\n return f'{name[:-len(number_str)]}{next_number}'\n else:\n return f'{name}-1'\n\n\ndef remove_repeated_spaces(text: str) -> str:\n \"\"\" Removes repeated spaces in the text ('a b' -> 'a b'). \"\"\"\n\n return _repeated_spaces.sub(' ', text)\n\n\n@contextmanager\ndef profile(filename: str) -> 'Iterator[None]':\n \"\"\" Profiles the wrapped code and stores the result in the profiles folder\n with the given filename.\n\n \"\"\"\n profiler = Profile()\n profiler.enable()\n\n yield\n\n profiler.disable()\n profiler.create_stats()\n profiler.dump_stats('profiles/{}'.format(filename))\n\n\n@contextmanager\ndef timing(name: str | None = None) -> 'Iterator[None]':\n \"\"\" Runs the wrapped code and prints the time in ms it took to run it.\n The name is printed in front of the time, if given.\n\n \"\"\"\n start = perf_counter()\n\n yield\n\n duration_ms = 1000.0 * (perf_counter() - start)\n\n if name:\n print(f'{name}: {duration_ms:.0f} ms')\n else:\n print(f'{duration_ms:.0f} ms')\n\n\n@lru_cache(maxsize=32)\ndef module_path_root(module: 'ModuleType | str') -> str:\n if isinstance(module, str):\n module = importlib.import_module(module)\n\n assert module is not None\n\n return os.path.dirname(inspect.getfile(module))\n\n\ndef module_path(module: 'ModuleType | str', subpath: str) -> str:\n \"\"\" Returns a subdirectory in the given python module.\n\n :mod:\n A python module (actual module or string)\n\n :subpath:\n Subpath below that python module. Leading slashes ('/') are ignored.\n \"\"\"\n\n parent = module_path_root(module)\n path = os.path.join(parent, subpath.strip('/'))\n\n # always be paranoid with path manipulation\n assert is_subpath(parent, path)\n\n return path\n\n\ndef touch(file_path: str) -> None:\n \"\"\" Touches the file on the given path. 
\"\"\"\n try:\n os.utime(file_path, None)\n except Exception:\n open(file_path, 'a').close()\n\n\nclass Bunch:\n \"\"\" A simple but handy \"collector of a bunch of named stuff\" class.\n\n See `<https://code.activestate.com/recipes/\\\n 52308-the-simple-but-handy-collector-of-a-bunch-of-named/>`_.\n\n For example::\n\n point = Bunch(x=1, y=2)\n assert point.x == 1\n assert point.y == 2\n\n point.z = 3\n assert point.z == 3\n\n Allows the creation of simple nested bunches, for example::\n\n request = Bunch(**{'app.settings.org.my_setting': True})\n assert request.app.settings.org.my_setting is True\n\n \"\"\"\n def __init__(self, **kwargs: Any):\n self.__dict__.update(\n (key, value)\n for key, value in kwargs.items()\n if '.' not in key\n )\n for key, value in kwargs.items():\n if '.' in key:\n name, _, key = key.partition('.')\n setattr(self, name, Bunch(**{key: value}))\n\n if TYPE_CHECKING:\n # let mypy know that any attribute access could be valid\n def __getattr__(self, name: str) -> Any: ...\n def __setattr__(self, name: str, value: Any) -> None: ...\n def __delattr__(self, name: str) -> None: ...\n\n def __eq__(self, other: object) -> bool:\n if type(other) is type(self):\n return self.__dict__ == other.__dict__\n return False\n\n def __ne__(self, other: object) -> bool:\n return not self.__eq__(other)\n\n\ndef render_file(file_path: str, request: 'CoreRequest') -> 'Response':\n \"\"\" Takes the given file_path (content) and renders it to the browser.\n The file must exist on the local system and be readable by the current\n process.\n\n \"\"\"\n\n def hash_path(path: str) -> str:\n return hashlib.new( # nosec:B324\n 'sha1',\n path.encode('utf-8'),\n usedforsecurity=False\n ).hexdigest()\n\n # this is a very cachable result - though it's possible that a file\n # changes it's content type, it should usually not, especially since\n # we emphasize the use of random filenames\n @request.app.cache.cache_on_arguments(to_str=hash_path)\n def get_content_type(file_path: str) -> str:\n content_type = mimetypes.guess_type(file_path)[0]\n\n if not content_type:\n content_type = magic.from_file(file_path, mime=True)\n\n return content_type\n\n return request.get_response(\n static.FileApp(file_path, content_type=get_content_type(file_path)))\n\n\ndef hash_dictionary(dictionary: dict[str, Any]) -> str:\n \"\"\" Computes a sha256 hash for the given dictionary. The dictionary\n is expected to only contain values that can be serialized by json.\n\n That includes int, decimal, string, boolean.\n\n Note that this function is not meant to be used for hashing secrets. Do\n not include data in this dictionary that is secret!\n\n \"\"\"\n dict_as_string = json.dumps(dictionary, sort_keys=True).encode('utf-8')\n return hashlib.new( # nosec:B324\n 'sha1',\n dict_as_string,\n usedforsecurity=False\n ).hexdigest()\n\n\n@overload\ndef groupbylist(\n iterable: Iterable[_T],\n key: None = ...\n) -> list[tuple[_T, list[_T]]]: ...\n\n\n@overload\ndef groupbylist(\n iterable: Iterable[_T],\n key: 'Callable[[_T], _KT]'\n) -> list[tuple[_KT, list[_T]]]: ...\n\n\ndef groupbylist(\n iterable: Iterable[_T],\n key: 'Callable[[_T], Any] | None' = None\n) -> list[tuple[Any, list[_T]]]:\n \"\"\" Works just like Python's ``itertools.groupby`` function, but instead\n of returning generators, it returns lists.\n\n \"\"\"\n return [(k, list(g)) for k, g in groupby(iterable, key=key)]\n\n\ndef linkify_phone(text: str) -> str:\n \"\"\" Takes a string and replaces valid phone numbers with html links. 
If a\n phone number is matched, it will be replaced by the result of a callback\n function, that does further checks on the regex match. If these checks do\n not pass, the matched number will remain unchanged.\n\n \"\"\"\n\n def strip_whitespace(number: str) -> str:\n return re.sub(r'\\s', '', number)\n\n def is_valid_length(number: str) -> bool:\n if number.startswith('+00'):\n return False\n if number.startswith('00'):\n return len(number) == 13\n elif number.startswith('0'):\n return len(number) == 10\n elif number.startswith('+'):\n return len(number) == 12\n return False\n\n def handle_match(match: 'Match[str]') -> str:\n inside_html = match.group(1)\n number = f'{match.group(2)}{match.group(3)}'\n assert not number.endswith('\\n')\n if inside_html:\n return match.group(0)\n if is_valid_length(strip_whitespace(number)):\n number = remove_repeated_spaces(number).strip()\n return f'<a href=\"tel:{number}\">{number}</a> '\n\n return match.group(0)\n\n return _phone_ch_html_safe.sub(handle_match, text)\n\n\n# FIXME: A lot of these methods should be using MarkupSafe\ndef linkify(text: str, escape: bool = True) -> str:\n \"\"\" Takes plain text and injects html links for urls and email addresses.\n\n By default the text is html escaped before it is linkified. This accounts\n for the fact that we usually use this for text blocks that we mean to\n extend with email addresses and urls.\n\n If html is already possible, why linkify it?\n\n Note: We need to clean the html after we've created it (linkify\n parses escaped html and turns it into real html). As a consequence it\n is possible to have html urls in the text that won't be escaped.\n\n \"\"\"\n\n if not text:\n return text\n\n long_top_level_domains = ['.agency']\n\n # bleach.linkify supports only a fairly limited amount of tlds\n if any(domain in text for domain in long_top_level_domains):\n if '@' in text:\n linkified = str(\n Markup('<a href=\"mailto:{text}\">{text}</a>').format(\n text=text\n )\n )\n else:\n linkified = str(\n Markup('<a href=\"{text}\">{text}</a>').format(text=text)\n )\n else:\n linkified = linkify_phone(bleach.linkify(text, parse_email=True))\n\n if not escape:\n return linkified\n\n return bleach.clean(\n linkified,\n tags=['a'],\n attributes={'a': ['href', 'rel']},\n protocols=['http', 'https', 'mailto', 'tel']\n )\n\n\ndef paragraphify(text: str) -> str:\n \"\"\" Takes a text with newlines groups them into paragraphs according to the\n following rules:\n\n If there's a single newline between two lines, a <br> will replace that\n newline.\n\n If there are multiple newlines between two lines, each line will become\n a paragraph and the extra newlines are discarded.\n\n \"\"\"\n text = text and text.replace('\\r', '').strip('\\n')\n\n if not text:\n return ''\n\n return ''.join(f'<p>{p}</p>' for p in (\n p.replace('\\n', '<br>') for p in _multiple_newlines.split(text)\n ))\n\n\ndef to_html_ul(\n value: str,\n convert_dashes: bool = True,\n with_title: bool = False\n) -> str:\n \"\"\" Linkify and convert to text to one or multiple ul's or paragraphs.\n \"\"\"\n if not value:\n return ''\n\n value = value.replace('\\r', '').strip('\\n')\n value = value.replace('\\n\\n', '\\n \\n')\n\n if not convert_dashes:\n return '<p>{}</p>'.format(\n '<br>'.join(linkify(value).splitlines())\n )\n\n elements = []\n temp: list[str] = []\n\n def ul(inner: str) -> str:\n return f'<ul class=\"bulleted\">{inner}</ul>'\n\n def li(inner: str) -> str:\n return f'<li>{inner}</li>'\n\n def p(inner: str) -> str:\n return 
f'<p>{inner}</p>'\n\n was_list = False\n\n for i, line in enumerate(value.splitlines()):\n if not line:\n continue\n\n line = linkify(line)\n is_list = line.startswith('-')\n new_p_or_ul = True if line == ' ' else False\n\n line = line.lstrip('-').strip()\n\n if with_title:\n elements.append(p(f'<span class=\"title\">{line}</span>'))\n with_title = False\n else:\n if new_p_or_ul or (was_list != is_list and i > 0):\n elements.append(\n ul(''.join(temp)) if was_list else p('<br>'.join(temp))\n )\n temp = []\n was_list = False\n\n if not new_p_or_ul:\n temp.append((li(line) if is_list else line))\n\n new_p_or_ul = False\n was_list = is_list\n\n if temp:\n elements.append(\n ul(''.join(temp)) if was_list else p('<br>'.join(temp))\n )\n\n return ''.join(elements)\n\n\ndef ensure_scheme(url: str, default: str = 'http') -> str:\n \"\"\" Makes sure that the given url has a scheme in front, if none\n was provided.\n\n \"\"\"\n\n if not url:\n return url\n\n # purl (or to be precise urlparse) will parse empty host names ('abc.xyz')\n # wrongly, assuming the abc.xyz is a path. by adding a double slash if\n # there isn't one already, we can circumvent that problem\n if '//' not in url:\n url = '//' + url\n\n _url = URL(url)\n\n if _url.scheme():\n return url\n\n return _url.scheme(default).as_string()\n\n\ndef is_uuid(value: str | UUID) -> bool:\n \"\"\" Returns true if the given value is a uuid. The value may be a string\n or of type UUID. If it's a string, the uuid is checked with a regex.\n \"\"\"\n if isinstance(value, str):\n return _uuid.match(str(value)) and True or False\n\n return isinstance(value, UUID)\n\n\ndef is_non_string_iterable(obj: object) -> bool:\n \"\"\" Returns true if the given obj is an iterable, but not a string. \"\"\"\n return not (isinstance(obj, str) or isinstance(obj, bytes))\\\n and isinstance(obj, Iterable)\n\n\ndef relative_url(absolute_url: str | None) -> str:\n \"\"\" Removes everything in front of the path, including scheme, host,\n username, password and port.\n\n \"\"\"\n url = URL._mutate(\n URL(absolute_url),\n scheme=None,\n username=None,\n password=None,\n host=None,\n port=None\n )\n\n return url.as_string()\n\n\ndef is_subpath(directory: str, path: str) -> bool:\n \"\"\" Returns true if the given path is inside the given directory. \"\"\"\n directory = os.path.join(os.path.realpath(directory), '')\n path = os.path.realpath(path)\n\n # return true, if the common prefix of both is equal to directory\n # e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b\n return os.path.commonprefix([path, directory]) == directory\n\n\n@overload\ndef is_sorted(\n iterable: 'Iterable[SupportsRichComparison]',\n key: 'Callable[[SupportsRichComparison], SupportsRichComparison]' = ...,\n reverse: bool = ...\n) -> bool: ...\n\n\n@overload\ndef is_sorted(\n iterable: 'Iterable[_T]',\n key: 'Callable[[_T], SupportsRichComparison]',\n reverse: bool = ...\n) -> bool: ...\n\n\n# FIXME: Do we really want to allow any Iterable? This seems like a bad\n# idea to me... Iterators will be consumed and the Iterable might\n# be infinite. This seems like it should be a Container instead,\n# then we also don't need to use tee or list to make a copy\ndef is_sorted(\n iterable: 'Iterable[Any]',\n key: 'Callable[[Any], SupportsRichComparison]' = lambda i: i,\n reverse: bool = False\n) -> bool:\n \"\"\" Returns True if the iterable is sorted. 
\"\"\"\n\n # NOTE: we previously used `tee` here, but since `sorted` consumes\n # the entire iterator, this is the exact case where tee is\n # slower than just pulling the entire sequence into a list\n seq = list(iterable)\n\n for a, b in zip(seq, sorted(seq, key=key, reverse=reverse)):\n if a is not b:\n return False\n\n return True\n\n\ndef morepath_modules(cls: type[morepath.App]) -> 'Iterator[str]':\n \"\"\" Returns all morepath modules which should be scanned for the given\n morepath application class.\n\n We can't reliably know the actual morepath modules that\n need to be scanned, which is why we assume that each module has\n one namespace (like 'more.transaction' or 'onegov.core').\n\n \"\"\"\n for base in cls.__mro__:\n if not issubclass(base, morepath.App):\n continue\n\n if base is morepath.App:\n continue\n\n module = '.'.join(base.__module__.split('.')[:2])\n\n if module.startswith('test'):\n continue\n\n yield module\n\n\ndef scan_morepath_modules(cls: type[morepath.App]) -> None:\n \"\"\" Tries to scan all the morepath modules required for the given\n application class. This is not guaranteed to stay reliable as there is\n no sure way to discover all modules required by the application class.\n\n \"\"\"\n for module in sorted(morepath_modules(cls)):\n morepath.scan(import_module(module))\n\n\ndef get_unique_hstore_keys(\n session: 'Session',\n column: 'Column[dict[str, Any]]'\n) -> set[str]:\n \"\"\" Returns a set of keys found in an hstore column over all records\n of its table.\n\n \"\"\"\n\n base = session.query(column.keys()).with_entities( # type:ignore\n sqlalchemy.func.skeys(column).label('keys'))\n\n query = sqlalchemy.select(\n [sqlalchemy.func.array_agg(sqlalchemy.column('keys'))],\n distinct=True\n ).select_from(base.subquery())\n\n keys = session.execute(query).scalar()\n return set(keys) if keys else set()\n\n\ndef makeopendir(fs: 'FS', directory: str) -> 'SubFS[FS]':\n \"\"\" Creates and opens the given directory in the given PyFilesystem. \"\"\"\n\n if not fs.isdir(directory):\n fs.makedir(directory)\n\n return fs.opendir(directory)\n\n\ndef append_query_param(url: str, key: str, value: str) -> str:\n \"\"\" Appends a single query parameter to an url. This is faster than\n using Purl, if and only if we only add one query param.\n\n Also this function assumes that the value is already url encoded.\n\n \"\"\"\n template = '?' in url and '{}&{}={}' or '{}?{}={}'\n return template.format(url, key, value)\n\n\nclass PostThread(Thread):\n\n \"\"\" POSTs the given data with the headers to the URL.\n\n Example::\n\n data = {'a': 1, 'b': 2}\n data = json.dumps(data).encode('utf-8')\n PostThread(\n 'https://example.com/post',\n data,\n (\n ('Content-Type', 'application/json; charset=utf-8'),\n ('Content-Length', len(data))\n )\n ).start()\n\n This only works for external URLs! 
If posting to server itself is\n    needed, use a process instead of the thread!\n\n    \"\"\"\n\n    def __init__(\n        self,\n        url: str,\n        data: bytes,\n        headers: 'Collection[tuple[str, str]]',\n        timeout: float = 30\n    ):\n        Thread.__init__(self)\n        self.url = url\n        self.data = data\n        self.headers = headers\n        self.timeout = timeout\n\n    def run(self) -> None:\n        try:\n            # Validate URL protocol before opening it, since it's possible to\n            # open ftp:// and file:// as well.\n            if not self.url.lower().startswith('http'):\n                raise ValueError from None\n\n            request = urllib.request.Request(self.url)\n            for header in self.headers:\n                request.add_header(header[0], header[1])\n            urllib.request.urlopen( # nosec B310\n                request, self.data, self.timeout\n            )\n        except Exception as e:\n            log.error(\n                'Error while sending a POST request to {}: {}'.format(\n                    self.url, str(e)\n                )\n            )\n\n\ndef toggle(collection: set[_T], item: _T | None) -> set[_T]:\n    \"\"\" Returns a new set where the item has been toggled. \"\"\"\n\n    if item is None:\n        return collection\n\n    if item in collection:\n        return collection - {item}\n    else:\n        return collection | {item}\n\n\ndef binary_to_dictionary(\n    binary: bytes,\n    filename: str | None = None\n) -> 'FileDict':\n    \"\"\" Takes raw binary filedata and stores it in a dictionary together\n    with metadata information.\n\n    The data is compressed before it is stored in the dictionary. Use\n    :func:`dictionary_to_binary` to get the original binary data back.\n\n    \"\"\"\n\n    assert isinstance(binary, bytes)\n\n    mimetype = magic.from_buffer(binary, mime=True)\n\n    # according to https://tools.ietf.org/html/rfc7111, text/csv should be used\n    if mimetype == 'application/csv':\n        mimetype = 'text/csv'\n\n    gzipdata = BytesIO()\n\n    with gzip.GzipFile(fileobj=gzipdata, mode='wb') as f:\n        f.write(binary)\n\n    return {\n        'data': base64.b64encode(gzipdata.getvalue()).decode('ascii'),\n        'filename': filename,\n        'mimetype': mimetype,\n        'size': len(binary)\n    }\n\n\ndef dictionary_to_binary(dictionary: 'LaxFileDict') -> bytes:\n    \"\"\" Takes a dictionary created by :func:`binary_to_dictionary` and returns\n    the original binary data.\n\n    \"\"\"\n    data = base64.b64decode(dictionary['data'])\n\n    with gzip.GzipFile(fileobj=BytesIO(data), mode='r') as f:\n        return f.read()\n\n\n@overload\ndef safe_format(\n    format: str,\n    dictionary: dict[str, str | int | float],\n    types: None = ...,\n    adapt: 'Callable[[str], str] | None' = ...,\n    raise_on_missing: bool = ...\n) -> str: ...\n\n\n@overload\ndef safe_format(\n    format: str,\n    dictionary: dict[str, _T],\n    types: set[type[_T]] = ...,\n    adapt: 'Callable[[str], str] | None' = ...,\n    raise_on_missing: bool = ...\n) -> str: ...\n\n\ndef safe_format(\n    format: str,\n    dictionary: dict[str, Any],\n    types: set[type[Any]] | None = None,\n    adapt: 'Callable[[str], str] | None' = None,\n    raise_on_missing: bool = False\n) -> str:\n    \"\"\" Takes a user-supplied string with format blocks and returns a string\n    where those blocks are replaced by values in a dictionary.\n\n    For example::\n\n        >>> safe_format('[user] has logged in', {'user': 'admin'})\n        'admin has logged in'\n\n    :param format:\n        The format to use. Square brackets denote dictionary keys. To\n        literally print square brackets, mask them by doubling ('[[' -> '[')\n\n    :param dictionary:\n        The dictionary holding the variables to use. If the key is not found\n        in the dictionary, the bracket is replaced with an empty string.\n\n    :param types:\n        A set of types supported by the dictionary. 
Limiting this to safe\n types like builtins (str, int, float) ensure that no values are\n accidentally leaked through faulty __str__ representations.\n\n Note that inheritance is ignored. Supported types need to be\n whitelisted explicitly.\n\n :param adapt:\n An optional callable that receives the key before it is used. Returns\n the same key or an altered version.\n\n :param raise_on_missing:\n True if missing keys should result in a runtime error (defaults to\n False).\n\n This is strictly meant for formats provided by users. Python's string\n formatting options are clearly superior to this, however it is less\n secure!\n\n \"\"\"\n\n types = types or {int, str, float}\n output = StringIO()\n buffer = StringIO()\n opened = 0\n\n for ix, char in enumerate(format):\n if char == '[':\n opened += 1\n\n if char == ']':\n opened -= 1\n\n if opened == 1 and char != '[' and char != ']':\n print(char, file=buffer, end='')\n continue\n\n if opened == 2 or opened == -2:\n if buffer.tell():\n raise RuntimeError(\"Unexpected bracket inside bracket found\")\n\n print(char, file=output, end='')\n opened = 0\n continue\n\n if buffer.tell():\n k = adapt(buffer.getvalue()) if adapt else buffer.getvalue()\n\n if raise_on_missing and k not in dictionary:\n raise RuntimeError(\"Key '{}' is unknown\".format(k))\n\n v = dictionary.get(k, '')\n t = type(v)\n\n if t not in types:\n raise RuntimeError(\"Invalid type for '{}': {}\".format(k, t))\n\n print(v, file=output, end='')\n buffer = StringIO()\n\n if char != '[' and char != ']':\n print(char, file=output, end='')\n\n if opened != 0:\n raise RuntimeError(\"Uneven number of brackets in '{}'\".format(format))\n\n return output.getvalue()\n\n\ndef safe_format_keys(\n format: str,\n adapt: 'Callable[[str], str] | None' = None\n) -> list[str]:\n \"\"\" Takes a :func:`safe_format` string and returns the found keys. \"\"\"\n\n keys = []\n\n def adapt_and_record(key: str) -> str:\n key = adapt(key) if adapt else key\n keys.append(key)\n\n return key\n\n safe_format(format, {}, adapt=adapt_and_record)\n\n return keys\n\n\ndef is_valid_yubikey(\n client_id: str,\n secret_key: str,\n expected_yubikey_id: str,\n yubikey: str\n) -> bool:\n \"\"\" Asks the yubico validation servers if the given yubikey OTP is valid.\n\n :client_id:\n The yubico API client id.\n\n :secret_key:\n The yubico API secret key.\n\n :expected_yubikey_id:\n The expected yubikey id. The yubikey id is defined as the first twelve\n characters of any yubikey value. Each user should have a yubikey\n associated with it's account. If the yubikey value comes from a\n different key, the key is invalid.\n\n :yubikey:\n The actual yubikey value that should be verified.\n\n :return: True if yubico confirmed the validity of the key.\n\n \"\"\"\n assert client_id and secret_key and expected_yubikey_id and yubikey\n assert len(expected_yubikey_id) == 12\n\n # if the yubikey doesn't start with the expected yubikey id we do not\n # need to make a roundtrip to the validation server\n if not yubikey.startswith(expected_yubikey_id):\n # FIXME: Are we leaking information with this early out?\n return False\n\n try:\n return Yubico(client_id, secret_key).verify(yubikey)\n except StatusCodeError as e:\n if e.status_code != 'REPLAYED_OTP':\n raise e\n\n return False\n except SignatureVerificationError:\n return False\n\n\ndef is_valid_yubikey_format(otp: str) -> bool:\n \"\"\" Returns True if the given OTP has the correct format. 
Does not actually\n    contact Yubico, so this function may return true for some invalid keys.\n\n    \"\"\"\n\n    return ALPHABET_RE.match(otp) and True or False\n\n\ndef yubikey_otp_to_serial(otp: str) -> int | None:\n    \"\"\" Takes a Yubikey OTP and calculates the serial number of the key.\n\n    The serial key is printed on the yubikey, in decimal and as a QR code.\n\n    Example:\n\n        >>> yubikey_otp_to_serial(\n            'ccccccdefghdefghdefghdefghdefghdefghdefghklv')\n        2311522\n\n    Adapted from Java:\n\n    https://github.com/Yubico/yubikey-salesforce-client/blob/\n    e38e46ee90296a852374a8b744555e99d16b6ca7/src/classes/Modhex.cls\n\n    If the key cannot be calculated, None is returned. This can happen if\n    the key is malformed.\n\n    \"\"\"\n\n    if not is_valid_yubikey_format(otp):\n        return None\n\n    token = 'cccc' + otp[:12]\n\n    toggle = False\n    keep = 0\n\n    bytesarray = []\n\n    for char in token:\n        n = ALPHABET.index(char)\n\n        toggle = not toggle\n\n        if toggle:\n            keep = n\n        else:\n            bytesarray.append((keep << 4) | n)\n\n    value = 0\n\n    # in Java, shifts on integers are masked with 0x1f using AND\n    # https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.19\n    mask_value = 0x1f\n\n    for i in range(0, 8):\n        shift = (4 - 1 - i) * 8\n        value += (bytesarray[i] & 255) << (shift & mask_value)\n\n    return value\n\n\ndef yubikey_public_id(otp: str) -> str:\n    \"\"\" Returns the yubikey identity given a token. \"\"\"\n\n    return otp[:12]\n\n\ndef dict_path(dictionary: dict[str, _T], path: str) -> _T:\n    \"\"\" Gets the value of the given dictionary at the given path. For example:\n\n    >>> data = {'foo': {'bar': True}}\n    >>> dict_path(data, 'foo.bar')\n    True\n\n    \"\"\"\n\n    if not dictionary:\n        raise KeyError()\n\n    return reduce(operator.getitem, path.split('.'), dictionary) # type:ignore\n\n\ndef safe_move(src: str, dst: str) -> None:\n    \"\"\" Rename a file from ``src`` to ``dst``.\n\n    * Moves must be atomic. ``shutil.move()`` is not atomic.\n\n    * Moves must work across filesystems. Often temp directories and the\n      cache directories live on different filesystems. ``os.rename()`` can\n      throw errors if run across filesystems.\n\n    So we try ``os.rename()``, but if we detect a cross-filesystem copy, we\n    switch to ``shutil.move()`` with some wrappers to make it atomic.\n\n    Via https://alexwlchan.net/2019/03/atomic-cross-filesystem-moves-in-python\n\n    \"\"\"\n    try:\n        os.rename(src, dst)\n    except OSError as err:\n\n        if err.errno == errno.EXDEV:\n            # Generate a unique ID, and copy `<src>` to the target directory\n            # with a temporary name `<dst>.<ID>.tmp`. Because we're copying\n            # across a filesystem boundary, this initial copy may not be\n            # atomic. We intersperse a random UUID so if different processes\n            # are copying into `<dst>`, they don't overlap in their tmp copies.\n            copy_id = uuid4()\n            tmp_dst = \"%s.%s.tmp\" % (dst, copy_id)\n            shutil.copyfile(src, tmp_dst)\n\n            # Then do an atomic rename onto the new name, and clean up the\n            # source image.\n            os.rename(tmp_dst, dst)\n            os.unlink(src)\n        else:\n            raise\n\n\n@overload\ndef batched(\n    iterable: Iterable[_T],\n    batch_size: int,\n    container_factory: 'type[tuple]' = ... 
# type:ignore[type-arg]\n) -> 'Iterator[tuple[_T, ...]]': ...\n\n\n@overload\ndef batched(\n iterable: Iterable[_T],\n batch_size: int,\n container_factory: 'type[list]' # type:ignore[type-arg]\n) -> 'Iterator[list[_T]]': ...\n\n\n# NOTE: If there were higher order TypeVars, we could properly infer\n# the type of the Container, for now we just add overloads for\n# two of the most common container_factories\n@overload\ndef batched(\n iterable: Iterable[_T],\n batch_size: int,\n container_factory: 'Callable[[Iterator[_T]], Collection[_T]]'\n) -> 'Iterator[Collection[_T]]': ...\n\n\ndef batched(\n iterable: Iterable[_T],\n batch_size: int,\n container_factory: 'Callable[[Iterator[_T]], Collection[_T]]' = tuple\n) -> 'Iterator[Collection[_T]]':\n \"\"\" Splits an iterable into batches of batch_size and puts them\n inside a given collection (tuple by default).\n\n The container_factory is necessary in order to consume the iterator\n returned by islice. Otherwise this function would never return.\n\n \"\"\"\n\n iterator = iter(iterable)\n while True:\n batch = container_factory(islice(iterator, batch_size))\n if len(batch) == 0:\n return\n\n yield batch\n", "step-ids": [ 35, 38, 46, 49, 61 ] }
[ 35, 38, 46, 49, 61 ]
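A minimal usage sketch of the helpers documented in the record above; the import path onegov.core.utils is an assumption inferred from the `from onegov.core import log` import in the source, not something the record states:

# Usage sketch for the utilities above; onegov.core.utils is an assumed path.
from onegov.core.utils import batched, groupbylist, safe_format, safe_format_keys

# '[user]' is a format block; keys missing from the dictionary become ''
assert safe_format('[user] has logged in', {'user': 'admin'}) == 'admin has logged in'
assert safe_format_keys('[greeting] [user]') == ['greeting', 'user']

# batched() consumes the iterator in tuples of batch_size; the last batch may be short
assert list(batched(range(7), 3)) == [(0, 1, 2), (3, 4, 5), (6,)]

# groupbylist() works like itertools.groupby, but returns lists instead of generators
assert groupbylist([1, 1, 2]) == [(1, [1, 1]), (2, [2])]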
from django.shortcuts import get_object_or_404 from rest_framework import generics from .models import Duck from .serializers import Duck_Serializer class DuckList(generics.ListCreateAPIView): queryset = Duck.objects.all() serializer_class = Duck_Serializer def get_object(self): queryset = self.get_queryset() obj = get_object_or_404( queryset, pk = self.kwargs['pk'], ) return obj
normal
{ "blob_id": "8334478c8b7fc7688477cdb837467e00e857c07c", "index": 1196, "step-1": "<mask token>\n\n\nclass DuckList(generics.ListCreateAPIView):\n <mask token>\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass DuckList(generics.ListCreateAPIView):\n <mask token>\n <mask token>\n\n def get_object(self):\n queryset = self.get_queryset()\n obj = get_object_or_404(queryset, pk=self.kwargs['pk'])\n return obj\n", "step-3": "<mask token>\n\n\nclass DuckList(generics.ListCreateAPIView):\n queryset = Duck.objects.all()\n serializer_class = Duck_Serializer\n\n def get_object(self):\n queryset = self.get_queryset()\n obj = get_object_or_404(queryset, pk=self.kwargs['pk'])\n return obj\n", "step-4": "from django.shortcuts import get_object_or_404\nfrom rest_framework import generics\nfrom .models import Duck\nfrom .serializers import Duck_Serializer\n\n\nclass DuckList(generics.ListCreateAPIView):\n queryset = Duck.objects.all()\n serializer_class = Duck_Serializer\n\n def get_object(self):\n queryset = self.get_queryset()\n obj = get_object_or_404(queryset, pk=self.kwargs['pk'])\n return obj\n", "step-5": "from django.shortcuts import get_object_or_404\nfrom rest_framework import generics\nfrom .models import Duck\nfrom .serializers import Duck_Serializer\n\nclass DuckList(generics.ListCreateAPIView):\n\tqueryset = Duck.objects.all()\n\tserializer_class = Duck_Serializer\n\n\tdef get_object(self):\n\t\tqueryset = self.get_queryset()\n\t\tobj = get_object_or_404(\n\t\t\tqueryset,\n\t\t\tpk = self.kwargs['pk'],\n\t\t)\n\t\treturn obj\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
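The DuckList view in the row above overrides get_object() using self.kwargs['pk'], which only has an effect on a route that actually captures a pk. A hypothetical URLconf wiring it up (the route and module names are assumptions, not part of the record):

# Hypothetical urls.py for the DuckList view above; names are illustrative only.
from django.urls import path

from .views import DuckList

urlpatterns = [
    # List/create endpoint; DRF's ListCreateAPIView never calls get_object() here.
    path('ducks/', DuckList.as_view(), name='duck-list'),
    # Detail route supplying the 'pk' kwarg that the get_object() override expects.
    path('ducks/<int:pk>/', DuckList.as_view(), name='duck-detail'),
]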
<|reserved_special_token_0|> class Answer: """ Wrapper for A_B_X directory containing all associated attributes. Populate all fields of the class and call grade to determine if the question was correct **user_answers user_answer either "A" or "B" indicating which file sounded better user_preference_weight numeric value between 1-5 indicating how much better the preferred value was. 5 being significant and 1 minimal user_X_value either "A" or "B" denoting which file the user believes X was a duplicate of user_answer_confidence numeric value between 1-5 indicating how easy it was to distinguish between A and B and pick X x_answer_alpha the answer to which file X was a duplicate of. Either "A" or "B" A_value String field denoting which scenario A belonged to. Either scenario_one or SCENARIO_TWO_SUBDIR B_value String field denoting which scenario B belonged to. Either scenario_one or SCENARIO_TWO_SUBDIR correct Call self.grade to populate this field. Compares user_X_value and x_answer_alpha to determine if question was correct. Populates with boolean """ def __init__(self, question_num, **user_answers): self.question_num = question_num self.correct = None try: self.user_answer = user_answers[USER_ANSWER_KEY] except KeyError: self.user_answer = None try: self.user_preference_weight = user_answers[USER_PREFERENCE_KEY] except KeyError: self.user_preference_weight = None try: self.user_X_value = user_answers[USER_X_VALUE_KEY] except KeyError: self.user_X_value = None try: self.user_answer_confidence = user_answers[USER_CONFIDENCE_KEY] except KeyError: self.user_answer_confidence = None try: self.x_answer_alpha = user_answers[X_ANSWER_KEY] except KeyError: self.x_answer_alpha = None try: self.A_value = user_answers[A_VALUE_KEY] except KeyError: self.A_value = None try: self.B_value = user_answers[B_VALUE_KEY] except KeyError: self.B_value = None def grade(self): if self.x_answer_alpha == self.user_X_value: self.correct = True else: self.correct = False <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Answer: """ Wrapper for A_B_X directory containing all associated attributes. Populate all fields of the class and call grade to determine if the question was correct **user_answers user_answer either "A" or "B" indicating which file sounded better user_preference_weight numeric value between 1-5 indicating how much better the preferred value was. 5 being significant and 1 minimal user_X_value either "A" or "B" denoting which file the user believes X was a duplicate of user_answer_confidence numeric value between 1-5 indicating how easy it was to distinguish between A and B and pick X x_answer_alpha the answer to which file X was a duplicate of. Either "A" or "B" A_value String field denoting which scenario A belonged to. Either scenario_one or SCENARIO_TWO_SUBDIR B_value String field denoting which scenario B belonged to. Either scenario_one or SCENARIO_TWO_SUBDIR correct Call self.grade to populate this field. Compares user_X_value and x_answer_alpha to determine if question was correct. 
Populates with boolean
    """

    def __init__(self, question_num, **user_answers):
        self.question_num = question_num
        self.correct = None
        try:
            self.user_answer = user_answers[USER_ANSWER_KEY]
        except KeyError:
            self.user_answer = None
        try:
            self.user_preference_weight = user_answers[USER_PREFERENCE_KEY]
        except KeyError:
            self.user_preference_weight = None
        try:
            self.user_X_value = user_answers[USER_X_VALUE_KEY]
        except KeyError:
            self.user_X_value = None
        try:
            self.user_answer_confidence = user_answers[USER_CONFIDENCE_KEY]
        except KeyError:
            self.user_answer_confidence = None
        try:
            self.x_answer_alpha = user_answers[X_ANSWER_KEY]
        except KeyError:
            self.x_answer_alpha = None
        try:
            self.A_value = user_answers[A_VALUE_KEY]
        except KeyError:
            self.A_value = None
        try:
            self.B_value = user_answers[B_VALUE_KEY]
        except KeyError:
            self.B_value = None

    def grade(self):
        if self.x_answer_alpha == self.user_X_value:
            self.correct = True
        else:
            self.correct = False


<|reserved_special_token_0|>


def _cleanup_scenarios(adjusted_file_path):
    try:
        shutil.rmtree(adjusted_file_path)
    except:
        print(
            'The system could not delete the temporary audio files that were created for this test. This directory can be removed at {}'
            .format(adjusted_file_path))


def _create_output_directory(output_base_path):
    logging.info('Enter: _create_output_directory')
    global output_path
    output_path = os.path.join(output_base_path, TESTCASES_SUBDIR)
    if os.path.exists(output_path):
        try:
            input(
                """Please note there is already a Testcases directory at - {} .
Press enter to continue and remove it. Press CNTRL-C to exit."""
                .format(output_path))
            shutil.rmtree(output_path)
        except PermissionError:
            print(
                'There is a test directory located in the same location as the test directory location you specified'
                )
            print(
                'It cannot be removed because another process is still using it. Please close the process or delete it yourself.'
                )
            sys.exit()
        except KeyboardInterrupt:
            print('Exiting...')
            sys.exit()
    os.mkdir(output_path)
    logging.info('Exit: _create_output_directory')
    return output_path


def _create_answer_key(output_path):
    logging.info('Enter: _create_answer_key')
    global answer_key
    global scenario_one
    global scenario_two
    scenario_one_latency_data = {}
    if os.path.exists(os.path.join(scenario_one, SCNEARIO_ONE_DATA_FILE)):
        with open(os.path.join(scenario_one, SCNEARIO_ONE_DATA_FILE)
            ) as output_data:
            scenario_one_latency_data[SCENARIO_ONE_DATA_FILE_KEY] = yaml.load(
                output_data)
    scenario_two_latency_data = {}
    if os.path.exists(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)):
        with open(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)
            ) as output_data:
            scenario_two_latency_data[SCENARIO_TWO_DATA_FILE_KEY] = yaml.load(
                output_data)
    with open(os.path.join(output_path, ANSWER_KEY_NAME), 'w'
        ) as answer_key_yml:
        yaml.dump(scenario_one_latency_data, answer_key_yml,
            default_flow_style=False)
        yaml.dump(scenario_two_latency_data, answer_key_yml,
            default_flow_style=False)
        for question in answer_key:
            yaml_dict = {}
            Key = str(ANSWER_KEY_QUESTION_KEY + str(question.question_num))
            yaml_dict[Key] = {X_ANSWER_KEY: question.x_answer_alpha,
                A_VALUE_KEY: question.A_value, B_VALUE_KEY: question.B_value}
            yaml.dump(yaml_dict, answer_key_yml, default_flow_style=False)
    logging.info('Exit: _create_answer_key')


def _create_temp_dir(root_directory, scenario_one, scenario_two):
    logging.info('Enter: _create_temp_dir')
    adjusted_file_path = os.path.join(root_directory, ADJUSTED_AUDIO_SUBDIR)
    scenario_one_temp = os.path.join(adjusted_file_path, SCENARIO_ONE_SUBDIR)
    scenario_two_temp = os.path.join(adjusted_file_path, SCENARIO_TWO_SUBDIR)
    try:
        os.mkdir(adjusted_file_path)
    except FileExistsError:
        print(
            'To properly create ABX tests, the audio files are modified so audio begins play at the same time'
            )
        print(
            "In order to do this, a new directory called 'adjusted_audio' is temporarily created to hold the adjusted audio."
            )
        input(
            'This directory already exists. Press enter to remove and continue or CTRL-C to quit'
            )
        shutil.rmtree(adjusted_file_path)
        os.mkdir(adjusted_file_path)
    shutil.copytree(scenario_one, scenario_one_temp)
    shutil.copytree(scenario_two, scenario_two_temp)
    logging.info('Exit: _create_temp_dir')
    return adjusted_file_path, scenario_one_temp, scenario_two_temp


def create_A_B_X_cases(A_B_cases_zip_list, output_path):
    """
    Method to create A_B_X testing directories and return the corresponding answer key
    An A file is chosen from either the scenario one or two with a 50/50 probability. The B file is then from the scenario not
    chosen for A. An X file is then created with a 50/50 probability of being either a duplicate of A or B

    Parameters:
    A_B_cases_zip_list: A list containing absolute file pairs [[scenario_one, scenario_two]...]
    output_path: absolute file path to store testcase directory

    Returns:
    None
    """
    logging.info('Enter: create_A_B_X_cases ')
    global scenario_one
    global scenario_two
    global answer_key
    for case_num, case in enumerate(A_B_cases_zip_list):
        if case_num > MAX_CASE_NUM:
            logging.info(
                'The amount of cases has exceeded 25. Please note that the accompanying excel sheet only has 25 answer slots and that it will need to be restructured'
                )
            print(
                'The amount of cases has exceeded 25. 
Please note that the accompanying excel sheet only has 25 answer slots and that it will need to be restructured' ) test_case_path = os.path.join(output_path, str(case_num)) try: os.mkdir(test_case_path) except FileExistsError: logging.debug( 'Could not create test case directory at {} - encountered FileExistsError' .format(test_case_path)) print( 'Could not create test case directory at {} - encountered FileExistsError' .format(test_case_path)) sys.exit() switch_A_B = random.randint(0, 1) x_answer = random.randint(0, 1) if switch_A_B: cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[1 ] + ' ' + os.path.join(test_case_path, A_CASE_NAME + str( case_num) + AUDIO_TYPE) os.system(cmd_command_copy_a) cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[0 ] + ' ' + os.path.join(test_case_path, B_CASE_NAME + str( case_num) + AUDIO_TYPE) os.system(cmd_command_copy_b) if x_answer == 1: x_answer_alpha = USER_ANSWER_CASE_A cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[1 ] + ' ' + os.path.join(test_case_path, X_CASE_NAME + str(case_num) + AUDIO_TYPE) os.system(cmd_command_copy_a) if x_answer == 0: x_answer_alpha = USER_ANSWER_CASE_B cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[0 ] + ' ' + os.path.join(test_case_path, X_CASE_NAME + str(case_num) + AUDIO_TYPE) os.system(cmd_command_copy_b) A_value = ANSWER_KEY_SCENARIO_TWO B_value = ANSWER_KEY_SCENARIO_ONE else: cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[0 ] + ' ' + os.path.join(test_case_path, A_CASE_NAME + str( case_num) + AUDIO_TYPE) os.system(cmd_command_copy_a) cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[1 ] + ' ' + os.path.join(test_case_path, B_CASE_NAME + str( case_num) + AUDIO_TYPE) os.system(cmd_command_copy_b) if x_answer == 0: x_answer_alpha = USER_ANSWER_CASE_A cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[0 ] + ' ' + os.path.join(test_case_path, X_CASE_NAME + str(case_num) + AUDIO_TYPE) os.system(cmd_command_copy_a) if x_answer == 1: x_answer_alpha = USER_ANSWER_CASE_B cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[1 ] + ' ' + os.path.join(test_case_path, X_CASE_NAME + str(case_num) + AUDIO_TYPE) os.system(cmd_command_copy_b) A_value = ANSWER_KEY_SCENARIO_ONE B_value = ANSWER_KEY_SCENARIO_TWO question_info = Answer(case_num, x_answer_alpha=x_answer_alpha, A_value=A_value, B_value=B_value) answer_key.append(question_info) logging.info('Exit: create_A_B_X_cases') def create_manual_tests(): logging.info('Enter: create_manual_tests') global root_directory scenario_one, scenario_two, output_base_path = _collect_locations() output_path = _create_output_directory(output_base_path) if os.path.exists(os.path.join(output_path, ANSWER_KEY_NAME)): input('An answer_key.yml file already exists at - ' + output_path + ' - this file will be deleted. Press enter if this is okay of CNTRL-C to exit' ) os.remove(os.path.join(output_path, ANSWER_KEY_NAME)) adjusted_file_path, scenario_one_temp, scenario_two_temp = ( _create_temp_dir(root_directory, scenario_one, scenario_two)) print( 'Please note that to create the manual tests, the latency of each file must be calculated. This takes roughly 30 minutes per 25 recordings. Press Enter to continue.' ) rate_log, correlation_sample_log, correlation_coefficient_log = (aa. 
find_latency_values(scenario_one_temp, scenario_two_temp)) file_zip = aa.pair_directories(scenario_one_temp, scenario_two_temp) aa.adjust_files(correlation_sample_log, rate_log, file_zip) create_A_B_X_cases(file_zip, output_path) _cleanup_scenarios(adjusted_file_path) _create_answer_key(output_base_path) print('done') logging.info('Exit: create_manual_tests') <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Answer: """ Wrapper for A_B_X directory containing all associated attributes. Populate all fields of the class and call grade to determine if the question was correct **user_answers user_answer either "A" or "B" indicating which file sounded better user_preference_weight numeric value between 1-5 indicating how much better the preferred value was. 5 being significant and 1 minimal user_X_value either "A" or "B" denoting which file the user believes X was a duplicate of user_answer_confidence numeric value between 1-5 indicating how easy it was to distinguish between A and B and pick X x_answer_alpha the answer to which file X was a duplicate of. Either "A" or "B" A_value String field denoting which scenario A belonged to. Either scenario_one or SCENARIO_TWO_SUBDIR B_value String field denoting which scenario B belonged to. Either scenario_one or SCENARIO_TWO_SUBDIR correct Call self.grade to populate this field. Compares user_X_value and x_answer_alpha to determine if question was correct. Populates with boolean """ def __init__(self, question_num, **user_answers): self.question_num = question_num self.correct = None try: self.user_answer = user_answers[USER_ANSWER_KEY] except KeyError: self.user_answer = None try: self.user_preference_weight = user_answers[USER_PREFERENCE_KEY] except KeyError: self.user_preference_weight = None try: self.user_X_value = user_answers[USER_X_VALUE_KEY] except KeyError: self.user_X_value = None try: self.user_answer_confidence = user_answers[USER_CONFIDENCE_KEY] except KeyError: self.user_answer_confidence = None try: self.x_answer_alpha = user_answers[X_ANSWER_KEY] except KeyError: self.x_answer_alpha = None try: self.A_value = user_answers[A_VALUE_KEY] except KeyError: self.A_value = None try: self.B_value = user_answers[B_VALUE_KEY] except KeyError: self.B_value = None def grade(self): if self.x_answer_alpha == self.user_X_value: self.correct = True else: self.correct = False def _collect_locations(): logging.info('Enter: _collect_locations') global scenario_one global scenario_two global output_base_path if not os.path.exists(scenario_one): print('Scenario One file path does not exist. Exiting') sys.exit() if not os.path.exists(scenario_two): print('Scenario Two file path does not exist. Exiting') sys.exit() print('Creating listening test...') logging.info('Exit: _collect_locations') return scenario_one, scenario_two, output_base_path def _cleanup_scenarios(adjusted_file_path): try: shutil.rmtree(adjusted_file_path) except: print( 'The system could not delete the temporary audio files that were created for this test. This directory can be removed at {}' .format(adjusted_file_path)) def _create_output_directory(output_base_path): logging.info('Enter: _create_output_directory') global output_path output_path = os.path.join(output_base_path, TESTCASES_SUBDIR) if os.path.exists(output_path): try: input( """Please note there is already a Testcases directory at - {} . Press enter to continue and remove it. 
Press CNTRL-C to exit."""
                .format(output_path))
            shutil.rmtree(output_path)
        except PermissionError:
            print(
                'There is a test directory located in the same location as the test directory location you specified'
                )
            print(
                'It cannot be removed because another process is still using it. Please close the process or delete it yourself.'
                )
            sys.exit()
        except KeyboardInterrupt:
            print('Exiting...')
            sys.exit()
    os.mkdir(output_path)
    logging.info('Exit: _create_output_directory')
    return output_path


def _create_answer_key(output_path):
    logging.info('Enter: _create_answer_key')
    global answer_key
    global scenario_one
    global scenario_two
    scenario_one_latency_data = {}
    if os.path.exists(os.path.join(scenario_one, SCNEARIO_ONE_DATA_FILE)):
        with open(os.path.join(scenario_one, SCNEARIO_ONE_DATA_FILE)
            ) as output_data:
            scenario_one_latency_data[SCENARIO_ONE_DATA_FILE_KEY] = yaml.load(
                output_data)
    scenario_two_latency_data = {}
    if os.path.exists(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)):
        with open(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)
            ) as output_data:
            scenario_two_latency_data[SCENARIO_TWO_DATA_FILE_KEY] = yaml.load(
                output_data)
    with open(os.path.join(output_path, ANSWER_KEY_NAME), 'w'
        ) as answer_key_yml:
        yaml.dump(scenario_one_latency_data, answer_key_yml,
            default_flow_style=False)
        yaml.dump(scenario_two_latency_data, answer_key_yml,
            default_flow_style=False)
        for question in answer_key:
            yaml_dict = {}
            Key = str(ANSWER_KEY_QUESTION_KEY + str(question.question_num))
            yaml_dict[Key] = {X_ANSWER_KEY: question.x_answer_alpha,
                A_VALUE_KEY: question.A_value, B_VALUE_KEY: question.B_value}
            yaml.dump(yaml_dict, answer_key_yml, default_flow_style=False)
    logging.info('Exit: _create_answer_key')


def _create_temp_dir(root_directory, scenario_one, scenario_two):
    logging.info('Enter: _create_temp_dir')
    adjusted_file_path = os.path.join(root_directory, ADJUSTED_AUDIO_SUBDIR)
    scenario_one_temp = os.path.join(adjusted_file_path, SCENARIO_ONE_SUBDIR)
    scenario_two_temp = os.path.join(adjusted_file_path, SCENARIO_TWO_SUBDIR)
    try:
        os.mkdir(adjusted_file_path)
    except FileExistsError:
        print(
            'To properly create ABX tests, the audio files are modified so audio begins play at the same time'
            )
        print(
            "In order to do this, a new directory called 'adjusted_audio' is temporarily created to hold the adjusted audio."
            )
        input(
            'This directory already exists. Press enter to remove and continue or CTRL-C to quit'
            )
        shutil.rmtree(adjusted_file_path)
        os.mkdir(adjusted_file_path)
    shutil.copytree(scenario_one, scenario_one_temp)
    shutil.copytree(scenario_two, scenario_two_temp)
    logging.info('Exit: _create_temp_dir')
    return adjusted_file_path, scenario_one_temp, scenario_two_temp


def create_A_B_X_cases(A_B_cases_zip_list, output_path):
    """
    Method to create A_B_X testing directories and return the corresponding answer key
    An A file is chosen from either the scenario one or two with a 50/50 probability. The B file is then from the scenario not
    chosen for A. An X file is then created with a 50/50 probability of being either a duplicate of A or B

    Parameters:
    A_B_cases_zip_list: A list containing absolute file pairs [[scenario_one, scenario_two]...]
    output_path: absolute file path to store testcase directory

    Returns:
    None
    """
    logging.info('Enter: create_A_B_X_cases ')
    global scenario_one
    global scenario_two
    global answer_key
    for case_num, case in enumerate(A_B_cases_zip_list):
        if case_num > MAX_CASE_NUM:
            logging.info(
                'The amount of cases has exceeded 25. 
Please note that the accompanying excel sheet only has 25 answer slots and that it will need to be restructured' ) print( 'The amount of cases has exceeded 25. Please note that the accompanying excel sheet only has 25 answer slots and that it will need to be restructured' ) test_case_path = os.path.join(output_path, str(case_num)) try: os.mkdir(test_case_path) except FileExistsError: logging.debug( 'Could not create test case directory at {} - encountered FileExistsError' .format(test_case_path)) print( 'Could not create test case directory at {} - encountered FileExistsError' .format(test_case_path)) sys.exit() switch_A_B = random.randint(0, 1) x_answer = random.randint(0, 1) if switch_A_B: cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[1 ] + ' ' + os.path.join(test_case_path, A_CASE_NAME + str( case_num) + AUDIO_TYPE) os.system(cmd_command_copy_a) cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[0 ] + ' ' + os.path.join(test_case_path, B_CASE_NAME + str( case_num) + AUDIO_TYPE) os.system(cmd_command_copy_b) if x_answer == 1: x_answer_alpha = USER_ANSWER_CASE_A cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[1 ] + ' ' + os.path.join(test_case_path, X_CASE_NAME + str(case_num) + AUDIO_TYPE) os.system(cmd_command_copy_a) if x_answer == 0: x_answer_alpha = USER_ANSWER_CASE_B cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[0 ] + ' ' + os.path.join(test_case_path, X_CASE_NAME + str(case_num) + AUDIO_TYPE) os.system(cmd_command_copy_b) A_value = ANSWER_KEY_SCENARIO_TWO B_value = ANSWER_KEY_SCENARIO_ONE else: cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[0 ] + ' ' + os.path.join(test_case_path, A_CASE_NAME + str( case_num) + AUDIO_TYPE) os.system(cmd_command_copy_a) cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[1 ] + ' ' + os.path.join(test_case_path, B_CASE_NAME + str( case_num) + AUDIO_TYPE) os.system(cmd_command_copy_b) if x_answer == 0: x_answer_alpha = USER_ANSWER_CASE_A cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[0 ] + ' ' + os.path.join(test_case_path, X_CASE_NAME + str(case_num) + AUDIO_TYPE) os.system(cmd_command_copy_a) if x_answer == 1: x_answer_alpha = USER_ANSWER_CASE_B cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[1 ] + ' ' + os.path.join(test_case_path, X_CASE_NAME + str(case_num) + AUDIO_TYPE) os.system(cmd_command_copy_b) A_value = ANSWER_KEY_SCENARIO_ONE B_value = ANSWER_KEY_SCENARIO_TWO question_info = Answer(case_num, x_answer_alpha=x_answer_alpha, A_value=A_value, B_value=B_value) answer_key.append(question_info) logging.info('Exit: create_A_B_X_cases') def create_manual_tests(): logging.info('Enter: create_manual_tests') global root_directory scenario_one, scenario_two, output_base_path = _collect_locations() output_path = _create_output_directory(output_base_path) if os.path.exists(os.path.join(output_path, ANSWER_KEY_NAME)): input('An answer_key.yml file already exists at - ' + output_path + ' - this file will be deleted. Press enter if this is okay of CNTRL-C to exit' ) os.remove(os.path.join(output_path, ANSWER_KEY_NAME)) adjusted_file_path, scenario_one_temp, scenario_two_temp = ( _create_temp_dir(root_directory, scenario_one, scenario_two)) print( 'Please note that to create the manual tests, the latency of each file must be calculated. This takes roughly 30 minutes per 25 recordings. Press Enter to continue.' ) rate_log, correlation_sample_log, correlation_coefficient_log = (aa. 
        find_latency_values(scenario_one_temp, scenario_two_temp))
    file_zip = aa.pair_directories(scenario_one_temp, scenario_two_temp)
    aa.adjust_files(correlation_sample_log, rate_log, file_zip)
    create_A_B_X_cases(file_zip, output_path)
    _cleanup_scenarios(adjusted_file_path)
    _create_answer_key(output_base_path)
    print('done')
    logging.info('Exit: create_manual_tests')


<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
help_string = """
Please note that manual_test.py makes 3 assumptions about these file paths.
1.Both scenarios contain the same amount of wav files.
2.The wav files in both scenarios have a one to one correspondence between each other. Each test case contains a pair of files, one from each scenario. This pair is made by matching files between scenarios with the same names.
3.There are no more than 25 audio file pairs"""
parser = argparse.ArgumentParser(description=
    'Script to create a listening test. The output, test case directory and answer_key.yml file, can be found in the root directory.'
     + help_string)
parser.add_argument('-o', dest='output_base_path', default=os.getcwd(), help=
    '(optional)Absolute file path to location to save test directory and answer key (default: root directory)'
    )
parser.add_argument('scenario_one', help=
    'Absolute file path to location of first scenario. Required')
parser.add_argument('scenario_two', help=
    'Absolute file path to location of second scenario. Required')
args = parser.parse_args()
output_base_path = args.output_base_path
root_directory = os.getcwd()
scenario_one = args.scenario_one
scenario_one_latency = 0
scenario_one_correlation_coefficient = 0
scenario_two = args.scenario_two
scenario_two_latency = 0
scenario_two_correlation_coefficient = 0
output_path = ''
answer_key = []
USER_ANSWER_KEY = 'user_answer'
USER_PREFERENCE_KEY = 'user_preference_weight'
USER_X_VALUE_KEY = 'user_X_value'
USER_CONFIDENCE_KEY = 'user_answer_confidence'
X_ANSWER_KEY = 'x_answer_alpha'
A_VALUE_KEY = 'A_value'
B_VALUE_KEY = 'B_value'
TESTCASES_SUBDIR = 'testcases'
A_CASE_NAME = 'A_'
B_CASE_NAME = 'B_'
X_CASE_NAME = 'X_'
WNDWS_COPY_CMD = 'copy'
AUDIO_TYPE = '.wav'
SCENARIO_ONE_DATA_FILE = 'output_data.yml'
SCENARIO_ONE_DATA_FILE_KEY = 'Scenario One'
SCENARIO_TWO_DATA_FILE = 'output_data.yml'
SCENARIO_TWO_DATA_FILE_KEY = 'Scenario Two'
ANSWER_KEY_NAME = 'answer_key.yml'
USER_ANSWER_CASE_A = 'A'
USER_ANSWER_CASE_B = 'B'
ANSWER_KEY_SCENARIO_ONE = 'scenario one'
ANSWER_KEY_SCENARIO_TWO = 'scenario two'
ANSWER_KEY_QUESTION_KEY = 'Q_'
MAX_CASE_NUM = 24
ADJUSTED_AUDIO_SUBDIR = 'adjusted_audio'
SCENARIO_ONE_SUBDIR = 'scenario_one'
SCENARIO_TWO_SUBDIR = 'scenario_two'


class Answer:
    """
    Wrapper for A_B_X directory containing all associated attributes.
    Populate all fields of the class and call grade to determine if the
    question was correct
    **user_answers
    user_answer either "A" or "B" indicating which file sounded better
    user_preference_weight numeric value between 1-5 indicating how much better the
    preferred value was. 5 being significant and 1 minimal
    user_X_value either "A" or "B" denoting which file the user believes
    X was a duplicate of
    user_answer_confidence numeric value between 1-5 indicating how easy it was to
    distinguish between A and B and pick X
    x_answer_alpha the answer to which file X was a duplicate of. Either
    "A" or "B"
    A_value String field denoting which scenario A belonged to. Either
    scenario_one or SCENARIO_TWO_SUBDIR
    B_value String field denoting which scenario B belonged to. Either
    scenario_one or SCENARIO_TWO_SUBDIR
    correct Call self.grade to populate this field. Compares user_X_value
    and x_answer_alpha to determine if question was correct.
    Populates with boolean
    """

    def __init__(self, question_num, **user_answers):
        self.question_num = question_num
        self.correct = None
        try:
            self.user_answer = user_answers[USER_ANSWER_KEY]
        except KeyError:
            self.user_answer = None
        try:
            self.user_preference_weight = user_answers[USER_PREFERENCE_KEY]
        except KeyError:
            self.user_preference_weight = None
        try:
            self.user_X_value = user_answers[USER_X_VALUE_KEY]
        except KeyError:
            self.user_X_value = None
        try:
            self.user_answer_confidence = user_answers[USER_CONFIDENCE_KEY]
        except KeyError:
            self.user_answer_confidence = None
        try:
            self.x_answer_alpha = user_answers[X_ANSWER_KEY]
        except KeyError:
            self.x_answer_alpha = None
        try:
            self.A_value = user_answers[A_VALUE_KEY]
        except KeyError:
            self.A_value = None
        try:
            self.B_value = user_answers[B_VALUE_KEY]
        except KeyError:
            self.B_value = None

    def grade(self):
        if self.x_answer_alpha == self.user_X_value:
            self.correct = True
        else:
            self.correct = False


def _collect_locations():
    logging.info('Enter: _collect_locations')
    global scenario_one
    global scenario_two
    global output_base_path
    if not os.path.exists(scenario_one):
        print('Scenario One file path does not exist. Exiting')
        sys.exit()
    if not os.path.exists(scenario_two):
        print('Scenario Two file path does not exist. Exiting')
        sys.exit()
    print('Creating listening test...')
    logging.info('Exit: _collect_locations')
    return scenario_one, scenario_two, output_base_path


def _cleanup_scenarios(adjusted_file_path):
    try:
        shutil.rmtree(adjusted_file_path)
    except:
        print(
            'The system could not delete the temporary audio files that were created for this test. This directory can be removed at {}'
            .format(adjusted_file_path))


def _create_output_directory(output_base_path):
    logging.info('Enter: _create_output_directory')
    global output_path
    output_path = os.path.join(output_base_path, TESTCASES_SUBDIR)
    if os.path.exists(output_path):
        try:
            input(
                """Please note there is already a Testcases directory at - {} .
Press enter to continue and remove it. Press CNTRL-C to exit."""
                .format(output_path))
            shutil.rmtree(output_path)
        except PermissionError:
            print(
                'There is a test directory located in the same location as the test directory location you specified'
                )
            print(
                'It cannot be removed because another process is still using it. Please close the process or delete yourself.'
                )
            sys.exit()
        except KeyboardInterrupt:
            print('Exiting...')
            sys.exit()
    os.mkdir(output_path)
    logging.info('Exit: _create_output_directory')
    return output_path


def _create_answer_key(output_path):
    logging.info('Enter: _create_answer_key')
    global answer_key
    global scenario_one
    global scenario_two
    scenario_one_latency_data = {}
    if os.path.exists(os.path.join(scenario_one, SCENARIO_ONE_DATA_FILE)):
        with open(os.path.join(scenario_one, SCENARIO_ONE_DATA_FILE)
            ) as output_data:
            scenario_one_latency_data[SCENARIO_ONE_DATA_FILE_KEY] = yaml.load(
                output_data)
    scenario_two_latency_data = {}
    if os.path.exists(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)):
        with open(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)
            ) as output_data:
            scenario_two_latency_data[SCENARIO_TWO_DATA_FILE_KEY] = yaml.load(
                output_data)
    with open(os.path.join(output_path, ANSWER_KEY_NAME), 'w'
        ) as answer_key_yml:
        yaml.dump(scenario_one_latency_data, answer_key_yml,
            default_flow_style=False)
        yaml.dump(scenario_two_latency_data, answer_key_yml,
            default_flow_style=False)
        for question in answer_key:
            yaml_dict = {}
            Key = str(ANSWER_KEY_QUESTION_KEY + str(question.question_num))
            yaml_dict[Key] = {X_ANSWER_KEY: question.x_answer_alpha,
                A_VALUE_KEY: question.A_value, B_VALUE_KEY: question.B_value}
            yaml.dump(yaml_dict, answer_key_yml, default_flow_style=False)
    logging.info('Exit: _create_answer_key')


def _create_temp_dir(root_directory, scenario_one, scenario_two):
    logging.info('Enter: _create_temp_dir')
    adjusted_file_path = os.path.join(root_directory, ADJUSTED_AUDIO_SUBDIR)
    scenario_one_temp = os.path.join(adjusted_file_path, SCENARIO_ONE_SUBDIR)
    scenario_two_temp = os.path.join(adjusted_file_path, SCENARIO_TWO_SUBDIR)
    try:
        os.mkdir(adjusted_file_path)
    except FileExistsError:
        print(
            'To properly create ABX tests, the audio files are modified so audio begins play at the same time'
            )
        print(
            "In order to do this, a new directory called 'adjusted_audio' is temporarily created to hold the adjusted audio."
            )
        input(
            'This directory already exists. Press enter to remove and continue or CTRL-C to quit'
            )
        shutil.rmtree(adjusted_file_path)
        os.mkdir(adjusted_file_path)
    shutil.copytree(scenario_one, scenario_one_temp)
    shutil.copytree(scenario_two, scenario_two_temp)
    logging.info('Exit: _create_temp_dir')
    return adjusted_file_path, scenario_one_temp, scenario_two_temp


def create_A_B_X_cases(A_B_cases_zip_list, output_path):
    """
    Method to create A_B_X testing directories and return the corresponding answer key
    An A file is chosen from either the scenario one or two with a 50/50 probability.
    The B file is then from the scenario not chosen for A. An X file is then created with a 50/50
    probability of being either a duplicate of A or B
    Parameters:
    A_B_cases_zip_list: A list containing absolute file pairs
    [[scenario_one, scenario_two]...]
    output_path: absolute file path to store testcase directory

    Returns:
    None
    """
    logging.info('Enter: create_A_B_X_cases ')
    global scenario_one
    global scenario_two
    global answer_key
    for case_num, case in enumerate(A_B_cases_zip_list):
        if case_num > MAX_CASE_NUM:
            logging.info(
                'The amount of cases has exceeded 25. Please note that the accompanying excel sheet only has 25 answer slots and that it will need to be restructured'
                )
            print(
                'The amount of cases has exceeded 25. Please note that the accompanying excel sheet only has 25 answer slots and that it will need to be restructured'
                )
        test_case_path = os.path.join(output_path, str(case_num))
        try:
            os.mkdir(test_case_path)
        except FileExistsError:
            logging.debug(
                'Could not create test case directory at {} - encountered FileExistsError'
                .format(test_case_path))
            print(
                'Could not create test case directory at {} - encountered FileExistsError'
                .format(test_case_path))
            sys.exit()
        switch_A_B = random.randint(0, 1)
        x_answer = random.randint(0, 1)
        if switch_A_B:
            cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[1
                ] + ' ' + os.path.join(test_case_path, A_CASE_NAME + str(
                case_num) + AUDIO_TYPE)
            os.system(cmd_command_copy_a)
            cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[0
                ] + ' ' + os.path.join(test_case_path, B_CASE_NAME + str(
                case_num) + AUDIO_TYPE)
            os.system(cmd_command_copy_b)
            if x_answer == 1:
                x_answer_alpha = USER_ANSWER_CASE_A
                cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[1
                    ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +
                    str(case_num) + AUDIO_TYPE)
                os.system(cmd_command_copy_a)
            if x_answer == 0:
                x_answer_alpha = USER_ANSWER_CASE_B
                cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[0
                    ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +
                    str(case_num) + AUDIO_TYPE)
                os.system(cmd_command_copy_b)
            A_value = ANSWER_KEY_SCENARIO_TWO
            B_value = ANSWER_KEY_SCENARIO_ONE
        else:
            cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[0
                ] + ' ' + os.path.join(test_case_path, A_CASE_NAME + str(
                case_num) + AUDIO_TYPE)
            os.system(cmd_command_copy_a)
            cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[1
                ] + ' ' + os.path.join(test_case_path, B_CASE_NAME + str(
                case_num) + AUDIO_TYPE)
            os.system(cmd_command_copy_b)
            if x_answer == 0:
                x_answer_alpha = USER_ANSWER_CASE_A
                cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[0
                    ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +
                    str(case_num) + AUDIO_TYPE)
                os.system(cmd_command_copy_a)
            if x_answer == 1:
                x_answer_alpha = USER_ANSWER_CASE_B
                cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[1
                    ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +
                    str(case_num) + AUDIO_TYPE)
                os.system(cmd_command_copy_b)
            A_value = ANSWER_KEY_SCENARIO_ONE
            B_value = ANSWER_KEY_SCENARIO_TWO
        question_info = Answer(case_num, x_answer_alpha=x_answer_alpha,
            A_value=A_value, B_value=B_value)
        answer_key.append(question_info)
    logging.info('Exit: create_A_B_X_cases')


def create_manual_tests():
    logging.info('Enter: create_manual_tests')
    global root_directory
    scenario_one, scenario_two, output_base_path = _collect_locations()
    output_path = _create_output_directory(output_base_path)
    if os.path.exists(os.path.join(output_path, ANSWER_KEY_NAME)):
        input('An answer_key.yml file already exists at - ' + output_path +
            ' - this file will be deleted. Press enter if this is okay or CNTRL-C to exit'
            )
        os.remove(os.path.join(output_path, ANSWER_KEY_NAME))
    adjusted_file_path, scenario_one_temp, scenario_two_temp = (
        _create_temp_dir(root_directory, scenario_one, scenario_two))
    print(
        'Please note that to create the manual tests, the latency of each file must be calculated. This takes roughly 30 minutes per 25 recordings. Press Enter to continue.'
        )
    rate_log, correlation_sample_log, correlation_coefficient_log = (aa.
        find_latency_values(scenario_one_temp, scenario_two_temp))
    file_zip = aa.pair_directories(scenario_one_temp, scenario_two_temp)
    aa.adjust_files(correlation_sample_log, rate_log, file_zip)
    create_A_B_X_cases(file_zip, output_path)
    _cleanup_scenarios(adjusted_file_path)
    _create_answer_key(output_base_path)
    print('done')
    logging.info('Exit: create_manual_tests')


if __name__ == '__main__':
    logging.basicConfig(filename='manualtest.log', level=logging.INFO,
        format=
        '%(asctime)s %(levelname)s %(module)s line: %(lineno)d, %(message)s')
    logging.info('Enter: main')
    create_manual_tests()
    logging.info('Exit: main')
<|reserved_special_token_1|>
import os
import yaml
import sys
import random
import shutil
import openpyxl
import audioanalysis as aa
import numpy as np
import argparse
import logging
"""
manualtest.py

Script to create a listening test. The output, test
case directory and answer_key.yml file, can be
found in the root directory.

manual test creation
responsibilities:
1) directory of directories that each contain two files to compare(a,b) and a duplicated one (x)
    example scenarios to test:
    JITTER_BUFFER_INIT_X VS. JITTER_BUFFER_INIT_Y
    dev version vs dev version
    need to come up with more
2) an output yaml file labeled answer_key.yml that says which (a,b) is x

"""
# command line parse
help_string = ("\nPlease note that manual_test.py makes 3 assumptions about "
               "these file paths. "
               "\n1.Both scenarios contain the same amount of wav files."
               "\n2.The wav files in both scenarios have a one to one "
               "correspondence between each other. Each test case contains a "
               "pair of files, one from each scenario. This pair is made by "
               "matching files between scenarios with the same names."
               "\n3.There are no more than 25 audio file pairs")

parser = argparse.ArgumentParser(description="Script to create a listening test. The output, test case directory and answer_key.yml file, can be found in the root directory."+help_string)
parser.add_argument("-o", dest="output_base_path", default= os.getcwd(),help="(optional)Absolute file path to location to save test directory and answer key (default: root directory)")
parser.add_argument("scenario_one", help="Absolute file path to location of first scenario. Required")
parser.add_argument("scenario_two", help="Absolute file path to location of second scenario. Required")
args=parser.parse_args()

# globals
output_base_path=args.output_base_path
root_directory = os.getcwd()
# first scenario
scenario_one = args.scenario_one
scenario_one_latency=0
scenario_one_correlation_coefficient=0
# second scenario
scenario_two = args.scenario_two
scenario_two_latency=0
scenario_two_correlation_coefficient=0
output_path=""
answer_key=[]
USER_ANSWER_KEY="user_answer"
USER_PREFERENCE_KEY="user_preference_weight"
USER_X_VALUE_KEY="user_X_value"
USER_CONFIDENCE_KEY="user_answer_confidence"
X_ANSWER_KEY="x_answer_alpha"
A_VALUE_KEY="A_value"
B_VALUE_KEY="B_value"
TESTCASES_SUBDIR="testcases"
A_CASE_NAME="A_"
B_CASE_NAME="B_"
X_CASE_NAME="X_"
WNDWS_COPY_CMD="copy"
AUDIO_TYPE=".wav"
SCENARIO_ONE_DATA_FILE="output_data.yml"
SCENARIO_ONE_DATA_FILE_KEY="Scenario One"
SCENARIO_TWO_DATA_FILE="output_data.yml"
SCENARIO_TWO_DATA_FILE_KEY="Scenario Two"
ANSWER_KEY_NAME="answer_key.yml"
USER_ANSWER_CASE_A="A"
USER_ANSWER_CASE_B="B"
ANSWER_KEY_SCENARIO_ONE="scenario one"
ANSWER_KEY_SCENARIO_TWO="scenario two"
ANSWER_KEY_QUESTION_KEY="Q_"
MAX_CASE_NUM=24
ADJUSTED_AUDIO_SUBDIR="adjusted_audio"
SCENARIO_ONE_SUBDIR="scenario_one"
SCENARIO_TWO_SUBDIR="scenario_two"

class Answer():
    """
    Wrapper for A_B_X directory containing all associated attributes.
    Populate all fields of the class and call grade to determine if the
    question was correct
    **user_answers
    user_answer either "A" or "B" indicating which file sounded better
    user_preference_weight numeric value between 1-5 indicating how much better the
    preferred value was. 5 being significant and 1 minimal
    user_X_value either "A" or "B" denoting which file the user believes
    X was a duplicate of
    user_answer_confidence numeric value between 1-5 indicating how easy it was to
    distinguish between A and B and pick X
    x_answer_alpha the answer to which file X was a duplicate of. Either
    "A" or "B"
    A_value String field denoting which scenario A belonged to. Either
    scenario_one or SCENARIO_TWO_SUBDIR
    B_value String field denoting which scenario B belonged to. Either
    scenario_one or SCENARIO_TWO_SUBDIR
    correct Call self.grade to populate this field. Compares user_X_value
    and x_answer_alpha to determine if question was correct.
    Populates with boolean
    """
    def __init__(self, question_num, **user_answers):
        self.question_num=question_num
        self.correct = None
        try:
            self.user_answer=user_answers[USER_ANSWER_KEY]
        except KeyError:
            self.user_answer=None
        try:
            self.user_preference_weight=user_answers[USER_PREFERENCE_KEY]
        except KeyError:
            self.user_preference_weight=None
        try:
            self.user_X_value=user_answers[USER_X_VALUE_KEY]
        except KeyError:
            self.user_X_value=None
        try:
            self.user_answer_confidence=user_answers[USER_CONFIDENCE_KEY]
        except KeyError:
            self.user_answer_confidence=None
        try:
            self.x_answer_alpha=user_answers[X_ANSWER_KEY]
        except KeyError:
            self.x_answer_alpha=None
        try:
            self.A_value=user_answers[A_VALUE_KEY]
        except KeyError:
            self.A_value=None
        try:
            self.B_value=user_answers[B_VALUE_KEY]
        except KeyError:
            self.B_value=None

    def grade(self):
        if self.x_answer_alpha==self.user_X_value:
            self.correct=True
        else:
            self.correct=False

def _collect_locations():
    # Method to pair all the files for comparison in the two scenarios the user has elected to compare
    logging.info("Enter: _collect_locations")
    global scenario_one
    global scenario_two
    global output_base_path
    if not os.path.exists(scenario_one):
        print("Scenario One file path does not exist. Exiting")
        sys.exit()
    if not os.path.exists(scenario_two):
        print("Scenario Two file path does not exist. Exiting")
        sys.exit()
    print("Creating listening test...")
    logging.info("Exit: _collect_locations")
    return scenario_one, scenario_two, output_base_path

def _cleanup_scenarios(adjusted_file_path):
    # Delete the adjusted audio created for this module
    try:
        shutil.rmtree(adjusted_file_path)
    except:
        print("The system could not delete the temporary audio files that "
              "were created for this test. This directory can be removed "
              "at {}".format(adjusted_file_path))

def _create_output_directory(output_base_path):
    # From the base path create a testcases subdirectory
    # Return the subdirectory full path
    logging.info("Enter: _create_output_directory")
    global output_path
    output_path = os.path.join(output_base_path, TESTCASES_SUBDIR)
    if os.path.exists(output_path):
        try:
            input("Please note there is already a Testcases directory at - {} .\nPress enter to continue and remove it. Press CNTRL-C to exit.".format(output_path))
            shutil.rmtree(output_path)
        except PermissionError:
            print("There is a test directory located in the same location as the test directory location you specified")
            print("It cannot be removed because another process is still using it. Please close the process or delete yourself.")
            sys.exit()
        except KeyboardInterrupt:
            print("Exiting...")
            sys.exit()
    os.mkdir(output_path)
    logging.info("Exit: _create_output_directory")
    return output_path

def _create_answer_key(output_path):
    # Parse the data file from scenario one and two if it exists and add too answer key
    # Dump data from processes to ANSWER_KEY_NAME in output_path
    logging.info("Enter: _create_answer_key")
    global answer_key
    global scenario_one
    global scenario_two
    scenario_one_latency_data={}
    if os.path.exists(os.path.join(scenario_one, SCENARIO_ONE_DATA_FILE)):
        with open(os.path.join(scenario_one, SCENARIO_ONE_DATA_FILE)) as output_data:
            scenario_one_latency_data[SCENARIO_ONE_DATA_FILE_KEY]=yaml.load(output_data)
    scenario_two_latency_data={}
    if os.path.exists(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)):
        with open(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)) as output_data:
            scenario_two_latency_data[SCENARIO_TWO_DATA_FILE_KEY]=yaml.load(output_data)
    with open(os.path.join(output_path, ANSWER_KEY_NAME), "w") as answer_key_yml:
        yaml.dump(scenario_one_latency_data, answer_key_yml, default_flow_style=False)
        yaml.dump(scenario_two_latency_data, answer_key_yml, default_flow_style=False)
        for question in answer_key:
            yaml_dict={}
            Key = str(ANSWER_KEY_QUESTION_KEY+str(question.question_num))
            yaml_dict[Key] = {X_ANSWER_KEY: question.x_answer_alpha,A_VALUE_KEY: question.A_value,B_VALUE_KEY: question.B_value}
            yaml.dump(yaml_dict, answer_key_yml, default_flow_style=False)
    logging.info("Exit: _create_answer_key")

def _create_temp_dir(root_directory, scenario_one, scenario_two):
    logging.info("Enter: _create_temp_dir")
    # Will create exact copies of both directories specified so files may be altered later
    adjusted_file_path = os.path.join(root_directory, ADJUSTED_AUDIO_SUBDIR)
    scenario_one_temp = os.path.join(adjusted_file_path, SCENARIO_ONE_SUBDIR)
    scenario_two_temp = os.path.join(adjusted_file_path, SCENARIO_TWO_SUBDIR)
    try:
        os.mkdir(adjusted_file_path)
    except FileExistsError:
        print("To properly create ABX tests, the audio files are modified so audio begins play at the same time")
        print("In order to do this, a new directory called 'adjusted_audio' is temporarily created to hold the adjusted audio.")
        input("This directory already exists. Press enter to remove and continue or CTRL-C to quit")
        shutil.rmtree(adjusted_file_path)
        os.mkdir(adjusted_file_path)
    shutil.copytree(scenario_one, scenario_one_temp)
    shutil.copytree(scenario_two, scenario_two_temp)
    logging.info("Exit: _create_temp_dir")
    return adjusted_file_path, scenario_one_temp, scenario_two_temp

def create_A_B_X_cases(A_B_cases_zip_list, output_path):
    """
    Method to create A_B_X testing directories and return the corresponding answer key
    An A file is chosen from either the scenario one or two with a 50/50 probability.
    The B file is then from the scenario not chosen for A. An X file is then created with a 50/50
    probability of being either a duplicate of A or B
    Parameters:
    A_B_cases_zip_list: A list containing absolute file pairs
                        [[scenario_one, scenario_two]...]
    output_path: absolute file path to store testcase directory

    Returns:
    None
    """
    logging.info("Enter: create_A_B_X_cases ")
    global scenario_one
    global scenario_two
    global answer_key
    # create listening directories and record answer to each in answer_log
    for case_num, case in enumerate(A_B_cases_zip_list):
        #MRR I really don't like silently dropping audio pairs. Please just create multiple ABX tests, each with up to 25. Up to you whether you have 3 of 25 and one of 21 or 4 of 24.
        if case_num > MAX_CASE_NUM:
            logging.info("The amount of cases has exceeded 25. Please note that "
                         "the accompanying excel sheet only has 25 answer slots and that it will need to "
                         "be restructured")
            print("The amount of cases has exceeded 25. Please note that "
                  "the accompanying excel sheet only has 25 answer slots and that it will need to "
                  "be restructured")
        test_case_path = os.path.join(output_path, str(case_num))
        try:
            os.mkdir(test_case_path)
        except FileExistsError:
            logging.debug("Could not create test case directory at {} - encountered FileExistsError".format(test_case_path))
            print("Could not create test case directory at {} - encountered FileExistsError".format(test_case_path))
            sys.exit()
        switch_A_B = random.randint(0,1) #If one then A and B are switched. This is so scenario one and two alternate their A and B positions roughly 50% of the time
        # add the wav files
        # pick one to duplicate
        x_answer=random.randint(0,1)
        if switch_A_B:
            # add A
            cmd_command_copy_a = WNDWS_COPY_CMD+" " + case[1] + " "+ os.path.join(test_case_path, A_CASE_NAME+str(case_num)+AUDIO_TYPE)
            os.system(cmd_command_copy_a)
            # add B
            cmd_command_copy_b = WNDWS_COPY_CMD+" " + case[0] + " "+ os.path.join(test_case_path, B_CASE_NAME+str(case_num)+AUDIO_TYPE)
            os.system(cmd_command_copy_b)
            # add X
            if x_answer==1:
                x_answer_alpha=USER_ANSWER_CASE_A
                cmd_command_copy_a = WNDWS_COPY_CMD+" " + case[1] + " "+ os.path.join(test_case_path, X_CASE_NAME+str(case_num)+AUDIO_TYPE)
                os.system(cmd_command_copy_a)
            if x_answer==0:
                x_answer_alpha=USER_ANSWER_CASE_B
                cmd_command_copy_b = WNDWS_COPY_CMD+" " + case[0] + " "+ os.path.join(test_case_path, X_CASE_NAME+str(case_num)+AUDIO_TYPE)
                os.system(cmd_command_copy_b)
            A_value=ANSWER_KEY_SCENARIO_TWO
            B_value=ANSWER_KEY_SCENARIO_ONE
        else:
            # add A
            cmd_command_copy_a = WNDWS_COPY_CMD+" " + case[0] + " "+ os.path.join(test_case_path, A_CASE_NAME+str(case_num)+AUDIO_TYPE)
            os.system(cmd_command_copy_a)
            # add B
            cmd_command_copy_b = WNDWS_COPY_CMD+" " + case[1] + " "+ os.path.join(test_case_path, B_CASE_NAME+str(case_num)+AUDIO_TYPE)
            os.system(cmd_command_copy_b)
            # add X
            if x_answer==0:
                x_answer_alpha=USER_ANSWER_CASE_A
                cmd_command_copy_a = WNDWS_COPY_CMD+" " + case[0] + " "+ os.path.join(test_case_path, X_CASE_NAME+str(case_num)+AUDIO_TYPE)
                os.system(cmd_command_copy_a)
            if x_answer==1:
                x_answer_alpha=USER_ANSWER_CASE_B
                cmd_command_copy_b = WNDWS_COPY_CMD+" " + case[1] + " "+ os.path.join(test_case_path, X_CASE_NAME+str(case_num)+AUDIO_TYPE)
                os.system(cmd_command_copy_b)
            A_value=ANSWER_KEY_SCENARIO_ONE
            B_value=ANSWER_KEY_SCENARIO_TWO
        question_info = Answer(case_num, x_answer_alpha=x_answer_alpha,A_value=A_value, B_value=B_value)
        answer_key.append(question_info)
    logging.info("Exit: create_A_B_X_cases")

def create_manual_tests():
    logging.info("Enter: create_manual_tests")
    global root_directory
    scenario_one, scenario_two, output_base_path=_collect_locations()
    output_path = _create_output_directory(output_base_path)
    # Confirm another answer key does not already exist
    if os.path.exists(os.path.join(output_path, ANSWER_KEY_NAME)):
        input("An answer_key.yml file already exists at - "+output_path+" - this file will be deleted. Press enter if this is okay or CNTRL-C to exit")
        os.remove(os.path.join(output_path, ANSWER_KEY_NAME))
    adjusted_file_path, scenario_one_temp, scenario_two_temp= _create_temp_dir(root_directory, scenario_one, scenario_two)
    print("Please note that to create the manual tests, the latency of each file must be calculated. This takes roughly 30 minutes per 25 recordings. Press Enter to continue.")
    rate_log, correlation_sample_log, correlation_coefficient_log = aa.find_latency_values(scenario_one_temp, scenario_two_temp)
    # Negative value indicates that scenario one signal was delayed. Positive value indicates that scenario two signal was delayed
    file_zip = aa.pair_directories(scenario_one_temp, scenario_two_temp)
    aa.adjust_files(correlation_sample_log, rate_log, file_zip)
    create_A_B_X_cases(file_zip, output_path)
    _cleanup_scenarios(adjusted_file_path)
    _create_answer_key(output_base_path)
    print("done")
    logging.info("Exit: create_manual_tests")

if __name__ =="__main__":
    logging.basicConfig(filename="manualtest.log", level=logging.INFO, format="%(asctime)s %(levelname)s %(module)s line: %(lineno)d, %(message)s")
    logging.info("Enter: main")
    create_manual_tests()
    logging.info("Exit: main")
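For reference, a minimal sketch of how the finished script would be invoked from a Windows command prompt (the script shells out to the Windows copy command, so Windows is assumed). The recording paths below are hypothetical placeholders, not paths taken from a real run:

    python manualtest.py C:\recordings\scenario_one C:\recordings\scenario_two -o C:\listening_tests

Per the argparse definitions above, the two positional arguments are the scenario directories and the optional -o flag redirects the output (defaulting to the working directory). The numbered A/B/X test case folders land in a testcases subdirectory of the -o path, while answer_key.yml is written to the -o path itself, since create_manual_tests passes output_base_path, not the testcases subdirectory, to _create_answer_key.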
flexible
{
 "blob_id": "c6ef9154285dee3b21980801a101ad5e34a50cab",
 "index": 4656,
 "step-1": "<mask token>\n\n\nclass Answer:\n \"\"\"\n Wrapper for A_B_X directory containing all associated attributes. \n Populate all fields of the class and call grade to determine if the \n question was correct\n **user_answers\n user_answer either \"A\" or \"B\" indicating which file sounded better\n user_preference_weight numeric value between 1-5 indicating how much better the \n preferred value was. 5 being significant and 1 minimal\n user_X_value either \"A\" or \"B\" denoting which file the user believes\n X was a duplicate of \n user_answer_confidence numeric value between 1-5 indicating how easy it was to \n distinguish between A and B and pick X\n x_answer_alpha the answer to which file X was a duplicate of. Either \n \"A\" or \"B\"\n A_value String field denoting which scenario A belonged to. Either\n scenario_one or SCENARIO_TWO_SUBDIR\n B_value String field denoting which scenario B belonged to. Either\n scenario_one or SCENARIO_TWO_SUBDIR\n correct Call self.grade to populate this field. Compares user_X_value\n and x_answer_alpha to determine if question was correct. \n Populates with boolean\n \"\"\"\n\n def __init__(self, question_num, **user_answers):\n self.question_num = question_num\n self.correct = None\n try:\n self.user_answer = user_answers[USER_ANSWER_KEY]\n except KeyError:\n self.user_answer = None\n try:\n self.user_preference_weight = user_answers[USER_PREFERENCE_KEY]\n except KeyError:\n self.user_preference_weight = None\n try:\n self.user_X_value = user_answers[USER_X_VALUE_KEY]\n except KeyError:\n self.user_X_value = None\n try:\n self.user_answer_confidence = user_answers[USER_CONFIDENCE_KEY]\n except KeyError:\n self.user_answer_confidence = None\n try:\n self.x_answer_alpha = user_answers[X_ANSWER_KEY]\n except KeyError:\n self.x_answer_alpha = None\n try:\n self.A_value = user_answers[A_VALUE_KEY]\n except KeyError:\n self.A_value = None\n try:\n self.B_value = user_answers[B_VALUE_KEY]\n except KeyError:\n self.B_value = None\n\n def grade(self):\n if self.x_answer_alpha == self.user_X_value:\n self.correct = True\n else:\n self.correct = False\n\n\n<mask token>\n",
 "step-2": "<mask token>\n\n\nclass Answer:\n \"\"\"\n Wrapper for A_B_X directory containing all associated attributes. \n Populate all fields of the class and call grade to determine if the \n question was correct\n **user_answers\n user_answer either \"A\" or \"B\" indicating which file sounded better\n user_preference_weight numeric value between 1-5 indicating how much better the \n preferred value was. 5 being significant and 1 minimal\n user_X_value either \"A\" or \"B\" denoting which file the user believes\n X was a duplicate of \n user_answer_confidence numeric value between 1-5 indicating how easy it was to \n distinguish between A and B and pick X\n x_answer_alpha the answer to which file X was a duplicate of. Either \n \"A\" or \"B\"\n A_value String field denoting which scenario A belonged to. Either\n scenario_one or SCENARIO_TWO_SUBDIR\n B_value String field denoting which scenario B belonged to. Either\n scenario_one or SCENARIO_TWO_SUBDIR\n correct Call self.grade to populate this field. Compares user_X_value\n and x_answer_alpha to determine if question was correct. \n Populates with boolean\n \"\"\"\n\n def __init__(self, question_num, **user_answers):\n self.question_num = question_num\n self.correct = None\n try:\n self.user_answer = user_answers[USER_ANSWER_KEY]\n except KeyError:\n self.user_answer = None\n try:\n self.user_preference_weight = user_answers[USER_PREFERENCE_KEY]\n except KeyError:\n self.user_preference_weight = None\n try:\n self.user_X_value = user_answers[USER_X_VALUE_KEY]\n except KeyError:\n self.user_X_value = None\n try:\n self.user_answer_confidence = user_answers[USER_CONFIDENCE_KEY]\n except KeyError:\n self.user_answer_confidence = None\n try:\n self.x_answer_alpha = user_answers[X_ANSWER_KEY]\n except KeyError:\n self.x_answer_alpha = None\n try:\n self.A_value = user_answers[A_VALUE_KEY]\n except KeyError:\n self.A_value = None\n try:\n self.B_value = user_answers[B_VALUE_KEY]\n except KeyError:\n self.B_value = None\n\n def grade(self):\n if self.x_answer_alpha == self.user_X_value:\n self.correct = True\n else:\n self.correct = False\n\n\n<mask token>\n\n\ndef _cleanup_scenarios(adjusted_file_path):\n try:\n shutil.rmtree(adjusted_file_path)\n except:\n print(\n 'The system could not delete the temporary audio files that were created for this test. This directory can be removed at {}'\n .format(adjusted_file_path))\n\n\ndef _create_output_directory(output_base_path):\n logging.info('Enter: _create_output_directory')\n global output_path\n output_path = os.path.join(output_base_path, TESTCASES_SUBDIR)\n if os.path.exists(output_path):\n try:\n input(\n \"\"\"Please note there is already a Testcases directory at - {} .\nPress enter to continue and remove it. Press CNTRL-C to exit.\"\"\"\n .format(output_path))\n shutil.rmtree(output_path)\n except PermissionError:\n print(\n 'There is a test directory located in the same location as the test directory location you specified'\n )\n print(\n 'It cannot be removed because another process is still using it. Please close the process or delete yourself.'\n )\n sys.exit()\n except KeyboardInterrupt:\n print('Exiting...')\n sys.exit()\n os.mkdir(output_path)\n logging.info('Exit: _create_output_directory')\n return output_path\n\n\ndef _create_answer_key(output_path):\n logging.info('Enter: _create_answer_key')\n global answer_key\n global scenario_one\n global scenario_two\n scenario_one_latency_data = {}\n if os.path.exists(os.path.join(scenario_one, SCENARIO_ONE_DATA_FILE)):\n with open(os.path.join(scenario_one, SCENARIO_ONE_DATA_FILE)\n ) as output_data:\n scenario_one_latency_data[SCENARIO_ONE_DATA_FILE_KEY] = yaml.load(\n output_data)\n scenario_two_latency_data = {}\n if os.path.exists(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)):\n with open(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)\n ) as output_data:\n scenario_two_latency_data[SCENARIO_TWO_DATA_FILE_KEY] = yaml.load(\n output_data)\n with open(os.path.join(output_path, ANSWER_KEY_NAME), 'w'\n ) as answer_key_yml:\n yaml.dump(scenario_one_latency_data, answer_key_yml,\n default_flow_style=False)\n yaml.dump(scenario_two_latency_data, answer_key_yml,\n default_flow_style=False)\n for question in answer_key:\n yaml_dict = {}\n Key = str(ANSWER_KEY_QUESTION_KEY + str(question.question_num))\n yaml_dict[Key] = {X_ANSWER_KEY: question.x_answer_alpha,\n A_VALUE_KEY: question.A_value, B_VALUE_KEY: question.B_value}\n yaml.dump(yaml_dict, answer_key_yml, default_flow_style=False)\n logging.info('Exit: _create_answer_key')\n\n\ndef _create_temp_dir(root_directory, scenario_one, scenario_two):\n logging.info('Enter: _create_temp_dir')\n adjusted_file_path = os.path.join(root_directory, ADJUSTED_AUDIO_SUBDIR)\n scenario_one_temp = os.path.join(adjusted_file_path, SCENARIO_ONE_SUBDIR)\n scenario_two_temp = os.path.join(adjusted_file_path, SCENARIO_TWO_SUBDIR)\n try:\n os.mkdir(adjusted_file_path)\n except FileExistsError:\n print(\n 'To properly create ABX tests, the audio files are modified so audio begins play at the same time'\n )\n print(\n \"In order to do this, a new directory called 'adjusted_audio' is temporarily created to hold the adjusted audio.\"\n )\n input(\n 'This directory already exists. Press enter to remove and continue or CTRL-C to quit'\n )\n shutil.rmtree(adjusted_file_path)\n os.mkdir(adjusted_file_path)\n shutil.copytree(scenario_one, scenario_one_temp)\n shutil.copytree(scenario_two, scenario_two_temp)\n logging.info('Exit: _create_temp_dir')\n return adjusted_file_path, scenario_one_temp, scenario_two_temp\n\n\ndef create_A_B_X_cases(A_B_cases_zip_list, output_path):\n \"\"\"\n Method to create A_B_X testing directories and return the corresponding answer key\n An A file is chosen from either the scenario one or two with a 50/50 probability. \n The B file is then from the scenario not chosen for A. An X file is then created with a 50/50\n probability of being either a duplicate of A or B\n Parameters:\n A_B_cases_zip_list: A list containing absolute file pairs\n [[scenario_one, scenario_two]...]\n output_path: absolute file path to store testcase directory \n\n Returns:\n None\n \"\"\"\n logging.info('Enter: create_A_B_X_cases ')\n global scenario_one\n global scenario_two\n global answer_key\n for case_num, case in enumerate(A_B_cases_zip_list):\n if case_num > MAX_CASE_NUM:\n logging.info(\n 'The amount of cases has exceeded 25. Please note that the accompanying excel sheet only has 25 answer slots and that it will need to be restructured'\n )\n print(\n 'The amount of cases has exceeded 25. Please note that the accompanying excel sheet only has 25 answer slots and that it will need to be restructured'\n )\n test_case_path = os.path.join(output_path, str(case_num))\n try:\n os.mkdir(test_case_path)\n except FileExistsError:\n logging.debug(\n 'Could not create test case directory at {} - encountered FileExistsError'\n .format(test_case_path))\n print(\n 'Could not create test case directory at {} - encountered FileExistsError'\n .format(test_case_path))\n sys.exit()\n switch_A_B = random.randint(0, 1)\n x_answer = random.randint(0, 1)\n if switch_A_B:\n cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[1\n ] + ' ' + os.path.join(test_case_path, A_CASE_NAME + str(\n case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[0\n ] + ' ' + os.path.join(test_case_path, B_CASE_NAME + str(\n case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n if x_answer == 1:\n x_answer_alpha = USER_ANSWER_CASE_A\n cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[1\n ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +\n str(case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n if x_answer == 0:\n x_answer_alpha = USER_ANSWER_CASE_B\n cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[0\n ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +\n str(case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n A_value = ANSWER_KEY_SCENARIO_TWO\n B_value = ANSWER_KEY_SCENARIO_ONE\n else:\n cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[0\n ] + ' ' + os.path.join(test_case_path, A_CASE_NAME + str(\n case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[1\n ] + ' ' + os.path.join(test_case_path, B_CASE_NAME + str(\n case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n if x_answer == 0:\n x_answer_alpha = USER_ANSWER_CASE_A\n cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[0\n ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +\n str(case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n if x_answer == 1:\n x_answer_alpha = USER_ANSWER_CASE_B\n cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[1\n ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +\n str(case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n A_value = ANSWER_KEY_SCENARIO_ONE\n B_value = ANSWER_KEY_SCENARIO_TWO\n question_info = Answer(case_num, x_answer_alpha=x_answer_alpha,\n A_value=A_value, B_value=B_value)\n answer_key.append(question_info)\n logging.info('Exit: create_A_B_X_cases')\n\n\ndef create_manual_tests():\n logging.info('Enter: create_manual_tests')\n global root_directory\n scenario_one, scenario_two, output_base_path = _collect_locations()\n output_path = _create_output_directory(output_base_path)\n if os.path.exists(os.path.join(output_path, ANSWER_KEY_NAME)):\n input('An answer_key.yml file already exists at - ' + output_path +\n ' - this file will be deleted. Press enter if this is okay or CNTRL-C to exit'\n )\n os.remove(os.path.join(output_path, ANSWER_KEY_NAME))\n adjusted_file_path, scenario_one_temp, scenario_two_temp = (\n _create_temp_dir(root_directory, scenario_one, scenario_two))\n print(\n 'Please note that to create the manual tests, the latency of each file must be calculated. This takes roughly 30 minutes per 25 recordings. Press Enter to continue.'\n )\n rate_log, correlation_sample_log, correlation_coefficient_log = (aa.\n find_latency_values(scenario_one_temp, scenario_two_temp))\n file_zip = aa.pair_directories(scenario_one_temp, scenario_two_temp)\n aa.adjust_files(correlation_sample_log, rate_log, file_zip)\n create_A_B_X_cases(file_zip, output_path)\n _cleanup_scenarios(adjusted_file_path)\n _create_answer_key(output_base_path)\n print('done')\n logging.info('Exit: create_manual_tests')\n\n\n<mask token>\n",
 "step-3": "<mask token>\n\n\nclass Answer:\n \"\"\"\n Wrapper for A_B_X directory containing all associated attributes. \n Populate all fields of the class and call grade to determine if the \n question was correct\n **user_answers\n user_answer either \"A\" or \"B\" indicating which file sounded better\n user_preference_weight numeric value between 1-5 indicating how much better the \n preferred value was. 5 being significant and 1 minimal\n user_X_value either \"A\" or \"B\" denoting which file the user believes\n X was a duplicate of \n user_answer_confidence numeric value between 1-5 indicating how easy it was to \n distinguish between A and B and pick X\n x_answer_alpha the answer to which file X was a duplicate of. Either \n \"A\" or \"B\"\n A_value String field denoting which scenario A belonged to. Either\n scenario_one or SCENARIO_TWO_SUBDIR\n B_value String field denoting which scenario B belonged to. Either\n scenario_one or SCENARIO_TWO_SUBDIR\n correct Call self.grade to populate this field. Compares user_X_value\n and x_answer_alpha to determine if question was correct. \n Populates with boolean\n \"\"\"\n\n def __init__(self, question_num, **user_answers):\n self.question_num = question_num\n self.correct = None\n try:\n self.user_answer = user_answers[USER_ANSWER_KEY]\n except KeyError:\n self.user_answer = None\n try:\n self.user_preference_weight = user_answers[USER_PREFERENCE_KEY]\n except KeyError:\n self.user_preference_weight = None\n try:\n self.user_X_value = user_answers[USER_X_VALUE_KEY]\n except KeyError:\n self.user_X_value = None\n try:\n self.user_answer_confidence = user_answers[USER_CONFIDENCE_KEY]\n except KeyError:\n self.user_answer_confidence = None\n try:\n self.x_answer_alpha = user_answers[X_ANSWER_KEY]\n except KeyError:\n self.x_answer_alpha = None\n try:\n self.A_value = user_answers[A_VALUE_KEY]\n except KeyError:\n self.A_value = None\n try:\n self.B_value = user_answers[B_VALUE_KEY]\n except KeyError:\n self.B_value = None\n\n def grade(self):\n if self.x_answer_alpha == self.user_X_value:\n self.correct = True\n else:\n self.correct = False\n\n\ndef _collect_locations():\n logging.info('Enter: _collect_locations')\n global scenario_one\n global scenario_two\n global output_base_path\n if not os.path.exists(scenario_one):\n print('Scenario One file path does not exist. Exiting')\n sys.exit()\n if not os.path.exists(scenario_two):\n print('Scenario Two file path does not exist. Exiting')\n sys.exit()\n print('Creating listening test...')\n logging.info('Exit: _collect_locations')\n return scenario_one, scenario_two, output_base_path\n\n\ndef _cleanup_scenarios(adjusted_file_path):\n try:\n shutil.rmtree(adjusted_file_path)\n except:\n print(\n 'The system could not delete the temporary audio files that were created for this test. This directory can be removed at {}'\n .format(adjusted_file_path))\n\n\ndef _create_output_directory(output_base_path):\n logging.info('Enter: _create_output_directory')\n global output_path\n output_path = os.path.join(output_base_path, TESTCASES_SUBDIR)\n if os.path.exists(output_path):\n try:\n input(\n \"\"\"Please note there is already a Testcases directory at - {} .\nPress enter to continue and remove it. Press CNTRL-C to exit.\"\"\"\n .format(output_path))\n shutil.rmtree(output_path)\n except PermissionError:\n print(\n 'There is a test directory located in the same location as the test directory location you specified'\n )\n print(\n 'It cannot be removed because another process is still using it. Please close the process or delete yourself.'\n )\n sys.exit()\n except KeyboardInterrupt:\n print('Exiting...')\n sys.exit()\n os.mkdir(output_path)\n logging.info('Exit: _create_output_directory')\n return output_path\n\n\ndef _create_answer_key(output_path):\n logging.info('Enter: _create_answer_key')\n global answer_key\n global scenario_one\n global scenario_two\n scenario_one_latency_data = {}\n if os.path.exists(os.path.join(scenario_one, SCENARIO_ONE_DATA_FILE)):\n with open(os.path.join(scenario_one, SCENARIO_ONE_DATA_FILE)\n ) as output_data:\n scenario_one_latency_data[SCENARIO_ONE_DATA_FILE_KEY] = yaml.load(\n output_data)\n scenario_two_latency_data = {}\n if os.path.exists(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)):\n with open(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)\n ) as output_data:\n scenario_two_latency_data[SCENARIO_TWO_DATA_FILE_KEY] = yaml.load(\n output_data)\n with open(os.path.join(output_path, ANSWER_KEY_NAME), 'w'\n ) as answer_key_yml:\n yaml.dump(scenario_one_latency_data, answer_key_yml,\n default_flow_style=False)\n yaml.dump(scenario_two_latency_data, answer_key_yml,\n default_flow_style=False)\n for question in answer_key:\n yaml_dict = {}\n Key = str(ANSWER_KEY_QUESTION_KEY + str(question.question_num))\n yaml_dict[Key] = {X_ANSWER_KEY: question.x_answer_alpha,\n A_VALUE_KEY: question.A_value, B_VALUE_KEY: question.B_value}\n yaml.dump(yaml_dict, answer_key_yml, default_flow_style=False)\n logging.info('Exit: _create_answer_key')\n\n\ndef _create_temp_dir(root_directory, scenario_one, scenario_two):\n logging.info('Enter: _create_temp_dir')\n adjusted_file_path = os.path.join(root_directory, ADJUSTED_AUDIO_SUBDIR)\n scenario_one_temp = os.path.join(adjusted_file_path, SCENARIO_ONE_SUBDIR)\n scenario_two_temp = os.path.join(adjusted_file_path, SCENARIO_TWO_SUBDIR)\n try:\n os.mkdir(adjusted_file_path)\n except FileExistsError:\n print(\n 'To properly create ABX tests, the audio files are modified so audio begins play at the same time'\n )\n print(\n \"In order to do this, a new directory called 'adjusted_audio' is temporarily created to hold the adjusted audio.\"\n )\n input(\n 'This directory already exists. Press enter to remove and continue or CTRL-C to quit'\n )\n shutil.rmtree(adjusted_file_path)\n os.mkdir(adjusted_file_path)\n shutil.copytree(scenario_one, scenario_one_temp)\n shutil.copytree(scenario_two, scenario_two_temp)\n logging.info('Exit: _create_temp_dir')\n return adjusted_file_path, scenario_one_temp, scenario_two_temp\n\n\ndef create_A_B_X_cases(A_B_cases_zip_list, output_path):\n \"\"\"\n Method to create A_B_X testing directories and return the corresponding answer key\n An A file is chosen from either the scenario one or two with a 50/50 probability. \n The B file is then from the scenario not chosen for A. An X file is then created with a 50/50\n probability of being either a duplicate of A or B\n Parameters:\n A_B_cases_zip_list: A list containing absolute file pairs\n [[scenario_one, scenario_two]...]\n output_path: absolute file path to store testcase directory \n\n Returns:\n None\n \"\"\"\n logging.info('Enter: create_A_B_X_cases ')\n global scenario_one\n global scenario_two\n global answer_key\n for case_num, case in enumerate(A_B_cases_zip_list):\n if case_num > MAX_CASE_NUM:\n logging.info(\n 'The amount of cases has exceeded 25. Please note that the accompanying excel sheet only has 25 answer slots and that it will need to be restructured'\n )\n print(\n 'The amount of cases has exceeded 25. Please note that the accompanying excel sheet only has 25 answer slots and that it will need to be restructured'\n )\n test_case_path = os.path.join(output_path, str(case_num))\n try:\n os.mkdir(test_case_path)\n except FileExistsError:\n logging.debug(\n 'Could not create test case directory at {} - encountered FileExistsError'\n .format(test_case_path))\n print(\n 'Could not create test case directory at {} - encountered FileExistsError'\n .format(test_case_path))\n sys.exit()\n switch_A_B = random.randint(0, 1)\n x_answer = random.randint(0, 1)\n if switch_A_B:\n cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[1\n ] + ' ' + os.path.join(test_case_path, A_CASE_NAME + str(\n case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[0\n ] + ' ' + os.path.join(test_case_path, B_CASE_NAME + str(\n case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n if x_answer == 1:\n x_answer_alpha = USER_ANSWER_CASE_A\n cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[1\n ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +\n str(case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n if x_answer == 0:\n x_answer_alpha = USER_ANSWER_CASE_B\n cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[0\n ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +\n str(case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n A_value = ANSWER_KEY_SCENARIO_TWO\n B_value = ANSWER_KEY_SCENARIO_ONE\n else:\n cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[0\n ] + ' ' + os.path.join(test_case_path, A_CASE_NAME + str(\n case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[1\n ] + ' ' + os.path.join(test_case_path, B_CASE_NAME + str(\n case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n if x_answer == 0:\n x_answer_alpha = USER_ANSWER_CASE_A\n cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[0\n ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +\n str(case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n if x_answer == 1:\n x_answer_alpha = USER_ANSWER_CASE_B\n cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[1\n ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +\n str(case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n A_value = ANSWER_KEY_SCENARIO_ONE\n B_value = ANSWER_KEY_SCENARIO_TWO\n question_info = Answer(case_num, x_answer_alpha=x_answer_alpha,\n A_value=A_value, B_value=B_value)\n answer_key.append(question_info)\n logging.info('Exit: create_A_B_X_cases')\n\n\ndef create_manual_tests():\n logging.info('Enter: create_manual_tests')\n global root_directory\n scenario_one, scenario_two, output_base_path = _collect_locations()\n output_path = _create_output_directory(output_base_path)\n if os.path.exists(os.path.join(output_path, ANSWER_KEY_NAME)):\n input('An answer_key.yml file already exists at - ' + output_path +\n ' - this file will be deleted. Press enter if this is okay or CNTRL-C to exit'\n )\n os.remove(os.path.join(output_path, ANSWER_KEY_NAME))\n adjusted_file_path, scenario_one_temp, scenario_two_temp = (\n _create_temp_dir(root_directory, scenario_one, scenario_two))\n print(\n 'Please note that to create the manual tests, the latency of each file must be calculated. This takes roughly 30 minutes per 25 recordings. Press Enter to continue.'\n )\n rate_log, correlation_sample_log, correlation_coefficient_log = (aa.\n find_latency_values(scenario_one_temp, scenario_two_temp))\n file_zip = aa.pair_directories(scenario_one_temp, scenario_two_temp)\n aa.adjust_files(correlation_sample_log, rate_log, file_zip)\n create_A_B_X_cases(file_zip, output_path)\n _cleanup_scenarios(adjusted_file_path)\n _create_answer_key(output_base_path)\n print('done')\n logging.info('Exit: create_manual_tests')\n\n\n<mask token>\n",
 "step-4": "<mask token>\nhelp_string = \"\"\"\nPlease note that manual_test.py makes 3 assumptions about these file paths. \n1.Both scenarios contain the same amount of wav files.\n2.The wav files in both scenarios have a one to one correspondence between each other. Each test case contains a pair of files, one from each scenario. This pair is made by matching files between scenarios with the same names.\n3.There are no more than 25 audio file pairs\"\"\"\nparser = argparse.ArgumentParser(description=\n 'Script to create a listening test. The output, test case directory and answer_key.yml file, can be found in the root directory.'\n + help_string)\nparser.add_argument('-o', dest='output_base_path', default=os.getcwd(), help=\n '(optional)Absolute file path to location to save test directory and answer key (default: root directory)'\n )\nparser.add_argument('scenario_one', help=\n 'Absolute file path to location of first scenario. Required')\nparser.add_argument('scenario_two', help=\n 'Absolute file path to location of second scenario. Required')\nargs = parser.parse_args()\noutput_base_path = args.output_base_path\nroot_directory = os.getcwd()\nscenario_one = args.scenario_one\nscenario_one_latency = 0\nscenario_one_correlation_coefficient = 0\nscenario_two = args.scenario_two\nscenario_two_latency = 0\nscenario_two_correlation_coefficient = 0\noutput_path = ''\nanswer_key = []\nUSER_ANSWER_KEY = 'user_answer'\nUSER_PREFERENCE_KEY = 'user_preference_weight'\nUSER_X_VALUE_KEY = 'user_X_value'\nUSER_CONFIDENCE_KEY = 'user_answer_confidence'\nX_ANSWER_KEY = 'x_answer_alpha'\nA_VALUE_KEY = 'A_value'\nB_VALUE_KEY = 'B_value'\nTESTCASES_SUBDIR = 'testcases'\nA_CASE_NAME = 'A_'\nB_CASE_NAME = 'B_'\nX_CASE_NAME = 'X_'\nWNDWS_COPY_CMD = 'copy'\nAUDIO_TYPE = '.wav'\nSCENARIO_ONE_DATA_FILE = 'output_data.yml'\nSCENARIO_ONE_DATA_FILE_KEY = 'Scenario One'\nSCENARIO_TWO_DATA_FILE = 'output_data.yml'\nSCENARIO_TWO_DATA_FILE_KEY = 'Scenario Two'\nANSWER_KEY_NAME = 'answer_key.yml'\nUSER_ANSWER_CASE_A = 'A'\nUSER_ANSWER_CASE_B = 'B'\nANSWER_KEY_SCENARIO_ONE = 'scenario one'\nANSWER_KEY_SCENARIO_TWO = 'scenario two'\nANSWER_KEY_QUESTION_KEY = 'Q_'\nMAX_CASE_NUM = 24\nADJUSTED_AUDIO_SUBDIR = 'adjusted_audio'\nSCENARIO_ONE_SUBDIR = 'scenario_one'\nSCENARIO_TWO_SUBDIR = 'scenario_two'\n\n\nclass Answer:\n \"\"\"\n Wrapper for A_B_X directory containing all associated attributes. \n Populate all fields of the class and call grade to determine if the \n question was correct\n **user_answers\n user_answer either \"A\" or \"B\" indicating which file sounded better\n user_preference_weight numeric value between 1-5 indicating how much better the \n preferred value was. 5 being significant and 1 minimal\n user_X_value either \"A\" or \"B\" denoting which file the user believes\n X was a duplicate of \n user_answer_confidence numeric value between 1-5 indicating how easy it was to \n distinguish between A and B and pick X\n x_answer_alpha the answer to which file X was a duplicate of. Either \n \"A\" or \"B\"\n A_value String field denoting which scenario A belonged to. Either\n scenario_one or SCENARIO_TWO_SUBDIR\n B_value String field denoting which scenario B belonged to. Either\n scenario_one or SCENARIO_TWO_SUBDIR\n correct Call self.grade to populate this field. Compares user_X_value\n and x_answer_alpha to determine if question was correct. \n Populates with boolean\n \"\"\"\n\n def __init__(self, question_num, **user_answers):\n self.question_num = question_num\n self.correct = None\n try:\n self.user_answer = user_answers[USER_ANSWER_KEY]\n except KeyError:\n self.user_answer = None\n try:\n self.user_preference_weight = user_answers[USER_PREFERENCE_KEY]\n except KeyError:\n self.user_preference_weight = None\n try:\n self.user_X_value = user_answers[USER_X_VALUE_KEY]\n except KeyError:\n self.user_X_value = None\n try:\n self.user_answer_confidence = user_answers[USER_CONFIDENCE_KEY]\n except KeyError:\n self.user_answer_confidence = None\n try:\n self.x_answer_alpha = user_answers[X_ANSWER_KEY]\n except KeyError:\n self.x_answer_alpha = None\n try:\n self.A_value = user_answers[A_VALUE_KEY]\n except KeyError:\n self.A_value = None\n try:\n self.B_value = user_answers[B_VALUE_KEY]\n except KeyError:\n self.B_value = None\n\n def grade(self):\n if self.x_answer_alpha == self.user_X_value:\n self.correct = True\n else:\n self.correct = False\n\n\ndef _collect_locations():\n logging.info('Enter: _collect_locations')\n global scenario_one\n global scenario_two\n global output_base_path\n if not os.path.exists(scenario_one):\n print('Scenario One file path does not exist. Exiting')\n sys.exit()\n if not os.path.exists(scenario_two):\n print('Scenario Two file path does not exist. Exiting')\n sys.exit()\n print('Creating listening test...')\n logging.info('Exit: _collect_locations')\n return scenario_one, scenario_two, output_base_path\n\n\ndef _cleanup_scenarios(adjusted_file_path):\n try:\n shutil.rmtree(adjusted_file_path)\n except:\n print(\n 'The system could not delete the temporary audio files that were created for this test. This directory can be removed at {}'\n .format(adjusted_file_path))\n\n\ndef _create_output_directory(output_base_path):\n logging.info('Enter: _create_output_directory')\n global output_path\n output_path = os.path.join(output_base_path, TESTCASES_SUBDIR)\n if os.path.exists(output_path):\n try:\n input(\n \"\"\"Please note there is already a Testcases directory at - {} .\nPress enter to continue and remove it. Press CNTRL-C to exit.\"\"\"\n .format(output_path))\n shutil.rmtree(output_path)\n except PermissionError:\n print(\n 'There is a test directory located in the same location as the test directory location you specified'\n )\n print(\n 'It cannot be removed because another process is still using it. Please close the process or delete yourself.'\n )\n sys.exit()\n except KeyboardInterrupt:\n print('Exiting...')\n sys.exit()\n os.mkdir(output_path)\n logging.info('Exit: _create_output_directory')\n return output_path\n\n\ndef _create_answer_key(output_path):\n logging.info('Enter: _create_answer_key')\n global answer_key\n global scenario_one\n global scenario_two\n scenario_one_latency_data = {}\n if os.path.exists(os.path.join(scenario_one, SCENARIO_ONE_DATA_FILE)):\n with open(os.path.join(scenario_one, SCENARIO_ONE_DATA_FILE)\n ) as output_data:\n scenario_one_latency_data[SCENARIO_ONE_DATA_FILE_KEY] = yaml.load(\n output_data)\n scenario_two_latency_data = {}\n if os.path.exists(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)):\n with open(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)\n ) as output_data:\n scenario_two_latency_data[SCENARIO_TWO_DATA_FILE_KEY] = yaml.load(\n output_data)\n with open(os.path.join(output_path, ANSWER_KEY_NAME), 'w'\n ) as answer_key_yml:\n yaml.dump(scenario_one_latency_data, answer_key_yml,\n default_flow_style=False)\n yaml.dump(scenario_two_latency_data, answer_key_yml,\n default_flow_style=False)\n for question in answer_key:\n yaml_dict = {}\n Key = str(ANSWER_KEY_QUESTION_KEY + str(question.question_num))\n yaml_dict[Key] = {X_ANSWER_KEY: question.x_answer_alpha,\n A_VALUE_KEY: question.A_value, B_VALUE_KEY: question.B_value}\n yaml.dump(yaml_dict, answer_key_yml, default_flow_style=False)\n logging.info('Exit: _create_answer_key')\n\n\ndef _create_temp_dir(root_directory, scenario_one, scenario_two):\n logging.info('Enter: _create_temp_dir')\n adjusted_file_path = os.path.join(root_directory, ADJUSTED_AUDIO_SUBDIR)\n scenario_one_temp = os.path.join(adjusted_file_path, SCENARIO_ONE_SUBDIR)\n scenario_two_temp = os.path.join(adjusted_file_path, SCENARIO_TWO_SUBDIR)\n try:\n os.mkdir(adjusted_file_path)\n except FileExistsError:\n print(\n 'To properly create ABX tests, the audio files are modified so audio begins play at the same time'\n )\n print(\n \"In order to do this, a new directory called 'adjusted_audio' is temporarily created to hold the adjusted audio.\"\n )\n input(\n 'This directory already exists. Press enter to remove and continue or CTRL-C to quit'\n )\n shutil.rmtree(adjusted_file_path)\n os.mkdir(adjusted_file_path)\n shutil.copytree(scenario_one, scenario_one_temp)\n shutil.copytree(scenario_two, scenario_two_temp)\n logging.info('Exit: _create_temp_dir')\n return adjusted_file_path, scenario_one_temp, scenario_two_temp\n\n\ndef create_A_B_X_cases(A_B_cases_zip_list, output_path):\n \"\"\"\n Method to create A_B_X testing directories and return the corresponding answer key\n An A file is chosen from either the scenario one or two with a 50/50 probability. \n The B file is then from the scenario not chosen for A. An X file is then created with a 50/50\n probability of being either a duplicate of A or B\n Parameters:\n A_B_cases_zip_list: A list containing absolute file pairs\n [[scenario_one, scenario_two]...]\n output_path: absolute file path to store testcase directory \n\n Returns:\n None\n \"\"\"\n logging.info('Enter: create_A_B_X_cases ')\n global scenario_one\n global scenario_two\n global answer_key\n for case_num, case in enumerate(A_B_cases_zip_list):\n if case_num > MAX_CASE_NUM:\n logging.info(\n 'The amount of cases has exceeded 25. Please note that the accompanying excel sheet only has 25 answer slots and that it will need to be restructured'\n )\n print(\n 'The amount of cases has exceeded 25. Please note that the accompanying excel sheet only has 25 answer slots and that it will need to be restructured'\n )\n test_case_path = os.path.join(output_path, str(case_num))\n try:\n os.mkdir(test_case_path)\n except FileExistsError:\n logging.debug(\n 'Could not create test case directory at {} - encountered FileExistsError'\n .format(test_case_path))\n print(\n 'Could not create test case directory at {} - encountered FileExistsError'\n .format(test_case_path))\n sys.exit()\n switch_A_B = random.randint(0, 1)\n x_answer = random.randint(0, 1)\n if switch_A_B:\n cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[1\n ] + ' ' + os.path.join(test_case_path, A_CASE_NAME + str(\n case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[0\n ] + ' ' + os.path.join(test_case_path, B_CASE_NAME + str(\n case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n if x_answer == 1:\n x_answer_alpha = USER_ANSWER_CASE_A\n cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[1\n ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +\n str(case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n if x_answer == 0:\n x_answer_alpha = USER_ANSWER_CASE_B\n cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[0\n ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +\n str(case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n A_value = ANSWER_KEY_SCENARIO_TWO\n B_value = ANSWER_KEY_SCENARIO_ONE\n else:\n cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[0\n ] + ' ' + os.path.join(test_case_path, A_CASE_NAME + str(\n case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[1\n ] + ' ' + os.path.join(test_case_path, B_CASE_NAME + str(\n case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n if x_answer == 0:\n x_answer_alpha = USER_ANSWER_CASE_A\n cmd_command_copy_a = WNDWS_COPY_CMD + ' ' + case[0\n ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +\n str(case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n if x_answer == 1:\n x_answer_alpha = USER_ANSWER_CASE_B\n cmd_command_copy_b = WNDWS_COPY_CMD + ' ' + case[1\n ] + ' ' + os.path.join(test_case_path, X_CASE_NAME +\n str(case_num) + AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n A_value = ANSWER_KEY_SCENARIO_ONE\n B_value = ANSWER_KEY_SCENARIO_TWO\n question_info = Answer(case_num, x_answer_alpha=x_answer_alpha,\n A_value=A_value, B_value=B_value)\n answer_key.append(question_info)\n logging.info('Exit: create_A_B_X_cases')\n\n\ndef create_manual_tests():\n logging.info('Enter: create_manual_tests')\n global root_directory\n scenario_one, scenario_two, output_base_path = _collect_locations()\n output_path = _create_output_directory(output_base_path)\n if os.path.exists(os.path.join(output_path, ANSWER_KEY_NAME)):\n input('An answer_key.yml file already exists at - ' + output_path +\n ' - this file will be deleted. Press enter if this is okay or CNTRL-C to exit'\n )\n os.remove(os.path.join(output_path, ANSWER_KEY_NAME))\n adjusted_file_path, scenario_one_temp, scenario_two_temp = (\n _create_temp_dir(root_directory, scenario_one, scenario_two))\n print(\n 'Please note that to create the manual tests, the latency of each file must be calculated. This takes roughly 30 minutes per 25 recordings. Press Enter to continue.'\n )\n rate_log, correlation_sample_log, correlation_coefficient_log = (aa.\n find_latency_values(scenario_one_temp, scenario_two_temp))\n file_zip = aa.pair_directories(scenario_one_temp, scenario_two_temp)\n aa.adjust_files(correlation_sample_log, rate_log, file_zip)\n create_A_B_X_cases(file_zip, output_path)\n _cleanup_scenarios(adjusted_file_path)\n _create_answer_key(output_base_path)\n print('done')\n logging.info('Exit: create_manual_tests')\n\n\nif __name__ == '__main__':\n logging.basicConfig(filename='manualtest.log', level=logging.INFO,\n format=\n '%(asctime)s %(levelname)s %(module)s line: %(lineno)d, %(message)s')\n logging.info('Enter: main')\n create_manual_tests()\n logging.info('Exit: main')\n",
 "step-5": "\nimport os \nimport yaml\nimport sys\nimport random\nimport shutil\nimport openpyxl\nimport audioanalysis as aa\nimport numpy as np\nimport argparse\nimport logging\n\"\"\"\nmanualtest.py\n\nScript to create a listening test. The output, test \ncase directory and answer_key.yml file, can be \nfound in the root directory.\n\nmanual test creation\nresponsibilities:\n1) directory of directories that each contain two files to compare(a,b) and a duplicated one (x)\n example scenarios to test:\n JITTER_BUFFER_INIT_X VS. JITTER_BUFFER_INIT_Y\n dev version vs dev version\n need to come up with more\n2) an output yaml file labeled answer_key.yml that says which (a,b) is x \n\n\"\"\"\n# command line parse\nhelp_string = (\"\\nPlease note that manual_test.py makes 3 assumptions about \"\n \"these file paths. \"\n \"\\n1.Both scenarios contain the same amount of wav files.\"\n \"\\n2.The wav files in both scenarios have a one to one \"\n \"correspondence between each other. Each test case contains a \"\n \"pair of files, one from each scenario. This pair is made by \"\n \"matching files between scenarios with the same names.\"\n \"\\n3.There are no more than 25 audio file pairs\")\n\nparser = argparse.ArgumentParser(description=\"Script to create a listening test. The output, test case directory and answer_key.yml file, can be found in the root directory.\"+help_string)\nparser.add_argument(\"-o\", dest=\"output_base_path\", default= os.getcwd(),help=\"(optional)Absolute file path to location to save test directory and answer key (default: root directory)\")\nparser.add_argument(\"scenario_one\", help=\"Absolute file path to location of first scenario. Required\")\nparser.add_argument(\"scenario_two\", help=\"Absolute file path to location of second scenario. 
Required\")\nargs=parser.parse_args()\n\n# globals\noutput_base_path=args.output_base_path\nroot_directory = os.getcwd()\n# first scenario\nscenario_one = args.scenario_one\nscenario_one_latency=0\nscenario_one_correlation_coefficient=0\n# second scenario\nscenario_two = args.scenario_two\nscenario_two_latency=0\nscenario_two_correlation_coefficient=0\noutput_path=\"\"\nanswer_key=[]\n\nUSER_ANSWER_KEY=\"user_answer\"\nUSER_PREFERENCE_KEY=\"user_preference_weight\"\nUSER_X_VALUE_KEY=\"user_X_value\"\nUSER_CONFIDENCE_KEY=\"user_answer_confidence\"\nX_ANSWER_KEY=\"x_answer_alpha\"\nA_VALUE_KEY=\"A_value\"\nB_VALUE_KEY=\"B_value\"\nTESTCASES_SUBDIR=\"testcases\"\nA_CASE_NAME=\"A_\"\nB_CASE_NAME=\"B_\"\nX_CASE_NAME=\"X_\"\nWNDWS_COPY_CMD=\"copy\"\nAUDIO_TYPE=\".wav\"\nSCNEARIO_ONE_DATA_FILE=\"output_data.yml\"\nSCENARIO_ONE_DATA_FILE_KEY=\"Scenario One\"\nSCENARIO_TWO_DATA_FILE=\"output_data.yml\"\nSCENARIO_TWO_DATA_FILE_KEY=\"Scenario Two\"\nANSWER_KEY_NAME=\"answer_key.yml\"\nUSER_ANSWER_CASE_A=\"A\"\nUSER_ANSWER_CASE_B=\"B\"\nANSWER_KEY_SCENARIO_ONE=\"scenario one\"\nANSWER_KEY_SCENARIO_TWO=\"scenario two\"\nANSWER_KEY_QUESTION_KEY=\"Q_\"\nMAX_CASE_NUM=24\nADJUSTED_AUDIO_SUBDIR=\"adjusted_audio\"\nSCENARIO_ONE_SUBDIR=\"scenario_one\"\nSCENARIO_TWO_SUBDIR=\"scenario_two\"\n\nclass Answer():\n \"\"\"\n Wrapper for A_B_X directory containing all associated attributes. \n Populate all fields of the class and call grade to determine if the \n question was correct\n **user_answers\n user_answer either \"A\" or \"B\" indicating which file sounded better\n user_preference_weight numeric value between 1-5 indicating how much better the \n preferred value was. 5 being significant and 1 minimal\n user_X_value either \"A\" or \"B\" denoting which file the user believes\n X was a duplicate of \n user_answer_confidence numeric value between 1-5 indicating how easy it was to \n distinguish between A and B and pick X\n x_answer_alpha the answer to which file X was a duplicate of. Either \n \"A\" or \"B\"\n A_value String field denoting which scenario A belonged to. Either\n scenario_one or SCENARIO_TWO_SUBDIR\n B_value String field denoting which scenario B belonged to. Either\n scenario_one or SCENARIO_TWO_SUBDIR\n correct Call self.grade to populate this field. Compares user_X_value\n and x_answer_alpha to determine if question was correct. 
\n Populates with boolean\n \"\"\"\n def __init__(self, question_num, **user_answers):\n self.question_num=question_num\n self.correct = None\n try:\n self.user_answer=user_answers[USER_ANSWER_KEY]\n except KeyError:\n self.user_answer=None\n try:\n self.user_preference_weight=user_answers[USER_PREFERENCE_KEY]\n except KeyError: \n self.user_preference_weight=None\n try:\n self.user_X_value=user_answers[USER_X_VALUE_KEY]\n except KeyError:\n self.user_X_value=None\n try:\n self.user_answer_confidence=user_answers[USER_CONFIDENCE_KEY]\n except KeyError:\n self.user_answer_confidence=None\n try:\n self.x_answer_alpha=user_answers[X_ANSWER_KEY]\n except KeyError:\n self.x_answer_alpha=None\n try: \n self.A_value=user_answers[A_VALUE_KEY]\n except KeyError:\n self.A_value=None \n try:\n self.B_value=user_answers[B_VALUE_KEY]\n except KeyError:\n self.B_value=None\n\n def grade(self):\n if self.x_answer_alpha==self.user_X_value:\n self.correct=True\n else:\n self.correct=False\n\n\ndef _collect_locations():\n # Method to pair all the files for comparison in the two scenarios the user has elected to compare \n logging.info(\"Enter: _collect_locations\")\n global scenario_one\n global scenario_two\n global output_base_path\n if not os.path.exists(scenario_one):\n print(\"Scenario One file path does not exist. Exiting\")\n sys.exit()\n if not os.path.exists(scenario_two):\n print(\"Scenario Two file path does not exist. Exiting\")\n sys.exit()\n print(\"Creating listening test...\")\n logging.info(\"Exit: _collect_locations\")\n return scenario_one, scenario_two, output_base_path\n \n\ndef _cleanup_scenarios(adjusted_file_path):\n # Delete the adjusted audio created for this module\n try:\n shutil.rmtree(adjusted_file_path)\n except:\n print(\"The system could not delete the temporary audio files that \"\n \"were created for this test. This directory can be removed \"\n \"at {}\".format(adjusted_file_path))\n\n\ndef _create_output_directory(output_base_path):\n # From the base path create a testcases subdirectory\n # Return the subdirectory full path\n logging.info(\"Enter: _create_output_directory\")\n global output_path \n output_path = os.path.join(output_base_path, TESTCASES_SUBDIR)\n if os.path.exists(output_path):\n try:\n input(\"Please note there is already a Testcases directory at - {} .\\nPress enter to continue and remove it. Press CTRL-C to exit.\".format(output_path))\n shutil.rmtree(output_path)\n except PermissionError:\n print(\"There is a test directory located in the same location as the test directory location you specified\")\n print(\"It cannot be removed because another process is still using it. 
Please close the process or delete it yourself.\")\n sys.exit()\n except KeyboardInterrupt:\n print(\"Exiting...\")\n sys.exit()\n os.mkdir(output_path)\n logging.info(\"Exit: _create_output_directory\")\n return output_path\n\n\ndef _create_answer_key(output_path):\n # Parse the data file from scenario one and two if it exists and add to answer key\n # Dump data from processes to ANSWER_KEY_NAME in output_path\n logging.info(\"Enter: _create_answer_key\")\n global answer_key\n global scenario_one\n global scenario_two\n scenario_one_latency_data={}\n if os.path.exists(os.path.join(scenario_one, SCNEARIO_ONE_DATA_FILE)):\n with open(os.path.join(scenario_one, SCNEARIO_ONE_DATA_FILE)) as output_data:\n scenario_one_latency_data[SCENARIO_ONE_DATA_FILE_KEY]=yaml.load(output_data)\n scenario_two_latency_data={}\n if os.path.exists(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)):\n with open(os.path.join(scenario_two, SCENARIO_TWO_DATA_FILE)) as output_data:\n scenario_two_latency_data[SCENARIO_TWO_DATA_FILE_KEY]=yaml.load(output_data)\n\n with open(os.path.join(output_path, ANSWER_KEY_NAME), \"w\") as answer_key_yml:\n yaml.dump(scenario_one_latency_data, answer_key_yml, default_flow_style=False)\n yaml.dump(scenario_two_latency_data, answer_key_yml, default_flow_style=False)\n for question in answer_key:\n yaml_dict={}\n Key = str(ANSWER_KEY_QUESTION_KEY+str(question.question_num))\n yaml_dict[Key] = {X_ANSWER_KEY: question.x_answer_alpha,A_VALUE_KEY: question.A_value,B_VALUE_KEY: question.B_value}\n yaml.dump(yaml_dict, answer_key_yml, default_flow_style=False)\n logging.info(\"Exit: _create_answer_key\")\n\n\ndef _create_temp_dir(root_directory, scenario_one, scenario_two):\n logging.info(\"Enter: _create_temp_dir\")\n # Will create exact copies of both directories specified so files may be altered later\n adjusted_file_path = os.path.join(root_directory, ADJUSTED_AUDIO_SUBDIR)\n scenario_one_temp = os.path.join(adjusted_file_path, SCENARIO_ONE_SUBDIR)\n scenario_two_temp = os.path.join(adjusted_file_path, SCENARIO_TWO_SUBDIR)\n try:\n os.mkdir(adjusted_file_path)\n except FileExistsError:\n print(\"To properly create ABX tests, the audio files are modified so audio begins play at the same time\")\n print(\"In order to do this, a new directory called 'adjusted_audio' is temporarily created to hold the adjusted audio.\")\n input(\"This directory already exists. Press enter to remove and continue or CTRL-C to quit\")\n shutil.rmtree(adjusted_file_path)\n os.mkdir(adjusted_file_path)\n shutil.copytree(scenario_one, scenario_one_temp)\n shutil.copytree(scenario_two, scenario_two_temp)\n logging.info(\"Exit: _create_temp_dir\")\n return adjusted_file_path, scenario_one_temp, scenario_two_temp\n\n\ndef create_A_B_X_cases(A_B_cases_zip_list, output_path):\n \"\"\"\n Method to create A_B_X testing directories and return the corresponding answer key\n An A file is chosen from either the scenario one or two with a 50/50 probability. \n The B file is then from the scenario not chosen for A. 
An X file is then created with a 50/50\n probability of being either a duplicate of A or B\n Parameters:\n A_B_cases_zip_list: A list containing absolute file pairs\n [[scenario_one, scenario_two]...]\n output_path: absolute file path to store testcase directory \n\n Returns:\n None\n \"\"\"\n logging.info(\"Enter: create_A_B_X_cases \")\n global scenario_one\n global scenario_two\n global answer_key\n # create listening directories and record answer to each in answer_log\n for case_num, case in enumerate(A_B_cases_zip_list):\n #MRR I really don't like silently dropping audio pairs. Please just create multiple ABX tests, each with up to 25. Up to you whether you have 3 of 25 and one of 21 or 4 of 24.\n if case_num > MAX_CASE_NUM:\n logging.info(\"The amount of cases has exceeded 25. Please note that \"\n \"the accompanying excel sheet only has 25 answer slots and that it will need to \"\n \"be restructured\") \n print(\"The amount of cases has exceeded 25. Please note that \"\n \"the accompanying excel sheet only has 25 answer slots and that it will need to \"\n \"be restructured\")\n test_case_path = os.path.join(output_path, str(case_num))\n try:\n os.mkdir(test_case_path)\n except FileExistsError:\n logging.debug(\"Could not create test case directory at {} - encountered FileExistsError\".format(test_case_path))\n print(\"Could not create test case directory at {} - encountered FileExistsError\".format(test_case_path))\n sys.exit()\n switch_A_B = random.randint(0,1) #If one then A and B are switched. This is so scenario one and two alternate their A and B positions roughly 50% of the time\n # add the wav files\n # pick one to duplicate\n x_answer=random.randint(0,1)\n if switch_A_B:\n # add A\n cmd_command_copy_a = WNDWS_COPY_CMD+\" \" + case[1] + \" \"+ os.path.join(test_case_path, A_CASE_NAME+str(case_num)+AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n # add B \n cmd_command_copy_b = WNDWS_COPY_CMD+\" \" + case[0] + \" \"+ os.path.join(test_case_path, B_CASE_NAME+str(case_num)+AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n # add X\n if x_answer==1:\n x_answer_alpha=USER_ANSWER_CASE_A\n cmd_command_copy_a = WNDWS_COPY_CMD+\" \" + case[1] + \" \"+ os.path.join(test_case_path, X_CASE_NAME+str(case_num)+AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n if x_answer==0:\n x_answer_alpha=USER_ANSWER_CASE_B\n cmd_command_copy_b = WNDWS_COPY_CMD+\" \" + case[0] + \" \"+ os.path.join(test_case_path, X_CASE_NAME+str(case_num)+AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n A_value=ANSWER_KEY_SCENARIO_TWO\n B_value=ANSWER_KEY_SCENARIO_ONE\n else:\n # add A\n cmd_command_copy_a = WNDWS_COPY_CMD+\" \" + case[0] + \" \"+ os.path.join(test_case_path, A_CASE_NAME+str(case_num)+AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n # add B \n cmd_command_copy_b = WNDWS_COPY_CMD+\" \" + case[1] + \" \"+ os.path.join(test_case_path, B_CASE_NAME+str(case_num)+AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n # add X\n if x_answer==0:\n x_answer_alpha=USER_ANSWER_CASE_A\n cmd_command_copy_a = WNDWS_COPY_CMD+\" \" + case[0] + \" \"+ os.path.join(test_case_path, X_CASE_NAME+str(case_num)+AUDIO_TYPE)\n os.system(cmd_command_copy_a)\n if x_answer==1:\n x_answer_alpha=USER_ANSWER_CASE_B\n cmd_command_copy_b = WNDWS_COPY_CMD+\" \" + case[1] + \" \"+ os.path.join(test_case_path, X_CASE_NAME+str(case_num)+AUDIO_TYPE)\n os.system(cmd_command_copy_b)\n A_value=ANSWER_KEY_SCENARIO_ONE\n B_value=ANSWER_KEY_SCENARIO_TWO\n question_info = Answer(case_num, x_answer_alpha=x_answer_alpha,A_value=A_value, B_value=B_value)\n 
answer_key.append(question_info)\n logging.info(\"Exit: create_A_B_X_cases\")\n \n\ndef create_manual_tests():\n logging.info(\"Enter: create_manual_tests\")\n global root_directory\n scenario_one, scenario_two, output_base_path=_collect_locations()\n output_path = _create_output_directory(output_base_path)\n # Confirm another answer key does not already exist\n if os.path.exists(os.path.join(output_path, ANSWER_KEY_NAME)):\n input(\"An answer_key.yml file already exists at - \"+output_path+\" - this file will be deleted. Press enter if this is okay or CTRL-C to exit\")\n os.remove(os.path.join(output_path, ANSWER_KEY_NAME))\n adjusted_file_path, scenario_one_temp, scenario_two_temp= _create_temp_dir(root_directory, scenario_one, scenario_two)\n print(\"Please note that to create the manual tests, the latency of each file must be calculated. This takes roughly 30 minutes per 25 recordings. Press Enter to continue.\")\n rate_log, correlation_sample_log, correlation_coefficient_log = aa.find_latency_values(scenario_one_temp, scenario_two_temp)\n # Negative value indicates that scenario one signal was delayed. Positive value indicates that scenario two signal was delayed\n file_zip = aa.pair_directories(scenario_one_temp, scenario_two_temp)\n aa.adjust_files(correlation_sample_log, rate_log, file_zip)\n create_A_B_X_cases(file_zip, output_path)\n _cleanup_scenarios(adjusted_file_path)\n _create_answer_key(output_base_path)\n print(\"done\")\n logging.info(\"Exit: create_manual_tests\")\n\n\nif __name__ ==\"__main__\":\n logging.basicConfig(filename=\"manualtest.log\", level=logging.INFO, format=\"%(asctime)s %(levelname)s %(module)s line: %(lineno)d, %(message)s\")\n logging.info(\"Enter: main\")\n create_manual_tests()\n logging.info(\"Exit: main\")\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", "step-ids": [ 4, 10, 11, 13, 15 ] }
[ 4, 10, 11, 13, 15 ]
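The record above revolves around ABX grading: X duplicates either A or B, and a response counts as correct exactly when the listener's X pick matches the stored answer. A minimal stand-alone sketch of that rule follows; AbxAnswer is a hypothetical stand-in, not the record's Answer class.

# Minimal sketch of the ABX grading rule used by the record above.
# AbxAnswer is a hypothetical stand-in, not dataset content.
class AbxAnswer:
    def __init__(self, x_answer_alpha, user_X_value):
        self.x_answer_alpha = x_answer_alpha  # answer key: "A" or "B"
        self.user_X_value = user_X_value      # listener's pick: "A" or "B"
        self.correct = None

    def grade(self):
        # Correct exactly when the listener's pick matches the key.
        self.correct = self.x_answer_alpha == self.user_X_value
        return self.correct

assert AbxAnswer("A", "A").grade() is True
assert AbxAnswer("B", "A").grade() is False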
<|reserved_special_token_0|>


class Cart:

    def __init__(self):
        self.book_list = []
        self.total = 0
        self.save = 0

    def total_price(self):
        ele = 0
        for i in self.book_list:
            ele += i.book.book_dprice * i.amount
        self.total = round(ele, 2)
        return self
    <|reserved_special_token_0|>

    def add_books(self, book, amount):
        print('Adding...')
        for i in self.book_list:
            if i.book == book:
                i.amount += int(amount)
                return self
        self.book_list.append(CartItem(book, int(amount)))
        print('Done adding', self.book_list)
        return self

    def del_books(self, book):
        print('Removing...')
        for i in self.book_list:
            if i.book == book:
                self.book_list.remove(i)
        print('Done removing', self.book_list)
        return self
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class Cart:

    def __init__(self):
        self.book_list = []
        self.total = 0
        self.save = 0

    def total_price(self):
        ele = 0
        for i in self.book_list:
            ele += i.book.book_dprice * i.amount
        self.total = round(ele, 2)
        return self

    def save_money(self):
        befor_save = 0
        for i in self.book_list:
            befor_save += i.book.book_price * i.amount
        self.save = round(befor_save - self.total, 2)
        print('Saved', self.save)
        return self

    def add_books(self, book, amount):
        print('Adding...')
        for i in self.book_list:
            if i.book == book:
                i.amount += int(amount)
                return self
        self.book_list.append(CartItem(book, int(amount)))
        print('Done adding', self.book_list)
        return self

    def del_books(self, book):
        print('Removing...')
        for i in self.book_list:
            if i.book == book:
                self.book_list.remove(i)
        print('Done removing', self.book_list)
        return self
<|reserved_special_token_1|>
class CartItem:
    <|reserved_special_token_0|>


class Cart:

    def __init__(self):
        self.book_list = []
        self.total = 0
        self.save = 0

    def total_price(self):
        ele = 0
        for i in self.book_list:
            ele += i.book.book_dprice * i.amount
        self.total = round(ele, 2)
        return self

    def save_money(self):
        befor_save = 0
        for i in self.book_list:
            befor_save += i.book.book_price * i.amount
        self.save = round(befor_save - self.total, 2)
        print('Saved', self.save)
        return self

    def add_books(self, book, amount):
        print('Adding...')
        for i in self.book_list:
            if i.book == book:
                i.amount += int(amount)
                return self
        self.book_list.append(CartItem(book, int(amount)))
        print('Done adding', self.book_list)
        return self

    def del_books(self, book):
        print('Removing...')
        for i in self.book_list:
            if i.book == book:
                self.book_list.remove(i)
        print('Done removing', self.book_list)
        return self
<|reserved_special_token_1|>
# Custom cart item class
class CartItem():
    def __init__(self, book, amount):
        self.book = book
        self.amount = int(amount)

# Custom cart class
class Cart():
    def __init__(self):
        self.book_list = []
        self.total = 0
        self.save = 0

    def total_price(self):
        ele = 0
        for i in self.book_list:
            ele += i.book.book_dprice*i.amount
        self.total = round(ele,2)
        return self

    def save_money(self):
        befor_save = 0
        for i in self.book_list:
            befor_save += i.book.book_price*i.amount
        self.save = round(befor_save - self.total,2)
        print("Saved",self.save)
        return self
    # Define add-to-cart
    def add_books(self, book, amount):
        # Check whether the book is already in the cart item list
        print("Adding...")
        for i in self.book_list:
            if i.book == book:
                i.amount += int(amount)
                return self
        self.book_list.append(CartItem(book, int(amount)))
        print("Done adding",self.book_list)
        return self

    def del_books(self, book):
        print("Removing...")
        for i in self.book_list:
            if i.book == book:
                self.book_list.remove(i)
        print("Done removing", self.book_list)
        return self
flexible
{ "blob_id": "58efaad41d02bb5dffbf71c478c7fad12af68e5b", "index": 9900, "step-1": "<mask token>\n\n\nclass Cart:\n\n def __init__(self):\n self.book_list = []\n self.total = 0\n self.save = 0\n\n def total_price(self):\n ele = 0\n for i in self.book_list:\n ele += i.book.book_dprice * i.amount\n self.total = round(ele, 2)\n return self\n <mask token>\n\n def add_books(self, book, amount):\n print('加入中')\n for i in self.book_list:\n if i.book == book:\n i.amount += int(amount)\n return self\n self.book_list.append(CartItem(book, int(amount)))\n print('加完了', self.book_list)\n return self\n\n def del_books(self, book):\n print('删除中')\n for i in self.book_list:\n if i.book == book:\n self.book_list.remove(i)\n print('删完了', self.book_list)\n return self\n", "step-2": "<mask token>\n\n\nclass Cart:\n\n def __init__(self):\n self.book_list = []\n self.total = 0\n self.save = 0\n\n def total_price(self):\n ele = 0\n for i in self.book_list:\n ele += i.book.book_dprice * i.amount\n self.total = round(ele, 2)\n return self\n\n def save_money(self):\n befor_save = 0\n for i in self.book_list:\n befor_save += i.book.book_price * i.amount\n self.save = round(befor_save - self.total, 2)\n print('节省', self.save)\n return self\n\n def add_books(self, book, amount):\n print('加入中')\n for i in self.book_list:\n if i.book == book:\n i.amount += int(amount)\n return self\n self.book_list.append(CartItem(book, int(amount)))\n print('加完了', self.book_list)\n return self\n\n def del_books(self, book):\n print('删除中')\n for i in self.book_list:\n if i.book == book:\n self.book_list.remove(i)\n print('删完了', self.book_list)\n return self\n", "step-3": "class CartItem:\n <mask token>\n\n\nclass Cart:\n\n def __init__(self):\n self.book_list = []\n self.total = 0\n self.save = 0\n\n def total_price(self):\n ele = 0\n for i in self.book_list:\n ele += i.book.book_dprice * i.amount\n self.total = round(ele, 2)\n return self\n\n def save_money(self):\n befor_save = 0\n for i in self.book_list:\n befor_save += i.book.book_price * i.amount\n self.save = round(befor_save - self.total, 2)\n print('节省', self.save)\n return self\n\n def add_books(self, book, amount):\n print('加入中')\n for i in self.book_list:\n if i.book == book:\n i.amount += int(amount)\n return self\n self.book_list.append(CartItem(book, int(amount)))\n print('加完了', self.book_list)\n return self\n\n def del_books(self, book):\n print('删除中')\n for i in self.book_list:\n if i.book == book:\n self.book_list.remove(i)\n print('删完了', self.book_list)\n return self\n", "step-4": "class CartItem:\n\n def __init__(self, book, amount):\n self.book = book\n self.amount = int(amount)\n\n\nclass Cart:\n\n def __init__(self):\n self.book_list = []\n self.total = 0\n self.save = 0\n\n def total_price(self):\n ele = 0\n for i in self.book_list:\n ele += i.book.book_dprice * i.amount\n self.total = round(ele, 2)\n return self\n\n def save_money(self):\n befor_save = 0\n for i in self.book_list:\n befor_save += i.book.book_price * i.amount\n self.save = round(befor_save - self.total, 2)\n print('节省', self.save)\n return self\n\n def add_books(self, book, amount):\n print('加入中')\n for i in self.book_list:\n if i.book == book:\n i.amount += int(amount)\n return self\n self.book_list.append(CartItem(book, int(amount)))\n print('加完了', self.book_list)\n return self\n\n def del_books(self, book):\n print('删除中')\n for i in self.book_list:\n if i.book == book:\n self.book_list.remove(i)\n print('删完了', self.book_list)\n return self\n", "step-5": "# 自定义购物车项类\nclass CartItem():\n def 
__init__(self, book, amount):\n self.book = book\n self.amount = int(amount)\n\n# 自定义购物车\nclass Cart():\n def __init__(self):\n self.book_list = []\n self.total = 0\n self.save = 0\n\n def total_price(self):\n ele = 0\n for i in self.book_list:\n ele += i.book.book_dprice*i.amount\n self.total = round(ele,2)\n return self\n\n def save_money(self):\n befor_save = 0\n for i in self.book_list:\n befor_save += i.book.book_price*i.amount\n self.save = round(befor_save - self.total,2)\n print(\"节省\",self.save)\n return self\n # 定义添加购物车\n def add_books(self, book, amount):\n # 判断图书已经在购物车项列表中\n print(\"加入中\")\n for i in self.book_list:\n if i.book == book:\n i.amount += int(amount)\n return self\n self.book_list.append(CartItem(book, int(amount)))\n print(\"加完了\",self.book_list)\n return self\n\n def del_books(self, book):\n print(\"删除中\")\n for i in self.book_list:\n if i.book == book:\n self.book_list.remove(i)\n print(\"删完了\", self.book_list)\n return self", "step-ids": [ 5, 6, 7, 8, 9 ] }
[ 5, 6, 7, 8, 9 ]
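A stand-alone sketch of the cart arithmetic in the record above: the total sums the discounted price (book_dprice), and the savings compare the list price (book_price) against that total. Book here is a hypothetical stand-in type, not part of the record.

from collections import namedtuple

# Book is a hypothetical stand-in for whatever model supplies the two prices.
Book = namedtuple("Book", ["book_price", "book_dprice"])

items = [(Book(100.0, 80.0), 2), (Book(50.0, 45.0), 1)]  # (book, amount)
total = round(sum(b.book_dprice * n for b, n in items), 2)         # 205.0
save = round(sum(b.book_price * n for b, n in items) - total, 2)   # 45.0
print(total, save)  # 205.0 45.0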
<|reserved_special_token_0|>


class CustomRegistrationBackend(object):
    <|reserved_special_token_0|>

    def activate(self, request, activation_key):
        activated = RegistrationProfile.objects.activate_user(activation_key)
        if activated:
            signals.user_activated.send(sender=self.__class__, user=
                activated, request=request)
        return activated

    def registration_allowed(self, request):
        """
        Indicate whether account registration is currently permitted,
        based on the value of the setting ``REGISTRATION_OPEN``. This
        is determined as follows:

        * If ``REGISTRATION_OPEN`` is not specified in settings, or is
        set to ``True``, registration is permitted.

        * If ``REGISTRATION_OPEN`` is both specified and set to
        ``False``, registration is not permitted.

        """
        return getattr(settings, 'REGISTRATION_OPEN', True)

    def get_form_class(self, request):
        """
        Return the default form class used for user registration.

        """
        return RegistrationForm

    def post_registration_redirect(self, request, user):
        """
        Return the name of the URL to redirect to after successful
        user registration.

        """
        return 'registration_complete', (), {}
    <|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class CustomRegistrationBackend(object):

    def register(self, request, **kwargs):
        username, email, password = kwargs['username'], kwargs['email'
            ], kwargs['password1']
        if Site._meta.installed:
            site = Site.objects.get_current()
        else:
            site = RequestSite(request)
        new_user = RegistrationProfile.objects.create_inactive_user(username,
            email, password, site)
        signals.user_registered.send(sender=self.__class__, user=new_user,
            request=request)
        new_profile = StaffMember.objects.get(user=new_user)
        new_profile.first_name = kwargs['first_name']
        new_profile.last_name = kwargs['last_name']
        new_profile.position = kwargs['position']
        new_profile.save()
        return new_user

    def activate(self, request, activation_key):
        activated = RegistrationProfile.objects.activate_user(activation_key)
        if activated:
            signals.user_activated.send(sender=self.__class__, user=
                activated, request=request)
        return activated

    def registration_allowed(self, request):
        """
        Indicate whether account registration is currently permitted,
        based on the value of the setting ``REGISTRATION_OPEN``. This
        is determined as follows:

        * If ``REGISTRATION_OPEN`` is not specified in settings, or is
        set to ``True``, registration is permitted.

        * If ``REGISTRATION_OPEN`` is both specified and set to
        ``False``, registration is not permitted.

        """
        return getattr(settings, 'REGISTRATION_OPEN', True)

    def get_form_class(self, request):
        """
        Return the default form class used for user registration.

        """
        return RegistrationForm

    def post_registration_redirect(self, request, user):
        """
        Return the name of the URL to redirect to after successful
        user registration.

        """
        return 'registration_complete', (), {}
    <|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class CustomRegistrationBackend(object):

    def register(self, request, **kwargs):
        username, email, password = kwargs['username'], kwargs['email'
            ], kwargs['password1']
        if Site._meta.installed:
            site = Site.objects.get_current()
        else:
            site = RequestSite(request)
        new_user = RegistrationProfile.objects.create_inactive_user(username,
            email, password, site)
        signals.user_registered.send(sender=self.__class__, user=new_user,
            request=request)
        new_profile = StaffMember.objects.get(user=new_user)
        new_profile.first_name = kwargs['first_name']
        new_profile.last_name = kwargs['last_name']
        new_profile.position = kwargs['position']
        new_profile.save()
        return new_user

    def activate(self, request, activation_key):
        activated = RegistrationProfile.objects.activate_user(activation_key)
        if activated:
            signals.user_activated.send(sender=self.__class__, user=
                activated, request=request)
        return activated

    def registration_allowed(self, request):
        """
        Indicate whether account registration is currently permitted,
        based on the value of the setting ``REGISTRATION_OPEN``. This
        is determined as follows:

        * If ``REGISTRATION_OPEN`` is not specified in settings, or is
        set to ``True``, registration is permitted.

        * If ``REGISTRATION_OPEN`` is both specified and set to
        ``False``, registration is not permitted.

        """
        return getattr(settings, 'REGISTRATION_OPEN', True)

    def get_form_class(self, request):
        """
        Return the default form class used for user registration.

        """
        return RegistrationForm

    def post_registration_redirect(self, request, user):
        """
        Return the name of the URL to redirect to after successful
        user registration.

        """
        return 'registration_complete', (), {}

    def post_activation_redirect(self, request, user):
        """
        Return the name of the URL to redirect to after successful
        account activation.

        """
        newMember = StaffMember.objects.filter(user_id__exact=user.pk).get()
        labGroup = LabGroup.objects.filter(pk=1).get()
        newMember.lab_group = labGroup
        newMember.save()
        return 'registration_activation_complete', (), {}
flexible
{ "blob_id": "201279c0cba2d52b6863204bfadb6291a0065f60", "index": 3961, "step-1": "<mask token>\n\n\nclass CustomRegistrationBackend(object):\n <mask token>\n\n def activate(self, request, activation_key):\n activated = RegistrationProfile.objects.activate_user(activation_key)\n if activated:\n signals.user_activated.send(sender=self.__class__, user=\n activated, request=request)\n return activated\n\n def registration_allowed(self, request):\n \"\"\"\n\t\tIndicate whether account registration is currently permitted,\n\t\tbased on the value of the setting ``REGISTRATION_OPEN``. This\n\t\tis determined as follows:\n\n\t\t* If ``REGISTRATION_OPEN`` is not specified in settings, or is\n\t\tset to ``True``, registration is permitted.\n\n\t\t* If ``REGISTRATION_OPEN`` is both specified and set to\n\t\t``False``, registration is not permitted.\n\t\t\n\t\t\"\"\"\n return getattr(settings, 'REGISTRATION_OPEN', True)\n\n def get_form_class(self, request):\n \"\"\"\n\t\tReturn the default form class used for user registration.\n\t\t\n\t\t\"\"\"\n return RegistrationForm\n\n def post_registration_redirect(self, request, user):\n \"\"\"\n\t\tReturn the name of the URL to redirect to after successful\n\t\tuser registration.\n\t\t\n\t\t\"\"\"\n return 'registration_complete', (), {}\n <mask token>\n", "step-2": "<mask token>\n\n\nclass CustomRegistrationBackend(object):\n\n def register(self, request, **kwargs):\n username, email, password = kwargs['username'], kwargs['email'\n ], kwargs['password1']\n if Site._meta.installed:\n site = Site.objects.get_current()\n else:\n site = RequestSite(request)\n new_user = RegistrationProfile.objects.create_inactive_user(username,\n email, password, site)\n signals.user_registered.send(sender=self.__class__, user=new_user,\n request=request)\n new_profile = StaffMember.objects.get(user=new_user)\n new_profile.first_name = kwargs['first_name']\n new_profile.last_name = kwargs['last_name']\n new_profile.position = kwargs['position']\n new_profile.save()\n return new_user\n\n def activate(self, request, activation_key):\n activated = RegistrationProfile.objects.activate_user(activation_key)\n if activated:\n signals.user_activated.send(sender=self.__class__, user=\n activated, request=request)\n return activated\n\n def registration_allowed(self, request):\n \"\"\"\n\t\tIndicate whether account registration is currently permitted,\n\t\tbased on the value of the setting ``REGISTRATION_OPEN``. 
This\n\t\tis determined as follows:\n\n\t\t* If ``REGISTRATION_OPEN`` is not specified in settings, or is\n\t\tset to ``True``, registration is permitted.\n\n\t\t* If ``REGISTRATION_OPEN`` is both specified and set to\n\t\t``False``, registration is not permitted.\n\t\t\n\t\t\"\"\"\n return getattr(settings, 'REGISTRATION_OPEN', True)\n\n def get_form_class(self, request):\n \"\"\"\n\t\tReturn the default form class used for user registration.\n\t\t\n\t\t\"\"\"\n return RegistrationForm\n\n def post_registration_redirect(self, request, user):\n \"\"\"\n\t\tReturn the name of the URL to redirect to after successful\n\t\tuser registration.\n\t\t\n\t\t\"\"\"\n return 'registration_complete', (), {}\n <mask token>\n", "step-3": "<mask token>\n\n\nclass CustomRegistrationBackend(object):\n\n def register(self, request, **kwargs):\n username, email, password = kwargs['username'], kwargs['email'\n ], kwargs['password1']\n if Site._meta.installed:\n site = Site.objects.get_current()\n else:\n site = RequestSite(request)\n new_user = RegistrationProfile.objects.create_inactive_user(username,\n email, password, site)\n signals.user_registered.send(sender=self.__class__, user=new_user,\n request=request)\n new_profile = StaffMember.objects.get(user=new_user)\n new_profile.first_name = kwargs['first_name']\n new_profile.last_name = kwargs['last_name']\n new_profile.position = kwargs['position']\n new_profile.save()\n return new_user\n\n def activate(self, request, activation_key):\n activated = RegistrationProfile.objects.activate_user(activation_key)\n if activated:\n signals.user_activated.send(sender=self.__class__, user=\n activated, request=request)\n return activated\n\n def registration_allowed(self, request):\n \"\"\"\n\t\tIndicate whether account registration is currently permitted,\n\t\tbased on the value of the setting ``REGISTRATION_OPEN``. 
This\n\t\tis determined as follows:\n\n\t\t* If ``REGISTRATION_OPEN`` is not specified in settings, or is\n\t\tset to ``True``, registration is permitted.\n\n\t\t* If ``REGISTRATION_OPEN`` is both specified and set to\n\t\t``False``, registration is not permitted.\n\t\t\n\t\t\"\"\"\n return getattr(settings, 'REGISTRATION_OPEN', True)\n\n def get_form_class(self, request):\n \"\"\"\n\t\tReturn the default form class used for user registration.\n\t\t\n\t\t\"\"\"\n return RegistrationForm\n\n def post_registration_redirect(self, request, user):\n \"\"\"\n\t\tReturn the name of the URL to redirect to after successful\n\t\tuser registration.\n\t\t\n\t\t\"\"\"\n return 'registration_complete', (), {}\n\n def post_activation_redirect(self, request, user):\n \"\"\"\n\t\tReturn the name of the URL to redirect to after successful\n\t\taccount activation.\n\t\t\n\t\t\"\"\"\n newMember = StaffMember.objects.filter(user_id__exact=user.pk).get()\n labGroup = LabGroup.objects.filter(pk=1).get()\n newMember.lab_group = labGroup\n newMember.save()\n return 'registration_activation_complete', (), {}\n", "step-4": "from django.conf import settings\nfrom django.contrib.sites.models import RequestSite\nfrom django.contrib.sites.models import Site\nfrom fish.labinterface.models import *\nfrom registration import signals\nfrom registration.forms import RegistrationForm\nfrom registration.models import RegistrationProfile\nfrom labinterface.models import StaffMember\n\n\nclass CustomRegistrationBackend(object):\n\n def register(self, request, **kwargs):\n username, email, password = kwargs['username'], kwargs['email'\n ], kwargs['password1']\n if Site._meta.installed:\n site = Site.objects.get_current()\n else:\n site = RequestSite(request)\n new_user = RegistrationProfile.objects.create_inactive_user(username,\n email, password, site)\n signals.user_registered.send(sender=self.__class__, user=new_user,\n request=request)\n new_profile = StaffMember.objects.get(user=new_user)\n new_profile.first_name = kwargs['first_name']\n new_profile.last_name = kwargs['last_name']\n new_profile.position = kwargs['position']\n new_profile.save()\n return new_user\n\n def activate(self, request, activation_key):\n activated = RegistrationProfile.objects.activate_user(activation_key)\n if activated:\n signals.user_activated.send(sender=self.__class__, user=\n activated, request=request)\n return activated\n\n def registration_allowed(self, request):\n \"\"\"\n\t\tIndicate whether account registration is currently permitted,\n\t\tbased on the value of the setting ``REGISTRATION_OPEN``. 
This\n\t\tis determined as follows:\n\n\t\t* If ``REGISTRATION_OPEN`` is not specified in settings, or is\n\t\tset to ``True``, registration is permitted.\n\n\t\t* If ``REGISTRATION_OPEN`` is both specified and set to\n\t\t``False``, registration is not permitted.\n\t\t\n\t\t\"\"\"\n return getattr(settings, 'REGISTRATION_OPEN', True)\n\n def get_form_class(self, request):\n \"\"\"\n\t\tReturn the default form class used for user registration.\n\t\t\n\t\t\"\"\"\n return RegistrationForm\n\n def post_registration_redirect(self, request, user):\n \"\"\"\n\t\tReturn the name of the URL to redirect to after successful\n\t\tuser registration.\n\t\t\n\t\t\"\"\"\n return 'registration_complete', (), {}\n\n def post_activation_redirect(self, request, user):\n \"\"\"\n\t\tReturn the name of the URL to redirect to after successful\n\t\taccount activation.\n\t\t\n\t\t\"\"\"\n newMember = StaffMember.objects.filter(user_id__exact=user.pk).get()\n labGroup = LabGroup.objects.filter(pk=1).get()\n newMember.lab_group = labGroup\n newMember.save()\n return 'registration_activation_complete', (), {}\n", "step-5": "from django.conf import settings\nfrom django.contrib.sites.models import RequestSite\nfrom django.contrib.sites.models import Site\n\nfrom fish.labinterface.models import *\n\nfrom registration import signals\nfrom registration.forms import RegistrationForm\nfrom registration.models import RegistrationProfile\nfrom labinterface.models import StaffMember\n\n\nclass CustomRegistrationBackend(object):\n\tdef register(self, request, **kwargs):\n\t\tusername, email, password = kwargs['username'], kwargs['email'], kwargs['password1']\n\t\tif Site._meta.installed:\n\t\t\tsite = Site.objects.get_current()\n\t\telse:\n\t\t\tsite = RequestSite(request)\n\t\tnew_user = RegistrationProfile.objects.create_inactive_user(username, email, password, site)\n\t\tsignals.user_registered.send(sender=self.__class__, user=new_user, request=request)\n\t\tnew_profile = StaffMember.objects.get(user=new_user)\n\t\tnew_profile.first_name=kwargs['first_name']\n\t\tnew_profile.last_name=kwargs['last_name']\n\t\tnew_profile.position=kwargs['position']\n\t\tnew_profile.save()\n\t\treturn new_user\n\tdef activate(self, request, activation_key):\n\t\tactivated = RegistrationProfile.objects.activate_user(activation_key)\n\t\tif activated:\n\t\t\tsignals.user_activated.send(sender=self.__class__,\n\t\t\t\t\t\t\t\t\t\tuser=activated,\n\t\t\t\t\t\t\t\t\t\trequest=request)\n\t\treturn activated\n\n\tdef registration_allowed(self, request):\n\t\t\"\"\"\n\t\tIndicate whether account registration is currently permitted,\n\t\tbased on the value of the setting ``REGISTRATION_OPEN``. 
This\n\t\tis determined as follows:\n\n\t\t* If ``REGISTRATION_OPEN`` is not specified in settings, or is\n\t\tset to ``True``, registration is permitted.\n\n\t\t* If ``REGISTRATION_OPEN`` is both specified and set to\n\t\t``False``, registration is not permitted.\n\t\t\n\t\t\"\"\"\n\t\treturn getattr(settings, 'REGISTRATION_OPEN', True)\n\n\tdef get_form_class(self, request):\n\t\t\"\"\"\n\t\tReturn the default form class used for user registration.\n\t\t\n\t\t\"\"\"\n\t\treturn RegistrationForm\n\n\tdef post_registration_redirect(self, request, user):\n\t\t\"\"\"\n\t\tReturn the name of the URL to redirect to after successful\n\t\tuser registration.\n\t\t\n\t\t\"\"\"\n\t\treturn ('registration_complete', (), {})\n\n\tdef post_activation_redirect(self, request, user):\n\t\t\"\"\"\n\t\tReturn the name of the URL to redirect to after successful\n\t\taccount activation.\n\t\t\n\t\t\"\"\"\n\t\tnewMember = StaffMember.objects.filter(user_id__exact=user.pk).get()\n\t\tlabGroup = LabGroup.objects.filter(pk=1).get()\n\t\tnewMember.lab_group = labGroup\n\t\tnewMember.save()\n\t\treturn ('registration_activation_complete', (), {})", "step-ids": [ 5, 6, 7, 8, 9 ] }
[ 5, 6, 7, 8, 9 ]
a = 1
b = a
print(a)
print(b)

a = 2
print(a)
print(b)

# All-uppercase names denote constants

USER_NAME = "constant"
print(USER_NAME)

print(USER_NAME)
normal
{ "blob_id": "1cc9a7bbe1bda06ce76fa8ec1cdc17c7b2fde73b", "index": 4051, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(a)\nprint(b)\n<mask token>\nprint(a)\nprint(b)\n<mask token>\nprint(USER_NAME)\nprint(USER_NAME)\n", "step-3": "a = 1\nb = a\nprint(a)\nprint(b)\na = 2\nprint(a)\nprint(b)\nUSER_NAME = '常量'\nprint(USER_NAME)\nprint(USER_NAME)\n", "step-4": "\na = 1\nb = a\nprint(a)\nprint(b)\n\na = 2\nprint(a)\nprint(b)\n\n# 全部大写字符代表常量\n\nUSER_NAME = \"常量\"\nprint(USER_NAME)\n\nprint(USER_NAME)", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
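The record above shows that rebinding a (a = 2) leaves b at the old value: assignment rebinds a name rather than changing a shared object. A small contrasting sketch with a mutable list, where in-place change is visible through both names but rebinding still is not:

a = [1]
b = a          # both names reference the same list object
a.append(2)    # in-place mutation is seen through both names
print(b)       # [1, 2]
a = [3]        # rebinding `a` does not touch the object `b` still references
print(b)       # [1, 2]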
from flask_wtf import FlaskForm
from wtforms import (
    StringField, TextAreaField, PasswordField, HiddenField)
from wtforms.fields.html5 import URLField, EmailField
from flask_wtf.file import FileField
from wtforms.validators import (
    InputRequired, Length, Email,
    Optional, URL, ValidationError, Regexp)
from models import User
from flask import g


class UserBaseForm(FlaskForm):
    email = EmailField("Email", validators=[
        InputRequired(message="Email cannot be blank."),
        Length(min=5, max=320),
        Email(check_deliverability=True,
              message="Invalid Email address")])

    username = StringField("Username", validators=[
        InputRequired(message="Username cannot be blank."),
        Length(min=2, max=30)])


class AddUserForm(UserBaseForm):

    password = PasswordField("Password", validators=[
        InputRequired(message="Password cannot be blank."),
        Length(min=8, max=60),
        Regexp("^(?=.*[A-Za-z])(?=.*\d)(?=.*[$@$!%*#?&])[A-Za-z\d$@$!%*#?&]{8,}$", message='Please match the given requirements for password.')],  # noqa e501
        description="Minimum one each - uppercase letter, lowercase letter, number, special character.")  # noqa e501

    def validate_email(form, field):
        """Make sure email not in use."""
        if User.query.filter_by(email=form.email.data).first():
            form.email.errors.append(
                "Email already associated with account!")
            raise ValidationError

    def validate_username(form, field):
        """Make sure username not in use."""
        if User.query.filter_by(username=form.username.data).first():
            form.username.errors.append("Username already taken!")
            raise ValidationError


class EditUserForm(UserBaseForm):
    """Edit User Form."""

    avatar_url = URLField("Avatar Image URL", validators=[
        Length(min=6, max=255), Optional()],
        description="Online image address")

    banner_url = URLField("Banner Image URL", validators=[
        Length(min=6, max=255), Optional()],
        description="Online image address")

    byline = StringField("User Byline", validators=[
        Length(min=2, max=200), Optional()],
        description="A short snippet shown under your username")

    bio = TextAreaField("User Bio", validators=[
        Length(min=2, max=500), Optional()],
        description="500 character max")

    city = StringField("City", validators=[Length(min=2, max=50), Optional()])

    state = StringField("State", validators=[
        Length(min=2, max=50), Optional()])

    country = StringField("Country", validators=[
        Length(min=2, max=50), Optional()])

    def validate_email(form, field):
        """Make sure email is not in use
        unless it's the current user's email."""
        user = User.query.filter_by(email=form.email.data).first()
        if user and not user == g.user:
            form.email.errors = [
                "Email already associated with account!",
                *form.email.errors
            ]
            raise ValidationError

    def validate_username(form, field):
        """Make sure username is not in use
        unless it's the current user's username."""
        user = User.query.filter_by(username=form.username.data).first()
        if user and not user == g.user:
            form.username.errors = [
                "Username already taken!",
                *form.username.errors
            ]
            raise ValidationError


class LoginForm(FlaskForm):
    email = EmailField("Email", validators=[
        InputRequired(message="Email cannot be blank."),
        Length(min=5, max=320),
        Email(check_deliverability=True,
              message="Invalid Email address")])

    password = PasswordField("Password", validators=[
        InputRequired(
            message="Password cannot be blank."),
        Length(min=8, max=60)])


class ReportBaseForm(FlaskForm):
    """Form for adding new report."""

    text = TextAreaField("Report", validators=[
        InputRequired(message="Report cannot be blank."),
        Length(min=2)])

    photo_url = URLField(
        "Photo URL", validators=[URL(), Optional()],
        description="""
        Either enter a photo URL or
        choose an image file to include an image.""")

    photo_file = FileField(
        "Upload Photo", validators=[Optional()],
        description="""
        Either enter a photo URL or
        choose an image file to include an image. 4MB max.""")

    def validate(self):
        if not super().validate():
            return False
        if self.photo_url.data and self.photo_file.data:
            msg = 'Please specify Photo URL or upload a photo, not both'
            self.photo_url.errors.append(msg)
            self.photo_file.errors.append(msg)
            return False
        return True


class AddReportForm(ReportBaseForm):
    """Form for adding new report."""
    pass


class EditReportForm(ReportBaseForm):
    """Form for editing a report."""
    cleared_file = HiddenField('cleared_file')
normal
{ "blob_id": "47b2857ac20e46897cc1f64371868ce5174799d6", "index": 4790, "step-1": "<mask token>\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n cleared_file = HiddenField('cleared_file')\n", "step-2": "<mask token>\n\n\nclass EditUserForm(UserBaseForm):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def validate_email(form, field):\n \"\"\"Make sure email is not in use\n unless it's the current user's email.\"\"\"\n user = User.query.filter_by(email=form.email.data).first()\n if user and not user == g.user:\n form.email.errors = ['Email already associated with account!',\n *form.email.errors]\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username is not in use\n unless it's the current user's username.\"\"\"\n user = User.query.filter_by(username=form.username.data).first()\n if user and not user == g.user:\n form.username.errors = ['Username already taken!', *form.\n username.errors]\n raise ValidationError\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email', validators=[InputRequired(message=\n 'Email cannot be blank.'), Length(min=5, max=320), Email(\n check_deliverability=True, message='Invalid Email address')])\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60)])\n\n\nclass ReportBaseForm(FlaskForm):\n \"\"\"Form for adding new report.\"\"\"\n text = TextAreaField('Report', validators=[InputRequired(message=\n 'Report cannot be blank.'), Length(min=2)])\n photo_url = URLField('Photo URL', validators=[URL(), Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image.\"\"\"\n )\n photo_file = FileField('Upload Photo', validators=[Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image. 
4MB max.\"\"\"\n )\n\n def validate(self):\n if not super().validate():\n return False\n if self.photo_url.data and self.photo_file.data:\n msg = 'Please specify Photo URL or upload a photo, not both'\n self.photo_url.errors.append(msg)\n self.photo_file.errors.append(msg)\n return False\n return True\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n cleared_file = HiddenField('cleared_file')\n", "step-3": "<mask token>\n\n\nclass AddUserForm(UserBaseForm):\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60), Regexp(\n '^(?=.*[A-Za-z])(?=.*\\\\d)(?=.*[$@$!%*#?&])[A-Za-z\\\\d$@$!%*#?&]{8,}$',\n message='Please match the given requirements for password.')],\n description=\n 'Minimum one each - uppercase letter, lowercase letter, number, special character.'\n )\n\n def validate_email(form, field):\n \"\"\"Make sure email not in use.\"\"\"\n if User.query.filter_by(email=form.email.data).first():\n form.email.errors.append('Email already associated with account!')\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username not in use.\"\"\"\n if User.query.filter_by(username=form.username.data).first():\n form.username.errors.append('Username already taken!')\n raise ValidationError\n\n\nclass EditUserForm(UserBaseForm):\n \"\"\"Edit User Form.\"\"\"\n avatar_url = URLField('Avatar Image URL', validators=[Length(min=6, max\n =255), Optional()], description='Online image address')\n banner_url = URLField('Banner Image URL', validators=[Length(min=6, max\n =255), Optional()], description='Online image address')\n byline = StringField('User Byline', validators=[Length(min=2, max=200),\n Optional()], description='A short snippet shown under your username')\n bio = TextAreaField('User Bio', validators=[Length(min=2, max=500),\n Optional()], description='500 character max')\n city = StringField('City', validators=[Length(min=2, max=50), Optional()])\n state = StringField('State', validators=[Length(min=2, max=50), Optional()]\n )\n country = StringField('Country', validators=[Length(min=2, max=50),\n Optional()])\n\n def validate_email(form, field):\n \"\"\"Make sure email is not in use\n unless it's the current user's email.\"\"\"\n user = User.query.filter_by(email=form.email.data).first()\n if user and not user == g.user:\n form.email.errors = ['Email already associated with account!',\n *form.email.errors]\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username is not in use\n unless it's the current user's username.\"\"\"\n user = User.query.filter_by(username=form.username.data).first()\n if user and not user == g.user:\n form.username.errors = ['Username already taken!', *form.\n username.errors]\n raise ValidationError\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email', validators=[InputRequired(message=\n 'Email cannot be blank.'), Length(min=5, max=320), Email(\n check_deliverability=True, message='Invalid Email address')])\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60)])\n\n\nclass ReportBaseForm(FlaskForm):\n \"\"\"Form for adding new report.\"\"\"\n text = TextAreaField('Report', validators=[InputRequired(message=\n 'Report cannot be blank.'), Length(min=2)])\n photo_url = URLField('Photo URL', validators=[URL(), Optional()],\n 
description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image.\"\"\"\n )\n photo_file = FileField('Upload Photo', validators=[Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image. 4MB max.\"\"\"\n )\n\n def validate(self):\n if not super().validate():\n return False\n if self.photo_url.data and self.photo_file.data:\n msg = 'Please specify Photo URL or upload a photo, not both'\n self.photo_url.errors.append(msg)\n self.photo_file.errors.append(msg)\n return False\n return True\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n cleared_file = HiddenField('cleared_file')\n", "step-4": "<mask token>\n\n\nclass UserBaseForm(FlaskForm):\n email = EmailField('Email', validators=[InputRequired(message=\n 'Email cannot be blank.'), Length(min=5, max=320), Email(\n check_deliverability=True, message='Invalid Email address')])\n username = StringField('Username', validators=[InputRequired(message=\n 'Username cannot be blank.'), Length(min=2, max=30)])\n\n\nclass AddUserForm(UserBaseForm):\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60), Regexp(\n '^(?=.*[A-Za-z])(?=.*\\\\d)(?=.*[$@$!%*#?&])[A-Za-z\\\\d$@$!%*#?&]{8,}$',\n message='Please match the given requirements for password.')],\n description=\n 'Minimum one each - uppercase letter, lowercase letter, number, special character.'\n )\n\n def validate_email(form, field):\n \"\"\"Make sure email not in use.\"\"\"\n if User.query.filter_by(email=form.email.data).first():\n form.email.errors.append('Email already associated with account!')\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username not in use.\"\"\"\n if User.query.filter_by(username=form.username.data).first():\n form.username.errors.append('Username already taken!')\n raise ValidationError\n\n\nclass EditUserForm(UserBaseForm):\n \"\"\"Edit User Form.\"\"\"\n avatar_url = URLField('Avatar Image URL', validators=[Length(min=6, max\n =255), Optional()], description='Online image address')\n banner_url = URLField('Banner Image URL', validators=[Length(min=6, max\n =255), Optional()], description='Online image address')\n byline = StringField('User Byline', validators=[Length(min=2, max=200),\n Optional()], description='A short snippet shown under your username')\n bio = TextAreaField('User Bio', validators=[Length(min=2, max=500),\n Optional()], description='500 character max')\n city = StringField('City', validators=[Length(min=2, max=50), Optional()])\n state = StringField('State', validators=[Length(min=2, max=50), Optional()]\n )\n country = StringField('Country', validators=[Length(min=2, max=50),\n Optional()])\n\n def validate_email(form, field):\n \"\"\"Make sure email is not in use\n unless it's the current user's email.\"\"\"\n user = User.query.filter_by(email=form.email.data).first()\n if user and not user == g.user:\n form.email.errors = ['Email already associated with account!',\n *form.email.errors]\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username is not in use\n unless it's the current user's username.\"\"\"\n user = User.query.filter_by(username=form.username.data).first()\n if user and not user == g.user:\n form.username.errors = ['Username already taken!', *form.\n username.errors]\n raise 
ValidationError\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email', validators=[InputRequired(message=\n 'Email cannot be blank.'), Length(min=5, max=320), Email(\n check_deliverability=True, message='Invalid Email address')])\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60)])\n\n\nclass ReportBaseForm(FlaskForm):\n \"\"\"Form for adding new report.\"\"\"\n text = TextAreaField('Report', validators=[InputRequired(message=\n 'Report cannot be blank.'), Length(min=2)])\n photo_url = URLField('Photo URL', validators=[URL(), Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image.\"\"\"\n )\n photo_file = FileField('Upload Photo', validators=[Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image. 4MB max.\"\"\"\n )\n\n def validate(self):\n if not super().validate():\n return False\n if self.photo_url.data and self.photo_file.data:\n msg = 'Please specify Photo URL or upload a photo, not both'\n self.photo_url.errors.append(msg)\n self.photo_file.errors.append(msg)\n return False\n return True\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n cleared_file = HiddenField('cleared_file')\n", "step-5": "from flask_wtf import FlaskForm\nfrom wtforms import (\n StringField, TextAreaField, PasswordField, HiddenField)\nfrom wtforms.fields.html5 import URLField, EmailField\nfrom flask_wtf.file import FileField\nfrom wtforms.validators import (\n InputRequired, Length, Email,\n Optional, URL, ValidationError, Regexp)\nfrom models import User\nfrom flask import g\n\n\nclass UserBaseForm(FlaskForm):\n email = EmailField(\"Email\", validators=[\n InputRequired(message=\"Email cannot be blank.\"),\n Length(min=5, max=320),\n Email(check_deliverability=True,\n message=\"Invalid Email address\")])\n\n username = StringField(\"Username\", validators=[\n InputRequired(message=\"Username cannot be blank.\"),\n Length(min=2, max=30)])\n\n\nclass AddUserForm(UserBaseForm):\n\n password = PasswordField(\"Password\", validators=[\n InputRequired(message=\"Password cannot be blank.\"),\n Length(min=8, max=60),\n Regexp(\"^(?=.*[A-Za-z])(?=.*\\d)(?=.*[$@$!%*#?&])[A-Za-z\\d$@$!%*#?&]{8,}$\", message='Please match the given requirements for password.')], # noqa e501\n description=\"Minimum one each - uppercase letter, lowercase letter, number, special character.\") # noqa e501\n\n def validate_email(form, field):\n \"\"\"Make sure email not in use.\"\"\"\n if User.query.filter_by(email=form.email.data).first():\n form.email.errors.append(\n \"Email already associated with account!\")\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username not in use.\"\"\"\n if User.query.filter_by(username=form.username.data).first():\n form.username.errors.append(\"Username already taken!\")\n raise ValidationError\n\n\nclass EditUserForm(UserBaseForm):\n \"\"\"Edit User Form.\"\"\"\n\n avatar_url = URLField(\"Avatar Image URL\", validators=[\n Length(min=6, max=255), Optional()],\n description=\"Online image address\")\n\n banner_url = URLField(\"Banner Image URL\", validators=[\n Length(min=6, max=255), Optional()],\n description=\"Online image address\")\n\n byline = StringField(\"User Byline\", validators=[\n Length(min=2, max=200), Optional()],\n description=\"A short 
snippet shown under your username\")\n\n bio = TextAreaField(\"User Bio\", validators=[\n Length(min=2, max=500), Optional()],\n description=\"500 character max\")\n\n city = StringField(\"City\", validators=[Length(min=2, max=50), Optional()])\n\n state = StringField(\"State\", validators=[\n Length(min=2, max=50), Optional()])\n\n country = StringField(\"Country\", validators=[\n Length(min=2, max=50), Optional()])\n\n def validate_email(form, field):\n \"\"\"Make sure email is not in use\n unless it's the current user's email.\"\"\"\n\n user = User.query.filter_by(email=form.email.data).first()\n\n if user and not user == g.user:\n form.email.errors = [\n \"Email already associated with account!\",\n *form.email.errors\n ]\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username is not in use\n unless it's the current user's username.\"\"\"\n\n user = User.query.filter_by(username=form.username.data).first()\n\n if user and not user == g.user:\n form.username.errors = [\n \"Username already taken!\",\n *form.username.errors\n ]\n raise ValidationError\n\n\nclass LoginForm(FlaskForm):\n email = EmailField(\"Email\", validators=[\n InputRequired(message=\"Email cannot be blank.\"),\n Length(min=5, max=320),\n Email(check_deliverability=True,\n message=\"Invalid Email address\")])\n\n password = PasswordField(\"Password\", validators=[\n InputRequired(\n message=\"Password cannot be blank.\"),\n Length(min=8, max=60)])\n\n\nclass ReportBaseForm(FlaskForm):\n \"\"\"Form for adding new report.\"\"\"\n\n text = TextAreaField(\"Report\", validators=[\n InputRequired(message=\"Report cannot be blank.\"),\n Length(min=2)])\n\n photo_url = URLField(\n \"Photo URL\", validators=[URL(), Optional()],\n description=\"\"\"\n Either enter a photo URL or\n choose an image file to include an image.\"\"\")\n\n photo_file = FileField(\n \"Upload Photo\", validators=[Optional()],\n description=\"\"\"\n Either enter a photo URL or\n choose an image file to include an image. 4MB max.\"\"\")\n\n def validate(self):\n if not super().validate():\n return False\n if self.photo_url.data and self.photo_file.data:\n msg = 'Please specify Photo URL or upload a photo, not both'\n self.photo_url.errors.append(msg)\n self.photo_file.errors.append(msg)\n return False\n return True\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n\n cleared_file = HiddenField('cleared_file')\n", "step-ids": [ 5, 14, 20, 22, 24 ] }
[ 5, 14, 20, 22, 24 ] }
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def pop(i):
    loc = i
    sentencesTrial = []
    File = open('words.txt')
    lines = File.read()
    sentences = nltk.sent_tokenize(lines)
    locations = ['Castle', 'Beach', 'Beach', 'Ghost Town', 'Ghost Town',
        'Haunted House', 'Jungle', 'Carnival', 'Ghost Town', 'Highway',
        'Castle', 'Pyramid', 'Beach', 'Beach', 'Carnival', 'Highway',
        'Castle', 'Jungle']
    for sentence in sentences:
        for word, pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))):
            if pos == 'NN':
                database.nouns.append(word.lower())
                sentencesTrial.append('NN')
            elif pos == 'NNS':
                database.nounsplural.append(word.lower())
                sentencesTrial.append('NNS')
            elif pos == 'NNP':
                database.propernounS.append(word.lower())
                sentencesTrial.append('NNP')
            elif pos == 'NNPS':
                database.propernounP.append(word.lower())
                sentencesTrial.append('NNPS')
            elif pos == 'JJ':
                database.adjective.append(word.lower())
                sentencesTrial.append('JJ')
            elif pos == 'VB' or pos == 'VBG' or pos == 'VBN':
                database.verbs.append(word.lower())
                sentencesTrial.append('VB')
            elif pos == 'VBD':
                database.verbpast.append(word.lower())
                sentencesTrial.append('VBD')
            elif pos == 'VBZ' or pos == 'VBP':
                database.verb3person.append(word.lower())
                sentencesTrial.append('VBZ')
            elif pos == 'RB' or pos == 'RBR' or pos == 'RBS':
                database.adverb.append(word)
                sentencesTrial.append('RB'.lower())
            elif word == ',':
                database.useless.append(word)
                sentencesTrial.append(',')
                break
            elif word == '.':
                database.useless.append(word)
                sentencesTrial.append('.')
                break
            else:
                database.unUsedWords.append(word.lower())
                break
    nounCount = []
    trueNouns = []
    for x in database.nouns:
        if x in trueNouns:
            a = trueNouns.index(x)
            nounCount[a] += 1
        else:
            trueNouns.append(x)
            a = trueNouns.index(x)
            nounCount.append(1)
    for x in trueNouns:
        i = trueNouns.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'NN', locations[loc], nounCount[i]))
    nounpCount = []
    trueNounsp = []
    for x in database.nounsplural:
        if x in trueNounsp:
            a = trueNounsp.index(x)
            nounpCount[a] += 1
        else:
            trueNounsp.append(x)
            a = trueNounsp.index(x)
            nounpCount.append(1)
    for x in trueNounsp:
        i = trueNounsp.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'NNS', locations[loc], nounpCount[i]))
    pnounCount = []
    truepNouns = []
    for x in database.propernounS:
        if x in truepNouns:
            a = truepNouns.index(x)
            pnounCount[a] += 1
        else:
            truepNouns.append(x)
            a = truepNouns.index(x)
            pnounCount.append(1)
    for x in truepNouns:
        i = truepNouns.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'NNP', locations[loc], pnounCount[i]))
    pnounpCount = []
    truepNounsp = []
    for x in database.propernounP:
        if x in truepNounsp:
            a = truepNounsp.index(x)
            pnounpCount[a] += 1
        else:
            truepNounsp.append(x)
            a = truepNounsp.index(x)
            pnounpCount.append(1)
    for x in truepNounsp:
        i = truepNounsp.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'NNPS', locations[loc], pnounpCount[i]))
    adjectCount = []
    trueadject = []
    for x in database.adjective:
        if x in trueadject:
            a = trueadject.index(x)
            adjectCount[a] += 1
        else:
            trueadject.append(x)
            a = trueadject.index(x)
            adjectCount.append(1)
    for x in trueadject:
        i = trueadject.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'JJ', locations[loc], adjectCount[i]))
    verbCount = []
    trueVerb = []
    for x in database.verbs:
        if x in trueVerb:
            a = trueVerb.index(x)
            verbCount[a] += 1
        else:
            trueVerb.append(x)
            a = trueVerb.index(x)
            verbCount.append(1)
    for x in trueVerb:
        i = trueVerb.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'VB', locations[loc], verbCount[i]))
    verbpCount = []
    trueVerbp = []
    for x in database.verbpast:
        if x in trueVerbp:
            a = trueVerbp.index(x)
            verbpCount[a] += 1
        else:
            trueVerbp.append(x)
            a = trueVerbp.index(x)
            verbpCount.append(1)
    for x in trueVerbp:
        i = trueVerbp.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'VBD', locations[loc], verbpCount[i]))
    verb3pCount = []
    trueVerb3p = []
    for x in database.verb3person:
        if x in trueVerb3p:
            a = trueVerb3p.index(x)
            verb3pCount[a] += 1
        else:
            trueVerb3p.append(x)
            a = trueVerb3p.index(x)
            verb3pCount.append(1)
    for x in trueVerb3p:
        i = trueVerb3p.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'VBZ', locations[loc], verb3pCount[i]))
    adverbCount = []
    trueAdverb = []
    for x in database.adverb:
        if x in trueAdverb:
            a = trueAdverb.index(x)
            adverbCount[a] += 1
        else:
            trueAdverb.append(x)
            a = trueAdverb.index(x)
            adverbCount.append(1)
    for x in trueAdverb:
        i = trueAdverb.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'RB', locations[loc], adverbCount[i]))
    uselessCount = []
    trueUseless = []
    for x in database.useless:
        if x in trueUseless:
            a = trueUseless.index(x)
            uselessCount[a] += 1
        else:
            trueUseless.append(x)
            a = trueUseless.index(x)
            uselessCount.append(1)
    for x in trueUseless:
        i = trueUseless.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'PU', locations[loc], uselessCount[i]))
    uuWCount = []
    trueuuW = []
    for x in database.unUsedWords:
        if x in trueuuW:
            a = trueuuW.index(x)
            uuWCount[a] += 1
        else:
            trueuuW.append(x)
            a = trueuuW.index(x)
            uuWCount.append(1)
    for x in trueuuW:
        i = trueuuW.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'US', locations[loc], uuWCount[i]))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def pop(i):
    loc = i
    sentencesTrial = []
    File = open('words.txt')
    lines = File.read()
    sentences = nltk.sent_tokenize(lines)
    locations = ['Castle', 'Beach', 'Beach', 'Ghost Town', 'Ghost Town',
        'Haunted House', 'Jungle', 'Carnival', 'Ghost Town', 'Highway',
        'Castle', 'Pyramid', 'Beach', 'Beach', 'Carnival', 'Highway',
        'Castle', 'Jungle']
    for sentence in sentences:
        for word, pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))):
            if pos == 'NN':
                database.nouns.append(word.lower())
                sentencesTrial.append('NN')
            elif pos == 'NNS':
                database.nounsplural.append(word.lower())
                sentencesTrial.append('NNS')
            elif pos == 'NNP':
                database.propernounS.append(word.lower())
                sentencesTrial.append('NNP')
            elif pos == 'NNPS':
                database.propernounP.append(word.lower())
                sentencesTrial.append('NNPS')
            elif pos == 'JJ':
                database.adjective.append(word.lower())
                sentencesTrial.append('JJ')
            elif pos == 'VB' or pos == 'VBG' or pos == 'VBN':
                database.verbs.append(word.lower())
                sentencesTrial.append('VB')
            elif pos == 'VBD':
                database.verbpast.append(word.lower())
                sentencesTrial.append('VBD')
            elif pos == 'VBZ' or pos == 'VBP':
                database.verb3person.append(word.lower())
                sentencesTrial.append('VBZ')
            elif pos == 'RB' or pos == 'RBR' or pos == 'RBS':
                database.adverb.append(word)
                sentencesTrial.append('RB'.lower())
            elif word == ',':
                database.useless.append(word)
                sentencesTrial.append(',')
                break
            elif word == '.':
                database.useless.append(word)
                sentencesTrial.append('.')
                break
            else:
                database.unUsedWords.append(word.lower())
                break
    nounCount = []
    trueNouns = []
    for x in database.nouns:
        if x in trueNouns:
            a = trueNouns.index(x)
            nounCount[a] += 1
        else:
            trueNouns.append(x)
            a = trueNouns.index(x)
            nounCount.append(1)
    for x in trueNouns:
        i = trueNouns.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'NN', locations[loc], nounCount[i]))
    nounpCount = []
    trueNounsp = []
    for x in database.nounsplural:
        if x in trueNounsp:
            a = trueNounsp.index(x)
            nounpCount[a] += 1
        else:
            trueNounsp.append(x)
            a = trueNounsp.index(x)
            nounpCount.append(1)
    for x in trueNounsp:
        i = trueNounsp.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'NNS', locations[loc], nounpCount[i]))
    pnounCount = []
    truepNouns = []
    for x in database.propernounS:
        if x in truepNouns:
            a = truepNouns.index(x)
            pnounCount[a] += 1
        else:
            truepNouns.append(x)
            a = truepNouns.index(x)
            pnounCount.append(1)
    for x in truepNouns:
        i = truepNouns.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'NNP', locations[loc], pnounCount[i]))
    pnounpCount = []
    truepNounsp = []
    for x in database.propernounP:
        if x in truepNounsp:
            a = truepNounsp.index(x)
            pnounpCount[a] += 1
        else:
            truepNounsp.append(x)
            a = truepNounsp.index(x)
            pnounpCount.append(1)
    for x in truepNounsp:
        i = truepNounsp.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'NNPS', locations[loc], pnounpCount[i]))
    adjectCount = []
    trueadject = []
    for x in database.adjective:
        if x in trueadject:
            a = trueadject.index(x)
            adjectCount[a] += 1
        else:
            trueadject.append(x)
            a = trueadject.index(x)
            adjectCount.append(1)
    for x in trueadject:
        i = trueadject.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'JJ', locations[loc], adjectCount[i]))
    verbCount = []
    trueVerb = []
    for x in database.verbs:
        if x in trueVerb:
            a = trueVerb.index(x)
            verbCount[a] += 1
        else:
            trueVerb.append(x)
            a = trueVerb.index(x)
            verbCount.append(1)
    for x in trueVerb:
        i = trueVerb.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'VB', locations[loc], verbCount[i]))
    verbpCount = []
    trueVerbp = []
    for x in database.verbpast:
        if x in trueVerbp:
            a = trueVerbp.index(x)
            verbpCount[a] += 1
        else:
            trueVerbp.append(x)
            a = trueVerbp.index(x)
            verbpCount.append(1)
    for x in trueVerbp:
        i = trueVerbp.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'VBD', locations[loc], verbpCount[i]))
    verb3pCount = []
    trueVerb3p = []
    for x in database.verb3person:
        if x in trueVerb3p:
            a = trueVerb3p.index(x)
            verb3pCount[a] += 1
        else:
            trueVerb3p.append(x)
            a = trueVerb3p.index(x)
            verb3pCount.append(1)
    for x in trueVerb3p:
        i = trueVerb3p.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'VBZ', locations[loc], verb3pCount[i]))
    adverbCount = []
    trueAdverb = []
    for x in database.adverb:
        if x in trueAdverb:
            a = trueAdverb.index(x)
            adverbCount[a] += 1
        else:
            trueAdverb.append(x)
            a = trueAdverb.index(x)
            adverbCount.append(1)
    for x in trueAdverb:
        i = trueAdverb.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'RB', locations[loc], adverbCount[i]))
    uselessCount = []
    trueUseless = []
    for x in database.useless:
        if x in trueUseless:
            a = trueUseless.index(x)
            uselessCount[a] += 1
        else:
            trueUseless.append(x)
            a = trueUseless.index(x)
            uselessCount.append(1)
    for x in trueUseless:
        i = trueUseless.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'PU', locations[loc], uselessCount[i]))
    uuWCount = []
    trueuuW = []
    for x in database.unUsedWords:
        if x in trueuuW:
            a = trueuuW.index(x)
            uuWCount[a] += 1
        else:
            trueuuW.append(x)
            a = trueuuW.index(x)
            uuWCount.append(1)
    for x in trueuuW:
        i = trueuuW.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'US', locations[loc], uuWCount[i]))


def pop2():
    database.cursor.execute("INSERT INTO monsters VALUES ('Knight','Castle','Old Man Jenkins','Picture')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Vampire' , 'Castle' , 'Andrew the Tour', 'Vampire Make Up and fake blood')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Shadow' , 'Castle' , 'Frank the Janitor' , 'Black paint')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Ghost Pirate','Beach','Bill the Lifeguard','Pirate Costume')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Seaweed Monster','Beach','Old Fisherman Joe','Seaweed')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Shark','Beach','The Mayor','Shark fins')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Cowboy Ghost','Ghost Town','Jerry the Businessman ','Cowboy hat')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Miner Ghost','Ghost Town','Gold Hunter Phil','Dusty shoes')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Headless Horse Man','Ghost Town','Envirnmentalist Paddy','Drawing of rig to appear headless')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Francinstein','Haunted House','Sir Godfree','Green paint')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Zombie','Haunted House','The Waiter','Zombie Make Up and fake boy parts')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Ghost','Haunted House','Jimmy','Glow in the dark paint on cloths')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Ape Man','Jungle','Explorer Fred','Ape Costume')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Animal Ghosts','Jungle','Environmentalist Jennie','Scratch Marks')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Pterodactyl','Jungle','Tour Guide Bill','Book on flight')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Clown Ghost','Carnival','Ring Master','Old Clown Costumes')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Zombie','Carnival','Blind Knife Thrower','Eye tests saying he is not blind')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Animals','Carnival','Worlds Strongest Man','Scratch marks')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Ghost Car','Highway','Old Town Mayor','Car ownership documents')")
    database.cursor.execute("INSERT INTO monsters VALUES ('White Lady Ghost','Highway','Miss Anderson','White Dress')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Aliens','Highway','Conspiracy Tom','Fake Space ship blueprint')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Mummy','Pyramid','Museum Curator Petterson ','Bandages')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Sand Man','Pyramid','Ramesh the Tour Guide','Sand')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Sphynx','Pyramid','Tour Guide Bob','scratch marks')")
    database.cursor.execute("INSERT INTO characters VALUES ('Scooby Doo','Scooby Dooby Doo')")
    database.cursor.execute("INSERT INTO characters VALUES ('Shaggy','Zoinks!')")
    database.cursor.execute("INSERT INTO characters VALUES ('Fred','Lets Split up and look for clues')")
    database.cursor.execute("INSERT INTO characters VALUES ('Velma','My glasses. I cant find my glasses')")
    database.cursor.execute("INSERT INTO characters VALUES ('Daphne','Do you want a Scooby Snack')")
    database.cursor.execute("INSERT INTO location VALUES ('Castle','Stormy')")
    database.cursor.execute("INSERT INTO location VALUES ('Castle','Raining')")
    database.cursor.execute("INSERT INTO location VALUES ('Castle','Misty')")
    database.cursor.execute("INSERT INTO location VALUES ('Castle','Dark')")
    database.cursor.execute("INSERT INTO location VALUES ('Beach','Sunny')")
    database.cursor.execute("INSERT INTO location VALUES ('Beach','Misty')")
    database.cursor.execute("INSERT INTO location VALUES ('Ghost Town','Cloudy')")
    database.cursor.execute("INSERT INTO location VALUES ('Ghost TOwn','Foggy')")
    database.cursor.execute("INSERT INTO location VALUES ('Haunted House','Stormy')")
    database.cursor.execute("INSERT INTO location VALUES ('Haunted House','Misty')")
    database.cursor.execute("INSERT INTO location VALUES ('Jungle','Sunny')")
    database.cursor.execute("INSERT INTO location VALUES ('Jungle','Raining')")
    database.cursor.execute("INSERT INTO location VALUES ('Carnival','Dark')")
    database.cursor.execute("INSERT INTO location VALUES ('Carnival','Cloudy')")
    database.cursor.execute("INSERT INTO location VALUES ('Carnival','Overcast')")
    database.cursor.execute("INSERT INTO location VALUES ('Highway','Overcast')")
    database.cursor.execute("INSERT INTO location VALUES ('Highway','Sunny')")
    database.cursor.execute("INSERT INTO location VALUES ('Pyramid','Overcast')")
    database.cursor.execute("INSERT INTO location VALUES ('Pyramid','Sunny')")
    database.cursor.execute("INSERT INTO location VALUES ('Pyramid','Raining')")
<|reserved_special_token_1|>
import database
import nltk


def pop(i):
    loc = i
    sentencesTrial = []
    File = open('words.txt')
    lines = File.read()
    sentences = nltk.sent_tokenize(lines)
    locations = ['Castle', 'Beach', 'Beach', 'Ghost Town', 'Ghost Town',
        'Haunted House', 'Jungle', 'Carnival', 'Ghost Town', 'Highway',
        'Castle', 'Pyramid', 'Beach', 'Beach', 'Carnival', 'Highway',
        'Castle', 'Jungle']
    for sentence in sentences:
        for word, pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))):
            if pos == 'NN':
                database.nouns.append(word.lower())
                sentencesTrial.append('NN')
            elif pos == 'NNS':
                database.nounsplural.append(word.lower())
                sentencesTrial.append('NNS')
            elif pos == 'NNP':
                database.propernounS.append(word.lower())
                sentencesTrial.append('NNP')
            elif pos == 'NNPS':
                database.propernounP.append(word.lower())
                sentencesTrial.append('NNPS')
            elif pos == 'JJ':
                database.adjective.append(word.lower())
                sentencesTrial.append('JJ')
            elif pos == 'VB' or pos == 'VBG' or pos == 'VBN':
                database.verbs.append(word.lower())
                sentencesTrial.append('VB')
            elif pos == 'VBD':
                database.verbpast.append(word.lower())
                sentencesTrial.append('VBD')
            elif pos == 'VBZ' or pos == 'VBP':
                database.verb3person.append(word.lower())
                sentencesTrial.append('VBZ')
            elif pos == 'RB' or pos == 'RBR' or pos == 'RBS':
                database.adverb.append(word)
                sentencesTrial.append('RB'.lower())
            elif word == ',':
                database.useless.append(word)
                sentencesTrial.append(',')
                break
            elif word == '.':
                database.useless.append(word)
                sentencesTrial.append('.')
                break
            else:
                database.unUsedWords.append(word.lower())
                break
    nounCount = []
    trueNouns = []
    for x in database.nouns:
        if x in trueNouns:
            a = trueNouns.index(x)
            nounCount[a] += 1
        else:
            trueNouns.append(x)
            a = trueNouns.index(x)
            nounCount.append(1)
    for x in trueNouns:
        i = trueNouns.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'NN', locations[loc], nounCount[i]))
    nounpCount = []
    trueNounsp = []
    for x in database.nounsplural:
        if x in trueNounsp:
            a = trueNounsp.index(x)
            nounpCount[a] += 1
        else:
            trueNounsp.append(x)
            a = trueNounsp.index(x)
            nounpCount.append(1)
    for x in trueNounsp:
        i = trueNounsp.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'NNS', locations[loc], nounpCount[i]))
    pnounCount = []
    truepNouns = []
    for x in database.propernounS:
        if x in truepNouns:
            a = truepNouns.index(x)
            pnounCount[a] += 1
        else:
            truepNouns.append(x)
            a = truepNouns.index(x)
            pnounCount.append(1)
    for x in truepNouns:
        i = truepNouns.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'NNP', locations[loc], pnounCount[i]))
    pnounpCount = []
    truepNounsp = []
    for x in database.propernounP:
        if x in truepNounsp:
            a = truepNounsp.index(x)
            pnounpCount[a] += 1
        else:
            truepNounsp.append(x)
            a = truepNounsp.index(x)
            pnounpCount.append(1)
    for x in truepNounsp:
        i = truepNounsp.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'NNPS', locations[loc], pnounpCount[i]))
    adjectCount = []
    trueadject = []
    for x in database.adjective:
        if x in trueadject:
            a = trueadject.index(x)
            adjectCount[a] += 1
        else:
            trueadject.append(x)
            a = trueadject.index(x)
            adjectCount.append(1)
    for x in trueadject:
        i = trueadject.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'JJ', locations[loc], adjectCount[i]))
    verbCount = []
    trueVerb = []
    for x in database.verbs:
        if x in trueVerb:
            a = trueVerb.index(x)
            verbCount[a] += 1
        else:
            trueVerb.append(x)
            a = trueVerb.index(x)
            verbCount.append(1)
    for x in trueVerb:
        i = trueVerb.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'VB', locations[loc], verbCount[i]))
    verbpCount = []
    trueVerbp = []
    for x in database.verbpast:
        if x in trueVerbp:
            a = trueVerbp.index(x)
            verbpCount[a] += 1
        else:
            trueVerbp.append(x)
            a = trueVerbp.index(x)
            verbpCount.append(1)
    for x in trueVerbp:
        i = trueVerbp.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'VBD', locations[loc], verbpCount[i]))
    verb3pCount = []
    trueVerb3p = []
    for x in database.verb3person:
        if x in trueVerb3p:
            a = trueVerb3p.index(x)
            verb3pCount[a] += 1
        else:
            trueVerb3p.append(x)
            a = trueVerb3p.index(x)
            verb3pCount.append(1)
    for x in trueVerb3p:
        i = trueVerb3p.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'VBZ', locations[loc], verb3pCount[i]))
    adverbCount = []
    trueAdverb = []
    for x in database.adverb:
        if x in trueAdverb:
            a = trueAdverb.index(x)
            adverbCount[a] += 1
        else:
            trueAdverb.append(x)
            a = trueAdverb.index(x)
            adverbCount.append(1)
    for x in trueAdverb:
        i = trueAdverb.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'RB', locations[loc], adverbCount[i]))
    uselessCount = []
    trueUseless = []
    for x in database.useless:
        if x in trueUseless:
            a = trueUseless.index(x)
            uselessCount[a] += 1
        else:
            trueUseless.append(x)
            a = trueUseless.index(x)
            uselessCount.append(1)
    for x in trueUseless:
        i = trueUseless.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'PU', locations[loc], uselessCount[i]))
    uuWCount = []
    trueuuW = []
    for x in database.unUsedWords:
        if x in trueuuW:
            a = trueuuW.index(x)
            uuWCount[a] += 1
        else:
            trueuuW.append(x)
            a = trueuuW.index(x)
            uuWCount.append(1)
    for x in trueuuW:
        i = trueuuW.index(x)
        database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x, 'US', locations[loc], uuWCount[i]))


def pop2():
    database.cursor.execute("INSERT INTO monsters VALUES ('Knight','Castle','Old Man Jenkins','Picture')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Vampire' , 'Castle' , 'Andrew the Tour', 'Vampire Make Up and fake blood')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Shadow' , 'Castle' , 'Frank the Janitor' , 'Black paint')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Ghost Pirate','Beach','Bill the Lifeguard','Pirate Costume')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Seaweed Monster','Beach','Old Fisherman Joe','Seaweed')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Shark','Beach','The Mayor','Shark fins')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Cowboy Ghost','Ghost Town','Jerry the Businessman ','Cowboy hat')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Miner Ghost','Ghost Town','Gold Hunter Phil','Dusty shoes')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Headless Horse Man','Ghost Town','Envirnmentalist Paddy','Drawing of rig to appear headless')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Francinstein','Haunted House','Sir Godfree','Green paint')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Zombie','Haunted House','The Waiter','Zombie Make Up and fake boy parts')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Ghost','Haunted House','Jimmy','Glow in the dark paint on cloths')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Ape Man','Jungle','Explorer Fred','Ape Costume')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Animal Ghosts','Jungle','Environmentalist Jennie','Scratch Marks')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Pterodactyl','Jungle','Tour Guide Bill','Book on flight')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Clown Ghost','Carnival','Ring Master','Old Clown Costumes')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Zombie','Carnival','Blind Knife Thrower','Eye tests saying he is not blind')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Animals','Carnival','Worlds Strongest Man','Scratch marks')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Ghost Car','Highway','Old Town Mayor','Car ownership documents')")
    database.cursor.execute("INSERT INTO monsters VALUES ('White Lady Ghost','Highway','Miss Anderson','White Dress')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Aliens','Highway','Conspiracy Tom','Fake Space ship blueprint')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Mummy','Pyramid','Museum Curator Petterson ','Bandages')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Sand Man','Pyramid','Ramesh the Tour Guide','Sand')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Sphynx','Pyramid','Tour Guide Bob','scratch marks')")
    database.cursor.execute("INSERT INTO characters VALUES ('Scooby Doo','Scooby Dooby Doo')")
    database.cursor.execute("INSERT INTO characters VALUES ('Shaggy','Zoinks!')")
    database.cursor.execute("INSERT INTO characters VALUES ('Fred','Lets Split up and look for clues')")
    database.cursor.execute("INSERT INTO characters VALUES ('Velma','My glasses. I cant find my glasses')")
    database.cursor.execute("INSERT INTO characters VALUES ('Daphne','Do you want a Scooby Snack')")
    database.cursor.execute("INSERT INTO location VALUES ('Castle','Stormy')")
    database.cursor.execute("INSERT INTO location VALUES ('Castle','Raining')")
    database.cursor.execute("INSERT INTO location VALUES ('Castle','Misty')")
    database.cursor.execute("INSERT INTO location VALUES ('Castle','Dark')")
    database.cursor.execute("INSERT INTO location VALUES ('Beach','Sunny')")
    database.cursor.execute("INSERT INTO location VALUES ('Beach','Misty')")
    database.cursor.execute("INSERT INTO location VALUES ('Ghost Town','Cloudy')")
    database.cursor.execute("INSERT INTO location VALUES ('Ghost TOwn','Foggy')")
    database.cursor.execute("INSERT INTO location VALUES ('Haunted House','Stormy')")
    database.cursor.execute("INSERT INTO location VALUES ('Haunted House','Misty')")
    database.cursor.execute("INSERT INTO location VALUES ('Jungle','Sunny')")
    database.cursor.execute("INSERT INTO location VALUES ('Jungle','Raining')")
    database.cursor.execute("INSERT INTO location VALUES ('Carnival','Dark')")
    database.cursor.execute("INSERT INTO location VALUES ('Carnival','Cloudy')")
    database.cursor.execute("INSERT INTO location VALUES ('Carnival','Overcast')")
    database.cursor.execute("INSERT INTO location VALUES ('Highway','Overcast')")
    database.cursor.execute("INSERT INTO location VALUES ('Highway','Sunny')")
    database.cursor.execute("INSERT INTO location VALUES ('Pyramid','Overcast')")
    database.cursor.execute("INSERT INTO location VALUES ('Pyramid','Sunny')")
    database.cursor.execute("INSERT INTO location VALUES ('Pyramid','Raining')")
<|reserved_special_token_1|>
import database
import nltk

def pop(i): # pupulate the words table
    loc = i
    sentencesTrial = []
    File = open('words.txt')
    lines = File.read()
    sentences = nltk.sent_tokenize(lines)
    locations = ["Castle","Beach","Beach","Ghost Town","Ghost Town","Haunted House","Jungle","Carnival", "Ghost Town", "Highway", "Castle", "Pyramid","Beach","Beach","Carnival", "Highway", "Castle" ,"Jungle" ]
    for sentence in sentences:
        for word, pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))):
            if(pos == 'NN'):
                database.nouns.append(word.lower())
                sentencesTrial.append("NN")
            elif (pos == 'NNS'):
                database.nounsplural.append(word.lower())
                sentencesTrial.append("NNS")
            elif (pos == 'NNP'):
                database.propernounS.append(word.lower())
                sentencesTrial.append("NNP")
            elif (pos == 'NNPS'):
                database.propernounP.append(word.lower())
                sentencesTrial.append("NNPS")
            elif (pos == 'JJ'):
                database.adjective.append(word.lower())
                sentencesTrial.append("JJ")
            elif (pos == 'VB' or pos == 'VBG' or pos == 'VBN'):
                database.verbs.append(word.lower())
                sentencesTrial.append("VB")
            elif (pos == 'VBD'):
                database.verbpast.append(word.lower())
                sentencesTrial.append("VBD")
            elif (pos == 'VBZ' or pos == 'VBP'):
                database.verb3person.append(word.lower())
                sentencesTrial.append("VBZ")
            elif (pos == 'RB' or pos == 'RBR' or pos == 'RBS'):
                database.adverb.append(word)
                sentencesTrial.append("RB".lower())
            else:
                if(word == ","):
                    database.useless.append(word)
                    sentencesTrial.append(",")
                    break
                elif(word == "."):
                    database.useless.append(word)
                    sentencesTrial.append(".")
                    break
                else:
                    database.unUsedWords.append(word.lower())
                    break
    nounCount = []
    trueNouns = []
    for x in database.nouns:
        if x in trueNouns:
            a = trueNouns.index(x)
            nounCount[a] +=1
        else:
            trueNouns.append(x)
            a = trueNouns.index(x)
            nounCount.append(1)
    for x in trueNouns:
        i = trueNouns.index(x)
        database.cursor.execute("INSERT INTO words VALUES (?, ?, ?, ?)", (x,'NN',locations[loc],nounCount[i]))
    nounpCount = []
    trueNounsp = []
    for x in database.nounsplural:
        if x in trueNounsp:
            a = trueNounsp.index(x)
            nounpCount[a] += 1
        else:
            trueNounsp.append(x)
            a = trueNounsp.index(x)
            nounpCount.append(1)
    for x in trueNounsp:
        i = trueNounsp.index(x)
        database.cursor.execute("INSERT INTO words VALUES (?, ?, ?, ?)", (x, 'NNS', locations[loc], nounpCount[i]))
    pnounCount = []
    truepNouns = []
    for x in database.propernounS:
        if x in truepNouns:
            a = truepNouns.index(x)
            pnounCount[a] += 1
        else:
            truepNouns.append(x)
            a = truepNouns.index(x)
            pnounCount.append(1)
    for x in truepNouns:
        i = truepNouns.index(x)
        database.cursor.execute("INSERT INTO words VALUES (?, ?, ?, ?)", (x, 'NNP', locations[loc], pnounCount[i]))
    pnounpCount = []
    truepNounsp = []
    for x in database.propernounP:
        if x in truepNounsp:
            a = truepNounsp.index(x)
            pnounpCount[a] += 1
        else:
            truepNounsp.append(x)
            a = truepNounsp.index(x)
            pnounpCount.append(1)
    for x in truepNounsp:
        i = truepNounsp.index(x)
        database.cursor.execute("INSERT INTO words VALUES (?, ?, ?, ?)", (x, 'NNPS', locations[loc], pnounpCount[i]))
    adjectCount = []
    trueadject = []
    for x in database.adjective:
        if x in trueadject:
            a = trueadject.index(x)
            adjectCount[a] += 1
        else:
            trueadject.append(x)
            a = trueadject.index(x)
            adjectCount.append(1)
    for x in trueadject:
        i = trueadject.index(x)
        database.cursor.execute("INSERT INTO words VALUES (?, ?, ?, ?)", (x, 'JJ', locations[loc], adjectCount[i]))
    verbCount = []
    trueVerb = []
    for x in database.verbs:
        if x in trueVerb:
            a = trueVerb.index(x)
            verbCount[a] += 1
        else:
            trueVerb.append(x)
            a = trueVerb.index(x)
            verbCount.append(1)
    for x in trueVerb:
        i = trueVerb.index(x)
        database.cursor.execute("INSERT INTO words VALUES (?, ?, ?, ?)", (x, 'VB', locations[loc], verbCount[i]))
    verbpCount = []
    trueVerbp = []
    for x in database.verbpast:
        if x in trueVerbp:
            a = trueVerbp.index(x)
            verbpCount[a] += 1
        else:
            trueVerbp.append(x)
            a = trueVerbp.index(x)
            verbpCount.append(1)
    for x in trueVerbp:
        i = trueVerbp.index(x)
        database.cursor.execute("INSERT INTO words VALUES (?, ?, ?, ?)", (x, 'VBD', locations[loc], verbpCount[i]))
    verb3pCount = []
    trueVerb3p = []
    for x in database.verb3person:
        if x in trueVerb3p:
            a = trueVerb3p.index(x)
            verb3pCount[a] += 1
        else:
            trueVerb3p.append(x)
            a = trueVerb3p.index(x)
            verb3pCount.append(1)
    for x in trueVerb3p:
        i = trueVerb3p.index(x)
        database.cursor.execute("INSERT INTO words VALUES (?, ?, ?, ?)", (x, 'VBZ', locations[loc], verb3pCount[i]))
    adverbCount = []
    trueAdverb = []
    for x in database.adverb:
        if x in trueAdverb:
            a = trueAdverb.index(x)
            adverbCount[a] += 1
        else:
            trueAdverb.append(x)
            a = trueAdverb.index(x)
            adverbCount.append(1)
    for x in trueAdverb:
        i = trueAdverb.index(x)
        database.cursor.execute("INSERT INTO words VALUES (?, ?, ?, ?)", (x, 'RB', locations[loc], adverbCount[i]))
    uselessCount = []
    trueUseless = []
    for x in database.useless:
        if x in trueUseless:
            a = trueUseless.index(x)
            uselessCount[a] += 1
        else:
            trueUseless.append(x)
            a = trueUseless.index(x)
            uselessCount.append(1)
    for x in trueUseless:
        i = trueUseless.index(x)
        database.cursor.execute("INSERT INTO words VALUES (?, ?, ?, ?)", (x, 'PU', locations[loc], uselessCount[i]))
    uuWCount = []
    trueuuW = []
    for x in database.unUsedWords:
        if x in trueuuW:
            a = trueuuW.index(x)
            uuWCount[a] += 1
        else:
            trueuuW.append(x)
            a = trueuuW.index(x)
            uuWCount.append(1)
    for x in trueuuW:
        i = trueuuW.index(x)
        database.cursor.execute("INSERT INTO words VALUES (?, ?, ?, ?)", (x, 'US', locations[loc], uuWCount[i]))

def pop2(): #populate the monster and characters table
    ####populating the monsters
    database.cursor.execute("INSERT INTO monsters VALUES ('Knight','Castle','Old Man Jenkins','Picture')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Vampire' , 'Castle' , 'Andrew the Tour', 'Vampire Make Up and fake blood')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Shadow' , 'Castle' , 'Frank the Janitor' , 'Black paint')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Ghost Pirate','Beach','Bill the Lifeguard','Pirate Costume')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Seaweed Monster','Beach','Old Fisherman Joe','Seaweed')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Shark','Beach','The Mayor','Shark fins')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Cowboy Ghost','Ghost Town','Jerry the Businessman ','Cowboy hat')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Miner Ghost','Ghost Town','Gold Hunter Phil','Dusty shoes')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Headless Horse Man','Ghost Town','Envirnmentalist Paddy','Drawing of rig to appear headless')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Francinstein','Haunted House','Sir Godfree','Green paint')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Zombie','Haunted House','The Waiter','Zombie Make Up and fake boy parts')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Ghost','Haunted House','Jimmy','Glow in the dark paint on cloths')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Ape Man','Jungle','Explorer Fred','Ape Costume')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Animal Ghosts','Jungle','Environmentalist Jennie','Scratch Marks')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Pterodactyl','Jungle','Tour Guide Bill','Book on flight')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Clown Ghost','Carnival','Ring Master','Old Clown Costumes')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Zombie','Carnival','Blind Knife Thrower','Eye tests saying he is not blind')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Animals','Carnival','Worlds Strongest Man','Scratch marks')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Ghost Car','Highway','Old Town Mayor','Car ownership documents')")
    database.cursor.execute("INSERT INTO monsters VALUES ('White Lady Ghost','Highway','Miss Anderson','White Dress')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Aliens','Highway','Conspiracy Tom','Fake Space ship blueprint')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Mummy','Pyramid','Museum Curator Petterson ','Bandages')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Sand Man','Pyramid','Ramesh the Tour Guide','Sand')")
    database.cursor.execute("INSERT INTO monsters VALUES ('Sphynx','Pyramid','Tour Guide Bob','scratch marks')")
    ####populating the characters
    database.cursor.execute("INSERT INTO characters VALUES ('Scooby Doo','Scooby Dooby Doo')")
    database.cursor.execute("INSERT INTO characters VALUES ('Shaggy','Zoinks!')")
    database.cursor.execute("INSERT INTO characters VALUES ('Fred','Lets Split up and look for clues')")
    database.cursor.execute("INSERT INTO characters VALUES ('Velma','My glasses. I cant find my glasses')")
    database.cursor.execute("INSERT INTO characters VALUES ('Daphne','Do you want a Scooby Snack')")
    database.cursor.execute("INSERT INTO location VALUES ('Castle','Stormy')")
    database.cursor.execute("INSERT INTO location VALUES ('Castle','Raining')")
    database.cursor.execute("INSERT INTO location VALUES ('Castle','Misty')")
    database.cursor.execute("INSERT INTO location VALUES ('Castle','Dark')")
    database.cursor.execute("INSERT INTO location VALUES ('Beach','Sunny')")
    database.cursor.execute("INSERT INTO location VALUES ('Beach','Misty')")
    database.cursor.execute("INSERT INTO location VALUES ('Ghost Town','Cloudy')")
    database.cursor.execute("INSERT INTO location VALUES ('Ghost TOwn','Foggy')")
    database.cursor.execute("INSERT INTO location VALUES ('Haunted House','Stormy')")
    database.cursor.execute("INSERT INTO location VALUES ('Haunted House','Misty')")
    database.cursor.execute("INSERT INTO location VALUES ('Jungle','Sunny')")
    database.cursor.execute("INSERT INTO location VALUES ('Jungle','Raining')")
    database.cursor.execute("INSERT INTO location VALUES ('Carnival','Dark')")
    database.cursor.execute("INSERT INTO location VALUES ('Carnival','Cloudy')")
    database.cursor.execute("INSERT INTO location VALUES ('Carnival','Overcast')")
    database.cursor.execute("INSERT INTO location VALUES ('Highway','Overcast')")
    database.cursor.execute("INSERT INTO location VALUES ('Highway','Sunny')")
    database.cursor.execute("INSERT INTO location VALUES ('Pyramid','Overcast')")
    database.cursor.execute("INSERT INTO location VALUES ('Pyramid','Sunny')")
    database.cursor.execute("INSERT INTO location VALUES ('Pyramid','Raining')")
flexible
{ "blob_id": "e7ac5c1010330aec81ce505fd7f52ccdeddb76de", "index": 8923, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef pop(i):\n loc = i\n sentencesTrial = []\n File = open('words.txt')\n lines = File.read()\n sentences = nltk.sent_tokenize(lines)\n locations = ['Castle', 'Beach', 'Beach', 'Ghost Town', 'Ghost Town',\n 'Haunted House', 'Jungle', 'Carnival', 'Ghost Town', 'Highway',\n 'Castle', 'Pyramid', 'Beach', 'Beach', 'Carnival', 'Highway',\n 'Castle', 'Jungle']\n for sentence in sentences:\n for word, pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))):\n if pos == 'NN':\n database.nouns.append(word.lower())\n sentencesTrial.append('NN')\n elif pos == 'NNS':\n database.nounsplural.append(word.lower())\n sentencesTrial.append('NNS')\n elif pos == 'NNP':\n database.propernounS.append(word.lower())\n sentencesTrial.append('NNP')\n elif pos == 'NNPS':\n database.propernounP.append(word.lower())\n sentencesTrial.append('NNPS')\n elif pos == 'JJ':\n database.adjective.append(word.lower())\n sentencesTrial.append('JJ')\n elif pos == 'VB' or pos == 'VBG' or pos == 'VBN':\n database.verbs.append(word.lower())\n sentencesTrial.append('VB')\n elif pos == 'VBD':\n database.verbpast.append(word.lower())\n sentencesTrial.append('VBD')\n elif pos == 'VBZ' or pos == 'VBP':\n database.verb3person.append(word.lower())\n sentencesTrial.append('VBZ')\n elif pos == 'RB' or pos == 'RBR' or pos == 'RBS':\n database.adverb.append(word)\n sentencesTrial.append('RB'.lower())\n elif word == ',':\n database.useless.append(word)\n sentencesTrial.append(',')\n break\n elif word == '.':\n database.useless.append(word)\n sentencesTrial.append('.')\n break\n else:\n database.unUsedWords.append(word.lower())\n break\n nounCount = []\n trueNouns = []\n for x in database.nouns:\n if x in trueNouns:\n a = trueNouns.index(x)\n nounCount[a] += 1\n else:\n trueNouns.append(x)\n a = trueNouns.index(x)\n nounCount.append(1)\n for x in trueNouns:\n i = trueNouns.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NN', locations[loc], nounCount[i]))\n nounpCount = []\n trueNounsp = []\n for x in database.nounsplural:\n if x in trueNounsp:\n a = trueNounsp.index(x)\n nounpCount[a] += 1\n else:\n trueNounsp.append(x)\n a = trueNounsp.index(x)\n nounpCount.append(1)\n for x in trueNounsp:\n i = trueNounsp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNS', locations[loc], nounpCount[i]))\n pnounCount = []\n truepNouns = []\n for x in database.propernounS:\n if x in truepNouns:\n a = truepNouns.index(x)\n pnounCount[a] += 1\n else:\n truepNouns.append(x)\n a = truepNouns.index(x)\n pnounCount.append(1)\n for x in truepNouns:\n i = truepNouns.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNP', locations[loc], pnounCount[i]))\n pnounpCount = []\n truepNounsp = []\n for x in database.propernounP:\n if x in truepNounsp:\n a = truepNounsp.index(x)\n pnounpCount[a] += 1\n else:\n truepNounsp.append(x)\n a = truepNounsp.index(x)\n pnounpCount.append(1)\n for x in truepNounsp:\n i = truepNounsp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNPS', locations[loc], pnounpCount[i]))\n adjectCount = []\n trueadject = []\n for x in database.adjective:\n if x in trueadject:\n a = trueadject.index(x)\n adjectCount[a] += 1\n else:\n trueadject.append(x)\n a = trueadject.index(x)\n adjectCount.append(1)\n for x in trueadject:\n i = trueadject.index(x)\n database.cursor.execute('INSERT INTO 
words VALUES (?, ?, ?, ?)', (x,\n 'JJ', locations[loc], adjectCount[i]))\n verbCount = []\n trueVerb = []\n for x in database.verbs:\n if x in trueVerb:\n a = trueVerb.index(x)\n verbCount[a] += 1\n else:\n trueVerb.append(x)\n a = trueVerb.index(x)\n verbCount.append(1)\n for x in trueVerb:\n i = trueVerb.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VB', locations[loc], verbCount[i]))\n verbpCount = []\n trueVerbp = []\n for x in database.verbpast:\n if x in trueVerbp:\n a = trueVerbp.index(x)\n verbpCount[a] += 1\n else:\n trueVerbp.append(x)\n a = trueVerbp.index(x)\n verbpCount.append(1)\n for x in trueVerbp:\n i = trueVerbp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VBD', locations[loc], verbpCount[i]))\n verb3pCount = []\n trueVerb3p = []\n for x in database.verb3person:\n if x in trueVerb3p:\n a = trueVerb3p.index(x)\n verb3pCount[a] += 1\n else:\n trueVerb3p.append(x)\n a = trueVerb3p.index(x)\n verb3pCount.append(1)\n for x in trueVerb3p:\n i = trueVerb3p.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VBZ', locations[loc], verb3pCount[i]))\n adverbCount = []\n trueAdverb = []\n for x in database.adverb:\n if x in trueAdverb:\n a = trueAdverb.index(x)\n adverbCount[a] += 1\n else:\n trueAdverb.append(x)\n a = trueAdverb.index(x)\n adverbCount.append(1)\n for x in trueAdverb:\n i = trueAdverb.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'RB', locations[loc], adverbCount[i]))\n uselessCount = []\n trueUseless = []\n for x in database.useless:\n if x in trueUseless:\n a = trueUseless.index(x)\n uselessCount[a] += 1\n else:\n trueUseless.append(x)\n a = trueUseless.index(x)\n uselessCount.append(1)\n for x in trueUseless:\n i = trueUseless.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'PU', locations[loc], uselessCount[i]))\n uuWCount = []\n trueuuW = []\n for x in database.unUsedWords:\n if x in trueuuW:\n a = trueuuW.index(x)\n uuWCount[a] += 1\n else:\n trueuuW.append(x)\n a = trueuuW.index(x)\n uuWCount.append(1)\n for x in trueuuW:\n i = trueuuW.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'US', locations[loc], uuWCount[i]))\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef pop(i):\n loc = i\n sentencesTrial = []\n File = open('words.txt')\n lines = File.read()\n sentences = nltk.sent_tokenize(lines)\n locations = ['Castle', 'Beach', 'Beach', 'Ghost Town', 'Ghost Town',\n 'Haunted House', 'Jungle', 'Carnival', 'Ghost Town', 'Highway',\n 'Castle', 'Pyramid', 'Beach', 'Beach', 'Carnival', 'Highway',\n 'Castle', 'Jungle']\n for sentence in sentences:\n for word, pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))):\n if pos == 'NN':\n database.nouns.append(word.lower())\n sentencesTrial.append('NN')\n elif pos == 'NNS':\n database.nounsplural.append(word.lower())\n sentencesTrial.append('NNS')\n elif pos == 'NNP':\n database.propernounS.append(word.lower())\n sentencesTrial.append('NNP')\n elif pos == 'NNPS':\n database.propernounP.append(word.lower())\n sentencesTrial.append('NNPS')\n elif pos == 'JJ':\n database.adjective.append(word.lower())\n sentencesTrial.append('JJ')\n elif pos == 'VB' or pos == 'VBG' or pos == 'VBN':\n database.verbs.append(word.lower())\n sentencesTrial.append('VB')\n elif pos == 'VBD':\n database.verbpast.append(word.lower())\n sentencesTrial.append('VBD')\n elif pos == 'VBZ' or pos == 'VBP':\n 
database.verb3person.append(word.lower())\n sentencesTrial.append('VBZ')\n elif pos == 'RB' or pos == 'RBR' or pos == 'RBS':\n database.adverb.append(word)\n sentencesTrial.append('RB'.lower())\n elif word == ',':\n database.useless.append(word)\n sentencesTrial.append(',')\n break\n elif word == '.':\n database.useless.append(word)\n sentencesTrial.append('.')\n break\n else:\n database.unUsedWords.append(word.lower())\n break\n nounCount = []\n trueNouns = []\n for x in database.nouns:\n if x in trueNouns:\n a = trueNouns.index(x)\n nounCount[a] += 1\n else:\n trueNouns.append(x)\n a = trueNouns.index(x)\n nounCount.append(1)\n for x in trueNouns:\n i = trueNouns.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NN', locations[loc], nounCount[i]))\n nounpCount = []\n trueNounsp = []\n for x in database.nounsplural:\n if x in trueNounsp:\n a = trueNounsp.index(x)\n nounpCount[a] += 1\n else:\n trueNounsp.append(x)\n a = trueNounsp.index(x)\n nounpCount.append(1)\n for x in trueNounsp:\n i = trueNounsp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNS', locations[loc], nounpCount[i]))\n pnounCount = []\n truepNouns = []\n for x in database.propernounS:\n if x in truepNouns:\n a = truepNouns.index(x)\n pnounCount[a] += 1\n else:\n truepNouns.append(x)\n a = truepNouns.index(x)\n pnounCount.append(1)\n for x in truepNouns:\n i = truepNouns.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNP', locations[loc], pnounCount[i]))\n pnounpCount = []\n truepNounsp = []\n for x in database.propernounP:\n if x in truepNounsp:\n a = truepNounsp.index(x)\n pnounpCount[a] += 1\n else:\n truepNounsp.append(x)\n a = truepNounsp.index(x)\n pnounpCount.append(1)\n for x in truepNounsp:\n i = truepNounsp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNPS', locations[loc], pnounpCount[i]))\n adjectCount = []\n trueadject = []\n for x in database.adjective:\n if x in trueadject:\n a = trueadject.index(x)\n adjectCount[a] += 1\n else:\n trueadject.append(x)\n a = trueadject.index(x)\n adjectCount.append(1)\n for x in trueadject:\n i = trueadject.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'JJ', locations[loc], adjectCount[i]))\n verbCount = []\n trueVerb = []\n for x in database.verbs:\n if x in trueVerb:\n a = trueVerb.index(x)\n verbCount[a] += 1\n else:\n trueVerb.append(x)\n a = trueVerb.index(x)\n verbCount.append(1)\n for x in trueVerb:\n i = trueVerb.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VB', locations[loc], verbCount[i]))\n verbpCount = []\n trueVerbp = []\n for x in database.verbpast:\n if x in trueVerbp:\n a = trueVerbp.index(x)\n verbpCount[a] += 1\n else:\n trueVerbp.append(x)\n a = trueVerbp.index(x)\n verbpCount.append(1)\n for x in trueVerbp:\n i = trueVerbp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VBD', locations[loc], verbpCount[i]))\n verb3pCount = []\n trueVerb3p = []\n for x in database.verb3person:\n if x in trueVerb3p:\n a = trueVerb3p.index(x)\n verb3pCount[a] += 1\n else:\n trueVerb3p.append(x)\n a = trueVerb3p.index(x)\n verb3pCount.append(1)\n for x in trueVerb3p:\n i = trueVerb3p.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VBZ', locations[loc], verb3pCount[i]))\n adverbCount = []\n trueAdverb = []\n for x in database.adverb:\n if x in trueAdverb:\n a = trueAdverb.index(x)\n 
adverbCount[a] += 1\n else:\n trueAdverb.append(x)\n a = trueAdverb.index(x)\n adverbCount.append(1)\n for x in trueAdverb:\n i = trueAdverb.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'RB', locations[loc], adverbCount[i]))\n uselessCount = []\n trueUseless = []\n for x in database.useless:\n if x in trueUseless:\n a = trueUseless.index(x)\n uselessCount[a] += 1\n else:\n trueUseless.append(x)\n a = trueUseless.index(x)\n uselessCount.append(1)\n for x in trueUseless:\n i = trueUseless.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'PU', locations[loc], uselessCount[i]))\n uuWCount = []\n trueuuW = []\n for x in database.unUsedWords:\n if x in trueuuW:\n a = trueuuW.index(x)\n uuWCount[a] += 1\n else:\n trueuuW.append(x)\n a = trueuuW.index(x)\n uuWCount.append(1)\n for x in trueuuW:\n i = trueuuW.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'US', locations[loc], uuWCount[i]))\n\n\ndef pop2():\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Knight','Castle','Old Man Jenkins','Picture')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Vampire' , 'Castle' , 'Andrew the Tour', 'Vampire Make Up and fake blood')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Shadow' , 'Castle' , 'Frank the Janitor' , 'Black paint')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ghost Pirate','Beach','Bill the Lifeguard','Pirate Costume')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Seaweed Monster','Beach','Old Fisherman Joe','Seaweed')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Shark','Beach','The Mayor','Shark fins')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Cowboy Ghost','Ghost Town','Jerry the Businessman ','Cowboy hat')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Miner Ghost','Ghost Town','Gold Hunter Phil','Dusty shoes')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Headless Horse Man','Ghost Town','Envirnmentalist Paddy','Drawing of rig to appear headless')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Francinstein','Haunted House','Sir Godfree','Green paint')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Zombie','Haunted House','The Waiter','Zombie Make Up and fake boy parts')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ghost','Haunted House','Jimmy','Glow in the dark paint on cloths')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ape Man','Jungle','Explorer Fred','Ape Costume')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Animal Ghosts','Jungle','Environmentalist Jennie','Scratch Marks')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Pterodactyl','Jungle','Tour Guide Bill','Book on flight')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Clown Ghost','Carnival','Ring Master','Old Clown Costumes')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Zombie','Carnival','Blind Knife Thrower','Eye tests saying he is not blind')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Animals','Carnival','Worlds Strongest Man','Scratch marks')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ghost Car','Highway','Old Town Mayor','Car ownership documents')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('White Lady 
Ghost','Highway','Miss Anderson','White Dress')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Aliens','Highway','Conspiracy Tom','Fake Space ship blueprint')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Mummy','Pyramid','Museum Curator Petterson ','Bandages')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Sand Man','Pyramid','Ramesh the Tour Guide','Sand')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Sphynx','Pyramid','Tour Guide Bob','scratch marks')\"\n )\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Scooby Doo','Scooby Dooby Doo')\")\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Shaggy','Zoinks!')\")\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Fred','Lets Split up and look for clues')\"\n )\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Velma','My glasses. I cant find my glasses')\"\n )\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Daphne','Do you want a Scooby Snack')\"\n )\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Stormy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Raining')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Dark')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Beach','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Beach','Misty')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Ghost Town','Cloudy')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Ghost TOwn','Foggy')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Haunted House','Stormy')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Haunted House','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Jungle','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Jungle','Raining')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Dark')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Cloudy')\"\n )\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Carnival','Overcast')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Highway','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Highway','Sunny')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Pyramid','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Raining')\"\n )\n", "step-4": "import database\nimport nltk\n\n\ndef pop(i):\n loc = i\n sentencesTrial = []\n File = open('words.txt')\n lines = File.read()\n sentences = nltk.sent_tokenize(lines)\n locations = ['Castle', 'Beach', 'Beach', 'Ghost Town', 'Ghost Town',\n 'Haunted House', 'Jungle', 'Carnival', 'Ghost Town', 'Highway',\n 'Castle', 'Pyramid', 'Beach', 'Beach', 'Carnival', 'Highway',\n 'Castle', 'Jungle']\n for sentence in sentences:\n for word, pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))):\n if pos == 'NN':\n database.nouns.append(word.lower())\n sentencesTrial.append('NN')\n elif pos == 'NNS':\n database.nounsplural.append(word.lower())\n sentencesTrial.append('NNS')\n elif pos == 'NNP':\n database.propernounS.append(word.lower())\n sentencesTrial.append('NNP')\n elif pos == 'NNPS':\n database.propernounP.append(word.lower())\n 
sentencesTrial.append('NNPS')\n elif pos == 'JJ':\n database.adjective.append(word.lower())\n sentencesTrial.append('JJ')\n elif pos == 'VB' or pos == 'VBG' or pos == 'VBN':\n database.verbs.append(word.lower())\n sentencesTrial.append('VB')\n elif pos == 'VBD':\n database.verbpast.append(word.lower())\n sentencesTrial.append('VBD')\n elif pos == 'VBZ' or pos == 'VBP':\n database.verb3person.append(word.lower())\n sentencesTrial.append('VBZ')\n elif pos == 'RB' or pos == 'RBR' or pos == 'RBS':\n database.adverb.append(word)\n sentencesTrial.append('RB'.lower())\n elif word == ',':\n database.useless.append(word)\n sentencesTrial.append(',')\n break\n elif word == '.':\n database.useless.append(word)\n sentencesTrial.append('.')\n break\n else:\n database.unUsedWords.append(word.lower())\n break\n nounCount = []\n trueNouns = []\n for x in database.nouns:\n if x in trueNouns:\n a = trueNouns.index(x)\n nounCount[a] += 1\n else:\n trueNouns.append(x)\n a = trueNouns.index(x)\n nounCount.append(1)\n for x in trueNouns:\n i = trueNouns.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NN', locations[loc], nounCount[i]))\n nounpCount = []\n trueNounsp = []\n for x in database.nounsplural:\n if x in trueNounsp:\n a = trueNounsp.index(x)\n nounpCount[a] += 1\n else:\n trueNounsp.append(x)\n a = trueNounsp.index(x)\n nounpCount.append(1)\n for x in trueNounsp:\n i = trueNounsp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNS', locations[loc], nounpCount[i]))\n pnounCount = []\n truepNouns = []\n for x in database.propernounS:\n if x in truepNouns:\n a = truepNouns.index(x)\n pnounCount[a] += 1\n else:\n truepNouns.append(x)\n a = truepNouns.index(x)\n pnounCount.append(1)\n for x in truepNouns:\n i = truepNouns.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNP', locations[loc], pnounCount[i]))\n pnounpCount = []\n truepNounsp = []\n for x in database.propernounP:\n if x in truepNounsp:\n a = truepNounsp.index(x)\n pnounpCount[a] += 1\n else:\n truepNounsp.append(x)\n a = truepNounsp.index(x)\n pnounpCount.append(1)\n for x in truepNounsp:\n i = truepNounsp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNPS', locations[loc], pnounpCount[i]))\n adjectCount = []\n trueadject = []\n for x in database.adjective:\n if x in trueadject:\n a = trueadject.index(x)\n adjectCount[a] += 1\n else:\n trueadject.append(x)\n a = trueadject.index(x)\n adjectCount.append(1)\n for x in trueadject:\n i = trueadject.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'JJ', locations[loc], adjectCount[i]))\n verbCount = []\n trueVerb = []\n for x in database.verbs:\n if x in trueVerb:\n a = trueVerb.index(x)\n verbCount[a] += 1\n else:\n trueVerb.append(x)\n a = trueVerb.index(x)\n verbCount.append(1)\n for x in trueVerb:\n i = trueVerb.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VB', locations[loc], verbCount[i]))\n verbpCount = []\n trueVerbp = []\n for x in database.verbpast:\n if x in trueVerbp:\n a = trueVerbp.index(x)\n verbpCount[a] += 1\n else:\n trueVerbp.append(x)\n a = trueVerbp.index(x)\n verbpCount.append(1)\n for x in trueVerbp:\n i = trueVerbp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VBD', locations[loc], verbpCount[i]))\n verb3pCount = []\n trueVerb3p = []\n for x in database.verb3person:\n if x in trueVerb3p:\n a = trueVerb3p.index(x)\n verb3pCount[a] 
+= 1\n else:\n trueVerb3p.append(x)\n a = trueVerb3p.index(x)\n verb3pCount.append(1)\n for x in trueVerb3p:\n i = trueVerb3p.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VBZ', locations[loc], verb3pCount[i]))\n adverbCount = []\n trueAdverb = []\n for x in database.adverb:\n if x in trueAdverb:\n a = trueAdverb.index(x)\n adverbCount[a] += 1\n else:\n trueAdverb.append(x)\n a = trueAdverb.index(x)\n adverbCount.append(1)\n for x in trueAdverb:\n i = trueAdverb.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'RB', locations[loc], adverbCount[i]))\n uselessCount = []\n trueUseless = []\n for x in database.useless:\n if x in trueUseless:\n a = trueUseless.index(x)\n uselessCount[a] += 1\n else:\n trueUseless.append(x)\n a = trueUseless.index(x)\n uselessCount.append(1)\n for x in trueUseless:\n i = trueUseless.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'PU', locations[loc], uselessCount[i]))\n uuWCount = []\n trueuuW = []\n for x in database.unUsedWords:\n if x in trueuuW:\n a = trueuuW.index(x)\n uuWCount[a] += 1\n else:\n trueuuW.append(x)\n a = trueuuW.index(x)\n uuWCount.append(1)\n for x in trueuuW:\n i = trueuuW.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'US', locations[loc], uuWCount[i]))\n\n\ndef pop2():\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Knight','Castle','Old Man Jenkins','Picture')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Vampire' , 'Castle' , 'Andrew the Tour', 'Vampire Make Up and fake blood')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Shadow' , 'Castle' , 'Frank the Janitor' , 'Black paint')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ghost Pirate','Beach','Bill the Lifeguard','Pirate Costume')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Seaweed Monster','Beach','Old Fisherman Joe','Seaweed')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Shark','Beach','The Mayor','Shark fins')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Cowboy Ghost','Ghost Town','Jerry the Businessman ','Cowboy hat')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Miner Ghost','Ghost Town','Gold Hunter Phil','Dusty shoes')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Headless Horse Man','Ghost Town','Envirnmentalist Paddy','Drawing of rig to appear headless')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Francinstein','Haunted House','Sir Godfree','Green paint')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Zombie','Haunted House','The Waiter','Zombie Make Up and fake boy parts')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ghost','Haunted House','Jimmy','Glow in the dark paint on cloths')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ape Man','Jungle','Explorer Fred','Ape Costume')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Animal Ghosts','Jungle','Environmentalist Jennie','Scratch Marks')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Pterodactyl','Jungle','Tour Guide Bill','Book on flight')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Clown Ghost','Carnival','Ring Master','Old Clown Costumes')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Zombie','Carnival','Blind Knife Thrower','Eye 
tests saying he is not blind')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Animals','Carnival','Worlds Strongest Man','Scratch marks')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ghost Car','Highway','Old Town Mayor','Car ownership documents')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('White Lady Ghost','Highway','Miss Anderson','White Dress')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Aliens','Highway','Conspiracy Tom','Fake Space ship blueprint')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Mummy','Pyramid','Museum Curator Petterson ','Bandages')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Sand Man','Pyramid','Ramesh the Tour Guide','Sand')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Sphynx','Pyramid','Tour Guide Bob','scratch marks')\"\n )\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Scooby Doo','Scooby Dooby Doo')\")\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Shaggy','Zoinks!')\")\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Fred','Lets Split up and look for clues')\"\n )\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Velma','My glasses. I cant find my glasses')\"\n )\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Daphne','Do you want a Scooby Snack')\"\n )\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Stormy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Raining')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Dark')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Beach','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Beach','Misty')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Ghost Town','Cloudy')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Ghost TOwn','Foggy')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Haunted House','Stormy')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Haunted House','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Jungle','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Jungle','Raining')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Dark')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Cloudy')\"\n )\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Carnival','Overcast')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Highway','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Highway','Sunny')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Pyramid','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Raining')\"\n )\n", "step-5": "import database\nimport nltk\ndef pop(i): # pupulate the words table\n loc = i\n sentencesTrial = []\n File = open('words.txt')\n lines = File.read()\n sentences = nltk.sent_tokenize(lines)\n locations = [\"Castle\",\"Beach\",\"Beach\",\"Ghost Town\",\"Ghost Town\",\"Haunted House\",\"Jungle\",\"Carnival\", \"Ghost Town\", \"Highway\", \"Castle\", \"Pyramid\",\"Beach\",\"Beach\",\"Carnival\", \"Highway\", \"Castle\" ,\"Jungle\" ]\n\n for sentence in sentences:\n for 
word, pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))):\n if(pos == 'NN'):\n database.nouns.append(word.lower())\n sentencesTrial.append(\"NN\")\n elif (pos == 'NNS'):\n database.nounsplural.append(word.lower())\n sentencesTrial.append(\"NNS\")\n elif (pos == 'NNP'):\n database.propernounS.append(word.lower())\n sentencesTrial.append(\"NNP\")\n elif (pos == 'NNPS'):\n database.propernounP.append(word.lower())\n sentencesTrial.append(\"NNPS\")\n elif (pos == 'JJ'):\n database.adjective.append(word.lower())\n sentencesTrial.append(\"JJ\")\n elif (pos == 'VB' or pos == 'VBG' or pos == 'VBN'):\n database.verbs.append(word.lower())\n sentencesTrial.append(\"VB\")\n elif (pos == 'VBD'):\n database.verbpast.append(word.lower())\n sentencesTrial.append(\"VBD\")\n elif (pos == 'VBZ' or pos == 'VBP'):\n database.verb3person.append(word.lower())\n sentencesTrial.append(\"VBZ\")\n elif (pos == 'RB' or pos == 'RBR' or pos == 'RBS'):\n database.adverb.append(word)\n sentencesTrial.append(\"RB\".lower())\n else:\n if(word == \",\"):\n database.useless.append(word)\n sentencesTrial.append(\",\")\n break\n elif(word == \".\"):\n database.useless.append(word)\n sentencesTrial.append(\".\")\n break\n else:\n database.unUsedWords.append(word.lower())\n break\n\n nounCount = []\n trueNouns = []\n\n for x in database.nouns:\n if x in trueNouns:\n a = trueNouns.index(x)\n nounCount[a] +=1\n else:\n trueNouns.append(x)\n a = trueNouns.index(x)\n nounCount.append(1)\n\n for x in trueNouns:\n i = trueNouns.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x,'NN',locations[loc],nounCount[i]))\n\n nounpCount = []\n trueNounsp = []\n\n for x in database.nounsplural:\n if x in trueNounsp:\n a = trueNounsp.index(x)\n nounpCount[a] += 1\n else:\n trueNounsp.append(x)\n a = trueNounsp.index(x)\n nounpCount.append(1)\n\n for x in trueNounsp:\n i = trueNounsp.index(x)\n database.cursor.execute(\n \"INSERT INTO words VALUES (?, ?, ?, ?)\",\n (x, 'NNS', locations[loc], nounpCount[i]))\n\n pnounCount = []\n truepNouns = []\n\n for x in database.propernounS:\n if x in truepNouns:\n a = truepNouns.index(x)\n pnounCount[a] += 1\n else:\n truepNouns.append(x)\n a = truepNouns.index(x)\n pnounCount.append(1)\n\n for x in truepNouns:\n i = truepNouns.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'NNP', locations[loc], pnounCount[i]))\n\n pnounpCount = []\n truepNounsp = []\n\n for x in database.propernounP:\n if x in truepNounsp:\n a = truepNounsp.index(x)\n pnounpCount[a] += 1\n else:\n truepNounsp.append(x)\n a = truepNounsp.index(x)\n pnounpCount.append(1)\n\n for x in truepNounsp:\n i = truepNounsp.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'NNPS', locations[loc], pnounpCount[i]))\n\n adjectCount = []\n trueadject = []\n\n for x in database.adjective:\n if x in trueadject:\n a = trueadject.index(x)\n adjectCount[a] += 1\n else:\n trueadject.append(x)\n a = trueadject.index(x)\n adjectCount.append(1)\n\n for x in trueadject:\n i = trueadject.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'JJ', locations[loc], adjectCount[i]))\n\n verbCount = []\n trueVerb = []\n\n for x in database.verbs:\n if x in trueVerb:\n a = trueVerb.index(x)\n verbCount[a] += 1\n else:\n trueVerb.append(x)\n a = trueVerb.index(x)\n verbCount.append(1)\n\n for x in trueVerb:\n i = trueVerb.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'VB', locations[loc], verbCount[i]))\n\n 
verbpCount = []\n trueVerbp = []\n\n for x in database.verbpast:\n if x in trueVerbp:\n a = trueVerbp.index(x)\n verbpCount[a] += 1\n else:\n trueVerbp.append(x)\n a = trueVerbp.index(x)\n verbpCount.append(1)\n\n for x in trueVerbp:\n i = trueVerbp.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'VBD', locations[loc], verbpCount[i]))\n\n verb3pCount = []\n trueVerb3p = []\n\n for x in database.verb3person:\n if x in trueVerb3p:\n a = trueVerb3p.index(x)\n verb3pCount[a] += 1\n else:\n trueVerb3p.append(x)\n a = trueVerb3p.index(x)\n verb3pCount.append(1)\n\n for x in trueVerb3p:\n i = trueVerb3p.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'VBZ', locations[loc], verb3pCount[i]))\n\n adverbCount = []\n trueAdverb = []\n\n for x in database.adverb:\n if x in trueAdverb:\n a = trueAdverb.index(x)\n adverbCount[a] += 1\n else:\n trueAdverb.append(x)\n a = trueAdverb.index(x)\n adverbCount.append(1)\n\n for x in trueAdverb:\n i = trueAdverb.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'RB', locations[loc], adverbCount[i]))\n\n uselessCount = []\n trueUseless = []\n\n for x in database.useless:\n if x in trueUseless:\n a = trueUseless.index(x)\n uselessCount[a] += 1\n else:\n trueUseless.append(x)\n a = trueUseless.index(x)\n uselessCount.append(1)\n\n for x in trueUseless:\n i = trueUseless.index(x)\n database.cursor.execute(\n \"INSERT INTO words VALUES (?, ?, ?, ?)\",\n (x, 'PU', locations[loc], uselessCount[i]))\n\n uuWCount = []\n trueuuW = []\n\n for x in database.unUsedWords:\n if x in trueuuW:\n a = trueuuW.index(x)\n uuWCount[a] += 1\n else:\n trueuuW.append(x)\n a = trueuuW.index(x)\n uuWCount.append(1)\n\n for x in trueuuW:\n i = trueuuW.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'US', locations[loc], uuWCount[i]))\n\n\ndef pop2(): #populate the monster and characters table\n\n####populating the monsters\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Knight','Castle','Old Man Jenkins','Picture')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Vampire' , 'Castle' , 'Andrew the Tour', 'Vampire Make Up and fake blood')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Shadow' , 'Castle' , 'Frank the Janitor' , 'Black paint')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Ghost Pirate','Beach','Bill the Lifeguard','Pirate Costume')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Seaweed Monster','Beach','Old Fisherman Joe','Seaweed')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Shark','Beach','The Mayor','Shark fins')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Cowboy Ghost','Ghost Town','Jerry the Businessman ','Cowboy hat')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Miner Ghost','Ghost Town','Gold Hunter Phil','Dusty shoes')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Headless Horse Man','Ghost Town','Envirnmentalist Paddy','Drawing of rig to appear headless')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Francinstein','Haunted House','Sir Godfree','Green paint')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Zombie','Haunted House','The Waiter','Zombie Make Up and fake boy parts')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Ghost','Haunted House','Jimmy','Glow in the dark paint on cloths')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Ape 
Man','Jungle','Explorer Fred','Ape Costume')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Animal Ghosts','Jungle','Environmentalist Jennie','Scratch Marks')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Pterodactyl','Jungle','Tour Guide Bill','Book on flight')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Clown Ghost','Carnival','Ring Master','Old Clown Costumes')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Zombie','Carnival','Blind Knife Thrower','Eye tests saying he is not blind')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Animals','Carnival','Worlds Strongest Man','Scratch marks')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Ghost Car','Highway','Old Town Mayor','Car ownership documents')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('White Lady Ghost','Highway','Miss Anderson','White Dress')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Aliens','Highway','Conspiracy Tom','Fake Space ship blueprint')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Mummy','Pyramid','Museum Curator Petterson ','Bandages')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Sand Man','Pyramid','Ramesh the Tour Guide','Sand')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Sphynx','Pyramid','Tour Guide Bob','scratch marks')\")\n\n####populating the characters\n\n\n database.cursor.execute(\"INSERT INTO characters VALUES ('Scooby Doo','Scooby Dooby Doo')\")\n database.cursor.execute(\"INSERT INTO characters VALUES ('Shaggy','Zoinks!')\")\n database.cursor.execute(\"INSERT INTO characters VALUES ('Fred','Lets Split up and look for clues')\")\n database.cursor.execute(\"INSERT INTO characters VALUES ('Velma','My glasses. 
I cant find my glasses')\")\n database.cursor.execute(\"INSERT INTO characters VALUES ('Daphne','Do you want a Scooby Snack')\")\n\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Stormy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Raining')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Dark')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Beach','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Beach','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Ghost Town','Cloudy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Ghost TOwn','Foggy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Haunted House','Stormy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Haunted House','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Jungle','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Jungle','Raining')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Dark')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Cloudy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Highway','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Highway','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Raining')\")", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
try: fh = open("testfile","w") fh.write("test") except IOError: print("Error:没有找到文件") else: print("sucess") fh.close()
normal
{ "blob_id": "15e0b396a4726f98ce5ae2620338d7d48985707e", "index": 9533, "step-1": "<mask token>\n", "step-2": "try:\n fh = open('testfile', 'w')\n fh.write('test')\nexcept IOError:\n print('Error:没有找到文件')\nelse:\n print('sucess')\n fh.close()\n", "step-3": "try:\r\n\tfh = open(\"testfile\",\"w\")\r\n\tfh.write(\"test\")\r\nexcept IOError:\r\n\tprint(\"Error:没有找到文件\")\r\nelse:\r\n\tprint(\"sucess\")\r\n\tfh.close()\r\n\r\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> class BaseProperty(object): <|reserved_special_token_0|> <|reserved_special_token_0|> def set(self, obj, value): """Set property value on object. :param obj: Item :type obj: byte.model.Model :param value: Value :type value: any """ raise NotImplementedError <|reserved_special_token_1|> <|reserved_special_token_0|> class BaseProperty(object): <|reserved_special_token_0|> def get(self, obj): """Get property value from object. :param obj: Item :type obj: byte.model.Model """ raise NotImplementedError def set(self, obj, value): """Set property value on object. :param obj: Item :type obj: byte.model.Model :param value: Value :type value: any """ raise NotImplementedError <|reserved_special_token_1|> <|reserved_special_token_0|> class BaseProperty(object): """Base class for properties.""" def get(self, obj): """Get property value from object. :param obj: Item :type obj: byte.model.Model """ raise NotImplementedError def set(self, obj, value): """Set property value on object. :param obj: Item :type obj: byte.model.Model :param value: Value :type value: any """ raise NotImplementedError <|reserved_special_token_1|> <|reserved_special_token_0|> from __future__ import absolute_import, division, print_function class BaseProperty(object): """Base class for properties.""" def get(self, obj): """Get property value from object. :param obj: Item :type obj: byte.model.Model """ raise NotImplementedError def set(self, obj, value): """Set property value on object. :param obj: Item :type obj: byte.model.Model :param value: Value :type value: any """ raise NotImplementedError <|reserved_special_token_1|> """byte - property model module.""" from __future__ import absolute_import, division, print_function class BaseProperty(object): """Base class for properties.""" def get(self, obj): """Get property value from object. :param obj: Item :type obj: byte.model.Model """ raise NotImplementedError def set(self, obj, value): """Set property value on object. :param obj: Item :type obj: byte.model.Model :param value: Value :type value: any """ raise NotImplementedError
flexible
{ "blob_id": "382f7119beba81087c497baf170eb6814c26c03e", "index": 5458, "step-1": "<mask token>\n\n\nclass BaseProperty(object):\n <mask token>\n <mask token>\n\n def set(self, obj, value):\n \"\"\"Set property value on object.\n\n :param obj: Item\n :type obj: byte.model.Model\n\n :param value: Value\n :type value: any\n \"\"\"\n raise NotImplementedError\n", "step-2": "<mask token>\n\n\nclass BaseProperty(object):\n <mask token>\n\n def get(self, obj):\n \"\"\"Get property value from object.\n\n :param obj: Item\n :type obj: byte.model.Model\n \"\"\"\n raise NotImplementedError\n\n def set(self, obj, value):\n \"\"\"Set property value on object.\n\n :param obj: Item\n :type obj: byte.model.Model\n\n :param value: Value\n :type value: any\n \"\"\"\n raise NotImplementedError\n", "step-3": "<mask token>\n\n\nclass BaseProperty(object):\n \"\"\"Base class for properties.\"\"\"\n\n def get(self, obj):\n \"\"\"Get property value from object.\n\n :param obj: Item\n :type obj: byte.model.Model\n \"\"\"\n raise NotImplementedError\n\n def set(self, obj, value):\n \"\"\"Set property value on object.\n\n :param obj: Item\n :type obj: byte.model.Model\n\n :param value: Value\n :type value: any\n \"\"\"\n raise NotImplementedError\n", "step-4": "<mask token>\nfrom __future__ import absolute_import, division, print_function\n\n\nclass BaseProperty(object):\n \"\"\"Base class for properties.\"\"\"\n\n def get(self, obj):\n \"\"\"Get property value from object.\n\n :param obj: Item\n :type obj: byte.model.Model\n \"\"\"\n raise NotImplementedError\n\n def set(self, obj, value):\n \"\"\"Set property value on object.\n\n :param obj: Item\n :type obj: byte.model.Model\n\n :param value: Value\n :type value: any\n \"\"\"\n raise NotImplementedError\n", "step-5": "\"\"\"byte - property model module.\"\"\"\nfrom __future__ import absolute_import, division, print_function\n\n\nclass BaseProperty(object):\n \"\"\"Base class for properties.\"\"\"\n\n def get(self, obj):\n \"\"\"Get property value from object.\n\n :param obj: Item\n :type obj: byte.model.Model\n \"\"\"\n raise NotImplementedError\n\n def set(self, obj, value):\n \"\"\"Set property value on object.\n\n :param obj: Item\n :type obj: byte.model.Model\n\n :param value: Value\n :type value: any\n \"\"\"\n raise NotImplementedError\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
<|reserved_special_token_0|> def base_model(): input_shape = 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE inputs = Input(shape=input_shape) conv1 = Convolution3D(32, 5, 5, 5, activation='relu')(inputs) drop1 = Dropout(0.2)(conv1) conv2 = Convolution3D(32, 5, 5, 5, activation='relu')(drop1) pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv2) flatten = Flatten()(pool1) dense2 = Dense(512, activation='tanh')(flatten) drop4 = Dropout(0.2)(dense2) dense3 = Dense(128, activation='tanh')(drop4) dense4 = Dense(2, activation='sigmoid')(dense3) model = Model(input=inputs, output=dense4) model.compile(loss='binary_crossentropy', optimizer='adam', metrics=[ 'accuracy']) return model <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> K.set_image_dim_ordering('th') <|reserved_special_token_0|> np.random.seed(123) <|reserved_special_token_0|> for i in range(0, len(X_init)): try: X[i] = X_init[i] y[i] = y_init[i] except: print('problem') continue print('done') <|reserved_special_token_0|> for i in range(0, len(test_data)): try: X_test[i] = test_data[i] except: print('problem_test') continue <|reserved_special_token_0|> for ind, row in solution.iterrows(): n = patient_order.index(ind) y_test[n] = row[0] print('done') <|reserved_special_token_0|> def base_model(): input_shape = 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE inputs = Input(shape=input_shape) conv1 = Convolution3D(32, 5, 5, 5, activation='relu')(inputs) drop1 = Dropout(0.2)(conv1) conv2 = Convolution3D(32, 5, 5, 5, activation='relu')(drop1) pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv2) flatten = Flatten()(pool1) dense2 = Dense(512, activation='tanh')(flatten) drop4 = Dropout(0.2)(dense2) dense3 = Dense(128, activation='tanh')(drop4) dense4 = Dense(2, activation='sigmoid')(dense3) model = Model(input=inputs, output=dense4) model.compile(loss='binary_crossentropy', optimizer='adam', metrics=[ 'accuracy']) return model <|reserved_special_token_0|> model.fit(X, y, nb_epoch=20, batch_size=30, verbose=2) model.summary() <|reserved_special_token_0|> print(logloss) print('Total time: {} seconds'.format(time.time() - start_time)) <|reserved_special_token_1|> <|reserved_special_token_0|> K.set_image_dim_ordering('th') start_time = time.time() np.random.seed(123) much_data = np.load('muchdata-50-50-20.npy') X_init = much_data[:, 0] y_init = much_data[:, 1] test_data = np.load('testdata-50-50-20.npy') patient_order = np.load('testpatientorder.npy') patient_order = list(patient_order) IMG_PX_SIZE = 50 HM_SLICES = 20 X = np.zeros((len(X_init), HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE)) y = np.zeros((len(y_init), 2)) for i in range(0, len(X_init)): try: X[i] = X_init[i] y[i] = y_init[i] except: print('problem') continue print('done') X_test = np.zeros((len(test_data), 20, 50, 50)) y_test = np.zeros((len(test_data), 1)) for i in range(0, len(test_data)): try: X_test[i] = test_data[i] except: print('problem_test') continue solution = pd.read_csv('stage1_solution.csv', index_col=0) for ind, row in solution.iterrows(): n = patient_order.index(ind) y_test[n] = row[0] print('done') X = X.reshape(X.shape[0], 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE).astype( 'float32') X_test = X_test.reshape(X_test.shape[0], 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE ).astype('float32') def base_model(): input_shape = 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE inputs = Input(shape=input_shape) conv1 = Convolution3D(32, 5, 5, 5, activation='relu')(inputs) drop1 = Dropout(0.2)(conv1) conv2 = Convolution3D(32, 5, 5, 5, activation='relu')(drop1) pool1 = 
MaxPooling3D(pool_size=(2, 2, 2))(conv2) flatten = Flatten()(pool1) dense2 = Dense(512, activation='tanh')(flatten) drop4 = Dropout(0.2)(dense2) dense3 = Dense(128, activation='tanh')(drop4) dense4 = Dense(2, activation='sigmoid')(dense3) model = Model(input=inputs, output=dense4) model.compile(loss='binary_crossentropy', optimizer='adam', metrics=[ 'accuracy']) return model model = base_model() model.fit(X, y, nb_epoch=20, batch_size=30, verbose=2) model.summary() predictions = model.predict(test_data, verbose=1) logloss = log_loss(y_test, predictions) print(logloss) print('Total time: {} seconds'.format(time.time() - start_time)) <|reserved_special_token_1|> import numpy as np import pandas as pd import time from sklearn.metrics import log_loss from keras.models import Sequential, Model from keras.layers import Dense, Input from keras.layers import Dropout from keras.layers import Flatten from keras.layers import LSTM from keras.layers.convolutional import Convolution3D from keras.layers.convolutional import MaxPooling3D from keras import backend as K K.set_image_dim_ordering('th') start_time = time.time() np.random.seed(123) much_data = np.load('muchdata-50-50-20.npy') X_init = much_data[:, 0] y_init = much_data[:, 1] test_data = np.load('testdata-50-50-20.npy') patient_order = np.load('testpatientorder.npy') patient_order = list(patient_order) IMG_PX_SIZE = 50 HM_SLICES = 20 X = np.zeros((len(X_init), HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE)) y = np.zeros((len(y_init), 2)) for i in range(0, len(X_init)): try: X[i] = X_init[i] y[i] = y_init[i] except: print('problem') continue print('done') X_test = np.zeros((len(test_data), 20, 50, 50)) y_test = np.zeros((len(test_data), 1)) for i in range(0, len(test_data)): try: X_test[i] = test_data[i] except: print('problem_test') continue solution = pd.read_csv('stage1_solution.csv', index_col=0) for ind, row in solution.iterrows(): n = patient_order.index(ind) y_test[n] = row[0] print('done') X = X.reshape(X.shape[0], 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE).astype( 'float32') X_test = X_test.reshape(X_test.shape[0], 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE ).astype('float32') def base_model(): input_shape = 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE inputs = Input(shape=input_shape) conv1 = Convolution3D(32, 5, 5, 5, activation='relu')(inputs) drop1 = Dropout(0.2)(conv1) conv2 = Convolution3D(32, 5, 5, 5, activation='relu')(drop1) pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv2) flatten = Flatten()(pool1) dense2 = Dense(512, activation='tanh')(flatten) drop4 = Dropout(0.2)(dense2) dense3 = Dense(128, activation='tanh')(drop4) dense4 = Dense(2, activation='sigmoid')(dense3) model = Model(input=inputs, output=dense4) model.compile(loss='binary_crossentropy', optimizer='adam', metrics=[ 'accuracy']) return model model = base_model() model.fit(X, y, nb_epoch=20, batch_size=30, verbose=2) model.summary() predictions = model.predict(test_data, verbose=1) logloss = log_loss(y_test, predictions) print(logloss) print('Total time: {} seconds'.format(time.time() - start_time)) <|reserved_special_token_1|> import numpy as np import pandas as pd import time from sklearn.metrics import log_loss from keras.models import Sequential, Model from keras.layers import Dense, Input from keras.layers import Dropout from keras.layers import Flatten from keras.layers import LSTM from keras.layers.convolutional import Convolution3D from keras.layers.convolutional import MaxPooling3D from keras import backend as K K.set_image_dim_ordering('th') start_time = time.time() #Random seed 
np.random.seed(123) #Load training data much_data = np.load('muchdata-50-50-20.npy') X_init = much_data[:,0] y_init = much_data[:,1] #Load test data test_data = np.load('testdata-50-50-20.npy') patient_order = np.load('testpatientorder.npy') patient_order = list(patient_order) IMG_PX_SIZE = 50 HM_SLICES = 20 X = np.zeros((len(X_init),HM_SLICES,IMG_PX_SIZE,IMG_PX_SIZE)) y = np.zeros((len(y_init),2)) for i in range(0,len(X_init)): try: X[i] = X_init[i] y[i] = y_init[i] except: print("problem") continue print("done") X_test = np.zeros((len(test_data),20,50,50)) y_test = np.zeros((len(test_data),1)) for i in range(0,len(test_data)): try: X_test[i] = test_data[i] except: print("problem_test") continue solution = pd.read_csv('stage1_solution.csv', index_col=0) for ind, row in solution.iterrows(): n = patient_order.index(ind) y_test[n] = row[0] print("done") #Reshape to [samples][channels][width][height] X = X.reshape(X.shape[0],1,HM_SLICES,IMG_PX_SIZE,IMG_PX_SIZE).astype('float32') X_test = X_test.reshape(X_test.shape[0],1,HM_SLICES,IMG_PX_SIZE,IMG_PX_SIZE).astype('float32') def base_model(): input_shape=(1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE) inputs = Input(shape=input_shape) conv1 = Convolution3D(32, 5, 5, 5, activation='relu')(inputs) drop1 = Dropout(0.2)(conv1) conv2 = Convolution3D(32, 5, 5, 5, activation='relu')(drop1) pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv2) ## conv3 = Convolution3D(64, 5, 5, 5, activation='relu')(pool1) ## drop2 = Dropout(0.2)(conv3) ## conv4 = Convolution3D(64, 5, 5, 5, activation='relu')(drop2) ## pool2 = MaxPooling3D(pool_size=(2, 2, 2))(conv4) flatten = Flatten()(pool1) ## dense1 = Dense(1024, activation='tanh')(flatten) ## drop3 = Dropout(0.2)(dense1) dense2 = Dense(512, activation='tanh')(flatten) drop4 = Dropout(0.2)(dense2) dense3 = Dense(128, activation='tanh')(drop4) dense4 = Dense(2, activation='sigmoid')(dense3) model = Model(input=inputs, output=dense4) model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy']) return model # build the model model = base_model() # Fit the model model.fit(X, y, nb_epoch=20, batch_size=30,verbose=2) model.summary() #Prediction predictions = model.predict(test_data, verbose=1) logloss = log_loss(y_test,predictions) print(logloss) print("Total time: {} seconds".format(time.time() - start_time))
flexible
{ "blob_id": "e3d886dedaf5b120392d0dc81c4c71398f08f8d6", "index": 8234, "step-1": "<mask token>\n\n\ndef base_model():\n input_shape = 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE\n inputs = Input(shape=input_shape)\n conv1 = Convolution3D(32, 5, 5, 5, activation='relu')(inputs)\n drop1 = Dropout(0.2)(conv1)\n conv2 = Convolution3D(32, 5, 5, 5, activation='relu')(drop1)\n pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv2)\n flatten = Flatten()(pool1)\n dense2 = Dense(512, activation='tanh')(flatten)\n drop4 = Dropout(0.2)(dense2)\n dense3 = Dense(128, activation='tanh')(drop4)\n dense4 = Dense(2, activation='sigmoid')(dense3)\n model = Model(input=inputs, output=dense4)\n model.compile(loss='binary_crossentropy', optimizer='adam', metrics=[\n 'accuracy'])\n return model\n\n\n<mask token>\n", "step-2": "<mask token>\nK.set_image_dim_ordering('th')\n<mask token>\nnp.random.seed(123)\n<mask token>\nfor i in range(0, len(X_init)):\n try:\n X[i] = X_init[i]\n y[i] = y_init[i]\n except:\n print('problem')\n continue\nprint('done')\n<mask token>\nfor i in range(0, len(test_data)):\n try:\n X_test[i] = test_data[i]\n except:\n print('problem_test')\n continue\n<mask token>\nfor ind, row in solution.iterrows():\n n = patient_order.index(ind)\n y_test[n] = row[0]\nprint('done')\n<mask token>\n\n\ndef base_model():\n input_shape = 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE\n inputs = Input(shape=input_shape)\n conv1 = Convolution3D(32, 5, 5, 5, activation='relu')(inputs)\n drop1 = Dropout(0.2)(conv1)\n conv2 = Convolution3D(32, 5, 5, 5, activation='relu')(drop1)\n pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv2)\n flatten = Flatten()(pool1)\n dense2 = Dense(512, activation='tanh')(flatten)\n drop4 = Dropout(0.2)(dense2)\n dense3 = Dense(128, activation='tanh')(drop4)\n dense4 = Dense(2, activation='sigmoid')(dense3)\n model = Model(input=inputs, output=dense4)\n model.compile(loss='binary_crossentropy', optimizer='adam', metrics=[\n 'accuracy'])\n return model\n\n\n<mask token>\nmodel.fit(X, y, nb_epoch=20, batch_size=30, verbose=2)\nmodel.summary()\n<mask token>\nprint(logloss)\nprint('Total time: {} seconds'.format(time.time() - start_time))\n", "step-3": "<mask token>\nK.set_image_dim_ordering('th')\nstart_time = time.time()\nnp.random.seed(123)\nmuch_data = np.load('muchdata-50-50-20.npy')\nX_init = much_data[:, 0]\ny_init = much_data[:, 1]\ntest_data = np.load('testdata-50-50-20.npy')\npatient_order = np.load('testpatientorder.npy')\npatient_order = list(patient_order)\nIMG_PX_SIZE = 50\nHM_SLICES = 20\nX = np.zeros((len(X_init), HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE))\ny = np.zeros((len(y_init), 2))\nfor i in range(0, len(X_init)):\n try:\n X[i] = X_init[i]\n y[i] = y_init[i]\n except:\n print('problem')\n continue\nprint('done')\nX_test = np.zeros((len(test_data), 20, 50, 50))\ny_test = np.zeros((len(test_data), 1))\nfor i in range(0, len(test_data)):\n try:\n X_test[i] = test_data[i]\n except:\n print('problem_test')\n continue\nsolution = pd.read_csv('stage1_solution.csv', index_col=0)\nfor ind, row in solution.iterrows():\n n = patient_order.index(ind)\n y_test[n] = row[0]\nprint('done')\nX = X.reshape(X.shape[0], 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE).astype(\n 'float32')\nX_test = X_test.reshape(X_test.shape[0], 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE\n ).astype('float32')\n\n\ndef base_model():\n input_shape = 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE\n inputs = Input(shape=input_shape)\n conv1 = Convolution3D(32, 5, 5, 5, activation='relu')(inputs)\n drop1 = Dropout(0.2)(conv1)\n conv2 = 
Convolution3D(32, 5, 5, 5, activation='relu')(drop1)\n pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv2)\n flatten = Flatten()(pool1)\n dense2 = Dense(512, activation='tanh')(flatten)\n drop4 = Dropout(0.2)(dense2)\n dense3 = Dense(128, activation='tanh')(drop4)\n dense4 = Dense(2, activation='sigmoid')(dense3)\n model = Model(input=inputs, output=dense4)\n model.compile(loss='binary_crossentropy', optimizer='adam', metrics=[\n 'accuracy'])\n return model\n\n\nmodel = base_model()\nmodel.fit(X, y, nb_epoch=20, batch_size=30, verbose=2)\nmodel.summary()\npredictions = model.predict(test_data, verbose=1)\nlogloss = log_loss(y_test, predictions)\nprint(logloss)\nprint('Total time: {} seconds'.format(time.time() - start_time))\n", "step-4": "import numpy as np\nimport pandas as pd\nimport time\nfrom sklearn.metrics import log_loss\nfrom keras.models import Sequential, Model\nfrom keras.layers import Dense, Input\nfrom keras.layers import Dropout\nfrom keras.layers import Flatten\nfrom keras.layers import LSTM\nfrom keras.layers.convolutional import Convolution3D\nfrom keras.layers.convolutional import MaxPooling3D\nfrom keras import backend as K\nK.set_image_dim_ordering('th')\nstart_time = time.time()\nnp.random.seed(123)\nmuch_data = np.load('muchdata-50-50-20.npy')\nX_init = much_data[:, 0]\ny_init = much_data[:, 1]\ntest_data = np.load('testdata-50-50-20.npy')\npatient_order = np.load('testpatientorder.npy')\npatient_order = list(patient_order)\nIMG_PX_SIZE = 50\nHM_SLICES = 20\nX = np.zeros((len(X_init), HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE))\ny = np.zeros((len(y_init), 2))\nfor i in range(0, len(X_init)):\n try:\n X[i] = X_init[i]\n y[i] = y_init[i]\n except:\n print('problem')\n continue\nprint('done')\nX_test = np.zeros((len(test_data), 20, 50, 50))\ny_test = np.zeros((len(test_data), 1))\nfor i in range(0, len(test_data)):\n try:\n X_test[i] = test_data[i]\n except:\n print('problem_test')\n continue\nsolution = pd.read_csv('stage1_solution.csv', index_col=0)\nfor ind, row in solution.iterrows():\n n = patient_order.index(ind)\n y_test[n] = row[0]\nprint('done')\nX = X.reshape(X.shape[0], 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE).astype(\n 'float32')\nX_test = X_test.reshape(X_test.shape[0], 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE\n ).astype('float32')\n\n\ndef base_model():\n input_shape = 1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE\n inputs = Input(shape=input_shape)\n conv1 = Convolution3D(32, 5, 5, 5, activation='relu')(inputs)\n drop1 = Dropout(0.2)(conv1)\n conv2 = Convolution3D(32, 5, 5, 5, activation='relu')(drop1)\n pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv2)\n flatten = Flatten()(pool1)\n dense2 = Dense(512, activation='tanh')(flatten)\n drop4 = Dropout(0.2)(dense2)\n dense3 = Dense(128, activation='tanh')(drop4)\n dense4 = Dense(2, activation='sigmoid')(dense3)\n model = Model(input=inputs, output=dense4)\n model.compile(loss='binary_crossentropy', optimizer='adam', metrics=[\n 'accuracy'])\n return model\n\n\nmodel = base_model()\nmodel.fit(X, y, nb_epoch=20, batch_size=30, verbose=2)\nmodel.summary()\npredictions = model.predict(test_data, verbose=1)\nlogloss = log_loss(y_test, predictions)\nprint(logloss)\nprint('Total time: {} seconds'.format(time.time() - start_time))\n", "step-5": "import numpy as np\nimport pandas as pd\nimport time\nfrom sklearn.metrics import log_loss\n\nfrom keras.models import Sequential, Model\nfrom keras.layers import Dense, Input\nfrom keras.layers import Dropout\nfrom keras.layers import Flatten\nfrom keras.layers import LSTM\nfrom 
keras.layers.convolutional import Convolution3D\nfrom keras.layers.convolutional import MaxPooling3D\nfrom keras import backend as K\nK.set_image_dim_ordering('th')\nstart_time = time.time()\n#Random seed\nnp.random.seed(123)\n#Load training data\nmuch_data = np.load('muchdata-50-50-20.npy')\nX_init = much_data[:,0]\ny_init = much_data[:,1]\n#Load test data\ntest_data = np.load('testdata-50-50-20.npy')\npatient_order = np.load('testpatientorder.npy')\npatient_order = list(patient_order)\n\nIMG_PX_SIZE = 50\nHM_SLICES = 20\n\nX = np.zeros((len(X_init),HM_SLICES,IMG_PX_SIZE,IMG_PX_SIZE))\ny = np.zeros((len(y_init),2))\nfor i in range(0,len(X_init)):\n try:\n X[i] = X_init[i]\n y[i] = y_init[i]\n except:\n print(\"problem\")\n continue\n\nprint(\"done\")\nX_test = np.zeros((len(test_data),20,50,50))\ny_test = np.zeros((len(test_data),1))\nfor i in range(0,len(test_data)):\n try:\n X_test[i] = test_data[i]\n except:\n print(\"problem_test\")\n continue\n\nsolution = pd.read_csv('stage1_solution.csv', index_col=0)\nfor ind, row in solution.iterrows():\n n = patient_order.index(ind)\n y_test[n] = row[0]\nprint(\"done\")\n\n#Reshape to [samples][channels][width][height]\nX = X.reshape(X.shape[0],1,HM_SLICES,IMG_PX_SIZE,IMG_PX_SIZE).astype('float32')\nX_test = X_test.reshape(X_test.shape[0],1,HM_SLICES,IMG_PX_SIZE,IMG_PX_SIZE).astype('float32')\n\ndef base_model():\n input_shape=(1, HM_SLICES, IMG_PX_SIZE, IMG_PX_SIZE)\n inputs = Input(shape=input_shape)\n conv1 = Convolution3D(32, 5, 5, 5, activation='relu')(inputs)\n drop1 = Dropout(0.2)(conv1)\n conv2 = Convolution3D(32, 5, 5, 5, activation='relu')(drop1)\n pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv2)\n## conv3 = Convolution3D(64, 5, 5, 5, activation='relu')(pool1)\n## drop2 = Dropout(0.2)(conv3)\n## conv4 = Convolution3D(64, 5, 5, 5, activation='relu')(drop2)\n## pool2 = MaxPooling3D(pool_size=(2, 2, 2))(conv4)\n flatten = Flatten()(pool1)\n## dense1 = Dense(1024, activation='tanh')(flatten)\n## drop3 = Dropout(0.2)(dense1)\n dense2 = Dense(512, activation='tanh')(flatten)\n drop4 = Dropout(0.2)(dense2)\n dense3 = Dense(128, activation='tanh')(drop4)\n dense4 = Dense(2, activation='sigmoid')(dense3)\n model = Model(input=inputs, output=dense4)\n model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\n return model\n \n# build the model\nmodel = base_model()\n# Fit the model\nmodel.fit(X, y, nb_epoch=20, batch_size=30,verbose=2)\nmodel.summary()\n#Prediction\npredictions = model.predict(test_data, verbose=1)\n\nlogloss = log_loss(y_test,predictions)\n\nprint(logloss)\n\nprint(\"Total time: {} seconds\".format(time.time() - start_time))\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
from conans import ConanFile, CMake, tools import os class Demo(ConanFile): name = "Demo" version = "0.1" license = "<Put the package license here>" url = "<Package recipe repository url here, for issues about the package>" description = "<Description of Testlib here>" settings = "os", "compiler", "build_type", "arch" options = {"shared": [True, False]} default_options = "shared=False" generators = "cmake" exports_sources = "src/*" requires = "TestLib/0.1@gbmhunter/testing" def build(self): cmake = CMake(self) cmake.configure(source_folder="src/") print('BLAHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH = ' + str(self.deps_cpp_info["TestLib"])) cmake.build() def imports(self): self.copy("*.dll", dst="bin", src="bin") self.copy("*.dylib*", dst="bin", src="lib") self.copy('*.so*', dst='bin', src='lib') def test(self): if not tools.cross_building(self.settings): os.chdir("bin") self.run(".%sexample" % os.sep) def package(self): self.copy("*.h", dst="include", src="src") self.copy("*.lib", dst="lib", keep_path=False) self.copy("*.dll", dst="bin", keep_path=False) self.copy("*.dylib*", dst="lib", keep_path=False) self.copy("*.so", dst="lib", keep_path=False) self.copy("*.a", dst="lib", keep_path=False)
normal
{ "blob_id": "c9bc331f4805a956146619c59d183fc3bcbe47cb", "index": 9728, "step-1": "<mask token>\n\n\nclass Demo(ConanFile):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def imports(self):\n self.copy('*.dll', dst='bin', src='bin')\n self.copy('*.dylib*', dst='bin', src='lib')\n self.copy('*.so*', dst='bin', src='lib')\n <mask token>\n\n def package(self):\n self.copy('*.h', dst='include', src='src')\n self.copy('*.lib', dst='lib', keep_path=False)\n self.copy('*.dll', dst='bin', keep_path=False)\n self.copy('*.dylib*', dst='lib', keep_path=False)\n self.copy('*.so', dst='lib', keep_path=False)\n self.copy('*.a', dst='lib', keep_path=False)\n", "step-2": "<mask token>\n\n\nclass Demo(ConanFile):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def imports(self):\n self.copy('*.dll', dst='bin', src='bin')\n self.copy('*.dylib*', dst='bin', src='lib')\n self.copy('*.so*', dst='bin', src='lib')\n\n def test(self):\n if not tools.cross_building(self.settings):\n os.chdir('bin')\n self.run('.%sexample' % os.sep)\n\n def package(self):\n self.copy('*.h', dst='include', src='src')\n self.copy('*.lib', dst='lib', keep_path=False)\n self.copy('*.dll', dst='bin', keep_path=False)\n self.copy('*.dylib*', dst='lib', keep_path=False)\n self.copy('*.so', dst='lib', keep_path=False)\n self.copy('*.a', dst='lib', keep_path=False)\n", "step-3": "<mask token>\n\n\nclass Demo(ConanFile):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def build(self):\n cmake = CMake(self)\n cmake.configure(source_folder='src/')\n print('BLAHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH = ' + str(self.\n deps_cpp_info['TestLib']))\n cmake.build()\n\n def imports(self):\n self.copy('*.dll', dst='bin', src='bin')\n self.copy('*.dylib*', dst='bin', src='lib')\n self.copy('*.so*', dst='bin', src='lib')\n\n def test(self):\n if not tools.cross_building(self.settings):\n os.chdir('bin')\n self.run('.%sexample' % os.sep)\n\n def package(self):\n self.copy('*.h', dst='include', src='src')\n self.copy('*.lib', dst='lib', keep_path=False)\n self.copy('*.dll', dst='bin', keep_path=False)\n self.copy('*.dylib*', dst='lib', keep_path=False)\n self.copy('*.so', dst='lib', keep_path=False)\n self.copy('*.a', dst='lib', keep_path=False)\n", "step-4": "<mask token>\n\n\nclass Demo(ConanFile):\n name = 'Demo'\n version = '0.1'\n license = '<Put the package license here>'\n url = '<Package recipe repository url here, for issues about the package>'\n description = '<Description of Testlib here>'\n settings = 'os', 'compiler', 'build_type', 'arch'\n options = {'shared': [True, False]}\n default_options = 'shared=False'\n generators = 'cmake'\n exports_sources = 'src/*'\n requires = 'TestLib/0.1@gbmhunter/testing'\n\n def build(self):\n cmake = CMake(self)\n cmake.configure(source_folder='src/')\n print('BLAHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH = ' + str(self.\n deps_cpp_info['TestLib']))\n cmake.build()\n\n def imports(self):\n self.copy('*.dll', dst='bin', src='bin')\n self.copy('*.dylib*', dst='bin', src='lib')\n self.copy('*.so*', dst='bin', src='lib')\n\n def test(self):\n if not tools.cross_building(self.settings):\n os.chdir('bin')\n self.run('.%sexample' % os.sep)\n\n 
def package(self):\n self.copy('*.h', dst='include', src='src')\n self.copy('*.lib', dst='lib', keep_path=False)\n self.copy('*.dll', dst='bin', keep_path=False)\n self.copy('*.dylib*', dst='lib', keep_path=False)\n self.copy('*.so', dst='lib', keep_path=False)\n self.copy('*.a', dst='lib', keep_path=False)\n", "step-5": "from conans import ConanFile, CMake, tools\nimport os\n\nclass Demo(ConanFile):\n name = \"Demo\"\n version = \"0.1\"\n license = \"<Put the package license here>\"\n url = \"<Package recipe repository url here, for issues about the package>\"\n description = \"<Description of Testlib here>\"\n settings = \"os\", \"compiler\", \"build_type\", \"arch\"\n options = {\"shared\": [True, False]}\n default_options = \"shared=False\"\n generators = \"cmake\"\n exports_sources = \"src/*\"\n requires = \"TestLib/0.1@gbmhunter/testing\"\n\n def build(self):\n cmake = CMake(self) \n cmake.configure(source_folder=\"src/\")\n\n print('BLAHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH = ' + str(self.deps_cpp_info[\"TestLib\"]))\n cmake.build()\n\n def imports(self):\n self.copy(\"*.dll\", dst=\"bin\", src=\"bin\")\n self.copy(\"*.dylib*\", dst=\"bin\", src=\"lib\")\n self.copy('*.so*', dst='bin', src='lib')\n\n def test(self):\n if not tools.cross_building(self.settings):\n os.chdir(\"bin\")\n self.run(\".%sexample\" % os.sep)\n\n def package(self):\n self.copy(\"*.h\", dst=\"include\", src=\"src\")\n self.copy(\"*.lib\", dst=\"lib\", keep_path=False)\n self.copy(\"*.dll\", dst=\"bin\", keep_path=False)\n self.copy(\"*.dylib*\", dst=\"lib\", keep_path=False)\n self.copy(\"*.so\", dst=\"lib\", keep_path=False)\n self.copy(\"*.a\", dst=\"lib\", keep_path=False)", "step-ids": [ 3, 4, 5, 6, 8 ] }
[ 3, 4, 5, 6, 8 ]
<|reserved_special_token_0|> class Car: <|reserved_special_token_0|> def activate(self): self.deactivate() self.pi.write(self.STBY, 1) <|reserved_special_token_0|> def setDrive(self, direction, dutycycle=100): dc = int(255.0 / 100.0 * dutycycle) if direction == 1: self.pi.write(self.driveIN1, 1) self.pi.write(self.driveIN2, 0) self.pi.set_PWM_dutycycle(self.drivePWM, dc) elif direction == -1: self.pi.write(self.driveIN1, 0) self.pi.write(self.driveIN2, 1) self.pi.set_PWM_dutycycle(self.drivePWM, dc) else: self.pi.write(self.driveIN1, 0) self.pi.write(self.driveIN2, 0) self.pi.set_PWM_dutycycle(self.drivePWM, 0) <|reserved_special_token_0|> def updateDistances(self): if self.sensorTrig > 0: for sensor in range(len(self.sensors)): while self.pi.read(self.sensors[sensor]): continue self.pi.write(self.sensorTrig, True) time.sleep(1e-06) self.pi.write(self.sensorTrig, False) startT = time.time() while not self.pi.read(self.sensors[sensor]) and time.time( ) - startT < 0.001: continue startT = time.time() while self.pi.read(self.sensors[sensor]): continue endT = time.time() self.distances[sensor] = round((endT - startT) * 17150, 2) """ # trial to read multiple sensors at once but was having issues # definitely can be optimized better and needs code hang detection startT = {} endT = {} self.pi.write(self.sensorTrig, True) time.sleep(0.0000001) self.pi.write(self.sensorTrig, False) sensorCount = len(self.sensors) while len(endT) < sensorCount: for sensor in range(sensorCount): if sensor not in startT.keys(): if self.pi.read(self.sensors[sensor]): startT[sensor] = time.time() elif not sensor in endT.keys(): if not self.pi.read(self.sensors[sensor]): endT[sensor] = time.time() for sensor in range(len(self.sensors)): self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2) """ <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Car: def __init__(self, STBY, PWMA, AIN2, AIN1, BIN1, BIN2, PWMB, sensorTrig =0, sensors=[]): self.pi = pigpio.pi() if not self.pi.connected: print('Pi not connected to pigpio.') return self.STBY = STBY self.drivePWM = PWMA self.driveIN1 = AIN1 self.driveIN2 = AIN2 self.steerPWM = PWMB self.steerIN1 = BIN1 self.steerIN2 = BIN2 self.pi.set_mode(STBY, pigpio.OUTPUT) self.pi.set_mode(PWMA, pigpio.OUTPUT) self.pi.set_mode(AIN1, pigpio.OUTPUT) self.pi.set_mode(AIN2, pigpio.OUTPUT) self.pi.set_mode(PWMB, pigpio.OUTPUT) self.pi.set_mode(BIN1, pigpio.OUTPUT) self.pi.set_mode(BIN2, pigpio.OUTPUT) self.pi.set_PWM_frequency(PWMA, 50) self.pi.set_PWM_frequency(PWMB, 50) self.sensorTrig = sensorTrig self.sensors = sensors self.distances = [] for i in range(len(sensors)): self.distances.append(0) if sensorTrig > 0: self.pi.set_mode(sensorTrig, pigpio.OUTPUT) for sensor in range(len(sensors)): if sensors[sensor] > 0: self.pi.set_mode(sensors[sensor], pigpio.INPUT) self.activate() def activate(self): self.deactivate() self.pi.write(self.STBY, 1) <|reserved_special_token_0|> def setDrive(self, direction, dutycycle=100): dc = int(255.0 / 100.0 * dutycycle) if direction == 1: self.pi.write(self.driveIN1, 1) self.pi.write(self.driveIN2, 0) self.pi.set_PWM_dutycycle(self.drivePWM, dc) elif direction == -1: self.pi.write(self.driveIN1, 0) self.pi.write(self.driveIN2, 1) self.pi.set_PWM_dutycycle(self.drivePWM, dc) else: self.pi.write(self.driveIN1, 0) self.pi.write(self.driveIN2, 0) self.pi.set_PWM_dutycycle(self.drivePWM, 0) <|reserved_special_token_0|> def updateDistances(self): if self.sensorTrig > 0: for sensor in 
range(len(self.sensors)): while self.pi.read(self.sensors[sensor]): continue self.pi.write(self.sensorTrig, True) time.sleep(1e-06) self.pi.write(self.sensorTrig, False) startT = time.time() while not self.pi.read(self.sensors[sensor]) and time.time( ) - startT < 0.001: continue startT = time.time() while self.pi.read(self.sensors[sensor]): continue endT = time.time() self.distances[sensor] = round((endT - startT) * 17150, 2) """ # trial to read multiple sensors at once but was having issues # definitely can be optimized better and needs code hang detection startT = {} endT = {} self.pi.write(self.sensorTrig, True) time.sleep(0.0000001) self.pi.write(self.sensorTrig, False) sensorCount = len(self.sensors) while len(endT) < sensorCount: for sensor in range(sensorCount): if sensor not in startT.keys(): if self.pi.read(self.sensors[sensor]): startT[sensor] = time.time() elif not sensor in endT.keys(): if not self.pi.read(self.sensors[sensor]): endT[sensor] = time.time() for sensor in range(len(self.sensors)): self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2) """ def stop(self): self.deactivate() self.pi.stop() <|reserved_special_token_1|> <|reserved_special_token_0|> class Car: def __init__(self, STBY, PWMA, AIN2, AIN1, BIN1, BIN2, PWMB, sensorTrig =0, sensors=[]): self.pi = pigpio.pi() if not self.pi.connected: print('Pi not connected to pigpio.') return self.STBY = STBY self.drivePWM = PWMA self.driveIN1 = AIN1 self.driveIN2 = AIN2 self.steerPWM = PWMB self.steerIN1 = BIN1 self.steerIN2 = BIN2 self.pi.set_mode(STBY, pigpio.OUTPUT) self.pi.set_mode(PWMA, pigpio.OUTPUT) self.pi.set_mode(AIN1, pigpio.OUTPUT) self.pi.set_mode(AIN2, pigpio.OUTPUT) self.pi.set_mode(PWMB, pigpio.OUTPUT) self.pi.set_mode(BIN1, pigpio.OUTPUT) self.pi.set_mode(BIN2, pigpio.OUTPUT) self.pi.set_PWM_frequency(PWMA, 50) self.pi.set_PWM_frequency(PWMB, 50) self.sensorTrig = sensorTrig self.sensors = sensors self.distances = [] for i in range(len(sensors)): self.distances.append(0) if sensorTrig > 0: self.pi.set_mode(sensorTrig, pigpio.OUTPUT) for sensor in range(len(sensors)): if sensors[sensor] > 0: self.pi.set_mode(sensors[sensor], pigpio.INPUT) self.activate() def activate(self): self.deactivate() self.pi.write(self.STBY, 1) <|reserved_special_token_0|> def setDrive(self, direction, dutycycle=100): dc = int(255.0 / 100.0 * dutycycle) if direction == 1: self.pi.write(self.driveIN1, 1) self.pi.write(self.driveIN2, 0) self.pi.set_PWM_dutycycle(self.drivePWM, dc) elif direction == -1: self.pi.write(self.driveIN1, 0) self.pi.write(self.driveIN2, 1) self.pi.set_PWM_dutycycle(self.drivePWM, dc) else: self.pi.write(self.driveIN1, 0) self.pi.write(self.driveIN2, 0) self.pi.set_PWM_dutycycle(self.drivePWM, 0) def setSteering(self, direction, dutycycle=100): dc = int(255.0 / 100.0 * dutycycle) if direction == 1: self.pi.write(self.steerIN1, 0) self.pi.write(self.steerIN2, 1) self.pi.set_PWM_dutycycle(self.steerPWM, dc) elif direction == -1: self.pi.write(self.steerIN1, 1) self.pi.write(self.steerIN2, 0) self.pi.set_PWM_dutycycle(self.steerPWM, dc) else: self.pi.write(self.steerIN1, 0) self.pi.write(self.steerIN2, 0) self.pi.set_PWM_dutycycle(self.steerPWM, 0) def updateDistances(self): if self.sensorTrig > 0: for sensor in range(len(self.sensors)): while self.pi.read(self.sensors[sensor]): continue self.pi.write(self.sensorTrig, True) time.sleep(1e-06) self.pi.write(self.sensorTrig, False) startT = time.time() while not self.pi.read(self.sensors[sensor]) and time.time( ) - startT < 0.001: continue startT 
= time.time() while self.pi.read(self.sensors[sensor]): continue endT = time.time() self.distances[sensor] = round((endT - startT) * 17150, 2) """ # trial to read multiple sensors at once but was having issues # definitely can be optimized better and needs code hang detection startT = {} endT = {} self.pi.write(self.sensorTrig, True) time.sleep(0.0000001) self.pi.write(self.sensorTrig, False) sensorCount = len(self.sensors) while len(endT) < sensorCount: for sensor in range(sensorCount): if sensor not in startT.keys(): if self.pi.read(self.sensors[sensor]): startT[sensor] = time.time() elif not sensor in endT.keys(): if not self.pi.read(self.sensors[sensor]): endT[sensor] = time.time() for sensor in range(len(self.sensors)): self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2) """ def stop(self): self.deactivate() self.pi.stop() <|reserved_special_token_1|> <|reserved_special_token_0|> class Car: def __init__(self, STBY, PWMA, AIN2, AIN1, BIN1, BIN2, PWMB, sensorTrig =0, sensors=[]): self.pi = pigpio.pi() if not self.pi.connected: print('Pi not connected to pigpio.') return self.STBY = STBY self.drivePWM = PWMA self.driveIN1 = AIN1 self.driveIN2 = AIN2 self.steerPWM = PWMB self.steerIN1 = BIN1 self.steerIN2 = BIN2 self.pi.set_mode(STBY, pigpio.OUTPUT) self.pi.set_mode(PWMA, pigpio.OUTPUT) self.pi.set_mode(AIN1, pigpio.OUTPUT) self.pi.set_mode(AIN2, pigpio.OUTPUT) self.pi.set_mode(PWMB, pigpio.OUTPUT) self.pi.set_mode(BIN1, pigpio.OUTPUT) self.pi.set_mode(BIN2, pigpio.OUTPUT) self.pi.set_PWM_frequency(PWMA, 50) self.pi.set_PWM_frequency(PWMB, 50) self.sensorTrig = sensorTrig self.sensors = sensors self.distances = [] for i in range(len(sensors)): self.distances.append(0) if sensorTrig > 0: self.pi.set_mode(sensorTrig, pigpio.OUTPUT) for sensor in range(len(sensors)): if sensors[sensor] > 0: self.pi.set_mode(sensors[sensor], pigpio.INPUT) self.activate() def activate(self): self.deactivate() self.pi.write(self.STBY, 1) def deactivate(self): self.pi.write(self.STBY, 0) self.pi.write(self.driveIN1, 0) self.pi.write(self.driveIN2, 0) self.pi.set_PWM_dutycycle(self.drivePWM, 0) self.pi.write(self.steerIN1, 0) self.pi.write(self.steerIN2, 0) self.pi.set_PWM_dutycycle(self.steerPWM, 0) if self.sensorTrig > 0: self.pi.write(self.sensorTrig, False) def setDrive(self, direction, dutycycle=100): dc = int(255.0 / 100.0 * dutycycle) if direction == 1: self.pi.write(self.driveIN1, 1) self.pi.write(self.driveIN2, 0) self.pi.set_PWM_dutycycle(self.drivePWM, dc) elif direction == -1: self.pi.write(self.driveIN1, 0) self.pi.write(self.driveIN2, 1) self.pi.set_PWM_dutycycle(self.drivePWM, dc) else: self.pi.write(self.driveIN1, 0) self.pi.write(self.driveIN2, 0) self.pi.set_PWM_dutycycle(self.drivePWM, 0) def setSteering(self, direction, dutycycle=100): dc = int(255.0 / 100.0 * dutycycle) if direction == 1: self.pi.write(self.steerIN1, 0) self.pi.write(self.steerIN2, 1) self.pi.set_PWM_dutycycle(self.steerPWM, dc) elif direction == -1: self.pi.write(self.steerIN1, 1) self.pi.write(self.steerIN2, 0) self.pi.set_PWM_dutycycle(self.steerPWM, dc) else: self.pi.write(self.steerIN1, 0) self.pi.write(self.steerIN2, 0) self.pi.set_PWM_dutycycle(self.steerPWM, 0) def updateDistances(self): if self.sensorTrig > 0: for sensor in range(len(self.sensors)): while self.pi.read(self.sensors[sensor]): continue self.pi.write(self.sensorTrig, True) time.sleep(1e-06) self.pi.write(self.sensorTrig, False) startT = time.time() while not self.pi.read(self.sensors[sensor]) and time.time( ) - startT < 0.001: 
continue startT = time.time() while self.pi.read(self.sensors[sensor]): continue endT = time.time() self.distances[sensor] = round((endT - startT) * 17150, 2) """ # trial to read multiple sensors at once but was having issues # definitely can be optimized better and needs code hang detection startT = {} endT = {} self.pi.write(self.sensorTrig, True) time.sleep(0.0000001) self.pi.write(self.sensorTrig, False) sensorCount = len(self.sensors) while len(endT) < sensorCount: for sensor in range(sensorCount): if sensor not in startT.keys(): if self.pi.read(self.sensors[sensor]): startT[sensor] = time.time() elif not sensor in endT.keys(): if not self.pi.read(self.sensors[sensor]): endT[sensor] = time.time() for sensor in range(len(self.sensors)): self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2) """ def stop(self): self.deactivate() self.pi.stop() <|reserved_special_token_1|> import time import pigpio class Car: def __init__(self, STBY, PWMA, AIN2, AIN1, BIN1, BIN2, PWMB, sensorTrig=0, sensors=[]): self.pi = pigpio.pi() if not self.pi.connected: print("Pi not connected to pigpio.") return # GPIO Drive Pin locations self.STBY = STBY # drive motor self.drivePWM = PWMA self.driveIN1 = AIN1 self.driveIN2 = AIN2 # steering motor self.steerPWM = PWMB self.steerIN1 = BIN1 self.steerIN2 = BIN2 # initialize GPIO self.pi.set_mode(STBY, pigpio.OUTPUT) self.pi.set_mode(PWMA, pigpio.OUTPUT) self.pi.set_mode(AIN1, pigpio.OUTPUT) self.pi.set_mode(AIN2, pigpio.OUTPUT) self.pi.set_mode(PWMB, pigpio.OUTPUT) self.pi.set_mode(BIN1, pigpio.OUTPUT) self.pi.set_mode(BIN2, pigpio.OUTPUT) self.pi.set_PWM_frequency(PWMA, 50) self.pi.set_PWM_frequency(PWMB, 50) # Sensor GPIO Pin locations self.sensorTrig = sensorTrig self.sensors = sensors self.distances = [] for i in range(len(sensors)): self.distances.append(0) # initialize sensor GPIO if sensorTrig > 0: self.pi.set_mode(sensorTrig, pigpio.OUTPUT) for sensor in range(len(sensors)): if sensors[sensor] > 0: self.pi.set_mode(sensors[sensor], pigpio.INPUT) # activate car self.activate() # activate motors def activate(self): self.deactivate() self.pi.write(self.STBY, 1) # shut off motors def deactivate(self): self.pi.write(self.STBY, 0) # shut off drive motor self.pi.write(self.driveIN1, 0) self.pi.write(self.driveIN2, 0) self.pi.set_PWM_dutycycle(self.drivePWM, 0) # shut off steering motor self.pi.write(self.steerIN1, 0) self.pi.write(self.steerIN2, 0) self.pi.set_PWM_dutycycle(self.steerPWM, 0) if self.sensorTrig > 0: # make sure sensors aren't triggered self.pi.write(self.sensorTrig, False) # set drive motor def setDrive(self, direction, dutycycle=100): dc = int((255.0 / 100.0) * dutycycle) if direction == 1: self.pi.write(self.driveIN1, 1) self.pi.write(self.driveIN2, 0) self.pi.set_PWM_dutycycle(self.drivePWM, dc) elif direction == -1: self.pi.write(self.driveIN1, 0) self.pi.write(self.driveIN2, 1) self.pi.set_PWM_dutycycle(self.drivePWM, dc) else: self.pi.write(self.driveIN1, 0) self.pi.write(self.driveIN2, 0) self.pi.set_PWM_dutycycle(self.drivePWM, 0) # set steering motor def setSteering(self, direction, dutycycle=100): dc = int((255.0 / 100.0) * dutycycle) if direction == 1: self.pi.write(self.steerIN1, 0) self.pi.write(self.steerIN2, 1) self.pi.set_PWM_dutycycle(self.steerPWM, dc) elif direction == -1: self.pi.write(self.steerIN1, 1) self.pi.write(self.steerIN2, 0) self.pi.set_PWM_dutycycle(self.steerPWM, dc) else: self.pi.write(self.steerIN1, 0) self.pi.write(self.steerIN2, 0) self.pi.set_PWM_dutycycle(self.steerPWM, 0) # update sensors 
distance def updateDistances(self): if self.sensorTrig > 0: for sensor in range(len(self.sensors)): while self.pi.read(self.sensors[sensor]): continue # trigger the sensors so they start reading self.pi.write(self.sensorTrig, True) time.sleep(0.000001) self.pi.write(self.sensorTrig, False) # wait until the sensor starts reading, if it takes longer than .001 seconds then something went wrong startT = time.time() while not self.pi.read(self.sensors[sensor]) and time.time() - startT < .001: continue startT = time.time() # wait for the sensor to become inactive which gives us the ending time while self.pi.read(self.sensors[sensor]): continue endT = time.time() # convert the sensor readings to distance in centimeters self.distances[sensor] = round((endT - startT) * 17150, 2) ''' # trial to read multiple sensors at once but was having issues # definitely can be optimized better and needs code hang detection startT = {} endT = {} self.pi.write(self.sensorTrig, True) time.sleep(0.0000001) self.pi.write(self.sensorTrig, False) sensorCount = len(self.sensors) while len(endT) < sensorCount: for sensor in range(sensorCount): if sensor not in startT.keys(): if self.pi.read(self.sensors[sensor]): startT[sensor] = time.time() elif not sensor in endT.keys(): if not self.pi.read(self.sensors[sensor]): endT[sensor] = time.time() for sensor in range(len(self.sensors)): self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2) ''' # shut everything off and disconnect from pi def stop(self): self.deactivate() self.pi.stop()
flexible
{ "blob_id": "5b9f1b3ca4b50a4e9e8bd6715e73c62b4f778929", "index": 1594, "step-1": "<mask token>\n\n\nclass Car:\n <mask token>\n\n def activate(self):\n self.deactivate()\n self.pi.write(self.STBY, 1)\n <mask token>\n\n def setDrive(self, direction, dutycycle=100):\n dc = int(255.0 / 100.0 * dutycycle)\n if direction == 1:\n self.pi.write(self.driveIN1, 1)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n elif direction == -1:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 1)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n else:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\n <mask token>\n\n def updateDistances(self):\n if self.sensorTrig > 0:\n for sensor in range(len(self.sensors)):\n while self.pi.read(self.sensors[sensor]):\n continue\n self.pi.write(self.sensorTrig, True)\n time.sleep(1e-06)\n self.pi.write(self.sensorTrig, False)\n startT = time.time()\n while not self.pi.read(self.sensors[sensor]) and time.time(\n ) - startT < 0.001:\n continue\n startT = time.time()\n while self.pi.read(self.sensors[sensor]):\n continue\n endT = time.time()\n self.distances[sensor] = round((endT - startT) * 17150, 2)\n \"\"\"\n # trial to read multiple sensors at once but was having issues\n # definitely can be optimized better and needs code hang detection\n startT = {}\n endT = {}\n self.pi.write(self.sensorTrig, True)\n time.sleep(0.0000001)\n self.pi.write(self.sensorTrig, False)\n sensorCount = len(self.sensors)\n while len(endT) < sensorCount:\n for sensor in range(sensorCount):\n if sensor not in startT.keys():\n if self.pi.read(self.sensors[sensor]):\n startT[sensor] = time.time()\n elif not sensor in endT.keys():\n if not self.pi.read(self.sensors[sensor]):\n endT[sensor] = time.time()\n for sensor in range(len(self.sensors)):\n self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2)\n \"\"\"\n <mask token>\n", "step-2": "<mask token>\n\n\nclass Car:\n\n def __init__(self, STBY, PWMA, AIN2, AIN1, BIN1, BIN2, PWMB, sensorTrig\n =0, sensors=[]):\n self.pi = pigpio.pi()\n if not self.pi.connected:\n print('Pi not connected to pigpio.')\n return\n self.STBY = STBY\n self.drivePWM = PWMA\n self.driveIN1 = AIN1\n self.driveIN2 = AIN2\n self.steerPWM = PWMB\n self.steerIN1 = BIN1\n self.steerIN2 = BIN2\n self.pi.set_mode(STBY, pigpio.OUTPUT)\n self.pi.set_mode(PWMA, pigpio.OUTPUT)\n self.pi.set_mode(AIN1, pigpio.OUTPUT)\n self.pi.set_mode(AIN2, pigpio.OUTPUT)\n self.pi.set_mode(PWMB, pigpio.OUTPUT)\n self.pi.set_mode(BIN1, pigpio.OUTPUT)\n self.pi.set_mode(BIN2, pigpio.OUTPUT)\n self.pi.set_PWM_frequency(PWMA, 50)\n self.pi.set_PWM_frequency(PWMB, 50)\n self.sensorTrig = sensorTrig\n self.sensors = sensors\n self.distances = []\n for i in range(len(sensors)):\n self.distances.append(0)\n if sensorTrig > 0:\n self.pi.set_mode(sensorTrig, pigpio.OUTPUT)\n for sensor in range(len(sensors)):\n if sensors[sensor] > 0:\n self.pi.set_mode(sensors[sensor], pigpio.INPUT)\n self.activate()\n\n def activate(self):\n self.deactivate()\n self.pi.write(self.STBY, 1)\n <mask token>\n\n def setDrive(self, direction, dutycycle=100):\n dc = int(255.0 / 100.0 * dutycycle)\n if direction == 1:\n self.pi.write(self.driveIN1, 1)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n elif direction == -1:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 1)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n else:\n 
self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\n <mask token>\n\n def updateDistances(self):\n if self.sensorTrig > 0:\n for sensor in range(len(self.sensors)):\n while self.pi.read(self.sensors[sensor]):\n continue\n self.pi.write(self.sensorTrig, True)\n time.sleep(1e-06)\n self.pi.write(self.sensorTrig, False)\n startT = time.time()\n while not self.pi.read(self.sensors[sensor]) and time.time(\n ) - startT < 0.001:\n continue\n startT = time.time()\n while self.pi.read(self.sensors[sensor]):\n continue\n endT = time.time()\n self.distances[sensor] = round((endT - startT) * 17150, 2)\n \"\"\"\n # trial to read multiple sensors at once but was having issues\n # definitely can be optimized better and needs code hang detection\n startT = {}\n endT = {}\n self.pi.write(self.sensorTrig, True)\n time.sleep(0.0000001)\n self.pi.write(self.sensorTrig, False)\n sensorCount = len(self.sensors)\n while len(endT) < sensorCount:\n for sensor in range(sensorCount):\n if sensor not in startT.keys():\n if self.pi.read(self.sensors[sensor]):\n startT[sensor] = time.time()\n elif not sensor in endT.keys():\n if not self.pi.read(self.sensors[sensor]):\n endT[sensor] = time.time()\n for sensor in range(len(self.sensors)):\n self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2)\n \"\"\"\n\n def stop(self):\n self.deactivate()\n self.pi.stop()\n", "step-3": "<mask token>\n\n\nclass Car:\n\n def __init__(self, STBY, PWMA, AIN2, AIN1, BIN1, BIN2, PWMB, sensorTrig\n =0, sensors=[]):\n self.pi = pigpio.pi()\n if not self.pi.connected:\n print('Pi not connected to pigpio.')\n return\n self.STBY = STBY\n self.drivePWM = PWMA\n self.driveIN1 = AIN1\n self.driveIN2 = AIN2\n self.steerPWM = PWMB\n self.steerIN1 = BIN1\n self.steerIN2 = BIN2\n self.pi.set_mode(STBY, pigpio.OUTPUT)\n self.pi.set_mode(PWMA, pigpio.OUTPUT)\n self.pi.set_mode(AIN1, pigpio.OUTPUT)\n self.pi.set_mode(AIN2, pigpio.OUTPUT)\n self.pi.set_mode(PWMB, pigpio.OUTPUT)\n self.pi.set_mode(BIN1, pigpio.OUTPUT)\n self.pi.set_mode(BIN2, pigpio.OUTPUT)\n self.pi.set_PWM_frequency(PWMA, 50)\n self.pi.set_PWM_frequency(PWMB, 50)\n self.sensorTrig = sensorTrig\n self.sensors = sensors\n self.distances = []\n for i in range(len(sensors)):\n self.distances.append(0)\n if sensorTrig > 0:\n self.pi.set_mode(sensorTrig, pigpio.OUTPUT)\n for sensor in range(len(sensors)):\n if sensors[sensor] > 0:\n self.pi.set_mode(sensors[sensor], pigpio.INPUT)\n self.activate()\n\n def activate(self):\n self.deactivate()\n self.pi.write(self.STBY, 1)\n <mask token>\n\n def setDrive(self, direction, dutycycle=100):\n dc = int(255.0 / 100.0 * dutycycle)\n if direction == 1:\n self.pi.write(self.driveIN1, 1)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n elif direction == -1:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 1)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n else:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\n\n def setSteering(self, direction, dutycycle=100):\n dc = int(255.0 / 100.0 * dutycycle)\n if direction == 1:\n self.pi.write(self.steerIN1, 0)\n self.pi.write(self.steerIN2, 1)\n self.pi.set_PWM_dutycycle(self.steerPWM, dc)\n elif direction == -1:\n self.pi.write(self.steerIN1, 1)\n self.pi.write(self.steerIN2, 0)\n self.pi.set_PWM_dutycycle(self.steerPWM, dc)\n else:\n self.pi.write(self.steerIN1, 0)\n self.pi.write(self.steerIN2, 0)\n 
self.pi.set_PWM_dutycycle(self.steerPWM, 0)\n\n def updateDistances(self):\n if self.sensorTrig > 0:\n for sensor in range(len(self.sensors)):\n while self.pi.read(self.sensors[sensor]):\n continue\n self.pi.write(self.sensorTrig, True)\n time.sleep(1e-06)\n self.pi.write(self.sensorTrig, False)\n startT = time.time()\n while not self.pi.read(self.sensors[sensor]) and time.time(\n ) - startT < 0.001:\n continue\n startT = time.time()\n while self.pi.read(self.sensors[sensor]):\n continue\n endT = time.time()\n self.distances[sensor] = round((endT - startT) * 17150, 2)\n \"\"\"\n # trial to read multiple sensors at once but was having issues\n # definitely can be optimized better and needs code hang detection\n startT = {}\n endT = {}\n self.pi.write(self.sensorTrig, True)\n time.sleep(0.0000001)\n self.pi.write(self.sensorTrig, False)\n sensorCount = len(self.sensors)\n while len(endT) < sensorCount:\n for sensor in range(sensorCount):\n if sensor not in startT.keys():\n if self.pi.read(self.sensors[sensor]):\n startT[sensor] = time.time()\n elif not sensor in endT.keys():\n if not self.pi.read(self.sensors[sensor]):\n endT[sensor] = time.time()\n for sensor in range(len(self.sensors)):\n self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2)\n \"\"\"\n\n def stop(self):\n self.deactivate()\n self.pi.stop()\n", "step-4": "<mask token>\n\n\nclass Car:\n\n def __init__(self, STBY, PWMA, AIN2, AIN1, BIN1, BIN2, PWMB, sensorTrig\n =0, sensors=[]):\n self.pi = pigpio.pi()\n if not self.pi.connected:\n print('Pi not connected to pigpio.')\n return\n self.STBY = STBY\n self.drivePWM = PWMA\n self.driveIN1 = AIN1\n self.driveIN2 = AIN2\n self.steerPWM = PWMB\n self.steerIN1 = BIN1\n self.steerIN2 = BIN2\n self.pi.set_mode(STBY, pigpio.OUTPUT)\n self.pi.set_mode(PWMA, pigpio.OUTPUT)\n self.pi.set_mode(AIN1, pigpio.OUTPUT)\n self.pi.set_mode(AIN2, pigpio.OUTPUT)\n self.pi.set_mode(PWMB, pigpio.OUTPUT)\n self.pi.set_mode(BIN1, pigpio.OUTPUT)\n self.pi.set_mode(BIN2, pigpio.OUTPUT)\n self.pi.set_PWM_frequency(PWMA, 50)\n self.pi.set_PWM_frequency(PWMB, 50)\n self.sensorTrig = sensorTrig\n self.sensors = sensors\n self.distances = []\n for i in range(len(sensors)):\n self.distances.append(0)\n if sensorTrig > 0:\n self.pi.set_mode(sensorTrig, pigpio.OUTPUT)\n for sensor in range(len(sensors)):\n if sensors[sensor] > 0:\n self.pi.set_mode(sensors[sensor], pigpio.INPUT)\n self.activate()\n\n def activate(self):\n self.deactivate()\n self.pi.write(self.STBY, 1)\n\n def deactivate(self):\n self.pi.write(self.STBY, 0)\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\n self.pi.write(self.steerIN1, 0)\n self.pi.write(self.steerIN2, 0)\n self.pi.set_PWM_dutycycle(self.steerPWM, 0)\n if self.sensorTrig > 0:\n self.pi.write(self.sensorTrig, False)\n\n def setDrive(self, direction, dutycycle=100):\n dc = int(255.0 / 100.0 * dutycycle)\n if direction == 1:\n self.pi.write(self.driveIN1, 1)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n elif direction == -1:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 1)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n else:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\n\n def setSteering(self, direction, dutycycle=100):\n dc = int(255.0 / 100.0 * dutycycle)\n if direction == 1:\n self.pi.write(self.steerIN1, 0)\n self.pi.write(self.steerIN2, 1)\n 
self.pi.set_PWM_dutycycle(self.steerPWM, dc)\n elif direction == -1:\n self.pi.write(self.steerIN1, 1)\n self.pi.write(self.steerIN2, 0)\n self.pi.set_PWM_dutycycle(self.steerPWM, dc)\n else:\n self.pi.write(self.steerIN1, 0)\n self.pi.write(self.steerIN2, 0)\n self.pi.set_PWM_dutycycle(self.steerPWM, 0)\n\n def updateDistances(self):\n if self.sensorTrig > 0:\n for sensor in range(len(self.sensors)):\n while self.pi.read(self.sensors[sensor]):\n continue\n self.pi.write(self.sensorTrig, True)\n time.sleep(1e-06)\n self.pi.write(self.sensorTrig, False)\n startT = time.time()\n while not self.pi.read(self.sensors[sensor]) and time.time(\n ) - startT < 0.001:\n continue\n startT = time.time()\n while self.pi.read(self.sensors[sensor]):\n continue\n endT = time.time()\n self.distances[sensor] = round((endT - startT) * 17150, 2)\n \"\"\"\n # trial to read multiple sensors at once but was having issues\n # definitely can be optimized better and needs code hang detection\n startT = {}\n endT = {}\n self.pi.write(self.sensorTrig, True)\n time.sleep(0.0000001)\n self.pi.write(self.sensorTrig, False)\n sensorCount = len(self.sensors)\n while len(endT) < sensorCount:\n for sensor in range(sensorCount):\n if sensor not in startT.keys():\n if self.pi.read(self.sensors[sensor]):\n startT[sensor] = time.time()\n elif not sensor in endT.keys():\n if not self.pi.read(self.sensors[sensor]):\n endT[sensor] = time.time()\n for sensor in range(len(self.sensors)):\n self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2)\n \"\"\"\n\n def stop(self):\n self.deactivate()\n self.pi.stop()\n", "step-5": "import time\r\nimport pigpio\r\n\r\nclass Car:\r\n def __init__(self, STBY, PWMA, AIN2, AIN1, BIN1, BIN2, PWMB, sensorTrig=0, sensors=[]):\r\n self.pi = pigpio.pi()\r\n if not self.pi.connected:\r\n print(\"Pi not connected to pigpio.\")\r\n return\r\n \r\n # GPIO Drive Pin locations\r\n self.STBY = STBY\r\n # drive motor\r\n self.drivePWM = PWMA\r\n self.driveIN1 = AIN1\r\n self.driveIN2 = AIN2\r\n # steering motor\r\n self.steerPWM = PWMB\r\n self.steerIN1 = BIN1\r\n self.steerIN2 = BIN2\r\n \r\n # initialize GPIO\r\n self.pi.set_mode(STBY, pigpio.OUTPUT)\r\n self.pi.set_mode(PWMA, pigpio.OUTPUT)\r\n self.pi.set_mode(AIN1, pigpio.OUTPUT)\r\n self.pi.set_mode(AIN2, pigpio.OUTPUT)\r\n self.pi.set_mode(PWMB, pigpio.OUTPUT)\r\n self.pi.set_mode(BIN1, pigpio.OUTPUT)\r\n self.pi.set_mode(BIN2, pigpio.OUTPUT)\r\n \r\n self.pi.set_PWM_frequency(PWMA, 50)\r\n self.pi.set_PWM_frequency(PWMB, 50)\r\n \r\n \r\n # Sensor GPIO Pin locations\r\n self.sensorTrig = sensorTrig\r\n self.sensors = sensors\r\n self.distances = []\r\n for i in range(len(sensors)):\r\n self.distances.append(0)\r\n \r\n # initialize sensor GPIO\r\n if sensorTrig > 0:\r\n self.pi.set_mode(sensorTrig, pigpio.OUTPUT)\r\n for sensor in range(len(sensors)):\r\n if sensors[sensor] > 0:\r\n self.pi.set_mode(sensors[sensor], pigpio.INPUT)\r\n \r\n # activate car\r\n self.activate()\r\n \r\n # activate motors\r\n def activate(self): \r\n self.deactivate()\r\n self.pi.write(self.STBY, 1)\r\n \r\n # shut off motors\r\n def deactivate(self):\r\n self.pi.write(self.STBY, 0)\r\n # shut off drive motor\r\n self.pi.write(self.driveIN1, 0)\r\n self.pi.write(self.driveIN2, 0)\r\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\r\n # shut off steering motor\r\n self.pi.write(self.steerIN1, 0)\r\n self.pi.write(self.steerIN2, 0)\r\n self.pi.set_PWM_dutycycle(self.steerPWM, 0)\r\n if self.sensorTrig > 0:\r\n # make sure sensors aren't triggered\r\n 
self.pi.write(self.sensorTrig, False)\r\n \r\n # set drive motor\r\n def setDrive(self, direction, dutycycle=100):\r\n dc = int((255.0 / 100.0) * dutycycle)\r\n if direction == 1:\r\n self.pi.write(self.driveIN1, 1)\r\n self.pi.write(self.driveIN2, 0)\r\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\r\n elif direction == -1:\r\n self.pi.write(self.driveIN1, 0)\r\n self.pi.write(self.driveIN2, 1)\r\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\r\n else:\r\n self.pi.write(self.driveIN1, 0)\r\n self.pi.write(self.driveIN2, 0)\r\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\r\n \r\n # set steering motor\r\n def setSteering(self, direction, dutycycle=100):\r\n dc = int((255.0 / 100.0) * dutycycle)\r\n if direction == 1:\r\n self.pi.write(self.steerIN1, 0)\r\n self.pi.write(self.steerIN2, 1)\r\n self.pi.set_PWM_dutycycle(self.steerPWM, dc)\r\n elif direction == -1:\r\n self.pi.write(self.steerIN1, 1)\r\n self.pi.write(self.steerIN2, 0)\r\n self.pi.set_PWM_dutycycle(self.steerPWM, dc)\r\n else:\r\n self.pi.write(self.steerIN1, 0)\r\n self.pi.write(self.steerIN2, 0)\r\n self.pi.set_PWM_dutycycle(self.steerPWM, 0)\r\n \r\n # update sensors distance\r\n def updateDistances(self):\r\n if self.sensorTrig > 0:\r\n for sensor in range(len(self.sensors)):\r\n while self.pi.read(self.sensors[sensor]):\r\n continue\r\n # trigger the sensors so they start reading\r\n self.pi.write(self.sensorTrig, True)\r\n time.sleep(0.000001)\r\n self.pi.write(self.sensorTrig, False)\r\n # wait until the sensor starts reading, if it takes longer than .001 seconds then something went wrong\r\n startT = time.time()\r\n while not self.pi.read(self.sensors[sensor]) and time.time() - startT < .001:\r\n continue\r\n startT = time.time()\r\n # wait for the sensor to become inactive which gives us the ending time\r\n while self.pi.read(self.sensors[sensor]):\r\n continue\r\n endT = time.time()\r\n # convert the sensor readings to distance in centimeters\r\n self.distances[sensor] = round((endT - startT) * 17150, 2)\r\n \r\n '''\r\n # trial to read multiple sensors at once but was having issues\r\n # definitely can be optimized better and needs code hang detection\r\n startT = {}\r\n endT = {}\r\n self.pi.write(self.sensorTrig, True)\r\n time.sleep(0.0000001)\r\n self.pi.write(self.sensorTrig, False)\r\n sensorCount = len(self.sensors)\r\n while len(endT) < sensorCount:\r\n for sensor in range(sensorCount):\r\n if sensor not in startT.keys():\r\n if self.pi.read(self.sensors[sensor]):\r\n startT[sensor] = time.time()\r\n elif not sensor in endT.keys():\r\n if not self.pi.read(self.sensors[sensor]):\r\n endT[sensor] = time.time()\r\n for sensor in range(len(self.sensors)):\r\n self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2)\r\n '''\r\n \r\n # shut everything off and disconnect from pi\r\n def stop(self):\r\n self.deactivate()\r\n self.pi.stop()\r\n", "step-ids": [ 4, 6, 7, 8, 10 ] }
[ 4, 6, 7, 8, 10 ]
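A short usage sketch for the Car class captured above. The BCM pin numbers are placeholders rather than values from the record, and the sensors are assumed to be HC-SR04-style trigger/echo modules: the 17150 factor in updateDistances() is half the speed of sound in cm/s (34300 / 2), which converts the echo round-trip time into a one-way distance in centimeters.

import time

# Hypothetical pin assignments -- substitute the actual wiring.
car = Car(STBY=17, PWMA=18, AIN2=27, AIN1=22, BIN1=23, BIN2=24, PWMB=13,
          sensorTrig=5, sensors=[6, 26])
car.setDrive(1, 60)       # forward at 60% duty cycle
car.setSteering(-1)       # full-duty steer in one direction
time.sleep(1.0)
car.setDrive(0)           # release the drive motor
car.updateDistances()     # one reading in cm per echo pin in `sensors`
print(car.distances)
car.stop()                # shut everything off and disconnect from pigpio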
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for month in months:
    print('The next month is:\t' + month)
    print('\n')
print("""
End of program
""")
<|reserved_special_token_1|>
months = ['january', 'february', 'march', 'april', 'may', 'june', 'july',
    'august', 'september', 'october', 'november', 'december']
for month in months:
    print('The next month is:\t' + month)
    print('\n')
print("""
End of program
""")
<|reserved_special_token_1|>
### 15/04/2020
### Author: Omer Goder
### Looping through a list

months = ['january','february','march','april','may','june','july','august','september','october','november','december']

# Using a for loop to print a list
for month in months:
	print("The next month is:\t" + month)
	print('\n')
print("\nEnd of program\n") # Print out once - not in the loop


#example for indexing using enumeration (considered non-pythonic)
#for index, month in enumerate(months):
	# print(index, month.title() + " is a name of a month\n")
flexible
{ "blob_id": "bc8bc5c3b6954302d005fe618827c644f93ad14e", "index": 6030, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor month in months:\n print('The next month is:\\t' + month)\n print('\\n')\nprint(\"\"\"\nEnd of program\n\"\"\")\n", "step-3": "months = ['january', 'fabruary', 'march', 'april', 'may', 'june', 'july',\n 'august', 'september', 'october', 'november', 'december']\nfor month in months:\n print('The next month is:\\t' + month)\n print('\\n')\nprint(\"\"\"\nEnd of program\n\"\"\")\n", "step-4": "### 15/04/2020\r\n### Author: Omer Goder\r\n### Looping through a list\r\n\r\nmonths = ['january','fabruary','march','april','may','june','july','august','september','october','november','december']\r\n\r\n# Using a for loop to print a list\r\nfor month in months:\r\n\tprint(\"The next month is:\\t\" + month)\r\n\tprint('\\n')\r\nprint(\"\\nEnd of program\\n\") # Print out once - not in the loop\r\n\r\n\r\n#example for indexing using enumeration (considers non-pythonic)\r\n#for index, month in enumerate(months):\r\n\t# print(index, month.title() + \" is a name of a month\\n\")\r\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
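The commented-out enumerate() example at the end of the record above, written out as runnable code (month spelling corrected):

months = ['january', 'february', 'march', 'april', 'may', 'june', 'july',
          'august', 'september', 'october', 'november', 'december']
for index, month in enumerate(months, start=1):  # start=1 so the index matches the calendar month
    print(index, month.title() + " is a name of a month")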
<|reserved_special_token_0|> <|reserved_special_token_1|> def factorial(n): """returns n!""" return 1 if n < 2 else n * factorial(n - 1) <|reserved_special_token_0|> <|reserved_special_token_1|> def factorial(n): """returns n!""" return 1 if n < 2 else n * factorial(n - 1) <|reserved_special_token_0|> print(list(map(fact, range(6)))) print([fact(n) for n in range(6)]) print(list(map(factorial, filter(lambda n: n % 2, range(6))))) print([factorial(n) for n in range(6) if n % 2]) <|reserved_special_token_1|> def factorial(n): """returns n!""" return 1 if n < 2 else n * factorial(n - 1) fact = factorial print(list(map(fact, range(6)))) print([fact(n) for n in range(6)]) print(list(map(factorial, filter(lambda n: n % 2, range(6))))) print([factorial(n) for n in range(6) if n % 2]) <|reserved_special_token_1|> # 把函数视作对象 def factorial(n): """returns n!""" return 1 if n < 2 else n * factorial(n - 1) fact = factorial print(list(map(fact, range(6)))) # 构建 0! 和 5! 的一个阶乘列表。 print([fact(n) for n in range(6)]) # 使用列表推导执行相同的操作。 # filter() 函数用于过滤序列,过滤掉不符合条件的元素,返回一个迭代器对象,如果要转换为列表,可以使用 list() 来转换。 # 该接收两个参数,第一个为函数,第二个为序列,序列的每个元素作为参数传递给函数进行判,然后返回 True 或 False, # 最后将返回 True 的元素放到新列表中。 print(list(map(factorial, filter(lambda n: n % 2, range(6))))) # 使用 map 和 filter 计算直到 5! 的奇数阶乘列表。 print([factorial(n) for n in range(6) if n % 2]) # 使用列表推导做相同的工作,换掉 map 和 filter,并避免了使用 lambda 表达式。
flexible
{ "blob_id": "4411c81351ac76d72512faaa6b498cd577815691", "index": 2572, "step-1": "<mask token>\n", "step-2": "def factorial(n):\n \"\"\"returns n!\"\"\"\n return 1 if n < 2 else n * factorial(n - 1)\n\n\n<mask token>\n", "step-3": "def factorial(n):\n \"\"\"returns n!\"\"\"\n return 1 if n < 2 else n * factorial(n - 1)\n\n\n<mask token>\nprint(list(map(fact, range(6))))\nprint([fact(n) for n in range(6)])\nprint(list(map(factorial, filter(lambda n: n % 2, range(6)))))\nprint([factorial(n) for n in range(6) if n % 2])\n", "step-4": "def factorial(n):\n \"\"\"returns n!\"\"\"\n return 1 if n < 2 else n * factorial(n - 1)\n\n\nfact = factorial\nprint(list(map(fact, range(6))))\nprint([fact(n) for n in range(6)])\nprint(list(map(factorial, filter(lambda n: n % 2, range(6)))))\nprint([factorial(n) for n in range(6) if n % 2])\n", "step-5": "# 把函数视作对象\r\ndef factorial(n):\r\n \"\"\"returns n!\"\"\"\r\n return 1 if n < 2 else n * factorial(n - 1)\r\n\r\n\r\nfact = factorial\r\n\r\nprint(list(map(fact, range(6)))) # 构建 0! 和 5! 的一个阶乘列表。\r\n\r\nprint([fact(n) for n in range(6)]) # 使用列表推导执行相同的操作。\r\n\r\n# filter() 函数用于过滤序列,过滤掉不符合条件的元素,返回一个迭代器对象,如果要转换为列表,可以使用 list() 来转换。\r\n# 该接收两个参数,第一个为函数,第二个为序列,序列的每个元素作为参数传递给函数进行判,然后返回 True 或 False,\r\n# 最后将返回 True 的元素放到新列表中。\r\nprint(list(map(factorial, filter(lambda n: n % 2, range(6))))) # 使用 map 和 filter 计算直到 5! 的奇数阶乘列表。\r\n\r\nprint([factorial(n) for n in range(6) if n % 2]) # 使用列表推导做相同的工作,换掉 map 和 filter,并避免了使用 lambda 表达式。\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> class Test(unittest.TestCase): def setUp(self): """Before each test, set up a blank database""" self.app = create_app('configmodule.TestingConfig') self.app.testing = True self.client = self.app.test_client() with self.app.app_context(): db.drop_all() db.create_all() <|reserved_special_token_0|> def test_user(self): rv = self.client.post('/api/users/', data=json.dumps({'email': '[email protected]', 'password': 'abc123'})) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict['id'] == 1 assert 'password' not in rv_dict assert rv_dict['email'] == '[email protected]' rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc1234'})) assert rv.status_code == 401 rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc1234'})) assert rv.status_code == 401 rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc123'})) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 headers = {'Authorization': 'Bearer ' + rv_dict['access_token']} rv = self.client.get('/api/users/', headers=headers) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict['email'] == '[email protected]' rv = self.client.put('/api/users/', data=json.dumps({'name': 'carl carlsson'}), headers=headers) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict['name'] == 'Carl Carlsson' def test_empty(self): rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc123'})) assert rv.status_code == 401 <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Test(unittest.TestCase): def setUp(self): """Before each test, set up a blank database""" self.app = create_app('configmodule.TestingConfig') self.app.testing = True self.client = self.app.test_client() with self.app.app_context(): db.drop_all() db.create_all() def tearDown(self): with self.app.app_context(): db.session.remove() db.drop_all() def test_user(self): rv = self.client.post('/api/users/', data=json.dumps({'email': '[email protected]', 'password': 'abc123'})) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict['id'] == 1 assert 'password' not in rv_dict assert rv_dict['email'] == '[email protected]' rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc1234'})) assert rv.status_code == 401 rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc1234'})) assert rv.status_code == 401 rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc123'})) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 headers = {'Authorization': 'Bearer ' + rv_dict['access_token']} rv = self.client.get('/api/users/', headers=headers) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict['email'] == '[email protected]' rv = self.client.put('/api/users/', data=json.dumps({'name': 'carl carlsson'}), headers=headers) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict['name'] == 'Carl Carlsson' def test_empty(self): rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc123'})) assert rv.status_code == 401 <|reserved_special_token_0|> 
<|reserved_special_token_1|> <|reserved_special_token_0|> class Test(unittest.TestCase): def setUp(self): """Before each test, set up a blank database""" self.app = create_app('configmodule.TestingConfig') self.app.testing = True self.client = self.app.test_client() with self.app.app_context(): db.drop_all() db.create_all() def tearDown(self): with self.app.app_context(): db.session.remove() db.drop_all() def test_user(self): rv = self.client.post('/api/users/', data=json.dumps({'email': '[email protected]', 'password': 'abc123'})) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict['id'] == 1 assert 'password' not in rv_dict assert rv_dict['email'] == '[email protected]' rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc1234'})) assert rv.status_code == 401 rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc1234'})) assert rv.status_code == 401 rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc123'})) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 headers = {'Authorization': 'Bearer ' + rv_dict['access_token']} rv = self.client.get('/api/users/', headers=headers) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict['email'] == '[email protected]' rv = self.client.put('/api/users/', data=json.dumps({'name': 'carl carlsson'}), headers=headers) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict['name'] == 'Carl Carlsson' def test_empty(self): rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc123'})) assert rv.status_code == 401 if __name__ == '__main__': unittest.main() <|reserved_special_token_1|> from app import create_app, db import unittest import json class Test(unittest.TestCase): def setUp(self): """Before each test, set up a blank database""" self.app = create_app('configmodule.TestingConfig') self.app.testing = True self.client = self.app.test_client() with self.app.app_context(): db.drop_all() db.create_all() def tearDown(self): with self.app.app_context(): db.session.remove() db.drop_all() def test_user(self): rv = self.client.post('/api/users/', data=json.dumps({'email': '[email protected]', 'password': 'abc123'})) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict['id'] == 1 assert 'password' not in rv_dict assert rv_dict['email'] == '[email protected]' rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc1234'})) assert rv.status_code == 401 rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc1234'})) assert rv.status_code == 401 rv = self.client.post('/api/users/login', data=json.dumps({'email': '[email protected]', 'password': 'abc123'})) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 headers = {'Authorization': 'Bearer ' + rv_dict['access_token']} rv = self.client.get('/api/users/', headers=headers) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict['email'] == '[email protected]' rv = self.client.put('/api/users/', data=json.dumps({'name': 'carl carlsson'}), headers=headers) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict['name'] == 'Carl Carlsson' def test_empty(self): rv = self.client.post('/api/users/login', 
data=json.dumps({'email': '[email protected]', 'password': 'abc123'})) assert rv.status_code == 401 if __name__ == '__main__': unittest.main() <|reserved_special_token_1|> from app import create_app, db import unittest import json class Test(unittest.TestCase): def setUp(self): """Before each test, set up a blank database""" self.app = create_app("configmodule.TestingConfig") self.app.testing = True self.client = self.app.test_client() with self.app.app_context(): db.drop_all() db.create_all() # Called after every test def tearDown(self): with self.app.app_context(): db.session.remove() db.drop_all() def test_user(self): # Create user rv = self.client.post( "/api/users/", data=json.dumps({"email": "[email protected]", "password": "abc123"}), ) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict["id"] == 1 assert "password" not in rv_dict assert rv_dict["email"] == "[email protected]" # Try loggin with wrong PASSWORD rv = self.client.post("/api/users/login", data=json.dumps({"email": "[email protected]", "password": "abc1234"})) assert rv.status_code == 401 # Try loggin with wrong Email rv = self.client.post("/api/users/login", data=json.dumps({"email": "[email protected]", "password": "abc1234"})) assert rv.status_code == 401 # Try loggin with right PASSWORD rv = self.client.post("/api/users/login", data=json.dumps({"email": "[email protected]", "password": "abc123"})) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 headers = {"Authorization": "Bearer " + rv_dict["access_token"]} # Get the current user rv = self.client.get("/api/users/", headers=headers) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict["email"] == "[email protected]" rv = self.client.put("/api/users/", data=json.dumps({"name": "carl carlsson"}), headers=headers) rv_dict = json.loads(rv.data.decode()) assert rv.status_code == 200 assert rv_dict["name"] == "Carl Carlsson" def test_empty(self): # Try loggin withou any users rv = self.client.post("/api/users/login", data=json.dumps({"email": "[email protected]", "password": "abc123"})) assert rv.status_code == 401 if __name__ == "__main__": unittest.main()
flexible
{ "blob_id": "56b4262e88793be366d8ffe0fe4427fdb2a99bd7", "index": 7447, "step-1": "<mask token>\n\n\nclass Test(unittest.TestCase):\n\n def setUp(self):\n \"\"\"Before each test, set up a blank database\"\"\"\n self.app = create_app('configmodule.TestingConfig')\n self.app.testing = True\n self.client = self.app.test_client()\n with self.app.app_context():\n db.drop_all()\n db.create_all()\n <mask token>\n\n def test_user(self):\n rv = self.client.post('/api/users/', data=json.dumps({'email':\n '[email protected]', 'password': 'abc123'}))\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n assert rv_dict['id'] == 1\n assert 'password' not in rv_dict\n assert rv_dict['email'] == '[email protected]'\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc1234'}))\n assert rv.status_code == 401\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc1234'}))\n assert rv.status_code == 401\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc123'}))\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n headers = {'Authorization': 'Bearer ' + rv_dict['access_token']}\n rv = self.client.get('/api/users/', headers=headers)\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n assert rv_dict['email'] == '[email protected]'\n rv = self.client.put('/api/users/', data=json.dumps({'name':\n 'carl carlsson'}), headers=headers)\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n assert rv_dict['name'] == 'Carl Carlsson'\n\n def test_empty(self):\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc123'}))\n assert rv.status_code == 401\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Test(unittest.TestCase):\n\n def setUp(self):\n \"\"\"Before each test, set up a blank database\"\"\"\n self.app = create_app('configmodule.TestingConfig')\n self.app.testing = True\n self.client = self.app.test_client()\n with self.app.app_context():\n db.drop_all()\n db.create_all()\n\n def tearDown(self):\n with self.app.app_context():\n db.session.remove()\n db.drop_all()\n\n def test_user(self):\n rv = self.client.post('/api/users/', data=json.dumps({'email':\n '[email protected]', 'password': 'abc123'}))\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n assert rv_dict['id'] == 1\n assert 'password' not in rv_dict\n assert rv_dict['email'] == '[email protected]'\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc1234'}))\n assert rv.status_code == 401\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc1234'}))\n assert rv.status_code == 401\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc123'}))\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n headers = {'Authorization': 'Bearer ' + rv_dict['access_token']}\n rv = self.client.get('/api/users/', headers=headers)\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n assert rv_dict['email'] == '[email protected]'\n rv = self.client.put('/api/users/', data=json.dumps({'name':\n 'carl carlsson'}), headers=headers)\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n assert rv_dict['name'] == 'Carl 
Carlsson'\n\n def test_empty(self):\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc123'}))\n assert rv.status_code == 401\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Test(unittest.TestCase):\n\n def setUp(self):\n \"\"\"Before each test, set up a blank database\"\"\"\n self.app = create_app('configmodule.TestingConfig')\n self.app.testing = True\n self.client = self.app.test_client()\n with self.app.app_context():\n db.drop_all()\n db.create_all()\n\n def tearDown(self):\n with self.app.app_context():\n db.session.remove()\n db.drop_all()\n\n def test_user(self):\n rv = self.client.post('/api/users/', data=json.dumps({'email':\n '[email protected]', 'password': 'abc123'}))\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n assert rv_dict['id'] == 1\n assert 'password' not in rv_dict\n assert rv_dict['email'] == '[email protected]'\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc1234'}))\n assert rv.status_code == 401\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc1234'}))\n assert rv.status_code == 401\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc123'}))\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n headers = {'Authorization': 'Bearer ' + rv_dict['access_token']}\n rv = self.client.get('/api/users/', headers=headers)\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n assert rv_dict['email'] == '[email protected]'\n rv = self.client.put('/api/users/', data=json.dumps({'name':\n 'carl carlsson'}), headers=headers)\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n assert rv_dict['name'] == 'Carl Carlsson'\n\n def test_empty(self):\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc123'}))\n assert rv.status_code == 401\n\n\nif __name__ == '__main__':\n unittest.main()\n", "step-4": "from app import create_app, db\nimport unittest\nimport json\n\n\nclass Test(unittest.TestCase):\n\n def setUp(self):\n \"\"\"Before each test, set up a blank database\"\"\"\n self.app = create_app('configmodule.TestingConfig')\n self.app.testing = True\n self.client = self.app.test_client()\n with self.app.app_context():\n db.drop_all()\n db.create_all()\n\n def tearDown(self):\n with self.app.app_context():\n db.session.remove()\n db.drop_all()\n\n def test_user(self):\n rv = self.client.post('/api/users/', data=json.dumps({'email':\n '[email protected]', 'password': 'abc123'}))\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n assert rv_dict['id'] == 1\n assert 'password' not in rv_dict\n assert rv_dict['email'] == '[email protected]'\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc1234'}))\n assert rv.status_code == 401\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc1234'}))\n assert rv.status_code == 401\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc123'}))\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n headers = {'Authorization': 'Bearer ' + rv_dict['access_token']}\n rv = self.client.get('/api/users/', headers=headers)\n rv_dict = json.loads(rv.data.decode())\n 
assert rv.status_code == 200\n assert rv_dict['email'] == '[email protected]'\n rv = self.client.put('/api/users/', data=json.dumps({'name':\n 'carl carlsson'}), headers=headers)\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n assert rv_dict['name'] == 'Carl Carlsson'\n\n def test_empty(self):\n rv = self.client.post('/api/users/login', data=json.dumps({'email':\n '[email protected]', 'password': 'abc123'}))\n assert rv.status_code == 401\n\n\nif __name__ == '__main__':\n unittest.main()\n", "step-5": "from app import create_app, db\nimport unittest\nimport json\n\n\nclass Test(unittest.TestCase):\n def setUp(self):\n \"\"\"Before each test, set up a blank database\"\"\"\n self.app = create_app(\"configmodule.TestingConfig\")\n self.app.testing = True\n\n self.client = self.app.test_client()\n\n with self.app.app_context():\n db.drop_all()\n db.create_all()\n\n # Called after every test\n def tearDown(self):\n with self.app.app_context():\n db.session.remove()\n db.drop_all()\n\n def test_user(self):\n # Create user\n rv = self.client.post(\n \"/api/users/\",\n data=json.dumps({\"email\": \"[email protected]\", \"password\": \"abc123\"}),\n )\n rv_dict = json.loads(rv.data.decode())\n\n assert rv.status_code == 200\n assert rv_dict[\"id\"] == 1\n assert \"password\" not in rv_dict\n assert rv_dict[\"email\"] == \"[email protected]\"\n\n # Try loggin with wrong PASSWORD\n rv = self.client.post(\"/api/users/login\", data=json.dumps({\"email\": \"[email protected]\", \"password\": \"abc1234\"}))\n assert rv.status_code == 401\n\n # Try loggin with wrong Email\n rv = self.client.post(\"/api/users/login\", data=json.dumps({\"email\": \"[email protected]\", \"password\": \"abc1234\"}))\n assert rv.status_code == 401\n\n # Try loggin with right PASSWORD\n rv = self.client.post(\"/api/users/login\", data=json.dumps({\"email\": \"[email protected]\", \"password\": \"abc123\"}))\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n headers = {\"Authorization\": \"Bearer \" + rv_dict[\"access_token\"]}\n\n # Get the current user\n rv = self.client.get(\"/api/users/\", headers=headers)\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n assert rv_dict[\"email\"] == \"[email protected]\"\n\n rv = self.client.put(\"/api/users/\", data=json.dumps({\"name\": \"carl carlsson\"}), headers=headers)\n rv_dict = json.loads(rv.data.decode())\n assert rv.status_code == 200\n assert rv_dict[\"name\"] == \"Carl Carlsson\"\n\n def test_empty(self):\n # Try loggin withou any users\n rv = self.client.post(\"/api/users/login\", data=json.dumps({\"email\": \"[email protected]\", \"password\": \"abc123\"}))\n assert rv.status_code == 401\n\n\nif __name__ == \"__main__\":\n unittest.main()\n", "step-ids": [ 4, 5, 6, 7, 8 ] }
[ 4, 5, 6, 7, 8 ]
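The record above tests a register -> login -> authorized-request flow against a token-protected Flask API. Stripped of the assertions, the core flow it exercises looks like this; the credentials are placeholders, while create_app, db, and the endpoint paths come from the app under test:

import json
from app import create_app, db

app = create_app("configmodule.TestingConfig")
with app.app_context():
    db.create_all()
client = app.test_client()

creds = json.dumps({"email": "[email protected]", "password": "secret123"})  # placeholder credentials
client.post("/api/users/", data=creds)              # register
rv = client.post("/api/users/login", data=creds)    # log in
token = json.loads(rv.data.decode())["access_token"]
client.get("/api/users/", headers={"Authorization": "Bearer " + token})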
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 13 14:52:03 2018

@author: mayn
"""

import matplotlib.pyplot as plt
import DNA_Object
import json


def draw_area(dna, room):
    area_center, area = DNA_Object.get_room_area(room)
    plt.plot(area['x'], area['y'], linewidth='0.5', color='k')
    plt.xlim((-15000, 20000))
    plt.ylim((-15000, 20000))
    #plt.plot(area_center[0], area_center[1], '*', linewidth='0.5')
    title = 'Id' + str(dna['solutionId'])
    plt.title(title)

def draw_bounds(minx, miny, maxx, maxy):
    x = [minx, maxx, maxx, minx, minx]
    y = [miny, miny, maxy, maxy, miny]
    plt.plot(x, y, linewidth='0.8', color='r')

def draw_house_area(dna):
    #plt.figure()
    cur_dict = {}

    if 'roomList' in dna:
        cur_dict = dna

    elif 'request' in dna and 'feedback' not in dna:
        cur_dict = json.loads(dna['request'])

    elif 'feedback' in dna:
        cur_dict = json.loads(dna['feedback'])

    if cur_dict:
        room_num = len(cur_dict['roomList'])

        for i in range(room_num):
            room = cur_dict['roomList'][i]
            draw_area(dna, room)
            #bed = 318
            #draw_room_obj(room, bed)
            '''if room['roomUsageName'] == '主卧':   # '主卧' = master bedroom
                print('master bedroom area:', area)'''
        #plt.show()
        return True
    else:
        return False

def draw_room_area(dna, room_name):
    plt.figure()
    cur_dict = {}

    if 'roomList' in dna:
        cur_dict = dna

    elif 'request' in dna and 'feedback' not in dna:
        cur_dict = json.loads(dna['request'])

    elif 'feedback' in dna:
        cur_dict = json.loads(dna['feedback'])

    if cur_dict:
        room_num = len(cur_dict['roomList'])

        for i in range(room_num):
            room = cur_dict['roomList'][i]
            if room['roomName'] == room_name:
                draw_area(dna, room)
                #bed = 318
                #draw_room_obj(room, bed)
        return True
    else:
        return False

def draw_house_wall(dna):
    cur_dict = {}

    if 'roomList' in dna:
        cur_dict = dna

    elif 'request' in dna and 'feedback' not in dna:
        cur_dict = json.loads(dna['request'])

    elif 'feedback' in dna:
        cur_dict = json.loads(dna['feedback'])

    if 'walls' in cur_dict:
        wall_num, wall_pos = DNA_Object.get_wall_from_dna(cur_dict)
        for i in range(wall_num):
            plt.plot(wall_pos['x'][i], wall_pos['y'][i], alpha=0.7, color='b',
                     linewidth=1, solid_capstyle='round', zorder=2)
        return True
    else:
        return False

def draw_house_window(dna):
    cur_dict = {}

    if 'roomList' in dna:
        cur_dict = dna

    elif 'request' in dna and 'feedback' not in dna:
        cur_dict = json.loads(dna['request'])

    elif 'feedback' in dna:
        cur_dict = json.loads(dna['feedback'])

    if 'windows' in cur_dict:
        window_num, window_pos = DNA_Object.get_window_info_from_dna(cur_dict)
        for i in range(window_num):
            plt.plot(window_pos['x'][i], window_pos['y'][i], alpha=0.7, color='c',
                     linewidth='0.5', solid_capstyle='round', zorder=2)
        return True
    else:
        return False

def draw_house_door(dna):
    cur_dict = {}

    if 'roomList' in dna:
        cur_dict = dna

    elif 'request' in dna and 'feedback' not in dna:
        cur_dict = json.loads(dna['request'])

    elif 'feedback' in dna:
        cur_dict = json.loads(dna['feedback'])

    if 'doors' in cur_dict:
        door_num, door_pos = DNA_Object.get_door_from_dna(cur_dict)
        for i in range(door_num):
            #print('[door', i, 'pos]', door_pos['x'][i], door_pos['y'][i])
            plt.plot(door_pos['x'][i], door_pos['y'][i], alpha=0.7, color='r',
                     linewidth='0.5', solid_capstyle='round', zorder=2)
        return True
    else:
        return False

def draw_room_obj(room, obj_category):
    obj_num, obj_center, obj_size, obj_point = DNA_Object.get_obj_info_from_room(room, obj_category)
    if obj_num > 0:
        plt.plot(obj_point['x'], obj_point['y'], linewidth='0.5')
        #print(obj_center)
        plt.plot(obj_center[0], obj_center[1], 'o')
        return True
    else:
        return False

def draw_house_obj(dna, obj_category, close_flag):
    room_num, room = DNA_Object.get_room_list_from_dna(dna)
    count = 0
    for i in range(room_num):
        flag = draw_room_obj(room[i], obj_category)
        if flag == True:
            count += 1
    if count == 0 and close_flag == True:
        plt.close()

def draw_relative_info(room2bed):
    plt.figure()
    plt.plot(room2bed['x'], room2bed['y'], '*')

def draw_scatter_distribution(data, title_name, xlabel, ylabel):
    plt.figure()
    # configure matplotlib so CJK text renders correctly
    plt.rcParams['font.sans-serif'] = ['SimHei']   # display Chinese labels properly
    plt.rcParams['axes.unicode_minus'] = False     # display minus signs properly
    plt.plot(data['x'], data['y'], '*')
    plt.title(title_name)
    #plt.grid(True, linestyle="-.", color='k', linewidth='0.5')
    plt.plot([0, 10000], [0, 10000], '-')
    plt.plot([0, 10000], [3000, 3000], '-')
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
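# Usage sketch (hypothetical file name; assumes a solution dict shaped the way
# DNA_Object expects, i.e. carrying 'solutionId', 'roomList', 'walls', 'doors'
# and 'windows' entries):
#
#   with open('solution.json') as f:
#       dna = json.load(f)
#   plt.figure()
#   draw_house_area(dna)
#   draw_house_wall(dna)
#   draw_house_door(dna)
#   draw_house_window(dna)
#   plt.show()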
normal
{ "blob_id": "089bdd6d68a69aff6f3c11f7f5ffb75aed73cd24", "index": 131, "step-1": "<mask token>\n\n\ndef draw_area(dna, room):\n area_center, area = DNA_Object.get_room_area(room)\n plt.plot(area['x'], area['y'], linewidth='0.5', color='k')\n plt.xlim((-15000, 20000))\n plt.ylim((-15000, 20000))\n title = 'Id' + str(dna['solutionId'])\n plt.title(title)\n\n\ndef draw_bounds(minx, miny, maxx, maxy):\n x = [minx, maxx, maxx, minx, minx]\n y = [miny, miny, maxy, maxy, miny]\n plt.plot(x, y, linewidth='0.8', color='r')\n\n\ndef draw_house_area(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if cur_dict:\n room_num = len(cur_dict['roomList'])\n for i in range(room_num):\n room = cur_dict['roomList'][i]\n draw_area(dna, room)\n \"\"\"if room['roomUsageName'] == '主卧':\n print('主卧area:',area)\"\"\"\n return True\n else:\n return False\n\n\ndef draw_room_area(dna, room_name):\n plt.figure()\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if cur_dict:\n room_num = len(cur_dict['roomList'])\n for i in range(room_num):\n room = cur_dict['roomList'][i]\n if room['roomName'] == room_name:\n draw_area(dna, room)\n return True\n else:\n return False\n\n\ndef draw_house_wall(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if 'walls' in cur_dict:\n wall_num, wall_pos = DNA_Object.get_wall_from_dna(cur_dict)\n for i in range(wall_num):\n plt.plot(wall_pos['x'][i], wall_pos['y'][i], alpha=0.7, color=\n 'b', linewidth=1, solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n\n\ndef draw_house_window(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if 'windows' in cur_dict:\n window_num, window_pos = DNA_Object.get_window_info_from_dna(cur_dict)\n for i in range(window_num):\n plt.plot(window_pos['x'][i], window_pos['y'][i], alpha=0.7,\n color='c', linewidth='0.5', solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n\n\n<mask token>\n\n\ndef draw_house_obj(dna, obj_category, close_flag):\n room_num, room = DNA_Object.get_room_list_from_dna(dna)\n count = 0\n for i in range(room_num):\n flag = draw_room_obj(room[i], obj_category)\n if flag == True:\n count += 1\n if count == 0 and close_flag == True:\n plt.close()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef draw_area(dna, room):\n area_center, area = DNA_Object.get_room_area(room)\n plt.plot(area['x'], area['y'], linewidth='0.5', color='k')\n plt.xlim((-15000, 20000))\n plt.ylim((-15000, 20000))\n title = 'Id' + str(dna['solutionId'])\n plt.title(title)\n\n\ndef draw_bounds(minx, miny, maxx, maxy):\n x = [minx, maxx, maxx, minx, minx]\n y = [miny, miny, maxy, maxy, miny]\n plt.plot(x, y, linewidth='0.8', color='r')\n\n\ndef draw_house_area(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = 
json.loads(dna['feedback'])\n if cur_dict:\n room_num = len(cur_dict['roomList'])\n for i in range(room_num):\n room = cur_dict['roomList'][i]\n draw_area(dna, room)\n \"\"\"if room['roomUsageName'] == '主卧':\n print('主卧area:',area)\"\"\"\n return True\n else:\n return False\n\n\ndef draw_room_area(dna, room_name):\n plt.figure()\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if cur_dict:\n room_num = len(cur_dict['roomList'])\n for i in range(room_num):\n room = cur_dict['roomList'][i]\n if room['roomName'] == room_name:\n draw_area(dna, room)\n return True\n else:\n return False\n\n\ndef draw_house_wall(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if 'walls' in cur_dict:\n wall_num, wall_pos = DNA_Object.get_wall_from_dna(cur_dict)\n for i in range(wall_num):\n plt.plot(wall_pos['x'][i], wall_pos['y'][i], alpha=0.7, color=\n 'b', linewidth=1, solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n\n\ndef draw_house_window(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if 'windows' in cur_dict:\n window_num, window_pos = DNA_Object.get_window_info_from_dna(cur_dict)\n for i in range(window_num):\n plt.plot(window_pos['x'][i], window_pos['y'][i], alpha=0.7,\n color='c', linewidth='0.5', solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n\n\ndef draw_house_door(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if 'doors' in cur_dict:\n door_num, door_pos = DNA_Object.get_door_from_dna(cur_dict)\n for i in range(door_num):\n plt.plot(door_pos['x'][i], door_pos['y'][i], alpha=0.7, color=\n 'r', linewidth='0.5', solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n\n\n<mask token>\n\n\ndef draw_house_obj(dna, obj_category, close_flag):\n room_num, room = DNA_Object.get_room_list_from_dna(dna)\n count = 0\n for i in range(room_num):\n flag = draw_room_obj(room[i], obj_category)\n if flag == True:\n count += 1\n if count == 0 and close_flag == True:\n plt.close()\n\n\ndef draw_relative_info(room2bed):\n plt.figure()\n plt.plot(room2bed['x'], room2bed['y'], '*')\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef draw_area(dna, room):\n area_center, area = DNA_Object.get_room_area(room)\n plt.plot(area['x'], area['y'], linewidth='0.5', color='k')\n plt.xlim((-15000, 20000))\n plt.ylim((-15000, 20000))\n title = 'Id' + str(dna['solutionId'])\n plt.title(title)\n\n\ndef draw_bounds(minx, miny, maxx, maxy):\n x = [minx, maxx, maxx, minx, minx]\n y = [miny, miny, maxy, maxy, miny]\n plt.plot(x, y, linewidth='0.8', color='r')\n\n\ndef draw_house_area(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if cur_dict:\n room_num = len(cur_dict['roomList'])\n for i in range(room_num):\n room = 
cur_dict['roomList'][i]\n draw_area(dna, room)\n \"\"\"if room['roomUsageName'] == '主卧':\n print('主卧area:',area)\"\"\"\n return True\n else:\n return False\n\n\ndef draw_room_area(dna, room_name):\n plt.figure()\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if cur_dict:\n room_num = len(cur_dict['roomList'])\n for i in range(room_num):\n room = cur_dict['roomList'][i]\n if room['roomName'] == room_name:\n draw_area(dna, room)\n return True\n else:\n return False\n\n\ndef draw_house_wall(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if 'walls' in cur_dict:\n wall_num, wall_pos = DNA_Object.get_wall_from_dna(cur_dict)\n for i in range(wall_num):\n plt.plot(wall_pos['x'][i], wall_pos['y'][i], alpha=0.7, color=\n 'b', linewidth=1, solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n\n\ndef draw_house_window(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if 'windows' in cur_dict:\n window_num, window_pos = DNA_Object.get_window_info_from_dna(cur_dict)\n for i in range(window_num):\n plt.plot(window_pos['x'][i], window_pos['y'][i], alpha=0.7,\n color='c', linewidth='0.5', solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n\n\ndef draw_house_door(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if 'doors' in cur_dict:\n door_num, door_pos = DNA_Object.get_door_from_dna(cur_dict)\n for i in range(door_num):\n plt.plot(door_pos['x'][i], door_pos['y'][i], alpha=0.7, color=\n 'r', linewidth='0.5', solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n\n\ndef draw_room_obj(room, obj_category):\n obj_num, obj_center, obj_size, obj_point = (DNA_Object.\n get_obj_info_from_room(room, obj_category))\n if obj_num > 0:\n plt.plot(obj_point['x'], obj_point['y'], linewidth='0.5')\n plt.plot(obj_center[0], obj_center[1], 'o')\n return True\n else:\n return False\n\n\ndef draw_house_obj(dna, obj_category, close_flag):\n room_num, room = DNA_Object.get_room_list_from_dna(dna)\n count = 0\n for i in range(room_num):\n flag = draw_room_obj(room[i], obj_category)\n if flag == True:\n count += 1\n if count == 0 and close_flag == True:\n plt.close()\n\n\ndef draw_relative_info(room2bed):\n plt.figure()\n plt.plot(room2bed['x'], room2bed['y'], '*')\n\n\ndef draw_scatter_distribution(data, title_name, xlabel, ylabel):\n plt.figure()\n plt.rcParams['font.sans-serif'] = ['SimHei']\n plt.rcParams['axes.unicode_minus'] = False\n plt.plot(data['x'], data['y'], '*')\n plt.title(title_name)\n plt.plot([0, 10000], [0, 10000], '-')\n plt.plot([0, 10000], [3000, 3000], '-')\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n", "step-4": "<mask token>\nimport matplotlib.pyplot as plt\nimport DNA_Object\nimport json\n\n\ndef draw_area(dna, room):\n area_center, area = DNA_Object.get_room_area(room)\n plt.plot(area['x'], area['y'], linewidth='0.5', color='k')\n plt.xlim((-15000, 20000))\n 
plt.ylim((-15000, 20000))\n title = 'Id' + str(dna['solutionId'])\n plt.title(title)\n\n\ndef draw_bounds(minx, miny, maxx, maxy):\n x = [minx, maxx, maxx, minx, minx]\n y = [miny, miny, maxy, maxy, miny]\n plt.plot(x, y, linewidth='0.8', color='r')\n\n\ndef draw_house_area(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if cur_dict:\n room_num = len(cur_dict['roomList'])\n for i in range(room_num):\n room = cur_dict['roomList'][i]\n draw_area(dna, room)\n \"\"\"if room['roomUsageName'] == '主卧':\n print('主卧area:',area)\"\"\"\n return True\n else:\n return False\n\n\ndef draw_room_area(dna, room_name):\n plt.figure()\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if cur_dict:\n room_num = len(cur_dict['roomList'])\n for i in range(room_num):\n room = cur_dict['roomList'][i]\n if room['roomName'] == room_name:\n draw_area(dna, room)\n return True\n else:\n return False\n\n\ndef draw_house_wall(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if 'walls' in cur_dict:\n wall_num, wall_pos = DNA_Object.get_wall_from_dna(cur_dict)\n for i in range(wall_num):\n plt.plot(wall_pos['x'][i], wall_pos['y'][i], alpha=0.7, color=\n 'b', linewidth=1, solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n\n\ndef draw_house_window(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if 'windows' in cur_dict:\n window_num, window_pos = DNA_Object.get_window_info_from_dna(cur_dict)\n for i in range(window_num):\n plt.plot(window_pos['x'][i], window_pos['y'][i], alpha=0.7,\n color='c', linewidth='0.5', solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n\n\ndef draw_house_door(dna):\n cur_dict = {}\n if 'roomList' in dna:\n cur_dict = dna\n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n if 'doors' in cur_dict:\n door_num, door_pos = DNA_Object.get_door_from_dna(cur_dict)\n for i in range(door_num):\n plt.plot(door_pos['x'][i], door_pos['y'][i], alpha=0.7, color=\n 'r', linewidth='0.5', solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n\n\ndef draw_room_obj(room, obj_category):\n obj_num, obj_center, obj_size, obj_point = (DNA_Object.\n get_obj_info_from_room(room, obj_category))\n if obj_num > 0:\n plt.plot(obj_point['x'], obj_point['y'], linewidth='0.5')\n plt.plot(obj_center[0], obj_center[1], 'o')\n return True\n else:\n return False\n\n\ndef draw_house_obj(dna, obj_category, close_flag):\n room_num, room = DNA_Object.get_room_list_from_dna(dna)\n count = 0\n for i in range(room_num):\n flag = draw_room_obj(room[i], obj_category)\n if flag == True:\n count += 1\n if count == 0 and close_flag == True:\n plt.close()\n\n\ndef draw_relative_info(room2bed):\n plt.figure()\n plt.plot(room2bed['x'], room2bed['y'], '*')\n\n\ndef draw_scatter_distribution(data, title_name, 
xlabel, ylabel):\n plt.figure()\n plt.rcParams['font.sans-serif'] = ['SimHei']\n plt.rcParams['axes.unicode_minus'] = False\n plt.plot(data['x'], data['y'], '*')\n plt.title(title_name)\n plt.plot([0, 10000], [0, 10000], '-')\n plt.plot([0, 10000], [3000, 3000], '-')\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n", "step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Jul 13 14:52:03 2018\n\n@author: mayn\n\"\"\"\n\nimport matplotlib.pyplot as plt\nimport DNA_Object\nimport json\n\n\ndef draw_area(dna, room):\n area_center, area = DNA_Object.get_room_area(room)\n plt.plot(area['x'], area['y'], linewidth='0.5', color='k',)\n plt.xlim((-15000, 20000))\n plt.ylim((-15000, 20000))\n #plt.plot(area_center[0], area_center[1], '*', linewidth='0.5')\n title = 'Id'+str(dna['solutionId'])\n plt.title(title)\n\ndef draw_bounds(minx, miny, maxx, maxy):\n x = [minx, maxx, maxx, minx, minx]\n y = [miny, miny, maxy, maxy, miny]\n plt.plot(x, y, linewidth = '0.8', color='r')\n\ndef draw_house_area(dna):\n #plt.figure()\n cur_dict = {}\n \n if 'roomList' in dna: \n cur_dict = dna\n \n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n \n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n\n if cur_dict: \n room_num = len(cur_dict['roomList'])\n \n for i in range(room_num):\n room = cur_dict['roomList'][i]\n draw_area(dna, room)\n #bed = 318\n #draw_room_obj(room, bed)\n '''if room['roomUsageName'] == '主卧':\n print('主卧area:',area)'''\n #plt.show()\n return True\n else:\n return False\n \ndef draw_room_area(dna, room_name):\n plt.figure()\n cur_dict = {}\n \n if 'roomList' in dna: \n cur_dict = dna\n \n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n \n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n\n if cur_dict: \n room_num = len(cur_dict['roomList'])\n \n for i in range(room_num):\n room = cur_dict['roomList'][i]\n if room['roomName'] == room_name:\n draw_area(dna, room)\n #bed = 318\n #draw_room_obj(room, bed) \n return True\n else:\n return False\ndef draw_house_wall(dna):\n cur_dict = {}\n \n if 'roomList' in dna: \n cur_dict = dna\n \n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n \n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n \n if 'walls' in cur_dict:\n wall_num, wall_pos = DNA_Object.get_wall_from_dna(cur_dict)\n for i in range(wall_num): \n plt.plot(wall_pos['x'][i], wall_pos['y'][i], alpha=0.7, color='b', linewidth=1, solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n \n \ndef draw_house_window(dna):\n cur_dict = {}\n \n if 'roomList' in dna: \n cur_dict = dna\n \n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n \n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n \n if 'windows' in cur_dict:\n window_num, window_pos = DNA_Object.get_window_info_from_dna(cur_dict)\n for i in range(window_num):\n plt.plot(window_pos['x'][i], window_pos['y'][i], alpha=0.7, color='c', linewidth='0.5', solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n\n\n\ndef draw_house_door(dna): \n cur_dict = {}\n \n if 'roomList' in dna: \n cur_dict = dna\n \n elif 'request' in dna and 'feedback' not in dna:\n cur_dict = json.loads(dna['request'])\n \n elif 'feedback' in dna:\n cur_dict = json.loads(dna['feedback'])\n \n if 'doors' in cur_dict:\n door_num, door_pos = DNA_Object.get_door_from_dna(cur_dict)\n for i in range(door_num):\n 
#print('【door',i,'pos】', door_pos['x'][i], door_pos['y'][i])\n plt.plot(door_pos['x'][i], door_pos['y'][i], alpha=0.7, color='r', linewidth='0.5', solid_capstyle='round', zorder=2)\n return True\n else:\n return False\n \ndef draw_room_obj(room, obj_category):\n obj_num, obj_center, obj_size, obj_point= DNA_Object.get_obj_info_from_room(room, obj_category)\n if obj_num > 0: \n plt.plot(obj_point['x'], obj_point['y'], linewidth='0.5')\n #print(bed_center)\n plt.plot(obj_center[0], obj_center[1], 'o') \n return True\n else:\n return False\n\ndef draw_house_obj(dna, obj_category, close_flag):\n room_num, room = DNA_Object.get_room_list_from_dna(dna)\n count = 0\n for i in range(room_num):\n flag = draw_room_obj(room[i], obj_category)\n if flag == True:\n count += 1\n if count == 0 and close_flag == True:\n plt.close()\n \n \ndef draw_relative_info(room2bed):\n plt.figure()\n plt.plot(room2bed['x'], room2bed['y'], '*')\n\n\n\ndef draw_scatter_distribution(data, title_name, xlabel, ylabel):\n plt.figure()\n #解决中文显示问题\n plt.rcParams['font.sans-serif']=['SimHei'] #用来正常显示中文标签\n plt.rcParams['axes.unicode_minus']=False #用来正常显示负号\n plt.plot(data['x'], data['y'], '*')\n plt.title(title_name)\n #plt.grid(True, linestyle=\"-.\", color='k', linewidth='0.5')\n plt.plot([0, 10000], [0, 10000], '-')\n plt.plot([0, 10000], [3000, 3000], '-')\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)", "step-ids": [ 7, 9, 11, 12, 13 ] }
[ 7, 9, 11, 12, 13 ]
# -*- coding: utf-8 -*-
"""
Spyder editor

This is a temporary file.
"""

def largo(l, n):
    i = 0
    cuenta = 1
    valor1 = 0
    valor2 = 0
    # First scan: walk the list and remember the value of the last matching
    # neighbour pair, stopping once n+1 pairs have been counted; the
    # 'i < len(l) - 1' guard keeps the l[i+1] lookahead inside the list.
    while cuenta <= n + 1 and i < len(l) - 1:
        a = l[i]
        b = l[i + 1]
        if a == b:
            cuenta += 1
            valor1 = a
        i += 1
    cuenta = 1
    # Second scan: from where the first scan stopped, look for the next run
    # of n matching pairs, again guarding the lookahead.
    while cuenta <= n and i < len(l) - 1:
        c = l[i]
        d = l[i + 1]
        if c == d:
            cuenta += 1
            valor2 = c
        i += 1
    alto = abs(valor1 - valor2)
    return alto

def hayBorde(l, n, h):
    if largo(l, n) == h:
        return True
    else:
        return False

print(hayBorde([2, 4, 4, 4, 6, 6, 6, 10, 10], 2, 4))
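# Trace of the test call above: the first scan counts the matching pairs
# (4,4), (4,4), (6,6) and stops with valor1 = 6; the second scan then matches
# (6,6) and (10,10), leaving valor2 = 10. largo(...) returns |6 - 10| = 4,
# so hayBorde([2, 4, 4, 4, 6, 6, 6, 10, 10], 2, 4) prints True.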
normal
{ "blob_id": "f3b697e20f60e51d80d655ddf4809aa9afdfcd69", "index": 7495, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef largo(l, n):\n i = 0\n cuenta = 1\n valor1 = 0\n valor2 = 0\n while cuenta < n + 1 or cuenta == n + 1:\n a = l[i]\n b = l[i + 1]\n if a == b:\n cuenta += 1\n valor1 = a\n i += 1\n cuenta = 1\n while cuenta < n or cuenta == n and i < len(l) - 1:\n c = l[i]\n d = l[i + 1]\n if c == d:\n cuenta += 1\n valor2 = c\n i += 1\n alto = abs(valor1 - valor2)\n return alto\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef largo(l, n):\n i = 0\n cuenta = 1\n valor1 = 0\n valor2 = 0\n while cuenta < n + 1 or cuenta == n + 1:\n a = l[i]\n b = l[i + 1]\n if a == b:\n cuenta += 1\n valor1 = a\n i += 1\n cuenta = 1\n while cuenta < n or cuenta == n and i < len(l) - 1:\n c = l[i]\n d = l[i + 1]\n if c == d:\n cuenta += 1\n valor2 = c\n i += 1\n alto = abs(valor1 - valor2)\n return alto\n\n\ndef hayBorde(l, n, h):\n if largo(l, n) == h:\n return True\n else:\n return False\n\n\n<mask token>\n", "step-4": "<mask token>\n\n\ndef largo(l, n):\n i = 0\n cuenta = 1\n valor1 = 0\n valor2 = 0\n while cuenta < n + 1 or cuenta == n + 1:\n a = l[i]\n b = l[i + 1]\n if a == b:\n cuenta += 1\n valor1 = a\n i += 1\n cuenta = 1\n while cuenta < n or cuenta == n and i < len(l) - 1:\n c = l[i]\n d = l[i + 1]\n if c == d:\n cuenta += 1\n valor2 = c\n i += 1\n alto = abs(valor1 - valor2)\n return alto\n\n\ndef hayBorde(l, n, h):\n if largo(l, n) == h:\n return True\n else:\n return False\n\n\nprint(hayBorde([2, 4, 4, 4, 6, 6, 6, 10, 10], 2, 4))\n", "step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nEditor de Spyder\n\nEste es un archivo temporal.\n\"\"\"\n\ndef largo (l, n):\n i=0\n cuenta=1\n valor1=0\n valor2=0\n while cuenta < n+1 or cuenta==n+1:\n a=l[i]\n b=l[i+1]\n if a==b:\n cuenta+= 1\n valor1=a\n i+=1\n cuenta=1\n while cuenta < n or cuenta == n and i<len(l)-1:\n c=l[i]\n d=l[i+1]\n if c==d:\n cuenta+= 1\n valor2=c\n i+=1\n alto=abs(valor1-valor2)\n return alto\n\ndef hayBorde(l,n,h):\n if largo(l,n)==h:\n return True\n else:\n return False\n\nprint(hayBorde([2,4,4,4,6,6,6,10,10],2,4))\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
from django.urls import path, include
from . import views

urlpatterns = [
    path('register_curier/', views.curier_register, name="register_curier"),
    path('private_сurier/', views.private_сurier, name="private_сurier"),
    path('private_сurier2/', views.private_сurier2, name="private_сurier2"),
    path('private_curier/select/<int:id>', views.curier_select, name="curier_select"),
    path('private_curier/cancel/<int:id>', views.curier_cancel, name="curier_cancel"),
    path("private_curier_raschet/<str:day>", views.rashet_view, name="curier_rashet"),
    path("private_curier_history/", views.curier_history, name="curier_history"),
]
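# Resolution sketch (paths resolve relative to wherever this URLconf is included):
#   private_curier/select/3        -> views.curier_select(request, id=3)
#   private_curier_raschet/monday  -> views.rashet_view(request, day='monday')
# Note: 'private_сurier' and 'private_сurier2' contain a Cyrillic 'с' rather
# than the Latin 'c' used elsewhere; the spellings are kept because the
# imported view functions use the same characters.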
normal
{ "blob_id": "c1a83c9551e83e395a365210a99330fee7877dff", "index": 6881, "step-1": "<mask token>\n", "step-2": "<mask token>\nurlpatterns = [path('register_curier/', views.curier_register, name=\n 'register_curier'), path('private_сurier/', views.private_сurier, name=\n 'private_сurier'), path('private_сurier2/', views.private_сurier2, name\n ='private_сurier2'), path('private_curier/select/<int:id>', views.\n curier_select, name='curier_select'), path(\n 'private_curier/cancel/<int:id>', views.curier_cancel, name=\n 'curier_cancel'), path('private_curier_raschet/<str:day>', views.\n rashet_view, name='curier_rashet'), path('private_curier_history/',\n views.curier_history, name='curier_history')]\n", "step-3": "from django.urls import path, include\nfrom . import views\nurlpatterns = [path('register_curier/', views.curier_register, name=\n 'register_curier'), path('private_сurier/', views.private_сurier, name=\n 'private_сurier'), path('private_сurier2/', views.private_сurier2, name\n ='private_сurier2'), path('private_curier/select/<int:id>', views.\n curier_select, name='curier_select'), path(\n 'private_curier/cancel/<int:id>', views.curier_cancel, name=\n 'curier_cancel'), path('private_curier_raschet/<str:day>', views.\n rashet_view, name='curier_rashet'), path('private_curier_history/',\n views.curier_history, name='curier_history')]\n", "step-4": "from django.urls import path,include\nfrom . import views\n\nurlpatterns = [\n path('register_curier/',views.curier_register,name=\"register_curier\"),\n path('private_сurier/',views.private_сurier,name=\"private_сurier\"),\n path('private_сurier2/',views.private_сurier2,name=\"private_сurier2\"),\n path('private_curier/select/<int:id>',views.curier_select,name=\"curier_select\"),\n path('private_curier/cancel/<int:id>',views.curier_cancel,name=\"curier_cancel\"),\n path(\"private_curier_raschet/<str:day>\",views.rashet_view,name=\"curier_rashet\"),\n path(\"private_curier_history/\",views.curier_history,name=\"curier_history\"),\n\n]\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
#coding:utf-8

# basic string operations

# rstrip() strips the given character(s) from the end of a string (whitespace
# by default); if the end holds several of those characters, all are removed
str1 = "djcc"
str2 = "adcd"
print("this's rstrip() function---------")
print(str1.rstrip("c"))
print(str1.rstrip("d"))

# replace() substitutes a new substring for the given one: str.replace(old, new[, max]);
# max limits how many occurrences are replaced - when omitted, all occurrences are replaced
str3 = "this is history,it is not fake"
print("this's replace function----------")
print(str3.replace("is", "was"))
print(str3.replace("is", "was", 3))  # the replacement count starts at 1, not 0
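# Expected output of the calls above:
#   "djcc".rstrip("c")            -> "dj"     (both trailing c's removed)
#   "djcc".rstrip("d")            -> "djcc"   (no trailing d, nothing removed)
#   str3.replace("is", "was")     -> "thwas was hwastory,it was not fake"
#   str3.replace("is", "was", 3)  -> "thwas was hwastory,it is not fake"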
normal
{ "blob_id": "59170e6b0b0705b9908ed1c32bbea87373126594", "index": 9484, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(\"this's rstrip() function---------\")\nprint(str1.rstrip('c'))\nprint(str1.rstrip('d'))\n<mask token>\nprint(\"this's replace function----------\")\nprint(str3.replace('is', 'was'))\nprint(str3.replace('is', 'was', 3))\n", "step-3": "str1 = 'djcc'\nstr2 = 'adcd'\nprint(\"this's rstrip() function---------\")\nprint(str1.rstrip('c'))\nprint(str1.rstrip('d'))\nstr3 = 'this is history,it is not fake'\nprint(\"this's replace function----------\")\nprint(str3.replace('is', 'was'))\nprint(str3.replace('is', 'was', 3))\n", "step-4": "#coding:utf-8\n\n#base string opeate\n\n#rstrip()删除字符串末尾被指定的字符,默认是空格,如末尾有多个相同的字符,则一并删除\nstr1=\"djcc\"\nstr2=\"adcd\"\nprint(\"this's rstrip() function---------\")\nprint(str1.rstrip(\"c\"))\nprint(str1.rstrip(\"d\"))\n\n\n#replace()用新字符替换字符串中被指定的字符,str.replace(old, new[, max]),max表示替换多少个,如不指定,全部替换\n\nstr3=\"this is history,it is not fake\"\nprint(\"this's replace function----------\")\nprint(str3.replace(\"is\",\"was\"))\nprint(str3.replace(\"is\",\"was\",3))#索引从1开始,0不算\n\n#\n\n\n\n\n\n\n\n\n\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
#Takes - Contact Name(Must be saved in phone's contact list), Message, Time as input
# and sends message to the given contact at given time
# Accuracy Level ~ Seconds. (Also depends on your network speed)

from selenium import webdriver
PATH = 'C:\Program Files (x86)\chromedriver.exe'
driver = webdriver.Chrome(PATH)
from selenium.webdriver.common.keys import Keys
import time
from threading import Timer
from datetime import datetime

driver.get("https://web.whatsapp.com/")
print("Scan the QR code to Log in...")
time.sleep(10)

nameofcontact = input('Give name of contact: ')
msg = input("Type the message you want to send: ")
print("Enter Time of sending Message (Hrs, Min & Sec...)")
hrs = int(input("Hrs: "))
mins = int(input("Min: "))
secs = int(input("Sec: "))

x = datetime.today()
y = x.replace(day=x.day + 1, hour=hrs, minute=mins, second=secs, microsecond=0)
delta_t = y - x
secs = delta_t.seconds + 1

def send_msg():
    global nameofcontact, msg
    css_path = 'span[title="' + nameofcontact + '"]'
    nameofcontact = driver.find_element_by_css_selector(css_path)
    nameofcontact.click()

    chatbox = driver.find_element_by_xpath('//*[@id="main"]/footer/div[1]/div/div/div[2]/div[1]/div/div[2]')
    chatbox.send_keys(msg)
    chatbox.send_keys(Keys.RETURN)

t = Timer(secs, send_msg)
t.start()

# Portability notes:
# - find_element_by_css_selector / find_element_by_xpath were removed in
#   Selenium 4; on current Selenium the equivalent is
#       from selenium.webdriver.common.by import By
#       driver.find_element(By.CSS_SELECTOR, css_path)
# - x.replace(day=x.day + 1, ...) raises ValueError on the last day of a
#   month; x + timedelta(days=1) is the robust way to express "tomorrow".
flexible
{ "blob_id": "9a183b1f81681b3dec1132a27b17e389438ab725", "index": 6045, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef set_seed(n):\n global seed, py_rng, np_rng, t_rng\n seed = n\n py_rng = Random(seed)\n np_rng = np.random.RandomState(seed)\n\n\n<mask token>\n", "step-3": "<mask token>\nseed = 42\npy_rng = Random(seed)\nnp_rng = np.random.RandomState(seed)\nt_rng = RandomStreams(seed)\nt_rng_2 = theano.tensor.shared_randomstreams.RandomStreams(seed)\n\n\ndef set_seed(n):\n global seed, py_rng, np_rng, t_rng\n seed = n\n py_rng = Random(seed)\n np_rng = np.random.RandomState(seed)\n\n\nt_rng = RandomStreams(seed)\n", "step-4": "<mask token>\nimport numpy as np\nfrom theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams\nimport theano.tensor.shared_randomstreams\nfrom random import Random\nseed = 42\npy_rng = Random(seed)\nnp_rng = np.random.RandomState(seed)\nt_rng = RandomStreams(seed)\nt_rng_2 = theano.tensor.shared_randomstreams.RandomStreams(seed)\n\n\ndef set_seed(n):\n global seed, py_rng, np_rng, t_rng\n seed = n\n py_rng = Random(seed)\n np_rng = np.random.RandomState(seed)\n\n\nt_rng = RandomStreams(seed)\n", "step-5": "\"\"\"\nCopyright (c) 2017 - Philip Paquette\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE.\n\"\"\"\n\n# Modified from https://raw.githubusercontent.com/Newmu/dcgan_code/master/lib/rng.py\n# MIT License\nimport numpy as np\nfrom theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams\nimport theano.tensor.shared_randomstreams\nfrom random import Random\n\nseed = 42\n\npy_rng = Random(seed)\nnp_rng = np.random.RandomState(seed)\nt_rng = RandomStreams(seed)\nt_rng_2 = theano.tensor.shared_randomstreams.RandomStreams(seed)\n\ndef set_seed(n):\n global seed, py_rng, np_rng, t_rng\n\n seed = n\n py_rng = Random(seed)\n np_rng = np.random.RandomState(seed)\n\nt_rng = RandomStreams(seed)\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> class PileMosaic: def __init__(self): self.width, self.height = 2380, 2800 self.filename = 'pile_mosaic.png' self.crema = 240, 233, 227 self.choco = 89, 62, 53 self.luna = 43, 97, 123 self.latte = 195, 175, 148 self.piscina = 170, 200, 211 self.lavanda = 189, 192, 209 self.viola = 133, 108, 140 self.morado = 121, 69, 92 self.rosa = 222, 179, 172 self.flamingo = 238, 157, 140 self.color_tuple = (self.crema, self.choco, self.luna, self.latte, self.piscina) self.tile_width = 300 self.tile_height = 100 def create_new_image(self): self.image = Image.new('RGB', (self.width, self.height), 'white') self.data = [(255, 255, 255)] * (self.width * self.height) <|reserved_special_token_0|> def hex_to_rgb(value): value = value.lstrip('#') lv = len(value) return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv // 3)) def rgb_to_hex(rgb): return '#%02x%02x%02x' % rgb def place_pile(self, color, x=0, y=0): for i in range(self.tile_width): for j in range(self.tile_height): self.image.im.putpixel((x + i, y + j), color) def fill_random(self): for x in range(self.width / self.tile_width): for y in range(self.height / self.tile_height): current_color = randrange(5) self.place_pile(self.color_tuple[current_color], x=x * self .tile_width, y=y * self.tile_height) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class PileMosaic: def __init__(self): self.width, self.height = 2380, 2800 self.filename = 'pile_mosaic.png' self.crema = 240, 233, 227 self.choco = 89, 62, 53 self.luna = 43, 97, 123 self.latte = 195, 175, 148 self.piscina = 170, 200, 211 self.lavanda = 189, 192, 209 self.viola = 133, 108, 140 self.morado = 121, 69, 92 self.rosa = 222, 179, 172 self.flamingo = 238, 157, 140 self.color_tuple = (self.crema, self.choco, self.luna, self.latte, self.piscina) self.tile_width = 300 self.tile_height = 100 def create_new_image(self): self.image = Image.new('RGB', (self.width, self.height), 'white') self.data = [(255, 255, 255)] * (self.width * self.height) <|reserved_special_token_0|> def hex_to_rgb(value): value = value.lstrip('#') lv = len(value) return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv // 3)) def rgb_to_hex(rgb): return '#%02x%02x%02x' % rgb def place_pile(self, color, x=0, y=0): for i in range(self.tile_width): for j in range(self.tile_height): self.image.im.putpixel((x + i, y + j), color) def fill_random(self): for x in range(self.width / self.tile_width): for y in range(self.height / self.tile_height): current_color = randrange(5) self.place_pile(self.color_tuple[current_color], x=x * self .tile_width, y=y * self.tile_height) def create_random_pattern(self): initial_pattern = [] for x in range(self.width / self.tile_width): initial_pattern.append([]) for y in range(self.height / self.tile_height): temp_list = list(self.color_tuple) if x - 1 >= 0: try: temp_list.remove(initial_pattern[x - 1][y]) except ValueError: pass if y - 1 >= 0: try: temp_list.remove(initial_pattern[x][y - 1]) except ValueError: pass initial_pattern[x].append(temp_list[randrange(len(temp_list))]) return initial_pattern <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class PileMosaic: def __init__(self): self.width, self.height = 2380, 2800 self.filename = 'pile_mosaic.png' self.crema = 240, 233, 227 self.choco = 89, 62, 53 self.luna = 43, 97, 123 self.latte = 195, 175, 148 self.piscina = 170, 200, 211 
self.lavanda = 189, 192, 209 self.viola = 133, 108, 140 self.morado = 121, 69, 92 self.rosa = 222, 179, 172 self.flamingo = 238, 157, 140 self.color_tuple = (self.crema, self.choco, self.luna, self.latte, self.piscina) self.tile_width = 300 self.tile_height = 100 def create_new_image(self): self.image = Image.new('RGB', (self.width, self.height), 'white') self.data = [(255, 255, 255)] * (self.width * self.height) def write_image(self): self.image.save(self.filename, 'PNG') def hex_to_rgb(value): value = value.lstrip('#') lv = len(value) return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv // 3)) def rgb_to_hex(rgb): return '#%02x%02x%02x' % rgb def place_pile(self, color, x=0, y=0): for i in range(self.tile_width): for j in range(self.tile_height): self.image.im.putpixel((x + i, y + j), color) def fill_random(self): for x in range(self.width / self.tile_width): for y in range(self.height / self.tile_height): current_color = randrange(5) self.place_pile(self.color_tuple[current_color], x=x * self .tile_width, y=y * self.tile_height) def create_random_pattern(self): initial_pattern = [] for x in range(self.width / self.tile_width): initial_pattern.append([]) for y in range(self.height / self.tile_height): temp_list = list(self.color_tuple) if x - 1 >= 0: try: temp_list.remove(initial_pattern[x - 1][y]) except ValueError: pass if y - 1 >= 0: try: temp_list.remove(initial_pattern[x][y - 1]) except ValueError: pass initial_pattern[x].append(temp_list[randrange(len(temp_list))]) return initial_pattern def fill(self, pattern): for x in range(self.width / (self.tile_width + 4)): for y in range(self.height / (self.tile_height + 4)): self.place_pile(pattern[x][y], x=x * (self.tile_width + 4), y=y * (self.tile_height + 4)) pile = PileMosaic() pile.create_new_image() pile.fill(pile.create_random_pattern()) pile.write_image() <|reserved_special_token_1|> from PIL import Image from random import randrange class PileMosaic: def __init__(self): self.width, self.height = 2380, 2800 self.filename = 'pile_mosaic.png' self.crema = 240, 233, 227 self.choco = 89, 62, 53 self.luna = 43, 97, 123 self.latte = 195, 175, 148 self.piscina = 170, 200, 211 self.lavanda = 189, 192, 209 self.viola = 133, 108, 140 self.morado = 121, 69, 92 self.rosa = 222, 179, 172 self.flamingo = 238, 157, 140 self.color_tuple = (self.crema, self.choco, self.luna, self.latte, self.piscina) self.tile_width = 300 self.tile_height = 100 def create_new_image(self): self.image = Image.new('RGB', (self.width, self.height), 'white') self.data = [(255, 255, 255)] * (self.width * self.height) def write_image(self): self.image.save(self.filename, 'PNG') def hex_to_rgb(value): value = value.lstrip('#') lv = len(value) return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv // 3)) def rgb_to_hex(rgb): return '#%02x%02x%02x' % rgb def place_pile(self, color, x=0, y=0): for i in range(self.tile_width): for j in range(self.tile_height): self.image.im.putpixel((x + i, y + j), color) def fill_random(self): for x in range(self.width / self.tile_width): for y in range(self.height / self.tile_height): current_color = randrange(5) self.place_pile(self.color_tuple[current_color], x=x * self .tile_width, y=y * self.tile_height) def create_random_pattern(self): initial_pattern = [] for x in range(self.width / self.tile_width): initial_pattern.append([]) for y in range(self.height / self.tile_height): temp_list = list(self.color_tuple) if x - 1 >= 0: try: temp_list.remove(initial_pattern[x - 1][y]) except ValueError: pass if y - 1 >= 
0: try: temp_list.remove(initial_pattern[x][y - 1]) except ValueError: pass initial_pattern[x].append(temp_list[randrange(len(temp_list))]) return initial_pattern def fill(self, pattern): for x in range(self.width / (self.tile_width + 4)): for y in range(self.height / (self.tile_height + 4)): self.place_pile(pattern[x][y], x=x * (self.tile_width + 4), y=y * (self.tile_height + 4)) pile = PileMosaic() pile.create_new_image() pile.fill(pile.create_random_pattern()) pile.write_image() <|reserved_special_token_1|> from PIL import Image from random import randrange class PileMosaic: def __init__(self): self.width, self.height = 2380, 2800 self.filename = "pile_mosaic.png" self.crema = (240, 233, 227) self.choco = (89, 62, 53) self.luna = (43, 97, 123) self.latte = (195, 175, 148) self.piscina = (170, 200, 211) self.lavanda = (189, 192, 209) self.viola = (133, 108, 140) self.morado = (121, 69, 92) self.rosa = (222, 179, 172) self.flamingo = (238, 157, 140) self.color_tuple = (self.crema, self.choco, self.luna, self.latte, self.piscina) # self.color_tuple = (self.lavanda, self.viola, self.rosa, self.morado, self.flamingo) self.tile_width = 300 self.tile_height = 100 def create_new_image(self): self.image = Image.new("RGB", (self.width, self.height), "white") self.data = [(255, 255, 255)]*(self.width*self.height) def write_image(self): self.image.save(self.filename, "PNG") def hex_to_rgb(value): value = value.lstrip('#') lv = len(value) return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv // 3)) def rgb_to_hex(rgb): return '#%02x%02x%02x' % rgb def place_pile(self, color, x=0, y=0): for i in range(self.tile_width): for j in range(self.tile_height): self.image.im.putpixel((x + i, y + j), color) def fill_random(self): for x in range(self.width / self.tile_width): for y in range(self.height / self.tile_height): current_color = randrange(5) self.place_pile(self.color_tuple[current_color], x=x*self.tile_width, y=y*self.tile_height) def create_random_pattern(self): initial_pattern = [] for x in range(self.width / self.tile_width): initial_pattern.append([]) for y in range(self.height / self.tile_height): temp_list = list(self.color_tuple) if x - 1 >= 0: try: temp_list.remove(initial_pattern[x - 1][y]) except ValueError: pass if y - 1 >= 0: try: temp_list.remove(initial_pattern[x][y - 1]) except ValueError: pass initial_pattern[x].append(temp_list[randrange(len(temp_list))]) return initial_pattern def fill(self, pattern): for x in range(self.width / (self.tile_width + 4)): for y in range(self.height / (self.tile_height + 4)): self.place_pile(pattern[x][y], x=x*(self.tile_width+4), y=y*(self.tile_height+4)) pile = PileMosaic() pile.create_new_image() pile.fill(pile.create_random_pattern()) pile.write_image()
flexible
{ "blob_id": "a484272ace089008e27f4e00d2e641118432665e", "index": 4592, "step-1": "<mask token>\n\n\nclass PileMosaic:\n\n def __init__(self):\n self.width, self.height = 2380, 2800\n self.filename = 'pile_mosaic.png'\n self.crema = 240, 233, 227\n self.choco = 89, 62, 53\n self.luna = 43, 97, 123\n self.latte = 195, 175, 148\n self.piscina = 170, 200, 211\n self.lavanda = 189, 192, 209\n self.viola = 133, 108, 140\n self.morado = 121, 69, 92\n self.rosa = 222, 179, 172\n self.flamingo = 238, 157, 140\n self.color_tuple = (self.crema, self.choco, self.luna, self.latte,\n self.piscina)\n self.tile_width = 300\n self.tile_height = 100\n\n def create_new_image(self):\n self.image = Image.new('RGB', (self.width, self.height), 'white')\n self.data = [(255, 255, 255)] * (self.width * self.height)\n <mask token>\n\n def hex_to_rgb(value):\n value = value.lstrip('#')\n lv = len(value)\n return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv //\n 3))\n\n def rgb_to_hex(rgb):\n return '#%02x%02x%02x' % rgb\n\n def place_pile(self, color, x=0, y=0):\n for i in range(self.tile_width):\n for j in range(self.tile_height):\n self.image.im.putpixel((x + i, y + j), color)\n\n def fill_random(self):\n for x in range(self.width / self.tile_width):\n for y in range(self.height / self.tile_height):\n current_color = randrange(5)\n self.place_pile(self.color_tuple[current_color], x=x * self\n .tile_width, y=y * self.tile_height)\n <mask token>\n <mask token>\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass PileMosaic:\n\n def __init__(self):\n self.width, self.height = 2380, 2800\n self.filename = 'pile_mosaic.png'\n self.crema = 240, 233, 227\n self.choco = 89, 62, 53\n self.luna = 43, 97, 123\n self.latte = 195, 175, 148\n self.piscina = 170, 200, 211\n self.lavanda = 189, 192, 209\n self.viola = 133, 108, 140\n self.morado = 121, 69, 92\n self.rosa = 222, 179, 172\n self.flamingo = 238, 157, 140\n self.color_tuple = (self.crema, self.choco, self.luna, self.latte,\n self.piscina)\n self.tile_width = 300\n self.tile_height = 100\n\n def create_new_image(self):\n self.image = Image.new('RGB', (self.width, self.height), 'white')\n self.data = [(255, 255, 255)] * (self.width * self.height)\n <mask token>\n\n def hex_to_rgb(value):\n value = value.lstrip('#')\n lv = len(value)\n return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv //\n 3))\n\n def rgb_to_hex(rgb):\n return '#%02x%02x%02x' % rgb\n\n def place_pile(self, color, x=0, y=0):\n for i in range(self.tile_width):\n for j in range(self.tile_height):\n self.image.im.putpixel((x + i, y + j), color)\n\n def fill_random(self):\n for x in range(self.width / self.tile_width):\n for y in range(self.height / self.tile_height):\n current_color = randrange(5)\n self.place_pile(self.color_tuple[current_color], x=x * self\n .tile_width, y=y * self.tile_height)\n\n def create_random_pattern(self):\n initial_pattern = []\n for x in range(self.width / self.tile_width):\n initial_pattern.append([])\n for y in range(self.height / self.tile_height):\n temp_list = list(self.color_tuple)\n if x - 1 >= 0:\n try:\n temp_list.remove(initial_pattern[x - 1][y])\n except ValueError:\n pass\n if y - 1 >= 0:\n try:\n temp_list.remove(initial_pattern[x][y - 1])\n except ValueError:\n pass\n initial_pattern[x].append(temp_list[randrange(len(temp_list))])\n return initial_pattern\n <mask token>\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass PileMosaic:\n\n def __init__(self):\n self.width, self.height = 2380, 2800\n self.filename = 
'pile_mosaic.png'\n self.crema = 240, 233, 227\n self.choco = 89, 62, 53\n self.luna = 43, 97, 123\n self.latte = 195, 175, 148\n self.piscina = 170, 200, 211\n self.lavanda = 189, 192, 209\n self.viola = 133, 108, 140\n self.morado = 121, 69, 92\n self.rosa = 222, 179, 172\n self.flamingo = 238, 157, 140\n self.color_tuple = (self.crema, self.choco, self.luna, self.latte,\n self.piscina)\n self.tile_width = 300\n self.tile_height = 100\n\n def create_new_image(self):\n self.image = Image.new('RGB', (self.width, self.height), 'white')\n self.data = [(255, 255, 255)] * (self.width * self.height)\n\n def write_image(self):\n self.image.save(self.filename, 'PNG')\n\n def hex_to_rgb(value):\n value = value.lstrip('#')\n lv = len(value)\n return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv //\n 3))\n\n def rgb_to_hex(rgb):\n return '#%02x%02x%02x' % rgb\n\n def place_pile(self, color, x=0, y=0):\n for i in range(self.tile_width):\n for j in range(self.tile_height):\n self.image.im.putpixel((x + i, y + j), color)\n\n def fill_random(self):\n for x in range(self.width / self.tile_width):\n for y in range(self.height / self.tile_height):\n current_color = randrange(5)\n self.place_pile(self.color_tuple[current_color], x=x * self\n .tile_width, y=y * self.tile_height)\n\n def create_random_pattern(self):\n initial_pattern = []\n for x in range(self.width / self.tile_width):\n initial_pattern.append([])\n for y in range(self.height / self.tile_height):\n temp_list = list(self.color_tuple)\n if x - 1 >= 0:\n try:\n temp_list.remove(initial_pattern[x - 1][y])\n except ValueError:\n pass\n if y - 1 >= 0:\n try:\n temp_list.remove(initial_pattern[x][y - 1])\n except ValueError:\n pass\n initial_pattern[x].append(temp_list[randrange(len(temp_list))])\n return initial_pattern\n\n def fill(self, pattern):\n for x in range(self.width / (self.tile_width + 4)):\n for y in range(self.height / (self.tile_height + 4)):\n self.place_pile(pattern[x][y], x=x * (self.tile_width + 4),\n y=y * (self.tile_height + 4))\n\n\npile = PileMosaic()\npile.create_new_image()\npile.fill(pile.create_random_pattern())\npile.write_image()\n", "step-4": "from PIL import Image\nfrom random import randrange\n\n\nclass PileMosaic:\n\n def __init__(self):\n self.width, self.height = 2380, 2800\n self.filename = 'pile_mosaic.png'\n self.crema = 240, 233, 227\n self.choco = 89, 62, 53\n self.luna = 43, 97, 123\n self.latte = 195, 175, 148\n self.piscina = 170, 200, 211\n self.lavanda = 189, 192, 209\n self.viola = 133, 108, 140\n self.morado = 121, 69, 92\n self.rosa = 222, 179, 172\n self.flamingo = 238, 157, 140\n self.color_tuple = (self.crema, self.choco, self.luna, self.latte,\n self.piscina)\n self.tile_width = 300\n self.tile_height = 100\n\n def create_new_image(self):\n self.image = Image.new('RGB', (self.width, self.height), 'white')\n self.data = [(255, 255, 255)] * (self.width * self.height)\n\n def write_image(self):\n self.image.save(self.filename, 'PNG')\n\n def hex_to_rgb(value):\n value = value.lstrip('#')\n lv = len(value)\n return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv //\n 3))\n\n def rgb_to_hex(rgb):\n return '#%02x%02x%02x' % rgb\n\n def place_pile(self, color, x=0, y=0):\n for i in range(self.tile_width):\n for j in range(self.tile_height):\n self.image.im.putpixel((x + i, y + j), color)\n\n def fill_random(self):\n for x in range(self.width / self.tile_width):\n for y in range(self.height / self.tile_height):\n current_color = randrange(5)\n 
self.place_pile(self.color_tuple[current_color], x=x * self\n .tile_width, y=y * self.tile_height)\n\n def create_random_pattern(self):\n initial_pattern = []\n for x in range(self.width / self.tile_width):\n initial_pattern.append([])\n for y in range(self.height / self.tile_height):\n temp_list = list(self.color_tuple)\n if x - 1 >= 0:\n try:\n temp_list.remove(initial_pattern[x - 1][y])\n except ValueError:\n pass\n if y - 1 >= 0:\n try:\n temp_list.remove(initial_pattern[x][y - 1])\n except ValueError:\n pass\n initial_pattern[x].append(temp_list[randrange(len(temp_list))])\n return initial_pattern\n\n def fill(self, pattern):\n for x in range(self.width / (self.tile_width + 4)):\n for y in range(self.height / (self.tile_height + 4)):\n self.place_pile(pattern[x][y], x=x * (self.tile_width + 4),\n y=y * (self.tile_height + 4))\n\n\npile = PileMosaic()\npile.create_new_image()\npile.fill(pile.create_random_pattern())\npile.write_image()\n", "step-5": "from PIL import Image\nfrom random import randrange\n\nclass PileMosaic:\n def __init__(self):\n self.width, self.height = 2380, 2800\n self.filename = \"pile_mosaic.png\"\n self.crema = (240, 233, 227)\n self.choco = (89, 62, 53)\n self.luna = (43, 97, 123)\n self.latte = (195, 175, 148)\n self.piscina = (170, 200, 211)\n self.lavanda = (189, 192, 209)\n self.viola = (133, 108, 140)\n self.morado = (121, 69, 92)\n self.rosa = (222, 179, 172)\n self.flamingo = (238, 157, 140)\n self.color_tuple = (self.crema, self.choco, self.luna, self.latte, self.piscina)\n # self.color_tuple = (self.lavanda, self.viola, self.rosa, self.morado, self.flamingo)\n self.tile_width = 300\n self.tile_height = 100\n\n def create_new_image(self):\n self.image = Image.new(\"RGB\", (self.width, self.height), \"white\")\n self.data = [(255, 255, 255)]*(self.width*self.height)\n\n def write_image(self):\n self.image.save(self.filename, \"PNG\")\n\n def hex_to_rgb(value):\n value = value.lstrip('#')\n lv = len(value)\n return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv // 3))\n\n def rgb_to_hex(rgb):\n return '#%02x%02x%02x' % rgb\n\n def place_pile(self, color, x=0, y=0):\n for i in range(self.tile_width):\n for j in range(self.tile_height):\n self.image.im.putpixel((x + i, y + j), color)\n\n def fill_random(self):\n for x in range(self.width / self.tile_width):\n for y in range(self.height / self.tile_height):\n current_color = randrange(5)\n self.place_pile(self.color_tuple[current_color], x=x*self.tile_width, y=y*self.tile_height)\n\n def create_random_pattern(self):\n initial_pattern = []\n for x in range(self.width / self.tile_width):\n initial_pattern.append([])\n for y in range(self.height / self.tile_height):\n temp_list = list(self.color_tuple)\n if x - 1 >= 0:\n try:\n temp_list.remove(initial_pattern[x - 1][y])\n except ValueError:\n pass\n if y - 1 >= 0:\n try:\n temp_list.remove(initial_pattern[x][y - 1])\n except ValueError:\n pass\n initial_pattern[x].append(temp_list[randrange(len(temp_list))])\n return initial_pattern\n \n def fill(self, pattern):\n for x in range(self.width / (self.tile_width + 4)):\n for y in range(self.height / (self.tile_height + 4)):\n self.place_pile(pattern[x][y], x=x*(self.tile_width+4), y=y*(self.tile_height+4))\n \n\npile = PileMosaic()\npile.create_new_image()\npile.fill(pile.create_random_pattern())\npile.write_image()\n", "step-ids": [ 7, 8, 12, 13, 14 ] }
[ 7, 8, 12, 13, 14 ]
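# A minimal sketch of the grid loop from the PileMosaic record above, assuming
# that code was written for Python 2: on Python 3, range(self.width /
# self.tile_width) raises TypeError because '/' yields a float, so the same
# intent needs floor division. Dimensions and tile size are taken from the record.
width, height = 2380, 2800
tile_w, tile_h = 300, 100
cols, rows = width // tile_w, height // tile_h  # 7 columns, 28 rows
for x in range(cols):
    for y in range(rows):
        x_px, y_px = x * tile_w, y * tile_h  # top-left pixel of one 300x100 tile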
<|reserved_special_token_0|> def leftEncoderCallback(answer): global leftSteps leftSteps = leftSteps + 1 global leftDistance leftDistance = leftDistance + 0.24 print('Left Encoder.') def rightEncoderCallback(answer): global rightSteps rightSteps = rightSteps + 1 global rightDistance rightDistance = rightDistance + 0.24 print('Right Encoder.') <|reserved_special_token_0|> def disableEncoderTracking(): GPIO.remove_event_detect(leftEncoderGPIO) GPIO.remove_event_detect(rightEncoderGPIO) <|reserved_special_token_0|> def turnOffMotors(): mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE) mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE) <|reserved_special_token_0|> def sig_handler(_signo, _stack_frame): turnOffMotors() GPIO.remove_event_detect(leftEncoderGPIO) GPIO.remove_event_detect(rightEncoderGPIO) GPIO.cleanup() print('\n') print(str(leftSteps) + ' left steps are ' + str(leftDistance) + ' cm driven.') print(str(rightSteps) + ' right steps are ' + str(rightDistance) + ' cm driven.\n') sys.exit(0) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> GPIO.setmode(GPIO.BCM) <|reserved_special_token_0|> print('setup...') GPIO.setup(leftEncoderGPIO, GPIO.IN) GPIO.setup(rightEncoderGPIO, GPIO.IN) <|reserved_special_token_0|> def leftEncoderCallback(answer): global leftSteps leftSteps = leftSteps + 1 global leftDistance leftDistance = leftDistance + 0.24 print('Left Encoder.') def rightEncoderCallback(answer): global rightSteps rightSteps = rightSteps + 1 global rightDistance rightDistance = rightDistance + 0.24 print('Right Encoder.') print('registering event handlers...') def enableEncoderTracking(): GPIO.add_event_detect(leftEncoderGPIO, GPIO.FALLING, callback= leftEncoderCallback) GPIO.add_event_detect(rightEncoderGPIO, GPIO.FALLING, callback= rightEncoderCallback) def disableEncoderTracking(): GPIO.remove_event_detect(leftEncoderGPIO) GPIO.remove_event_detect(rightEncoderGPIO) <|reserved_special_token_0|> def turnOffMotors(): mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE) mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE) <|reserved_special_token_0|> myMotor1.run(Adafruit_MotorHAT.RELEASE) myMotor2.run(Adafruit_MotorHAT.RELEASE) <|reserved_special_token_0|> myMotor1.setSpeed(startSpeed) myMotor2.setSpeed(startSpeed) def sig_handler(_signo, _stack_frame): turnOffMotors() GPIO.remove_event_detect(leftEncoderGPIO) GPIO.remove_event_detect(rightEncoderGPIO) GPIO.cleanup() print('\n') print(str(leftSteps) + ' left steps are ' + str(leftDistance) + ' cm driven.') print(str(rightSteps) + ' right steps are ' + str(rightDistance) + ' cm driven.\n') sys.exit(0) signal.signal(signal.SIGINT, sig_handler) signal.signal(signal.SIGHUP, sig_handler) signal.signal(signal.SIGTERM, sig_handler) print('Starting in 3...') time.sleep(1) print('Starting in 2...') time.sleep(1) print('Starting in 1...') time.sleep(1) print('GO!\n') while True: print('Forward! ') enableEncoderTracking() myMotor1.run(Adafruit_MotorHAT.FORWARD) myMotor2.run(Adafruit_MotorHAT.FORWARD) print('\tSpeed up...') for i in range(startSpeed, maxSpeed): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) print('+++ full speed for ' + str(fullSpeedDuration) + ' seconds +++') time.sleep(fullSpeedDuration) print('\tSlow down...') for i in range(maxSpeed, startSpeed, -1): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) disableEncoderTracking() time.sleep(1) """ print("Backward! 
") myMotor1.run(Adafruit_MotorHAT.BACKWARD) myMotor2.run(Adafruit_MotorHAT.BACKWARD) print(" Speed up...") for i in range(startSpeed, maxSpeed): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) print(" Slow down...") for i in range(maxSpeed, startSpeed, -1): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) print("Release") myMotor1.run(Adafruit_MotorHAT.RELEASE) myMotor2.run(Adafruit_MotorHAT.RELEASE) """ time.sleep(0.25) <|reserved_special_token_1|> <|reserved_special_token_0|> GPIO.setmode(GPIO.BCM) leftEncoderGPIO = 27 rightEncoderGPIO = 22 print('setup...') GPIO.setup(leftEncoderGPIO, GPIO.IN) GPIO.setup(rightEncoderGPIO, GPIO.IN) leftSteps = 0 rightSteps = 0 leftDistance = 0 rightDistance = 0 def leftEncoderCallback(answer): global leftSteps leftSteps = leftSteps + 1 global leftDistance leftDistance = leftDistance + 0.24 print('Left Encoder.') def rightEncoderCallback(answer): global rightSteps rightSteps = rightSteps + 1 global rightDistance rightDistance = rightDistance + 0.24 print('Right Encoder.') print('registering event handlers...') def enableEncoderTracking(): GPIO.add_event_detect(leftEncoderGPIO, GPIO.FALLING, callback= leftEncoderCallback) GPIO.add_event_detect(rightEncoderGPIO, GPIO.FALLING, callback= rightEncoderCallback) def disableEncoderTracking(): GPIO.remove_event_detect(leftEncoderGPIO) GPIO.remove_event_detect(rightEncoderGPIO) <|reserved_special_token_0|> mh = Adafruit_MotorHAT(addr=96) def turnOffMotors(): mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE) mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE) myMotor1 = mh.getMotor(1) myMotor2 = mh.getMotor(2) myMotor1.run(Adafruit_MotorHAT.RELEASE) myMotor2.run(Adafruit_MotorHAT.RELEASE) startSpeed = 100 maxSpeed = 255 fullSpeedDuration = 0 myMotor1.setSpeed(startSpeed) myMotor2.setSpeed(startSpeed) def sig_handler(_signo, _stack_frame): turnOffMotors() GPIO.remove_event_detect(leftEncoderGPIO) GPIO.remove_event_detect(rightEncoderGPIO) GPIO.cleanup() print('\n') print(str(leftSteps) + ' left steps are ' + str(leftDistance) + ' cm driven.') print(str(rightSteps) + ' right steps are ' + str(rightDistance) + ' cm driven.\n') sys.exit(0) signal.signal(signal.SIGINT, sig_handler) signal.signal(signal.SIGHUP, sig_handler) signal.signal(signal.SIGTERM, sig_handler) print('Starting in 3...') time.sleep(1) print('Starting in 2...') time.sleep(1) print('Starting in 1...') time.sleep(1) print('GO!\n') while True: print('Forward! ') enableEncoderTracking() myMotor1.run(Adafruit_MotorHAT.FORWARD) myMotor2.run(Adafruit_MotorHAT.FORWARD) print('\tSpeed up...') for i in range(startSpeed, maxSpeed): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) print('+++ full speed for ' + str(fullSpeedDuration) + ' seconds +++') time.sleep(fullSpeedDuration) print('\tSlow down...') for i in range(maxSpeed, startSpeed, -1): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) disableEncoderTracking() time.sleep(1) """ print("Backward! 
") myMotor1.run(Adafruit_MotorHAT.BACKWARD) myMotor2.run(Adafruit_MotorHAT.BACKWARD) print(" Speed up...") for i in range(startSpeed, maxSpeed): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) print(" Slow down...") for i in range(maxSpeed, startSpeed, -1): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) print("Release") myMotor1.run(Adafruit_MotorHAT.RELEASE) myMotor2.run(Adafruit_MotorHAT.RELEASE) """ time.sleep(0.25) <|reserved_special_token_1|> import time import atexit import signal import sys import RPi.GPIO as GPIO GPIO.setmode(GPIO.BCM) leftEncoderGPIO = 27 rightEncoderGPIO = 22 print('setup...') GPIO.setup(leftEncoderGPIO, GPIO.IN) GPIO.setup(rightEncoderGPIO, GPIO.IN) leftSteps = 0 rightSteps = 0 leftDistance = 0 rightDistance = 0 def leftEncoderCallback(answer): global leftSteps leftSteps = leftSteps + 1 global leftDistance leftDistance = leftDistance + 0.24 print('Left Encoder.') def rightEncoderCallback(answer): global rightSteps rightSteps = rightSteps + 1 global rightDistance rightDistance = rightDistance + 0.24 print('Right Encoder.') print('registering event handlers...') def enableEncoderTracking(): GPIO.add_event_detect(leftEncoderGPIO, GPIO.FALLING, callback= leftEncoderCallback) GPIO.add_event_detect(rightEncoderGPIO, GPIO.FALLING, callback= rightEncoderCallback) def disableEncoderTracking(): GPIO.remove_event_detect(leftEncoderGPIO) GPIO.remove_event_detect(rightEncoderGPIO) from Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor mh = Adafruit_MotorHAT(addr=96) def turnOffMotors(): mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE) mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE) myMotor1 = mh.getMotor(1) myMotor2 = mh.getMotor(2) myMotor1.run(Adafruit_MotorHAT.RELEASE) myMotor2.run(Adafruit_MotorHAT.RELEASE) startSpeed = 100 maxSpeed = 255 fullSpeedDuration = 0 myMotor1.setSpeed(startSpeed) myMotor2.setSpeed(startSpeed) def sig_handler(_signo, _stack_frame): turnOffMotors() GPIO.remove_event_detect(leftEncoderGPIO) GPIO.remove_event_detect(rightEncoderGPIO) GPIO.cleanup() print('\n') print(str(leftSteps) + ' left steps are ' + str(leftDistance) + ' cm driven.') print(str(rightSteps) + ' right steps are ' + str(rightDistance) + ' cm driven.\n') sys.exit(0) signal.signal(signal.SIGINT, sig_handler) signal.signal(signal.SIGHUP, sig_handler) signal.signal(signal.SIGTERM, sig_handler) print('Starting in 3...') time.sleep(1) print('Starting in 2...') time.sleep(1) print('Starting in 1...') time.sleep(1) print('GO!\n') while True: print('Forward! ') enableEncoderTracking() myMotor1.run(Adafruit_MotorHAT.FORWARD) myMotor2.run(Adafruit_MotorHAT.FORWARD) print('\tSpeed up...') for i in range(startSpeed, maxSpeed): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) print('+++ full speed for ' + str(fullSpeedDuration) + ' seconds +++') time.sleep(fullSpeedDuration) print('\tSlow down...') for i in range(maxSpeed, startSpeed, -1): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) disableEncoderTracking() time.sleep(1) """ print("Backward! 
") myMotor1.run(Adafruit_MotorHAT.BACKWARD) myMotor2.run(Adafruit_MotorHAT.BACKWARD) print(" Speed up...") for i in range(startSpeed, maxSpeed): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) print(" Slow down...") for i in range(maxSpeed, startSpeed, -1): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) print("Release") myMotor1.run(Adafruit_MotorHAT.RELEASE) myMotor2.run(Adafruit_MotorHAT.RELEASE) """ time.sleep(0.25) <|reserved_special_token_1|> #!/usr/bin/python # coding=utf-8 import time import atexit # for signal handling import signal import sys # ---------------------- # Encoder stuff # ---------------------- import RPi.GPIO as GPIO # init GPIO.setmode(GPIO.BCM) # use the GPIO names, _not_ the pin numbers on the board # Raspberry Pi pin configuration: # pins BCM BOARD leftEncoderGPIO = 27 # pin rightEncoderGPIO = 22 # pin # setup print("setup...") GPIO.setup(leftEncoderGPIO, GPIO.IN) GPIO.setup(rightEncoderGPIO, GPIO.IN) # for counting encoder steps leftSteps = 0 rightSteps = 0 # driven distance in cm leftDistance = 0 rightDistance = 0 # encoder pulse detection by interrupt def leftEncoderCallback(answer): global leftSteps leftSteps = leftSteps +1 # measure distance global leftDistance leftDistance = leftDistance + 0.24 print("Left Encoder.") def rightEncoderCallback(answer): global rightSteps rightSteps = rightSteps +1 global rightDistance rightDistance = rightDistance + 0.24 print("Right Encoder.") # add GPIO event detectors print("registering event handlers...") # enabling event handlers (if needed only) def enableEncoderTracking(): GPIO.add_event_detect(leftEncoderGPIO, GPIO.FALLING, callback=leftEncoderCallback) GPIO.add_event_detect(rightEncoderGPIO, GPIO.FALLING, callback=rightEncoderCallback) # disabling event handlers def disableEncoderTracking(): GPIO.remove_event_detect(leftEncoderGPIO) GPIO.remove_event_detect(rightEncoderGPIO) # ---------------------- # Motor stuff # ---------------------- from Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor # create a default motor object, no changes to I2C address or frequency mh = Adafruit_MotorHAT(addr=0x60) # recommended for auto-disabling motors on shutdown! def turnOffMotors(): mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE) mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE) # user motor 1 and 2 on RasPi hat myMotor1 = mh.getMotor(1) myMotor2 = mh.getMotor(2) # turn off motors myMotor1.run(Adafruit_MotorHAT.RELEASE); myMotor2.run(Adafruit_MotorHAT.RELEASE); # set the speed (from 0 (off) to 255 (max speed)) startSpeed = 100 maxSpeed = 255 # max is 255! # test switch fullSpeedDuration = 0 # default 0 myMotor1.setSpeed(startSpeed) myMotor2.setSpeed(startSpeed) # ------------------ # my signal handler # ------------------ def sig_handler(_signo, _stack_frame): turnOffMotors(); ## GPIO cleanup GPIO.remove_event_detect(leftEncoderGPIO) GPIO.remove_event_detect(rightEncoderGPIO) GPIO.cleanup() print("\n") print(str(leftSteps) + " left steps are " + str(leftDistance) + " cm driven.") print(str(rightSteps) + " right steps are " + str(rightDistance) + " cm driven.\n") sys.exit(0) # signals to be handled signal.signal(signal.SIGINT, sig_handler) signal.signal(signal.SIGHUP, sig_handler) signal.signal(signal.SIGTERM, sig_handler) # # Cowntdown # print('Starting in 3...') time.sleep(1) print('Starting in 2...') time.sleep(1) print('Starting in 1...') time.sleep(1) print('GO!\n') ###### ###### forever - or until ctrl+c :) ###### while (True): ### drive # drive print("Forward! 
") # enable Odometrie enableEncoderTracking() myMotor1.run(Adafruit_MotorHAT.FORWARD) myMotor2.run(Adafruit_MotorHAT.FORWARD) print("\tSpeed up...") for i in range(startSpeed, maxSpeed): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) # full speed for n seconds print("+++ full speed for " + str(fullSpeedDuration) + " seconds +++") time.sleep(fullSpeedDuration) print("\tSlow down...") for i in range(maxSpeed, startSpeed, -1): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) # disable Odometrie disableEncoderTracking() # wait one second time.sleep(1) """ print("Backward! ") myMotor1.run(Adafruit_MotorHAT.BACKWARD) myMotor2.run(Adafruit_MotorHAT.BACKWARD) print("\tSpeed up...") for i in range(startSpeed, maxSpeed): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) print("\tSlow down...") for i in range(maxSpeed, startSpeed, -1): myMotor1.setSpeed(i) myMotor2.setSpeed(i) time.sleep(0.01) print("Release") myMotor1.run(Adafruit_MotorHAT.RELEASE) myMotor2.run(Adafruit_MotorHAT.RELEASE) """ # wait some time time.sleep(0.25)
flexible
{ "blob_id": "53841ba56589955e09b03018af1d0ae79b3756c4", "index": 5595, "step-1": "<mask token>\n\n\ndef leftEncoderCallback(answer):\n global leftSteps\n leftSteps = leftSteps + 1\n global leftDistance\n leftDistance = leftDistance + 0.24\n print('Left Encoder.')\n\n\ndef rightEncoderCallback(answer):\n global rightSteps\n rightSteps = rightSteps + 1\n global rightDistance\n rightDistance = rightDistance + 0.24\n print('Right Encoder.')\n\n\n<mask token>\n\n\ndef disableEncoderTracking():\n GPIO.remove_event_detect(leftEncoderGPIO)\n GPIO.remove_event_detect(rightEncoderGPIO)\n\n\n<mask token>\n\n\ndef turnOffMotors():\n mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE)\n mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE)\n\n\n<mask token>\n\n\ndef sig_handler(_signo, _stack_frame):\n turnOffMotors()\n GPIO.remove_event_detect(leftEncoderGPIO)\n GPIO.remove_event_detect(rightEncoderGPIO)\n GPIO.cleanup()\n print('\\n')\n print(str(leftSteps) + ' left steps are ' + str(leftDistance) +\n ' cm driven.')\n print(str(rightSteps) + ' right steps are ' + str(rightDistance) +\n ' cm driven.\\n')\n sys.exit(0)\n\n\n<mask token>\n", "step-2": "<mask token>\nGPIO.setmode(GPIO.BCM)\n<mask token>\nprint('setup...')\nGPIO.setup(leftEncoderGPIO, GPIO.IN)\nGPIO.setup(rightEncoderGPIO, GPIO.IN)\n<mask token>\n\n\ndef leftEncoderCallback(answer):\n global leftSteps\n leftSteps = leftSteps + 1\n global leftDistance\n leftDistance = leftDistance + 0.24\n print('Left Encoder.')\n\n\ndef rightEncoderCallback(answer):\n global rightSteps\n rightSteps = rightSteps + 1\n global rightDistance\n rightDistance = rightDistance + 0.24\n print('Right Encoder.')\n\n\nprint('registering event handlers...')\n\n\ndef enableEncoderTracking():\n GPIO.add_event_detect(leftEncoderGPIO, GPIO.FALLING, callback=\n leftEncoderCallback)\n GPIO.add_event_detect(rightEncoderGPIO, GPIO.FALLING, callback=\n rightEncoderCallback)\n\n\ndef disableEncoderTracking():\n GPIO.remove_event_detect(leftEncoderGPIO)\n GPIO.remove_event_detect(rightEncoderGPIO)\n\n\n<mask token>\n\n\ndef turnOffMotors():\n mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE)\n mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE)\n\n\n<mask token>\nmyMotor1.run(Adafruit_MotorHAT.RELEASE)\nmyMotor2.run(Adafruit_MotorHAT.RELEASE)\n<mask token>\nmyMotor1.setSpeed(startSpeed)\nmyMotor2.setSpeed(startSpeed)\n\n\ndef sig_handler(_signo, _stack_frame):\n turnOffMotors()\n GPIO.remove_event_detect(leftEncoderGPIO)\n GPIO.remove_event_detect(rightEncoderGPIO)\n GPIO.cleanup()\n print('\\n')\n print(str(leftSteps) + ' left steps are ' + str(leftDistance) +\n ' cm driven.')\n print(str(rightSteps) + ' right steps are ' + str(rightDistance) +\n ' cm driven.\\n')\n sys.exit(0)\n\n\nsignal.signal(signal.SIGINT, sig_handler)\nsignal.signal(signal.SIGHUP, sig_handler)\nsignal.signal(signal.SIGTERM, sig_handler)\nprint('Starting in 3...')\ntime.sleep(1)\nprint('Starting in 2...')\ntime.sleep(1)\nprint('Starting in 1...')\ntime.sleep(1)\nprint('GO!\\n')\nwhile True:\n print('Forward! 
')\n enableEncoderTracking()\n myMotor1.run(Adafruit_MotorHAT.FORWARD)\n myMotor2.run(Adafruit_MotorHAT.FORWARD)\n print('\\tSpeed up...')\n for i in range(startSpeed, maxSpeed):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n print('+++ full speed for ' + str(fullSpeedDuration) + ' seconds +++')\n time.sleep(fullSpeedDuration)\n print('\\tSlow down...')\n for i in range(maxSpeed, startSpeed, -1):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n disableEncoderTracking()\n time.sleep(1)\n \"\"\" print(\"Backward! \")\n myMotor1.run(Adafruit_MotorHAT.BACKWARD)\n myMotor2.run(Adafruit_MotorHAT.BACKWARD)\n\n print(\"\tSpeed up...\")\n for i in range(startSpeed, maxSpeed):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n\n print(\"\tSlow down...\")\n for i in range(maxSpeed, startSpeed, -1):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n\n print(\"Release\")\n myMotor1.run(Adafruit_MotorHAT.RELEASE)\n myMotor2.run(Adafruit_MotorHAT.RELEASE)\n \"\"\"\n time.sleep(0.25)\n", "step-3": "<mask token>\nGPIO.setmode(GPIO.BCM)\nleftEncoderGPIO = 27\nrightEncoderGPIO = 22\nprint('setup...')\nGPIO.setup(leftEncoderGPIO, GPIO.IN)\nGPIO.setup(rightEncoderGPIO, GPIO.IN)\nleftSteps = 0\nrightSteps = 0\nleftDistance = 0\nrightDistance = 0\n\n\ndef leftEncoderCallback(answer):\n global leftSteps\n leftSteps = leftSteps + 1\n global leftDistance\n leftDistance = leftDistance + 0.24\n print('Left Encoder.')\n\n\ndef rightEncoderCallback(answer):\n global rightSteps\n rightSteps = rightSteps + 1\n global rightDistance\n rightDistance = rightDistance + 0.24\n print('Right Encoder.')\n\n\nprint('registering event handlers...')\n\n\ndef enableEncoderTracking():\n GPIO.add_event_detect(leftEncoderGPIO, GPIO.FALLING, callback=\n leftEncoderCallback)\n GPIO.add_event_detect(rightEncoderGPIO, GPIO.FALLING, callback=\n rightEncoderCallback)\n\n\ndef disableEncoderTracking():\n GPIO.remove_event_detect(leftEncoderGPIO)\n GPIO.remove_event_detect(rightEncoderGPIO)\n\n\n<mask token>\nmh = Adafruit_MotorHAT(addr=96)\n\n\ndef turnOffMotors():\n mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE)\n mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE)\n\n\nmyMotor1 = mh.getMotor(1)\nmyMotor2 = mh.getMotor(2)\nmyMotor1.run(Adafruit_MotorHAT.RELEASE)\nmyMotor2.run(Adafruit_MotorHAT.RELEASE)\nstartSpeed = 100\nmaxSpeed = 255\nfullSpeedDuration = 0\nmyMotor1.setSpeed(startSpeed)\nmyMotor2.setSpeed(startSpeed)\n\n\ndef sig_handler(_signo, _stack_frame):\n turnOffMotors()\n GPIO.remove_event_detect(leftEncoderGPIO)\n GPIO.remove_event_detect(rightEncoderGPIO)\n GPIO.cleanup()\n print('\\n')\n print(str(leftSteps) + ' left steps are ' + str(leftDistance) +\n ' cm driven.')\n print(str(rightSteps) + ' right steps are ' + str(rightDistance) +\n ' cm driven.\\n')\n sys.exit(0)\n\n\nsignal.signal(signal.SIGINT, sig_handler)\nsignal.signal(signal.SIGHUP, sig_handler)\nsignal.signal(signal.SIGTERM, sig_handler)\nprint('Starting in 3...')\ntime.sleep(1)\nprint('Starting in 2...')\ntime.sleep(1)\nprint('Starting in 1...')\ntime.sleep(1)\nprint('GO!\\n')\nwhile True:\n print('Forward! 
')\n enableEncoderTracking()\n myMotor1.run(Adafruit_MotorHAT.FORWARD)\n myMotor2.run(Adafruit_MotorHAT.FORWARD)\n print('\\tSpeed up...')\n for i in range(startSpeed, maxSpeed):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n print('+++ full speed for ' + str(fullSpeedDuration) + ' seconds +++')\n time.sleep(fullSpeedDuration)\n print('\\tSlow down...')\n for i in range(maxSpeed, startSpeed, -1):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n disableEncoderTracking()\n time.sleep(1)\n \"\"\" print(\"Backward! \")\n myMotor1.run(Adafruit_MotorHAT.BACKWARD)\n myMotor2.run(Adafruit_MotorHAT.BACKWARD)\n\n print(\"\tSpeed up...\")\n for i in range(startSpeed, maxSpeed):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n\n print(\"\tSlow down...\")\n for i in range(maxSpeed, startSpeed, -1):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n\n print(\"Release\")\n myMotor1.run(Adafruit_MotorHAT.RELEASE)\n myMotor2.run(Adafruit_MotorHAT.RELEASE)\n \"\"\"\n time.sleep(0.25)\n", "step-4": "import time\nimport atexit\nimport signal\nimport sys\nimport RPi.GPIO as GPIO\nGPIO.setmode(GPIO.BCM)\nleftEncoderGPIO = 27\nrightEncoderGPIO = 22\nprint('setup...')\nGPIO.setup(leftEncoderGPIO, GPIO.IN)\nGPIO.setup(rightEncoderGPIO, GPIO.IN)\nleftSteps = 0\nrightSteps = 0\nleftDistance = 0\nrightDistance = 0\n\n\ndef leftEncoderCallback(answer):\n global leftSteps\n leftSteps = leftSteps + 1\n global leftDistance\n leftDistance = leftDistance + 0.24\n print('Left Encoder.')\n\n\ndef rightEncoderCallback(answer):\n global rightSteps\n rightSteps = rightSteps + 1\n global rightDistance\n rightDistance = rightDistance + 0.24\n print('Right Encoder.')\n\n\nprint('registering event handlers...')\n\n\ndef enableEncoderTracking():\n GPIO.add_event_detect(leftEncoderGPIO, GPIO.FALLING, callback=\n leftEncoderCallback)\n GPIO.add_event_detect(rightEncoderGPIO, GPIO.FALLING, callback=\n rightEncoderCallback)\n\n\ndef disableEncoderTracking():\n GPIO.remove_event_detect(leftEncoderGPIO)\n GPIO.remove_event_detect(rightEncoderGPIO)\n\n\nfrom Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor\nmh = Adafruit_MotorHAT(addr=96)\n\n\ndef turnOffMotors():\n mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE)\n mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE)\n\n\nmyMotor1 = mh.getMotor(1)\nmyMotor2 = mh.getMotor(2)\nmyMotor1.run(Adafruit_MotorHAT.RELEASE)\nmyMotor2.run(Adafruit_MotorHAT.RELEASE)\nstartSpeed = 100\nmaxSpeed = 255\nfullSpeedDuration = 0\nmyMotor1.setSpeed(startSpeed)\nmyMotor2.setSpeed(startSpeed)\n\n\ndef sig_handler(_signo, _stack_frame):\n turnOffMotors()\n GPIO.remove_event_detect(leftEncoderGPIO)\n GPIO.remove_event_detect(rightEncoderGPIO)\n GPIO.cleanup()\n print('\\n')\n print(str(leftSteps) + ' left steps are ' + str(leftDistance) +\n ' cm driven.')\n print(str(rightSteps) + ' right steps are ' + str(rightDistance) +\n ' cm driven.\\n')\n sys.exit(0)\n\n\nsignal.signal(signal.SIGINT, sig_handler)\nsignal.signal(signal.SIGHUP, sig_handler)\nsignal.signal(signal.SIGTERM, sig_handler)\nprint('Starting in 3...')\ntime.sleep(1)\nprint('Starting in 2...')\ntime.sleep(1)\nprint('Starting in 1...')\ntime.sleep(1)\nprint('GO!\\n')\nwhile True:\n print('Forward! 
')\n enableEncoderTracking()\n myMotor1.run(Adafruit_MotorHAT.FORWARD)\n myMotor2.run(Adafruit_MotorHAT.FORWARD)\n print('\\tSpeed up...')\n for i in range(startSpeed, maxSpeed):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n print('+++ full speed for ' + str(fullSpeedDuration) + ' seconds +++')\n time.sleep(fullSpeedDuration)\n print('\\tSlow down...')\n for i in range(maxSpeed, startSpeed, -1):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n disableEncoderTracking()\n time.sleep(1)\n \"\"\" print(\"Backward! \")\n myMotor1.run(Adafruit_MotorHAT.BACKWARD)\n myMotor2.run(Adafruit_MotorHAT.BACKWARD)\n\n print(\"\tSpeed up...\")\n for i in range(startSpeed, maxSpeed):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n\n print(\"\tSlow down...\")\n for i in range(maxSpeed, startSpeed, -1):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n\n print(\"Release\")\n myMotor1.run(Adafruit_MotorHAT.RELEASE)\n myMotor2.run(Adafruit_MotorHAT.RELEASE)\n \"\"\"\n time.sleep(0.25)\n", "step-5": "#!/usr/bin/python\n# coding=utf-8\n\nimport time\nimport atexit\n\n# for signal handling\nimport signal\nimport sys\n\n\n# ----------------------\n# Encoder stuff\n# ----------------------\nimport RPi.GPIO as GPIO\n\n# init\nGPIO.setmode(GPIO.BCM) # use the GPIO names, _not_ the pin numbers on the board\n\n# Raspberry Pi pin configuration:\n# pins\t BCM BOARD\nleftEncoderGPIO = 27 # pin\nrightEncoderGPIO = 22 # pin\n\n# setup\nprint(\"setup...\")\nGPIO.setup(leftEncoderGPIO, GPIO.IN)\nGPIO.setup(rightEncoderGPIO, GPIO.IN)\n\n# for counting encoder steps\nleftSteps = 0\nrightSteps = 0\n# driven distance in cm\nleftDistance = 0\nrightDistance = 0\n\n# encoder pulse detection by interrupt\ndef leftEncoderCallback(answer):\n global leftSteps\n leftSteps = leftSteps +1\n # measure distance\n global leftDistance\n leftDistance = leftDistance + 0.24\n print(\"Left Encoder.\")\n\ndef rightEncoderCallback(answer):\n global rightSteps\n rightSteps = rightSteps +1\n global rightDistance\n rightDistance = rightDistance + 0.24\n print(\"Right Encoder.\")\n\n# add GPIO event detectors\nprint(\"registering event handlers...\")\n\n# enabling event handlers (if needed only)\ndef enableEncoderTracking():\n GPIO.add_event_detect(leftEncoderGPIO, GPIO.FALLING, callback=leftEncoderCallback)\n GPIO.add_event_detect(rightEncoderGPIO, GPIO.FALLING, callback=rightEncoderCallback)\n\n# disabling event handlers\ndef disableEncoderTracking():\n GPIO.remove_event_detect(leftEncoderGPIO)\n GPIO.remove_event_detect(rightEncoderGPIO)\n\n\n# ----------------------\n# Motor stuff\n# ----------------------\nfrom Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor\n\n# create a default motor object, no changes to I2C address or frequency\nmh = Adafruit_MotorHAT(addr=0x60)\n\n# recommended for auto-disabling motors on shutdown!\ndef turnOffMotors():\n mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE)\n mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE)\n\n# user motor 1 and 2 on RasPi hat\nmyMotor1 = mh.getMotor(1)\nmyMotor2 = mh.getMotor(2)\n\n# turn off motors\nmyMotor1.run(Adafruit_MotorHAT.RELEASE);\nmyMotor2.run(Adafruit_MotorHAT.RELEASE);\n\n# set the speed (from 0 (off) to 255 (max speed))\nstartSpeed = 100\nmaxSpeed = 255 # max is 255!\n\n# test switch\nfullSpeedDuration = 0 # default 0\n\nmyMotor1.setSpeed(startSpeed)\nmyMotor2.setSpeed(startSpeed)\n\n\n# ------------------\n# my signal handler\n# ------------------\ndef sig_handler(_signo, _stack_frame):\n 
turnOffMotors();\n ## GPIO cleanup\n GPIO.remove_event_detect(leftEncoderGPIO)\n GPIO.remove_event_detect(rightEncoderGPIO)\n GPIO.cleanup()\n print(\"\\n\")\n print(str(leftSteps) + \" left steps are \" + str(leftDistance) + \" cm driven.\")\n print(str(rightSteps) + \" right steps are \" + str(rightDistance) + \" cm driven.\\n\")\n sys.exit(0)\n\n# signals to be handled\nsignal.signal(signal.SIGINT, sig_handler)\nsignal.signal(signal.SIGHUP, sig_handler)\nsignal.signal(signal.SIGTERM, sig_handler)\n\n\n#\n# Cowntdown\n#\nprint('Starting in 3...')\ntime.sleep(1)\nprint('Starting in 2...')\ntime.sleep(1)\nprint('Starting in 1...')\ntime.sleep(1)\nprint('GO!\\n')\n\n\n######\n###### forever - or until ctrl+c :)\n######\nwhile (True):\n ### drive\n # drive\n print(\"Forward! \")\n # enable Odometrie\n enableEncoderTracking()\n\n myMotor1.run(Adafruit_MotorHAT.FORWARD)\n myMotor2.run(Adafruit_MotorHAT.FORWARD)\n\n print(\"\\tSpeed up...\")\n for i in range(startSpeed, maxSpeed):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n\n # full speed for n seconds\n print(\"+++ full speed for \" + str(fullSpeedDuration) + \" seconds +++\")\n time.sleep(fullSpeedDuration)\n\n print(\"\\tSlow down...\")\n for i in range(maxSpeed, startSpeed, -1):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n\n # disable Odometrie\n disableEncoderTracking()\n\n # wait one second\n time.sleep(1)\n\n \"\"\" print(\"Backward! \")\n myMotor1.run(Adafruit_MotorHAT.BACKWARD)\n myMotor2.run(Adafruit_MotorHAT.BACKWARD)\n\n print(\"\\tSpeed up...\")\n for i in range(startSpeed, maxSpeed):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n\n print(\"\\tSlow down...\")\n for i in range(maxSpeed, startSpeed, -1):\n myMotor1.setSpeed(i)\n myMotor2.setSpeed(i)\n time.sleep(0.01)\n\n print(\"Release\")\n myMotor1.run(Adafruit_MotorHAT.RELEASE)\n myMotor2.run(Adafruit_MotorHAT.RELEASE)\n \"\"\"\n\n # wait some time\n time.sleep(0.25)\n", "step-ids": [ 5, 7, 8, 9, 10 ] }
[ 5, 7, 8, 9, 10 ]
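# A hedged aside on the record above: its FALLING-edge callbacks count every
# pulse, so edge noise or contact bounce can inflate leftSteps/rightSteps.
# RPi.GPIO's add_event_detect accepts a bouncetime in milliseconds; BCM pin 27
# matches the record's left encoder, but the 5 ms window is an assumption
# about the hardware, not a value from the record.
import RPi.GPIO as GPIO

GPIO.setmode(GPIO.BCM)
GPIO.setup(27, GPIO.IN)
GPIO.add_event_detect(27, GPIO.FALLING, callback=lambda channel: None, bouncetime=5)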
<|reserved_special_token_0|> class BaseLoader(metaclass=abc.ABCMeta): @property def plugin_class(self): raise NotImplementedError() <|reserved_special_token_0|> @abc.abstractmethod def get_options(self): """Return the list of parameters associated with the auth plugin. This list may be used to generate CLI or config arguments. :returns: A list of Param objects describing available plugin parameters. :rtype: list """ return [] <|reserved_special_token_0|> def load_from_options(self, **kwargs): """Create a plugin from the arguments retrieved from get_options. A client can override this function to do argument validation or to handle differences between the registered options and what is required to create the plugin. """ missing_required = [o for o in self.get_options() if o.required and kwargs.get(o.dest) is None] if missing_required: raise exceptions.MissingRequiredOptions(missing_required) return self.create_plugin(**kwargs) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def _auth_plugin_available(ext): """Read the value of available for whether to load this plugin.""" return ext.obj.available <|reserved_special_token_0|> def get_available_plugin_loaders(): """Retrieve all the plugin classes available on the system. :returns: A dict with plugin entrypoint name as the key and the plugin loader as the value. :rtype: dict """ mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE, check_func=_auth_plugin_available, invoke_on_load=True, propagate_map_exceptions=True) return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj))) def get_plugin_loader(name): """Retrieve a plugin class by its entrypoint name. :param str name: The name of the object to get. :returns: An auth plugin class. :rtype: :py:class:`keystoneauth1.loading.BaseLoader` :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin: if a plugin cannot be created. """ try: mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE, invoke_on_load=True, name=name) except RuntimeError: raise exceptions.NoMatchingPlugin(name) return mgr.driver def get_plugin_options(name): """Get the options for a specific plugin. This will be the list of options that is registered and loaded by the specified plugin. :returns: A list of :py:class:`keystoneauth1.loading.Opt` options. :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin: if a plugin cannot be created. """ return get_plugin_loader(name).get_options() class BaseLoader(metaclass=abc.ABCMeta): @property def plugin_class(self): raise NotImplementedError() def create_plugin(self, **kwargs): """Create a plugin from the options available for the loader. Given the options that were specified by the loader create an appropriate plugin. You can override this function in your loader. This used to be specified by providing the plugin_class property and this is still supported, however specifying a property didn't let you choose a plugin type based upon the options that were presented. Override this function if you wish to return different plugins based on the options presented, otherwise you can simply provide the plugin_class property. Added 2.9 """ return self.plugin_class(**kwargs) @abc.abstractmethod def get_options(self): """Return the list of parameters associated with the auth plugin. This list may be used to generate CLI or config arguments. :returns: A list of Param objects describing available plugin parameters. :rtype: list """ return [] @property def available(self): """Return if the plugin is available for loading. 
If a plugin is missing dependencies or for some other reason should not be available to the current system it should override this property and return False to exclude itself from the plugin list. :rtype: bool """ return True def load_from_options(self, **kwargs): """Create a plugin from the arguments retrieved from get_options. A client can override this function to do argument validation or to handle differences between the registered options and what is required to create the plugin. """ missing_required = [o for o in self.get_options() if o.required and kwargs.get(o.dest) is None] if missing_required: raise exceptions.MissingRequiredOptions(missing_required) return self.create_plugin(**kwargs) def load_from_options_getter(self, getter, **kwargs): """Load a plugin from getter function that returns appropriate values. To handle cases other than the provided CONF and CLI loading you can specify a custom loader function that will be queried for the option value. The getter is a function that takes a :py:class:`keystoneauth1.loading.Opt` and returns a value to load with. :param getter: A function that returns a value for the given opt. :type getter: callable :returns: An authentication Plugin. :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin` """ for opt in (o for o in self.get_options() if o.dest not in kwargs): val = getter(opt) if val is not None: val = opt.type(val) kwargs[opt.dest] = val return self.load_from_options(**kwargs) <|reserved_special_token_1|> <|reserved_special_token_0|> PLUGIN_NAMESPACE = 'keystoneauth1.plugin' __all__ = ('get_available_plugin_names', 'get_available_plugin_loaders', 'get_plugin_loader', 'get_plugin_options', 'BaseLoader', 'PLUGIN_NAMESPACE' ) def _auth_plugin_available(ext): """Read the value of available for whether to load this plugin.""" return ext.obj.available def get_available_plugin_names(): """Get the names of all the plugins that are available on the system. This is particularly useful for help and error text to prompt a user for example what plugins they may specify. :returns: A list of names. :rtype: frozenset """ mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE, check_func=_auth_plugin_available, invoke_on_load=True, propagate_map_exceptions=True) return frozenset(mgr.names()) def get_available_plugin_loaders(): """Retrieve all the plugin classes available on the system. :returns: A dict with plugin entrypoint name as the key and the plugin loader as the value. :rtype: dict """ mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE, check_func=_auth_plugin_available, invoke_on_load=True, propagate_map_exceptions=True) return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj))) def get_plugin_loader(name): """Retrieve a plugin class by its entrypoint name. :param str name: The name of the object to get. :returns: An auth plugin class. :rtype: :py:class:`keystoneauth1.loading.BaseLoader` :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin: if a plugin cannot be created. """ try: mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE, invoke_on_load=True, name=name) except RuntimeError: raise exceptions.NoMatchingPlugin(name) return mgr.driver def get_plugin_options(name): """Get the options for a specific plugin. This will be the list of options that is registered and loaded by the specified plugin. :returns: A list of :py:class:`keystoneauth1.loading.Opt` options. :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin: if a plugin cannot be created. 
""" return get_plugin_loader(name).get_options() class BaseLoader(metaclass=abc.ABCMeta): @property def plugin_class(self): raise NotImplementedError() def create_plugin(self, **kwargs): """Create a plugin from the options available for the loader. Given the options that were specified by the loader create an appropriate plugin. You can override this function in your loader. This used to be specified by providing the plugin_class property and this is still supported, however specifying a property didn't let you choose a plugin type based upon the options that were presented. Override this function if you wish to return different plugins based on the options presented, otherwise you can simply provide the plugin_class property. Added 2.9 """ return self.plugin_class(**kwargs) @abc.abstractmethod def get_options(self): """Return the list of parameters associated with the auth plugin. This list may be used to generate CLI or config arguments. :returns: A list of Param objects describing available plugin parameters. :rtype: list """ return [] @property def available(self): """Return if the plugin is available for loading. If a plugin is missing dependencies or for some other reason should not be available to the current system it should override this property and return False to exclude itself from the plugin list. :rtype: bool """ return True def load_from_options(self, **kwargs): """Create a plugin from the arguments retrieved from get_options. A client can override this function to do argument validation or to handle differences between the registered options and what is required to create the plugin. """ missing_required = [o for o in self.get_options() if o.required and kwargs.get(o.dest) is None] if missing_required: raise exceptions.MissingRequiredOptions(missing_required) return self.create_plugin(**kwargs) def load_from_options_getter(self, getter, **kwargs): """Load a plugin from getter function that returns appropriate values. To handle cases other than the provided CONF and CLI loading you can specify a custom loader function that will be queried for the option value. The getter is a function that takes a :py:class:`keystoneauth1.loading.Opt` and returns a value to load with. :param getter: A function that returns a value for the given opt. :type getter: callable :returns: An authentication Plugin. :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin` """ for opt in (o for o in self.get_options() if o.dest not in kwargs): val = getter(opt) if val is not None: val = opt.type(val) kwargs[opt.dest] = val return self.load_from_options(**kwargs) <|reserved_special_token_1|> import abc import stevedore from keystoneauth1 import exceptions PLUGIN_NAMESPACE = 'keystoneauth1.plugin' __all__ = ('get_available_plugin_names', 'get_available_plugin_loaders', 'get_plugin_loader', 'get_plugin_options', 'BaseLoader', 'PLUGIN_NAMESPACE' ) def _auth_plugin_available(ext): """Read the value of available for whether to load this plugin.""" return ext.obj.available def get_available_plugin_names(): """Get the names of all the plugins that are available on the system. This is particularly useful for help and error text to prompt a user for example what plugins they may specify. :returns: A list of names. :rtype: frozenset """ mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE, check_func=_auth_plugin_available, invoke_on_load=True, propagate_map_exceptions=True) return frozenset(mgr.names()) def get_available_plugin_loaders(): """Retrieve all the plugin classes available on the system. 
:returns: A dict with plugin entrypoint name as the key and the plugin loader as the value. :rtype: dict """ mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE, check_func=_auth_plugin_available, invoke_on_load=True, propagate_map_exceptions=True) return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj))) def get_plugin_loader(name): """Retrieve a plugin class by its entrypoint name. :param str name: The name of the object to get. :returns: An auth plugin class. :rtype: :py:class:`keystoneauth1.loading.BaseLoader` :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin: if a plugin cannot be created. """ try: mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE, invoke_on_load=True, name=name) except RuntimeError: raise exceptions.NoMatchingPlugin(name) return mgr.driver def get_plugin_options(name): """Get the options for a specific plugin. This will be the list of options that is registered and loaded by the specified plugin. :returns: A list of :py:class:`keystoneauth1.loading.Opt` options. :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin: if a plugin cannot be created. """ return get_plugin_loader(name).get_options() class BaseLoader(metaclass=abc.ABCMeta): @property def plugin_class(self): raise NotImplementedError() def create_plugin(self, **kwargs): """Create a plugin from the options available for the loader. Given the options that were specified by the loader create an appropriate plugin. You can override this function in your loader. This used to be specified by providing the plugin_class property and this is still supported, however specifying a property didn't let you choose a plugin type based upon the options that were presented. Override this function if you wish to return different plugins based on the options presented, otherwise you can simply provide the plugin_class property. Added 2.9 """ return self.plugin_class(**kwargs) @abc.abstractmethod def get_options(self): """Return the list of parameters associated with the auth plugin. This list may be used to generate CLI or config arguments. :returns: A list of Param objects describing available plugin parameters. :rtype: list """ return [] @property def available(self): """Return if the plugin is available for loading. If a plugin is missing dependencies or for some other reason should not be available to the current system it should override this property and return False to exclude itself from the plugin list. :rtype: bool """ return True def load_from_options(self, **kwargs): """Create a plugin from the arguments retrieved from get_options. A client can override this function to do argument validation or to handle differences between the registered options and what is required to create the plugin. """ missing_required = [o for o in self.get_options() if o.required and kwargs.get(o.dest) is None] if missing_required: raise exceptions.MissingRequiredOptions(missing_required) return self.create_plugin(**kwargs) def load_from_options_getter(self, getter, **kwargs): """Load a plugin from getter function that returns appropriate values. To handle cases other than the provided CONF and CLI loading you can specify a custom loader function that will be queried for the option value. The getter is a function that takes a :py:class:`keystoneauth1.loading.Opt` and returns a value to load with. :param getter: A function that returns a value for the given opt. :type getter: callable :returns: An authentication Plugin. 
:rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin` """ for opt in (o for o in self.get_options() if o.dest not in kwargs): val = getter(opt) if val is not None: val = opt.type(val) kwargs[opt.dest] = val return self.load_from_options(**kwargs) <|reserved_special_token_1|> # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import stevedore from keystoneauth1 import exceptions PLUGIN_NAMESPACE = 'keystoneauth1.plugin' __all__ = ('get_available_plugin_names', 'get_available_plugin_loaders', 'get_plugin_loader', 'get_plugin_options', 'BaseLoader', 'PLUGIN_NAMESPACE') def _auth_plugin_available(ext): """Read the value of available for whether to load this plugin.""" return ext.obj.available def get_available_plugin_names(): """Get the names of all the plugins that are available on the system. This is particularly useful for help and error text to prompt a user for example what plugins they may specify. :returns: A list of names. :rtype: frozenset """ mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE, check_func=_auth_plugin_available, invoke_on_load=True, propagate_map_exceptions=True) return frozenset(mgr.names()) def get_available_plugin_loaders(): """Retrieve all the plugin classes available on the system. :returns: A dict with plugin entrypoint name as the key and the plugin loader as the value. :rtype: dict """ mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE, check_func=_auth_plugin_available, invoke_on_load=True, propagate_map_exceptions=True) return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj))) def get_plugin_loader(name): """Retrieve a plugin class by its entrypoint name. :param str name: The name of the object to get. :returns: An auth plugin class. :rtype: :py:class:`keystoneauth1.loading.BaseLoader` :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin: if a plugin cannot be created. """ try: mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE, invoke_on_load=True, name=name) except RuntimeError: raise exceptions.NoMatchingPlugin(name) return mgr.driver def get_plugin_options(name): """Get the options for a specific plugin. This will be the list of options that is registered and loaded by the specified plugin. :returns: A list of :py:class:`keystoneauth1.loading.Opt` options. :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin: if a plugin cannot be created. """ return get_plugin_loader(name).get_options() class BaseLoader(metaclass=abc.ABCMeta): @property def plugin_class(self): raise NotImplementedError() def create_plugin(self, **kwargs): """Create a plugin from the options available for the loader. Given the options that were specified by the loader create an appropriate plugin. You can override this function in your loader. This used to be specified by providing the plugin_class property and this is still supported, however specifying a property didn't let you choose a plugin type based upon the options that were presented. 
Override this function if you wish to return different plugins based on the options presented, otherwise you can simply provide the plugin_class property. Added 2.9 """ return self.plugin_class(**kwargs) @abc.abstractmethod def get_options(self): """Return the list of parameters associated with the auth plugin. This list may be used to generate CLI or config arguments. :returns: A list of Param objects describing available plugin parameters. :rtype: list """ return [] @property def available(self): """Return if the plugin is available for loading. If a plugin is missing dependencies or for some other reason should not be available to the current system it should override this property and return False to exclude itself from the plugin list. :rtype: bool """ return True def load_from_options(self, **kwargs): """Create a plugin from the arguments retrieved from get_options. A client can override this function to do argument validation or to handle differences between the registered options and what is required to create the plugin. """ missing_required = [o for o in self.get_options() if o.required and kwargs.get(o.dest) is None] if missing_required: raise exceptions.MissingRequiredOptions(missing_required) return self.create_plugin(**kwargs) def load_from_options_getter(self, getter, **kwargs): """Load a plugin from getter function that returns appropriate values. To handle cases other than the provided CONF and CLI loading you can specify a custom loader function that will be queried for the option value. The getter is a function that takes a :py:class:`keystoneauth1.loading.Opt` and returns a value to load with. :param getter: A function that returns a value for the given opt. :type getter: callable :returns: An authentication Plugin. :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin` """ for opt in (o for o in self.get_options() if o.dest not in kwargs): val = getter(opt) if val is not None: val = opt.type(val) kwargs[opt.dest] = val return self.load_from_options(**kwargs)
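# A minimal usage sketch for the loader API defined above. The 'password'
# entrypoint name and option names follow keystoneauth1's stock password
# plugin and are assumptions about what is registered on a given system; the
# URL and credentials are placeholders, not values from this module.
loader = get_plugin_loader('password')  # raises NoMatchingPlugin if unregistered
plugin = loader.load_from_options(
    auth_url='https://keystone.example.com/v3',
    username='demo',
    password='secret',
    user_domain_name='Default',
    project_name='demo',
    project_domain_name='Default')
# load_from_options() validates the loader's required Opts before instantiating.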
flexible
{ "blob_id": "53127de883fb5da3214d13904664566269becba6", "index": 3570, "step-1": "<mask token>\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n <mask token>\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n <mask token>\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options() if o.required and\n kwargs.get(o.dest) is None]\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n return self.create_plugin(**kwargs)\n <mask token>\n", "step-2": "<mask token>\n\n\ndef _auth_plugin_available(ext):\n \"\"\"Read the value of available for whether to load this plugin.\"\"\"\n return ext.obj.available\n\n\n<mask token>\n\n\ndef get_available_plugin_loaders():\n \"\"\"Retrieve all the plugin classes available on the system.\n\n :returns: A dict with plugin entrypoint name as the key and the plugin\n loader as the value.\n :rtype: dict\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))\n\n\ndef get_plugin_loader(name):\n \"\"\"Retrieve a plugin class by its entrypoint name.\n\n :param str name: The name of the object to get.\n\n :returns: An auth plugin class.\n :rtype: :py:class:`keystoneauth1.loading.BaseLoader`\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n try:\n mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,\n invoke_on_load=True, name=name)\n except RuntimeError:\n raise exceptions.NoMatchingPlugin(name)\n return mgr.driver\n\n\ndef get_plugin_options(name):\n \"\"\"Get the options for a specific plugin.\n\n This will be the list of options that is registered and loaded by the\n specified plugin.\n\n :returns: A list of :py:class:`keystoneauth1.loading.Opt` options.\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n return get_plugin_loader(name).get_options()\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n\n def create_plugin(self, **kwargs):\n \"\"\"Create a plugin from the options available for the loader.\n\n Given the options that were specified by the loader create an\n appropriate plugin. 
You can override this function in your loader.\n\n This used to be specified by providing the plugin_class property and\n this is still supported, however specifying a property didn't let you\n choose a plugin type based upon the options that were presented.\n\n Override this function if you wish to return different plugins based on\n the options presented, otherwise you can simply provide the\n plugin_class property.\n\n Added 2.9\n \"\"\"\n return self.plugin_class(**kwargs)\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n\n @property\n def available(self):\n \"\"\"Return if the plugin is available for loading.\n\n If a plugin is missing dependencies or for some other reason should not\n be available to the current system it should override this property and\n return False to exclude itself from the plugin list.\n\n :rtype: bool\n \"\"\"\n return True\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options() if o.required and\n kwargs.get(o.dest) is None]\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n return self.create_plugin(**kwargs)\n\n def load_from_options_getter(self, getter, **kwargs):\n \"\"\"Load a plugin from getter function that returns appropriate values.\n\n To handle cases other than the provided CONF and CLI loading you can\n specify a custom loader function that will be queried for the option\n value.\n The getter is a function that takes a\n :py:class:`keystoneauth1.loading.Opt` and returns a value to load with.\n\n :param getter: A function that returns a value for the given opt.\n :type getter: callable\n\n :returns: An authentication Plugin.\n :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin`\n \"\"\"\n for opt in (o for o in self.get_options() if o.dest not in kwargs):\n val = getter(opt)\n if val is not None:\n val = opt.type(val)\n kwargs[opt.dest] = val\n return self.load_from_options(**kwargs)\n", "step-3": "<mask token>\nPLUGIN_NAMESPACE = 'keystoneauth1.plugin'\n__all__ = ('get_available_plugin_names', 'get_available_plugin_loaders',\n 'get_plugin_loader', 'get_plugin_options', 'BaseLoader', 'PLUGIN_NAMESPACE'\n )\n\n\ndef _auth_plugin_available(ext):\n \"\"\"Read the value of available for whether to load this plugin.\"\"\"\n return ext.obj.available\n\n\ndef get_available_plugin_names():\n \"\"\"Get the names of all the plugins that are available on the system.\n\n This is particularly useful for help and error text to prompt a user for\n example what plugins they may specify.\n\n :returns: A list of names.\n :rtype: frozenset\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return frozenset(mgr.names())\n\n\ndef get_available_plugin_loaders():\n \"\"\"Retrieve all the plugin classes available on the system.\n\n :returns: A dict with plugin entrypoint name as the key and the plugin\n loader as the value.\n :rtype: dict\n \"\"\"\n mgr = 
stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))\n\n\ndef get_plugin_loader(name):\n \"\"\"Retrieve a plugin class by its entrypoint name.\n\n :param str name: The name of the object to get.\n\n :returns: An auth plugin class.\n :rtype: :py:class:`keystoneauth1.loading.BaseLoader`\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n try:\n mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,\n invoke_on_load=True, name=name)\n except RuntimeError:\n raise exceptions.NoMatchingPlugin(name)\n return mgr.driver\n\n\ndef get_plugin_options(name):\n \"\"\"Get the options for a specific plugin.\n\n This will be the list of options that is registered and loaded by the\n specified plugin.\n\n :returns: A list of :py:class:`keystoneauth1.loading.Opt` options.\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n return get_plugin_loader(name).get_options()\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n\n def create_plugin(self, **kwargs):\n \"\"\"Create a plugin from the options available for the loader.\n\n Given the options that were specified by the loader create an\n appropriate plugin. You can override this function in your loader.\n\n This used to be specified by providing the plugin_class property and\n this is still supported, however specifying a property didn't let you\n choose a plugin type based upon the options that were presented.\n\n Override this function if you wish to return different plugins based on\n the options presented, otherwise you can simply provide the\n plugin_class property.\n\n Added 2.9\n \"\"\"\n return self.plugin_class(**kwargs)\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n\n @property\n def available(self):\n \"\"\"Return if the plugin is available for loading.\n\n If a plugin is missing dependencies or for some other reason should not\n be available to the current system it should override this property and\n return False to exclude itself from the plugin list.\n\n :rtype: bool\n \"\"\"\n return True\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options() if o.required and\n kwargs.get(o.dest) is None]\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n return self.create_plugin(**kwargs)\n\n def load_from_options_getter(self, getter, **kwargs):\n \"\"\"Load a plugin from getter function that returns appropriate values.\n\n To handle cases other than the provided CONF and CLI loading you can\n specify a custom loader function that will be queried for the option\n value.\n The getter is a function that takes a\n :py:class:`keystoneauth1.loading.Opt` and returns a value to load with.\n\n :param getter: A function that returns a value for the 
given opt.\n :type getter: callable\n\n :returns: An authentication Plugin.\n :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin`\n \"\"\"\n for opt in (o for o in self.get_options() if o.dest not in kwargs):\n val = getter(opt)\n if val is not None:\n val = opt.type(val)\n kwargs[opt.dest] = val\n return self.load_from_options(**kwargs)\n", "step-4": "import abc\nimport stevedore\nfrom keystoneauth1 import exceptions\nPLUGIN_NAMESPACE = 'keystoneauth1.plugin'\n__all__ = ('get_available_plugin_names', 'get_available_plugin_loaders',\n 'get_plugin_loader', 'get_plugin_options', 'BaseLoader', 'PLUGIN_NAMESPACE'\n )\n\n\ndef _auth_plugin_available(ext):\n \"\"\"Read the value of available for whether to load this plugin.\"\"\"\n return ext.obj.available\n\n\ndef get_available_plugin_names():\n \"\"\"Get the names of all the plugins that are available on the system.\n\n This is particularly useful for help and error text to prompt a user for\n example what plugins they may specify.\n\n :returns: A list of names.\n :rtype: frozenset\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return frozenset(mgr.names())\n\n\ndef get_available_plugin_loaders():\n \"\"\"Retrieve all the plugin classes available on the system.\n\n :returns: A dict with plugin entrypoint name as the key and the plugin\n loader as the value.\n :rtype: dict\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))\n\n\ndef get_plugin_loader(name):\n \"\"\"Retrieve a plugin class by its entrypoint name.\n\n :param str name: The name of the object to get.\n\n :returns: An auth plugin class.\n :rtype: :py:class:`keystoneauth1.loading.BaseLoader`\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n try:\n mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,\n invoke_on_load=True, name=name)\n except RuntimeError:\n raise exceptions.NoMatchingPlugin(name)\n return mgr.driver\n\n\ndef get_plugin_options(name):\n \"\"\"Get the options for a specific plugin.\n\n This will be the list of options that is registered and loaded by the\n specified plugin.\n\n :returns: A list of :py:class:`keystoneauth1.loading.Opt` options.\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n return get_plugin_loader(name).get_options()\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n\n def create_plugin(self, **kwargs):\n \"\"\"Create a plugin from the options available for the loader.\n\n Given the options that were specified by the loader create an\n appropriate plugin. 
You can override this function in your loader.\n\n This used to be specified by providing the plugin_class property and\n this is still supported, however specifying a property didn't let you\n choose a plugin type based upon the options that were presented.\n\n Override this function if you wish to return different plugins based on\n the options presented, otherwise you can simply provide the\n plugin_class property.\n\n Added 2.9\n \"\"\"\n return self.plugin_class(**kwargs)\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n\n @property\n def available(self):\n \"\"\"Return if the plugin is available for loading.\n\n If a plugin is missing dependencies or for some other reason should not\n be available to the current system it should override this property and\n return False to exclude itself from the plugin list.\n\n :rtype: bool\n \"\"\"\n return True\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options() if o.required and\n kwargs.get(o.dest) is None]\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n return self.create_plugin(**kwargs)\n\n def load_from_options_getter(self, getter, **kwargs):\n \"\"\"Load a plugin from getter function that returns appropriate values.\n\n To handle cases other than the provided CONF and CLI loading you can\n specify a custom loader function that will be queried for the option\n value.\n The getter is a function that takes a\n :py:class:`keystoneauth1.loading.Opt` and returns a value to load with.\n\n :param getter: A function that returns a value for the given opt.\n :type getter: callable\n\n :returns: An authentication Plugin.\n :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin`\n \"\"\"\n for opt in (o for o in self.get_options() if o.dest not in kwargs):\n val = getter(opt)\n if val is not None:\n val = opt.type(val)\n kwargs[opt.dest] = val\n return self.load_from_options(**kwargs)\n", "step-5": "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport abc\n\nimport stevedore\n\nfrom keystoneauth1 import exceptions\n\nPLUGIN_NAMESPACE = 'keystoneauth1.plugin'\n\n\n__all__ = ('get_available_plugin_names',\n 'get_available_plugin_loaders',\n 'get_plugin_loader',\n 'get_plugin_options',\n 'BaseLoader',\n 'PLUGIN_NAMESPACE')\n\n\ndef _auth_plugin_available(ext):\n \"\"\"Read the value of available for whether to load this plugin.\"\"\"\n return ext.obj.available\n\n\ndef get_available_plugin_names():\n \"\"\"Get the names of all the plugins that are available on the system.\n\n This is particularly useful for help and error text to prompt a user for\n example what plugins they may specify.\n\n :returns: A list of names.\n :rtype: frozenset\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available,\n invoke_on_load=True,\n propagate_map_exceptions=True)\n return frozenset(mgr.names())\n\n\ndef get_available_plugin_loaders():\n \"\"\"Retrieve all the plugin classes available on the system.\n\n :returns: A dict with plugin entrypoint name as the key and the plugin\n loader as the value.\n :rtype: dict\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available,\n invoke_on_load=True,\n propagate_map_exceptions=True)\n\n return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))\n\n\ndef get_plugin_loader(name):\n \"\"\"Retrieve a plugin class by its entrypoint name.\n\n :param str name: The name of the object to get.\n\n :returns: An auth plugin class.\n :rtype: :py:class:`keystoneauth1.loading.BaseLoader`\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n try:\n mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,\n invoke_on_load=True,\n name=name)\n except RuntimeError:\n raise exceptions.NoMatchingPlugin(name)\n\n return mgr.driver\n\n\ndef get_plugin_options(name):\n \"\"\"Get the options for a specific plugin.\n\n This will be the list of options that is registered and loaded by the\n specified plugin.\n\n :returns: A list of :py:class:`keystoneauth1.loading.Opt` options.\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n return get_plugin_loader(name).get_options()\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n\n def create_plugin(self, **kwargs):\n \"\"\"Create a plugin from the options available for the loader.\n\n Given the options that were specified by the loader create an\n appropriate plugin. 
You can override this function in your loader.\n\n This used to be specified by providing the plugin_class property and\n this is still supported, however specifying a property didn't let you\n choose a plugin type based upon the options that were presented.\n\n Override this function if you wish to return different plugins based on\n the options presented, otherwise you can simply provide the\n plugin_class property.\n\n Added 2.9\n \"\"\"\n return self.plugin_class(**kwargs)\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n\n @property\n def available(self):\n \"\"\"Return if the plugin is available for loading.\n\n If a plugin is missing dependencies or for some other reason should not\n be available to the current system it should override this property and\n return False to exclude itself from the plugin list.\n\n :rtype: bool\n \"\"\"\n return True\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options()\n if o.required and kwargs.get(o.dest) is None]\n\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n\n return self.create_plugin(**kwargs)\n\n def load_from_options_getter(self, getter, **kwargs):\n \"\"\"Load a plugin from getter function that returns appropriate values.\n\n To handle cases other than the provided CONF and CLI loading you can\n specify a custom loader function that will be queried for the option\n value.\n The getter is a function that takes a\n :py:class:`keystoneauth1.loading.Opt` and returns a value to load with.\n\n :param getter: A function that returns a value for the given opt.\n :type getter: callable\n\n :returns: An authentication Plugin.\n :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin`\n \"\"\"\n for opt in (o for o in self.get_options() if o.dest not in kwargs):\n val = getter(opt)\n if val is not None:\n val = opt.type(val)\n kwargs[opt.dest] = val\n\n return self.load_from_options(**kwargs)\n", "step-ids": [ 4, 11, 13, 14, 15 ] }
[ 4, 11, 13, 14, 15 ]
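
A usage sketch for the loading API in the record above: the 'password' plugin name and its option names follow keystoneauth1's conventional password plugin, and the endpoint and credential values are placeholders, not taken from the record.

from keystoneauth1 import loading
from keystoneauth1 import session

# Resolve a loader by entrypoint name, then build the plugin from options;
# load_from_options() raises MissingRequiredOptions if a required opt is absent.
loader = loading.get_plugin_loader('password')
auth = loader.load_from_options(
    auth_url='http://keystone.example:5000/v3',  # placeholder endpoint
    username='demo',                             # placeholder credentials
    password='secret',
    project_name='demo',
    user_domain_id='default',
    project_domain_id='default',
)
sess = session.Session(auth=auth)  # the plugin then authenticates HTTP requests
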
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> with open('C:/users/vinhe/code/projects/golf/golf_stats.csv', 'r') as file_obj: content = file_obj.read() client.import_csv(spreadsheet.id, data=content) <|reserved_special_token_1|> <|reserved_special_token_0|> scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/spreadsheets', 'https://www.googleapis.com/auth/drive.file', 'https://www.googleapis.com/auth/drive'] credentials = ServiceAccountCredentials.from_json_keyfile_name( 'client_secret.json', scope) client = gspread.authorize(credentials) spreadsheet = client.open('golf-csv-to-sheets') with open('C:/users/vinhe/code/projects/golf/golf_stats.csv', 'r') as file_obj: content = file_obj.read() client.import_csv(spreadsheet.id, data=content) <|reserved_special_token_1|> <|reserved_special_token_0|> import gspread from oauth2client.service_account import ServiceAccountCredentials scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/spreadsheets', 'https://www.googleapis.com/auth/drive.file', 'https://www.googleapis.com/auth/drive'] credentials = ServiceAccountCredentials.from_json_keyfile_name( 'client_secret.json', scope) client = gspread.authorize(credentials) spreadsheet = client.open('golf-csv-to-sheets') with open('C:/users/vinhe/code/projects/golf/golf_stats.csv', 'r') as file_obj: content = file_obj.read() client.import_csv(spreadsheet.id, data=content) <|reserved_special_token_1|> # -*- coding: utf-8 -*- """ Created on Tue Aug 18 18:53:02 2020 @author: vinhe I followed below tutorial to push newly created csv to google sheets: https://medium.com/craftsmenltd/from-csv-to-google-sheet-using-python-ef097cb014f9 """ import gspread from oauth2client.service_account import ServiceAccountCredentials scope = ["https://spreadsheets.google.com/feeds", "https://www.googleapis.com/auth/spreadsheets", "https://www.googleapis.com/auth/drive.file", "https://www.googleapis.com/auth/drive"] credentials = ServiceAccountCredentials.from_json_keyfile_name('client_secret.json', scope) client = gspread.authorize(credentials) spreadsheet = client.open('golf-csv-to-sheets') with open('C:/users/vinhe/code/projects/golf/golf_stats.csv', 'r') as file_obj: content = file_obj.read() client.import_csv(spreadsheet.id, data=content)
flexible
{ "blob_id": "ac2edcd6ea71ebdc5b1df5fd4211632b5d8e2704", "index": 3019, "step-1": "<mask token>\n", "step-2": "<mask token>\nwith open('C:/users/vinhe/code/projects/golf/golf_stats.csv', 'r') as file_obj:\n content = file_obj.read()\n client.import_csv(spreadsheet.id, data=content)\n", "step-3": "<mask token>\nscope = ['https://spreadsheets.google.com/feeds',\n 'https://www.googleapis.com/auth/spreadsheets',\n 'https://www.googleapis.com/auth/drive.file',\n 'https://www.googleapis.com/auth/drive']\ncredentials = ServiceAccountCredentials.from_json_keyfile_name(\n 'client_secret.json', scope)\nclient = gspread.authorize(credentials)\nspreadsheet = client.open('golf-csv-to-sheets')\nwith open('C:/users/vinhe/code/projects/golf/golf_stats.csv', 'r') as file_obj:\n content = file_obj.read()\n client.import_csv(spreadsheet.id, data=content)\n", "step-4": "<mask token>\nimport gspread\nfrom oauth2client.service_account import ServiceAccountCredentials\nscope = ['https://spreadsheets.google.com/feeds',\n 'https://www.googleapis.com/auth/spreadsheets',\n 'https://www.googleapis.com/auth/drive.file',\n 'https://www.googleapis.com/auth/drive']\ncredentials = ServiceAccountCredentials.from_json_keyfile_name(\n 'client_secret.json', scope)\nclient = gspread.authorize(credentials)\nspreadsheet = client.open('golf-csv-to-sheets')\nwith open('C:/users/vinhe/code/projects/golf/golf_stats.csv', 'r') as file_obj:\n content = file_obj.read()\n client.import_csv(spreadsheet.id, data=content)\n", "step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Tue Aug 18 18:53:02 2020\r\n\r\n@author: vinhe\r\n\r\nI followed below tutorial to push newly created csv to google sheets:\r\nhttps://medium.com/craftsmenltd/from-csv-to-google-sheet-using-python-ef097cb014f9\r\n\r\n\"\"\"\r\n\r\n\r\nimport gspread\r\nfrom oauth2client.service_account import ServiceAccountCredentials\r\n\r\nscope = [\"https://spreadsheets.google.com/feeds\", \r\n \"https://www.googleapis.com/auth/spreadsheets\",\r\n \"https://www.googleapis.com/auth/drive.file\", \r\n \"https://www.googleapis.com/auth/drive\"]\r\n\r\ncredentials = ServiceAccountCredentials.from_json_keyfile_name('client_secret.json', scope)\r\nclient = gspread.authorize(credentials)\r\n\r\nspreadsheet = client.open('golf-csv-to-sheets')\r\n\r\nwith open('C:/users/vinhe/code/projects/golf/golf_stats.csv', 'r') as file_obj:\r\n content = file_obj.read()\r\n client.import_csv(spreadsheet.id, data=content)\r\n\r\n\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
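
A minimal read-back sketch for the import_csv call in the record above, assuming the same authorized client and the 'golf-csv-to-sheets' spreadsheet title; import_csv replaces the spreadsheet's contents, so the uploaded rows land in the first worksheet.

spreadsheet = client.open('golf-csv-to-sheets')
worksheet = spreadsheet.sheet1          # import_csv writes into the first sheet
rows = worksheet.get_all_values()       # list of rows, each a list of cell strings
print(f'{len(rows)} rows uploaded; header: {rows[0] if rows else "empty"}')
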
<|reserved_special_token_0|> <|reserved_special_token_1|> from .authenticators import CookieAuthenticator, HeaderAuthenticator from .paginators import LimitOffsetPaginator, PageNumberPaginator from .views import * <|reserved_special_token_1|> from .authenticators import CookieAuthenticator, HeaderAuthenticator from .paginators import LimitOffsetPaginator, PageNumberPaginator from .views import * # pylint:disable=W0401
flexible
{ "blob_id": "dab53d10958b36cf75ab53bf30f744b1ed8a09b6", "index": 6475, "step-1": "<mask token>\n", "step-2": "from .authenticators import CookieAuthenticator, HeaderAuthenticator\nfrom .paginators import LimitOffsetPaginator, PageNumberPaginator\nfrom .views import *\n", "step-3": "from .authenticators import CookieAuthenticator, HeaderAuthenticator\nfrom .paginators import LimitOffsetPaginator, PageNumberPaginator\nfrom .views import * # pylint:disable=W0401\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
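
The record above re-exports package names and silences pylint's wildcard-import warning (W0401). Below is a sketch of the same pattern with an explicit __all__ instead of the wildcard; only the names visible in the record are listed, and .views is omitted because its contents are not shown.

from .authenticators import CookieAuthenticator, HeaderAuthenticator
from .paginators import LimitOffsetPaginator, PageNumberPaginator

# An explicit export list makes `from package import *` predictable and
# keeps pylint quiet without a disable comment.
__all__ = [
    'CookieAuthenticator', 'HeaderAuthenticator',
    'LimitOffsetPaginator', 'PageNumberPaginator',
]
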
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> if n1 <= n2 and n2 <= n3: print(f'A ordem crescente é {n1}, {n2}, {n3}') elif n1 <= n3 and n3 <= n2: print(f'A ordem crescente é {n1}, {n3}, {n2}') elif n2 <= n1 and n1 <= n3: print(f'A ordem crescente é {n2}, {n1}, {n3}') elif n2 <= n3 and n3 <= n1: print(f'A ordem crescente é {n2}, {n3}, {n1}') elif n3 <= n1 and n1 <= n2: print(f'A ordem crescente é {n3}, {n1}, {n2}') elif n3 <= n2 and n2 <= n1: print(f'A ordem crescente é {n3}, {n2}, {n1}') <|reserved_special_token_1|> n1 = int(input('Digite o primeiro número: ')) n2 = int(input('Digite o segundo número: ')) n3 = int(input('Digite o terceiro número: ')) if n1 <= n2 and n2 <= n3: print(f'A ordem crescente é {n1}, {n2}, {n3}') elif n1 <= n3 and n3 <= n2: print(f'A ordem crescente é {n1}, {n3}, {n2}') elif n2 <= n1 and n1 <= n3: print(f'A ordem crescente é {n2}, {n1}, {n3}') elif n2 <= n3 and n3 <= n1: print(f'A ordem crescente é {n2}, {n3}, {n1}') elif n3 <= n1 and n1 <= n2: print(f'A ordem crescente é {n3}, {n1}, {n2}') elif n3 <= n2 and n2 <= n1: print(f'A ordem crescente é {n3}, {n2}, {n1}') <|reserved_special_token_1|> # 30 - Faça um programa que receba três números e mostre - os em ordem crescentes. n1 = int(input("Digite o primeiro número: ")) n2 = int(input("Digite o segundo número: ")) n3 = int(input("Digite o terceiro número: ")) if n1 <= n2 and n2 <= n3: print(f'A ordem crescente é {n1}, {n2}, {n3}') elif n1 <= n3 and n3 <= n2: print(f'A ordem crescente é {n1}, {n3}, {n2}') elif n2 <= n1 and n1 <= n3: print(f'A ordem crescente é {n2}, {n1}, {n3}') elif n2 <= n3 and n3 <= n1: print(f'A ordem crescente é {n2}, {n3}, {n1}') elif n3 <= n1 and n1 <= n2: print(f'A ordem crescente é {n3}, {n1}, {n2}') elif n3 <= n2 and n2 <= n1: print(f'A ordem crescente é {n3}, {n2}, {n1}')
flexible
{ "blob_id": "09712a397ad7915d9865b4aebf16606f85988f67", "index": 2737, "step-1": "<mask token>\n", "step-2": "<mask token>\nif n1 <= n2 and n2 <= n3:\n print(f'A ordem crescente é {n1}, {n2}, {n3}')\nelif n1 <= n3 and n3 <= n2:\n print(f'A ordem crescente é {n1}, {n3}, {n2}')\nelif n2 <= n1 and n1 <= n3:\n print(f'A ordem crescente é {n2}, {n1}, {n3}')\nelif n2 <= n3 and n3 <= n1:\n print(f'A ordem crescente é {n2}, {n3}, {n1}')\nelif n3 <= n1 and n1 <= n2:\n print(f'A ordem crescente é {n3}, {n1}, {n2}')\nelif n3 <= n2 and n2 <= n1:\n print(f'A ordem crescente é {n3}, {n2}, {n1}')\n", "step-3": "n1 = int(input('Digite o primeiro número: '))\nn2 = int(input('Digite o segundo número: '))\nn3 = int(input('Digite o terceiro número: '))\nif n1 <= n2 and n2 <= n3:\n print(f'A ordem crescente é {n1}, {n2}, {n3}')\nelif n1 <= n3 and n3 <= n2:\n print(f'A ordem crescente é {n1}, {n3}, {n2}')\nelif n2 <= n1 and n1 <= n3:\n print(f'A ordem crescente é {n2}, {n1}, {n3}')\nelif n2 <= n3 and n3 <= n1:\n print(f'A ordem crescente é {n2}, {n3}, {n1}')\nelif n3 <= n1 and n1 <= n2:\n print(f'A ordem crescente é {n3}, {n1}, {n2}')\nelif n3 <= n2 and n2 <= n1:\n print(f'A ordem crescente é {n3}, {n2}, {n1}')\n", "step-4": "# 30 - Faça um programa que receba três números e mostre - os em ordem crescentes.\n\nn1 = int(input(\"Digite o primeiro número: \"))\nn2 = int(input(\"Digite o segundo número: \"))\nn3 = int(input(\"Digite o terceiro número: \"))\n\nif n1 <= n2 and n2 <= n3:\n print(f'A ordem crescente é {n1}, {n2}, {n3}')\nelif n1 <= n3 and n3 <= n2:\n print(f'A ordem crescente é {n1}, {n3}, {n2}')\nelif n2 <= n1 and n1 <= n3:\n print(f'A ordem crescente é {n2}, {n1}, {n3}')\nelif n2 <= n3 and n3 <= n1:\n print(f'A ordem crescente é {n2}, {n3}, {n1}')\nelif n3 <= n1 and n1 <= n2:\n print(f'A ordem crescente é {n3}, {n1}, {n2}')\nelif n3 <= n2 and n2 <= n1:\n print(f'A ordem crescente é {n3}, {n2}, {n1}')", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
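
The exercise above (the Portuguese prompts read "enter the first/second/third number" and the output line "the ascending order is ...") enumerates all six orderings with chained elif branches. An equivalent, shorter solution using sorted(), keeping the original Portuguese strings:

n1 = int(input('Digite o primeiro número: '))
n2 = int(input('Digite o segundo número: '))
n3 = int(input('Digite o terceiro número: '))

# sorted() handles all six orderings (and ties) in one step
a, b, c = sorted((n1, n2, n3))
print(f'A ordem crescente é {a}, {b}, {c}')
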
<|reserved_special_token_0|> class Ratio_Execution_Time: <|reserved_special_token_0|> <|reserved_special_token_0|> def calculatePercentage(self, B, total, strr): E = {} for i in B: s = '' for j in range(i[0], i[1] + 1): s += '<p>' + strr[j] + '</p>' if total == 0: E[s] = 0.0 continue E[s] = B[i] / total * 100 return E <|reserved_special_token_0|> def tableMaking(self, B, title, clr, blr, H): stringff = ( '<table style="width:70%; border: 1px solid black;border-collapse: collapse;">' ) stringff += ( '<caption style="font:italic;font-size:20;font-weight:bold;color:#2b20a1 ">' + title + '</caption>') stringff += '<tr>' stringff += '<th style = "font-size:18;" > Block </th>' stringff += '<th style = "font-size:18;"> Time Percentage </th>' stringff += '</tr>' for line in B: if B[line] in H.values(): curColor = clr burColor = blr else: curColor = 'black' burColor = '#cfd2d2' stringff += '<tr>' stringff += ( '<td style="border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:' + burColor + ';color:' + curColor + '" >' + line + '</td>') stringff += ( '<td style="text-align: center;border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:' + burColor + ';color:' + curColor + '">' + str(B[line]) + ' %</td>') stringff += '</tr>' stringff += '</table>' return stringff <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Ratio_Execution_Time: <|reserved_special_token_0|> <|reserved_special_token_0|> def calculatePercentage(self, B, total, strr): E = {} for i in B: s = '' for j in range(i[0], i[1] + 1): s += '<p>' + strr[j] + '</p>' if total == 0: E[s] = 0.0 continue E[s] = B[i] / total * 100 return E def calculateHighestPercentageBlock(self, B): mx = -1 temp = {} for i in B: if B[i] > mx: mx = B[i] for i in B: if B[i] == mx: temp[i] = B[i] return temp def tableMaking(self, B, title, clr, blr, H): stringff = ( '<table style="width:70%; border: 1px solid black;border-collapse: collapse;">' ) stringff += ( '<caption style="font:italic;font-size:20;font-weight:bold;color:#2b20a1 ">' + title + '</caption>') stringff += '<tr>' stringff += '<th style = "font-size:18;" > Block </th>' stringff += '<th style = "font-size:18;"> Time Percentage </th>' stringff += '</tr>' for line in B: if B[line] in H.values(): curColor = clr burColor = blr else: curColor = 'black' burColor = '#cfd2d2' stringff += '<tr>' stringff += ( '<td style="border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:' + burColor + ';color:' + curColor + '" >' + line + '</td>') stringff += ( '<td style="text-align: center;border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:' + burColor + ';color:' + curColor + '">' + str(B[line]) + ' %</td>') stringff += '</tr>' stringff += '</table>' return stringff <|reserved_special_token_0|> def ShowTheTableOfPercentage(self, e1, e2, e3): Highest_ifElse = self.calculateHighestPercentageBlock(e1) Highest_Loop = self.calculateHighestPercentageBlock(e2) Highest_Function = self.calculateHighestPercentageBlock(e3) stringff = '<html><body><code>\n' if e1: stringff += self.tableMaking(e1, 'If-ElseIf-Else Operations', '#9d0235', '#7bc8ff', Highest_ifElse) if e2: stringff += self.tableMaking(e2, 'Loop Operations', 'red', '#5cffee', Highest_Loop) if e3: stringff += self.tableMaking(e3, 'Function Operation', 'blue', '#f4fc76', Highest_Function) stringff += '</code></body></html>\n' f = open('reportExecute.html', 
'w') f.write(stringff) f.close() <|reserved_special_token_1|> <|reserved_special_token_0|> class Ratio_Execution_Time: <|reserved_special_token_0|> def __init__(self): self.utility = Utility.Utility() print('Successfully Found Ration Corrssponding to Execution Time') def calculatePercentage(self, B, total, strr): E = {} for i in B: s = '' for j in range(i[0], i[1] + 1): s += '<p>' + strr[j] + '</p>' if total == 0: E[s] = 0.0 continue E[s] = B[i] / total * 100 return E def calculateHighestPercentageBlock(self, B): mx = -1 temp = {} for i in B: if B[i] > mx: mx = B[i] for i in B: if B[i] == mx: temp[i] = B[i] return temp def tableMaking(self, B, title, clr, blr, H): stringff = ( '<table style="width:70%; border: 1px solid black;border-collapse: collapse;">' ) stringff += ( '<caption style="font:italic;font-size:20;font-weight:bold;color:#2b20a1 ">' + title + '</caption>') stringff += '<tr>' stringff += '<th style = "font-size:18;" > Block </th>' stringff += '<th style = "font-size:18;"> Time Percentage </th>' stringff += '</tr>' for line in B: if B[line] in H.values(): curColor = clr burColor = blr else: curColor = 'black' burColor = '#cfd2d2' stringff += '<tr>' stringff += ( '<td style="border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:' + burColor + ';color:' + curColor + '" >' + line + '</td>') stringff += ( '<td style="text-align: center;border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:' + burColor + ';color:' + curColor + '">' + str(B[line]) + ' %</td>') stringff += '</tr>' stringff += '</table>' return stringff def blockWiseExecutionTimePercentage(self, total, E1, E2, E3, strr): E_ifElse = self.calculatePercentage(E1, total, strr) E_Loop = self.calculatePercentage(E2, total, strr) E_Function = self.calculatePercentage(E3, total, strr) self.ShowTheTableOfPercentage(E_ifElse, E_Loop, E_Function) def ShowTheTableOfPercentage(self, e1, e2, e3): Highest_ifElse = self.calculateHighestPercentageBlock(e1) Highest_Loop = self.calculateHighestPercentageBlock(e2) Highest_Function = self.calculateHighestPercentageBlock(e3) stringff = '<html><body><code>\n' if e1: stringff += self.tableMaking(e1, 'If-ElseIf-Else Operations', '#9d0235', '#7bc8ff', Highest_ifElse) if e2: stringff += self.tableMaking(e2, 'Loop Operations', 'red', '#5cffee', Highest_Loop) if e3: stringff += self.tableMaking(e3, 'Function Operation', 'blue', '#f4fc76', Highest_Function) stringff += '</code></body></html>\n' f = open('reportExecute.html', 'w') f.write(stringff) f.close() <|reserved_special_token_1|> <|reserved_special_token_0|> class Ratio_Execution_Time: utility = None def __init__(self): self.utility = Utility.Utility() print('Successfully Found Ration Corrssponding to Execution Time') def calculatePercentage(self, B, total, strr): E = {} for i in B: s = '' for j in range(i[0], i[1] + 1): s += '<p>' + strr[j] + '</p>' if total == 0: E[s] = 0.0 continue E[s] = B[i] / total * 100 return E def calculateHighestPercentageBlock(self, B): mx = -1 temp = {} for i in B: if B[i] > mx: mx = B[i] for i in B: if B[i] == mx: temp[i] = B[i] return temp def tableMaking(self, B, title, clr, blr, H): stringff = ( '<table style="width:70%; border: 1px solid black;border-collapse: collapse;">' ) stringff += ( '<caption style="font:italic;font-size:20;font-weight:bold;color:#2b20a1 ">' + title + '</caption>') stringff += '<tr>' stringff += '<th style = "font-size:18;" > Block </th>' stringff += '<th style = "font-size:18;"> Time Percentage </th>' stringff += 
'</tr>' for line in B: if B[line] in H.values(): curColor = clr burColor = blr else: curColor = 'black' burColor = '#cfd2d2' stringff += '<tr>' stringff += ( '<td style="border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:' + burColor + ';color:' + curColor + '" >' + line + '</td>') stringff += ( '<td style="text-align: center;border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:' + burColor + ';color:' + curColor + '">' + str(B[line]) + ' %</td>') stringff += '</tr>' stringff += '</table>' return stringff def blockWiseExecutionTimePercentage(self, total, E1, E2, E3, strr): E_ifElse = self.calculatePercentage(E1, total, strr) E_Loop = self.calculatePercentage(E2, total, strr) E_Function = self.calculatePercentage(E3, total, strr) self.ShowTheTableOfPercentage(E_ifElse, E_Loop, E_Function) def ShowTheTableOfPercentage(self, e1, e2, e3): Highest_ifElse = self.calculateHighestPercentageBlock(e1) Highest_Loop = self.calculateHighestPercentageBlock(e2) Highest_Function = self.calculateHighestPercentageBlock(e3) stringff = '<html><body><code>\n' if e1: stringff += self.tableMaking(e1, 'If-ElseIf-Else Operations', '#9d0235', '#7bc8ff', Highest_ifElse) if e2: stringff += self.tableMaking(e2, 'Loop Operations', 'red', '#5cffee', Highest_Loop) if e3: stringff += self.tableMaking(e3, 'Function Operation', 'blue', '#f4fc76', Highest_Function) stringff += '</code></body></html>\n' f = open('reportExecute.html', 'w') f.write(stringff) f.close() <|reserved_special_token_1|> import Utility import copy class Ratio_Execution_Time: utility = None def __init__(self): self.utility = Utility.Utility() print("Successfully Found Ration Corrssponding to Execution Time") def calculatePercentage(self,B,total,strr): E = {} for i in B: s='' for j in range(i[0],i[1]+1): s+= '<p>' + strr[j] + '</p>' if total == 0: E[s] = 0.0 continue E[s] = (B[i]/total) * 100 return E def calculateHighestPercentageBlock(self,B): mx = -1 temp = {} for i in B: if B[i] > mx: mx = B[i] for i in B: if B[i] == mx: temp[i] = B[i] return temp def tableMaking(self,B,title,clr,blr,H): stringff = '<table style="width:70%; border: 1px solid black;border-collapse: collapse;">' stringff+= '<caption style="font:italic;font-size:20;font-weight:bold;color:#2b20a1 ">' + title + '</caption>' stringff += '<tr>' stringff += '<th style = "font-size:18;" > Block </th>' stringff += '<th style = "font-size:18;"> Time Percentage </th>' stringff += '</tr>' for line in B: if B[line] in H.values(): curColor = clr burColor = blr else: curColor = 'black' burColor = '#cfd2d2' stringff += '<tr>' stringff += '<td style="border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:'+burColor+';color:'+curColor+'" >' + line + '</td>' stringff += '<td style="text-align: center;border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:'+burColor+';color:'+curColor+'">' + str(B[line]) + ' %</td>' stringff += '</tr>' stringff += '</table>' return stringff def blockWiseExecutionTimePercentage(self,total,E1,E2,E3,strr): E_ifElse = self.calculatePercentage(E1,total,strr) E_Loop = self.calculatePercentage(E2,total,strr) E_Function = self.calculatePercentage(E3,total,strr) self.ShowTheTableOfPercentage(E_ifElse,E_Loop,E_Function) def ShowTheTableOfPercentage(self,e1,e2,e3): #print(b1) Highest_ifElse = self.calculateHighestPercentageBlock(e1) Highest_Loop = self.calculateHighestPercentageBlock(e2) Highest_Function = 
self.calculateHighestPercentageBlock(e3) stringff = "<html><body><code>\n" if e1: stringff += self.tableMaking(e1,'If-ElseIf-Else Operations',"#9d0235","#7bc8ff",Highest_ifElse) if e2: stringff += self.tableMaking(e2,'Loop Operations',"red","#5cffee",Highest_Loop) if e3: stringff += self.tableMaking(e3,'Function Operation',"blue","#f4fc76",Highest_Function) stringff += "</code></body></html>\n" f = open("reportExecute.html", "w") f.write(stringff) f.close()
flexible
{ "blob_id": "150603004a4b194a7c08f1f23e37c613aa3b883a", "index": 6431, "step-1": "<mask token>\n\n\nclass Ratio_Execution_Time:\n <mask token>\n <mask token>\n\n def calculatePercentage(self, B, total, strr):\n E = {}\n for i in B:\n s = ''\n for j in range(i[0], i[1] + 1):\n s += '<p>' + strr[j] + '</p>'\n if total == 0:\n E[s] = 0.0\n continue\n E[s] = B[i] / total * 100\n return E\n <mask token>\n\n def tableMaking(self, B, title, clr, blr, H):\n stringff = (\n '<table style=\"width:70%; border: 1px solid black;border-collapse: collapse;\">'\n )\n stringff += (\n '<caption style=\"font:italic;font-size:20;font-weight:bold;color:#2b20a1 \">'\n + title + '</caption>')\n stringff += '<tr>'\n stringff += '<th style = \"font-size:18;\" > Block </th>'\n stringff += '<th style = \"font-size:18;\"> Time Percentage </th>'\n stringff += '</tr>'\n for line in B:\n if B[line] in H.values():\n curColor = clr\n burColor = blr\n else:\n curColor = 'black'\n burColor = '#cfd2d2'\n stringff += '<tr>'\n stringff += (\n '<td style=\"border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:'\n + burColor + ';color:' + curColor + '\" >' + line + '</td>')\n stringff += (\n '<td style=\"text-align: center;border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:'\n + burColor + ';color:' + curColor + '\">' + str(B[line]) +\n ' %</td>')\n stringff += '</tr>'\n stringff += '</table>'\n return stringff\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass Ratio_Execution_Time:\n <mask token>\n <mask token>\n\n def calculatePercentage(self, B, total, strr):\n E = {}\n for i in B:\n s = ''\n for j in range(i[0], i[1] + 1):\n s += '<p>' + strr[j] + '</p>'\n if total == 0:\n E[s] = 0.0\n continue\n E[s] = B[i] / total * 100\n return E\n\n def calculateHighestPercentageBlock(self, B):\n mx = -1\n temp = {}\n for i in B:\n if B[i] > mx:\n mx = B[i]\n for i in B:\n if B[i] == mx:\n temp[i] = B[i]\n return temp\n\n def tableMaking(self, B, title, clr, blr, H):\n stringff = (\n '<table style=\"width:70%; border: 1px solid black;border-collapse: collapse;\">'\n )\n stringff += (\n '<caption style=\"font:italic;font-size:20;font-weight:bold;color:#2b20a1 \">'\n + title + '</caption>')\n stringff += '<tr>'\n stringff += '<th style = \"font-size:18;\" > Block </th>'\n stringff += '<th style = \"font-size:18;\"> Time Percentage </th>'\n stringff += '</tr>'\n for line in B:\n if B[line] in H.values():\n curColor = clr\n burColor = blr\n else:\n curColor = 'black'\n burColor = '#cfd2d2'\n stringff += '<tr>'\n stringff += (\n '<td style=\"border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:'\n + burColor + ';color:' + curColor + '\" >' + line + '</td>')\n stringff += (\n '<td style=\"text-align: center;border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:'\n + burColor + ';color:' + curColor + '\">' + str(B[line]) +\n ' %</td>')\n stringff += '</tr>'\n stringff += '</table>'\n return stringff\n <mask token>\n\n def ShowTheTableOfPercentage(self, e1, e2, e3):\n Highest_ifElse = self.calculateHighestPercentageBlock(e1)\n Highest_Loop = self.calculateHighestPercentageBlock(e2)\n Highest_Function = self.calculateHighestPercentageBlock(e3)\n stringff = '<html><body><code>\\n'\n if e1:\n stringff += self.tableMaking(e1, 'If-ElseIf-Else Operations',\n '#9d0235', '#7bc8ff', Highest_ifElse)\n if e2:\n stringff += self.tableMaking(e2, 'Loop Operations', 'red',\n '#5cffee', 
Highest_Loop)\n if e3:\n stringff += self.tableMaking(e3, 'Function Operation', 'blue',\n '#f4fc76', Highest_Function)\n stringff += '</code></body></html>\\n'\n f = open('reportExecute.html', 'w')\n f.write(stringff)\n f.close()\n", "step-3": "<mask token>\n\n\nclass Ratio_Execution_Time:\n <mask token>\n\n def __init__(self):\n self.utility = Utility.Utility()\n print('Successfully Found Ration Corrssponding to Execution Time')\n\n def calculatePercentage(self, B, total, strr):\n E = {}\n for i in B:\n s = ''\n for j in range(i[0], i[1] + 1):\n s += '<p>' + strr[j] + '</p>'\n if total == 0:\n E[s] = 0.0\n continue\n E[s] = B[i] / total * 100\n return E\n\n def calculateHighestPercentageBlock(self, B):\n mx = -1\n temp = {}\n for i in B:\n if B[i] > mx:\n mx = B[i]\n for i in B:\n if B[i] == mx:\n temp[i] = B[i]\n return temp\n\n def tableMaking(self, B, title, clr, blr, H):\n stringff = (\n '<table style=\"width:70%; border: 1px solid black;border-collapse: collapse;\">'\n )\n stringff += (\n '<caption style=\"font:italic;font-size:20;font-weight:bold;color:#2b20a1 \">'\n + title + '</caption>')\n stringff += '<tr>'\n stringff += '<th style = \"font-size:18;\" > Block </th>'\n stringff += '<th style = \"font-size:18;\"> Time Percentage </th>'\n stringff += '</tr>'\n for line in B:\n if B[line] in H.values():\n curColor = clr\n burColor = blr\n else:\n curColor = 'black'\n burColor = '#cfd2d2'\n stringff += '<tr>'\n stringff += (\n '<td style=\"border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:'\n + burColor + ';color:' + curColor + '\" >' + line + '</td>')\n stringff += (\n '<td style=\"text-align: center;border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:'\n + burColor + ';color:' + curColor + '\">' + str(B[line]) +\n ' %</td>')\n stringff += '</tr>'\n stringff += '</table>'\n return stringff\n\n def blockWiseExecutionTimePercentage(self, total, E1, E2, E3, strr):\n E_ifElse = self.calculatePercentage(E1, total, strr)\n E_Loop = self.calculatePercentage(E2, total, strr)\n E_Function = self.calculatePercentage(E3, total, strr)\n self.ShowTheTableOfPercentage(E_ifElse, E_Loop, E_Function)\n\n def ShowTheTableOfPercentage(self, e1, e2, e3):\n Highest_ifElse = self.calculateHighestPercentageBlock(e1)\n Highest_Loop = self.calculateHighestPercentageBlock(e2)\n Highest_Function = self.calculateHighestPercentageBlock(e3)\n stringff = '<html><body><code>\\n'\n if e1:\n stringff += self.tableMaking(e1, 'If-ElseIf-Else Operations',\n '#9d0235', '#7bc8ff', Highest_ifElse)\n if e2:\n stringff += self.tableMaking(e2, 'Loop Operations', 'red',\n '#5cffee', Highest_Loop)\n if e3:\n stringff += self.tableMaking(e3, 'Function Operation', 'blue',\n '#f4fc76', Highest_Function)\n stringff += '</code></body></html>\\n'\n f = open('reportExecute.html', 'w')\n f.write(stringff)\n f.close()\n", "step-4": "<mask token>\n\n\nclass Ratio_Execution_Time:\n utility = None\n\n def __init__(self):\n self.utility = Utility.Utility()\n print('Successfully Found Ration Corrssponding to Execution Time')\n\n def calculatePercentage(self, B, total, strr):\n E = {}\n for i in B:\n s = ''\n for j in range(i[0], i[1] + 1):\n s += '<p>' + strr[j] + '</p>'\n if total == 0:\n E[s] = 0.0\n continue\n E[s] = B[i] / total * 100\n return E\n\n def calculateHighestPercentageBlock(self, B):\n mx = -1\n temp = {}\n for i in B:\n if B[i] > mx:\n mx = B[i]\n for i in B:\n if B[i] == mx:\n temp[i] = B[i]\n return temp\n\n def tableMaking(self, B, title, clr, 
blr, H):\n stringff = (\n '<table style=\"width:70%; border: 1px solid black;border-collapse: collapse;\">'\n )\n stringff += (\n '<caption style=\"font:italic;font-size:20;font-weight:bold;color:#2b20a1 \">'\n + title + '</caption>')\n stringff += '<tr>'\n stringff += '<th style = \"font-size:18;\" > Block </th>'\n stringff += '<th style = \"font-size:18;\"> Time Percentage </th>'\n stringff += '</tr>'\n for line in B:\n if B[line] in H.values():\n curColor = clr\n burColor = blr\n else:\n curColor = 'black'\n burColor = '#cfd2d2'\n stringff += '<tr>'\n stringff += (\n '<td style=\"border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:'\n + burColor + ';color:' + curColor + '\" >' + line + '</td>')\n stringff += (\n '<td style=\"text-align: center;border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:'\n + burColor + ';color:' + curColor + '\">' + str(B[line]) +\n ' %</td>')\n stringff += '</tr>'\n stringff += '</table>'\n return stringff\n\n def blockWiseExecutionTimePercentage(self, total, E1, E2, E3, strr):\n E_ifElse = self.calculatePercentage(E1, total, strr)\n E_Loop = self.calculatePercentage(E2, total, strr)\n E_Function = self.calculatePercentage(E3, total, strr)\n self.ShowTheTableOfPercentage(E_ifElse, E_Loop, E_Function)\n\n def ShowTheTableOfPercentage(self, e1, e2, e3):\n Highest_ifElse = self.calculateHighestPercentageBlock(e1)\n Highest_Loop = self.calculateHighestPercentageBlock(e2)\n Highest_Function = self.calculateHighestPercentageBlock(e3)\n stringff = '<html><body><code>\\n'\n if e1:\n stringff += self.tableMaking(e1, 'If-ElseIf-Else Operations',\n '#9d0235', '#7bc8ff', Highest_ifElse)\n if e2:\n stringff += self.tableMaking(e2, 'Loop Operations', 'red',\n '#5cffee', Highest_Loop)\n if e3:\n stringff += self.tableMaking(e3, 'Function Operation', 'blue',\n '#f4fc76', Highest_Function)\n stringff += '</code></body></html>\\n'\n f = open('reportExecute.html', 'w')\n f.write(stringff)\n f.close()\n", "step-5": "import Utility\nimport copy\n\nclass Ratio_Execution_Time:\n utility = None\n\n def __init__(self):\n\n self.utility = Utility.Utility() \n print(\"Successfully Found Ration Corrssponding to Execution Time\")\n\n def calculatePercentage(self,B,total,strr):\n \n E = {}\n\n for i in B:\n s=''\n for j in range(i[0],i[1]+1):\n s+= '<p>' + strr[j] + '</p>'\n if total == 0:\n E[s] = 0.0\n continue\n E[s] = (B[i]/total) * 100\n return E\n\n\n def calculateHighestPercentageBlock(self,B):\n mx = -1\n temp = {}\n for i in B:\n if B[i] > mx:\n mx = B[i]\n \n for i in B:\n if B[i] == mx:\n temp[i] = B[i]\n return temp\n \n def tableMaking(self,B,title,clr,blr,H):\n\n stringff = '<table style=\"width:70%; border: 1px solid black;border-collapse: collapse;\">'\n stringff+= '<caption style=\"font:italic;font-size:20;font-weight:bold;color:#2b20a1 \">' + title + '</caption>'\n \n stringff += '<tr>'\n stringff += '<th style = \"font-size:18;\" > Block </th>'\n stringff += '<th style = \"font-size:18;\"> Time Percentage </th>'\n stringff += '</tr>'\n\n for line in B:\n if B[line] in H.values():\n curColor = clr\n burColor = blr\n else:\n curColor = 'black'\n burColor = '#cfd2d2'\n \n stringff += '<tr>'\n stringff += '<td style=\"border: 1px solid black;font-family:verdana;font-size:16;font-weight:bold;background-color:'+burColor+';color:'+curColor+'\" >' + line + '</td>'\n stringff += '<td style=\"text-align: center;border: 1px solid 
black;font-family:verdana;font-size:16;font-weight:bold;background-color:'+burColor+';color:'+curColor+'\">' + str(B[line]) + ' %</td>'\n stringff += '</tr>'\n \n stringff += '</table>'\n \n return stringff\n\n def blockWiseExecutionTimePercentage(self,total,E1,E2,E3,strr):\n \n \n E_ifElse = self.calculatePercentage(E1,total,strr)\n E_Loop = self.calculatePercentage(E2,total,strr)\n E_Function = self.calculatePercentage(E3,total,strr)\n\n\n self.ShowTheTableOfPercentage(E_ifElse,E_Loop,E_Function)\n\n def ShowTheTableOfPercentage(self,e1,e2,e3):\n #print(b1)\n \n Highest_ifElse = self.calculateHighestPercentageBlock(e1)\n Highest_Loop = self.calculateHighestPercentageBlock(e2)\n Highest_Function = self.calculateHighestPercentageBlock(e3)\n \n \n stringff = \"<html><body><code>\\n\"\n if e1:\n stringff += self.tableMaking(e1,'If-ElseIf-Else Operations',\"#9d0235\",\"#7bc8ff\",Highest_ifElse) \n if e2:\n stringff += self.tableMaking(e2,'Loop Operations',\"red\",\"#5cffee\",Highest_Loop)\n if e3:\n stringff += self.tableMaking(e3,'Function Operation',\"blue\",\"#f4fc76\",Highest_Function)\n stringff += \"</code></body></html>\\n\"\n\n f = open(\"reportExecute.html\", \"w\")\n f.write(stringff)\n f.close()\n", "step-ids": [ 3, 5, 7, 8, 10 ] }
[ 3, 5, 7, 8, 10 ]
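
A hypothetical driver for the Ratio_Execution_Time record above; the input shapes are inferred from calculatePercentage(), which reads blocks as {(start_line, end_line): elapsed_time} dictionaries and indexes strr by line number, so this is an illustration rather than the author's harness (the Utility module the class imports is not shown in the record).

rt = Ratio_Execution_Time()     # requires the unshown Utility module to be importable
source_lines = ['total = 0', 'for i in range(10):', '    total += i', 'print(total)']
if_else_blocks = {}             # no conditional blocks in this toy program
loop_blocks = {(1, 2): 0.75}    # the for-loop spans lines 1-2 and took 0.75 s
function_blocks = {}
rt.blockWiseExecutionTimePercentage(1.0, if_else_blocks, loop_blocks,
                                    function_blocks, source_lines)
# writes reportExecute.html with one HTML table per non-empty block category
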
<|reserved_special_token_0|> def db_create(): cur.execute( """ create table if not exists Offshores_asset ( id INTEGER PRIMARY KEY AUTO_INCREMENT, asset_name VARCHAR(100), asset_link VARCHAR(200), slug CHAR(200), uuid CHAR(36) ); """ ) cur.execute( """ create table if not exists Offshores_offshore ( id INTEGER PRIMARY KEY AUTO_INCREMENT, off_name VARCHAR(50), off_jurisdiction VARCHAR(50), file VARCHAR(100), image VARCHAR(100), off_parent VARCHAR(50), off_link VARCHAR(300), slug VARCHAR(200), uuid CHAR(36) ); """ ) cur.execute( """ create table if not exists Offshores_beneficiary ( id INTEGER PRIMARY KEY AUTO_INCREMENT, ben_name VARCHAR(50), ben_lastname VARCHAR(100), ben_midname VARCHAR(30), ben_holding VARCHAR(70), ben_link VARCHAR(300), slug VARCHAR(200), uuid CHAR(36) ); """ ) cur.execute( """ create table if not exists Offshores_beneficiariesoffshores ( id INTEGER PRIMARY KEY AUTO_INCREMENT, share DECIMAL, rel_date DATE, source VARCHAR(150), link VARCHAR(200), beneficiary_id INT, offshore_id INT, uuid CHAR(36) ); """ ) conn.commit() print('tables created') <|reserved_special_token_0|> def db_insert_linktables(numrows): for x in xrange(0, numrows): num = str(x) bo_share = Decimal(x) bo_date = date(2016, randint(1, 12), randint(1, 28)) bo_source = 'source' + num bo_link = 'http://bo.ru/' + bo_source + '-' + num bo_ben = randint(1, numrows) bo_off = randint(1, numrows) bo_uuid = uuid.uuid4().hex oa_uuid = uuid.uuid4().hex oa_share = Decimal(x) oa_date = date(2016, randint(1, 12), randint(1, 28)) oa_source = 'source' + num oa_link = 'http://oa.ru/' + oa_source + '-' + num oa_asset = randint(1, numrows) oa_off = randint(1, numrows) ab_uuid = uuid.uuid4().hex ab_share = Decimal(x) ab_date = date(2016, randint(1, 12), randint(1, 28)) ab_source = 'source' + num ab_link = 'http://ab.ru/' + oa_source + '-' + num ab_asset = randint(1, numrows) ab_ben = randint(1, numrows) try: cur.execute( 'INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off, bo_uuid)) cur.execute( 'INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset, oa_off)) cur.execute( 'INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset, ab_ben)) conn.commit() except Exception as e: print('Exception 1:', type(e), e) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> filterwarnings('ignore', category=db.Warning) <|reserved_special_token_0|> def db_create(): cur.execute( """ create table if not exists Offshores_asset ( id INTEGER PRIMARY KEY AUTO_INCREMENT, asset_name VARCHAR(100), asset_link VARCHAR(200), slug CHAR(200), uuid CHAR(36) ); """ ) cur.execute( """ create table if not exists Offshores_offshore ( id INTEGER PRIMARY KEY AUTO_INCREMENT, off_name VARCHAR(50), off_jurisdiction VARCHAR(50), file VARCHAR(100), image VARCHAR(100), off_parent VARCHAR(50), off_link VARCHAR(300), slug VARCHAR(200), uuid CHAR(36) ); """ ) cur.execute( """ create table if not exists Offshores_beneficiary ( id INTEGER PRIMARY KEY AUTO_INCREMENT, ben_name VARCHAR(50), ben_lastname VARCHAR(100), ben_midname VARCHAR(30), ben_holding VARCHAR(70), ben_link VARCHAR(300), slug VARCHAR(200), uuid CHAR(36) ); 
""" ) cur.execute( """ create table if not exists Offshores_beneficiariesoffshores ( id INTEGER PRIMARY KEY AUTO_INCREMENT, share DECIMAL, rel_date DATE, source VARCHAR(150), link VARCHAR(200), beneficiary_id INT, offshore_id INT, uuid CHAR(36) ); """ ) conn.commit() print('tables created') def db_insert(numrows): for x in xrange(0, numrows): num = str(x) a_name = 'Asset' + num a_link = 'http://somelink/' + a_name a_uuid = uuid.uuid4().hex a_slug = a_name + '-' + str(a_uuid) o_name = 'Offshore' + num o_jur = 'Cyprus' o_file = 'offshores/favicon.xcf' o_image = 'offshores/favicon.png' o_prnt = 'parent' + num o_link = 'http://' + o_name + '-' + num + '.com' o_uuid = uuid.uuid4().hex o_slug = o_name + str(o_uuid) b_name = 'Michael' + num b_lname = 'Prohorov' + num b_mname = 'Dmitrievich' + num b_holding = 'Onexim' + num b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com' b_uuid = uuid.uuid4().hex b_slug = b_lname + str(b_uuid) try: cur.execute( 'INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)' , (a_name, a_link, a_slug, a_uuid)) cur.execute( 'INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)' , (o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug, o_uuid)) cur.execute( 'INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid) ) conn.commit() except Exception as e: print('Exception 1:', type(e), e) def db_insert_linktables(numrows): for x in xrange(0, numrows): num = str(x) bo_share = Decimal(x) bo_date = date(2016, randint(1, 12), randint(1, 28)) bo_source = 'source' + num bo_link = 'http://bo.ru/' + bo_source + '-' + num bo_ben = randint(1, numrows) bo_off = randint(1, numrows) bo_uuid = uuid.uuid4().hex oa_uuid = uuid.uuid4().hex oa_share = Decimal(x) oa_date = date(2016, randint(1, 12), randint(1, 28)) oa_source = 'source' + num oa_link = 'http://oa.ru/' + oa_source + '-' + num oa_asset = randint(1, numrows) oa_off = randint(1, numrows) ab_uuid = uuid.uuid4().hex ab_share = Decimal(x) ab_date = date(2016, randint(1, 12), randint(1, 28)) ab_source = 'source' + num ab_link = 'http://ab.ru/' + oa_source + '-' + num ab_asset = randint(1, numrows) ab_ben = randint(1, numrows) try: cur.execute( 'INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off, bo_uuid)) cur.execute( 'INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset, oa_off)) cur.execute( 'INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset, ab_ben)) conn.commit() except Exception as e: print('Exception 1:', type(e), e) <|reserved_special_token_0|> try: conn = db.connect('localhost', 'root', '0013Tau', 'ved2') cur = conn.cursor() db_insert(numrows) db_insert_linktables(numrows) except Exception as e: print('Exception 0:', type(e), e) except: db.rollback() conn.commit() conn.close() print('DB fullfilled') <|reserved_special_token_1|> __author__ = 'julia sayapina' <|reserved_special_token_0|> filterwarnings('ignore', 
category=db.Warning) db_name = 'ved3' def db_create(): cur.execute( """ create table if not exists Offshores_asset ( id INTEGER PRIMARY KEY AUTO_INCREMENT, asset_name VARCHAR(100), asset_link VARCHAR(200), slug CHAR(200), uuid CHAR(36) ); """ ) cur.execute( """ create table if not exists Offshores_offshore ( id INTEGER PRIMARY KEY AUTO_INCREMENT, off_name VARCHAR(50), off_jurisdiction VARCHAR(50), file VARCHAR(100), image VARCHAR(100), off_parent VARCHAR(50), off_link VARCHAR(300), slug VARCHAR(200), uuid CHAR(36) ); """ ) cur.execute( """ create table if not exists Offshores_beneficiary ( id INTEGER PRIMARY KEY AUTO_INCREMENT, ben_name VARCHAR(50), ben_lastname VARCHAR(100), ben_midname VARCHAR(30), ben_holding VARCHAR(70), ben_link VARCHAR(300), slug VARCHAR(200), uuid CHAR(36) ); """ ) cur.execute( """ create table if not exists Offshores_beneficiariesoffshores ( id INTEGER PRIMARY KEY AUTO_INCREMENT, share DECIMAL, rel_date DATE, source VARCHAR(150), link VARCHAR(200), beneficiary_id INT, offshore_id INT, uuid CHAR(36) ); """ ) conn.commit() print('tables created') def db_insert(numrows): for x in xrange(0, numrows): num = str(x) a_name = 'Asset' + num a_link = 'http://somelink/' + a_name a_uuid = uuid.uuid4().hex a_slug = a_name + '-' + str(a_uuid) o_name = 'Offshore' + num o_jur = 'Cyprus' o_file = 'offshores/favicon.xcf' o_image = 'offshores/favicon.png' o_prnt = 'parent' + num o_link = 'http://' + o_name + '-' + num + '.com' o_uuid = uuid.uuid4().hex o_slug = o_name + str(o_uuid) b_name = 'Michael' + num b_lname = 'Prohorov' + num b_mname = 'Dmitrievich' + num b_holding = 'Onexim' + num b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com' b_uuid = uuid.uuid4().hex b_slug = b_lname + str(b_uuid) try: cur.execute( 'INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)' , (a_name, a_link, a_slug, a_uuid)) cur.execute( 'INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)' , (o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug, o_uuid)) cur.execute( 'INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid) ) conn.commit() except Exception as e: print('Exception 1:', type(e), e) def db_insert_linktables(numrows): for x in xrange(0, numrows): num = str(x) bo_share = Decimal(x) bo_date = date(2016, randint(1, 12), randint(1, 28)) bo_source = 'source' + num bo_link = 'http://bo.ru/' + bo_source + '-' + num bo_ben = randint(1, numrows) bo_off = randint(1, numrows) bo_uuid = uuid.uuid4().hex oa_uuid = uuid.uuid4().hex oa_share = Decimal(x) oa_date = date(2016, randint(1, 12), randint(1, 28)) oa_source = 'source' + num oa_link = 'http://oa.ru/' + oa_source + '-' + num oa_asset = randint(1, numrows) oa_off = randint(1, numrows) ab_uuid = uuid.uuid4().hex ab_share = Decimal(x) ab_date = date(2016, randint(1, 12), randint(1, 28)) ab_source = 'source' + num ab_link = 'http://ab.ru/' + oa_source + '-' + num ab_asset = randint(1, numrows) ab_ben = randint(1, numrows) try: cur.execute( 'INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off, bo_uuid)) cur.execute( 'INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES 
(%s,%s,%s,%s,%s,%s,%s)' , (oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset, oa_off)) cur.execute( 'INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset, ab_ben)) conn.commit() except Exception as e: print('Exception 1:', type(e), e) numrows = 20 try: conn = db.connect('localhost', 'root', '0013Tau', 'ved2') cur = conn.cursor() db_insert(numrows) db_insert_linktables(numrows) except Exception as e: print('Exception 0:', type(e), e) except: db.rollback() conn.commit() conn.close() print('DB fullfilled') <|reserved_special_token_1|> __author__ = 'julia sayapina' from warnings import filterwarnings import MySQLdb as db import os import shutil import os import sys from subprocess import Popen, PIPE, STDOUT import uuid from decimal import * from datetime import date from random import randint filterwarnings('ignore', category=db.Warning) db_name = 'ved3' def db_create(): cur.execute( """ create table if not exists Offshores_asset ( id INTEGER PRIMARY KEY AUTO_INCREMENT, asset_name VARCHAR(100), asset_link VARCHAR(200), slug CHAR(200), uuid CHAR(36) ); """ ) cur.execute( """ create table if not exists Offshores_offshore ( id INTEGER PRIMARY KEY AUTO_INCREMENT, off_name VARCHAR(50), off_jurisdiction VARCHAR(50), file VARCHAR(100), image VARCHAR(100), off_parent VARCHAR(50), off_link VARCHAR(300), slug VARCHAR(200), uuid CHAR(36) ); """ ) cur.execute( """ create table if not exists Offshores_beneficiary ( id INTEGER PRIMARY KEY AUTO_INCREMENT, ben_name VARCHAR(50), ben_lastname VARCHAR(100), ben_midname VARCHAR(30), ben_holding VARCHAR(70), ben_link VARCHAR(300), slug VARCHAR(200), uuid CHAR(36) ); """ ) cur.execute( """ create table if not exists Offshores_beneficiariesoffshores ( id INTEGER PRIMARY KEY AUTO_INCREMENT, share DECIMAL, rel_date DATE, source VARCHAR(150), link VARCHAR(200), beneficiary_id INT, offshore_id INT, uuid CHAR(36) ); """ ) conn.commit() print('tables created') def db_insert(numrows): for x in xrange(0, numrows): num = str(x) a_name = 'Asset' + num a_link = 'http://somelink/' + a_name a_uuid = uuid.uuid4().hex a_slug = a_name + '-' + str(a_uuid) o_name = 'Offshore' + num o_jur = 'Cyprus' o_file = 'offshores/favicon.xcf' o_image = 'offshores/favicon.png' o_prnt = 'parent' + num o_link = 'http://' + o_name + '-' + num + '.com' o_uuid = uuid.uuid4().hex o_slug = o_name + str(o_uuid) b_name = 'Michael' + num b_lname = 'Prohorov' + num b_mname = 'Dmitrievich' + num b_holding = 'Onexim' + num b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com' b_uuid = uuid.uuid4().hex b_slug = b_lname + str(b_uuid) try: cur.execute( 'INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)' , (a_name, a_link, a_slug, a_uuid)) cur.execute( 'INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)' , (o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug, o_uuid)) cur.execute( 'INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid) ) conn.commit() except Exception as e: print('Exception 1:', type(e), e) def db_insert_linktables(numrows): for x in xrange(0, numrows): num = str(x) bo_share = Decimal(x) bo_date = date(2016, randint(1, 12), randint(1, 28)) bo_source = 'source' + 
num bo_link = 'http://bo.ru/' + bo_source + '-' + num bo_ben = randint(1, numrows) bo_off = randint(1, numrows) bo_uuid = uuid.uuid4().hex oa_uuid = uuid.uuid4().hex oa_share = Decimal(x) oa_date = date(2016, randint(1, 12), randint(1, 28)) oa_source = 'source' + num oa_link = 'http://oa.ru/' + oa_source + '-' + num oa_asset = randint(1, numrows) oa_off = randint(1, numrows) ab_uuid = uuid.uuid4().hex ab_share = Decimal(x) ab_date = date(2016, randint(1, 12), randint(1, 28)) ab_source = 'source' + num ab_link = 'http://ab.ru/' + oa_source + '-' + num ab_asset = randint(1, numrows) ab_ben = randint(1, numrows) try: cur.execute( 'INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off, bo_uuid)) cur.execute( 'INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset, oa_off)) cur.execute( 'INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)' , (ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset, ab_ben)) conn.commit() except Exception as e: print('Exception 1:', type(e), e) numrows = 20 try: conn = db.connect('localhost', 'root', '0013Tau', 'ved2') cur = conn.cursor() db_insert(numrows) db_insert_linktables(numrows) except Exception as e: print('Exception 0:', type(e), e) except: db.rollback() conn.commit() conn.close() print('DB fullfilled') <|reserved_special_token_1|> #coding: utf-8 #/usr/bin/python __author__='julia sayapina' ### Use db_reset.py to drop the db and recreate it, then use 'migrate' --> 'createsuperuser' --> 'makemigrations' --> 'migrate' as usual. ### This will create the DB structure as it has to be from django ### Then use test_db_fullfill.py to fullfill the db with test data. 
if you don't need to create tables manually, don't use db_create()

from warnings import filterwarnings
import MySQLdb as db
import os
import shutil
import sys
from subprocess import Popen, PIPE, STDOUT
import uuid
from decimal import *
from datetime import date
from random import randint


# Create (or open) the database and build the schema
filterwarnings('ignore', category = db.Warning)
db_name = 'ved3'

def db_create(): # creates tables manually (doesn't create AO and AB tables)
    cur.execute("""
    create table if not exists Offshores_asset (
    id INTEGER PRIMARY KEY AUTO_INCREMENT,
    asset_name VARCHAR(100),
    asset_link VARCHAR(200),
    slug CHAR(200),
    uuid CHAR(36)
    );
    """)
    cur.execute("""
    create table if not exists Offshores_offshore (
    id INTEGER PRIMARY KEY AUTO_INCREMENT,
    off_name VARCHAR(50),
    off_jurisdiction VARCHAR(50),
    file VARCHAR(100),
    image VARCHAR(100),
    off_parent VARCHAR(50),
    off_link VARCHAR(300),
    slug VARCHAR(200),
    uuid CHAR(36)
    );
    """)
    cur.execute("""
    create table if not exists Offshores_beneficiary (
    id INTEGER PRIMARY KEY AUTO_INCREMENT,
    ben_name VARCHAR(50),
    ben_lastname VARCHAR(100),
    ben_midname VARCHAR(30),
    ben_holding VARCHAR(70),
    ben_link VARCHAR(300),
    slug VARCHAR(200),
    uuid CHAR(36)
    );
    """)
    cur.execute("""
    create table if not exists Offshores_beneficiariesoffshores (
    id INTEGER PRIMARY KEY AUTO_INCREMENT,
    share DECIMAL,
    rel_date DATE,
    source VARCHAR(150),
    link VARCHAR(200),
    beneficiary_id INT,
    offshore_id INT,
    uuid CHAR(36)
    );
    """)
    conn.commit()
    print('tables created')


def db_insert(numrows):
    # inserts test data into the three base tables
    for x in xrange(0, numrows): # build one row of test data per iteration
        num = str(x)
        a_name = 'Asset' + num
        a_link = 'http://somelink/' + a_name
        a_uuid = uuid.uuid4().hex
        a_slug = a_name + '-' + str(a_uuid)
        o_name = 'Offshore' + num
        o_jur = 'Cyprus'
        o_file = 'offshores/favicon.xcf'
        o_image = 'offshores/favicon.png'
        o_prnt = 'parent' + num
        o_link = 'http://' + o_name + '-' + num + '.com'
        o_uuid = uuid.uuid4().hex
        o_slug = o_name + str(o_uuid)
        b_name = 'Michael' + num
        b_lname = 'Prohorov' + num
        b_mname = 'Dmitrievich' + num
        b_holding = 'Onexim' + num
        b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com'
        b_uuid = uuid.uuid4().hex
        b_slug = b_lname + str(b_uuid)

        try: # inserts test data via SQL; still produces weird errors for Beneficiariesoffshores, cause unknown
            cur.execute("""INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)""", (a_name, a_link, a_slug, a_uuid))
            cur.execute("""INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)""", (o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug, o_uuid))
            cur.execute("""INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)""", (b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid))
            conn.commit()
        except Exception as e:
            print("Exception 1:", type(e), e)

def db_insert_linktables(numrows):
    # inserts test data into the linking tables; must be called after db_insert(),
    # because the base tables have to exist before links between them can be
    # generated from random ids
    for x in xrange(0, numrows):
        num = str(x)
        bo_share = Decimal(x)
        bo_date = date(2016, randint(1, 12), randint(1, 28))
        bo_source = 'source' + num
        bo_link = 'http://bo.ru/' + bo_source + '-' + num
        bo_ben = randint(1, numrows)
        bo_off = randint(1, numrows)
        bo_uuid = uuid.uuid4().hex
        oa_uuid = uuid.uuid4().hex
        oa_share = Decimal(x)
        oa_date = date(2016, randint(1, 12), randint(1, 28))
        oa_source = 'source' + num
        oa_link = 'http://oa.ru/' + oa_source + '-' + num
        oa_asset = randint(1, numrows)
        oa_off = randint(1, numrows)
        ab_uuid = uuid.uuid4().hex
        ab_share = Decimal(x)
        ab_date = date(2016, randint(1, 12), randint(1, 28))
        ab_source = 'source' + num
        ab_link = 'http://ab.ru/' + oa_source + '-' + num
        ab_asset = randint(1, numrows)
        ab_ben = randint(1, numrows)

        try: # inserts test data via SQL; still produces weird errors for Beneficiariesoffshores, cause unknown
            cur.execute("""INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)""", (bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off, bo_uuid))
            cur.execute("""INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)""", (oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset, oa_off))
            cur.execute("""INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)""", (ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset, ab_ben))
            conn.commit()
        except Exception as e:
            print("Exception 1:", type(e), e)

numrows = 20
try:
    conn = db.connect("localhost", "root", "0013Tau", "ved2")
    cur = conn.cursor()
    # db_create() # <-- to create tables manually uncomment this
    db_insert(numrows)
    db_insert_linktables(numrows) # IMPORTANT! has to be called ONLY after db_insert()!
except Exception as e:
    print("Exception 0:", type(e), e)
    conn.rollback() # roll back the failed transaction (was db.rollback(), which does not exist on the module)

conn.commit()
conn.close()
print('DB filled')


# def main():
#     if len(sys.argv) != 2:
#         print('usage: python3 db_fullfill.py [numrows]')
#         sys.exit(1)

#     if len(sys.argv) == 2:
#         numrows = sys.argv[1]
#     else:
#         numrows = 15
#     print (numrows)
#     return numrows

# if __name__ == '__main__':
#     main()
flexible
{ "blob_id": "3240310653930662dcc4d79646b1a75c2994cda7", "index": 9063, "step-1": "<mask token>\n\n\ndef db_create():\n cur.execute(\n \"\"\"\n create table if not exists Offshores_asset (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n asset_name VARCHAR(100),\n asset_link VARCHAR(200),\n slug CHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_offshore (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n off_name VARCHAR(50),\n off_jurisdiction VARCHAR(50),\n file VARCHAR(100),\n image VARCHAR(100),\n off_parent VARCHAR(50),\n off_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiary (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n ben_name VARCHAR(50),\n ben_lastname VARCHAR(100),\n ben_midname VARCHAR(30),\n ben_holding VARCHAR(70),\n ben_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiariesoffshores (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n share DECIMAL,\n rel_date DATE,\n source VARCHAR(150),\n link VARCHAR(200),\n beneficiary_id INT,\n offshore_id INT,\n uuid CHAR(36)\n );\n \"\"\"\n )\n conn.commit()\n print('tables created')\n\n\n<mask token>\n\n\ndef db_insert_linktables(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n bo_share = Decimal(x)\n bo_date = date(2016, randint(1, 12), randint(1, 28))\n bo_source = 'source' + num\n bo_link = 'http://bo.ru/' + bo_source + '-' + num\n bo_ben = randint(1, numrows)\n bo_off = randint(1, numrows)\n bo_uuid = uuid.uuid4().hex\n oa_uuid = uuid.uuid4().hex\n oa_share = Decimal(x)\n oa_date = date(2016, randint(1, 12), randint(1, 28))\n oa_source = 'source' + num\n oa_link = 'http://oa.ru/' + oa_source + '-' + num\n oa_asset = randint(1, numrows)\n oa_off = randint(1, numrows)\n ab_uuid = uuid.uuid4().hex\n ab_share = Decimal(x)\n ab_date = date(2016, randint(1, 12), randint(1, 28))\n ab_source = 'source' + num\n ab_link = 'http://ab.ru/' + oa_source + '-' + num\n ab_asset = randint(1, numrows)\n ab_ben = randint(1, numrows)\n try:\n cur.execute(\n 'INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off,\n bo_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset,\n oa_off))\n cur.execute(\n 'INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset,\n ab_ben))\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\n<mask token>\n", "step-2": "<mask token>\nfilterwarnings('ignore', category=db.Warning)\n<mask token>\n\n\ndef db_create():\n cur.execute(\n \"\"\"\n create table if not exists Offshores_asset (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n asset_name VARCHAR(100),\n asset_link VARCHAR(200),\n slug CHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_offshore (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n off_name VARCHAR(50),\n off_jurisdiction VARCHAR(50),\n file VARCHAR(100),\n image VARCHAR(100),\n off_parent VARCHAR(50),\n off_link VARCHAR(300),\n slug VARCHAR(200),\n uuid 
CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiary (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n ben_name VARCHAR(50),\n ben_lastname VARCHAR(100),\n ben_midname VARCHAR(30),\n ben_holding VARCHAR(70),\n ben_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiariesoffshores (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n share DECIMAL,\n rel_date DATE,\n source VARCHAR(150),\n link VARCHAR(200),\n beneficiary_id INT,\n offshore_id INT,\n uuid CHAR(36)\n );\n \"\"\"\n )\n conn.commit()\n print('tables created')\n\n\ndef db_insert(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n a_name = 'Asset' + num\n a_link = 'http://somelink/' + a_name\n a_uuid = uuid.uuid4().hex\n a_slug = a_name + '-' + str(a_uuid)\n o_name = 'Offshore' + num\n o_jur = 'Cyprus'\n o_file = 'offshores/favicon.xcf'\n o_image = 'offshores/favicon.png'\n o_prnt = 'parent' + num\n o_link = 'http://' + o_name + '-' + num + '.com'\n o_uuid = uuid.uuid4().hex\n o_slug = o_name + str(o_uuid)\n b_name = 'Michael' + num\n b_lname = 'Prohorov' + num\n b_mname = 'Dmitrievich' + num\n b_holding = 'Onexim' + num\n b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com'\n b_uuid = uuid.uuid4().hex\n b_slug = b_lname + str(b_uuid)\n try:\n cur.execute(\n 'INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)'\n , (a_name, a_link, a_slug, a_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)'\n , (o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug,\n o_uuid))\n cur.execute(\n 'INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid)\n )\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\ndef db_insert_linktables(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n bo_share = Decimal(x)\n bo_date = date(2016, randint(1, 12), randint(1, 28))\n bo_source = 'source' + num\n bo_link = 'http://bo.ru/' + bo_source + '-' + num\n bo_ben = randint(1, numrows)\n bo_off = randint(1, numrows)\n bo_uuid = uuid.uuid4().hex\n oa_uuid = uuid.uuid4().hex\n oa_share = Decimal(x)\n oa_date = date(2016, randint(1, 12), randint(1, 28))\n oa_source = 'source' + num\n oa_link = 'http://oa.ru/' + oa_source + '-' + num\n oa_asset = randint(1, numrows)\n oa_off = randint(1, numrows)\n ab_uuid = uuid.uuid4().hex\n ab_share = Decimal(x)\n ab_date = date(2016, randint(1, 12), randint(1, 28))\n ab_source = 'source' + num\n ab_link = 'http://ab.ru/' + oa_source + '-' + num\n ab_asset = randint(1, numrows)\n ab_ben = randint(1, numrows)\n try:\n cur.execute(\n 'INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off,\n bo_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset,\n oa_off))\n cur.execute(\n 'INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (ab_uuid, ab_share, ab_date, ab_source, 
ab_link, ab_asset,\n ab_ben))\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\n<mask token>\ntry:\n conn = db.connect('localhost', 'root', '0013Tau', 'ved2')\n cur = conn.cursor()\n db_insert(numrows)\n db_insert_linktables(numrows)\nexcept Exception as e:\n print('Exception 0:', type(e), e)\nexcept:\n db.rollback()\nconn.commit()\nconn.close()\nprint('DB fullfilled')\n", "step-3": "__author__ = 'julia sayapina'\n<mask token>\nfilterwarnings('ignore', category=db.Warning)\ndb_name = 'ved3'\n\n\ndef db_create():\n cur.execute(\n \"\"\"\n create table if not exists Offshores_asset (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n asset_name VARCHAR(100),\n asset_link VARCHAR(200),\n slug CHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_offshore (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n off_name VARCHAR(50),\n off_jurisdiction VARCHAR(50),\n file VARCHAR(100),\n image VARCHAR(100),\n off_parent VARCHAR(50),\n off_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiary (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n ben_name VARCHAR(50),\n ben_lastname VARCHAR(100),\n ben_midname VARCHAR(30),\n ben_holding VARCHAR(70),\n ben_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiariesoffshores (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n share DECIMAL,\n rel_date DATE,\n source VARCHAR(150),\n link VARCHAR(200),\n beneficiary_id INT,\n offshore_id INT,\n uuid CHAR(36)\n );\n \"\"\"\n )\n conn.commit()\n print('tables created')\n\n\ndef db_insert(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n a_name = 'Asset' + num\n a_link = 'http://somelink/' + a_name\n a_uuid = uuid.uuid4().hex\n a_slug = a_name + '-' + str(a_uuid)\n o_name = 'Offshore' + num\n o_jur = 'Cyprus'\n o_file = 'offshores/favicon.xcf'\n o_image = 'offshores/favicon.png'\n o_prnt = 'parent' + num\n o_link = 'http://' + o_name + '-' + num + '.com'\n o_uuid = uuid.uuid4().hex\n o_slug = o_name + str(o_uuid)\n b_name = 'Michael' + num\n b_lname = 'Prohorov' + num\n b_mname = 'Dmitrievich' + num\n b_holding = 'Onexim' + num\n b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com'\n b_uuid = uuid.uuid4().hex\n b_slug = b_lname + str(b_uuid)\n try:\n cur.execute(\n 'INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)'\n , (a_name, a_link, a_slug, a_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)'\n , (o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug,\n o_uuid))\n cur.execute(\n 'INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid)\n )\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\ndef db_insert_linktables(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n bo_share = Decimal(x)\n bo_date = date(2016, randint(1, 12), randint(1, 28))\n bo_source = 'source' + num\n bo_link = 'http://bo.ru/' + bo_source + '-' + num\n bo_ben = randint(1, numrows)\n bo_off = randint(1, numrows)\n bo_uuid = uuid.uuid4().hex\n oa_uuid = uuid.uuid4().hex\n oa_share = Decimal(x)\n oa_date = date(2016, randint(1, 12), randint(1, 
28))\n oa_source = 'source' + num\n oa_link = 'http://oa.ru/' + oa_source + '-' + num\n oa_asset = randint(1, numrows)\n oa_off = randint(1, numrows)\n ab_uuid = uuid.uuid4().hex\n ab_share = Decimal(x)\n ab_date = date(2016, randint(1, 12), randint(1, 28))\n ab_source = 'source' + num\n ab_link = 'http://ab.ru/' + oa_source + '-' + num\n ab_asset = randint(1, numrows)\n ab_ben = randint(1, numrows)\n try:\n cur.execute(\n 'INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off,\n bo_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset,\n oa_off))\n cur.execute(\n 'INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset,\n ab_ben))\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\nnumrows = 20\ntry:\n conn = db.connect('localhost', 'root', '0013Tau', 'ved2')\n cur = conn.cursor()\n db_insert(numrows)\n db_insert_linktables(numrows)\nexcept Exception as e:\n print('Exception 0:', type(e), e)\nexcept:\n db.rollback()\nconn.commit()\nconn.close()\nprint('DB fullfilled')\n", "step-4": "__author__ = 'julia sayapina'\nfrom warnings import filterwarnings\nimport MySQLdb as db\nimport os\nimport shutil\nimport os\nimport sys\nfrom subprocess import Popen, PIPE, STDOUT\nimport uuid\nfrom decimal import *\nfrom datetime import date\nfrom random import randint\nfilterwarnings('ignore', category=db.Warning)\ndb_name = 'ved3'\n\n\ndef db_create():\n cur.execute(\n \"\"\"\n create table if not exists Offshores_asset (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n asset_name VARCHAR(100),\n asset_link VARCHAR(200),\n slug CHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_offshore (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n off_name VARCHAR(50),\n off_jurisdiction VARCHAR(50),\n file VARCHAR(100),\n image VARCHAR(100),\n off_parent VARCHAR(50),\n off_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiary (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n ben_name VARCHAR(50),\n ben_lastname VARCHAR(100),\n ben_midname VARCHAR(30),\n ben_holding VARCHAR(70),\n ben_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiariesoffshores (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n share DECIMAL,\n rel_date DATE,\n source VARCHAR(150),\n link VARCHAR(200),\n beneficiary_id INT,\n offshore_id INT,\n uuid CHAR(36)\n );\n \"\"\"\n )\n conn.commit()\n print('tables created')\n\n\ndef db_insert(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n a_name = 'Asset' + num\n a_link = 'http://somelink/' + a_name\n a_uuid = uuid.uuid4().hex\n a_slug = a_name + '-' + str(a_uuid)\n o_name = 'Offshore' + num\n o_jur = 'Cyprus'\n o_file = 'offshores/favicon.xcf'\n o_image = 'offshores/favicon.png'\n o_prnt = 'parent' + num\n o_link = 'http://' + o_name + '-' + num + '.com'\n o_uuid = uuid.uuid4().hex\n o_slug = o_name + str(o_uuid)\n b_name = 'Michael' + num\n b_lname = 'Prohorov' + num\n b_mname = 'Dmitrievich' + 
num\n b_holding = 'Onexim' + num\n b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com'\n b_uuid = uuid.uuid4().hex\n b_slug = b_lname + str(b_uuid)\n try:\n cur.execute(\n 'INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)'\n , (a_name, a_link, a_slug, a_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)'\n , (o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug,\n o_uuid))\n cur.execute(\n 'INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid)\n )\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\ndef db_insert_linktables(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n bo_share = Decimal(x)\n bo_date = date(2016, randint(1, 12), randint(1, 28))\n bo_source = 'source' + num\n bo_link = 'http://bo.ru/' + bo_source + '-' + num\n bo_ben = randint(1, numrows)\n bo_off = randint(1, numrows)\n bo_uuid = uuid.uuid4().hex\n oa_uuid = uuid.uuid4().hex\n oa_share = Decimal(x)\n oa_date = date(2016, randint(1, 12), randint(1, 28))\n oa_source = 'source' + num\n oa_link = 'http://oa.ru/' + oa_source + '-' + num\n oa_asset = randint(1, numrows)\n oa_off = randint(1, numrows)\n ab_uuid = uuid.uuid4().hex\n ab_share = Decimal(x)\n ab_date = date(2016, randint(1, 12), randint(1, 28))\n ab_source = 'source' + num\n ab_link = 'http://ab.ru/' + oa_source + '-' + num\n ab_asset = randint(1, numrows)\n ab_ben = randint(1, numrows)\n try:\n cur.execute(\n 'INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off,\n bo_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset,\n oa_off))\n cur.execute(\n 'INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset,\n ab_ben))\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\nnumrows = 20\ntry:\n conn = db.connect('localhost', 'root', '0013Tau', 'ved2')\n cur = conn.cursor()\n db_insert(numrows)\n db_insert_linktables(numrows)\nexcept Exception as e:\n print('Exception 0:', type(e), e)\nexcept:\n db.rollback()\nconn.commit()\nconn.close()\nprint('DB fullfilled')\n", "step-5": "#coding: utf-8\n#/usr/bin/python\n__author__='julia sayapina'\n\n### Use db_reset.py to drop the db and recreate it, then use 'migrate' --> 'createsuperuser' --> 'makemigrations' --> 'migrate' as usual.\n### This will create the DB structure as it has to be from django\n### Then use test_db_fullfill.py to fullfill the db with test data. 
if you don't need to create tables manually don't use db_create()\n\nfrom warnings import filterwarnings\nimport MySQLdb as db\nimport os\nimport shutil\nimport os\nimport sys \nfrom subprocess import Popen, PIPE, STDOUT\nimport uuid\nfrom decimal import *\nfrom datetime import date\nfrom random import randint\n\n\n# Создание или открытие файла базы данных и создание схемы\nfilterwarnings('ignore', category = db.Warning)\ndb_name = 'ved3'\n\ndef db_create(): # creates tables manually (doesn't create AO and AB tables)\n cur.execute(\"\"\"\n create table if not exists Offshores_asset (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n asset_name VARCHAR(100),\n asset_link VARCHAR(200),\n slug CHAR(200),\n uuid CHAR(36)\n );\n \"\"\")\n cur.execute(\"\"\"\n create table if not exists Offshores_offshore (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n off_name VARCHAR(50),\n off_jurisdiction VARCHAR(50),\n file VARCHAR(100),\n image VARCHAR(100),\n off_parent VARCHAR(50),\n off_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\")\n cur.execute(\"\"\"\n create table if not exists Offshores_beneficiary (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n ben_name VARCHAR(50),\n ben_lastname VARCHAR(100),\n ben_midname VARCHAR(30),\n ben_holding VARCHAR(70),\n ben_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\")\n cur.execute(\"\"\"\n create table if not exists Offshores_beneficiariesoffshores (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n share DECIMAL,\n rel_date DATE,\n source VARCHAR(150),\n link VARCHAR(200),\n beneficiary_id INT,\n offshore_id INT,\n uuid CHAR(36)\n );\n \"\"\")\n conn.commit()\n print('tables created')\n\n\ndef db_insert(numrows):\n # inserts test data into tables\n for x in xrange(0,numrows): #creates test data for tables\n num = str(x)\n a_name = 'Asset' + num\n a_link = 'http://somelink/'+a_name\n a_uuid = uuid.uuid4().hex\n a_slug = a_name + '-' + str(a_uuid)\n o_name = 'Offshore' + num\n o_jur = 'Cyprus'\n o_file = 'offshores/favicon.xcf'\n o_image = 'offshores/favicon.png'\n o_prnt = 'parent' + num\n o_link = 'http://' + o_name + '-' + num + '.com'\n o_uuid = uuid.uuid4().hex\n o_slug = o_name + str(o_uuid)\n b_name = 'Michael' + num\n b_lname = 'Prohorov' + num\n b_mname = 'Dmitrievich' + num\n b_holding = 'Onexim' + num\n b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com'\n b_uuid = uuid.uuid4().hex\n b_slug = b_lname + str(b_uuid)\n\n try: #inserts test data to tables via SQL; still produces wierd errors for Beneficiariesoffshores idk why\n cur.execute(\"\"\"INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)\"\"\",(a_name, a_link, a_slug, a_uuid))\n cur.execute(\"\"\"INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)\"\"\",(o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug, o_uuid))\n cur.execute(\"\"\"INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)\"\"\",(b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid))\n conn.commit()\n except Exception as e:\n print (\"Exception 1:\", type(e), e)\n\ndef db_insert_linktables(numrows):\n # inserts test data into linking tables; has to be called after db_insert(), as first basic tables need to be generated to produce links between\n # them using random numbers\n for x in xrange(0,numrows): #creates test data for tables\n num = str(x)\n bo_share = Decimal(x)\n bo_date 
= date(2016, randint(1, 12), randint(1, 28))\n bo_source = 'source' + num\n bo_link = 'http://bo.ru/' + bo_source + '-' + num\n bo_ben = randint(1, numrows)\n bo_off = randint(1, numrows)\n bo_uuid = uuid.uuid4().hex\n oa_uuid = uuid.uuid4().hex\n oa_share = Decimal(x)\n oa_date = date(2016, randint(1, 12), randint(1, 28))\n oa_source = 'source' + num\n oa_link = 'http://oa.ru/' + oa_source + '-' + num\n oa_asset = randint(1, numrows)\n oa_off = randint(1, numrows)\n ab_uuid = uuid.uuid4().hex\n ab_share = Decimal(x)\n ab_date = date(2016, randint(1, 12), randint(1, 28))\n ab_source = 'source' + num\n ab_link = 'http://ab.ru/' + oa_source + '-' + num\n ab_asset = randint(1, numrows)\n ab_ben = randint(1, numrows)\n\n try: #inserts test data to tables via SQL; still produces wierd errors for Beneficiariesoffshores idk why\n cur.execute(\"\"\"INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)\"\"\",(bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off, bo_uuid))\n cur.execute(\"\"\"INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)\"\"\",(oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset, oa_off))\n cur.execute(\"\"\"INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)\"\"\",(ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset, ab_ben))\n conn.commit()\n except Exception as e:\n print (\"Exception 1:\", type(e), e)\n\nnumrows = 20\ntry:\n conn = db.connect(\"localhost\",\"root\",\"0013Tau\",\"ved2\" )\n cur = conn.cursor()\n # db_create() #<-- to create tables manually uncomment this\n db_insert(numrows)\n db_insert_linktables(numrows) # IMPORTANT! has to be called ONLY after db_insert()!\n\nexcept Exception as e:\n print (\"Exception 0:\", type(e), e)\n\nexcept: db.rollback() \n\n\nconn.commit()\nconn.close()\nprint ('DB fullfilled')\n\n\n# def main():\n# if len(sys.argv) != 2:\n# print('usage: python3 db_fullfill.py [numrows]')\n# sys.exit(1)\n\n# if len(sys.argv) == 2: \n# numrows = sys.argv[1]\n\n# else:\n# numrows = 15\n# print (numrows)\n\n# return numrows\n# sys.exit(1)\n\n# if __name__ == '__main__':\n# main()\n\n", "step-ids": [ 2, 4, 5, 6, 7 ] }
[ 2, 4, 5, 6, 7 ]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def presses(phrase):
    # Multi-tap keypad layout: each string lists the characters produced by
    # repeatedly pressing that key, in press order (e.g. 'A' = 1 press of key 2).
    keyboard = ['1', 'ABC2', 'DEF3', 'GHI4', 'JKL5', 'MNO6', 'PQRS7',
        'TUV8', 'WXYZ9', '*', ' 0', '#']
    amount = 0
    for lttr in phrase.upper():
        for key in keyboard:
            try:
                # presses needed = position of the character on the key + 1
                i = key.index(lttr)
                i += 1
                amount += i
            except ValueError:
                pass  # character is not on this key; try the next one
    return amount
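# A brief usage sketch (added for illustration; the example values are
# assumptions, not part of the original function). 'L' and 'O' each sit
# third on their keys, so 'LOL' should cost 3 + 3 + 3 presses.
if __name__ == '__main__':
    assert presses('LOL') == 9
    assert presses('E') == 2   # second character on key 3
    assert presses(' ') == 1   # space is the first press of key 0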
flexible
{ "blob_id": "c2e9a93861080be616b6d833a9343f1a2f018a0b", "index": 5039, "step-1": "<mask token>\n", "step-2": "def presses(phrase):\n keyboard = ['1', 'ABC2', 'DEF3', 'GHI4', 'JKL5', 'MNO6', 'PQRS7',\n 'TUV8', 'WXYZ9', '*', ' 0', '#']\n amount = 0\n for lttr in phrase.upper():\n for key in keyboard:\n try:\n i = key.index(lttr)\n i += 1\n amount += i\n except ValueError:\n pass\n return amount\n", "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0, 1 ] }
[ 0, 1 ]
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Implements the web-service calls of the command,
such as REST APIs and other network-related methods.
"""
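# A minimal sketch of what such a call could look like. This is an
# illustrative assumption: the `requests` dependency and the `fetch_json`
# helper below are not part of the original module.
import requests


def fetch_json(url, params=None, timeout=10):
    """Issue a GET request and return the decoded JSON body."""
    response = requests.get(url, params=params, timeout=timeout)
    response.raise_for_status()  # raise on 4xx/5xx instead of failing silently
    return response.json()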
normal
{ "blob_id": "48369e1ed826a9a50c0fd9f63b7cc10b8225ce2b", "index": 8760, "step-1": "<mask token>\n", "step-2": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\nImplements the webservice calls of the command\nlike rest apis or other network related methods\n\"\"\"", "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0, 1 ] }
[ 0, 1 ]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('x_train_data shape:', x_train_data.shape)
print(x_train_data.shape[0], 'train samples')
print(x_test_data.shape[0], 'test samples')
print(x_train_data[0])
<|reserved_special_token_0|>
print(y_train_data[i])
plt.title('Train Data')
plt.imshow(x_train_data[i], cmap='binary')
plt.show()
<|reserved_special_token_0|>
print(y_test_data[i])
plt.title('Test Data')
plt.imshow(x_test_data[i], cmap='binary')
plt.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
mnist = keras.datasets.mnist
(x_train_data, y_train_data), (x_test_data, y_test_data) = mnist.load_data()
print('x_train_data shape:', x_train_data.shape)
print(x_train_data.shape[0], 'train samples')
print(x_test_data.shape[0], 'test samples')
print(x_train_data[0])
i = random.randint(0, x_train_data.shape[0] - 1)
print(y_train_data[i])
plt.title('Train Data')
plt.imshow(x_train_data[i], cmap='binary')
plt.show()
i = random.randint(0, x_test_data.shape[0] - 1)
print(y_test_data[i])
plt.title('Test Data')
plt.imshow(x_test_data[i], cmap='binary')
plt.show()
<|reserved_special_token_1|>
import random
import matplotlib.pyplot as plt
import tensorflow.keras as keras
mnist = keras.datasets.mnist
(x_train_data, y_train_data), (x_test_data, y_test_data) = mnist.load_data()
print('x_train_data shape:', x_train_data.shape)
print(x_train_data.shape[0], 'train samples')
print(x_test_data.shape[0], 'test samples')
print(x_train_data[0])
i = random.randint(0, x_train_data.shape[0] - 1)
print(y_train_data[i])
plt.title('Train Data')
plt.imshow(x_train_data[i], cmap='binary')
plt.show()
i = random.randint(0, x_test_data.shape[0] - 1)
print(y_test_data[i])
plt.title('Test Data')
plt.imshow(x_test_data[i], cmap='binary')
plt.show()
<|reserved_special_token_1|>
import random
import matplotlib.pyplot as plt
import tensorflow.keras as keras

mnist = keras.datasets.mnist  # MNIST dataset

# Load data, already split into train & test sets
# x: the handwritten digit images, y: the digit labels
(x_train_data, y_train_data), (x_test_data, y_test_data) = mnist.load_data()

print('x_train_data shape:', x_train_data.shape)
print(x_train_data.shape[0], 'train samples')
print(x_test_data.shape[0], 'test samples')

# Print one train sample
print(x_train_data[0])

# Plot one random train sample (randint is inclusive, so cap at size - 1)
i = random.randint(0, x_train_data.shape[0] - 1)
print(y_train_data[i])
plt.title('Train Data')
plt.imshow(x_train_data[i], cmap='binary')
plt.show()

# Plot one random test sample
i = random.randint(0, x_test_data.shape[0] - 1)
print(y_test_data[i])
plt.title('Test Data')
plt.imshow(x_test_data[i], cmap='binary')
plt.show()
flexible
{ "blob_id": "b4eb62413fb8069d8f11c34fbfecc742cd79bdb8", "index": 7057, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint('x_train_data shape:', x_train_data.shape)\nprint(x_train_data.shape[0], 'train samples')\nprint(x_test_data.shape[0], 'test samples')\nprint(x_train_data[0])\n<mask token>\nprint(y_train_data[i])\nplt.title('Train Data')\nplt.imshow(x_train_data[i], cmap='binary')\nplt.show()\n<mask token>\nprint(y_test_data[i])\nplt.title('Test Data')\nplt.imshow(x_test_data[i], cmap='binary')\nplt.show()\n", "step-3": "<mask token>\nmnist = keras.datasets.mnist\n(x_train_data, y_train_data), (x_test_data, y_test_data) = mnist.load_data()\nprint('x_train_data shape:', x_train_data.shape)\nprint(x_train_data.shape[0], 'train samples')\nprint(x_test_data.shape[0], 'test samples')\nprint(x_train_data[0])\ni = random.randint(0, x_train_data.shape[0])\nprint(y_train_data[i])\nplt.title('Train Data')\nplt.imshow(x_train_data[i], cmap='binary')\nplt.show()\ni = random.randint(0, x_test_data.shape[0])\nprint(y_test_data[i])\nplt.title('Test Data')\nplt.imshow(x_test_data[i], cmap='binary')\nplt.show()\n", "step-4": "import random\nimport matplotlib.pyplot as plt\nimport tensorflow.keras as keras\nmnist = keras.datasets.mnist\n(x_train_data, y_train_data), (x_test_data, y_test_data) = mnist.load_data()\nprint('x_train_data shape:', x_train_data.shape)\nprint(x_train_data.shape[0], 'train samples')\nprint(x_test_data.shape[0], 'test samples')\nprint(x_train_data[0])\ni = random.randint(0, x_train_data.shape[0])\nprint(y_train_data[i])\nplt.title('Train Data')\nplt.imshow(x_train_data[i], cmap='binary')\nplt.show()\ni = random.randint(0, x_test_data.shape[0])\nprint(y_test_data[i])\nplt.title('Test Data')\nplt.imshow(x_test_data[i], cmap='binary')\nplt.show()\n", "step-5": "import random\r\nimport matplotlib.pyplot as plt\r\nimport tensorflow.keras as keras\r\n\r\nmnist = keras.datasets.mnist # MNIST datasets\r\n\r\n# Load Data and splitted to train & test sets\r\n# x : the handwritten data, y : the number\r\n(x_train_data, y_train_data), (x_test_data, y_test_data) = mnist.load_data()\r\n\r\nprint('x_train_data shape:', x_train_data.shape)\r\nprint(x_train_data.shape[0], 'train samples')\r\nprint(x_test_data.shape[0], 'test samples')\r\n\r\n# Print one train_data\r\nprint(x_train_data[0])\r\n\r\n# Plot one train_data\r\ni = random.randint(0,x_train_data.shape[0])\r\nprint(y_train_data[i])\r\nplt.title('Train Data')\r\nplt.imshow(x_train_data[i],cmap='binary')\r\nplt.show()\r\n\r\n# Plot one test_data\r\ni = random.randint(0,x_test_data.shape[0])\r\nprint(y_test_data[i])\r\nplt.title('Test Data')\r\nplt.imshow(x_test_data[i],cmap='binary')\r\nplt.show()\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
include_rules = [ "+apps", "+components/live_caption", "+services/device/public", "+components/device_reauth", # Enable remote assistance on Chrome OS "+remoting/host", ] specific_include_rules = { ".*test.*": [ "+chrome/browser/ui/views/frame", "+components/captive_portal", "+components/web_package", "+skia/public/mojom/bitmap.mojom.h", ], "tls_socket_unittest\.cc": [ "+services/network/network_context.h", ], "tcp_socket_unittest\.cc": [ "+services/network/network_context.h", ], "udp_socket_unittest\.cc": [ "+services/network/network_context.h", ], }
normal
{ "blob_id": "728af8b07bc391b496709e54926f3f1f49897176", "index": 1992, "step-1": "<mask token>\n", "step-2": "include_rules = ['+apps', '+components/live_caption',\n '+services/device/public', '+components/device_reauth', '+remoting/host']\nspecific_include_rules = {'.*test.*': ['+chrome/browser/ui/views/frame',\n '+components/captive_portal', '+components/web_package',\n '+skia/public/mojom/bitmap.mojom.h'], 'tls_socket_unittest\\\\.cc': [\n '+services/network/network_context.h'], 'tcp_socket_unittest\\\\.cc': [\n '+services/network/network_context.h'], 'udp_socket_unittest\\\\.cc': [\n '+services/network/network_context.h']}\n", "step-3": "include_rules = [\n \"+apps\",\n \"+components/live_caption\",\n \"+services/device/public\",\n \"+components/device_reauth\",\n # Enable remote assistance on Chrome OS\n \"+remoting/host\",\n]\n\nspecific_include_rules = {\n \".*test.*\": [\n \"+chrome/browser/ui/views/frame\",\n \"+components/captive_portal\",\n \"+components/web_package\",\n \"+skia/public/mojom/bitmap.mojom.h\",\n ],\n \"tls_socket_unittest\\.cc\": [\n \"+services/network/network_context.h\",\n ],\n \"tcp_socket_unittest\\.cc\": [\n \"+services/network/network_context.h\",\n ],\n \"udp_socket_unittest\\.cc\": [\n \"+services/network/network_context.h\",\n ],\n}\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
# -*- coding: utf-8 -*- # Generated by Django 1.9.2 on 2016-02-10 11:06 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('album', '0013_auto_20160210_1609'), ] operations = [ migrations.CreateModel( name='Albumname', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=100)), ('album_text', models.CharField(blank=True, max_length=1000, null=True)), ('album_no', models.IntegerField(blank=True, null=True)), ('lineup', models.ManyToManyField(to='album.Shilpi')), ('prokashok', models.ManyToManyField(to='album.Prokashok')), ], ), ]
normal
{ "blob_id": "a727502063bd0cd959fdde201832d37b29b4db70", "index": 4304, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('album', '0013_auto_20160210_1609')]\n operations = [migrations.CreateModel(name='Albumname', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('name', models.CharField(max_length=\n 100)), ('album_text', models.CharField(blank=True, max_length=1000,\n null=True)), ('album_no', models.IntegerField(blank=True, null=True\n )), ('lineup', models.ManyToManyField(to='album.Shilpi')), (\n 'prokashok', models.ManyToManyField(to='album.Prokashok'))])]\n", "step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('album', '0013_auto_20160210_1609')]\n operations = [migrations.CreateModel(name='Albumname', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('name', models.CharField(max_length=\n 100)), ('album_text', models.CharField(blank=True, max_length=1000,\n null=True)), ('album_no', models.IntegerField(blank=True, null=True\n )), ('lineup', models.ManyToManyField(to='album.Shilpi')), (\n 'prokashok', models.ManyToManyField(to='album.Prokashok'))])]\n", "step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.2 on 2016-02-10 11:06\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('album', '0013_auto_20160210_1609'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Albumname',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=100)),\n ('album_text', models.CharField(blank=True, max_length=1000, null=True)),\n ('album_no', models.IntegerField(blank=True, null=True)),\n ('lineup', models.ManyToManyField(to='album.Shilpi')),\n ('prokashok', models.ManyToManyField(to='album.Prokashok')),\n ],\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# Generated by Django 3.2.3 on 2021-07-24 12:14 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('profiles', '0018_userprofile_membership_fee_pending'), ] operations = [ migrations.RenameField( model_name='userprofile', old_name='membership_fee_pending', new_name='membership_fee_paid', ), ]
normal
{ "blob_id": "464980a2f17aeedfa08548d6c4e247f8c047e2cb", "index": 5743, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('profiles', '0018_userprofile_membership_fee_pending')]\n operations = [migrations.RenameField(model_name='userprofile', old_name\n ='membership_fee_pending', new_name='membership_fee_paid')]\n", "step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('profiles', '0018_userprofile_membership_fee_pending')]\n operations = [migrations.RenameField(model_name='userprofile', old_name\n ='membership_fee_pending', new_name='membership_fee_paid')]\n", "step-5": "# Generated by Django 3.2.3 on 2021-07-24 12:14\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('profiles', '0018_userprofile_membership_fee_pending'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='userprofile',\n old_name='membership_fee_pending',\n new_name='membership_fee_paid',\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
""" Contain meta-data related functions: * accessing integration schema: fields, values, constraints on inputs/queries * tracking fields available * tracking known (input field) values """ # coding=utf-8 __author__ = 'vidma'
normal
{ "blob_id": "abdedad2c2b42b54cdba0e61e095ba3df0783b81", "index": 1172, "step-1": "<mask token>\n", "step-2": "<mask token>\n__author__ = 'vidma'\n", "step-3": "\"\"\"\nContain meta-data related functions:\n\n* accessing integration schema: fields, values, constraints on inputs/queries\n* tracking fields available\n* tracking known (input field) values\n\"\"\"\n# coding=utf-8\n__author__ = 'vidma'\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> class Hypergraph: <|reserved_special_token_0|> def __init__(self): self.nodes = dict() self.hypernodes = dict() self.adj_out = dict() self.adj_in = dict() <|reserved_special_token_0|> def add_hypernode(self, name: Hashable, nodes: Collection[Hashable], info: Optional[dict]=None) ->None: self.hypernodes[name] = set(nodes) self.adj_out[name] = dict() self.adj_in[name] = dict() <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def outgoing_hypernodes(self, u) ->Dict[Hashable, List]: assert u in self.adj_out return dict((v, lst) for v, lst in self.adj_out[u].items() if v in self.hypernodes) <|reserved_special_token_0|> def incoming_hypernodes(self, v: Hashable) ->Dict[Hashable, List]: assert v in self.adj_in return dict((u, lst) for u, lst in self.adj_in[v].items() if u in self.hypernodes) <|reserved_special_token_0|> def indegrees(self, from_hyper: bool=False, to_hyper: bool=False) ->List[ int]: return [sum([len(l) for u, l in self.adj_in[v].items() if u in ( self.hypernodes if from_hyper else self.nodes)]) for v in (self .hypernodes if to_hyper else self.nodes)] def reciprocity_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C1, c1, c2, C1->c2, c2->c1) as in paper """ motifs = [] for C1, c1_nodes in self.hypernodes.items(): for c1 in c1_nodes: motifs += [(C1, c1, c2, e1, e2) for c2 in self.adj_in[c1] if c2 in self.nodes and c2 in self.adj_out[C1] for e1 in self.adj_out[C1][c2] for e2 in self.adj_out[c2][c1]] return motifs <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def outgoing_triad_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C1, C2, C3, C1->C2, C1->C3) as in paper """ motifs = [] for C1 in self.hypernodes: outgoing = list(self.adj_out[C1].keys()) motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools. 
combinations(outgoing, 2) for e1 in self.adj_out[C1][C2] for e2 in self.adj_out[C1][C3]] return motifs <|reserved_special_token_1|> <|reserved_special_token_0|> class Hypergraph: <|reserved_special_token_0|> def __init__(self): self.nodes = dict() self.hypernodes = dict() self.adj_out = dict() self.adj_in = dict() <|reserved_special_token_0|> def add_hypernode(self, name: Hashable, nodes: Collection[Hashable], info: Optional[dict]=None) ->None: self.hypernodes[name] = set(nodes) self.adj_out[name] = dict() self.adj_in[name] = dict() def add_edge(self, u: Hashable, v: Hashable, info: Optional[dict]=None ) ->None: assert u in self.nodes or u in self.hypernodes assert v in self.nodes or v in self.hypernodes if u in self.hypernodes and v in self.hypernodes: assert len(info.keys()) > 0 if v not in self.adj_out[u]: self.adj_out[u][v] = [] if u not in self.adj_in[v]: self.adj_in[v][u] = [] if info is None: info = dict() self.adj_out[u][v].append(info) self.adj_in[v][u].append(info) <|reserved_special_token_0|> <|reserved_special_token_0|> def outgoing_hypernodes(self, u) ->Dict[Hashable, List]: assert u in self.adj_out return dict((v, lst) for v, lst in self.adj_out[u].items() if v in self.hypernodes) def incoming_nodes(self, v: Hashable) ->Dict[Hashable, List]: assert v in self.adj_in return dict((u, lst) for u, lst in self.adj_in[v].items() if u in self.nodes) def incoming_hypernodes(self, v: Hashable) ->Dict[Hashable, List]: assert v in self.adj_in return dict((u, lst) for u, lst in self.adj_in[v].items() if u in self.hypernodes) <|reserved_special_token_0|> def indegrees(self, from_hyper: bool=False, to_hyper: bool=False) ->List[ int]: return [sum([len(l) for u, l in self.adj_in[v].items() if u in ( self.hypernodes if from_hyper else self.nodes)]) for v in (self .hypernodes if to_hyper else self.nodes)] def reciprocity_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C1, c1, c2, C1->c2, c2->c1) as in paper """ motifs = [] for C1, c1_nodes in self.hypernodes.items(): for c1 in c1_nodes: motifs += [(C1, c1, c2, e1, e2) for c2 in self.adj_in[c1] if c2 in self.nodes and c2 in self.adj_out[C1] for e1 in self.adj_out[C1][c2] for e2 in self.adj_out[c2][c1]] return motifs <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def outgoing_triad_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C1, C2, C3, C1->C2, C1->C3) as in paper """ motifs = [] for C1 in self.hypernodes: outgoing = list(self.adj_out[C1].keys()) motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools. 
combinations(outgoing, 2) for e1 in self.adj_out[C1][C2] for e2 in self.adj_out[C1][C3]] return motifs <|reserved_special_token_1|> <|reserved_special_token_0|> class Hypergraph: <|reserved_special_token_0|> def __init__(self): self.nodes = dict() self.hypernodes = dict() self.adj_out = dict() self.adj_in = dict() <|reserved_special_token_0|> def add_hypernode(self, name: Hashable, nodes: Collection[Hashable], info: Optional[dict]=None) ->None: self.hypernodes[name] = set(nodes) self.adj_out[name] = dict() self.adj_in[name] = dict() def add_edge(self, u: Hashable, v: Hashable, info: Optional[dict]=None ) ->None: assert u in self.nodes or u in self.hypernodes assert v in self.nodes or v in self.hypernodes if u in self.hypernodes and v in self.hypernodes: assert len(info.keys()) > 0 if v not in self.adj_out[u]: self.adj_out[u][v] = [] if u not in self.adj_in[v]: self.adj_in[v][u] = [] if info is None: info = dict() self.adj_out[u][v].append(info) self.adj_in[v][u].append(info) def edges(self) ->Dict[Tuple[Hashable, Hashable], List]: return dict(((u, v), lst) for u, d in self.adj_out.items() for v, lst in d.items()) def outgoing_nodes(self, u: Hashable) ->Dict[Hashable, List]: assert u in self.adj_out return dict((v, lst) for v, lst in self.adj_out[u].items() if v in self.nodes) def outgoing_hypernodes(self, u) ->Dict[Hashable, List]: assert u in self.adj_out return dict((v, lst) for v, lst in self.adj_out[u].items() if v in self.hypernodes) def incoming_nodes(self, v: Hashable) ->Dict[Hashable, List]: assert v in self.adj_in return dict((u, lst) for u, lst in self.adj_in[v].items() if u in self.nodes) def incoming_hypernodes(self, v: Hashable) ->Dict[Hashable, List]: assert v in self.adj_in return dict((u, lst) for u, lst in self.adj_in[v].items() if u in self.hypernodes) <|reserved_special_token_0|> def indegrees(self, from_hyper: bool=False, to_hyper: bool=False) ->List[ int]: return [sum([len(l) for u, l in self.adj_in[v].items() if u in ( self.hypernodes if from_hyper else self.nodes)]) for v in (self .hypernodes if to_hyper else self.nodes)] def reciprocity_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C1, c1, c2, C1->c2, c2->c1) as in paper """ motifs = [] for C1, c1_nodes in self.hypernodes.items(): for c1 in c1_nodes: motifs += [(C1, c1, c2, e1, e2) for c2 in self.adj_in[c1] if c2 in self.nodes and c2 in self.adj_out[C1] for e1 in self.adj_out[C1][c2] for e2 in self.adj_out[c2][c1]] return motifs def external_reciprocity_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C3, c2, c1, C3->c2, c2->c1) as in paper """ motifs = [] for C3 in self.hypernodes: for c2 in self.adj_out[C3]: if c2 in self.nodes: motifs += [(C3, c2, c1, e1, e2) for c1 in set(self. adj_out[c2].keys()) - self.hypernodes[C3] if c1 in self.nodes for e1 in self.adj_out[C3][c2] for e2 in self.adj_out[c2][c1]] return motifs def dyadic_interaction_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C1, C2, C1->C2, C2->C1) as in paper """ motifs = [] for C1 in self.hypernodes: motifs += [(C1, C2, e1, e2) for C2 in self.adj_out[C1] if C2 in self.hypernodes and C1 in self.adj_out[C2] for e1 in self. adj_out[C1][C2] for e2 in self.adj_out[C2][C1]] return motifs def incoming_triad_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C1, C2, C3, C2->C1, C3->C1) as in paper """ motifs = [] for C1 in self.hypernodes: incoming = list(self.adj_in[C1].keys()) motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools. 
combinations(incoming, 2) for e1 in self.adj_out[C2][C1] for e2 in self.adj_out[C3][C1]] return motifs def outgoing_triad_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C1, C2, C3, C1->C2, C1->C3) as in paper """ motifs = [] for C1 in self.hypernodes: outgoing = list(self.adj_out[C1].keys()) motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools. combinations(outgoing, 2) for e1 in self.adj_out[C1][C2] for e2 in self.adj_out[C1][C3]] return motifs <|reserved_special_token_1|> <|reserved_special_token_0|> class Hypergraph: <|reserved_special_token_0|> def __init__(self): self.nodes = dict() self.hypernodes = dict() self.adj_out = dict() self.adj_in = dict() def add_node(self, u: Hashable, info: Optional[Dict]=None) ->None: self.nodes[u] = info if info is not None else dict() self.adj_out[u] = dict() self.adj_in[u] = dict() def add_hypernode(self, name: Hashable, nodes: Collection[Hashable], info: Optional[dict]=None) ->None: self.hypernodes[name] = set(nodes) self.adj_out[name] = dict() self.adj_in[name] = dict() def add_edge(self, u: Hashable, v: Hashable, info: Optional[dict]=None ) ->None: assert u in self.nodes or u in self.hypernodes assert v in self.nodes or v in self.hypernodes if u in self.hypernodes and v in self.hypernodes: assert len(info.keys()) > 0 if v not in self.adj_out[u]: self.adj_out[u][v] = [] if u not in self.adj_in[v]: self.adj_in[v][u] = [] if info is None: info = dict() self.adj_out[u][v].append(info) self.adj_in[v][u].append(info) def edges(self) ->Dict[Tuple[Hashable, Hashable], List]: return dict(((u, v), lst) for u, d in self.adj_out.items() for v, lst in d.items()) def outgoing_nodes(self, u: Hashable) ->Dict[Hashable, List]: assert u in self.adj_out return dict((v, lst) for v, lst in self.adj_out[u].items() if v in self.nodes) def outgoing_hypernodes(self, u) ->Dict[Hashable, List]: assert u in self.adj_out return dict((v, lst) for v, lst in self.adj_out[u].items() if v in self.hypernodes) def incoming_nodes(self, v: Hashable) ->Dict[Hashable, List]: assert v in self.adj_in return dict((u, lst) for u, lst in self.adj_in[v].items() if u in self.nodes) def incoming_hypernodes(self, v: Hashable) ->Dict[Hashable, List]: assert v in self.adj_in return dict((u, lst) for u, lst in self.adj_in[v].items() if u in self.hypernodes) <|reserved_special_token_0|> def indegrees(self, from_hyper: bool=False, to_hyper: bool=False) ->List[ int]: return [sum([len(l) for u, l in self.adj_in[v].items() if u in ( self.hypernodes if from_hyper else self.nodes)]) for v in (self .hypernodes if to_hyper else self.nodes)] def reciprocity_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C1, c1, c2, C1->c2, c2->c1) as in paper """ motifs = [] for C1, c1_nodes in self.hypernodes.items(): for c1 in c1_nodes: motifs += [(C1, c1, c2, e1, e2) for c2 in self.adj_in[c1] if c2 in self.nodes and c2 in self.adj_out[C1] for e1 in self.adj_out[C1][c2] for e2 in self.adj_out[c2][c1]] return motifs def external_reciprocity_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C3, c2, c1, C3->c2, c2->c1) as in paper """ motifs = [] for C3 in self.hypernodes: for c2 in self.adj_out[C3]: if c2 in self.nodes: motifs += [(C3, c2, c1, e1, e2) for c1 in set(self. 
adj_out[c2].keys()) - self.hypernodes[C3] if c1 in self.nodes for e1 in self.adj_out[C3][c2] for e2 in self.adj_out[c2][c1]] return motifs def dyadic_interaction_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C1, C2, C1->C2, C2->C1) as in paper """ motifs = [] for C1 in self.hypernodes: motifs += [(C1, C2, e1, e2) for C2 in self.adj_out[C1] if C2 in self.hypernodes and C1 in self.adj_out[C2] for e1 in self. adj_out[C1][C2] for e2 in self.adj_out[C2][C1]] return motifs def incoming_triad_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C1, C2, C3, C2->C1, C3->C1) as in paper """ motifs = [] for C1 in self.hypernodes: incoming = list(self.adj_in[C1].keys()) motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools. combinations(incoming, 2) for e1 in self.adj_out[C2][C1] for e2 in self.adj_out[C3][C1]] return motifs def outgoing_triad_motifs(self) ->List[Tuple]: """ :return: List of tuples of form (C1, C2, C3, C1->C2, C1->C3) as in paper """ motifs = [] for C1 in self.hypernodes: outgoing = list(self.adj_out[C1].keys()) motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools. combinations(outgoing, 2) for e1 in self.adj_out[C1][C2] for e2 in self.adj_out[C1][C3]] return motifs <|reserved_special_token_1|> import itertools from typing import Tuple, List, Dict, Optional, Hashable, Collection class Hypergraph: """ Represents a hypergraph, consisting of nodes, directed edges, hypernodes (each of which is a set of nodes) and hyperedges (directed edges from hypernodes to hypernodes). Contains functionality to extract motifs from hypergraphs (Fig 2 of http://www.cs.cornell.edu/~cristian/Patterns_of_participant_interactions.html) """ def __init__(self): # public self.nodes = dict() self.hypernodes = dict() # private self.adj_out = dict() # out edges for each (hyper)node self.adj_in = dict() # in edges for each (hyper)node def add_node(self, u: Hashable, info: Optional[Dict]=None) -> None: self.nodes[u] = info if info is not None else dict() self.adj_out[u] = dict() self.adj_in[u] = dict() def add_hypernode(self, name: Hashable, nodes: Collection[Hashable], info: Optional[dict]=None) -> None: self.hypernodes[name] = set(nodes) self.adj_out[name] = dict() self.adj_in[name] = dict() # edge or hyperedge def add_edge(self, u: Hashable, v: Hashable, info: Optional[dict]=None) -> None: assert u in self.nodes or u in self.hypernodes assert v in self.nodes or v in self.hypernodes if u in self.hypernodes and v in self.hypernodes: assert len(info.keys()) > 0 if v not in self.adj_out[u]: self.adj_out[u][v] = [] if u not in self.adj_in[v]: self.adj_in[v][u] = [] if info is None: info = dict() self.adj_out[u][v].append(info) self.adj_in[v][u].append(info) def edges(self) -> Dict[Tuple[Hashable, Hashable], List]: return dict(((u, v), lst) for u, d in self.adj_out.items() for v, lst in d.items()) def outgoing_nodes(self, u: Hashable) -> Dict[Hashable, List]: assert u in self.adj_out return dict((v, lst) for v, lst in self.adj_out[u].items() if v in self.nodes) def outgoing_hypernodes(self, u) -> Dict[Hashable, List]: assert u in self.adj_out return dict((v, lst) for v, lst in self.adj_out[u].items() if v in self.hypernodes) def incoming_nodes(self, v: Hashable) -> Dict[Hashable, List]: assert v in self.adj_in return dict((u, lst) for u, lst in self.adj_in[v].items() if u in self.nodes) def incoming_hypernodes(self, v: Hashable) -> Dict[Hashable, List]: assert v in self.adj_in return dict((u, lst) for u, lst in self.adj_in[v].items() if u in self.hypernodes) def outdegrees(self, 
from_hyper: bool=False, to_hyper: bool=False) -> List[int]: return [sum([len(l) for v, l in self.adj_out[u].items() if v in (self.hypernodes if to_hyper else self.nodes)]) for u in (self.hypernodes if from_hyper else self.nodes)] def indegrees(self, from_hyper: bool=False, to_hyper: bool=False) -> List[int]: return [sum([len(l) for u, l in self.adj_in[v].items() if u in (self.hypernodes if from_hyper else self.nodes)]) for v in (self.hypernodes if to_hyper else self.nodes)] def reciprocity_motifs(self) -> List[Tuple]: """ :return: List of tuples of form (C1, c1, c2, C1->c2, c2->c1) as in paper """ motifs = [] for C1, c1_nodes in self.hypernodes.items(): for c1 in c1_nodes: motifs += [(C1, c1, c2, e1, e2) for c2 in self.adj_in[c1] if c2 in self.nodes and c2 in self.adj_out[C1] for e1 in self.adj_out[C1][c2] for e2 in self.adj_out[c2][c1]] return motifs def external_reciprocity_motifs(self) -> List[Tuple]: """ :return: List of tuples of form (C3, c2, c1, C3->c2, c2->c1) as in paper """ motifs = [] for C3 in self.hypernodes: for c2 in self.adj_out[C3]: if c2 in self.nodes: motifs += [(C3, c2, c1, e1, e2) for c1 in set(self.adj_out[c2].keys()) - self.hypernodes[C3] if c1 in self.nodes for e1 in self.adj_out[C3][c2] for e2 in self.adj_out[c2][c1]] return motifs def dyadic_interaction_motifs(self) -> List[Tuple]: """ :return: List of tuples of form (C1, C2, C1->C2, C2->C1) as in paper """ motifs = [] for C1 in self.hypernodes: motifs += [(C1, C2, e1, e2) for C2 in self.adj_out[C1] if C2 in self.hypernodes and C1 in self.adj_out[C2] for e1 in self.adj_out[C1][C2] for e2 in self.adj_out[C2][C1]] return motifs def incoming_triad_motifs(self) -> List[Tuple]: """ :return: List of tuples of form (C1, C2, C3, C2->C1, C3->C1) as in paper """ motifs = [] for C1 in self.hypernodes: incoming = list(self.adj_in[C1].keys()) motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools.combinations(incoming, 2) for e1 in self.adj_out[C2][C1] for e2 in self.adj_out[C3][C1]] return motifs def outgoing_triad_motifs(self) -> List[Tuple]: """ :return: List of tuples of form (C1, C2, C3, C1->C2, C1->C3) as in paper """ motifs = [] for C1 in self.hypernodes: outgoing = list(self.adj_out[C1].keys()) motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools.combinations(outgoing, 2) for e1 in self.adj_out[C1][C2] for e2 in self.adj_out[C1][C3]] return motifs
flexible
{ "blob_id": "4a3611ecd70d80575f9f68bf45d67532a17b9c93", "index": 7527, "step-1": "<mask token>\n\n\nclass Hypergraph:\n <mask token>\n\n def __init__(self):\n self.nodes = dict()\n self.hypernodes = dict()\n self.adj_out = dict()\n self.adj_in = dict()\n <mask token>\n\n def add_hypernode(self, name: Hashable, nodes: Collection[Hashable],\n info: Optional[dict]=None) ->None:\n self.hypernodes[name] = set(nodes)\n self.adj_out[name] = dict()\n self.adj_in[name] = dict()\n <mask token>\n <mask token>\n <mask token>\n\n def outgoing_hypernodes(self, u) ->Dict[Hashable, List]:\n assert u in self.adj_out\n return dict((v, lst) for v, lst in self.adj_out[u].items() if v in\n self.hypernodes)\n <mask token>\n\n def incoming_hypernodes(self, v: Hashable) ->Dict[Hashable, List]:\n assert v in self.adj_in\n return dict((u, lst) for u, lst in self.adj_in[v].items() if u in\n self.hypernodes)\n <mask token>\n\n def indegrees(self, from_hyper: bool=False, to_hyper: bool=False) ->List[\n int]:\n return [sum([len(l) for u, l in self.adj_in[v].items() if u in (\n self.hypernodes if from_hyper else self.nodes)]) for v in (self\n .hypernodes if to_hyper else self.nodes)]\n\n def reciprocity_motifs(self) ->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, c1, c2, C1->c2, c2->c1) as in paper\n \"\"\"\n motifs = []\n for C1, c1_nodes in self.hypernodes.items():\n for c1 in c1_nodes:\n motifs += [(C1, c1, c2, e1, e2) for c2 in self.adj_in[c1] if\n c2 in self.nodes and c2 in self.adj_out[C1] for e1 in\n self.adj_out[C1][c2] for e2 in self.adj_out[c2][c1]]\n return motifs\n <mask token>\n <mask token>\n <mask token>\n\n def outgoing_triad_motifs(self) ->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, C2, C3, C1->C2, C1->C3) as in paper\n \"\"\"\n motifs = []\n for C1 in self.hypernodes:\n outgoing = list(self.adj_out[C1].keys())\n motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools.\n combinations(outgoing, 2) for e1 in self.adj_out[C1][C2] for\n e2 in self.adj_out[C1][C3]]\n return motifs\n", "step-2": "<mask token>\n\n\nclass Hypergraph:\n <mask token>\n\n def __init__(self):\n self.nodes = dict()\n self.hypernodes = dict()\n self.adj_out = dict()\n self.adj_in = dict()\n <mask token>\n\n def add_hypernode(self, name: Hashable, nodes: Collection[Hashable],\n info: Optional[dict]=None) ->None:\n self.hypernodes[name] = set(nodes)\n self.adj_out[name] = dict()\n self.adj_in[name] = dict()\n\n def add_edge(self, u: Hashable, v: Hashable, info: Optional[dict]=None\n ) ->None:\n assert u in self.nodes or u in self.hypernodes\n assert v in self.nodes or v in self.hypernodes\n if u in self.hypernodes and v in self.hypernodes:\n assert len(info.keys()) > 0\n if v not in self.adj_out[u]:\n self.adj_out[u][v] = []\n if u not in self.adj_in[v]:\n self.adj_in[v][u] = []\n if info is None:\n info = dict()\n self.adj_out[u][v].append(info)\n self.adj_in[v][u].append(info)\n <mask token>\n <mask token>\n\n def outgoing_hypernodes(self, u) ->Dict[Hashable, List]:\n assert u in self.adj_out\n return dict((v, lst) for v, lst in self.adj_out[u].items() if v in\n self.hypernodes)\n\n def incoming_nodes(self, v: Hashable) ->Dict[Hashable, List]:\n assert v in self.adj_in\n return dict((u, lst) for u, lst in self.adj_in[v].items() if u in\n self.nodes)\n\n def incoming_hypernodes(self, v: Hashable) ->Dict[Hashable, List]:\n assert v in self.adj_in\n return dict((u, lst) for u, lst in self.adj_in[v].items() if u in\n self.hypernodes)\n <mask token>\n\n def indegrees(self, from_hyper: bool=False, to_hyper: 
bool=False) ->List[\n int]:\n return [sum([len(l) for u, l in self.adj_in[v].items() if u in (\n self.hypernodes if from_hyper else self.nodes)]) for v in (self\n .hypernodes if to_hyper else self.nodes)]\n\n def reciprocity_motifs(self) ->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, c1, c2, C1->c2, c2->c1) as in paper\n \"\"\"\n motifs = []\n for C1, c1_nodes in self.hypernodes.items():\n for c1 in c1_nodes:\n motifs += [(C1, c1, c2, e1, e2) for c2 in self.adj_in[c1] if\n c2 in self.nodes and c2 in self.adj_out[C1] for e1 in\n self.adj_out[C1][c2] for e2 in self.adj_out[c2][c1]]\n return motifs\n <mask token>\n <mask token>\n <mask token>\n\n def outgoing_triad_motifs(self) ->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, C2, C3, C1->C2, C1->C3) as in paper\n \"\"\"\n motifs = []\n for C1 in self.hypernodes:\n outgoing = list(self.adj_out[C1].keys())\n motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools.\n combinations(outgoing, 2) for e1 in self.adj_out[C1][C2] for\n e2 in self.adj_out[C1][C3]]\n return motifs\n", "step-3": "<mask token>\n\n\nclass Hypergraph:\n <mask token>\n\n def __init__(self):\n self.nodes = dict()\n self.hypernodes = dict()\n self.adj_out = dict()\n self.adj_in = dict()\n <mask token>\n\n def add_hypernode(self, name: Hashable, nodes: Collection[Hashable],\n info: Optional[dict]=None) ->None:\n self.hypernodes[name] = set(nodes)\n self.adj_out[name] = dict()\n self.adj_in[name] = dict()\n\n def add_edge(self, u: Hashable, v: Hashable, info: Optional[dict]=None\n ) ->None:\n assert u in self.nodes or u in self.hypernodes\n assert v in self.nodes or v in self.hypernodes\n if u in self.hypernodes and v in self.hypernodes:\n assert len(info.keys()) > 0\n if v not in self.adj_out[u]:\n self.adj_out[u][v] = []\n if u not in self.adj_in[v]:\n self.adj_in[v][u] = []\n if info is None:\n info = dict()\n self.adj_out[u][v].append(info)\n self.adj_in[v][u].append(info)\n\n def edges(self) ->Dict[Tuple[Hashable, Hashable], List]:\n return dict(((u, v), lst) for u, d in self.adj_out.items() for v,\n lst in d.items())\n\n def outgoing_nodes(self, u: Hashable) ->Dict[Hashable, List]:\n assert u in self.adj_out\n return dict((v, lst) for v, lst in self.adj_out[u].items() if v in\n self.nodes)\n\n def outgoing_hypernodes(self, u) ->Dict[Hashable, List]:\n assert u in self.adj_out\n return dict((v, lst) for v, lst in self.adj_out[u].items() if v in\n self.hypernodes)\n\n def incoming_nodes(self, v: Hashable) ->Dict[Hashable, List]:\n assert v in self.adj_in\n return dict((u, lst) for u, lst in self.adj_in[v].items() if u in\n self.nodes)\n\n def incoming_hypernodes(self, v: Hashable) ->Dict[Hashable, List]:\n assert v in self.adj_in\n return dict((u, lst) for u, lst in self.adj_in[v].items() if u in\n self.hypernodes)\n <mask token>\n\n def indegrees(self, from_hyper: bool=False, to_hyper: bool=False) ->List[\n int]:\n return [sum([len(l) for u, l in self.adj_in[v].items() if u in (\n self.hypernodes if from_hyper else self.nodes)]) for v in (self\n .hypernodes if to_hyper else self.nodes)]\n\n def reciprocity_motifs(self) ->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, c1, c2, C1->c2, c2->c1) as in paper\n \"\"\"\n motifs = []\n for C1, c1_nodes in self.hypernodes.items():\n for c1 in c1_nodes:\n motifs += [(C1, c1, c2, e1, e2) for c2 in self.adj_in[c1] if\n c2 in self.nodes and c2 in self.adj_out[C1] for e1 in\n self.adj_out[C1][c2] for e2 in self.adj_out[c2][c1]]\n return motifs\n\n def external_reciprocity_motifs(self) 
->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C3, c2, c1, C3->c2, c2->c1) as in paper\n \"\"\"\n motifs = []\n for C3 in self.hypernodes:\n for c2 in self.adj_out[C3]:\n if c2 in self.nodes:\n motifs += [(C3, c2, c1, e1, e2) for c1 in set(self.\n adj_out[c2].keys()) - self.hypernodes[C3] if c1 in\n self.nodes for e1 in self.adj_out[C3][c2] for e2 in\n self.adj_out[c2][c1]]\n return motifs\n\n def dyadic_interaction_motifs(self) ->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, C2, C1->C2, C2->C1) as in paper\n \"\"\"\n motifs = []\n for C1 in self.hypernodes:\n motifs += [(C1, C2, e1, e2) for C2 in self.adj_out[C1] if C2 in\n self.hypernodes and C1 in self.adj_out[C2] for e1 in self.\n adj_out[C1][C2] for e2 in self.adj_out[C2][C1]]\n return motifs\n\n def incoming_triad_motifs(self) ->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, C2, C3, C2->C1, C3->C1) as in paper\n \"\"\"\n motifs = []\n for C1 in self.hypernodes:\n incoming = list(self.adj_in[C1].keys())\n motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools.\n combinations(incoming, 2) for e1 in self.adj_out[C2][C1] for\n e2 in self.adj_out[C3][C1]]\n return motifs\n\n def outgoing_triad_motifs(self) ->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, C2, C3, C1->C2, C1->C3) as in paper\n \"\"\"\n motifs = []\n for C1 in self.hypernodes:\n outgoing = list(self.adj_out[C1].keys())\n motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools.\n combinations(outgoing, 2) for e1 in self.adj_out[C1][C2] for\n e2 in self.adj_out[C1][C3]]\n return motifs\n", "step-4": "<mask token>\n\n\nclass Hypergraph:\n <mask token>\n\n def __init__(self):\n self.nodes = dict()\n self.hypernodes = dict()\n self.adj_out = dict()\n self.adj_in = dict()\n\n def add_node(self, u: Hashable, info: Optional[Dict]=None) ->None:\n self.nodes[u] = info if info is not None else dict()\n self.adj_out[u] = dict()\n self.adj_in[u] = dict()\n\n def add_hypernode(self, name: Hashable, nodes: Collection[Hashable],\n info: Optional[dict]=None) ->None:\n self.hypernodes[name] = set(nodes)\n self.adj_out[name] = dict()\n self.adj_in[name] = dict()\n\n def add_edge(self, u: Hashable, v: Hashable, info: Optional[dict]=None\n ) ->None:\n assert u in self.nodes or u in self.hypernodes\n assert v in self.nodes or v in self.hypernodes\n if u in self.hypernodes and v in self.hypernodes:\n assert len(info.keys()) > 0\n if v not in self.adj_out[u]:\n self.adj_out[u][v] = []\n if u not in self.adj_in[v]:\n self.adj_in[v][u] = []\n if info is None:\n info = dict()\n self.adj_out[u][v].append(info)\n self.adj_in[v][u].append(info)\n\n def edges(self) ->Dict[Tuple[Hashable, Hashable], List]:\n return dict(((u, v), lst) for u, d in self.adj_out.items() for v,\n lst in d.items())\n\n def outgoing_nodes(self, u: Hashable) ->Dict[Hashable, List]:\n assert u in self.adj_out\n return dict((v, lst) for v, lst in self.adj_out[u].items() if v in\n self.nodes)\n\n def outgoing_hypernodes(self, u) ->Dict[Hashable, List]:\n assert u in self.adj_out\n return dict((v, lst) for v, lst in self.adj_out[u].items() if v in\n self.hypernodes)\n\n def incoming_nodes(self, v: Hashable) ->Dict[Hashable, List]:\n assert v in self.adj_in\n return dict((u, lst) for u, lst in self.adj_in[v].items() if u in\n self.nodes)\n\n def incoming_hypernodes(self, v: Hashable) ->Dict[Hashable, List]:\n assert v in self.adj_in\n return dict((u, lst) for u, lst in self.adj_in[v].items() if u in\n self.hypernodes)\n <mask token>\n\n def indegrees(self, from_hyper: bool=False, 
to_hyper: bool=False) ->List[\n int]:\n return [sum([len(l) for u, l in self.adj_in[v].items() if u in (\n self.hypernodes if from_hyper else self.nodes)]) for v in (self\n .hypernodes if to_hyper else self.nodes)]\n\n def reciprocity_motifs(self) ->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, c1, c2, C1->c2, c2->c1) as in paper\n \"\"\"\n motifs = []\n for C1, c1_nodes in self.hypernodes.items():\n for c1 in c1_nodes:\n motifs += [(C1, c1, c2, e1, e2) for c2 in self.adj_in[c1] if\n c2 in self.nodes and c2 in self.adj_out[C1] for e1 in\n self.adj_out[C1][c2] for e2 in self.adj_out[c2][c1]]\n return motifs\n\n def external_reciprocity_motifs(self) ->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C3, c2, c1, C3->c2, c2->c1) as in paper\n \"\"\"\n motifs = []\n for C3 in self.hypernodes:\n for c2 in self.adj_out[C3]:\n if c2 in self.nodes:\n motifs += [(C3, c2, c1, e1, e2) for c1 in set(self.\n adj_out[c2].keys()) - self.hypernodes[C3] if c1 in\n self.nodes for e1 in self.adj_out[C3][c2] for e2 in\n self.adj_out[c2][c1]]\n return motifs\n\n def dyadic_interaction_motifs(self) ->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, C2, C1->C2, C2->C1) as in paper\n \"\"\"\n motifs = []\n for C1 in self.hypernodes:\n motifs += [(C1, C2, e1, e2) for C2 in self.adj_out[C1] if C2 in\n self.hypernodes and C1 in self.adj_out[C2] for e1 in self.\n adj_out[C1][C2] for e2 in self.adj_out[C2][C1]]\n return motifs\n\n def incoming_triad_motifs(self) ->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, C2, C3, C2->C1, C3->C1) as in paper\n \"\"\"\n motifs = []\n for C1 in self.hypernodes:\n incoming = list(self.adj_in[C1].keys())\n motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools.\n combinations(incoming, 2) for e1 in self.adj_out[C2][C1] for\n e2 in self.adj_out[C3][C1]]\n return motifs\n\n def outgoing_triad_motifs(self) ->List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, C2, C3, C1->C2, C1->C3) as in paper\n \"\"\"\n motifs = []\n for C1 in self.hypernodes:\n outgoing = list(self.adj_out[C1].keys())\n motifs += [(C1, C2, C3, e1, e2) for C2, C3 in itertools.\n combinations(outgoing, 2) for e1 in self.adj_out[C1][C2] for\n e2 in self.adj_out[C1][C3]]\n return motifs\n", "step-5": "import itertools\nfrom typing import Tuple, List, Dict, Optional, Hashable, Collection\n\nclass Hypergraph:\n \"\"\"\n Represents a hypergraph, consisting of nodes, directed edges,\n hypernodes (each of which is a set of nodes) and hyperedges (directed edges\n from hypernodes to hypernodes). 
Contains functionality to extract motifs\n from hypergraphs (Fig 2 of\n http://www.cs.cornell.edu/~cristian/Patterns_of_participant_interactions.html)\n \"\"\"\n def __init__(self):\n # public\n self.nodes = dict()\n self.hypernodes = dict()\n\n # private\n self.adj_out = dict() # out edges for each (hyper)node\n self.adj_in = dict() # in edges for each (hyper)node\n\n def add_node(self, u: Hashable, info: Optional[Dict]=None) -> None:\n self.nodes[u] = info if info is not None else dict()\n self.adj_out[u] = dict()\n self.adj_in[u] = dict()\n\n def add_hypernode(self, name: Hashable,\n nodes: Collection[Hashable],\n info: Optional[dict]=None) -> None:\n self.hypernodes[name] = set(nodes)\n self.adj_out[name] = dict()\n self.adj_in[name] = dict()\n\n # edge or hyperedge\n def add_edge(self, u: Hashable, v: Hashable, info: Optional[dict]=None) -> None:\n assert u in self.nodes or u in self.hypernodes\n assert v in self.nodes or v in self.hypernodes\n if u in self.hypernodes and v in self.hypernodes:\n assert len(info.keys()) > 0\n if v not in self.adj_out[u]:\n self.adj_out[u][v] = []\n if u not in self.adj_in[v]:\n self.adj_in[v][u] = []\n if info is None: info = dict()\n self.adj_out[u][v].append(info)\n self.adj_in[v][u].append(info)\n\n def edges(self) -> Dict[Tuple[Hashable, Hashable], List]:\n return dict(((u, v), lst) for u, d in self.adj_out.items()\n for v, lst in d.items())\n\n def outgoing_nodes(self, u: Hashable) -> Dict[Hashable, List]:\n assert u in self.adj_out\n return dict((v, lst) for v, lst in self.adj_out[u].items()\n if v in self.nodes)\n\n def outgoing_hypernodes(self, u) -> Dict[Hashable, List]:\n assert u in self.adj_out\n return dict((v, lst) for v, lst in self.adj_out[u].items()\n if v in self.hypernodes)\n\n def incoming_nodes(self, v: Hashable) -> Dict[Hashable, List]:\n assert v in self.adj_in\n return dict((u, lst) for u, lst in self.adj_in[v].items() if u in\n self.nodes)\n\n def incoming_hypernodes(self, v: Hashable) -> Dict[Hashable, List]:\n assert v in self.adj_in\n return dict((u, lst) for u, lst in self.adj_in[v].items() if u in\n self.hypernodes)\n\n def outdegrees(self, from_hyper: bool=False, to_hyper: bool=False) -> List[int]:\n return [sum([len(l) for v, l in self.adj_out[u].items() if v in\n (self.hypernodes if to_hyper else self.nodes)]) for u in\n (self.hypernodes if from_hyper else self.nodes)]\n\n def indegrees(self, from_hyper: bool=False, to_hyper: bool=False) -> List[int]:\n return [sum([len(l) for u, l in self.adj_in[v].items() if u in\n (self.hypernodes if from_hyper else self.nodes)]) for v in\n (self.hypernodes if to_hyper else self.nodes)]\n\n def reciprocity_motifs(self) -> List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, c1, c2, C1->c2, c2->c1) as in paper\n \"\"\"\n motifs = []\n for C1, c1_nodes in self.hypernodes.items():\n for c1 in c1_nodes:\n motifs += [(C1, c1, c2, e1, e2) for c2 in self.adj_in[c1] if\n c2 in self.nodes and c2 in self.adj_out[C1]\n for e1 in self.adj_out[C1][c2]\n for e2 in self.adj_out[c2][c1]]\n return motifs\n\n def external_reciprocity_motifs(self) -> List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C3, c2, c1, C3->c2, c2->c1) as in paper\n \"\"\"\n motifs = []\n for C3 in self.hypernodes:\n for c2 in self.adj_out[C3]:\n if c2 in self.nodes:\n motifs += [(C3, c2, c1, e1, e2) for c1 in\n set(self.adj_out[c2].keys()) - self.hypernodes[C3]\n if c1 in self.nodes\n for e1 in self.adj_out[C3][c2]\n for e2 in self.adj_out[c2][c1]]\n return motifs\n\n def dyadic_interaction_motifs(self) -> 
List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, C2, C1->C2, C2->C1) as in paper\n \"\"\"\n\n motifs = []\n for C1 in self.hypernodes:\n motifs += [(C1, C2, e1, e2) for C2 in self.adj_out[C1] if C2 in\n self.hypernodes and C1 in self.adj_out[C2]\n for e1 in self.adj_out[C1][C2]\n for e2 in self.adj_out[C2][C1]]\n return motifs\n\n def incoming_triad_motifs(self) -> List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, C2, C3, C2->C1, C3->C1) as in paper\n \"\"\"\n motifs = []\n for C1 in self.hypernodes:\n incoming = list(self.adj_in[C1].keys())\n motifs += [(C1, C2, C3, e1, e2) for C2, C3 in\n itertools.combinations(incoming, 2)\n for e1 in self.adj_out[C2][C1]\n for e2 in self.adj_out[C3][C1]]\n return motifs\n\n def outgoing_triad_motifs(self) -> List[Tuple]:\n \"\"\"\n :return: List of tuples of form (C1, C2, C3, C1->C2, C1->C3) as in paper\n \"\"\"\n motifs = []\n for C1 in self.hypernodes:\n outgoing = list(self.adj_out[C1].keys())\n motifs += [(C1, C2, C3, e1, e2) for C2, C3 in\n itertools.combinations(outgoing, 2)\n for e1 in self.adj_out[C1][C2]\n for e2 in self.adj_out[C1][C3]]\n return motifs\n", "step-ids": [ 8, 10, 15, 16, 20 ] }
[ 8, 10, 15, 16, 20 ]
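A minimal usage sketch for the Hypergraph class above (the names C1, c1, c2 are illustrative): one hypernode whose member is answered by an outside node it previously addressed yields exactly one reciprocity motif.

h = Hypergraph()
h.add_node('c1')
h.add_node('c2')
h.add_hypernode('C1', ['c1'])
h.add_edge('C1', 'c2', {'t': 0})  # hypernode -> node edge, so the hyperedge info check does not apply
h.add_edge('c2', 'c1', {'t': 1})  # reply back into C1's member node
print(h.reciprocity_motifs())     # [('C1', 'c1', 'c2', {'t': 0}, {'t': 1})]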
# -*- coding: utf-8 -*- """ This file is part of pyCMBS. (c) 2012- Alexander Loew For COPYING and LICENSE details, please refer to the LICENSE file """ import unittest from pycmbs import data4D class TestPycmbsData4D(unittest.TestCase): def setUp(self): pass def test_DummyTest(self): pass if __name__ == "__main__": unittest.main()
normal
{ "blob_id": "87562ce2a957de3fa2eb84cbb0de18c6ce264c6b", "index": 7676, "step-1": "<mask token>\n\n\nclass TestPycmbsData4D(unittest.TestCase):\n\n def setUp(self):\n pass\n <mask token>\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass TestPycmbsData4D(unittest.TestCase):\n\n def setUp(self):\n pass\n\n def test_DummyTest(self):\n pass\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass TestPycmbsData4D(unittest.TestCase):\n\n def setUp(self):\n pass\n\n def test_DummyTest(self):\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n", "step-4": "<mask token>\nimport unittest\nfrom pycmbs import data4D\n\n\nclass TestPycmbsData4D(unittest.TestCase):\n\n def setUp(self):\n pass\n\n def test_DummyTest(self):\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n", "step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nThis file is part of pyCMBS.\n(c) 2012- Alexander Loew\nFor COPYING and LICENSE details, please refer to the LICENSE file\n\"\"\"\n\nimport unittest\nfrom pycmbs import data4D\n\nclass TestPycmbsData4D(unittest.TestCase):\n\n def setUp(self):\n pass\n\n def test_DummyTest(self):\n pass\n\nif __name__ == \"__main__\":\n unittest.main()\n\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
def adder(x, y): return x + y adder('one', 'two') adder([3, 4], [9, 0, 33]) adder(4.3, 3.5)
normal
{ "blob_id": "1ee5139cb1613977f1c85619404b3dcc6e996382", "index": 5364, "step-1": "<mask token>\n", "step-2": "def adder(x, y):\n return x + y\n\n\n<mask token>\n", "step-3": "def adder(x, y):\n return x + y\n\n\nadder('one', 'two')\nadder([3, 4], [9, 0, 33])\nadder(4.3, 3.5)\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
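The adder above works on any operands that support +; printing the calls makes the duck typing visible:

print(adder('one', 'two'))        # 'onetwo' (string concatenation)
print(adder([3, 4], [9, 0, 33]))  # [3, 4, 9, 0, 33] (list concatenation)
print(adder(4.3, 3.5))            # 7.8 (float addition)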
# -*- coding:utf-8 -*- """ Author:xufei Date:2021/1/21 """
normal
{ "blob_id": "d39e3a552a7c558d3f5b410e0b228fb7409d732a", "index": 928, "step-1": "<mask token>\n", "step-2": "# -*- coding:utf-8 -*-\n\"\"\"\nAuthor:xufei\nDate:2021/1/21\n\"\"\"\n", "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0, 1 ] }
[ 0, 1 ]
from pypc.a_primitives.nand import nand # nand gates used: 5 def half_adder(a: bool, b: bool) -> (bool, bool): """Returns a + b in the form of a tuple of two bools representing the two bits.""" nand_a_b = nand(a, b) nand_c = nand(nand_a_b, a) nand_d = nand(nand_a_b, b) high = nand(nand_a_b, nand_a_b) low = nand(nand_c, nand_d) return high, low # nand gates used: 9 def full_adder(a: bool, b: bool, c: bool) -> (bool, bool): """Returns a + b + c in the form of a tuple of two bools representing the two bits. Carried value is ignored. """ nand_a_b = nand(a, b) nand_c = nand(nand_a_b, a) nand_d = nand(nand_a_b, b) low_a_b = nand(nand_c, nand_d) nand_low_a_b_c = nand(low_a_b, c) nand_e = nand(low_a_b, nand_low_a_b_c) nand_f = nand(nand_low_a_b_c, c) high = nand(nand_a_b, nand_low_a_b_c) low = nand(nand_e, nand_f) return high, low
normal
{ "blob_id": "66f6639ae62fe8c0b42171cf3e3fb450d8eee2b2", "index": 7671, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef full_adder(a: bool, b: bool, c: bool) ->(bool, bool):\n \"\"\"Returns a + b + c in the form of a tuple of two bools representing the two\n bits.\n \n Carried value is ignored.\n \"\"\"\n nand_a_b = nand(a, b)\n nand_c = nand(nand_a_b, a)\n nand_d = nand(nand_a_b, b)\n low_a_b = nand(nand_c, nand_d)\n nand_low_a_b_c = nand(low_a_b, c)\n nand_e = nand(low_a_b, nand_low_a_b_c)\n nand_f = nand(nand_low_a_b_c, c)\n high = nand(nand_a_b, nand_low_a_b_c)\n low = nand(nand_e, nand_f)\n return high, low\n", "step-3": "<mask token>\n\n\ndef half_adder(a: bool, b: bool) ->(bool, bool):\n \"\"\"Returns a + b in the form of a tuple of two bools representing the two\n bits.\"\"\"\n nand_a_b = nand(a, b)\n nand_c = nand(nand_a_b, a)\n nand_d = nand(nand_a_b, b)\n high = nand(nand_a_b, nand_a_b)\n low = nand(nand_c, nand_d)\n return high, low\n\n\ndef full_adder(a: bool, b: bool, c: bool) ->(bool, bool):\n \"\"\"Returns a + b + c in the form of a tuple of two bools representing the two\n bits.\n \n Carried value is ignored.\n \"\"\"\n nand_a_b = nand(a, b)\n nand_c = nand(nand_a_b, a)\n nand_d = nand(nand_a_b, b)\n low_a_b = nand(nand_c, nand_d)\n nand_low_a_b_c = nand(low_a_b, c)\n nand_e = nand(low_a_b, nand_low_a_b_c)\n nand_f = nand(nand_low_a_b_c, c)\n high = nand(nand_a_b, nand_low_a_b_c)\n low = nand(nand_e, nand_f)\n return high, low\n", "step-4": "from pypc.a_primitives.nand import nand\n\n\ndef half_adder(a: bool, b: bool) ->(bool, bool):\n \"\"\"Returns a + b in the form of a tuple of two bools representing the two\n bits.\"\"\"\n nand_a_b = nand(a, b)\n nand_c = nand(nand_a_b, a)\n nand_d = nand(nand_a_b, b)\n high = nand(nand_a_b, nand_a_b)\n low = nand(nand_c, nand_d)\n return high, low\n\n\ndef full_adder(a: bool, b: bool, c: bool) ->(bool, bool):\n \"\"\"Returns a + b + c in the form of a tuple of two bools representing the two\n bits.\n \n Carried value is ignored.\n \"\"\"\n nand_a_b = nand(a, b)\n nand_c = nand(nand_a_b, a)\n nand_d = nand(nand_a_b, b)\n low_a_b = nand(nand_c, nand_d)\n nand_low_a_b_c = nand(low_a_b, c)\n nand_e = nand(low_a_b, nand_low_a_b_c)\n nand_f = nand(nand_low_a_b_c, c)\n high = nand(nand_a_b, nand_low_a_b_c)\n low = nand(nand_e, nand_f)\n return high, low\n", "step-5": "from pypc.a_primitives.nand import nand\r\n\r\n\r\n# nand gates used: 5\r\ndef half_adder(a: bool, b: bool) -> (bool, bool):\r\n \"\"\"Returns a + b in the form of a tuple of two bools representing the two\r\n bits.\"\"\"\r\n nand_a_b = nand(a, b)\r\n nand_c = nand(nand_a_b, a)\r\n nand_d = nand(nand_a_b, b)\r\n high = nand(nand_a_b, nand_a_b)\r\n low = nand(nand_c, nand_d)\r\n return high, low\r\n\r\n\r\n# nand gates used: 9\r\ndef full_adder(a: bool, b: bool, c: bool) -> (bool, bool):\r\n \"\"\"Returns a + b + c in the form of a tuple of two bools representing the two\r\n bits.\r\n \r\n Carried value is ignored.\r\n \"\"\"\r\n nand_a_b = nand(a, b)\r\n nand_c = nand(nand_a_b, a)\r\n nand_d = nand(nand_a_b, b)\r\n low_a_b = nand(nand_c, nand_d)\r\n nand_low_a_b_c = nand(low_a_b, c)\r\n nand_e = nand(low_a_b, nand_low_a_b_c)\r\n nand_f = nand(nand_low_a_b_c, c)\r\n high = nand(nand_a_b, nand_low_a_b_c)\r\n low = nand(nand_e, nand_f)\r\n return high, low\r\n\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
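A quick exhaustive check of full_adder above. pypc is not assumed to be installed here, so a stand-in nand with the usual truth table is defined (an assumption about pypc's primitive); the assertion states that (high, low) is the two-bit binary sum of the three inputs.

def nand(a: bool, b: bool) -> bool:
    # assumed equivalent to pypc.a_primitives.nand
    return not (a and b)

for a in (False, True):
    for b in (False, True):
        for c in (False, True):
            high, low = full_adder(a, b, c)
            assert 2 * int(high) + int(low) == int(a) + int(b) + int(c)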
import copy
import datetime

from sacred import Experiment
from tqdm import tqdm

from mms_msg.databases.classical.full_overlap import WSJ2Mix
import paderbox as pb
import padertorch as pt

ex = Experiment('mixture_generator_create_json')


@ex.config
def defaults():
    json_path = 'database.json'
    database = {
        'factory': WSJ2Mix,
    }
    pt.Configurable.get_config(database)


@ex.automain
def main(json_path, database, _log):
    database_config = database
    database = pt.configurable.config_to_instance(database)
    database_dict = {
        'datasets': {
            dataset_name: dict(tqdm(
                database.get_dataset(dataset_name).items(),
                desc=dataset_name,
            )) for dataset_name in database.dataset_names
        },
        'meta': {
            'config': pt.configurable.recursive_class_to_str(
                copy.deepcopy(database_config)
            ),
            'generated': datetime.datetime.now(),
        }
    }
    pb.io.dump(database_dict, json_path)
    _log.info(f'Wrote file: {json_path}')
flexible
{ "blob_id": "f39130099ccf467623d65ac328fd02538044d36a", "index": 6476, "step-1": "<mask token>\n\n\[email protected]\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {'datasets': {dataset_name: dict(tqdm(database.\n get_dataset(dataset_name).items(), desc=dataset_name)) for\n dataset_name in database.dataset_names}, 'meta': {'config': pt.\n configurable.recursive_class_to_str(copy.deepcopy(database_config)),\n 'generated': datetime.datetime.now()}}\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n", "step-2": "<mask token>\n\n\[email protected]\ndef defaults():\n json_path = 'database.json'\n database = {'factory': WSJ2Mix}\n pt.Configurable.get_config(database)\n\n\[email protected]\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {'datasets': {dataset_name: dict(tqdm(database.\n get_dataset(dataset_name).items(), desc=dataset_name)) for\n dataset_name in database.dataset_names}, 'meta': {'config': pt.\n configurable.recursive_class_to_str(copy.deepcopy(database_config)),\n 'generated': datetime.datetime.now()}}\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n", "step-3": "<mask token>\nex = Experiment('mixture_generator_create_json')\n\n\[email protected]\ndef defaults():\n json_path = 'database.json'\n database = {'factory': WSJ2Mix}\n pt.Configurable.get_config(database)\n\n\[email protected]\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {'datasets': {dataset_name: dict(tqdm(database.\n get_dataset(dataset_name).items(), desc=dataset_name)) for\n dataset_name in database.dataset_names}, 'meta': {'config': pt.\n configurable.recursive_class_to_str(copy.deepcopy(database_config)),\n 'generated': datetime.datetime.now()}}\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n", "step-4": "import copy\nimport datetime\nfrom sacred import Experiment\nfrom tqdm import tqdm\nfrom mms_msg.databases.classical.full_overlap import WSJ2Mix\nimport paderbox as pb\nimport padertorch as pt\nex = Experiment('mixture_generator_create_json')\n\n\[email protected]\ndef defaults():\n json_path = 'database.json'\n database = {'factory': WSJ2Mix}\n pt.Configurable.get_config(database)\n\n\[email protected]\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {'datasets': {dataset_name: dict(tqdm(database.\n get_dataset(dataset_name).items(), desc=dataset_name)) for\n dataset_name in database.dataset_names}, 'meta': {'config': pt.\n configurable.recursive_class_to_str(copy.deepcopy(database_config)),\n 'generated': datetime.datetime.now()}}\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n", "step-5": "import copy\nimport datetime\n\nfrom sacred import Experiment\nfrom tqdm import tqdm\n\nfrom mms_msg.databases.classical.full_overlap import WSJ2Mix\nimport paderbox as pb\nimport padertorch as pt\n\nex = Experiment('mixture_generator_create_json')\n\n\[email protected]\ndef defaults():\n json_path = 'database.json'\n database = {\n 'factory': WSJ2Mix,\n }\n pt.Configurable.get_config(database)\n\n\[email protected]\ndef main(json_path, database, _log):\n database_config = database\n database = 
pt.configurable.config_to_instance(database)\n database_dict = {\n 'datasets': {\n dataset_name: dict(tqdm(\n database.get_dataset(dataset_name).items(),\n desc=dataset_name,\n )) for dataset_name in database.dataset_names\n },\n 'meta': {\n 'config': pt.configurable.recursive_class_to_str(\n copy.deepcopy(database_config)\n ),\n 'generated': datetime.datetime.now(),\n }\n }\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
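Because the script above is a Sacred experiment, its config entries can be overridden per run; a sketch of driving it programmatically (the module name create_json is an assumption about how the file is saved):

from create_json import ex

# overrides the json_path default declared in the @ex.config function
run = ex.run(config_updates={'json_path': 'wsj2mix.json'})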
"""
OBJECTIVE: Given a list, sort it from low to high using the QUICK SORT algorithm

Quicksort first divides a large array into two smaller sub-arrays: the low elements and the high elements.
Quicksort can then recursively sort the sub-arrays.

The steps are:

1. Pick an element, called a pivot, from the array.
2. Partitioning: reorder the array so that all elements with values less than the pivot come before the pivot,
    while all elements with values greater than the pivot come after it (equal values can go either way).
    After this partitioning, the pivot is in its final position. This is called the partition operation.
3. Recursively apply the above steps to the sub-array of elements with smaller values
    and separately to the sub-array of elements with greater values.

The base case of the recursion is arrays of size zero or one, which are in order by definition,
 so they never need to be sorted.

https://www.geeksforgeeks.org/quick-sort/
"""


def quick_sort(array: list) -> list:
    return []
flexible
{ "blob_id": "04099c46c029af37a08b3861809da13b3cc3153b", "index": 997, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef quick_sort(array: list) ->list:\n return []\n", "step-3": "\"\"\"\nOBJECTIVE: Given a list, sort it from low to high using the QUICK SORT algorithm\n\nQuicksort first divides a large array into two smaller sub-arrays: the low elements and the high elements.\nQuicksort can then recursively sort the sub-arrays.\n\nThe steps are:\n\n1. Pick an element, called a pivot, from the array.\n2. Partitioning: reorder the array so that all elements with values less than the pivot come before the pivot,\n while all elements with values greater than the pivot come after it (equal values can go either way).\n After this partitioning, the pivot is in its final position. This is called the partition operation.\n3. Recursively apply the above steps to the sub-array of elements with smaller values\n and separately to the sub-array of elements with greater values.\n\nThe base case of the recursion is arrays of size zero or one, which are in order by definition,\n so they never need to be sorted.\n\nhttps://www.geeksforgeeks.org/quick-sort/\n\"\"\"\n\n\ndef quick_sort(array: list) -> list:\n return []\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
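The quick_sort above is left as a stub returning []; a sketch completing it along the three steps in the docstring, using an out-of-place three-way partition rather than the in-place one:

def quick_sort(array: list) -> list:
    if len(array) <= 1:  # base case: size zero or one is already sorted
        return array
    pivot = array[len(array) // 2]            # step 1: pick a pivot
    lower = [x for x in array if x < pivot]   # step 2: partition around it
    equal = [x for x in array if x == pivot]
    higher = [x for x in array if x > pivot]
    # step 3: recurse on the two sides
    return quick_sort(lower) + equal + quick_sort(higher)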
t3 = float(input('Digite um numero: ')) print('o dobro deste numero é', t3 * 2) print('O triplo deste numero é', t3 * 3) print('E a raiz quadrada deste numero é', t3 ** (1 / 2))
normal
{ "blob_id": "005ea8a1e75447b2b1c030a645bde5d0cdc8fb53", "index": 3532, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint('o dobro deste numero é', t3 * 2)\nprint('O triplo deste numero é', t3 * 3)\nprint('E a raiz quadrada deste numero é', t3 ** (1 / 2))\n", "step-3": "t3 = float(input('Digite um numero: '))\nprint('o dobro deste numero é', t3 * 2)\nprint('O triplo deste numero é', t3 * 3)\nprint('E a raiz quadrada deste numero é', t3 ** (1 / 2))\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
#Write a Python program to get the maximum and minimum value in a dictionary. d1={6: 10, 2: 20, 5: 30, 4: 40, 1: 50, 3: 60} print(max(d1.values())) print(min(d1.values()))
normal
{ "blob_id": "53e397068fcf88bbbce4dcc1bf1b441a2fbbee48", "index": 2261, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(max(d1.values()))\nprint(min(d1.values()))\n", "step-3": "d1 = {(6): 10, (2): 20, (5): 30, (4): 40, (1): 50, (3): 60}\nprint(max(d1.values()))\nprint(min(d1.values()))\n", "step-4": "#Write a Python program to get the maximum and minimum value in a dictionary.\n\nd1={6: 10, 2: 20, 5: 30, 4: 40, 1: 50, 3: 60}\n\nprint(max(d1.values()))\nprint(min(d1.values()))\n\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
# -*- coding: utf-8 -*- # Define your item pipelines here # # Don't forget to add your pipeline to the ITEM_PIPELINES setting # See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html from sqlalchemy.orm.session import sessionmaker, query from FoodPandaStore.FoodPandaStore.model import * import datetime as dt from datetime import datetime class FoodpandastoreInfo2Pipeline: def __init__(self): engine = db_connect() create_tables(engine) self.session = sessionmaker(bind=engine) def process_item(self, item, spider): session = self.session() new_store_info = FoodPandaStoreInfo2( id=item['id'], code=item['code'], category=item['category'], name=item['name'], url=item['url'], rating=item.get('rating', None), address=item['address'], latitude=item['latitude'], longitude=item['longitude'], is_pickup_available=item['is_pickup_available'], is_delivery_available=item['is_delivery_available'], is_active=item['is_active'], date=dt.datetime.utcnow() ) new_ts = TambonStore( store_id=item['id'], sub_district_id=item['sub_district_id'], district_id=item['district_id'], province_id=item['province_id'], updated_datetime=datetime.utcnow()) existing_tambon = session.query(TambonGeo2).filter_by(sub_district_id = item['sub_district_id'], district_id=item['district_id'], province_id=item['province_id']).first() if existing_tambon: ## Store existing_store_info = session.query(FoodPandaStoreInfo2).filter_by(id=item['id']).first() existing_tambon_store = session.query(TambonStore).filter_by(store_id=item['id'], sub_district_id=item['sub_district_id'], district_id=item['district_id'], province_id=item['province_id']).first() if existing_store_info: session.merge(existing_store_info) if existing_tambon_store: session.merge(new_ts) else: session.add(new_ts) else: session.add(new_store_info) session.add(new_ts) menus = item.get('menus', []) for menu in menus: m = FoodPandaStoreMenu2( id=menu['id'], name=menu['name'], type=menu['type'], opening_time=menu['opening_time'], closing_time=menu['closing_time'] ) new_store_info.menus.append(m) else: print('{}, {}, {} is not persisted in TambonGeo'.format(item['sub_district_id'], item['district_id'], item['province_id'])) session.commit() session.close()
normal
{ "blob_id": "f66306908f1fdd5c662804e73596b445c66dc176", "index": 9521, "step-1": "<mask token>\n\n\nclass FoodpandastoreInfo2Pipeline:\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass FoodpandastoreInfo2Pipeline:\n\n def __init__(self):\n engine = db_connect()\n create_tables(engine)\n self.session = sessionmaker(bind=engine)\n <mask token>\n", "step-3": "<mask token>\n\n\nclass FoodpandastoreInfo2Pipeline:\n\n def __init__(self):\n engine = db_connect()\n create_tables(engine)\n self.session = sessionmaker(bind=engine)\n\n def process_item(self, item, spider):\n session = self.session()\n new_store_info = FoodPandaStoreInfo2(id=item['id'], code=item[\n 'code'], category=item['category'], name=item['name'], url=item\n ['url'], rating=item.get('rating', None), address=item[\n 'address'], latitude=item['latitude'], longitude=item[\n 'longitude'], is_pickup_available=item['is_pickup_available'],\n is_delivery_available=item['is_delivery_available'], is_active=\n item['is_active'], date=dt.datetime.utcnow())\n new_ts = TambonStore(store_id=item['id'], sub_district_id=item[\n 'sub_district_id'], district_id=item['district_id'],\n province_id=item['province_id'], updated_datetime=datetime.utcnow()\n )\n existing_tambon = session.query(TambonGeo2).filter_by(sub_district_id\n =item['sub_district_id'], district_id=item['district_id'],\n province_id=item['province_id']).first()\n if existing_tambon:\n existing_store_info = session.query(FoodPandaStoreInfo2).filter_by(\n id=item['id']).first()\n existing_tambon_store = session.query(TambonStore).filter_by(\n store_id=item['id'], sub_district_id=item['sub_district_id'\n ], district_id=item['district_id'], province_id=item[\n 'province_id']).first()\n if existing_store_info:\n session.merge(existing_store_info)\n if existing_tambon_store:\n session.merge(new_ts)\n else:\n session.add(new_ts)\n else:\n session.add(new_store_info)\n session.add(new_ts)\n menus = item.get('menus', [])\n for menu in menus:\n m = FoodPandaStoreMenu2(id=menu['id'], name=menu['name'],\n type=menu['type'], opening_time=menu['opening_time'],\n closing_time=menu['closing_time'])\n new_store_info.menus.append(m)\n else:\n print('{}, {}, {} is not persisted in TambonGeo'.format(item[\n 'sub_district_id'], item['district_id'], item['province_id']))\n session.commit()\n session.close()\n", "step-4": "from sqlalchemy.orm.session import sessionmaker, query\nfrom FoodPandaStore.FoodPandaStore.model import *\nimport datetime as dt\nfrom datetime import datetime\n\n\nclass FoodpandastoreInfo2Pipeline:\n\n def __init__(self):\n engine = db_connect()\n create_tables(engine)\n self.session = sessionmaker(bind=engine)\n\n def process_item(self, item, spider):\n session = self.session()\n new_store_info = FoodPandaStoreInfo2(id=item['id'], code=item[\n 'code'], category=item['category'], name=item['name'], url=item\n ['url'], rating=item.get('rating', None), address=item[\n 'address'], latitude=item['latitude'], longitude=item[\n 'longitude'], is_pickup_available=item['is_pickup_available'],\n is_delivery_available=item['is_delivery_available'], is_active=\n item['is_active'], date=dt.datetime.utcnow())\n new_ts = TambonStore(store_id=item['id'], sub_district_id=item[\n 'sub_district_id'], district_id=item['district_id'],\n province_id=item['province_id'], updated_datetime=datetime.utcnow()\n )\n existing_tambon = session.query(TambonGeo2).filter_by(sub_district_id\n =item['sub_district_id'], district_id=item['district_id'],\n province_id=item['province_id']).first()\n if 
existing_tambon:\n existing_store_info = session.query(FoodPandaStoreInfo2).filter_by(\n id=item['id']).first()\n existing_tambon_store = session.query(TambonStore).filter_by(\n store_id=item['id'], sub_district_id=item['sub_district_id'\n ], district_id=item['district_id'], province_id=item[\n 'province_id']).first()\n if existing_store_info:\n session.merge(existing_store_info)\n if existing_tambon_store:\n session.merge(new_ts)\n else:\n session.add(new_ts)\n else:\n session.add(new_store_info)\n session.add(new_ts)\n menus = item.get('menus', [])\n for menu in menus:\n m = FoodPandaStoreMenu2(id=menu['id'], name=menu['name'],\n type=menu['type'], opening_time=menu['opening_time'],\n closing_time=menu['closing_time'])\n new_store_info.menus.append(m)\n else:\n print('{}, {}, {} is not persisted in TambonGeo'.format(item[\n 'sub_district_id'], item['district_id'], item['province_id']))\n session.commit()\n session.close()\n", "step-5": "# -*- coding: utf-8 -*-\n\n# Define your item pipelines here\n#\n# Don't forget to add your pipeline to the ITEM_PIPELINES setting\n# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html\n\n\nfrom sqlalchemy.orm.session import sessionmaker, query\nfrom FoodPandaStore.FoodPandaStore.model import *\nimport datetime as dt\nfrom datetime import datetime\n\n\n\n\nclass FoodpandastoreInfo2Pipeline:\n\n def __init__(self):\n engine = db_connect()\n create_tables(engine)\n self.session = sessionmaker(bind=engine)\n\n\n def process_item(self, item, spider):\n\n session = self.session()\n new_store_info = FoodPandaStoreInfo2(\n id=item['id'],\n code=item['code'],\n category=item['category'],\n name=item['name'],\n url=item['url'],\n rating=item.get('rating', None),\n address=item['address'],\n latitude=item['latitude'],\n longitude=item['longitude'],\n is_pickup_available=item['is_pickup_available'],\n is_delivery_available=item['is_delivery_available'],\n is_active=item['is_active'],\n date=dt.datetime.utcnow()\n )\n\n new_ts = TambonStore(\n store_id=item['id'],\n sub_district_id=item['sub_district_id'],\n district_id=item['district_id'],\n province_id=item['province_id'],\n updated_datetime=datetime.utcnow())\n\n existing_tambon = session.query(TambonGeo2).filter_by(sub_district_id = item['sub_district_id'],\n district_id=item['district_id'],\n province_id=item['province_id']).first()\n\n if existing_tambon:\n ## Store\n existing_store_info = session.query(FoodPandaStoreInfo2).filter_by(id=item['id']).first()\n existing_tambon_store = session.query(TambonStore).filter_by(store_id=item['id'],\n sub_district_id=item['sub_district_id'],\n district_id=item['district_id'],\n province_id=item['province_id']).first()\n if existing_store_info:\n session.merge(existing_store_info)\n if existing_tambon_store:\n session.merge(new_ts)\n else:\n session.add(new_ts)\n else:\n session.add(new_store_info)\n session.add(new_ts)\n\n menus = item.get('menus', [])\n for menu in menus:\n m = FoodPandaStoreMenu2(\n id=menu['id'],\n name=menu['name'],\n type=menu['type'],\n opening_time=menu['opening_time'],\n closing_time=menu['closing_time']\n )\n new_store_info.menus.append(m)\n\n\n else:\n print('{}, {}, {} is not persisted in TambonGeo'.format(item['sub_district_id'],\n item['district_id'],\n item['province_id']))\n\n\n session.commit()\n session.close()", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
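The pipeline's header comment points at the ITEM_PIPELINES setting; a matching entry in the project's settings.py could look like this (the dotted path is an assumption based on the import paths used above):

ITEM_PIPELINES = {
    'FoodPandaStore.FoodPandaStore.pipelines.FoodpandastoreInfo2Pipeline': 300,
}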
<|reserved_special_token_0|> def epoch_day(epoch_time): epoch_time = int(epoch_time) return epoch_time - epoch_time % 86400 <|reserved_special_token_0|> def add_ecb(): unix_time = Web3.toInt(epoch_day(time.time())) ECB = ECB_Processor() f = open(ecb_daily_log_path, 'a') if time.strftime('%Y-%m-%d') == ECB.Currency_Dict['time']: for curr in ecb_currencies: curr_code = bytes(curr, encoding='utf-8') curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr]) * 10 ** 9)) tx_hash = contract_instance.add_ecb(unix_time, curr_code, curr_value, transact={'from': web3.eth.accounts[0]}) tx_hash = tx_hash.hex() print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash, curr_code, file=f) else: print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f) f.close() def add_tcmb(): unix_time = Web3.toInt(epoch_day(time.time())) TCMB = TCMB_Processor() f = open(tcmb_daily_log_path, 'a') if time.strftime('%m/%d/%Y') == TCMB.CURRENCY_DICT['Date']: for curr in tcmb_currencies: curr_code = bytes(curr, encoding='utf-8') curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][ 'ForexBuying']) * 10 ** 9)) curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][ 'ForexSelling']) * 10 ** 9)) tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time, curr_code, curr_value_fb, transact={'from': web3.eth. accounts[0]}) tx_hash_fb = tx_hash_fb.hex() print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fb, curr_code, file=f) tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time, curr_code, curr_value_fs, transact={'from': web3.eth. accounts[0]}) tx_hash_fs = tx_hash_fs.hex() print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fs, curr_code, file=f) else: print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f) f.close() <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def epoch_day(epoch_time): epoch_time = int(epoch_time) return epoch_time - epoch_time % 86400 with open('config_ebloc.json') as json_data_file: config_data = json.load(json_data_file) <|reserved_special_token_0|> web3.middleware_stack.inject(geth_poa_middleware, layer=0) <|reserved_special_token_0|> web3.personal.unlockAccount(web3.eth.accounts[0], owner_password) <|reserved_special_token_0|> def add_ecb(): unix_time = Web3.toInt(epoch_day(time.time())) ECB = ECB_Processor() f = open(ecb_daily_log_path, 'a') if time.strftime('%Y-%m-%d') == ECB.Currency_Dict['time']: for curr in ecb_currencies: curr_code = bytes(curr, encoding='utf-8') curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr]) * 10 ** 9)) tx_hash = contract_instance.add_ecb(unix_time, curr_code, curr_value, transact={'from': web3.eth.accounts[0]}) tx_hash = tx_hash.hex() print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash, curr_code, file=f) else: print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f) f.close() def add_tcmb(): unix_time = Web3.toInt(epoch_day(time.time())) TCMB = TCMB_Processor() f = open(tcmb_daily_log_path, 'a') if time.strftime('%m/%d/%Y') == TCMB.CURRENCY_DICT['Date']: for curr in tcmb_currencies: curr_code = bytes(curr, encoding='utf-8') curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][ 'ForexBuying']) * 10 ** 9)) curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][ 'ForexSelling']) * 10 ** 9)) tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time, curr_code, curr_value_fb, transact={'from': web3.eth. 
accounts[0]}) tx_hash_fb = tx_hash_fb.hex() print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fb, curr_code, file=f) tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time, curr_code, curr_value_fs, transact={'from': web3.eth. accounts[0]}) tx_hash_fs = tx_hash_fs.hex() print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fs, curr_code, file=f) else: print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f) f.close() if __name__ == '__main__': add_ecb() add_tcmb() print(time.strftime('%Y-%m-%d %H:%M'), ' DONE EBLOC add_ecb & add_tcmb') <|reserved_special_token_1|> <|reserved_special_token_0|> tcmb_currencies = ['TRY', 'USD', 'AUD', 'DKK', 'EUR', 'GBP', 'CHF', 'SEK', 'CAD', 'KWD', 'NOK', 'SAR', 'JPY', 'BGN', 'RON', 'RUB', 'IRR', 'CNY', 'PKR' ] ecb_currencies = ['EUR', 'USD', 'JPY', 'BGN', 'CZK', 'DKK', 'GBP', 'HUF', 'PLN', 'RON', 'SEK', 'CHF', 'ISK', 'NOK', 'HRK', 'RUB', 'TRY', 'AUD', 'BRL', 'CAD', 'CNY', 'HKD', 'IDR', 'ILS', 'INR', 'KRW', 'MXN', 'MYR', 'NZD', 'PHP', 'SGD', 'THB', 'ZAR'] def epoch_day(epoch_time): epoch_time = int(epoch_time) return epoch_time - epoch_time % 86400 with open('config_ebloc.json') as json_data_file: config_data = json.load(json_data_file) owner_address = config_data['owner']['address'] owner_password = config_data['owner']['password'] contract_address = config_data['contract']['address'] contract_abi = config_data['contract']['abi'] gas = int(config_data['price']['gas']) gas_price = Web3.toWei(int(config_data['price']['gas_price']), 'gwei') ecb_daily_log_path = config_data['log']['ecb_daily'] tcmb_daily_log_path = config_data['log']['tcmb_daily'] geth_ipc_path = config_data['geth']['geth_ipc_path'] contract_address = Web3.toChecksumAddress(contract_address) web3 = Web3(IPCProvider(geth_ipc_path)) web3.middleware_stack.inject(geth_poa_middleware, layer=0) web3.eth.defaultAccount = web3.eth.accounts[0] web3.personal.unlockAccount(web3.eth.accounts[0], owner_password) contract_instance = web3.eth.contract(abi=contract_abi, address= contract_address, ContractFactoryClass=ConciseContract) unix_time = Web3.toInt(epoch_day(time.time())) def add_ecb(): unix_time = Web3.toInt(epoch_day(time.time())) ECB = ECB_Processor() f = open(ecb_daily_log_path, 'a') if time.strftime('%Y-%m-%d') == ECB.Currency_Dict['time']: for curr in ecb_currencies: curr_code = bytes(curr, encoding='utf-8') curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr]) * 10 ** 9)) tx_hash = contract_instance.add_ecb(unix_time, curr_code, curr_value, transact={'from': web3.eth.accounts[0]}) tx_hash = tx_hash.hex() print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash, curr_code, file=f) else: print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f) f.close() def add_tcmb(): unix_time = Web3.toInt(epoch_day(time.time())) TCMB = TCMB_Processor() f = open(tcmb_daily_log_path, 'a') if time.strftime('%m/%d/%Y') == TCMB.CURRENCY_DICT['Date']: for curr in tcmb_currencies: curr_code = bytes(curr, encoding='utf-8') curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][ 'ForexBuying']) * 10 ** 9)) curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][ 'ForexSelling']) * 10 ** 9)) tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time, curr_code, curr_value_fb, transact={'from': web3.eth. accounts[0]}) tx_hash_fb = tx_hash_fb.hex() print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fb, curr_code, file=f) tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time, curr_code, curr_value_fs, transact={'from': web3.eth. 
accounts[0]}) tx_hash_fs = tx_hash_fs.hex() print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fs, curr_code, file=f) else: print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f) f.close() if __name__ == '__main__': add_ecb() add_tcmb() print(time.strftime('%Y-%m-%d %H:%M'), ' DONE EBLOC add_ecb & add_tcmb') <|reserved_special_token_1|> from web3 import Web3, HTTPProvider, IPCProvider from tcmb.tcmb_parser import TCMB_Processor from ecb.ecb_parser import ECB_Processor from web3.contract import ConciseContract from web3.middleware import geth_poa_middleware import json import time tcmb_currencies = ['TRY', 'USD', 'AUD', 'DKK', 'EUR', 'GBP', 'CHF', 'SEK', 'CAD', 'KWD', 'NOK', 'SAR', 'JPY', 'BGN', 'RON', 'RUB', 'IRR', 'CNY', 'PKR' ] ecb_currencies = ['EUR', 'USD', 'JPY', 'BGN', 'CZK', 'DKK', 'GBP', 'HUF', 'PLN', 'RON', 'SEK', 'CHF', 'ISK', 'NOK', 'HRK', 'RUB', 'TRY', 'AUD', 'BRL', 'CAD', 'CNY', 'HKD', 'IDR', 'ILS', 'INR', 'KRW', 'MXN', 'MYR', 'NZD', 'PHP', 'SGD', 'THB', 'ZAR'] def epoch_day(epoch_time): epoch_time = int(epoch_time) return epoch_time - epoch_time % 86400 with open('config_ebloc.json') as json_data_file: config_data = json.load(json_data_file) owner_address = config_data['owner']['address'] owner_password = config_data['owner']['password'] contract_address = config_data['contract']['address'] contract_abi = config_data['contract']['abi'] gas = int(config_data['price']['gas']) gas_price = Web3.toWei(int(config_data['price']['gas_price']), 'gwei') ecb_daily_log_path = config_data['log']['ecb_daily'] tcmb_daily_log_path = config_data['log']['tcmb_daily'] geth_ipc_path = config_data['geth']['geth_ipc_path'] contract_address = Web3.toChecksumAddress(contract_address) web3 = Web3(IPCProvider(geth_ipc_path)) web3.middleware_stack.inject(geth_poa_middleware, layer=0) web3.eth.defaultAccount = web3.eth.accounts[0] web3.personal.unlockAccount(web3.eth.accounts[0], owner_password) contract_instance = web3.eth.contract(abi=contract_abi, address= contract_address, ContractFactoryClass=ConciseContract) unix_time = Web3.toInt(epoch_day(time.time())) def add_ecb(): unix_time = Web3.toInt(epoch_day(time.time())) ECB = ECB_Processor() f = open(ecb_daily_log_path, 'a') if time.strftime('%Y-%m-%d') == ECB.Currency_Dict['time']: for curr in ecb_currencies: curr_code = bytes(curr, encoding='utf-8') curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr]) * 10 ** 9)) tx_hash = contract_instance.add_ecb(unix_time, curr_code, curr_value, transact={'from': web3.eth.accounts[0]}) tx_hash = tx_hash.hex() print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash, curr_code, file=f) else: print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f) f.close() def add_tcmb(): unix_time = Web3.toInt(epoch_day(time.time())) TCMB = TCMB_Processor() f = open(tcmb_daily_log_path, 'a') if time.strftime('%m/%d/%Y') == TCMB.CURRENCY_DICT['Date']: for curr in tcmb_currencies: curr_code = bytes(curr, encoding='utf-8') curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][ 'ForexBuying']) * 10 ** 9)) curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][ 'ForexSelling']) * 10 ** 9)) tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time, curr_code, curr_value_fb, transact={'from': web3.eth. accounts[0]}) tx_hash_fb = tx_hash_fb.hex() print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fb, curr_code, file=f) tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time, curr_code, curr_value_fs, transact={'from': web3.eth. 
accounts[0]}) tx_hash_fs = tx_hash_fs.hex() print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fs, curr_code, file=f) else: print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f) f.close() if __name__ == '__main__': add_ecb() add_tcmb() print(time.strftime('%Y-%m-%d %H:%M'), ' DONE EBLOC add_ecb & add_tcmb') <|reserved_special_token_1|> from web3 import Web3, HTTPProvider, IPCProvider from tcmb.tcmb_parser import TCMB_Processor from ecb.ecb_parser import ECB_Processor from web3.contract import ConciseContract from web3.middleware import geth_poa_middleware import json import time tcmb_currencies = ["TRY", "USD", "AUD", "DKK", "EUR", "GBP", "CHF", "SEK", "CAD", "KWD", "NOK", "SAR", "JPY", "BGN", "RON", "RUB", "IRR", "CNY", "PKR"] ecb_currencies = ["EUR", "USD", "JPY", "BGN", "CZK", "DKK", "GBP", "HUF", "PLN", "RON", "SEK", "CHF", "ISK", "NOK", "HRK", "RUB", "TRY", "AUD", "BRL", "CAD", "CNY", "HKD", "IDR", "ILS", "INR", "KRW", "MXN", "MYR", "NZD", "PHP", "SGD", "THB", "ZAR"] def epoch_day(epoch_time): epoch_time = int(epoch_time) return(epoch_time - (epoch_time % 86400)) with open('config_ebloc.json') as json_data_file: config_data = json.load(json_data_file) owner_address = config_data["owner"]["address"] owner_password = config_data["owner"]["password"] contract_address = config_data["contract"]["address"] contract_abi = config_data["contract"]["abi"] gas = int(config_data["price"]["gas"]) gas_price = Web3.toWei( int(config_data["price"]["gas_price"]), 'gwei') ecb_daily_log_path = config_data["log"]["ecb_daily"] tcmb_daily_log_path = config_data["log"]["tcmb_daily"] geth_ipc_path = config_data["geth"]["geth_ipc_path"] contract_address = Web3.toChecksumAddress(contract_address) web3 = Web3(IPCProvider(geth_ipc_path)) web3.middleware_stack.inject(geth_poa_middleware, layer=0) web3.eth.defaultAccount = web3.eth.accounts[0] web3.personal.unlockAccount(web3.eth.accounts[0], owner_password) contract_instance = web3.eth.contract(abi=contract_abi, address=contract_address, ContractFactoryClass=ConciseContract) unix_time = Web3.toInt(epoch_day(time.time())) def add_ecb(): unix_time = Web3.toInt(epoch_day(time.time())) ECB = ECB_Processor() f = open(ecb_daily_log_path, "a") if(time.strftime("%Y-%m-%d") == ECB.Currency_Dict["time"]): for curr in ecb_currencies: curr_code = bytes(curr, encoding='utf-8') curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr])*(10**9))) tx_hash = contract_instance.add_ecb(unix_time, curr_code, curr_value, transact={'from': web3.eth.accounts[0]}) tx_hash = tx_hash.hex() print(time.strftime("%Y-%m-%d %H:%M"), unix_time, tx_hash, curr_code, file=f) else: print(time.strftime("%Y-%m-%d %H:%M"), unix_time, "Weekend", file=f) f.close() def add_tcmb(): unix_time = Web3.toInt(epoch_day(time.time())) TCMB = TCMB_Processor() f = open(tcmb_daily_log_path, "a") if(time.strftime("%m/%d/%Y") == TCMB.CURRENCY_DICT["Date"]): for curr in tcmb_currencies: curr_code = bytes(curr, encoding='utf-8') curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr]["ForexBuying"])*(10**9))) curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr]["ForexSelling"])*(10**9))) # forex buying tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time, curr_code, curr_value_fb, transact={'from': web3.eth.accounts[0]}) tx_hash_fb = tx_hash_fb.hex() print(time.strftime("%Y-%m-%d %H:%M"), unix_time, tx_hash_fb, curr_code, file=f) # forex selling tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time, curr_code, curr_value_fs, transact={'from': web3.eth.accounts[0]}) 
tx_hash_fs = tx_hash_fs.hex() print(time.strftime("%Y-%m-%d %H:%M"), unix_time, tx_hash_fs, curr_code, file=f) else: print(time.strftime("%Y-%m-%d %H:%M"), unix_time, "Weekend", file=f) f.close() if __name__ == "__main__": add_ecb() add_tcmb() print(time.strftime("%Y-%m-%d %H:%M"), " DONE EBLOC add_ecb & add_tcmb")
flexible
{ "blob_id": "ecd5097d9d497b62b89217ee3c46506f21fc15d2", "index": 5065, "step-1": "<mask token>\n\n\ndef epoch_day(epoch_time):\n epoch_time = int(epoch_time)\n return epoch_time - epoch_time % 86400\n\n\n<mask token>\n\n\ndef add_ecb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n ECB = ECB_Processor()\n f = open(ecb_daily_log_path, 'a')\n if time.strftime('%Y-%m-%d') == ECB.Currency_Dict['time']:\n for curr in ecb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr]) * 10 **\n 9))\n tx_hash = contract_instance.add_ecb(unix_time, curr_code,\n curr_value, transact={'from': web3.eth.accounts[0]})\n tx_hash = tx_hash.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\ndef add_tcmb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n TCMB = TCMB_Processor()\n f = open(tcmb_daily_log_path, 'a')\n if time.strftime('%m/%d/%Y') == TCMB.CURRENCY_DICT['Date']:\n for curr in tcmb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexBuying']) * 10 ** 9))\n curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexSelling']) * 10 ** 9))\n tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time,\n curr_code, curr_value_fb, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fb = tx_hash_fb.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fb,\n curr_code, file=f)\n tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time,\n curr_code, curr_value_fs, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fs = tx_hash_fs.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fs,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef epoch_day(epoch_time):\n epoch_time = int(epoch_time)\n return epoch_time - epoch_time % 86400\n\n\nwith open('config_ebloc.json') as json_data_file:\n config_data = json.load(json_data_file)\n<mask token>\nweb3.middleware_stack.inject(geth_poa_middleware, layer=0)\n<mask token>\nweb3.personal.unlockAccount(web3.eth.accounts[0], owner_password)\n<mask token>\n\n\ndef add_ecb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n ECB = ECB_Processor()\n f = open(ecb_daily_log_path, 'a')\n if time.strftime('%Y-%m-%d') == ECB.Currency_Dict['time']:\n for curr in ecb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr]) * 10 **\n 9))\n tx_hash = contract_instance.add_ecb(unix_time, curr_code,\n curr_value, transact={'from': web3.eth.accounts[0]})\n tx_hash = tx_hash.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\ndef add_tcmb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n TCMB = TCMB_Processor()\n f = open(tcmb_daily_log_path, 'a')\n if time.strftime('%m/%d/%Y') == TCMB.CURRENCY_DICT['Date']:\n for curr in tcmb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexBuying']) * 10 ** 9))\n curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexSelling']) * 10 ** 9))\n tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time,\n curr_code, curr_value_fb, 
transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fb = tx_hash_fb.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fb,\n curr_code, file=f)\n tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time,\n curr_code, curr_value_fs, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fs = tx_hash_fs.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fs,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\nif __name__ == '__main__':\n add_ecb()\n add_tcmb()\n print(time.strftime('%Y-%m-%d %H:%M'), ' DONE EBLOC add_ecb & add_tcmb')\n", "step-3": "<mask token>\ntcmb_currencies = ['TRY', 'USD', 'AUD', 'DKK', 'EUR', 'GBP', 'CHF', 'SEK',\n 'CAD', 'KWD', 'NOK', 'SAR', 'JPY', 'BGN', 'RON', 'RUB', 'IRR', 'CNY', 'PKR'\n ]\necb_currencies = ['EUR', 'USD', 'JPY', 'BGN', 'CZK', 'DKK', 'GBP', 'HUF',\n 'PLN', 'RON', 'SEK', 'CHF', 'ISK', 'NOK', 'HRK', 'RUB', 'TRY', 'AUD',\n 'BRL', 'CAD', 'CNY', 'HKD', 'IDR', 'ILS', 'INR', 'KRW', 'MXN', 'MYR',\n 'NZD', 'PHP', 'SGD', 'THB', 'ZAR']\n\n\ndef epoch_day(epoch_time):\n epoch_time = int(epoch_time)\n return epoch_time - epoch_time % 86400\n\n\nwith open('config_ebloc.json') as json_data_file:\n config_data = json.load(json_data_file)\nowner_address = config_data['owner']['address']\nowner_password = config_data['owner']['password']\ncontract_address = config_data['contract']['address']\ncontract_abi = config_data['contract']['abi']\ngas = int(config_data['price']['gas'])\ngas_price = Web3.toWei(int(config_data['price']['gas_price']), 'gwei')\necb_daily_log_path = config_data['log']['ecb_daily']\ntcmb_daily_log_path = config_data['log']['tcmb_daily']\ngeth_ipc_path = config_data['geth']['geth_ipc_path']\ncontract_address = Web3.toChecksumAddress(contract_address)\nweb3 = Web3(IPCProvider(geth_ipc_path))\nweb3.middleware_stack.inject(geth_poa_middleware, layer=0)\nweb3.eth.defaultAccount = web3.eth.accounts[0]\nweb3.personal.unlockAccount(web3.eth.accounts[0], owner_password)\ncontract_instance = web3.eth.contract(abi=contract_abi, address=\n contract_address, ContractFactoryClass=ConciseContract)\nunix_time = Web3.toInt(epoch_day(time.time()))\n\n\ndef add_ecb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n ECB = ECB_Processor()\n f = open(ecb_daily_log_path, 'a')\n if time.strftime('%Y-%m-%d') == ECB.Currency_Dict['time']:\n for curr in ecb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr]) * 10 **\n 9))\n tx_hash = contract_instance.add_ecb(unix_time, curr_code,\n curr_value, transact={'from': web3.eth.accounts[0]})\n tx_hash = tx_hash.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\ndef add_tcmb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n TCMB = TCMB_Processor()\n f = open(tcmb_daily_log_path, 'a')\n if time.strftime('%m/%d/%Y') == TCMB.CURRENCY_DICT['Date']:\n for curr in tcmb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexBuying']) * 10 ** 9))\n curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexSelling']) * 10 ** 9))\n tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time,\n curr_code, curr_value_fb, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fb = tx_hash_fb.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 
tx_hash_fb,\n curr_code, file=f)\n tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time,\n curr_code, curr_value_fs, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fs = tx_hash_fs.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fs,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\nif __name__ == '__main__':\n add_ecb()\n add_tcmb()\n print(time.strftime('%Y-%m-%d %H:%M'), ' DONE EBLOC add_ecb & add_tcmb')\n", "step-4": "from web3 import Web3, HTTPProvider, IPCProvider\nfrom tcmb.tcmb_parser import TCMB_Processor\nfrom ecb.ecb_parser import ECB_Processor\nfrom web3.contract import ConciseContract\nfrom web3.middleware import geth_poa_middleware\nimport json\nimport time\ntcmb_currencies = ['TRY', 'USD', 'AUD', 'DKK', 'EUR', 'GBP', 'CHF', 'SEK',\n 'CAD', 'KWD', 'NOK', 'SAR', 'JPY', 'BGN', 'RON', 'RUB', 'IRR', 'CNY', 'PKR'\n ]\necb_currencies = ['EUR', 'USD', 'JPY', 'BGN', 'CZK', 'DKK', 'GBP', 'HUF',\n 'PLN', 'RON', 'SEK', 'CHF', 'ISK', 'NOK', 'HRK', 'RUB', 'TRY', 'AUD',\n 'BRL', 'CAD', 'CNY', 'HKD', 'IDR', 'ILS', 'INR', 'KRW', 'MXN', 'MYR',\n 'NZD', 'PHP', 'SGD', 'THB', 'ZAR']\n\n\ndef epoch_day(epoch_time):\n epoch_time = int(epoch_time)\n return epoch_time - epoch_time % 86400\n\n\nwith open('config_ebloc.json') as json_data_file:\n config_data = json.load(json_data_file)\nowner_address = config_data['owner']['address']\nowner_password = config_data['owner']['password']\ncontract_address = config_data['contract']['address']\ncontract_abi = config_data['contract']['abi']\ngas = int(config_data['price']['gas'])\ngas_price = Web3.toWei(int(config_data['price']['gas_price']), 'gwei')\necb_daily_log_path = config_data['log']['ecb_daily']\ntcmb_daily_log_path = config_data['log']['tcmb_daily']\ngeth_ipc_path = config_data['geth']['geth_ipc_path']\ncontract_address = Web3.toChecksumAddress(contract_address)\nweb3 = Web3(IPCProvider(geth_ipc_path))\nweb3.middleware_stack.inject(geth_poa_middleware, layer=0)\nweb3.eth.defaultAccount = web3.eth.accounts[0]\nweb3.personal.unlockAccount(web3.eth.accounts[0], owner_password)\ncontract_instance = web3.eth.contract(abi=contract_abi, address=\n contract_address, ContractFactoryClass=ConciseContract)\nunix_time = Web3.toInt(epoch_day(time.time()))\n\n\ndef add_ecb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n ECB = ECB_Processor()\n f = open(ecb_daily_log_path, 'a')\n if time.strftime('%Y-%m-%d') == ECB.Currency_Dict['time']:\n for curr in ecb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr]) * 10 **\n 9))\n tx_hash = contract_instance.add_ecb(unix_time, curr_code,\n curr_value, transact={'from': web3.eth.accounts[0]})\n tx_hash = tx_hash.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\ndef add_tcmb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n TCMB = TCMB_Processor()\n f = open(tcmb_daily_log_path, 'a')\n if time.strftime('%m/%d/%Y') == TCMB.CURRENCY_DICT['Date']:\n for curr in tcmb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexBuying']) * 10 ** 9))\n curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexSelling']) * 10 ** 9))\n tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time,\n curr_code, curr_value_fb, transact={'from': 
web3.eth.\n accounts[0]})\n tx_hash_fb = tx_hash_fb.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fb,\n curr_code, file=f)\n tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time,\n curr_code, curr_value_fs, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fs = tx_hash_fs.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fs,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\nif __name__ == '__main__':\n add_ecb()\n add_tcmb()\n print(time.strftime('%Y-%m-%d %H:%M'), ' DONE EBLOC add_ecb & add_tcmb')\n", "step-5": "from web3 import Web3, HTTPProvider, IPCProvider\nfrom tcmb.tcmb_parser import TCMB_Processor\nfrom ecb.ecb_parser import ECB_Processor\nfrom web3.contract import ConciseContract\nfrom web3.middleware import geth_poa_middleware\nimport json\nimport time\n\ntcmb_currencies = [\"TRY\", \"USD\", \"AUD\", \"DKK\", \"EUR\", \"GBP\", \"CHF\", \"SEK\", \"CAD\", \n\t\t\"KWD\", \"NOK\", \"SAR\", \"JPY\", \"BGN\", \"RON\", \"RUB\", \"IRR\", \"CNY\", \"PKR\"]\n\necb_currencies = [\"EUR\", \"USD\", \"JPY\", \"BGN\", \"CZK\", \"DKK\", \"GBP\", \"HUF\", \"PLN\", \n\t\t\"RON\", \"SEK\", \"CHF\", \"ISK\", \"NOK\", \"HRK\", \"RUB\", \"TRY\", \"AUD\", \"BRL\", \n\t\t\"CAD\", \"CNY\", \"HKD\", \"IDR\", \"ILS\", \"INR\", \"KRW\", \"MXN\", \"MYR\", \"NZD\", \n\t\t\"PHP\", \"SGD\", \"THB\", \"ZAR\"]\n\ndef epoch_day(epoch_time):\n\tepoch_time = int(epoch_time)\n\treturn(epoch_time - (epoch_time % 86400))\n\nwith open('config_ebloc.json') as json_data_file:\n\tconfig_data = json.load(json_data_file)\n\nowner_address = config_data[\"owner\"][\"address\"]\nowner_password = config_data[\"owner\"][\"password\"]\ncontract_address = config_data[\"contract\"][\"address\"]\ncontract_abi = config_data[\"contract\"][\"abi\"]\ngas = int(config_data[\"price\"][\"gas\"])\ngas_price = Web3.toWei( int(config_data[\"price\"][\"gas_price\"]), 'gwei')\necb_daily_log_path = config_data[\"log\"][\"ecb_daily\"]\ntcmb_daily_log_path = config_data[\"log\"][\"tcmb_daily\"]\ngeth_ipc_path = config_data[\"geth\"][\"geth_ipc_path\"]\n\ncontract_address = Web3.toChecksumAddress(contract_address)\n\nweb3 = Web3(IPCProvider(geth_ipc_path))\nweb3.middleware_stack.inject(geth_poa_middleware, layer=0)\n\nweb3.eth.defaultAccount = web3.eth.accounts[0]\nweb3.personal.unlockAccount(web3.eth.accounts[0], owner_password)\n\ncontract_instance = web3.eth.contract(abi=contract_abi, address=contract_address, ContractFactoryClass=ConciseContract)\n\nunix_time = Web3.toInt(epoch_day(time.time()))\n\ndef add_ecb():\n\tunix_time = Web3.toInt(epoch_day(time.time()))\n\tECB = ECB_Processor()\n\tf = open(ecb_daily_log_path, \"a\")\n\tif(time.strftime(\"%Y-%m-%d\") == ECB.Currency_Dict[\"time\"]):\n\t\tfor curr in ecb_currencies:\n\t\t\tcurr_code = bytes(curr, encoding='utf-8')\n\t\t\tcurr_value = web3.toInt(int(float(ECB.Currency_Dict[curr])*(10**9)))\n\t\t\ttx_hash = contract_instance.add_ecb(unix_time, curr_code, curr_value, transact={'from': web3.eth.accounts[0]})\n\t\t\ttx_hash = tx_hash.hex()\n\t\t\tprint(time.strftime(\"%Y-%m-%d %H:%M\"), unix_time, tx_hash, curr_code, file=f)\n\telse:\n\t\tprint(time.strftime(\"%Y-%m-%d %H:%M\"), unix_time, \"Weekend\", file=f)\n\tf.close()\n\ndef add_tcmb():\n\tunix_time = Web3.toInt(epoch_day(time.time()))\n\tTCMB = TCMB_Processor()\n\tf = open(tcmb_daily_log_path, \"a\")\n\tif(time.strftime(\"%m/%d/%Y\") == TCMB.CURRENCY_DICT[\"Date\"]):\n\t\tfor curr in tcmb_currencies:\n\t\t\tcurr_code = 
bytes(curr, encoding='utf-8')\n\t\t\tcurr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\"ForexBuying\"])*(10**9)))\n\t\t\tcurr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\"ForexSelling\"])*(10**9)))\n\t\t\t# forex buying\n\t\t\ttx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time, curr_code, curr_value_fb, transact={'from': web3.eth.accounts[0]})\n\t\t\ttx_hash_fb = tx_hash_fb.hex()\n\t\t\tprint(time.strftime(\"%Y-%m-%d %H:%M\"), unix_time, tx_hash_fb, curr_code, file=f)\n\t\t\t# forex selling\n\t\t\ttx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time, curr_code, curr_value_fs, transact={'from': web3.eth.accounts[0]})\n\t\t\ttx_hash_fs = tx_hash_fs.hex()\n\t\t\tprint(time.strftime(\"%Y-%m-%d %H:%M\"), unix_time, tx_hash_fs, curr_code, file=f)\n\telse:\n\t\tprint(time.strftime(\"%Y-%m-%d %H:%M\"), unix_time, \"Weekend\", file=f)\n\tf.close()\n\n\nif __name__ == \"__main__\":\n\tadd_ecb()\n\tadd_tcmb()\n\tprint(time.strftime(\"%Y-%m-%d %H:%M\"), \" DONE EBLOC add_ecb & add_tcmb\")", "step-ids": [ 3, 4, 5, 6, 7 ] }
[ 3, 4, 5, 6, 7 ]
<|reserved_special_token_0|> class CategoriesControllerTests(ControllerTestBase): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class CategoriesControllerTests(ControllerTestBase): @classmethod def setUpClass(cls): super(CategoriesControllerTests, cls).setUpClass() cls.response_catcher = HttpResponseCatcher() cls.controller = CategoriesController(cls.config, cls.response_catcher) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class CategoriesControllerTests(ControllerTestBase): @classmethod def setUpClass(cls): super(CategoriesControllerTests, cls).setUpClass() cls.response_catcher = HttpResponseCatcher() cls.controller = CategoriesController(cls.config, cls.response_catcher) def test_retrieve_a_paginated_list_of_categories_by_query_parameter_s(self ): result = (self.controller. retrieve_a_paginated_list_of_categories_by_query_parameter_s()) self.assertEquals(self.response_catcher.response.status_code, 200) expected_headers = {} expected_headers['trace-id'] = None expected_headers['content-type'] = 'application/json' self.assertTrue(TestHelper.match_headers(expected_headers, self. response_catcher.response.headers)) <|reserved_special_token_1|> <|reserved_special_token_0|> import json import dateutil.parser from tests.controllers.controller_test_base import ControllerTestBase from tests.test_helper import TestHelper from tests.http_response_catcher import HttpResponseCatcher from plastiqpublicapi.api_helper import APIHelper from plastiqpublicapi.controllers.categories_controller import CategoriesController class CategoriesControllerTests(ControllerTestBase): @classmethod def setUpClass(cls): super(CategoriesControllerTests, cls).setUpClass() cls.response_catcher = HttpResponseCatcher() cls.controller = CategoriesController(cls.config, cls.response_catcher) def test_retrieve_a_paginated_list_of_categories_by_query_parameter_s(self ): result = (self.controller. retrieve_a_paginated_list_of_categories_by_query_parameter_s()) self.assertEquals(self.response_catcher.response.status_code, 200) expected_headers = {} expected_headers['trace-id'] = None expected_headers['content-type'] = 'application/json' self.assertTrue(TestHelper.match_headers(expected_headers, self. response_catcher.response.headers)) <|reserved_special_token_1|> # -*- coding: utf-8 -*- """ plastiqpublicapi This file was automatically generated by APIMATIC v3.0 ( https://www.apimatic.io ). 
""" import json import dateutil.parser from tests.controllers.controller_test_base import ControllerTestBase from tests.test_helper import TestHelper from tests.http_response_catcher import HttpResponseCatcher from plastiqpublicapi.api_helper import APIHelper from plastiqpublicapi.controllers.categories_controller import CategoriesController class CategoriesControllerTests(ControllerTestBase): @classmethod def setUpClass(cls): super(CategoriesControllerTests, cls).setUpClass() cls.response_catcher = HttpResponseCatcher() cls.controller = CategoriesController(cls.config, cls.response_catcher) # Retrieve a paginated list of Categories by query parameter(s) def test_retrieve_a_paginated_list_of_categories_by_query_parameter_s(self): # Perform the API call through the SDK function result = self.controller.retrieve_a_paginated_list_of_categories_by_query_parameter_s() # Test response code self.assertEquals(self.response_catcher.response.status_code, 200) # Test headers expected_headers = {} expected_headers['trace-id'] = None expected_headers['content-type'] = 'application/json' self.assertTrue(TestHelper.match_headers(expected_headers, self.response_catcher.response.headers))
flexible
{ "blob_id": "a4f2418e746cc43bd407b6a212de9802044351e1", "index": 3928, "step-1": "<mask token>\n\n\nclass CategoriesControllerTests(ControllerTestBase):\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass CategoriesControllerTests(ControllerTestBase):\n\n @classmethod\n def setUpClass(cls):\n super(CategoriesControllerTests, cls).setUpClass()\n cls.response_catcher = HttpResponseCatcher()\n cls.controller = CategoriesController(cls.config, cls.response_catcher)\n <mask token>\n", "step-3": "<mask token>\n\n\nclass CategoriesControllerTests(ControllerTestBase):\n\n @classmethod\n def setUpClass(cls):\n super(CategoriesControllerTests, cls).setUpClass()\n cls.response_catcher = HttpResponseCatcher()\n cls.controller = CategoriesController(cls.config, cls.response_catcher)\n\n def test_retrieve_a_paginated_list_of_categories_by_query_parameter_s(self\n ):\n result = (self.controller.\n retrieve_a_paginated_list_of_categories_by_query_parameter_s())\n self.assertEquals(self.response_catcher.response.status_code, 200)\n expected_headers = {}\n expected_headers['trace-id'] = None\n expected_headers['content-type'] = 'application/json'\n self.assertTrue(TestHelper.match_headers(expected_headers, self.\n response_catcher.response.headers))\n", "step-4": "<mask token>\nimport json\nimport dateutil.parser\nfrom tests.controllers.controller_test_base import ControllerTestBase\nfrom tests.test_helper import TestHelper\nfrom tests.http_response_catcher import HttpResponseCatcher\nfrom plastiqpublicapi.api_helper import APIHelper\nfrom plastiqpublicapi.controllers.categories_controller import CategoriesController\n\n\nclass CategoriesControllerTests(ControllerTestBase):\n\n @classmethod\n def setUpClass(cls):\n super(CategoriesControllerTests, cls).setUpClass()\n cls.response_catcher = HttpResponseCatcher()\n cls.controller = CategoriesController(cls.config, cls.response_catcher)\n\n def test_retrieve_a_paginated_list_of_categories_by_query_parameter_s(self\n ):\n result = (self.controller.\n retrieve_a_paginated_list_of_categories_by_query_parameter_s())\n self.assertEquals(self.response_catcher.response.status_code, 200)\n expected_headers = {}\n expected_headers['trace-id'] = None\n expected_headers['content-type'] = 'application/json'\n self.assertTrue(TestHelper.match_headers(expected_headers, self.\n response_catcher.response.headers))\n", "step-5": "# -*- coding: utf-8 -*-\r\n\r\n\"\"\"\r\nplastiqpublicapi\r\n\r\nThis file was automatically generated by APIMATIC v3.0 (\r\n https://www.apimatic.io ).\r\n\"\"\"\r\n\r\nimport json\r\nimport dateutil.parser\r\n\r\nfrom tests.controllers.controller_test_base import ControllerTestBase\r\nfrom tests.test_helper import TestHelper\r\nfrom tests.http_response_catcher import HttpResponseCatcher\r\nfrom plastiqpublicapi.api_helper import APIHelper\r\nfrom plastiqpublicapi.controllers.categories_controller import CategoriesController\r\n\r\n\r\nclass CategoriesControllerTests(ControllerTestBase):\r\n\r\n @classmethod\r\n def setUpClass(cls):\r\n super(CategoriesControllerTests, cls).setUpClass()\r\n cls.response_catcher = HttpResponseCatcher()\r\n cls.controller = CategoriesController(cls.config, cls.response_catcher)\r\n\r\n # Retrieve a paginated list of Categories by query parameter(s)\r\n def test_retrieve_a_paginated_list_of_categories_by_query_parameter_s(self):\r\n\r\n # Perform the API call through the SDK function\r\n result = self.controller.retrieve_a_paginated_list_of_categories_by_query_parameter_s()\r\n\r\n # Test response 
code\r\n self.assertEquals(self.response_catcher.response.status_code, 200)\r\n\r\n # Test headers\r\n expected_headers = {}\r\n expected_headers['trace-id'] = None\r\n expected_headers['content-type'] = 'application/json'\r\n\r\n self.assertTrue(TestHelper.match_headers(expected_headers, self.response_catcher.response.headers))\r\n\r\n\r\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
__version__ = "2.1.2"

# Point Django at the app configuration to load for this package.
default_app_config = "channels.apps.ChannelsConfig"
# Alias of the channel layer used when none is specified explicitly.
DEFAULT_CHANNEL_LAYER = "default"
normal
{ "blob_id": "92e414c76f4c585092a356d7d2957e91c1477c5f", "index": 5658, "step-1": "<mask token>\n", "step-2": "__version__ = '2.1.2'\ndefault_app_config = 'channels.apps.ChannelsConfig'\nDEFAULT_CHANNEL_LAYER = 'default'\n", "step-3": "__version__ = \"2.1.2\"\n\ndefault_app_config = \"channels.apps.ChannelsConfig\"\nDEFAULT_CHANNEL_LAYER = \"default\"\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
try:
    import RPi.GPIO as GPIO
    import time
    import numpy as np
    import matplotlib.pyplot as plt
    from os.path import dirname, join as pjoin
    from scipy.io import wavfile
    import scipy.io
except ImportError:
    print ("Import error!")
    raise SystemExit

try:
    chan_list = (26, 19, 13, 6, 5, 11, 9, 10)
    GPIO.setmode (GPIO.BCM)
    GPIO.setup (chan_list, GPIO.OUT)
except:
    print ("GPIO Initialization error!")
    raise SystemExit


# unpack an 8-bit value into its bits, most significant bit first
def decToBinList (decNumber):
    if decNumber < 0 or decNumber > 255:
        raise ValueError
    return [(int(decNumber) & (1 << i)) >> i for i in range (7, -1, -1)]

# drive the 8 DAC pins with the bit pattern of value
def num2dac (value):
    x = decToBinList (value)
    GPIO.output (chan_list, tuple (x))

wav_fname = pjoin('SOUND.WAV')
samplerate, data = wavfile.read(wav_fname)
length = data.shape[0] / samplerate

print ("length: ", int(length), "s, number of channels: ", data.shape[1], ", Sample Rate: ", samplerate, ", data type: ", type (data[1, 0]))

try:
    # shift signed 16-bit samples to 0..65535, then scale to the 8-bit DAC range
    for i in data[:, 0]:
        num2dac ((int(i) + 32768) / 256)
except ValueError:
    print ("Error in input data size. Exiting the program")
except:
    print ("Unknown error. Exiting the program")
finally:
    GPIO.output (chan_list, 0)
    GPIO.cleanup (chan_list)
normal
{ "blob_id": "675d564ad60870f49b88dece480d5a50a30491df", "index": 4907, "step-1": "<mask token>\n\n\ndef decToBinList(decNumber):\n if decNumber < 0 or decNumber > 255:\n raise ValueError\n return [((int(decNumber) & 1 << i) >> i) for i in range(7, -1, -1)]\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef decToBinList(decNumber):\n if decNumber < 0 or decNumber > 255:\n raise ValueError\n return [((int(decNumber) & 1 << i) >> i) for i in range(7, -1, -1)]\n\n\ndef num2dac(value):\n x = decToBinList(value)\n GPIO.output(chan_list, tuple(x))\n\n\n<mask token>\n", "step-3": "try:\n import RPi.GPIO as GPIO\n import time\n import numpy as np\n import matplotlib.pyplot as plt\n from os.path import dirname, join as pjoin\n from scipy.io import wavfile\n import scipy.io\nexcept ImportError:\n print('Import error!')\n raise SystemExit\ntry:\n chan_list = 26, 19, 13, 6, 5, 11, 9, 10\n GPIO.setmode(GPIO.BCM)\n GPIO.setup(chan_list, GPIO.OUT)\nexcept:\n print('GPIO Initialization error!')\n raise SystemExit\n\n\ndef decToBinList(decNumber):\n if decNumber < 0 or decNumber > 255:\n raise ValueError\n return [((int(decNumber) & 1 << i) >> i) for i in range(7, -1, -1)]\n\n\ndef num2dac(value):\n x = decToBinList(value)\n GPIO.output(chan_list, tuple(x))\n\n\n<mask token>\nprint('length: ', int(length), 's, number of channels: ', data.shape[1],\n ', Sample Rate: ', samplerate, ', data type: ', type(data[1, 0]))\ntry:\n for i in data[:, 0]:\n num2dac((int(i) + 32768) / 256)\nexcept ValueError:\n print('Ошибка в в размере входных данных. Выходим из программы')\nexcept:\n print('Неизвестная ошибка. Выходим из программы')\nfinally:\n GPIO.output(chan_list, 0)\n GPIO.cleanup(chan_list)\n", "step-4": "try:\n import RPi.GPIO as GPIO\n import time\n import numpy as np\n import matplotlib.pyplot as plt\n from os.path import dirname, join as pjoin\n from scipy.io import wavfile\n import scipy.io\nexcept ImportError:\n print('Import error!')\n raise SystemExit\ntry:\n chan_list = 26, 19, 13, 6, 5, 11, 9, 10\n GPIO.setmode(GPIO.BCM)\n GPIO.setup(chan_list, GPIO.OUT)\nexcept:\n print('GPIO Initialization error!')\n raise SystemExit\n\n\ndef decToBinList(decNumber):\n if decNumber < 0 or decNumber > 255:\n raise ValueError\n return [((int(decNumber) & 1 << i) >> i) for i in range(7, -1, -1)]\n\n\ndef num2dac(value):\n x = decToBinList(value)\n GPIO.output(chan_list, tuple(x))\n\n\nwav_fname = pjoin('SOUND.WAV')\nsamplerate, data = wavfile.read(wav_fname)\nlength = data.shape[0] / samplerate\nprint('length: ', int(length), 's, number of channels: ', data.shape[1],\n ', Sample Rate: ', samplerate, ', data type: ', type(data[1, 0]))\ntry:\n for i in data[:, 0]:\n num2dac((int(i) + 32768) / 256)\nexcept ValueError:\n print('Ошибка в в размере входных данных. Выходим из программы')\nexcept:\n print('Неизвестная ошибка. 
Выходим из программы')\nfinally:\n GPIO.output(chan_list, 0)\n GPIO.cleanup(chan_list)\n", "step-5": "try:\n import RPi.GPIO as GPIO\n import time\n import numpy as np\n import matplotlib.pyplot as plt\n from os.path import dirname, join as pjoin\n from scipy.io import wavfile\n import scipy.io\nexcept ImportError:\n print (\"Import error!\")\n raise SystemExit\n \ntry:\n chan_list = (26, 19, 13, 6, 5, 11, 9, 10)\n GPIO.setmode (GPIO.BCM)\n GPIO.setup (chan_list, GPIO.OUT)\nexcept:\n print (\"GPIO Initialization error!\")\n raise SystemExit\n \n \ndef decToBinList (decNumber):\n if decNumber < 0 or decNumber > 255:\n raise ValueError\n return [(int(decNumber) & (1 << i)) >> i for i in range (7, -1, -1)]\n \ndef num2dac (value):\n x = decToBinList (value)\n GPIO.output (chan_list, tuple (x))\n\nwav_fname = pjoin('SOUND.WAV')\nsamplerate, data = wavfile.read(wav_fname)\nlength = data.shape[0] / samplerate\n\nprint (\"length: \", int(length), \"s, number of channels: \", data.shape[1], \", Sample Rate: \", samplerate, \", data type: \", type (data[1, 0]))\n\ntry:\n for i in data[:, 0]:\n num2dac ((int(i) + 32768) / 256)\nexcept ValueError:\n print (\"Ошибка в в размере входных данных. Выходим из программы\")\nexcept:\n print (\"Неизвестная ошибка. Выходим из программы\")\nfinally:\n GPIO.output (chan_list, 0)\n GPIO.cleanup (chan_list)", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> class LSTMRNN(object): def __init__(self, n_steps, input_size, output_size, cell_size, batch_size ): self.n_steps = n_steps self.input_size = input_size self.output_size = output_size self.cell_size = cell_size self.batch_size = batch_size with tf.name_scope('inputs'): self.xs = tf.placeholder(tf.float32, [None, n_steps, input_size ], name='xs') self.ys = tf.placeholder(tf.float32, [None, n_steps, input_size ], name='ys') with tf.variable_scope('in_hidden'): self.add_input_layer() with tf.variable_scope('LSTM_cell'): self.add_cell() with tf.variable_scope('out_hidden'): self.add_output_layer() with tf.name_scope('cost'): self.compute_cost() with tf.name_scope('train'): self.train_op = tf.train.AdamOptimizer(LR).minimize(self.cost) <|reserved_special_token_0|> def add_cell(self): lstm_cell = tf.contrib.rnn.BasicLSTMCell(self.cell_size, forget_bias=1.0, state_is_tuple=True) with tf.name_scope('initial_state'): self.cell_init_state = lstm_cell.zero_state(self.batch_size, dtype=tf.float32) self.cell_outputs, self.cell_final_state = tf.nn.dynamic_rnn(lstm_cell, self.l_in_y, initial_state=self.cell_init_state, time_major=False) def add_output_layer(self): l_out_x = tf.reshape(self.cell_outputs, [-1, self.cell_size], name= '2_2D') Ws_out = self._weight_variable([self.cell_size, self.output_size]) bs_out = self._bias_variable([self.output_size]) with tf.name_scope('Wx_plus_b'): self.pred = tf.matmul(l_out_x, Ws_out) + bs_out def compute_cost(self): losses = tf.contrib.legacy_seq2seq.sequence_loss_by_example([tf. reshape(self.pred, [-1], name='reshape_pred')], [tf.reshape( self.ys, [-1], name='reshape_target')], [tf.ones([self. batch_size * self.n_steps], dtype=tf.float32)], average_across_timesteps=True, softmax_loss_function=self. 
ms_error, name='losses') with tf.name_scope('average_cost'): self.cost = tf.div(tf.reduce_sum(losses, name='losses_sum'), self.batch_size, name='average_cost') tf.summary.scalar('cost', self.cost) @staticmethod def ms_error(labels, logits): return tf.square(tf.subtract(labels, logits)) def _weight_variable(self, shape, name='weights'): initializer = tf.random_normal_initializer(mean=0.0, stddev=1.0) return tf.get_variable(shape=shape, initializer=initializer, name=name) def _bias_variable(self, shape, name='biases'): initializer = tf.constant_initializer(0.1) return tf.get_variable(shape=shape, initializer=initializer, name=name) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def get_batch(): global BATCH_START, TIME_STEPS xs = np.arange(BATCH_START, BATCH_START + TIME_STEPS * BATCH_SIZE).reshape( (BATCH_SIZE, TIME_STEPS)) / (10 * np.pi) seq = np.sin(xs) res = np.cos(xs) BATCH_START += TIME_STEPS return [seq[:, :, np.newaxis], res[:, :, np.newaxis], xs] class LSTMRNN(object): def __init__(self, n_steps, input_size, output_size, cell_size, batch_size ): self.n_steps = n_steps self.input_size = input_size self.output_size = output_size self.cell_size = cell_size self.batch_size = batch_size with tf.name_scope('inputs'): self.xs = tf.placeholder(tf.float32, [None, n_steps, input_size ], name='xs') self.ys = tf.placeholder(tf.float32, [None, n_steps, input_size ], name='ys') with tf.variable_scope('in_hidden'): self.add_input_layer() with tf.variable_scope('LSTM_cell'): self.add_cell() with tf.variable_scope('out_hidden'): self.add_output_layer() with tf.name_scope('cost'): self.compute_cost() with tf.name_scope('train'): self.train_op = tf.train.AdamOptimizer(LR).minimize(self.cost) def add_input_layer(self): l_in_x = tf.reshape(self.xs, [-1, self.input_size], name='2_2D') Ws_in = self._weight_variable([self.input_size, self.cell_size]) bs_in = self._bias_variable([self.cell_size]) with tf.name_scope('Wx_plus_b'): l_in_y = tf.matmul(l_in_x, Ws_in) + bs_in self.l_in_y = tf.reshape(l_in_y, [-1, self.n_steps, self.cell_size], name='2_3D') def add_cell(self): lstm_cell = tf.contrib.rnn.BasicLSTMCell(self.cell_size, forget_bias=1.0, state_is_tuple=True) with tf.name_scope('initial_state'): self.cell_init_state = lstm_cell.zero_state(self.batch_size, dtype=tf.float32) self.cell_outputs, self.cell_final_state = tf.nn.dynamic_rnn(lstm_cell, self.l_in_y, initial_state=self.cell_init_state, time_major=False) def add_output_layer(self): l_out_x = tf.reshape(self.cell_outputs, [-1, self.cell_size], name= '2_2D') Ws_out = self._weight_variable([self.cell_size, self.output_size]) bs_out = self._bias_variable([self.output_size]) with tf.name_scope('Wx_plus_b'): self.pred = tf.matmul(l_out_x, Ws_out) + bs_out def compute_cost(self): losses = tf.contrib.legacy_seq2seq.sequence_loss_by_example([tf. reshape(self.pred, [-1], name='reshape_pred')], [tf.reshape( self.ys, [-1], name='reshape_target')], [tf.ones([self. batch_size * self.n_steps], dtype=tf.float32)], average_across_timesteps=True, softmax_loss_function=self. 
ms_error, name='losses') with tf.name_scope('average_cost'): self.cost = tf.div(tf.reduce_sum(losses, name='losses_sum'), self.batch_size, name='average_cost') tf.summary.scalar('cost', self.cost) @staticmethod def ms_error(labels, logits): return tf.square(tf.subtract(labels, logits)) def _weight_variable(self, shape, name='weights'): initializer = tf.random_normal_initializer(mean=0.0, stddev=1.0) return tf.get_variable(shape=shape, initializer=initializer, name=name) def _bias_variable(self, shape, name='biases'): initializer = tf.constant_initializer(0.1) return tf.get_variable(shape=shape, initializer=initializer, name=name) if __name__ == '__main__': model = LSTMRNN(TIME_STEPS, INPUT_SIZE, OUTPUT_SIZE, CELL_SIZE, BATCH_SIZE) sess = tf.Session() merged = tf.summary.merge_all() writer = tf.summary.FileWriter('lstmlogs', sess.graph) sess.run(tf.global_variables_initializer()) plt.ion() plt.show() for i in range(200): seq, res, xs = get_batch() if i == 0: feed_dict = {model.xs: seq, model.ys: res} else: feed_dict = {model.xs: seq, model.ys: res, model. cell_init_state: state} _, cost, state, pred = sess.run([model.train_op, model.cost, model. cell_final_state, model.pred], feed_dict=feed_dict) plt.plot(xs[0, :], res[0].flatten(), 'r', xs[0, :], pred.flatten()[ :TIME_STEPS], 'b--') plt.ylim((-1.2, 1.2)) plt.draw() plt.pause(0.3) if i % 20 == 0: print('cost', round(cost, 4)) result = sess.run(merged, feed_dict) writer.add_summary(result, i) <|reserved_special_token_1|> <|reserved_special_token_0|> BATCH_START = 0 TIME_STEPS = 20 BATCH_SIZE = 50 INPUT_SIZE = 1 OUTPUT_SIZE = 1 CELL_SIZE = 10 LR = 0.006 def get_batch(): global BATCH_START, TIME_STEPS xs = np.arange(BATCH_START, BATCH_START + TIME_STEPS * BATCH_SIZE).reshape( (BATCH_SIZE, TIME_STEPS)) / (10 * np.pi) seq = np.sin(xs) res = np.cos(xs) BATCH_START += TIME_STEPS return [seq[:, :, np.newaxis], res[:, :, np.newaxis], xs] class LSTMRNN(object): def __init__(self, n_steps, input_size, output_size, cell_size, batch_size ): self.n_steps = n_steps self.input_size = input_size self.output_size = output_size self.cell_size = cell_size self.batch_size = batch_size with tf.name_scope('inputs'): self.xs = tf.placeholder(tf.float32, [None, n_steps, input_size ], name='xs') self.ys = tf.placeholder(tf.float32, [None, n_steps, input_size ], name='ys') with tf.variable_scope('in_hidden'): self.add_input_layer() with tf.variable_scope('LSTM_cell'): self.add_cell() with tf.variable_scope('out_hidden'): self.add_output_layer() with tf.name_scope('cost'): self.compute_cost() with tf.name_scope('train'): self.train_op = tf.train.AdamOptimizer(LR).minimize(self.cost) def add_input_layer(self): l_in_x = tf.reshape(self.xs, [-1, self.input_size], name='2_2D') Ws_in = self._weight_variable([self.input_size, self.cell_size]) bs_in = self._bias_variable([self.cell_size]) with tf.name_scope('Wx_plus_b'): l_in_y = tf.matmul(l_in_x, Ws_in) + bs_in self.l_in_y = tf.reshape(l_in_y, [-1, self.n_steps, self.cell_size], name='2_3D') def add_cell(self): lstm_cell = tf.contrib.rnn.BasicLSTMCell(self.cell_size, forget_bias=1.0, state_is_tuple=True) with tf.name_scope('initial_state'): self.cell_init_state = lstm_cell.zero_state(self.batch_size, dtype=tf.float32) self.cell_outputs, self.cell_final_state = tf.nn.dynamic_rnn(lstm_cell, self.l_in_y, initial_state=self.cell_init_state, time_major=False) def add_output_layer(self): l_out_x = tf.reshape(self.cell_outputs, [-1, self.cell_size], name= '2_2D') Ws_out = self._weight_variable([self.cell_size, self.output_size]) 
bs_out = self._bias_variable([self.output_size]) with tf.name_scope('Wx_plus_b'): self.pred = tf.matmul(l_out_x, Ws_out) + bs_out def compute_cost(self): losses = tf.contrib.legacy_seq2seq.sequence_loss_by_example([tf. reshape(self.pred, [-1], name='reshape_pred')], [tf.reshape( self.ys, [-1], name='reshape_target')], [tf.ones([self. batch_size * self.n_steps], dtype=tf.float32)], average_across_timesteps=True, softmax_loss_function=self. ms_error, name='losses') with tf.name_scope('average_cost'): self.cost = tf.div(tf.reduce_sum(losses, name='losses_sum'), self.batch_size, name='average_cost') tf.summary.scalar('cost', self.cost) @staticmethod def ms_error(labels, logits): return tf.square(tf.subtract(labels, logits)) def _weight_variable(self, shape, name='weights'): initializer = tf.random_normal_initializer(mean=0.0, stddev=1.0) return tf.get_variable(shape=shape, initializer=initializer, name=name) def _bias_variable(self, shape, name='biases'): initializer = tf.constant_initializer(0.1) return tf.get_variable(shape=shape, initializer=initializer, name=name) if __name__ == '__main__': model = LSTMRNN(TIME_STEPS, INPUT_SIZE, OUTPUT_SIZE, CELL_SIZE, BATCH_SIZE) sess = tf.Session() merged = tf.summary.merge_all() writer = tf.summary.FileWriter('lstmlogs', sess.graph) sess.run(tf.global_variables_initializer()) plt.ion() plt.show() for i in range(200): seq, res, xs = get_batch() if i == 0: feed_dict = {model.xs: seq, model.ys: res} else: feed_dict = {model.xs: seq, model.ys: res, model. cell_init_state: state} _, cost, state, pred = sess.run([model.train_op, model.cost, model. cell_final_state, model.pred], feed_dict=feed_dict) plt.plot(xs[0, :], res[0].flatten(), 'r', xs[0, :], pred.flatten()[ :TIME_STEPS], 'b--') plt.ylim((-1.2, 1.2)) plt.draw() plt.pause(0.3) if i % 20 == 0: print('cost', round(cost, 4)) result = sess.run(merged, feed_dict) writer.add_summary(result, i) <|reserved_special_token_1|> import tensorflow as tf import numpy as np import matplotlib.pyplot as plt BATCH_START = 0 TIME_STEPS = 20 BATCH_SIZE = 50 INPUT_SIZE = 1 OUTPUT_SIZE = 1 CELL_SIZE = 10 LR = 0.006 def get_batch(): global BATCH_START, TIME_STEPS xs = np.arange(BATCH_START, BATCH_START + TIME_STEPS * BATCH_SIZE).reshape( (BATCH_SIZE, TIME_STEPS)) / (10 * np.pi) seq = np.sin(xs) res = np.cos(xs) BATCH_START += TIME_STEPS return [seq[:, :, np.newaxis], res[:, :, np.newaxis], xs] class LSTMRNN(object): def __init__(self, n_steps, input_size, output_size, cell_size, batch_size ): self.n_steps = n_steps self.input_size = input_size self.output_size = output_size self.cell_size = cell_size self.batch_size = batch_size with tf.name_scope('inputs'): self.xs = tf.placeholder(tf.float32, [None, n_steps, input_size ], name='xs') self.ys = tf.placeholder(tf.float32, [None, n_steps, input_size ], name='ys') with tf.variable_scope('in_hidden'): self.add_input_layer() with tf.variable_scope('LSTM_cell'): self.add_cell() with tf.variable_scope('out_hidden'): self.add_output_layer() with tf.name_scope('cost'): self.compute_cost() with tf.name_scope('train'): self.train_op = tf.train.AdamOptimizer(LR).minimize(self.cost) def add_input_layer(self): l_in_x = tf.reshape(self.xs, [-1, self.input_size], name='2_2D') Ws_in = self._weight_variable([self.input_size, self.cell_size]) bs_in = self._bias_variable([self.cell_size]) with tf.name_scope('Wx_plus_b'): l_in_y = tf.matmul(l_in_x, Ws_in) + bs_in self.l_in_y = tf.reshape(l_in_y, [-1, self.n_steps, self.cell_size], name='2_3D') def add_cell(self): lstm_cell = 
tf.contrib.rnn.BasicLSTMCell(self.cell_size, forget_bias=1.0, state_is_tuple=True) with tf.name_scope('initial_state'): self.cell_init_state = lstm_cell.zero_state(self.batch_size, dtype=tf.float32) self.cell_outputs, self.cell_final_state = tf.nn.dynamic_rnn(lstm_cell, self.l_in_y, initial_state=self.cell_init_state, time_major=False) def add_output_layer(self): l_out_x = tf.reshape(self.cell_outputs, [-1, self.cell_size], name= '2_2D') Ws_out = self._weight_variable([self.cell_size, self.output_size]) bs_out = self._bias_variable([self.output_size]) with tf.name_scope('Wx_plus_b'): self.pred = tf.matmul(l_out_x, Ws_out) + bs_out def compute_cost(self): losses = tf.contrib.legacy_seq2seq.sequence_loss_by_example([tf. reshape(self.pred, [-1], name='reshape_pred')], [tf.reshape( self.ys, [-1], name='reshape_target')], [tf.ones([self. batch_size * self.n_steps], dtype=tf.float32)], average_across_timesteps=True, softmax_loss_function=self. ms_error, name='losses') with tf.name_scope('average_cost'): self.cost = tf.div(tf.reduce_sum(losses, name='losses_sum'), self.batch_size, name='average_cost') tf.summary.scalar('cost', self.cost) @staticmethod def ms_error(labels, logits): return tf.square(tf.subtract(labels, logits)) def _weight_variable(self, shape, name='weights'): initializer = tf.random_normal_initializer(mean=0.0, stddev=1.0) return tf.get_variable(shape=shape, initializer=initializer, name=name) def _bias_variable(self, shape, name='biases'): initializer = tf.constant_initializer(0.1) return tf.get_variable(shape=shape, initializer=initializer, name=name) if __name__ == '__main__': model = LSTMRNN(TIME_STEPS, INPUT_SIZE, OUTPUT_SIZE, CELL_SIZE, BATCH_SIZE) sess = tf.Session() merged = tf.summary.merge_all() writer = tf.summary.FileWriter('lstmlogs', sess.graph) sess.run(tf.global_variables_initializer()) plt.ion() plt.show() for i in range(200): seq, res, xs = get_batch() if i == 0: feed_dict = {model.xs: seq, model.ys: res} else: feed_dict = {model.xs: seq, model.ys: res, model. cell_init_state: state} _, cost, state, pred = sess.run([model.train_op, model.cost, model. 
cell_final_state, model.pred], feed_dict=feed_dict) plt.plot(xs[0, :], res[0].flatten(), 'r', xs[0, :], pred.flatten()[ :TIME_STEPS], 'b--') plt.ylim((-1.2, 1.2)) plt.draw() plt.pause(0.3) if i % 20 == 0: print('cost', round(cost, 4)) result = sess.run(merged, feed_dict) writer.add_summary(result, i) <|reserved_special_token_1|> import tensorflow as tf import numpy as np import matplotlib.pyplot as plt BATCH_START=0 TIME_STEPS=20 BATCH_SIZE=50 INPUT_SIZE=1 OUTPUT_SIZE=1 CELL_SIZE=10 LR=0.006 #generate data def get_batch(): global BATCH_START,TIME_STEPS xs=np.arange(BATCH_START,BATCH_START+TIME_STEPS*BATCH_SIZE).reshape((BATCH_SIZE,TIME_STEPS))/(10*np.pi) seq=np.sin(xs) res=np.cos(xs) #data move one BATCH_START+=TIME_STEPS # all return shape is (batch_size,time_step,input_size) return [seq[:,:,np.newaxis],res[:,:,np.newaxis],xs] #def RNN LSTM Structure class LSTMRNN(object): def __init__(self,n_steps,input_size,output_size,cell_size,batch_size): self.n_steps=n_steps self.input_size=input_size self.output_size=output_size self.cell_size=cell_size self.batch_size=batch_size with tf.name_scope('inputs'): self.xs=tf.placeholder(tf.float32,[None,n_steps,input_size],name='xs') self.ys=tf.placeholder(tf.float32,[None,n_steps,input_size],name='ys') with tf.variable_scope('in_hidden'): self.add_input_layer() with tf.variable_scope('LSTM_cell'): self.add_cell() with tf.variable_scope('out_hidden'): self.add_output_layer() with tf.name_scope('cost'): self.compute_cost() with tf.name_scope('train'): self.train_op=tf.train.AdamOptimizer(LR).minimize(self.cost) #add input layer def add_input_layer(self): #shape(batch,step,input)=>(batch*step,input) l_in_x=tf.reshape(self.xs,[-1,self.input_size],name='2_2D') Ws_in=self._weight_variable([self.input_size,self.cell_size]) bs_in=self._bias_variable([self.cell_size]) with tf.name_scope('Wx_plus_b'): l_in_y=tf.matmul(l_in_x,Ws_in)+bs_in self.l_in_y=tf.reshape(l_in_y,[-1,self.n_steps,self.cell_size],name='2_3D') #add cell def add_cell(self): lstm_cell=tf.contrib.rnn.BasicLSTMCell(self.cell_size,forget_bias=1.0,state_is_tuple=True) with tf.name_scope('initial_state'): self.cell_init_state=lstm_cell.zero_state(self.batch_size,dtype=tf.float32) self.cell_outputs,self.cell_final_state=tf.nn.dynamic_rnn(lstm_cell,self.l_in_y,initial_state=self.cell_init_state,time_major=False) #add output layer def add_output_layer(self): l_out_x=tf.reshape(self.cell_outputs,[-1,self.cell_size],name='2_2D') Ws_out=self._weight_variable([self.cell_size,self.output_size]) bs_out=self._bias_variable([self.output_size,]) with tf.name_scope('Wx_plus_b'): self.pred=tf.matmul(l_out_x,Ws_out)+bs_out def compute_cost(self): losses=tf.contrib.legacy_seq2seq.sequence_loss_by_example( [tf.reshape(self.pred,[-1],name='reshape_pred')], [tf.reshape(self.ys,[-1],name='reshape_target')], [tf.ones([self.batch_size*self.n_steps],dtype=tf.float32)], average_across_timesteps=True, softmax_loss_function=self.ms_error, name='losses' ) with tf.name_scope('average_cost'): self.cost=tf.div( tf.reduce_sum(losses,name='losses_sum'), self.batch_size, name='average_cost' ) tf.summary.scalar('cost',self.cost) @staticmethod def ms_error(labels,logits): return tf.square(tf.subtract(labels,logits)) def _weight_variable(self,shape,name='weights'): initializer=tf.random_normal_initializer(mean=0.,stddev=1.,) return tf.get_variable(shape=shape,initializer=initializer,name=name) def _bias_variable(self,shape,name='biases'): initializer=tf.constant_initializer(0.1) return 
tf.get_variable(shape=shape,initializer=initializer,name=name)

#train
if __name__=="__main__":
    model=LSTMRNN(TIME_STEPS,INPUT_SIZE,OUTPUT_SIZE,CELL_SIZE,BATCH_SIZE)
    sess=tf.Session()
    #merge for tensorboard
    merged=tf.summary.merge_all()
    writer=tf.summary.FileWriter("lstmlogs",sess.graph)
    sess.run(tf.global_variables_initializer())
    #visible
    plt.ion()
    plt.show()
    #train for 200
    for i in range(200):
        seq,res,xs=get_batch()
        if i==0:
            feed_dict={model.xs:seq,model.ys:res,}
        else:
            feed_dict={model.xs:seq,model.ys:res,model.cell_init_state:state}
        #train
        _,cost,state,pred=sess.run([model.train_op,model.cost,model.cell_final_state,model.pred],feed_dict=feed_dict)
        #plotting
        plt.plot(xs[0,:],res[0].flatten(),'r',xs[0,:],pred.flatten()[:TIME_STEPS],'b--')
        plt.ylim((-1.2,1.2))
        plt.draw()
        plt.pause(0.3)

        if i%20==0:
            # print cost rounded to 4 decimals
            print('cost',round(cost,4))
            result=sess.run(merged,feed_dict)
            writer.add_summary(result,i)
flexible
{ "blob_id": "e54078f21176bbb7accb4164e7b56633b13cc693", "index": 8803, "step-1": "<mask token>\n\n\nclass LSTMRNN(object):\n\n def __init__(self, n_steps, input_size, output_size, cell_size, batch_size\n ):\n self.n_steps = n_steps\n self.input_size = input_size\n self.output_size = output_size\n self.cell_size = cell_size\n self.batch_size = batch_size\n with tf.name_scope('inputs'):\n self.xs = tf.placeholder(tf.float32, [None, n_steps, input_size\n ], name='xs')\n self.ys = tf.placeholder(tf.float32, [None, n_steps, input_size\n ], name='ys')\n with tf.variable_scope('in_hidden'):\n self.add_input_layer()\n with tf.variable_scope('LSTM_cell'):\n self.add_cell()\n with tf.variable_scope('out_hidden'):\n self.add_output_layer()\n with tf.name_scope('cost'):\n self.compute_cost()\n with tf.name_scope('train'):\n self.train_op = tf.train.AdamOptimizer(LR).minimize(self.cost)\n <mask token>\n\n def add_cell(self):\n lstm_cell = tf.contrib.rnn.BasicLSTMCell(self.cell_size,\n forget_bias=1.0, state_is_tuple=True)\n with tf.name_scope('initial_state'):\n self.cell_init_state = lstm_cell.zero_state(self.batch_size,\n dtype=tf.float32)\n self.cell_outputs, self.cell_final_state = tf.nn.dynamic_rnn(lstm_cell,\n self.l_in_y, initial_state=self.cell_init_state, time_major=False)\n\n def add_output_layer(self):\n l_out_x = tf.reshape(self.cell_outputs, [-1, self.cell_size], name=\n '2_2D')\n Ws_out = self._weight_variable([self.cell_size, self.output_size])\n bs_out = self._bias_variable([self.output_size])\n with tf.name_scope('Wx_plus_b'):\n self.pred = tf.matmul(l_out_x, Ws_out) + bs_out\n\n def compute_cost(self):\n losses = tf.contrib.legacy_seq2seq.sequence_loss_by_example([tf.\n reshape(self.pred, [-1], name='reshape_pred')], [tf.reshape(\n self.ys, [-1], name='reshape_target')], [tf.ones([self.\n batch_size * self.n_steps], dtype=tf.float32)],\n average_across_timesteps=True, softmax_loss_function=self.\n ms_error, name='losses')\n with tf.name_scope('average_cost'):\n self.cost = tf.div(tf.reduce_sum(losses, name='losses_sum'),\n self.batch_size, name='average_cost')\n tf.summary.scalar('cost', self.cost)\n\n @staticmethod\n def ms_error(labels, logits):\n return tf.square(tf.subtract(labels, logits))\n\n def _weight_variable(self, shape, name='weights'):\n initializer = tf.random_normal_initializer(mean=0.0, stddev=1.0)\n return tf.get_variable(shape=shape, initializer=initializer, name=name)\n\n def _bias_variable(self, shape, name='biases'):\n initializer = tf.constant_initializer(0.1)\n return tf.get_variable(shape=shape, initializer=initializer, name=name)\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef get_batch():\n global BATCH_START, TIME_STEPS\n xs = np.arange(BATCH_START, BATCH_START + TIME_STEPS * BATCH_SIZE).reshape(\n (BATCH_SIZE, TIME_STEPS)) / (10 * np.pi)\n seq = np.sin(xs)\n res = np.cos(xs)\n BATCH_START += TIME_STEPS\n return [seq[:, :, np.newaxis], res[:, :, np.newaxis], xs]\n\n\nclass LSTMRNN(object):\n\n def __init__(self, n_steps, input_size, output_size, cell_size, batch_size\n ):\n self.n_steps = n_steps\n self.input_size = input_size\n self.output_size = output_size\n self.cell_size = cell_size\n self.batch_size = batch_size\n with tf.name_scope('inputs'):\n self.xs = tf.placeholder(tf.float32, [None, n_steps, input_size\n ], name='xs')\n self.ys = tf.placeholder(tf.float32, [None, n_steps, input_size\n ], name='ys')\n with tf.variable_scope('in_hidden'):\n self.add_input_layer()\n with tf.variable_scope('LSTM_cell'):\n self.add_cell()\n with 
tf.variable_scope('out_hidden'):\n self.add_output_layer()\n with tf.name_scope('cost'):\n self.compute_cost()\n with tf.name_scope('train'):\n self.train_op = tf.train.AdamOptimizer(LR).minimize(self.cost)\n\n def add_input_layer(self):\n l_in_x = tf.reshape(self.xs, [-1, self.input_size], name='2_2D')\n Ws_in = self._weight_variable([self.input_size, self.cell_size])\n bs_in = self._bias_variable([self.cell_size])\n with tf.name_scope('Wx_plus_b'):\n l_in_y = tf.matmul(l_in_x, Ws_in) + bs_in\n self.l_in_y = tf.reshape(l_in_y, [-1, self.n_steps, self.cell_size],\n name='2_3D')\n\n def add_cell(self):\n lstm_cell = tf.contrib.rnn.BasicLSTMCell(self.cell_size,\n forget_bias=1.0, state_is_tuple=True)\n with tf.name_scope('initial_state'):\n self.cell_init_state = lstm_cell.zero_state(self.batch_size,\n dtype=tf.float32)\n self.cell_outputs, self.cell_final_state = tf.nn.dynamic_rnn(lstm_cell,\n self.l_in_y, initial_state=self.cell_init_state, time_major=False)\n\n def add_output_layer(self):\n l_out_x = tf.reshape(self.cell_outputs, [-1, self.cell_size], name=\n '2_2D')\n Ws_out = self._weight_variable([self.cell_size, self.output_size])\n bs_out = self._bias_variable([self.output_size])\n with tf.name_scope('Wx_plus_b'):\n self.pred = tf.matmul(l_out_x, Ws_out) + bs_out\n\n def compute_cost(self):\n losses = tf.contrib.legacy_seq2seq.sequence_loss_by_example([tf.\n reshape(self.pred, [-1], name='reshape_pred')], [tf.reshape(\n self.ys, [-1], name='reshape_target')], [tf.ones([self.\n batch_size * self.n_steps], dtype=tf.float32)],\n average_across_timesteps=True, softmax_loss_function=self.\n ms_error, name='losses')\n with tf.name_scope('average_cost'):\n self.cost = tf.div(tf.reduce_sum(losses, name='losses_sum'),\n self.batch_size, name='average_cost')\n tf.summary.scalar('cost', self.cost)\n\n @staticmethod\n def ms_error(labels, logits):\n return tf.square(tf.subtract(labels, logits))\n\n def _weight_variable(self, shape, name='weights'):\n initializer = tf.random_normal_initializer(mean=0.0, stddev=1.0)\n return tf.get_variable(shape=shape, initializer=initializer, name=name)\n\n def _bias_variable(self, shape, name='biases'):\n initializer = tf.constant_initializer(0.1)\n return tf.get_variable(shape=shape, initializer=initializer, name=name)\n\n\nif __name__ == '__main__':\n model = LSTMRNN(TIME_STEPS, INPUT_SIZE, OUTPUT_SIZE, CELL_SIZE, BATCH_SIZE)\n sess = tf.Session()\n merged = tf.summary.merge_all()\n writer = tf.summary.FileWriter('lstmlogs', sess.graph)\n sess.run(tf.global_variables_initializer())\n plt.ion()\n plt.show()\n for i in range(200):\n seq, res, xs = get_batch()\n if i == 0:\n feed_dict = {model.xs: seq, model.ys: res}\n else:\n feed_dict = {model.xs: seq, model.ys: res, model.\n cell_init_state: state}\n _, cost, state, pred = sess.run([model.train_op, model.cost, model.\n cell_final_state, model.pred], feed_dict=feed_dict)\n plt.plot(xs[0, :], res[0].flatten(), 'r', xs[0, :], pred.flatten()[\n :TIME_STEPS], 'b--')\n plt.ylim((-1.2, 1.2))\n plt.draw()\n plt.pause(0.3)\n if i % 20 == 0:\n print('cost', round(cost, 4))\n result = sess.run(merged, feed_dict)\n writer.add_summary(result, i)\n", "step-3": "<mask token>\nBATCH_START = 0\nTIME_STEPS = 20\nBATCH_SIZE = 50\nINPUT_SIZE = 1\nOUTPUT_SIZE = 1\nCELL_SIZE = 10\nLR = 0.006\n\n\ndef get_batch():\n global BATCH_START, TIME_STEPS\n xs = np.arange(BATCH_START, BATCH_START + TIME_STEPS * BATCH_SIZE).reshape(\n (BATCH_SIZE, TIME_STEPS)) / (10 * np.pi)\n seq = np.sin(xs)\n res = np.cos(xs)\n BATCH_START += TIME_STEPS\n 
return [seq[:, :, np.newaxis], res[:, :, np.newaxis], xs]\n\n\nclass LSTMRNN(object):\n\n def __init__(self, n_steps, input_size, output_size, cell_size, batch_size\n ):\n self.n_steps = n_steps\n self.input_size = input_size\n self.output_size = output_size\n self.cell_size = cell_size\n self.batch_size = batch_size\n with tf.name_scope('inputs'):\n self.xs = tf.placeholder(tf.float32, [None, n_steps, input_size\n ], name='xs')\n self.ys = tf.placeholder(tf.float32, [None, n_steps, input_size\n ], name='ys')\n with tf.variable_scope('in_hidden'):\n self.add_input_layer()\n with tf.variable_scope('LSTM_cell'):\n self.add_cell()\n with tf.variable_scope('out_hidden'):\n self.add_output_layer()\n with tf.name_scope('cost'):\n self.compute_cost()\n with tf.name_scope('train'):\n self.train_op = tf.train.AdamOptimizer(LR).minimize(self.cost)\n\n def add_input_layer(self):\n l_in_x = tf.reshape(self.xs, [-1, self.input_size], name='2_2D')\n Ws_in = self._weight_variable([self.input_size, self.cell_size])\n bs_in = self._bias_variable([self.cell_size])\n with tf.name_scope('Wx_plus_b'):\n l_in_y = tf.matmul(l_in_x, Ws_in) + bs_in\n self.l_in_y = tf.reshape(l_in_y, [-1, self.n_steps, self.cell_size],\n name='2_3D')\n\n def add_cell(self):\n lstm_cell = tf.contrib.rnn.BasicLSTMCell(self.cell_size,\n forget_bias=1.0, state_is_tuple=True)\n with tf.name_scope('initial_state'):\n self.cell_init_state = lstm_cell.zero_state(self.batch_size,\n dtype=tf.float32)\n self.cell_outputs, self.cell_final_state = tf.nn.dynamic_rnn(lstm_cell,\n self.l_in_y, initial_state=self.cell_init_state, time_major=False)\n\n def add_output_layer(self):\n l_out_x = tf.reshape(self.cell_outputs, [-1, self.cell_size], name=\n '2_2D')\n Ws_out = self._weight_variable([self.cell_size, self.output_size])\n bs_out = self._bias_variable([self.output_size])\n with tf.name_scope('Wx_plus_b'):\n self.pred = tf.matmul(l_out_x, Ws_out) + bs_out\n\n def compute_cost(self):\n losses = tf.contrib.legacy_seq2seq.sequence_loss_by_example([tf.\n reshape(self.pred, [-1], name='reshape_pred')], [tf.reshape(\n self.ys, [-1], name='reshape_target')], [tf.ones([self.\n batch_size * self.n_steps], dtype=tf.float32)],\n average_across_timesteps=True, softmax_loss_function=self.\n ms_error, name='losses')\n with tf.name_scope('average_cost'):\n self.cost = tf.div(tf.reduce_sum(losses, name='losses_sum'),\n self.batch_size, name='average_cost')\n tf.summary.scalar('cost', self.cost)\n\n @staticmethod\n def ms_error(labels, logits):\n return tf.square(tf.subtract(labels, logits))\n\n def _weight_variable(self, shape, name='weights'):\n initializer = tf.random_normal_initializer(mean=0.0, stddev=1.0)\n return tf.get_variable(shape=shape, initializer=initializer, name=name)\n\n def _bias_variable(self, shape, name='biases'):\n initializer = tf.constant_initializer(0.1)\n return tf.get_variable(shape=shape, initializer=initializer, name=name)\n\n\nif __name__ == '__main__':\n model = LSTMRNN(TIME_STEPS, INPUT_SIZE, OUTPUT_SIZE, CELL_SIZE, BATCH_SIZE)\n sess = tf.Session()\n merged = tf.summary.merge_all()\n writer = tf.summary.FileWriter('lstmlogs', sess.graph)\n sess.run(tf.global_variables_initializer())\n plt.ion()\n plt.show()\n for i in range(200):\n seq, res, xs = get_batch()\n if i == 0:\n feed_dict = {model.xs: seq, model.ys: res}\n else:\n feed_dict = {model.xs: seq, model.ys: res, model.\n cell_init_state: state}\n _, cost, state, pred = sess.run([model.train_op, model.cost, model.\n cell_final_state, model.pred], feed_dict=feed_dict)\n 
plt.plot(xs[0, :], res[0].flatten(), 'r', xs[0, :], pred.flatten()[\n :TIME_STEPS], 'b--')\n plt.ylim((-1.2, 1.2))\n plt.draw()\n plt.pause(0.3)\n if i % 20 == 0:\n print('cost', round(cost, 4))\n result = sess.run(merged, feed_dict)\n writer.add_summary(result, i)\n", "step-4": "import tensorflow as tf\nimport numpy as np\nimport matplotlib.pyplot as plt\nBATCH_START = 0\nTIME_STEPS = 20\nBATCH_SIZE = 50\nINPUT_SIZE = 1\nOUTPUT_SIZE = 1\nCELL_SIZE = 10\nLR = 0.006\n\n\ndef get_batch():\n global BATCH_START, TIME_STEPS\n xs = np.arange(BATCH_START, BATCH_START + TIME_STEPS * BATCH_SIZE).reshape(\n (BATCH_SIZE, TIME_STEPS)) / (10 * np.pi)\n seq = np.sin(xs)\n res = np.cos(xs)\n BATCH_START += TIME_STEPS\n return [seq[:, :, np.newaxis], res[:, :, np.newaxis], xs]\n\n\nclass LSTMRNN(object):\n\n def __init__(self, n_steps, input_size, output_size, cell_size, batch_size\n ):\n self.n_steps = n_steps\n self.input_size = input_size\n self.output_size = output_size\n self.cell_size = cell_size\n self.batch_size = batch_size\n with tf.name_scope('inputs'):\n self.xs = tf.placeholder(tf.float32, [None, n_steps, input_size\n ], name='xs')\n self.ys = tf.placeholder(tf.float32, [None, n_steps, input_size\n ], name='ys')\n with tf.variable_scope('in_hidden'):\n self.add_input_layer()\n with tf.variable_scope('LSTM_cell'):\n self.add_cell()\n with tf.variable_scope('out_hidden'):\n self.add_output_layer()\n with tf.name_scope('cost'):\n self.compute_cost()\n with tf.name_scope('train'):\n self.train_op = tf.train.AdamOptimizer(LR).minimize(self.cost)\n\n def add_input_layer(self):\n l_in_x = tf.reshape(self.xs, [-1, self.input_size], name='2_2D')\n Ws_in = self._weight_variable([self.input_size, self.cell_size])\n bs_in = self._bias_variable([self.cell_size])\n with tf.name_scope('Wx_plus_b'):\n l_in_y = tf.matmul(l_in_x, Ws_in) + bs_in\n self.l_in_y = tf.reshape(l_in_y, [-1, self.n_steps, self.cell_size],\n name='2_3D')\n\n def add_cell(self):\n lstm_cell = tf.contrib.rnn.BasicLSTMCell(self.cell_size,\n forget_bias=1.0, state_is_tuple=True)\n with tf.name_scope('initial_state'):\n self.cell_init_state = lstm_cell.zero_state(self.batch_size,\n dtype=tf.float32)\n self.cell_outputs, self.cell_final_state = tf.nn.dynamic_rnn(lstm_cell,\n self.l_in_y, initial_state=self.cell_init_state, time_major=False)\n\n def add_output_layer(self):\n l_out_x = tf.reshape(self.cell_outputs, [-1, self.cell_size], name=\n '2_2D')\n Ws_out = self._weight_variable([self.cell_size, self.output_size])\n bs_out = self._bias_variable([self.output_size])\n with tf.name_scope('Wx_plus_b'):\n self.pred = tf.matmul(l_out_x, Ws_out) + bs_out\n\n def compute_cost(self):\n losses = tf.contrib.legacy_seq2seq.sequence_loss_by_example([tf.\n reshape(self.pred, [-1], name='reshape_pred')], [tf.reshape(\n self.ys, [-1], name='reshape_target')], [tf.ones([self.\n batch_size * self.n_steps], dtype=tf.float32)],\n average_across_timesteps=True, softmax_loss_function=self.\n ms_error, name='losses')\n with tf.name_scope('average_cost'):\n self.cost = tf.div(tf.reduce_sum(losses, name='losses_sum'),\n self.batch_size, name='average_cost')\n tf.summary.scalar('cost', self.cost)\n\n @staticmethod\n def ms_error(labels, logits):\n return tf.square(tf.subtract(labels, logits))\n\n def _weight_variable(self, shape, name='weights'):\n initializer = tf.random_normal_initializer(mean=0.0, stddev=1.0)\n return tf.get_variable(shape=shape, initializer=initializer, name=name)\n\n def _bias_variable(self, shape, name='biases'):\n initializer = 
tf.constant_initializer(0.1)\n return tf.get_variable(shape=shape, initializer=initializer, name=name)\n\n\nif __name__ == '__main__':\n model = LSTMRNN(TIME_STEPS, INPUT_SIZE, OUTPUT_SIZE, CELL_SIZE, BATCH_SIZE)\n sess = tf.Session()\n merged = tf.summary.merge_all()\n writer = tf.summary.FileWriter('lstmlogs', sess.graph)\n sess.run(tf.global_variables_initializer())\n plt.ion()\n plt.show()\n for i in range(200):\n seq, res, xs = get_batch()\n if i == 0:\n feed_dict = {model.xs: seq, model.ys: res}\n else:\n feed_dict = {model.xs: seq, model.ys: res, model.\n cell_init_state: state}\n _, cost, state, pred = sess.run([model.train_op, model.cost, model.\n cell_final_state, model.pred], feed_dict=feed_dict)\n plt.plot(xs[0, :], res[0].flatten(), 'r', xs[0, :], pred.flatten()[\n :TIME_STEPS], 'b--')\n plt.ylim((-1.2, 1.2))\n plt.draw()\n plt.pause(0.3)\n if i % 20 == 0:\n print('cost', round(cost, 4))\n result = sess.run(merged, feed_dict)\n writer.add_summary(result, i)\n", "step-5": "import tensorflow as tf\nimport numpy as np \nimport matplotlib.pyplot as plt\n\nBATCH_START=0\nTIME_STEPS=20\nBATCH_SIZE=50\nINPUT_SIZE=1\nOUTPUT_SIZE=1\nCELL_SIZE=10\nLR=0.006\n\n#generate data\ndef get_batch():\n global BATCH_START,TIME_STEPS\n xs=np.arange(BATCH_START,BATCH_START+TIME_STEPS*BATCH_SIZE).reshape((BATCH_SIZE,TIME_STEPS))/(10*np.pi)\n seq=np.sin(xs)\n res=np.cos(xs)\n #data move one\n BATCH_START+=TIME_STEPS\n # all return shape is (batch_size,time_step,input_size)\n return [seq[:,:,np.newaxis],res[:,:,np.newaxis],xs]\n\n#def RNN LSTM Structure\nclass LSTMRNN(object):\n def __init__(self,n_steps,input_size,output_size,cell_size,batch_size):\n self.n_steps=n_steps\n self.input_size=input_size\n self.output_size=output_size\n self.cell_size=cell_size\n self.batch_size=batch_size\n with tf.name_scope('inputs'):\n self.xs=tf.placeholder(tf.float32,[None,n_steps,input_size],name='xs')\n self.ys=tf.placeholder(tf.float32,[None,n_steps,input_size],name='ys')\n with tf.variable_scope('in_hidden'):\n self.add_input_layer()\n with tf.variable_scope('LSTM_cell'):\n self.add_cell()\n with tf.variable_scope('out_hidden'):\n self.add_output_layer()\n with tf.name_scope('cost'):\n self.compute_cost()\n with tf.name_scope('train'):\n self.train_op=tf.train.AdamOptimizer(LR).minimize(self.cost)\n \n#add input layer\n def add_input_layer(self):\n #shape(batch,step,input)=>(batch*step,input)\n l_in_x=tf.reshape(self.xs,[-1,self.input_size],name='2_2D')\n Ws_in=self._weight_variable([self.input_size,self.cell_size])\n bs_in=self._bias_variable([self.cell_size])\n with tf.name_scope('Wx_plus_b'):\n l_in_y=tf.matmul(l_in_x,Ws_in)+bs_in\n self.l_in_y=tf.reshape(l_in_y,[-1,self.n_steps,self.cell_size],name='2_3D')\n#add cell\n def add_cell(self):\n lstm_cell=tf.contrib.rnn.BasicLSTMCell(self.cell_size,forget_bias=1.0,state_is_tuple=True)\n with tf.name_scope('initial_state'):\n self.cell_init_state=lstm_cell.zero_state(self.batch_size,dtype=tf.float32)\n self.cell_outputs,self.cell_final_state=tf.nn.dynamic_rnn(lstm_cell,self.l_in_y,initial_state=self.cell_init_state,time_major=False)\n#add output layer\n def add_output_layer(self):\n l_out_x=tf.reshape(self.cell_outputs,[-1,self.cell_size],name='2_2D')\n Ws_out=self._weight_variable([self.cell_size,self.output_size])\n bs_out=self._bias_variable([self.output_size,])\n with tf.name_scope('Wx_plus_b'):\n self.pred=tf.matmul(l_out_x,Ws_out)+bs_out\n \n def compute_cost(self):\n losses=tf.contrib.legacy_seq2seq.sequence_loss_by_example(\n 
[tf.reshape(self.pred,[-1],name='reshape_pred')],\n [tf.reshape(self.ys,[-1],name='reshape_target')],\n [tf.ones([self.batch_size*self.n_steps],dtype=tf.float32)],\n average_across_timesteps=True,\n softmax_loss_function=self.ms_error,\n name='losses'\n )\n with tf.name_scope('average_cost'):\n self.cost=tf.div(\n tf.reduce_sum(losses,name='losses_sum'),\n self.batch_size,\n name='average_cost'\n )\n tf.summary.scalar('cost',self.cost)\n\n @staticmethod\n def ms_error(labels,logits):\n return tf.square(tf.subtract(labels,logits))\n def _weight_variable(self,shape,name='weights'):\n initializer=tf.random_normal_initializer(mean=0.,stddev=1.,)\n return tf.get_variable(shape=shape,initializer=initializer,name=name)\n \n def _bias_variable(self,shape,name='biases'):\n initializer=tf.constant_initializer(0.1)\n return tf.get_variable(shape=shape,initializer=initializer,name=name)\n\n#train\nif __name__=='__main__':\n model=LSTMRNN(TIME_STEPS,INPUT_SIZE,OUTPUT_SIZE,CELL_SIZE,BATCH_SIZE)\n sess=tf.Session()\n #merge for tensorboard\n merged=tf.summary.merge_all()\n writer=tf.summary.FileWriter(\"lstmlogs\",sess.graph)\n sess.run(tf.global_variables_initializer())\n\n #visiable\n plt.ion()\n plt.show()\n\n #train for 200\n for i in range(200):\n seq,res,xs=get_batch()\n if i==0:\n feed_dict={model.xs:seq,model.ys:res,} \n else:\n feed_dict={model.xs:seq,model.ys:res,model.cell_init_state:state}\n #train\n _,cost,state,pred=sess.run([model.train_op,model.cost,model.cell_final_state,model.pred],feed_dict=feed_dict)\n\n\n #plotting\n plt.plot(xs[0,:],res[0].flatten(),'r',xs[0,:],pred.flatten()[:TIME_STEPS],'b--')\n plt.ylim((-1.2,1.2))\n plt.draw()\n plt.pause(0.3)\n\n if i%20==0:\n # 4 \n print('cost',round(cost,4))\n result=sess.run(merged,feed_dict)\n writer.add_summary(result,i)\n\n\n\n\n\n\n\n", "step-ids": [ 8, 11, 12, 13, 14 ] }
[ 8, 11, 12, 13, 14 ]
from marshmallow import fields

from server.common.database import Media
from server.common.schema.ref import ma


class MediaSchema(ma.SQLAlchemyAutoSchema):
    class Meta:
        model = Media
        fields = ('id', 'name', 'mimetype', 'extension', 'owner', '_links')
        dump_only = ('id', 'owner', '_links')
        include_fk = True

    id = fields.UUID()
    owner = ma.auto_field('owner_id')
    _links = ma.Hyperlinks({
        'self': ma.URLFor('media', values={'media_id': '<id>'}),
        'collection': ma.URLFor('medias'),
        'image': ma.URLFor('media_file', values={'media_id': '<id>'}),
        'thumbnail': ma.URLFor('media_file', values={'media_id': '<id>', 'thumb': ''}),
        'owner': ma.URLFor('user', values={'user_id': '<owner>'})
    })


Media.__marshmallow__ = MediaSchema
flexible
{ "blob_id": "1810fee40ff8a99871ecc1d024f6794a68ee54e8", "index": 3543, "step-1": "<mask token>\n\n\nclass MediaSchema(ma.SQLAlchemyAutoSchema):\n\n\n class Meta:\n model = Media\n fields = 'id', 'name', 'mimetype', 'extension', 'owner', '_links'\n dump_only = 'id', 'owner', '_links'\n include_fk = True\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass MediaSchema(ma.SQLAlchemyAutoSchema):\n\n\n class Meta:\n model = Media\n fields = 'id', 'name', 'mimetype', 'extension', 'owner', '_links'\n dump_only = 'id', 'owner', '_links'\n include_fk = True\n id = fields.UUID()\n owner = ma.auto_field('owner_id')\n _links = ma.Hyperlinks({'self': ma.URLFor('media', values={'media_id':\n '<id>'}), 'collection': ma.URLFor('medias'), 'image': ma.URLFor(\n 'media_file', values={'media_id': '<id>'}), 'thumbnail': ma.URLFor(\n 'media_file', values={'media_id': '<id>', 'thumb': ''}), 'owner':\n ma.URLFor('user', values={'user_id': '<owner>'})})\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass MediaSchema(ma.SQLAlchemyAutoSchema):\n\n\n class Meta:\n model = Media\n fields = 'id', 'name', 'mimetype', 'extension', 'owner', '_links'\n dump_only = 'id', 'owner', '_links'\n include_fk = True\n id = fields.UUID()\n owner = ma.auto_field('owner_id')\n _links = ma.Hyperlinks({'self': ma.URLFor('media', values={'media_id':\n '<id>'}), 'collection': ma.URLFor('medias'), 'image': ma.URLFor(\n 'media_file', values={'media_id': '<id>'}), 'thumbnail': ma.URLFor(\n 'media_file', values={'media_id': '<id>', 'thumb': ''}), 'owner':\n ma.URLFor('user', values={'user_id': '<owner>'})})\n\n\nMedia.__marshmallow__ = MediaSchema\n", "step-4": "from marshmallow import fields\nfrom server.common.database import Media\nfrom server.common.schema.ref import ma\n\n\nclass MediaSchema(ma.SQLAlchemyAutoSchema):\n\n\n class Meta:\n model = Media\n fields = 'id', 'name', 'mimetype', 'extension', 'owner', '_links'\n dump_only = 'id', 'owner', '_links'\n include_fk = True\n id = fields.UUID()\n owner = ma.auto_field('owner_id')\n _links = ma.Hyperlinks({'self': ma.URLFor('media', values={'media_id':\n '<id>'}), 'collection': ma.URLFor('medias'), 'image': ma.URLFor(\n 'media_file', values={'media_id': '<id>'}), 'thumbnail': ma.URLFor(\n 'media_file', values={'media_id': '<id>', 'thumb': ''}), 'owner':\n ma.URLFor('user', values={'user_id': '<owner>'})})\n\n\nMedia.__marshmallow__ = MediaSchema\n", "step-5": "from marshmallow import fields\n\nfrom server.common.database import Media\nfrom server.common.schema.ref import ma\n\n\nclass MediaSchema(ma.SQLAlchemyAutoSchema):\n class Meta:\n model = Media\n fields = ('id', 'name', 'mimetype', 'extension', 'owner', '_links')\n dump_only = ('id', 'owner', '_links')\n include_fk = True\n\n id = fields.UUID()\n owner = ma.auto_field('owner_id')\n _links = ma.Hyperlinks({\n 'self': ma.URLFor('media', values={'media_id': '<id>'}),\n 'collection': ma.URLFor('medias'),\n 'image': ma.URLFor('media_file', values={'media_id': '<id>'}),\n 'thumbnail': ma.URLFor('media_file', values={'media_id': '<id>', 'thumb': ''}),\n 'owner': ma.URLFor('user', values={'user_id': '<owner>'})\n })\n\n\nMedia.__marshmallow__ = MediaSchema\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
from twindb_backup.copy.binlog_copy import BinlogCopy
from twindb_backup.status.binlog_status import BinlogStatus


def test_get_latest_backup(raw_binlog_status):
    instance = BinlogStatus(raw_binlog_status)
    assert instance.get_latest_backup() == BinlogCopy(
        host='master1', name='mysqlbin005.bin', created_at=100504
    )
normal
{ "blob_id": "0dc556336cee9e5f41c036c6fcf6da950216693c", "index": 5910, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef test_get_latest_backup(raw_binlog_status):\n instance = BinlogStatus(raw_binlog_status)\n assert instance.get_latest_backup() == BinlogCopy(host='master1', name=\n 'mysqlbin005.bin', created_at=100504)\n", "step-3": "from twindb_backup.copy.binlog_copy import BinlogCopy\nfrom twindb_backup.status.binlog_status import BinlogStatus\n\n\ndef test_get_latest_backup(raw_binlog_status):\n instance = BinlogStatus(raw_binlog_status)\n assert instance.get_latest_backup() == BinlogCopy(host='master1', name=\n 'mysqlbin005.bin', created_at=100504)\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
import cv2

img=cv2.imread(r'd:\st.jpg',0)  # read the image in grayscale (raw string so the backslash is not treated as an escape)
cv2.namedWindow('st',cv2.WINDOW_NORMAL)  # WINDOW_NORMAL allows the image window to be resized
cv2.imshow('st',img)
cv2.imwrite('mes.png',img)
cv2.waitKey(0)
cv2.destroyAllWindows()
normal
{ "blob_id": "6b5399effe73d27eade0381f016cd7819a6e104a", "index": 2466, "step-1": "<mask token>\n", "step-2": "<mask token>\ncv2.namedWindow('st', cv2.WINDOW_NORMAL)\ncv2.imshow('st', img)\ncv2.imwrite('mes.png', img)\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n", "step-3": "<mask token>\nimg = cv2.imread('d:\\\\st.jpg', 0)\ncv2.namedWindow('st', cv2.WINDOW_NORMAL)\ncv2.imshow('st', img)\ncv2.imwrite('mes.png', img)\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n", "step-4": "import tensorflow as tf\nimport cv2\nimg = cv2.imread('d:\\\\st.jpg', 0)\ncv2.namedWindow('st', cv2.WINDOW_NORMAL)\ncv2.imshow('st', img)\ncv2.imwrite('mes.png', img)\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n", "step-5": "import tensorflow as tf\r\nimport cv2 \r\nimg=cv2.imread('d:\\st.jpg',0)\r\ncv2.namedWindow('st',cv2.WINDOW_NORMAL)#可以调整图像窗口大小\r\ncv2.imshow('st',img)\r\ncv2.imwrite('mes.png',img)\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]