lines (sequence, lengths 1–444) | raw_lines (sequence, lengths 1–444) | label (sequence, lengths 1–444) | type (sequence, lengths 1–444) |
---|---|---|---|
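Each cell in a row below is a line-aligned sequence: `lines` holds the code with identifiers masked as `VAR_n`/`FUNC_n`, `raw_lines` the same code with the original names, `label` an integer per line (0 almost everywhere in this preview, with values such as 1, 2, 4, 6, and 7 on a handful of lines), and `type` the AST node kind of each line (`FunctionDef'`, `Assign'`, `Condition`, ...). A minimal sketch of reading rows of this shape, assuming the Hugging Face `datasets` library; the dataset identifier `user/dataset` is a placeholder, since the real name is not given in this preview:

```python
# Minimal sketch: iterate rows shaped like this preview and print flagged lines.
from datasets import load_dataset  # assumes the Hugging Face `datasets` library

# "user/dataset" is a placeholder -- the real dataset identifier is not
# shown in this preview.
ds = load_dataset("user/dataset", split="train")

for row in ds:
    # The four columns are parallel, line-aligned sequences: anonymized code,
    # original code, per-line integer label, and per-line AST node kind.
    for _anon, raw, label, node_type in zip(
        row["lines"], row["raw_lines"], row["label"], row["type"]
    ):
        if label != 0:  # most lines are labeled 0; non-zero marks a flagged line
            print(f"{node_type:<14} label={label}  {raw.rstrip()}")
```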
[
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_83.included_stack:\n",
"return None\n",
"return os.path.join(os.path.dirname(VAR_83.included_stack[-1]), VAR_0)\n"
] | [
"def srcdir(path):...\n",
"\"\"\"docstring\"\"\"\n",
"if not workflow.included_stack:\n",
"return None\n",
"return os.path.join(os.path.dirname(workflow.included_stack[-1]), path)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_9(VAR_8, VAR_9):...\n",
"VAR_30 = {}\n",
"for VAR_36 in ('point', 'line', 'polygon'):\n",
"VAR_35 = VAR_9 * VAR_18[VAR_36]\n",
"return VAR_30\n",
"VAR_30[VAR_36] = bounds_buffer(VAR_8, VAR_35)\n"
] | [
"def bounds_pad(bounds, meters_per_pixel_dim):...\n",
"buffered_by_type = {}\n",
"for geometry_type in ('point', 'line', 'polygon'):\n",
"offset = meters_per_pixel_dim * buf_by_type[geometry_type]\n",
"return buffered_by_type\n",
"buffered_by_type[geometry_type] = bounds_buffer(bounds, offset)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Return'",
"Assign'"
] |
[
"@VAR_47.exception(Exception)...\n",
"VAR_56 = None\n",
"VAR_57 = None\n",
"if isinstance(VAR_52, NotFound):\n",
"VAR_66 = 404\n",
"if isinstance(VAR_52, InvalidUsage):\n",
"VAR_64 = {}\n",
"VAR_66 = 405\n",
"if isinstance(VAR_52, DatasetteError):\n",
"VAR_67 = VAR_52.args[0]\n",
"VAR_64 = {}\n",
"VAR_66 = VAR_52.status\n",
"VAR_66 = 500\n",
"VAR_58 = ['500.html']\n",
"VAR_67 = VAR_52.args[0]\n",
"VAR_64 = VAR_52.error_dict\n",
"VAR_64 = {}\n",
"if VAR_66 != 500:\n",
"VAR_67 = VAR_52.message\n",
"VAR_67 = str(VAR_52)\n",
"VAR_58 = ['{}.html'.format(VAR_66)] + VAR_58\n",
"VAR_64.update({'ok': False, 'error': VAR_67, 'status': VAR_66, 'title': VAR_56}\n )\n",
"if VAR_52.messagge_is_html:\n",
"traceback.print_exc()\n",
"if VAR_6 is not None and VAR_6.path.split('?')[0].endswith('.json'):\n",
"VAR_67 = Markup(VAR_67)\n",
"VAR_56 = VAR_52.title\n",
"return response.json(VAR_64, VAR_66=status)\n",
"VAR_68 = self.jinja_env.select_template(VAR_58)\n",
"return response.html(VAR_68.render(VAR_64), VAR_66=status)\n"
] | [
"@app.exception(Exception)...\n",
"title = None\n",
"help = None\n",
"if isinstance(exception, NotFound):\n",
"status = 404\n",
"if isinstance(exception, InvalidUsage):\n",
"info = {}\n",
"status = 405\n",
"if isinstance(exception, DatasetteError):\n",
"message = exception.args[0]\n",
"info = {}\n",
"status = exception.status\n",
"status = 500\n",
"templates = ['500.html']\n",
"message = exception.args[0]\n",
"info = exception.error_dict\n",
"info = {}\n",
"if status != 500:\n",
"message = exception.message\n",
"message = str(exception)\n",
"templates = ['{}.html'.format(status)] + templates\n",
"info.update({'ok': False, 'error': message, 'status': status, 'title': title})\n",
"if exception.messagge_is_html:\n",
"traceback.print_exc()\n",
"if request is not None and request.path.split('?')[0].endswith('.json'):\n",
"message = Markup(message)\n",
"title = exception.title\n",
"return response.json(info, status=status)\n",
"template = self.jinja_env.select_template(templates)\n",
"return response.html(template.render(info), status=status)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Return'"
] |
[
"def FUNC_29(self, VAR_21):...\n",
"\"\"\"docstring\"\"\"\n",
"return self.stat(VAR_21, VAR_23=False)\n"
] | [
"def lstat(self, path):...\n",
"\"\"\"docstring\"\"\"\n",
"return self.stat(path, follow_symlinks=False)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
""
] | [
"@utils.synchronized('3par', external=True)...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_36(self, VAR_14):...\n",
"return self.__class__(self.get_related_model(VAR_14), self.session)\n"
] | [
"def get_related_interface(self, col_name):...\n",
"return self.__class__(self.get_related_model(col_name), self.session)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_23(VAR_33, VAR_13, VAR_20, VAR_21, VAR_12):...\n",
"if VAR_21 == wzrpc.status.success:\n",
"self.log.debug('Succesfully set route type for (%s, %s) to %s', VAR_16,\n VAR_17, wzrpc.name_route_type(VAR_19))\n",
"if VAR_21 == wzrpc.status.e_req_denied:\n",
"self.log.warn('Status {0}, reauthentificating'.format(wzrpc.name_status(\n VAR_21)))\n",
"self.log.warn('Status {0}, retrying'.format(wzrpc.name_status(VAR_21)))\n",
"self.auth_requests()\n",
"VAR_33.retry = True\n"
] | [
"def accept(that, reqid, seqnum, status, data):...\n",
"if status == wzrpc.status.success:\n",
"self.log.debug('Succesfully set route type for (%s, %s) to %s', i, m, wzrpc\n .name_route_type(t))\n",
"if status == wzrpc.status.e_req_denied:\n",
"self.log.warn('Status {0}, reauthentificating'.format(wzrpc.name_status(\n status)))\n",
"self.log.warn('Status {0}, retrying'.format(wzrpc.name_status(status)))\n",
"self.auth_requests()\n",
"that.retry = True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Assign'"
] |
[
"def FUNC_10(self, VAR_5=False, VAR_6=False):...\n",
"\"\"\"docstring\"\"\"\n",
"assert self._raylet_socket_name is None\n",
"self._raylet_socket_name = (self._ray_params.raylet_socket_name or\n get_raylet_socket_name())\n",
"self.prepare_socket_file(self._raylet_socket_name)\n",
"VAR_14, VAR_15 = new_raylet_log_file(redirect_output=self._ray_params.\n redirect_worker_output)\n",
"VAR_16 = ray.services.start_raylet(self._redis_address, self.\n _node_ip_address, self._raylet_socket_name, self.\n _plasma_store_socket_name, self._ray_params.worker_path, self.\n _ray_params.num_cpus, self._ray_params.num_gpus, self._ray_params.\n resources, self._ray_params.object_manager_port, self._ray_params.\n node_manager_port, self._ray_params.redis_password, VAR_5=use_valgrind,\n VAR_6=use_profiler, VAR_14=stdout_file, VAR_15=stderr_file, config=self\n ._config, include_java=self._ray_params.include_java,\n java_worker_options=self._ray_params.java_worker_options)\n",
"assert VAR_13.PROCESS_TYPE_RAYLET not in self.all_processes\n",
"self.all_processes[VAR_13.PROCESS_TYPE_RAYLET] = [VAR_16]\n"
] | [
"def start_raylet(self, use_valgrind=False, use_profiler=False):...\n",
"\"\"\"docstring\"\"\"\n",
"assert self._raylet_socket_name is None\n",
"self._raylet_socket_name = (self._ray_params.raylet_socket_name or\n get_raylet_socket_name())\n",
"self.prepare_socket_file(self._raylet_socket_name)\n",
"stdout_file, stderr_file = new_raylet_log_file(redirect_output=self.\n _ray_params.redirect_worker_output)\n",
"process_info = ray.services.start_raylet(self._redis_address, self.\n _node_ip_address, self._raylet_socket_name, self.\n _plasma_store_socket_name, self._ray_params.worker_path, self.\n _ray_params.num_cpus, self._ray_params.num_gpus, self._ray_params.\n resources, self._ray_params.object_manager_port, self._ray_params.\n node_manager_port, self._ray_params.redis_password, use_valgrind=\n use_valgrind, use_profiler=use_profiler, stdout_file=stdout_file,\n stderr_file=stderr_file, config=self._config, include_java=self.\n _ray_params.include_java, java_worker_options=self._ray_params.\n java_worker_options)\n",
"assert ray_constants.PROCESS_TYPE_RAYLET not in self.all_processes\n",
"self.all_processes[ray_constants.PROCESS_TYPE_RAYLET] = [process_info]\n"
] | [
0,
0,
0,
0,
0,
6,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assert'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assert'",
"Assign'"
] |
[
"def FUNC_5(VAR_4, VAR_5, VAR_11, VAR_10):...\n",
"VAR_9 = FUNC_6(VAR_4, VAR_5, VAR_11)\n",
"FUNC_4(VAR_9, VAR_10)\n"
] | [
"def verify_results(client, jwt, query, expected):...\n",
"data = search(client, jwt, query)\n",
"verify(data, expected)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"@VAR_0.route('/terminate')...\n",
"\"\"\"docstring\"\"\"\n",
"return FUNC_3('terminate-account-action', VAR_2=True)\n"
] | [
"@authn_views.route('/terminate')...\n",
"\"\"\"docstring\"\"\"\n",
"return _authn('terminate-account-action', force_authn=True)\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"self.run_test_case(self.scenario.upsert_load_balancer())\n"
] | [
"def test_a_upsert_load_balancer(self):...\n",
"self.run_test_case(self.scenario.upsert_load_balancer())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_10(VAR_1, VAR_2, VAR_3, VAR_4):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_9 = FUNC_1(VAR_1)\n",
"VAR_11 = db.connection.cursor(db.pymysql.cursors.DictCursor)\n",
"VAR_11.execute('string', (VAR_1, VAR_2))\n",
"VAR_14 = VAR_11.fetchone()\n",
"VAR_15 = 'REVEALED'\n",
"VAR_10 = False\n",
"VAR_16 = VAR_14['current_steps'] if VAR_14 else 0\n",
"VAR_17 = VAR_4(VAR_16, VAR_3)\n",
"if VAR_17 >= VAR_9['total_steps']:\n",
"VAR_15 = 'UNLOCKED'\n",
"VAR_11.execute('string', {'player_id': VAR_2, 'achievement_id': VAR_1,\n 'current_steps': VAR_17, 'state': VAR_15})\n",
"VAR_17 = VAR_9['total_steps']\n",
"return dict(VAR_16=new_current_steps, current_state=new_state, VAR_10=\n newly_unlocked)\n",
"VAR_10 = VAR_14['state'] != 'UNLOCKED' if VAR_14 else True\n"
] | [
"def update_steps(achievement_id, player_id, steps, steps_function):...\n",
"\"\"\"docstring\"\"\"\n",
"achievement = achievements_get(achievement_id)\n",
"cursor = db.connection.cursor(db.pymysql.cursors.DictCursor)\n",
"cursor.execute(\n \"\"\"SELECT\n current_steps,\n state\n FROM player_achievements\n WHERE achievement_id = %s AND player_id = %s\"\"\"\n , (achievement_id, player_id))\n",
"player_achievement = cursor.fetchone()\n",
"new_state = 'REVEALED'\n",
"newly_unlocked = False\n",
"current_steps = player_achievement['current_steps'\n ] if player_achievement else 0\n",
"new_current_steps = steps_function(current_steps, steps)\n",
"if new_current_steps >= achievement['total_steps']:\n",
"new_state = 'UNLOCKED'\n",
"cursor.execute(\n \"\"\"INSERT INTO player_achievements (player_id, achievement_id, current_steps, state)\n VALUES\n (%(player_id)s, %(achievement_id)s, %(current_steps)s, %(state)s)\n ON DUPLICATE KEY UPDATE\n current_steps = VALUES(current_steps),\n state = VALUES(state)\"\"\"\n , {'player_id': player_id, 'achievement_id': achievement_id,\n 'current_steps': new_current_steps, 'state': new_state})\n",
"new_current_steps = achievement['total_steps']\n",
"return dict(current_steps=new_current_steps, current_state=new_state,\n newly_unlocked=newly_unlocked)\n",
"newly_unlocked = player_achievement['state'\n ] != 'UNLOCKED' if player_achievement else True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Return'",
"Assign'"
] |
[
"def FUNC_2(self, VAR_1, VAR_2):...\n",
"if not VAR_1.is_loading():\n",
"VAR_1.window().focus_view(VAR_2)\n",
"sublime.set_timeout(lambda : self.return_to_left(VAR_1, VAR_2), 10)\n",
"VAR_1.window().focus_group(0)\n"
] | [
"def return_to_left(self, view, return_view):...\n",
"if not view.is_loading():\n",
"view.window().focus_view(return_view)\n",
"sublime.set_timeout(lambda : self.return_to_left(view, return_view), 10)\n",
"view.window().focus_group(0)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._prepare()\n",
"self._execute()\n"
] | [
"def execute(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._prepare()\n",
"self._execute()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'"
] |
[
"def __init__(self, VAR_0):...\n",
"self.cursor = VAR_0\n",
"self.rides = []\n"
] | [
"def __init__(self, cursor):...\n",
"self.cursor = cursor\n",
"self.rides = []\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"@staticmethod...\n",
"return os.path.isfile(VAR_3) and os.access(VAR_3, os.X_OK)\n"
] | [
"@staticmethod...\n",
"return os.path.isfile(path) and os.access(path, os.X_OK)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_11(self, VAR_1=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = FUNC_0(VAR_1)\n",
"logging.info('Removing all completed jobs from history')\n",
"return self.execute(\n \"DELETE FROM history WHERE name LIKE ? AND status = 'Completed'\", (\n VAR_1,), VAR_17=True)\n"
] | [
"def remove_completed(self, search=None):...\n",
"\"\"\"docstring\"\"\"\n",
"search = convert_search(search)\n",
"logging.info('Removing all completed jobs from history')\n",
"return self.execute(\n \"DELETE FROM history WHERE name LIKE ? AND status = 'Completed'\", (\n search,), save=True)\n"
] | [
0,
0,
4,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Return'"
] |
[
"\"\"\"\nModule for gathering disk information\n\"\"\"\n",
"import logging\n",
"import salt.utils\n",
"VAR_0 = logging.getLogger(__name__)\n",
"def __virtual__():...\n",
"\"\"\"docstring\"\"\"\n",
"if salt.utils.is_windows():\n",
"return False\n",
"return 'disk'\n"
] | [
"\"\"\"\nModule for gathering disk information\n\"\"\"\n",
"import logging\n",
"import salt.utils\n",
"log = logging.getLogger(__name__)\n",
"def __virtual__():...\n",
"\"\"\"docstring\"\"\"\n",
"if salt.utils.is_windows():\n",
"return False\n",
"return 'disk'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Assign'",
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"def __del__(self):...\n",
"self.connection.close()\n"
] | [
"def __del__(self):...\n",
"self.connection.close()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"@VAR_0.route('/screenshot/<page>')...\n",
"if current_user.email == '[email protected]':\n",
"return render_template(VAR_7 + '.html')\n",
"return redirect(VAR_4)\n"
] | [
"@app.route('/screenshot/<page>')...\n",
"if current_user.email == '[email protected]':\n",
"return render_template(page + '.html')\n",
"return redirect(url_prefix)\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_6(VAR_2):...\n",
"if len(set(getattr(obj, 'academic_year_id') for obj in VAR_2)) > 1:\n",
"return VAR_2[0].academic_year\n"
] | [
"def _extract_common_academic_year(objects):...\n",
"if len(set(getattr(obj, 'academic_year_id') for obj in objects)) > 1:\n",
"return objects[0].academic_year\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"For",
"Return'"
] |
[
"def FUNC_2(VAR_2, VAR_3='main'):...\n",
"VAR_8, VAR_6 = FUNC_1(VAR_2)\n",
"if VAR_8 not in VAR_9.path:\n",
"VAR_9.path.insert(0, VAR_8)\n",
"VAR_9.modules['__tng_runfile__'] = VAR_10 = imp.load_source(VAR_6, VAR_2)\n",
"if hasattr(VAR_10, VAR_3):\n",
"return getattr(VAR_10, VAR_3)\n",
"logging.getLogger('tng').warn('No {} function found in {}'.format(VAR_3, VAR_2)\n )\n"
] | [
"def run_file(runfile, func_to_get='main'):...\n",
"import_path, name = _guess_import_path_and_name(runfile)\n",
"if import_path not in sys.path:\n",
"sys.path.insert(0, import_path)\n",
"sys.modules['__tng_runfile__'] = module = imp.load_source(name, runfile)\n",
"if hasattr(module, func_to_get):\n",
"return getattr(module, func_to_get)\n",
"logging.getLogger('tng').warn('No {} function found in {}'.format(\n func_to_get, runfile))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Expr'"
] |
[
"import mimetypes\n",
"import os\n",
"from django.contrib.gis.db.models import GeometryField\n",
"from django.contrib.gis.db.models.functions import Envelope\n",
"from django.core.exceptions import ObjectDoesNotExist\n",
"from django.db.models.functions import Cast\n",
"from django.http import HttpResponse\n",
"from wsgiref.util import FileWrapper\n",
"from rest_framework import status, serializers, viewsets, filters, exceptions, permissions, parsers\n",
"from rest_framework.response import Response\n",
"from rest_framework.decorators import detail_route\n",
"from rest_framework.views import APIView\n",
"from .common import get_and_check_project, get_tile_json\n",
"from app import models, scheduler, pending_actions\n",
"from nodeodm.models import ProcessingNode\n",
"def FUNC_0(self, VAR_0):...\n",
"return VAR_0.id\n"
] | [
"import mimetypes\n",
"import os\n",
"from django.contrib.gis.db.models import GeometryField\n",
"from django.contrib.gis.db.models.functions import Envelope\n",
"from django.core.exceptions import ObjectDoesNotExist\n",
"from django.db.models.functions import Cast\n",
"from django.http import HttpResponse\n",
"from wsgiref.util import FileWrapper\n",
"from rest_framework import status, serializers, viewsets, filters, exceptions, permissions, parsers\n",
"from rest_framework.response import Response\n",
"from rest_framework.decorators import detail_route\n",
"from rest_framework.views import APIView\n",
"from .common import get_and_check_project, get_tile_json\n",
"from app import models, scheduler, pending_actions\n",
"from nodeodm.models import ProcessingNode\n",
"def to_representation(self, obj):...\n",
"return obj.id\n"
] | [
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, VAR_37=None, VAR_38=None, VAR_39=False):...\n",
"\"\"\"docstring\"\"\"\n",
"list.__init__(self)\n",
"self._names = dict()\n",
"if VAR_37:\n",
"self.extend(map(str, VAR_37) if VAR_39 else VAR_37)\n",
"if VAR_38:\n",
"if isinstance(VAR_37, CLASS_2):\n",
"for VAR_44, item in VAR_38.items():\n",
"self.take_names(VAR_37.get_names())\n",
"self.append(item)\n",
"self.add_name(VAR_44)\n"
] | [
"def __init__(self, toclone=None, fromdict=None, plainstr=False):...\n",
"\"\"\"docstring\"\"\"\n",
"list.__init__(self)\n",
"self._names = dict()\n",
"if toclone:\n",
"self.extend(map(str, toclone) if plainstr else toclone)\n",
"if fromdict:\n",
"if isinstance(toclone, Namedlist):\n",
"for key, item in fromdict.items():\n",
"self.take_names(toclone.get_names())\n",
"self.append(item)\n",
"self.add_name(key)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Condition",
"For",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@VAR_0.before_app_first_request...\n",
"\"\"\"docstring\"\"\"\n",
"user_logged_out.connect(oauth_logout_handler)\n",
"oauth.init_app(current_app)\n",
"for VAR_1, conf in cfg['OAUTHCLIENT_REMOTE_APPS'].items():\n",
"if VAR_1 not in oauth.remote_apps:\n",
"VAR_4 = oauth.remote_app(VAR_1, **conf['params'])\n",
"VAR_4 = oauth.remote_apps[VAR_1]\n",
"VAR_4.tokengetter(make_token_getter(VAR_4))\n",
"handlers.register(VAR_1, VAR_4.authorized_handler(make_handler(conf.get(\n 'authorized_handler', authorized_default_handler), VAR_4)))\n",
"disconnect_handlers.register(VAR_1, make_handler(conf.get(\n 'disconnect_handler', disconnect_handler), VAR_4, with_response=False))\n",
"def FUNC_5(VAR_4, *VAR_5, **VAR_6):...\n",
"VAR_7 = conf.get('signup_handler', dict())\n",
"VAR_8 = make_handler(VAR_7.get('info', FUNC_5), VAR_4, with_response=False)\n",
"VAR_9 = make_handler(VAR_7.get('setup', FUNC_5), VAR_4, with_response=False)\n",
"VAR_10 = make_handler(VAR_7.get('view', FUNC_5), VAR_4, with_response=False)\n",
"signup_handlers.register(VAR_1, dict(info=account_info_handler, setup=\n account_setup_handler, view=account_view_handler))\n"
] | [
"@blueprint.before_app_first_request...\n",
"\"\"\"docstring\"\"\"\n",
"user_logged_out.connect(oauth_logout_handler)\n",
"oauth.init_app(current_app)\n",
"for remote_app, conf in cfg['OAUTHCLIENT_REMOTE_APPS'].items():\n",
"if remote_app not in oauth.remote_apps:\n",
"remote = oauth.remote_app(remote_app, **conf['params'])\n",
"remote = oauth.remote_apps[remote_app]\n",
"remote.tokengetter(make_token_getter(remote))\n",
"handlers.register(remote_app, remote.authorized_handler(make_handler(conf.\n get('authorized_handler', authorized_default_handler), remote)))\n",
"disconnect_handlers.register(remote_app, make_handler(conf.get(\n 'disconnect_handler', disconnect_handler), remote, with_response=False))\n",
"def dummy_handler(remote, *args, **kargs):...\n",
"signup_handler = conf.get('signup_handler', dict())\n",
"account_info_handler = make_handler(signup_handler.get('info',\n dummy_handler), remote, with_response=False)\n",
"account_setup_handler = make_handler(signup_handler.get('setup',\n dummy_handler), remote, with_response=False)\n",
"account_view_handler = make_handler(signup_handler.get('view',\n dummy_handler), remote, with_response=False)\n",
"signup_handlers.register(remote_app, dict(info=account_info_handler, setup=\n account_setup_handler, view=account_view_handler))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"For",
"Docstring",
"Expr'",
"Expr'",
"For",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_5(VAR_6):...\n",
"return JsonResponse({'applinks': {'apps': [], 'details': [{'appID':\n settings.APPLE_APPID, 'paths': ['*']}]}})\n"
] | [
"def aasa_redirect(request):...\n",
"return JsonResponse({'applinks': {'apps': [], 'details': [{'appID':\n settings.APPLE_APPID, 'paths': ['*']}]}})\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"async def FUNC_2(VAR_1, VAR_2, VAR_3, VAR_4, VAR_5, VAR_10, VAR_7, VAR_8, VAR_9...\n",
"VAR_7 = VAR_7.split(' ')\n",
"if len(VAR_7) >= 2:\n",
"VAR_0 = VAR_7[0].lower()\n",
"await VAR_1.send_message(VAR_2,\n '{}, **USAGE:** {}remindall <time> <message...>'.format(VAR_3, VAR_9))\n",
"VAR_14 = ''\n",
"print('')\n",
"for i in range(1, len(VAR_7)):\n",
"VAR_14 += VAR_7[i] + ' '\n",
"if 'd' in VAR_0 or 'h' in VAR_0 or 'm' in VAR_0 or 's' in VAR_0 or ',' in VAR_0:\n",
"VAR_15 = FUNC_0(VAR_0)\n",
"await VAR_1.send_message(VAR_2,\n '{}, The time must be in #time format (ex: 1h or 2h,5m).'.format(VAR_3,\n VAR_9))\n",
"VAR_5.execute(\n \"INSERT INTO reminder (type, channel, message, date) VALUES ('1', {}, '{}', '{}');\"\n .format(VAR_10, VAR_14, str(VAR_15)))\n",
"await VAR_1.send_message(VAR_2,\n '{}, error when trying to add info to database! Please notifiy the admins!'\n .format(VAR_3))\n",
"VAR_4.commit()\n",
"print('[{}]: {} - {}'.format(strftime('%b %d, %Y %X', localtime()),\n 'SQLITE', 'Error when trying to insert data: ' + e.args[0]))\n",
"await VAR_1.send_message(VAR_2, '{}, will remind you.'.format(VAR_3))\n",
"VAR_8.write('[{}]: {} - {}\\n'.format(strftime('%b %d, %Y %X', localtime()),\n 'SQLITE', 'Error when trying to insert data: ' + e.args[0]))\n"
] | [
"async def ex_all(dclient, channel, mention, con, con_ex, channel_id, a,...\n",
"a = a.split(' ')\n",
"if len(a) >= 2:\n",
"time = a[0].lower()\n",
"await dclient.send_message(channel,\n '{}, **USAGE:** {}remindall <time> <message...>'.format(mention, cmd_char))\n",
"msg = ''\n",
"print('')\n",
"for i in range(1, len(a)):\n",
"msg += a[i] + ' '\n",
"if 'd' in time or 'h' in time or 'm' in time or 's' in time or ',' in time:\n",
"date = get_date(time)\n",
"await dclient.send_message(channel,\n '{}, The time must be in #time format (ex: 1h or 2h,5m).'.format(\n mention, cmd_char))\n",
"con_ex.execute(\n \"INSERT INTO reminder (type, channel, message, date) VALUES ('1', {}, '{}', '{}');\"\n .format(channel_id, msg, str(date)))\n",
"await dclient.send_message(channel,\n '{}, error when trying to add info to database! Please notifiy the admins!'\n .format(mention))\n",
"con.commit()\n",
"print('[{}]: {} - {}'.format(strftime('%b %d, %Y %X', localtime()),\n 'SQLITE', 'Error when trying to insert data: ' + e.args[0]))\n",
"await dclient.send_message(channel, '{}, will remind you.'.format(mention))\n",
"log_file.write('[{}]: {} - {}\\n'.format(strftime('%b %d, %Y %X', localtime(\n )), 'SQLITE', 'Error when trying to insert data: ' + e.args[0]))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"For",
"AugAssign'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_23(self):...\n",
"VAR_15 = 1\n",
"VAR_16 = 2\n",
"mock_signal.return_value = VAR_15\n",
"self.assertEqual(mock_signal.call_count, 2)\n",
"mock_signal.assert_has_calls([mock.call(signal.SIGUSR2, VAR_16), mock.call(\n signal.SIGUSR2, VAR_15)])\n"
] | [
"def test_signal_handler_as(self):...\n",
"mock_initial_handler = 1\n",
"mock_new_handler = 2\n",
"mock_signal.return_value = mock_initial_handler\n",
"self.assertEqual(mock_signal.call_count, 2)\n",
"mock_signal.assert_has_calls([mock.call(signal.SIGUSR2, mock_new_handler),\n mock.call(signal.SIGUSR2, mock_initial_handler)])\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"return self.data\n"
] | [
"def to_json(self):...\n",
"return self.data\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_19(self, VAR_6):...\n",
"VAR_49 = self.nodes.get(VAR_6['name'])\n",
"VAR_50 = []\n",
"VAR_51 = []\n",
"dep_resolve(VAR_49, VAR_50, VAR_51)\n",
"for VAR_49 in VAR_50:\n",
"self.logger.debug(\"node name '%s' vs. comp name '%s'\" % (VAR_49.comp_name,\n VAR_6['name']))\n",
"self.logger.debug(\"All dependencies satisfied, starting '%s'\" % VAR_6['name'])\n",
"if VAR_49.comp_name != VAR_6['name']:\n",
"VAR_52 = self.check_component(VAR_49.component)\n",
"self.logger.debug('Checking and starting %s' % VAR_49.comp_name)\n",
"if VAR_52 is CLASS_0.STARTED_BY_HAND or VAR_52 is CLASS_0.RUNNING:\n",
"VAR_52 = self.check_component(VAR_49.component)\n",
"self.logger.debug('Component %s is already running. Skipping start' % VAR_6\n ['name'])\n",
"self.start_component_without_deps(VAR_6)\n",
"if VAR_52 is CLASS_0.STOPPED_BUT_SUCCESSFUL or VAR_52 is CLASS_0.STARTED_BY_HAND or VAR_52 is CLASS_0.RUNNING:\n",
"return True\n",
"self.logger.debug(\n 'Component %s is already running, skipping to next in line' % VAR_6['name']\n )\n",
"self.logger.debug(\"Start component '%s' as dependency of '%s'\" % (VAR_49.\n comp_name, VAR_6['name']))\n",
"self.start_component_without_deps(VAR_49.component)\n",
"VAR_62 = 0\n",
"while True:\n",
"self.logger.debug('Checking %s resulted in checkstate %s' % (VAR_49.\n comp_name, VAR_52))\n",
"VAR_52 = self.check_component(VAR_49.component)\n",
"if VAR_52 is not CLASS_0.RUNNING or VAR_52 is not CLASS_0.STOPPED_BUT_SUCCESSFUL:\n",
"if VAR_62 > 100:\n",
"return False\n",
"VAR_62 = VAR_62 + 1\n",
"sleep(0.5)\n"
] | [
"def start_component(self, comp):...\n",
"node = self.nodes.get(comp['name'])\n",
"res = []\n",
"unres = []\n",
"dep_resolve(node, res, unres)\n",
"for node in res:\n",
"self.logger.debug(\"node name '%s' vs. comp name '%s'\" % (node.comp_name,\n comp['name']))\n",
"self.logger.debug(\"All dependencies satisfied, starting '%s'\" % comp['name'])\n",
"if node.comp_name != comp['name']:\n",
"state = self.check_component(node.component)\n",
"self.logger.debug('Checking and starting %s' % node.comp_name)\n",
"if state is CheckState.STARTED_BY_HAND or state is CheckState.RUNNING:\n",
"state = self.check_component(node.component)\n",
"self.logger.debug('Component %s is already running. Skipping start' % comp[\n 'name'])\n",
"self.start_component_without_deps(comp)\n",
"if state is CheckState.STOPPED_BUT_SUCCESSFUL or state is CheckState.STARTED_BY_HAND or state is CheckState.RUNNING:\n",
"return True\n",
"self.logger.debug(\n 'Component %s is already running, skipping to next in line' % comp['name'])\n",
"self.logger.debug(\"Start component '%s' as dependency of '%s'\" % (node.\n comp_name, comp['name']))\n",
"self.start_component_without_deps(node.component)\n",
"tries = 0\n",
"while True:\n",
"self.logger.debug('Checking %s resulted in checkstate %s' % (node.comp_name,\n state))\n",
"state = self.check_component(node.component)\n",
"if state is not CheckState.RUNNING or state is not CheckState.STOPPED_BUT_SUCCESSFUL:\n",
"if tries > 100:\n",
"return False\n",
"tries = tries + 1\n",
"sleep(0.5)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Return'",
"Assign'",
"Expr'"
] |
[
"def FUNC_2(VAR_0, VAR_3):...\n",
"VAR_4 = VAR_0.cursor()\n",
"VAR_5 = 'ALTER TABLE Presets DROP COLUMN id;'\n",
"VAR_4.execute(VAR_5)\n",
"VAR_5 = (\n 'ALTER TABLE Presets ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY NOT NULL FIRST;'\n )\n",
"VAR_4.execute(VAR_5)\n",
"VAR_5 = 'DELETE FROM Presets WHERE id = ' + VAR_3\n",
"VAR_4.execute(VAR_5)\n",
"VAR_5 = 'ALTER TABLE Presets DROP COLUMN id;'\n",
"VAR_4.execute(VAR_5)\n",
"VAR_5 = (\n 'ALTER TABLE Presets ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY NOT NULL FIRST;'\n )\n",
"VAR_4.execute(VAR_5)\n"
] | [
"def remove_preset(conn, key):...\n",
"cursor = conn.cursor()\n",
"quer = 'ALTER TABLE Presets DROP COLUMN id;'\n",
"cursor.execute(quer)\n",
"quer = (\n 'ALTER TABLE Presets ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY NOT NULL FIRST;'\n )\n",
"cursor.execute(quer)\n",
"quer = 'DELETE FROM Presets WHERE id = ' + key\n",
"cursor.execute(quer)\n",
"quer = 'ALTER TABLE Presets DROP COLUMN id;'\n",
"cursor.execute(quer)\n",
"quer = (\n 'ALTER TABLE Presets ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY NOT NULL FIRST;'\n )\n",
"cursor.execute(quer)\n"
] | [
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_23(self):...\n",
"if self.get('__islocal') or not self.name:\n",
"self.db_insert()\n",
"VAR_6 = self.get_valid_dict(VAR_13=True)\n",
"return\n",
"VAR_34 = VAR_6['name']\n",
"VAR_33 = list(VAR_6)\n",
"VAR_54.db.sql(\"\"\"update `tab{doctype}`\n\t\t\t\tset {values} where name=%s\"\"\".\n format(VAR_1=self.doctype, VAR_57=', '.join([('`' + c + '`=%s') for c in\n columns])), list(VAR_6.values()) + [VAR_34])\n",
"if VAR_17.args[0] == 1062 and 'Duplicate' in cstr(VAR_17.args[1]):\n",
"self.show_unique_validation_message(VAR_17)\n"
] | [
"def db_update(self):...\n",
"if self.get('__islocal') or not self.name:\n",
"self.db_insert()\n",
"d = self.get_valid_dict(convert_dates_to_str=True)\n",
"return\n",
"name = d['name']\n",
"columns = list(d)\n",
"frappe.db.sql(\"\"\"update `tab{doctype}`\n\t\t\t\tset {values} where name=%s\"\"\".\n format(doctype=self.doctype, values=', '.join([('`' + c + '`=%s') for c in\n columns])), list(d.values()) + [name])\n",
"if e.args[0] == 1062 and 'Duplicate' in cstr(e.args[1]):\n",
"self.show_unique_validation_message(e)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_8(self):...\n",
"return 'try to login'\n"
] | [
"def describe(self):...\n",
"return 'try to login'\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_5(self, VAR_7):...\n",
"return VAR_54.db.get_value(self.doctype, self.name, VAR_7)\n"
] | [
"def get_db_value(self, key):...\n",
"return frappe.db.get_value(self.doctype, self.name, key)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_4(VAR_7):...\n",
"\"\"\"docstring\"\"\"\n",
"int(VAR_7)\n",
"return False\n",
"return True\n"
] | [
"def isInt(ss):...\n",
"\"\"\"docstring\"\"\"\n",
"int(ss)\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Return'",
"Return'"
] |
[
"def __init__(self, *VAR_6, **VAR_7):...\n",
"super(CLASS_21, self).__init__(*VAR_6, **kwargs)\n",
"self.fields['document_type'].label = ''\n",
"self.fields['document_type'].widget.attrs.update({'onchange': 'form.submit();'}\n )\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"super(DocumentTypeForm, self).__init__(*args, **kwargs)\n",
"self.fields['document_type'].label = ''\n",
"self.fields['document_type'].widget.attrs.update({'onchange': 'form.submit();'}\n )\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_30(self, VAR_16):...\n",
""
] | [
"def delete_all(self, items):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_8(VAR_4):...\n",
"if not os.path.exists('D:' + VAR_4):\n",
"return VAR_4\n",
"VAR_28 = 1\n",
"while VAR_28 < 101:\n",
"VAR_32 = re.sub('\\\\.(.*?)$', ' (%d).\\\\1' % VAR_28, VAR_4)\n",
"return ''\n",
"if not os.path.exists('D:' + VAR_32):\n",
"return VAR_32\n",
"VAR_28 = VAR_28 + 1\n"
] | [
"def get_non_duplicate_path(file_path):...\n",
"if not os.path.exists('D:' + file_path):\n",
"return file_path\n",
"duplicate = 1\n",
"while duplicate < 101:\n",
"new_path = re.sub('\\\\.(.*?)$', ' (%d).\\\\1' % duplicate, file_path)\n",
"return ''\n",
"if not os.path.exists('D:' + new_path):\n",
"return new_path\n",
"duplicate = duplicate + 1\n"
] | [
0,
4,
4,
4,
4,
4,
4,
4,
4,
4
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Condition",
"Return'",
"Assign'"
] |
[
"def FUNC_3(VAR_2, VAR_3):...\n",
"if VAR_3 == True:\n",
"print('I will be tracking this series: ' + FUNC_1(VAR_2.submission.title) +\n ' because of this comment ' + VAR_2.fullname)\n",
"print('I will stop tracking this series: ' + FUNC_1(VAR_2.submission.title) +\n ' because of this comment ' + VAR_2.fullname)\n"
] | [
"def replyToTrackRequest(comment, positive):...\n",
"if positive == True:\n",
"print('I will be tracking this series: ' + getTitle(comment.submission.\n title) + ' because of this comment ' + comment.fullname)\n",
"print('I will stop tracking this series: ' + getTitle(comment.submission.\n title) + ' because of this comment ' + comment.fullname)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(self, VAR_7='True', VAR_8='*'):...\n",
"self.cursor.execute(self.SQL_QUERY_JSON % (VAR_8, self.name, VAR_7))\n",
"VAR_9 = [item for item in self.cursor.fetchall()]\n",
"return VAR_9\n"
] | [
"def query(self, query='True', columns='*'):...\n",
"self.cursor.execute(self.SQL_QUERY_JSON % (columns, self.name, query))\n",
"rows = [item for item in self.cursor.fetchall()]\n",
"return rows\n"
] | [
0,
4,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Return'"
] |
[
"@staticmethod...\n",
"return ' '.join(escape_path_argument(s) for s in (sys.executable, os.path.\n join(os.path.dirname(os.path.realpath(__file__)),\n 'run_shell_command_testfiles', VAR_0)))\n"
] | [
"@staticmethod...\n",
"return ' '.join(escape_path_argument(s) for s in (sys.executable, os.path.\n join(os.path.dirname(os.path.realpath(__file__)),\n 'run_shell_command_testfiles', scriptname)))\n"
] | [
0,
2
] | [
"Condition",
"Return'"
] |
[
"def FUNC_38(self, VAR_40, VAR_41, VAR_42=None):...\n",
"\"\"\"docstring\"\"\"\n",
"self._names[VAR_40] = VAR_41, VAR_42\n",
"if VAR_42 is None:\n",
"setattr(self, VAR_40, self[VAR_41])\n",
"setattr(self, VAR_40, CLASS_2(VAR_37=self[index:end]))\n"
] | [
"def set_name(self, name, index, end=None):...\n",
"\"\"\"docstring\"\"\"\n",
"self._names[name] = index, end\n",
"if end is None:\n",
"setattr(self, name, self[index])\n",
"setattr(self, name, Namedlist(toclone=self[index:end]))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_30():...\n",
"if VAR_3 == 'text':\n",
"print('Input file: %s' % VAR_75)\n",
"VAR_52 = FUNC_2(VAR_15, VAR_2, VAR_3=output_mode, VAR_4=output_limit, VAR_5\n =spires, VAR_6=match_mode, VAR_7=no_cache, VAR_8=with_author_keywords,\n VAR_9=rebuild_cache, VAR_10=only_core_tags, VAR_11=extract_acronyms)\n",
"if VAR_12:\n",
"return VAR_52\n",
"if isinstance(VAR_52, dict):\n",
"for VAR_84 in VAR_52:\n",
"print(VAR_52[VAR_84])\n"
] | [
"def process_lines():...\n",
"if output_mode == 'text':\n",
"print('Input file: %s' % source)\n",
"output = get_keywords_from_text(text_lines, taxonomy_name, output_mode=\n output_mode, output_limit=output_limit, spires=spires, match_mode=\n match_mode, no_cache=no_cache, with_author_keywords=\n with_author_keywords, rebuild_cache=rebuild_cache, only_core_tags=\n only_core_tags, extract_acronyms=extract_acronyms)\n",
"if api:\n",
"return output\n",
"if isinstance(output, dict):\n",
"for i in output:\n",
"print(output[i])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Condition",
"For",
"Expr'"
] |
[
"def FUNC_9(self):...\n",
"self.test_strings = ('normal_string', 'string with spaces',\n 'string with quotes\"a', \"string with s-quotes'b\", 'bsn \\n A',\n 'unrecognized \\\\q escape')\n"
] | [
"def setUp(self):...\n",
"self.test_strings = ('normal_string', 'string with spaces',\n 'string with quotes\"a', \"string with s-quotes'b\", 'bsn \\n A',\n 'unrecognized \\\\q escape')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def __init__(self, VAR_46=50, VAR_47=100):...\n",
"\"\"\"docstring\"\"\"\n",
"self.regex = re.compile(\n '((?P<value>.+)(?P=value){{{min_repeat},{max_repeat}}})$'.format(VAR_46\n =min_repeat - 1, VAR_47=max_repeat - 1))\n"
] | [
"def __init__(self, min_repeat=50, max_repeat=100):...\n",
"\"\"\"docstring\"\"\"\n",
"self.regex = re.compile(\n '((?P<value>.+)(?P=value){{{min_repeat},{max_repeat}}})$'.format(\n min_repeat=min_repeat - 1, max_repeat=max_repeat - 1))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'"
] |
[
"def __init__(self, VAR_9, VAR_5, VAR_1, VAR_6, VAR_11={}, **VAR_12):...\n",
"super().__init__(VAR_9, VAR_1, VAR_6, VAR_11={}, **kwargs)\n",
"self.rev = 'HEAD'\n",
"VAR_39 = VAR_5.fragment and CLASS_1._parse_fragment(VAR_5) or {}\n",
"VAR_40 = VAR_39.get('rev', None)\n",
"VAR_41 = VAR_39.get('branch', None)\n",
"VAR_42 = VAR_39.get('tag', None)\n",
"if (VAR_41 or VAR_42) and self.url.path.endswith('trunk'):\n",
"VAR_5 = VAR_5._replace(VAR_36=self.url.path[:-5])\n",
"if VAR_41:\n",
"VAR_5 = VAR_5._replace(VAR_36=join(url.path, 'branches', branch))\n",
"if VAR_42:\n",
"if VAR_40:\n",
"VAR_5 = VAR_5._replace(VAR_36=join(url.path, 'tags', tag))\n",
"VAR_5 = VAR_5._replace(VAR_36=url.path + '@' + rev)\n",
"self.url = VAR_5._replace(VAR_39='')\n",
"self.rev = VAR_40\n"
] | [
"def __init__(self, name, url, directory, options, conf={}, **kwargs):...\n",
"super().__init__(name, directory, options, conf={}, **kwargs)\n",
"self.rev = 'HEAD'\n",
"fragment = url.fragment and Subproject._parse_fragment(url) or {}\n",
"rev = fragment.get('rev', None)\n",
"branch = fragment.get('branch', None)\n",
"tag = fragment.get('tag', None)\n",
"if (branch or tag) and self.url.path.endswith('trunk'):\n",
"url = url._replace(path=self.url.path[:-5])\n",
"if branch:\n",
"url = url._replace(path=join(url.path, 'branches', branch))\n",
"if tag:\n",
"if rev:\n",
"url = url._replace(path=join(url.path, 'tags', tag))\n",
"url = url._replace(path=url.path + '@' + rev)\n",
"self.url = url._replace(fragment='')\n",
"self.rev = rev\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_5(self):...\n",
"VAR_8 = 'test_foo.py:FooTest.test_bar'\n",
"VAR_7 = BokChoyTestSuite('', test_spec=spec)\n",
"VAR_1 = 'tests/{}'.format(VAR_8)\n",
"self.assertEqual(VAR_7.cmd, self._expected_command(VAR_1=name))\n"
] | [
"def test_testcase_spec(self):...\n",
"spec = 'test_foo.py:FooTest.test_bar'\n",
"suite = BokChoyTestSuite('', test_spec=spec)\n",
"name = 'tests/{}'.format(spec)\n",
"self.assertEqual(suite.cmd, self._expected_command(name=name))\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_1(VAR_2, VAR_3=False):...\n",
"\"\"\"docstring\"\"\"\n",
"if FUNC_0(VAR_2):\n",
"VAR_0.error('Unable to read from file %s. (%s)' % (VAR_2, ex1.strerror))\n",
"VAR_11 = [line.decode('utf-8', 'replace') for line in VAR_24]\n",
"if not FUNC_4('pdftotext'):\n",
"VAR_24 = open(VAR_2, 'r')\n",
"return []\n",
"VAR_24.close()\n",
"VAR_0.error('pdftotext is not available on the system.')\n",
"VAR_23 = 'pdftotext -q -enc UTF-8 %s -' % re.escape(VAR_2)\n",
"if not FUNC_2('\\n'.join(VAR_11)):\n",
"VAR_24 = os.popen(VAR_23)\n",
"VAR_0.warning('string' % VAR_2)\n",
"VAR_12 = len(VAR_11)\n",
"VAR_13 = 0\n",
"for line in VAR_11:\n",
"VAR_13 += len(re.findall('\\\\S+', line))\n",
"VAR_11 = [line for line in VAR_11 if VAR_1.search(line) is not None]\n",
"if not VAR_3:\n",
"VAR_0.info('Local file has %d lines and %d words.' % (VAR_12, VAR_13))\n",
"return VAR_11\n"
] | [
"def text_lines_from_local_file(document, remote=False):...\n",
"\"\"\"docstring\"\"\"\n",
"if is_pdf(document):\n",
"log.error('Unable to read from file %s. (%s)' % (document, ex1.strerror))\n",
"lines = [line.decode('utf-8', 'replace') for line in filestream]\n",
"if not executable_exists('pdftotext'):\n",
"filestream = open(document, 'r')\n",
"return []\n",
"filestream.close()\n",
"log.error('pdftotext is not available on the system.')\n",
"cmd = 'pdftotext -q -enc UTF-8 %s -' % re.escape(document)\n",
"if not _is_english_text('\\n'.join(lines)):\n",
"filestream = os.popen(cmd)\n",
"log.warning(\n \"It seems the file '%s' is unvalid and doesn't contain text. Please communicate this file to the Invenio team.\"\n % document)\n",
"line_nb = len(lines)\n",
"word_nb = 0\n",
"for line in lines:\n",
"word_nb += len(re.findall('\\\\S+', line))\n",
"lines = [line for line in lines if _ONE_WORD.search(line) is not None]\n",
"if not remote:\n",
"log.info('Local file has %d lines and %d words.' % (line_nb, word_nb))\n",
"return lines\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
7,
0,
7,
7,
7,
7,
7,
7,
7,
7,
7
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"AugAssign'",
"Assign'",
"Condition",
"Expr'",
"Return'"
] |
[
"def __init__(self, VAR_22):...\n",
"self.prefix = VAR_22\n"
] | [
"def __init__(self, prefix):...\n",
"self.prefix = prefix\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_1(VAR_0: DirectoryEntry, VAR_2=False) ->ScanResult:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_12 = url_to_domain(VAR_0.landing_page_url)\n",
"VAR_1 = inspect_domains([VAR_12], {'timeout': 10})\n",
"VAR_13 = FUNC_0(VAR_0, VAR_1[0])\n",
"if VAR_2:\n",
"VAR_13.securedrop = VAR_0\n",
"return VAR_13\n",
"VAR_13.save()\n"
] | [
"def scan(securedrop: DirectoryEntry, commit=False) ->ScanResult:...\n",
"\"\"\"docstring\"\"\"\n",
"securedrop_domain = url_to_domain(securedrop.landing_page_url)\n",
"pshtt_results = inspect_domains([securedrop_domain], {'timeout': 10})\n",
"result = pshtt_data_to_result(securedrop, pshtt_results[0])\n",
"if commit:\n",
"result.securedrop = securedrop\n",
"return result\n",
"result.save()\n"
] | [
0,
0,
6,
6,
6,
0,
6,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_13(VAR_11, VAR_1, VAR_3, VAR_4):...\n",
"VAR_12 = []\n",
"for VAR_15 in range(VAR_11[1]):\n",
"VAR_29 = VAR_1.pop()\n",
"if len(VAR_12) == 1:\n",
"if VAR_29 >= 0:\n",
"if VAR_12[0] < 0:\n",
"if VAR_12[0] >= 0 and VAR_12[1] >= 0:\n",
"VAR_12.insert(0, int(VAR_29, 16))\n",
"VAR_12.insert(0, VAR_29)\n",
"VAR_30 = VAR_17[VAR_11[0]]\n",
"VAR_30 = VAR_16[VAR_11[0]]\n",
"VAR_30 = VAR_16[VAR_11[0]]\n",
"VAR_30 = VAR_17[VAR_11[0]]\n",
"VAR_1.append(VAR_30(VAR_12, VAR_3, VAR_4))\n",
"VAR_1.append(helpers.toHex(VAR_30(VAR_12)))\n",
"VAR_1.append(helpers.toHex(VAR_30(VAR_12)))\n",
"VAR_1.append(VAR_30(VAR_12, VAR_3, VAR_4))\n"
] | [
"def handleBoolOp(item, stack, symbols, symId):...\n",
"params = []\n",
"for i in range(item[1]):\n",
"p = stack.pop()\n",
"if len(params) == 1:\n",
"if p >= 0:\n",
"if params[0] < 0:\n",
"if params[0] >= 0 and params[1] >= 0:\n",
"params.insert(0, int(p, 16))\n",
"params.insert(0, p)\n",
"func = boolMapSym[item[0]]\n",
"func = boolMap[item[0]]\n",
"func = boolMap[item[0]]\n",
"func = boolMapSym[item[0]]\n",
"stack.append(func(params, symbols, symId))\n",
"stack.append(helpers.toHex(func(params)))\n",
"stack.append(helpers.toHex(func(params)))\n",
"stack.append(func(params, symbols, symId))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"from __future__ import absolute_import, division, print_function\n",
"__metaclass__ = type\n",
"from ansible.errors import AnsibleError\n",
"\"\"\" a group of ansible hosts \"\"\"\n",
"def __init__(self, VAR_0=None):...\n",
"self.depth = 0\n",
"self.name = VAR_0\n",
"self.hosts = []\n",
"self._hosts = None\n",
"self.vars = {}\n",
"self.child_groups = []\n",
"self.parent_groups = []\n",
"self._hosts_cache = None\n",
"self.priority = 1\n",
"def __repr__(self):...\n",
"return self.get_name()\n"
] | [
"from __future__ import absolute_import, division, print_function\n",
"__metaclass__ = type\n",
"from ansible.errors import AnsibleError\n",
"\"\"\" a group of ansible hosts \"\"\"\n",
"def __init__(self, name=None):...\n",
"self.depth = 0\n",
"self.name = name\n",
"self.hosts = []\n",
"self._hosts = None\n",
"self.vars = {}\n",
"self.child_groups = []\n",
"self.parent_groups = []\n",
"self._hosts_cache = None\n",
"self.priority = 1\n",
"def __repr__(self):...\n",
"return self.get_name()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Assign'",
"ImportFrom'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_3(self, VAR_3, VAR_4, VAR_9):...\n",
"return VAR_4.is_course_staff or VAR_3.user.is_superuser\n"
] | [
"def has_object_permission(self, request, view, obj):...\n",
"return view.is_course_staff or request.user.is_superuser\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"from flask import Flask, render_template, redirect, request\n",
"import pg, markdown, time\n",
"from time import strftime, localtime\n",
"import pg, markdown, time\n",
"from wiki_linkify import wiki_linkify\n",
"VAR_0 = Flask('WikiApp')\n",
"VAR_1 = pg.DB(dbname='wiki_db_redo')\n",
"@VAR_0.route('/')...\n",
"return render_template('homepage.html')\n"
] | [
"from flask import Flask, render_template, redirect, request\n",
"import pg, markdown, time\n",
"from time import strftime, localtime\n",
"import pg, markdown, time\n",
"from wiki_linkify import wiki_linkify\n",
"app = Flask('WikiApp')\n",
"db = pg.DB(dbname='wiki_db_redo')\n",
"@app.route('/')...\n",
"return render_template('homepage.html')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"Assign'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_31(self, VAR_17, VAR_15):...\n",
"VAR_28 = FileManager()\n",
"VAR_29 = ImageManager()\n",
"for file_col in VAR_17.files:\n",
"if self.is_file(file_col):\n",
"for file_col in VAR_17.files:\n",
"VAR_28.save_file(VAR_17.files[file_col], getattr(VAR_15, file_col))\n",
"if self.is_image(file_col):\n",
"VAR_29.save_file(VAR_17.files[file_col], getattr(VAR_15, file_col))\n"
] | [
"def _add_files(self, this_request, item):...\n",
"fm = FileManager()\n",
"im = ImageManager()\n",
"for file_col in this_request.files:\n",
"if self.is_file(file_col):\n",
"for file_col in this_request.files:\n",
"fm.save_file(this_request.files[file_col], getattr(item, file_col))\n",
"if self.is_image(file_col):\n",
"im.save_file(this_request.files[file_col], getattr(item, file_col))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Condition",
"For",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_5(VAR_17, VAR_16):...\n",
"if FUNC_6(VAR_17):\n",
"return FUNC_8(VAR_17, VAR_16)\n",
"return FUNC_7(open(VAR_17).read(), VAR_16)\n",
"VAR_28.error('%s', e)\n"
] | [
"def render_template(template, config):...\n",
"if is_executable(template):\n",
"return render_executable(template, config)\n",
"return render_moustache(open(template).read(), config)\n",
"logger.error('%s', e)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'",
"Expr'"
] |
[
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_35 = VAR_14.get_entries_query(**kwargs)\n",
"if isinstance(VAR_25, list):\n",
"VAR_35 = VAR_35.where(lambda g: g.metadata_type in VAR_25)\n",
"VAR_35 = VAR_35.where(VAR_25=metadata_type)\n",
"if VAR_27:\n",
"VAR_35 = VAR_35.where(lambda g: g.status != TODELETE)\n",
"if VAR_28:\n",
"VAR_35 = VAR_35.where(lambda g: g.xxx == 0)\n",
"if VAR_26:\n",
"VAR_35 = VAR_35.where(public_key=channel_pk)\n",
"VAR_36 = VAR_35.count()\n",
"return VAR_35[(VAR_23 or 1) - 1:VAR_24] if VAR_23 or VAR_24 else VAR_35, VAR_36\n"
] | [
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"pony_query = cls.get_entries_query(**kwargs)\n",
"if isinstance(metadata_type, list):\n",
"pony_query = pony_query.where(lambda g: g.metadata_type in metadata_type)\n",
"pony_query = pony_query.where(metadata_type=metadata_type)\n",
"if exclude_deleted:\n",
"pony_query = pony_query.where(lambda g: g.status != TODELETE)\n",
"if hide_xxx:\n",
"pony_query = pony_query.where(lambda g: g.xxx == 0)\n",
"if channel_pk:\n",
"pony_query = pony_query.where(public_key=channel_pk)\n",
"count = pony_query.count()\n",
"return pony_query[(first or 1) - 1:last\n ] if first or last else pony_query, count\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_1: neko.NekoBot):...\n",
"\"\"\"docstring\"\"\"\n",
"self.bot = VAR_1\n"
] | [
"def __init__(self, bot: neko.NekoBot):...\n",
"\"\"\"docstring\"\"\"\n",
"self.bot = bot\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'"
] |
[
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = discord.Embed(VAR_2=\n 'Launcher for old flashcards (r4,m3,dstt,dsx,etc)', VAR_3=discord.Color\n (4387938))\n",
"VAR_7.set_author(name='Apache Thunder', url=\n 'https://gbatemp.net/threads/r4-stage2-twl-flashcart-launcher-and-perhaps-other-cards-soon%E2%84%A2.416434/'\n )\n",
"VAR_7.set_thumbnail(url='https://gbatemp.net/data/avatars/m/105/105648.jpg')\n",
"VAR_7.url = 'string'\n",
"VAR_7.description = 'Launcher for old flashcards'\n",
"await self.bot.say('', VAR_7=embed)\n"
] | [
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"embed = discord.Embed(title=\n 'Launcher for old flashcards (r4,m3,dstt,dsx,etc)', color=discord.Color\n (4387938))\n",
"embed.set_author(name='Apache Thunder', url=\n 'https://gbatemp.net/threads/r4-stage2-twl-flashcart-launcher-and-perhaps-other-cards-soon%E2%84%A2.416434/'\n )\n",
"embed.set_thumbnail(url='https://gbatemp.net/data/avatars/m/105/105648.jpg')\n",
"embed.url = (\n 'https://gbatemp.net/threads/r4-stage2-twl-flashcart-launcher-and-perhaps-other-cards-soon%E2%84%A2.416434/'\n )\n",
"embed.description = 'Launcher for old flashcards'\n",
"await self.bot.say('', embed=embed)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_3(self, VAR_2, VAR_6, VAR_7, *VAR_8):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10 = []\n",
"assert VAR_2 in self.SUBJECTS, 'Subject %s not in SUBJECTS' % VAR_2\n",
"VAR_8 = [VAR_2, VAR_6, VAR_7] + list(VAR_8)\n",
"self.observerLock.acquire()\n",
"for VAR_12, osubject, ochangeTypes, oid, VAR_5 in self.observers:\n",
"self.observerLock.release()\n",
"if VAR_2 == osubject and VAR_6 in ochangeTypes and (oid is None or oid == VAR_7\n",
"self._logger.exception('OIDs were %s %s', repr(oid), repr(VAR_7))\n",
"for task in VAR_10:\n",
"if not VAR_5:\n",
"task(*VAR_8)\n",
"VAR_10.append(VAR_12)\n",
"if VAR_12 not in self.observerscache:\n",
"def FUNC_4(VAR_12):...\n",
"self.observerscache[VAR_12].append(VAR_8)\n",
"self.observerLock.acquire()\n",
"if VAR_12 in self.observerscache:\n",
"VAR_14 = self.observerscache[VAR_12]\n",
"VAR_14 = []\n",
"self.observerLock.release()\n",
"if VAR_14:\n",
"VAR_12(VAR_14)\n",
"VAR_13 = threading.Timer(VAR_5, FUNC_4, (VAR_12,))\n",
"VAR_13.setName('Notifier-timer-%s' % VAR_2)\n",
"VAR_13.start()\n",
"self.observerscache[VAR_12] = []\n",
"self.observertimers[VAR_12] = VAR_13\n"
] | [
"def notify(self, subject, changeType, obj_id, *args):...\n",
"\"\"\"docstring\"\"\"\n",
"tasks = []\n",
"assert subject in self.SUBJECTS, 'Subject %s not in SUBJECTS' % subject\n",
"args = [subject, changeType, obj_id] + list(args)\n",
"self.observerLock.acquire()\n",
"for ofunc, osubject, ochangeTypes, oid, cache in self.observers:\n",
"self.observerLock.release()\n",
"if subject == osubject and changeType in ochangeTypes and (oid is None or \n",
"self._logger.exception('OIDs were %s %s', repr(oid), repr(obj_id))\n",
"for task in tasks:\n",
"if not cache:\n",
"task(*args)\n",
"tasks.append(ofunc)\n",
"if ofunc not in self.observerscache:\n",
"def doQueue(ofunc):...\n",
"self.observerscache[ofunc].append(args)\n",
"self.observerLock.acquire()\n",
"if ofunc in self.observerscache:\n",
"events = self.observerscache[ofunc]\n",
"events = []\n",
"self.observerLock.release()\n",
"if events:\n",
"ofunc(events)\n",
"t = threading.Timer(cache, doQueue, (ofunc,))\n",
"t.setName('Notifier-timer-%s' % subject)\n",
"t.start()\n",
"self.observerscache[ofunc] = []\n",
"self.observertimers[ofunc] = t\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assert'",
"Assign'",
"Expr'",
"For",
"Expr'",
"Condition",
"Expr'",
"For",
"Condition",
"Expr'",
"Expr'",
"Condition",
"FunctionDef'",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_9 = {}\n",
"if not VAR_5:\n",
"VAR_5 = CLASS_0.get_report(VAR_1)['analysis']\n",
"VAR_15 = CLASS_0.behavior_get_processes(VAR_1, VAR_5)\n",
"for proc in VAR_15['data']:\n",
"VAR_6 = proc['pid']\n",
"return VAR_9\n",
"VAR_22 = proc['process_name']\n",
"VAR_23 = None\n",
"for p in VAR_5['behavior']['generic']:\n",
"if p['pid'] == VAR_6:\n",
"if not VAR_23:\n",
"VAR_23 = p\n",
"VAR_24 = CLASS_0.behavior_get_watchers(VAR_1, VAR_6=pid, VAR_5=report)\n",
"for VAR_30, events in VAR_24.iteritems():\n",
"if not VAR_9.has_key(VAR_30):\n",
"VAR_9[VAR_30] = {}\n",
"if not VAR_9[VAR_30].has_key(VAR_6):\n",
"VAR_9[VAR_30][VAR_22] = {'pid': VAR_6, 'process_name': VAR_22, 'events': {}}\n",
"for VAR_31 in events:\n",
"if not VAR_9[VAR_30][VAR_22]['events'].has_key(VAR_31):\n",
"VAR_9[VAR_30][VAR_22]['events'][VAR_31] = []\n",
"for _event in VAR_23['summary'][VAR_31]:\n",
"VAR_9[VAR_30][VAR_22]['events'][VAR_31].append(_event)\n"
] | [
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"data = {}\n",
"if not report:\n",
"report = AnalysisController.get_report(task_id)['analysis']\n",
"procs = AnalysisController.behavior_get_processes(task_id, report)\n",
"for proc in procs['data']:\n",
"pid = proc['pid']\n",
"return data\n",
"pname = proc['process_name']\n",
"pdetails = None\n",
"for p in report['behavior']['generic']:\n",
"if p['pid'] == pid:\n",
"if not pdetails:\n",
"pdetails = p\n",
"watchers = AnalysisController.behavior_get_watchers(task_id, pid=pid,\n report=report)\n",
"for category, events in watchers.iteritems():\n",
"if not data.has_key(category):\n",
"data[category] = {}\n",
"if not data[category].has_key(pid):\n",
"data[category][pname] = {'pid': pid, 'process_name': pname, 'events': {}}\n",
"for event in events:\n",
"if not data[category][pname]['events'].has_key(event):\n",
"data[category][pname]['events'][event] = []\n",
"for _event in pdetails['summary'][event]:\n",
"data[category][pname]['events'][event].append(_event)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"For",
"Condition",
"Condition",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Condition",
"Assign'",
"For",
"Condition",
"Assign'",
"For",
"Expr'"
] |
[
"@abstractmethod...\n",
""
] | [
"@abstractmethod...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_0(self, VAR_0: str):...\n",
"VAR_1 = get_email(self)\n",
"self.write(api.survey.get_one(VAR_0, VAR_1=email))\n"
] | [
"def get(self, survey_id: str):...\n",
"email = get_email(self)\n",
"self.write(api.survey.get_one(survey_id, email=email))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_42(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = 'TRUNCATE TABLE players;'\n",
"tools.query(VAR_11)\n",
"tournament.swissPairings()\n"
] | [
"def test_no_players(self):...\n",
"\"\"\"docstring\"\"\"\n",
"q = 'TRUNCATE TABLE players;'\n",
"tools.query(q)\n",
"tournament.swissPairings()\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_6(self, VAR_12):...\n",
"if self.etag != VAR_12.etag or self.etag is None:\n",
"return cmp(self.mtime, VAR_12.mtime) > 0\n",
"return False\n"
] | [
"def is_newer_than(self, other):...\n",
"if self.etag != other.etag or self.etag is None:\n",
"return cmp(self.mtime, other.mtime) > 0\n",
"return False\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_36():...\n",
"yield os.path.join(self.home, 'lib')\n",
"if self.jdk:\n",
"yield os.path.join(self.home, 'jre', 'lib')\n"
] | [
"def lib_paths():...\n",
"yield os.path.join(self.home, 'lib')\n",
"if self.jdk:\n",
"yield os.path.join(self.home, 'jre', 'lib')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_5():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_32 = ['date', 'league', 'team1', 'team2']\n",
"VAR_33 = ['Date', 'Div', 'HomeTeam', 'AwayTeam']\n",
"VAR_34 = ['spi1', 'spi2', 'prob1', 'prob2', 'probtie', 'proj_score1',\n 'proj_score2', 'importance1', 'importance2', 'BbAvH', 'BbAvA', 'BbAvD',\n 'BbAv>2.5', 'BbAv<2.5', 'BbAHh', 'BbAvAHH', 'BbAvAHA']\n",
"VAR_35 = ['score1', 'score2', 'xg1', 'xg2', 'nsxg1', 'nsxg2', 'adj_score1',\n 'adj_score2']\n",
"VAR_36 = {'PSH': 'H', 'PSA': 'A', 'PSD': 'D', 'BbMx>2.5': 'over_2.5',\n 'BbMx<2.5': 'under_2.5', 'BbAHh': 'handicap', 'BbMxAHH':\n 'handicap_home', 'BbMxAHA': 'handicap_away'}\n",
"VAR_37 = {}\n",
"for VAR_48 in ('spi_historical', 'spi_fixtures', 'fd_historical',\n",
"VAR_47 = ['date'] if VAR_48 in ('spi_historical', 'spi_fixtures') else ['Date'\n ] if VAR_48 in ('fd_historical', 'fd_fixtures') else None\n",
"for col in ['team1', 'team2']:\n",
"VAR_37[VAR_48] = pd.read_sql('select * from %s' % VAR_48, VAR_0, VAR_47=\n parse_dates)\n",
"for VAR_48 in ('spi_historical', 'spi_fixtures'):\n",
"VAR_38 = pd.merge(VAR_37['spi_historical'], VAR_37['fd_historical'],\n left_on=spi_keys, right_on=fd_keys).dropna(subset=odds_cols_mapping.\n keys(), how='any').reset_index(drop=True)\n",
"VAR_37[VAR_48] = pd.merge(VAR_37[VAR_48], VAR_37['names_mapping'], left_on=\n col, right_on='left_team', how='left').drop(columns=[col, 'left_team']\n ).rename(columns={'right_team': col})\n",
"VAR_39 = pd.merge(VAR_37['spi_fixtures'], VAR_37['fd_fixtures'], left_on=\n spi_keys, right_on=fd_keys)\n",
"VAR_40 = VAR_38.loc[:, (['season'] + VAR_32 + VAR_34)]\n",
"VAR_41 = VAR_38.loc[:, (VAR_35)]\n",
"VAR_2 = VAR_38.loc[:, (VAR_32 + list(VAR_36.keys()))].rename(columns=\n odds_cols_mapping)\n",
"VAR_42 = VAR_39.loc[:, (VAR_32 + VAR_34)]\n",
"VAR_43 = VAR_39.loc[:, (VAR_32 + list(VAR_36.keys()))].rename(columns=\n odds_cols_mapping)\n",
"for VAR_49 in (1, 2):\n",
"VAR_41['avg_score%s' % VAR_49] = VAR_41[['score%s' % VAR_49, 'xg%s' %\n VAR_49, 'nsxg%s' % VAR_49]].mean(axis=1)\n",
"for target_type in TARGET_TYPES_MAPPING.keys():\n",
"if '+' in target_type:\n",
"for VAR_50 in (VAR_40, VAR_42):\n",
"VAR_3 = target_type.split('+')\n",
"VAR_50['quality'] = hmean(VAR_50[['spi1', 'spi2']], axis=1)\n",
"for VAR_48, VAR_50 in zip(['X', 'y', 'odds', 'X_test', 'odds_test'], [\n",
"VAR_2 = FUNC_0(VAR_2, VAR_3)\n",
"VAR_50['importance'] = VAR_50[['importance1', 'importance2']].mean(axis=1)\n",
"VAR_50.to_sql(VAR_48, VAR_0, index=False, if_exists='replace')\n",
"VAR_43 = FUNC_0(VAR_43, VAR_3)\n",
"VAR_50['rating'] = VAR_50[['quality', 'importance']].mean(axis=1)\n",
"VAR_50['sum_proj_score'] = VAR_50['proj_score1'] + VAR_50['proj_score2']\n"
] | [
"def create_modeling_tables():...\n",
"\"\"\"docstring\"\"\"\n",
"spi_keys = ['date', 'league', 'team1', 'team2']\n",
"fd_keys = ['Date', 'Div', 'HomeTeam', 'AwayTeam']\n",
"input_cols = ['spi1', 'spi2', 'prob1', 'prob2', 'probtie', 'proj_score1',\n 'proj_score2', 'importance1', 'importance2', 'BbAvH', 'BbAvA', 'BbAvD',\n 'BbAv>2.5', 'BbAv<2.5', 'BbAHh', 'BbAvAHH', 'BbAvAHA']\n",
"output_cols = ['score1', 'score2', 'xg1', 'xg2', 'nsxg1', 'nsxg2',\n 'adj_score1', 'adj_score2']\n",
"odds_cols_mapping = {'PSH': 'H', 'PSA': 'A', 'PSD': 'D', 'BbMx>2.5':\n 'over_2.5', 'BbMx<2.5': 'under_2.5', 'BbAHh': 'handicap', 'BbMxAHH':\n 'handicap_home', 'BbMxAHA': 'handicap_away'}\n",
"data = {}\n",
"for name in ('spi_historical', 'spi_fixtures', 'fd_historical',\n",
"parse_dates = ['date'] if name in ('spi_historical', 'spi_fixtures') else [\n 'Date'] if name in ('fd_historical', 'fd_fixtures') else None\n",
"for col in ['team1', 'team2']:\n",
"data[name] = pd.read_sql('select * from %s' % name, DB_CONNECTION,\n parse_dates=parse_dates)\n",
"for name in ('spi_historical', 'spi_fixtures'):\n",
"historical = pd.merge(data['spi_historical'], data['fd_historical'],\n left_on=spi_keys, right_on=fd_keys).dropna(subset=odds_cols_mapping.\n keys(), how='any').reset_index(drop=True)\n",
"data[name] = pd.merge(data[name], data['names_mapping'], left_on=col,\n right_on='left_team', how='left').drop(columns=[col, 'left_team']).rename(\n columns={'right_team': col})\n",
"fixtures = pd.merge(data['spi_fixtures'], data['fd_fixtures'], left_on=\n spi_keys, right_on=fd_keys)\n",
"X = historical.loc[:, (['season'] + spi_keys + input_cols)]\n",
"y = historical.loc[:, (output_cols)]\n",
"odds = historical.loc[:, (spi_keys + list(odds_cols_mapping.keys()))].rename(\n columns=odds_cols_mapping)\n",
"X_test = fixtures.loc[:, (spi_keys + input_cols)]\n",
"odds_test = fixtures.loc[:, (spi_keys + list(odds_cols_mapping.keys()))\n ].rename(columns=odds_cols_mapping)\n",
"for ind in (1, 2):\n",
"y['avg_score%s' % ind] = y[['score%s' % ind, 'xg%s' % ind, 'nsxg%s' % ind]\n ].mean(axis=1)\n",
"for target_type in TARGET_TYPES_MAPPING.keys():\n",
"if '+' in target_type:\n",
"for df in (X, X_test):\n",
"target_types = target_type.split('+')\n",
"df['quality'] = hmean(df[['spi1', 'spi2']], axis=1)\n",
"for name, df in zip(['X', 'y', 'odds', 'X_test', 'odds_test'], [X, y, odds,\n",
"odds = combine_odds(odds, target_types)\n",
"df['importance'] = df[['importance1', 'importance2']].mean(axis=1)\n",
"df.to_sql(name, DB_CONNECTION, index=False, if_exists='replace')\n",
"odds_test = combine_odds(odds_test, target_types)\n",
"df['rating'] = df[['quality', 'importance']].mean(axis=1)\n",
"df['sum_proj_score'] = df['proj_score1'] + df['proj_score2']\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"For",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"For",
"Condition",
"For",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def __init__(self, VAR_1, VAR_2, VAR_3, VAR_4=None):...\n",
"\"\"\"docstring\"\"\"\n",
"self.node_ip_address = VAR_3\n",
"self.redis_client = redis.StrictRedis(host=redis_ip_address, port=\n redis_port, password=redis_password)\n",
"self.log_files = {}\n",
"self.log_file_handles = {}\n",
"self.files_to_ignore = set()\n"
] | [
"def __init__(self, redis_ip_address, redis_port, node_ip_address,...\n",
"\"\"\"docstring\"\"\"\n",
"self.node_ip_address = node_ip_address\n",
"self.redis_client = redis.StrictRedis(host=redis_ip_address, port=\n redis_port, password=redis_password)\n",
"self.log_files = {}\n",
"self.log_file_handles = {}\n",
"self.files_to_ignore = set()\n"
] | [
0,
0,
6,
6,
6,
6,
6
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_1(self):...\n",
"settings.SSH_PRIVATE_KEY = ''\n"
] | [
"def tearDown(self):...\n",
"settings.SSH_PRIVATE_KEY = ''\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_7(self, VAR_14):...\n",
"if not VAR_14:\n",
"return\n",
"self.model.total_items = VAR_14['total']\n",
"if self.num_search_results_label:\n",
"self.num_search_results_label.setText('%d results' % VAR_14['total'])\n",
"if VAR_14['first'] >= self.model.rowCount():\n",
"self.model.add_items(VAR_14['results'])\n"
] | [
"def on_search_results(self, response):...\n",
"if not response:\n",
"return\n",
"self.model.total_items = response['total']\n",
"if self.num_search_results_label:\n",
"self.num_search_results_label.setText('%d results' % response['total'])\n",
"if response['first'] >= self.model.rowCount():\n",
"self.model.add_items(response['results'])\n"
] | [
0,
0,
0,
0,
0,
0,
4,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'"
] |
[
"@staticmethod...\n",
"VAR_7 = dbaseConn()\n",
"VAR_9 = 'INSERT INTO `User` (`uid`, `email`, `name`) VALUES (%s, %s, %s)'\n",
"VAR_7.db_conn_close()\n",
"VAR_7.db.commit()\n",
"cursor.execute(VAR_9, (VAR_3.uid, VAR_3.email, VAR_3.name))\n",
"return False\n",
"VAR_7.db_conn_close()\n",
"return True\n"
] | [
"@staticmethod...\n",
"dbConn = dbaseConn()\n",
"sql = 'INSERT INTO `User` (`uid`, `email`, `name`) VALUES (%s, %s, %s)'\n",
"dbConn.db_conn_close()\n",
"dbConn.db.commit()\n",
"cursor.execute(sql, (user.uid, user.email, user.name))\n",
"return False\n",
"dbConn.db_conn_close()\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Expr'",
"Return'"
] |
[
"@VAR_8.command()...\n",
"\"\"\"docstring\"\"\"\n",
"if FUNC_0(VAR_9):\n",
"pgsql.complete_quest(VAR_3, VAR_12, False)\n",
"await VAR_9.send(\"You don't have permission to use this command\")\n"
] | [
"@bot.command()...\n",
"\"\"\"docstring\"\"\"\n",
"if whitelist_check(ctx):\n",
"pgsql.complete_quest(pg_connection, quest_id, False)\n",
"await ctx.send(\"You don't have permission to use this command\")\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(VAR_0):...\n",
"return CLASS_0(VAR_0)\n"
] | [
"def connectToMySQL(db):...\n",
"return MySQLConnection(db)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_28(self, VAR_18=False):...\n",
"\"\"\"docstring\"\"\"\n",
"def FUNC_45(VAR_25, VAR_39):...\n",
"if self.parentfield:\n",
"return '{} #{}: {}: {}'.format(_('Row'), self.idx, _(VAR_25.label), VAR_39)\n",
"return '{}: {}'.format(_(VAR_25.label), VAR_39)\n"
] | [
"def get_invalid_links(self, is_submittable=False):...\n",
"\"\"\"docstring\"\"\"\n",
"def get_msg(df, docname):...\n",
"if self.parentfield:\n",
"return '{} #{}: {}: {}'.format(_('Row'), self.idx, _(df.label), docname)\n",
"return '{}: {}'.format(_(df.label), docname)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"@api.public...\n",
"self.response.write(api.get_peer_host() or '<none>')\n"
] | [
"@api.public...\n",
"self.response.write(api.get_peer_host() or '<none>')\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"def FUNC_11(self, VAR_9, VAR_10=None):...\n",
"if isinstance(VAR_9, str) or callable(VAR_9):\n",
"self.params.append(VAR_9)\n",
"VAR_48 = len(self.params)\n",
"if VAR_10:\n",
"for VAR_51 in VAR_9:\n",
"self.params.add_name(VAR_10)\n",
"self._set_params_item(VAR_51)\n",
"if VAR_10:\n",
"self.params.set_name(VAR_10, VAR_48, end=len(self.params))\n"
] | [
"def _set_params_item(self, item, name=None):...\n",
"if isinstance(item, str) or callable(item):\n",
"self.params.append(item)\n",
"start = len(self.params)\n",
"if name:\n",
"for i in item:\n",
"self.params.add_name(name)\n",
"self._set_params_item(i)\n",
"if name:\n",
"self.params.set_name(name, start, end=len(self.params))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"For",
"Expr'",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_1():...\n",
"VAR_12 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\theory.db')\n",
"VAR_7 = VAR_12.cursor()\n",
"for i in VAR_0:\n",
"VAR_7.execute('create table ' + str(i) + ' (link STRING)')\n",
"VAR_12.commit()\n",
"VAR_12.close()\n"
] | [
"def create_theory_table():...\n",
"theory = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\theory.db')\n",
"conn = theory.cursor()\n",
"for i in available_tags:\n",
"conn.execute('create table ' + str(i) + ' (link STRING)')\n",
"theory.commit()\n",
"theory.close()\n"
] | [
0,
0,
0,
0,
4,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@user_passes_test(user_is_superuser)...\n",
"VAR_4 = context_testcases()\n",
"return render(VAR_0, 'testcases/testcases.html', VAR_4)\n"
] | [
"@user_passes_test(user_is_superuser)...\n",
"context = context_testcases()\n",
"return render(request, 'testcases/testcases.html', context)\n"
] | [
0,
0,
0
] | [
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_23(VAR_11):...\n",
"return '%s> ' % self.configuration.eqlx_group_name\n"
] | [
"def _fake_recv(ignore_arg):...\n",
"return '%s> ' % self.configuration.eqlx_group_name\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_4(self, VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = get_random_string(import_from_settings('OIDC_STATE_SIZE', 32))\n",
"VAR_8 = import_from_settings('OIDC_REDIRECT_FIELD_NAME', 'next')\n",
"VAR_9 = {'response_type': 'code', 'scope': 'openid', 'client_id': self.\n OIDC_RP_CLIENT_ID, 'redirect_uri': absolutify(VAR_1, reverse(\n 'oidc_authentication_callback')), 'state': VAR_7}\n",
"if import_from_settings('OIDC_USE_NONCE', True):\n",
"VAR_6 = get_random_string(import_from_settings('OIDC_NONCE_SIZE', 32))\n",
"VAR_1.session['oidc_state'] = VAR_7\n",
"VAR_9.update({'nonce': VAR_6})\n",
"VAR_1.session['oidc_login_next'] = VAR_1.GET.get(VAR_8)\n",
"VAR_1.session['oidc_nonce'] = VAR_6\n",
"VAR_10 = urlencode(VAR_9)\n",
"VAR_11 = '{url}?{query}'.format(url=self.OIDC_OP_AUTH_ENDPOINT, VAR_10=query)\n",
"return HttpResponseRedirect(VAR_11)\n"
] | [
"def get(self, request):...\n",
"\"\"\"docstring\"\"\"\n",
"state = get_random_string(import_from_settings('OIDC_STATE_SIZE', 32))\n",
"redirect_field_name = import_from_settings('OIDC_REDIRECT_FIELD_NAME', 'next')\n",
"params = {'response_type': 'code', 'scope': 'openid', 'client_id': self.\n OIDC_RP_CLIENT_ID, 'redirect_uri': absolutify(request, reverse(\n 'oidc_authentication_callback')), 'state': state}\n",
"if import_from_settings('OIDC_USE_NONCE', True):\n",
"nonce = get_random_string(import_from_settings('OIDC_NONCE_SIZE', 32))\n",
"request.session['oidc_state'] = state\n",
"params.update({'nonce': nonce})\n",
"request.session['oidc_login_next'] = request.GET.get(redirect_field_name)\n",
"request.session['oidc_nonce'] = nonce\n",
"query = urlencode(params)\n",
"redirect_url = '{url}?{query}'.format(url=self.OIDC_OP_AUTH_ENDPOINT, query\n =query)\n",
"return HttpResponseRedirect(redirect_url)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
6,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_9(self, VAR_13):...\n",
"if not VAR_13:\n"
] | [
"def p_error(self, p):...\n",
"if not p:\n"
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"@VAR_0.route('/strip/<int:id>')...\n",
"VAR_7 = get_db()\n",
"VAR_15 = get_user(VAR_4)\n",
"VAR_16 = None\n",
"if VAR_15['admin'] != 1:\n",
"VAR_16 = 'User has no admin rights.'\n",
"if VAR_16 is None:\n",
"VAR_7.execute('UPDATE user SET admin = 0 WHERE id = ?', (VAR_4,))\n",
"flash(VAR_16)\n",
"VAR_7.commit()\n",
"return redirect(url_for('admin.user_view'))\n",
"return redirect(url_for('admin.user_view'))\n"
] | [
"@bp.route('/strip/<int:id>')...\n",
"db = get_db()\n",
"user = get_user(id)\n",
"error = None\n",
"if user['admin'] != 1:\n",
"error = 'User has no admin rights.'\n",
"if error is None:\n",
"db.execute('UPDATE user SET admin = 0 WHERE id = ?', (id,))\n",
"flash(error)\n",
"db.commit()\n",
"return redirect(url_for('admin.user_view'))\n",
"return redirect(url_for('admin.user_view'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Return'"
] |
[
"async def FUNC_1(self, VAR_1, VAR_2='', VAR_3=discord.Color.default()):...\n",
"VAR_7 = discord.Embed(VAR_2=title, VAR_3=color)\n",
"VAR_7.description = VAR_1\n",
"await self.bot.say('', VAR_7=embed)\n"
] | [
"async def simple_embed(self, text, title='', color=discord.Color.default()):...\n",
"embed = discord.Embed(title=title, color=color)\n",
"embed.description = text\n",
"await self.bot.say('', embed=embed)\n"
] | [
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_18, VAR_19):...\n",
"\"\"\"docstring\"\"\"\n",
"self.auth = VAR_18\n",
"self.data = None\n",
"self.station_data = None\n",
"self.station = VAR_19\n"
] | [
"def __init__(self, auth, station):...\n",
"\"\"\"docstring\"\"\"\n",
"self.auth = auth\n",
"self.data = None\n",
"self.station_data = None\n",
"self.station = station\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_13(self, VAR_23, VAR_32):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_23.shape != (2, 2):\n",
"if len(VAR_32) != 1:\n",
"VAR_32 = VAR_32[0]\n",
"VAR_38 = 2 ** VAR_32\n",
"VAR_39 = 2 ** (self.wires - VAR_32 - 1)\n",
"VAR_23 = np.kron(np.kron(np.eye(VAR_38), VAR_23), np.eye(VAR_39))\n",
"return VAR_23\n"
] | [
"def expand_one(self, U, wires):...\n",
"\"\"\"docstring\"\"\"\n",
"if U.shape != (2, 2):\n",
"if len(wires) != 1:\n",
"wires = wires[0]\n",
"before = 2 ** wires\n",
"after = 2 ** (self.wires - wires - 1)\n",
"U = np.kron(np.kron(np.eye(before), U), np.eye(after))\n",
"return U\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_8(self, VAR_5):...\n",
"VAR_5.setResponseCode(OK)\n",
"return self._render_template(VAR_5)\n"
] | [
"def render_GET(self, request):...\n",
"request.setResponseCode(OK)\n",
"return self._render_template(request)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"def FUNC_0(VAR_0, VAR_1):...\n",
"VAR_8 = get_object_or_404(Hunt, id=pk)\n",
"VAR_8.delete()\n",
"return redirect('threat_hunter:index')\n"
] | [
"def hunt_del(request, pk):...\n",
"hunt = get_object_or_404(Hunt, id=pk)\n",
"hunt.delete()\n",
"return redirect('threat_hunter:index')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_24(self, VAR_14):...\n",
""
] | [
"def is_pk(self, col_name):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_13(self, VAR_16):...\n",
"return True\n"
] | [
"def does_intersect_rule(self, rulectx):...\n",
"return True\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_11(self, VAR_0, *VAR_1, **VAR_2):...\n",
"if not VAR_0.user.is_authenticated:\n",
"return invalid_permission_redirect(VAR_0)\n",
"self.request = VAR_0\n",
"self.filter_data = VAR_2.pop('filter_data', {})\n",
"return super().get(self, VAR_0, *VAR_1, **kwargs)\n"
] | [
"def get(self, request, *args, **kwargs):...\n",
"if not request.user.is_authenticated:\n",
"return invalid_permission_redirect(request)\n",
"self.request = request\n",
"self.filter_data = kwargs.pop('filter_data', {})\n",
"return super().get(self, request, *args, **kwargs)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_0, VAR_1, VAR_2):...\n",
"super(CLASS_1, self).__init__(VAR_0, VAR_1, VAR_2, 'HEAD')\n"
] | [
"def __init__(self, connection, args, logger):...\n",
"super(HttpHead, self).__init__(connection, args, logger, 'HEAD')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_6(self, VAR_7):...\n",
"VAR_8 = self.content\n",
"while VAR_8:\n",
"yield VAR_8[:VAR_7]\n",
"VAR_8 = VAR_8[VAR_7:]\n"
] | [
"def iter_content(self, chunk_size):...\n",
"c = self.content\n",
"while c:\n",
"yield c[:chunk_size]\n",
"c = c[chunk_size:]\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Expr'",
"Assign'"
] |
[
"def FUNC_1(self, **VAR_34):...\n",
"\"\"\"docstring\"\"\"\n",
"for arg in VAR_34:\n",
"if hasattr(self, arg):\n",
"self._check_usage()\n",
"if getattr(self, arg) is None:\n",
"setattr(self, arg, VAR_34[arg])\n"
] | [
"def update_if_absent(self, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"for arg in kwargs:\n",
"if hasattr(self, arg):\n",
"self._check_usage()\n",
"if getattr(self, arg) is None:\n",
"setattr(self, arg, kwargs[arg])\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Condition",
"Expr'",
"Condition",
"Expr'"
] |
[
"from bs4 import BeautifulSoup\n",
"import requests\n",
"import re\n",
"from typing import Dict, TYPE_CHECKING\n",
"from pshtt.pshtt import inspect_domains\n",
"import tldextract\n",
"from django.utils import timezone\n",
"from directory.models import ScanResult, DirectoryEntry\n",
"from scanner.utils import url_to_domain\n",
"if TYPE_CHECKING:\n",
"from directory.models import DirectoryEntryQuerySet\n",
"def FUNC_0(VAR_0: DirectoryEntry, VAR_1: Dict) ->ScanResult:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6, VAR_23 = FUNC_3(VAR_0.landing_page_url)\n",
"return ScanResult(VAR_0=securedrop, live=pshtt_results['Live'],\n http_status_200_ok=False)\n",
"return ScanResult(landing_page_url=securedrop.landing_page_url, live=\n pshtt_results['Live'], http_status_200_ok=validate_200_ok(\n no_redirects_page), forces_https=bool(pshtt_results[\n 'Strictly Forces HTTPS']), hsts=pshtt_results['HSTS'], hsts_max_age=\n validate_hsts_max_age(pshtt_results['HSTS Max Age']),\n hsts_entire_domain=validate_hsts_entire_domain(pshtt_results[\n 'HSTS Entire Domain']), hsts_preloaded=pshtt_results['HSTS Preloaded'],\n subdomain=validate_subdomain(securedrop.landing_page_url), no_cookies=\n validate_no_cookies(page), safe_onion_address=\n validate_onion_address_not_in_href(soup), no_cdn=validate_not_using_cdn\n (page), http_no_redirect=validate_no_redirects(no_redirects_page),\n expected_encoding=validate_encoding(page), no_analytics=\n validate_not_using_analytics(page), no_server_info=\n validate_server_software(page), no_server_version=\n validate_server_version(page), csp_origin_only=validate_csp(page),\n mime_sniffing_blocked=validate_no_sniff(page), noopen_download=\n validate_download_options(page), xss_protection=validate_xss_protection\n (page), clickjacking_protection=validate_clickjacking_protection(page),\n good_cross_domain_policy=validate_cross_domain_policy(page),\n http_1_0_caching_disabled=validate_pragma(page), expires_set=\n validate_expires(page), cache_control_set=validate_cache_control_set(\n page), cache_control_revalidate_set=validate_cache_must_revalidate(page\n ), cache_control_nocache_set=validate_nocache(page),\n cache_control_notransform_set=validate_notransform(page),\n cache_control_nostore_set=validate_nostore(page),\n cache_control_private_set=validate_private(page),\n referrer_policy_set_to_no_referrer=validate_no_referrer_policy(page))\n",
"VAR_24, VAR_25 = FUNC_3(VAR_0.landing_page_url, VAR_5=False)\n"
] | [
"from bs4 import BeautifulSoup\n",
"import requests\n",
"import re\n",
"from typing import Dict, TYPE_CHECKING\n",
"from pshtt.pshtt import inspect_domains\n",
"import tldextract\n",
"from django.utils import timezone\n",
"from directory.models import ScanResult, DirectoryEntry\n",
"from scanner.utils import url_to_domain\n",
"if TYPE_CHECKING:\n",
"from directory.models import DirectoryEntryQuerySet\n",
"def pshtt_data_to_result(securedrop: DirectoryEntry, pshtt_results: Dict...\n",
"\"\"\"docstring\"\"\"\n",
"page, soup = request_and_scrape_page(securedrop.landing_page_url)\n",
"return ScanResult(securedrop=securedrop, live=pshtt_results['Live'],\n http_status_200_ok=False)\n",
"return ScanResult(landing_page_url=securedrop.landing_page_url, live=\n pshtt_results['Live'], http_status_200_ok=validate_200_ok(\n no_redirects_page), forces_https=bool(pshtt_results[\n 'Strictly Forces HTTPS']), hsts=pshtt_results['HSTS'], hsts_max_age=\n validate_hsts_max_age(pshtt_results['HSTS Max Age']),\n hsts_entire_domain=validate_hsts_entire_domain(pshtt_results[\n 'HSTS Entire Domain']), hsts_preloaded=pshtt_results['HSTS Preloaded'],\n subdomain=validate_subdomain(securedrop.landing_page_url), no_cookies=\n validate_no_cookies(page), safe_onion_address=\n validate_onion_address_not_in_href(soup), no_cdn=validate_not_using_cdn\n (page), http_no_redirect=validate_no_redirects(no_redirects_page),\n expected_encoding=validate_encoding(page), no_analytics=\n validate_not_using_analytics(page), no_server_info=\n validate_server_software(page), no_server_version=\n validate_server_version(page), csp_origin_only=validate_csp(page),\n mime_sniffing_blocked=validate_no_sniff(page), noopen_download=\n validate_download_options(page), xss_protection=validate_xss_protection\n (page), clickjacking_protection=validate_clickjacking_protection(page),\n good_cross_domain_policy=validate_cross_domain_policy(page),\n http_1_0_caching_disabled=validate_pragma(page), expires_set=\n validate_expires(page), cache_control_set=validate_cache_control_set(\n page), cache_control_revalidate_set=validate_cache_must_revalidate(page\n ), cache_control_nocache_set=validate_nocache(page),\n cache_control_notransform_set=validate_notransform(page),\n cache_control_nostore_set=validate_nostore(page),\n cache_control_private_set=validate_private(page),\n referrer_policy_set_to_no_referrer=validate_no_referrer_policy(page))\n",
"no_redirects_page, _ = request_and_scrape_page(securedrop.landing_page_url,\n allow_redirects=False)\n"
] | [
0,
0,
0,
6,
0,
0,
0,
0,
0,
0,
0,
0,
0,
6,
6,
6,
6
] | [
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Condition",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Assign'",
"Return'",
"Return'",
"Assign'"
] |
[
"def FUNC_6(self):...\n",
"self.assertTrue({'name': 'DocField'} in DatabaseQuery('DocType').execute(\n filters={'name': 'DocField'}))\n"
] | [
"def test_filters_4(self):...\n",
"self.assertTrue({'name': 'DocField'} in DatabaseQuery('DocType').execute(\n filters={'name': 'DocField'}))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_9(self, VAR_10, VAR_6=[], VAR_11=None, VAR_12=True):...\n",
"return super().command(VAR_10, VAR_6, VAR_11, VAR_12, '\\r\\n', False)\n"
] | [
"def command(self, command, expected_output=[], error_message=None,...\n",
"return super().command(command, expected_output, error_message, log_event,\n '\\r\\n', False)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_31(self, *VAR_79, **VAR_80):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.log = VAR_79, VAR_80\n",
"return VAR_101\n"
] | [
"def log(self, *logs, **kwlogs):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.log = logs, kwlogs\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_8(self):...\n",
"VAR_9 = fingerprint(VAR_0)\n",
"self.assertEquals(VAR_9, '54:6d:da:1f:91:b5:2b:6f:a2:83:90:c4:f9:73:76:f5')\n"
] | [
"def test_rsa_key_fingerprint(self):...\n",
"fp = fingerprint(RSA_PUBKEY)\n",
"self.assertEquals(fp, '54:6d:da:1f:91:b5:2b:6f:a2:83:90:c4:f9:73:76:f5')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_14(self, VAR_17):...\n",
"return True\n"
] | [
"def does_intersect_path(self, pathctx):...\n",
"return True\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_9(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = block.sysfs_to_devpath(VAR_1)\n",
"if block.is_extended_partition(VAR_11):\n",
"LOG.info(\"extended partitions do not need wiping, so skipping: '%s'\", VAR_11)\n",
"for bcache_path in ['bcache', 'bcache/set']:\n",
"VAR_42 = os.path.join(VAR_1, bcache_path)\n",
"VAR_39 = [1, 3, 5, 7]\n",
"if os.path.exists(VAR_42):\n",
"LOG.info('wiping superblock on %s', VAR_11)\n",
"LOG.debug('Attempting to release bcache layer from device: %s', VAR_1)\n",
"for attempt, wait in enumerate(VAR_39):\n",
"FUNC_4(VAR_42)\n",
"LOG.debug('wiping %s attempt %s/%s', VAR_11, attempt + 1, len(VAR_39))\n",
"block.wipe_volume(VAR_11, mode='superblock')\n",
"if attempt + 1 >= len(VAR_39):\n",
"LOG.debug('successfully wiped device %s on attempt %s/%s', VAR_11, attempt +\n 1, len(VAR_39))\n",
"LOG.debug(\n \"wiping device '%s' failed on attempt %s/%s. sleeping %ss before retry\",\n VAR_11, attempt + 1, len(VAR_39), wait)\n",
"return\n",
"time.sleep(wait)\n"
] | [
"def wipe_superblock(device):...\n",
"\"\"\"docstring\"\"\"\n",
"blockdev = block.sysfs_to_devpath(device)\n",
"if block.is_extended_partition(blockdev):\n",
"LOG.info(\"extended partitions do not need wiping, so skipping: '%s'\", blockdev)\n",
"for bcache_path in ['bcache', 'bcache/set']:\n",
"stop_path = os.path.join(device, bcache_path)\n",
"retries = [1, 3, 5, 7]\n",
"if os.path.exists(stop_path):\n",
"LOG.info('wiping superblock on %s', blockdev)\n",
"LOG.debug('Attempting to release bcache layer from device: %s', device)\n",
"for attempt, wait in enumerate(retries):\n",
"maybe_stop_bcache_device(stop_path)\n",
"LOG.debug('wiping %s attempt %s/%s', blockdev, attempt + 1, len(retries))\n",
"block.wipe_volume(blockdev, mode='superblock')\n",
"if attempt + 1 >= len(retries):\n",
"LOG.debug('successfully wiped device %s on attempt %s/%s', blockdev, \n attempt + 1, len(retries))\n",
"LOG.debug(\n \"wiping device '%s' failed on attempt %s/%s. sleeping %ss before retry\",\n blockdev, attempt + 1, len(retries), wait)\n",
"return\n",
"time.sleep(wait)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Expr'",
"For",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"For",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Return'",
"Expr'"
] |