lines (sequencelengths: 1, 444)
raw_lines (sequencelengths: 1, 444)
label (sequencelengths: 1, 444)
type (sequencelengths: 1, 444)
[ "def __getattr__(self, VAR_1: str) ->ISpy:...\n", "return self.selected_fields[VAR_1]\n" ]
[ "def __getattr__(self, name: str) ->ISpy:...\n", "return self.selected_fields[name]\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_19(self):...\n", "\"\"\"docstring\"\"\"\n", "self.cursor.execute('create table t1(id integer, s varchar(20))')\n", "self.cursor.execute('insert into t1 values (?,?)', 1, 'test')\n", "VAR_24 = self.cursor.execute('string', 'test', 1, 10).fetchone()\n", "self.assertNotEqual(VAR_24, None)\n", "self.assertEqual(VAR_24[0], 1)\n" ]
[ "def test_subquery_params(self):...\n", "\"\"\"docstring\"\"\"\n", "self.cursor.execute('create table t1(id integer, s varchar(20))')\n", "self.cursor.execute('insert into t1 values (?,?)', 1, 'test')\n", "row = self.cursor.execute(\n \"\"\"\n select x.id\n from (\n select id\n from t1\n where s = ?\n and id between ? and ?\n ) x\n \"\"\"\n , 'test', 1, 10).fetchone()\n", "self.assertNotEqual(row, None)\n", "self.assertEqual(row[0], 1)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_15(self, VAR_16, VAR_18):...\n", "\"\"\"docstring\"\"\"\n", "VAR_25 = self._eql_execute('volume', 'select', VAR_18['volume_name'],\n 'snapshot', 'select', VAR_18['name'], 'clone', VAR_16['name'])\n", "VAR_0.error(_('Failed to create volume from snapshot %s'), VAR_18['name'])\n", "return self._get_volume_data(VAR_25)\n" ]
[ "def create_volume_from_snapshot(self, volume, snapshot):...\n", "\"\"\"docstring\"\"\"\n", "out = self._eql_execute('volume', 'select', snapshot['volume_name'],\n 'snapshot', 'select', snapshot['name'], 'clone', volume['name'])\n", "LOG.error(_('Failed to create volume from snapshot %s'), snapshot['name'])\n", "return self._get_volume_data(out)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Return'" ]
[ "import os\n", "import sys\n", "import imp\n", "import logging\n", "def FUNC_0(VAR_0):...\n", "return os.path.exists(os.path.join(VAR_0, '__init__.py'))\n" ]
[ "import os\n", "import sys\n", "import imp\n", "import logging\n", "def _is_package(directory):...\n", "return os.path.exists(os.path.join(directory, '__init__.py'))\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "FunctionDef'", "Return'" ]
[ "@property...\n", "return self._record\n" ]
[ "@property...\n", "return self._record\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@VAR_1.route('/resetCurrent')...\n", "VAR_18.pop('currentFile', None)\n", "VAR_18.pop('tempFile', None)\n", "return ''\n" ]
[ "@app.route('/resetCurrent')...\n", "session.pop('currentFile', None)\n", "session.pop('tempFile', None)\n", "return ''\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Expr'", "Return'" ]
[ "def __init__(self, VAR_2: db.note.Note):...\n", "self.note = VAR_2\n" ]
[ "def __init__(self, note: db.note.Note):...\n", "self.note = note\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_2(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_4 = 'katotestlb' + self.test_id\n", "self.__use_lb_name = VAR_4\n", "VAR_2 = self.bindings\n", "VAR_5 = VAR_2['TEST_AWS_REGION']\n", "VAR_6 = [VAR_5 + 'a', VAR_5 + 'b']\n", "VAR_7 = {'Listener': {'InstancePort': 7001, 'LoadBalancerPort': 80}}\n", "VAR_8 = {'HealthyThreshold': 8, 'UnhealthyThreshold': 3, 'Interval': 123,\n 'Timeout': 12, 'Target': 'HTTP:%d/healthcheck' % VAR_7['Listener'][\n 'InstancePort']}\n", "VAR_9 = self.agent.type_to_payload('upsertAmazonLoadBalancerDescription', {\n 'credentials': VAR_2['AWS_CREDENTIALS'], 'clusterName': VAR_2[\n 'TEST_APP'], 'name': VAR_4, 'availabilityZones': {VAR_5: VAR_6},\n 'listeners': [{'internalProtocol': 'HTTP', 'internalPort': VAR_7[\n 'Listener']['InstancePort'], 'externalProtocol': 'HTTP', 'externalPort':\n VAR_7['Listener']['LoadBalancerPort']}], 'healthCheck': VAR_8['Target'],\n 'healthTimeout': VAR_8['Timeout'], 'healthInterval': VAR_8['Interval'],\n 'healthyThreshold': VAR_8['HealthyThreshold'], 'unhealthyThreshold':\n VAR_8['UnhealthyThreshold']})\n", "VAR_10 = aws.AwsContractBuilder(self.aws_observer)\n", "VAR_10.new_clause_builder('Load Balancer Added', retryable_for_secs=30\n ).collect_resources(aws_module='elb', command='describe-load-balancers',\n args=['--load-balancer-names', self.__use_lb_name]).contains_pred_list([\n jc.PathContainsPredicate('LoadBalancerDescriptions/HealthCheck', VAR_8),\n jc.PathPredicate('LoadBalancerDescriptions/AvailabilityZones', jc.\n LIST_SIMILAR(VAR_6)), jc.PathElementsContainPredicate(\n 'LoadBalancerDescriptions/ListenerDescriptions', VAR_7)])\n", "return st.OperationContract(self.new_post_operation(title=\n 'upsert_amazon_load_balancer', data=payload, path='ops'), contract=\n builder.build())\n" ]
[ "def upsert_load_balancer(self):...\n", "\"\"\"docstring\"\"\"\n", "detail_raw_name = 'katotestlb' + self.test_id\n", "self.__use_lb_name = detail_raw_name\n", "bindings = self.bindings\n", "region = bindings['TEST_AWS_REGION']\n", "avail_zones = [region + 'a', region + 'b']\n", "listener = {'Listener': {'InstancePort': 7001, 'LoadBalancerPort': 80}}\n", "health_check = {'HealthyThreshold': 8, 'UnhealthyThreshold': 3, 'Interval':\n 123, 'Timeout': 12, 'Target': 'HTTP:%d/healthcheck' % listener[\n 'Listener']['InstancePort']}\n", "payload = self.agent.type_to_payload('upsertAmazonLoadBalancerDescription',\n {'credentials': bindings['AWS_CREDENTIALS'], 'clusterName': bindings[\n 'TEST_APP'], 'name': detail_raw_name, 'availabilityZones': {region:\n avail_zones}, 'listeners': [{'internalProtocol': 'HTTP', 'internalPort':\n listener['Listener']['InstancePort'], 'externalProtocol': 'HTTP',\n 'externalPort': listener['Listener']['LoadBalancerPort']}],\n 'healthCheck': health_check['Target'], 'healthTimeout': health_check[\n 'Timeout'], 'healthInterval': health_check['Interval'],\n 'healthyThreshold': health_check['HealthyThreshold'],\n 'unhealthyThreshold': health_check['UnhealthyThreshold']})\n", "builder = aws.AwsContractBuilder(self.aws_observer)\n", "builder.new_clause_builder('Load Balancer Added', retryable_for_secs=30\n ).collect_resources(aws_module='elb', command='describe-load-balancers',\n args=['--load-balancer-names', self.__use_lb_name]).contains_pred_list([\n jc.PathContainsPredicate('LoadBalancerDescriptions/HealthCheck',\n health_check), jc.PathPredicate(\n 'LoadBalancerDescriptions/AvailabilityZones', jc.LIST_SIMILAR(\n avail_zones)), jc.PathElementsContainPredicate(\n 'LoadBalancerDescriptions/ListenerDescriptions', listener)])\n", "return st.OperationContract(self.new_post_operation(title=\n 'upsert_amazon_load_balancer', data=payload, path='ops'), contract=\n builder.build())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "\"\"\"Base core views.\"\"\"\n", "from __future__ import unicode_literals\n", "from django.core.urlresolvers import reverse\n", "from django.views import generic\n", "from django.contrib.auth import mixins as auth_mixins\n", "from ..extensions import exts_pool\n", "def FUNC_0(VAR_0, VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "if not VAR_1.last_login:\n", "return reverse('core:user_index')\n", "VAR_2 = VAR_0.POST.get('next', None)\n", "if VAR_2 is None or VAR_2 == 'None':\n", "if VAR_0.user.role == 'SimpleUsers':\n", "return VAR_2\n", "VAR_3 = VAR_0.localconfig.parameters.get_value('default_top_redirection')\n", "VAR_2 = reverse('core:dashboard')\n", "if VAR_3 != 'user':\n", "VAR_4 = exts_pool.get_extension_infos(VAR_3)\n", "VAR_2 = reverse('core:user_index')\n", "VAR_2 = VAR_4['topredirection_url']\n" ]
[ "\"\"\"Base core views.\"\"\"\n", "from __future__ import unicode_literals\n", "from django.core.urlresolvers import reverse\n", "from django.views import generic\n", "from django.contrib.auth import mixins as auth_mixins\n", "from ..extensions import exts_pool\n", "def find_nextlocation(request, user):...\n", "\"\"\"docstring\"\"\"\n", "if not user.last_login:\n", "return reverse('core:user_index')\n", "nextlocation = request.POST.get('next', None)\n", "if nextlocation is None or nextlocation == 'None':\n", "if request.user.role == 'SimpleUsers':\n", "return nextlocation\n", "topredir = request.localconfig.parameters.get_value('default_top_redirection')\n", "nextlocation = reverse('core:dashboard')\n", "if topredir != 'user':\n", "infos = exts_pool.get_extension_infos(topredir)\n", "nextlocation = reverse('core:user_index')\n", "nextlocation = infos['topredirection_url']\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Docstring", "Condition", "Return'", "Assign'", "Condition", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'" ]
[ "def __init__(self, VAR_2, *VAR_3, VAR_4=None, VAR_10=None, **VAR_7):...\n", "if VAR_10 is None:\n", "VAR_10 = {}\n", "self._load_default_options(VAR_10)\n", "self.options = VAR_10\n", "assert isinstance(VAR_2, rdflib.Graph\n ), 'target_graph must be a rdflib Graph object'\n", "self.target_graph = VAR_2\n", "if VAR_4 is None:\n", "VAR_4 = VAR_2\n", "assert isinstance(VAR_4, rdflib.Graph\n ), 'shacl_graph must be a rdflib Graph object'\n", "self.shacl_graph = VAR_4\n" ]
[ "def __init__(self, target_graph, *args, shacl_graph=None, options=None, **...\n", "if options is None:\n", "options = {}\n", "self._load_default_options(options)\n", "self.options = options\n", "assert isinstance(target_graph, rdflib.Graph\n ), 'target_graph must be a rdflib Graph object'\n", "self.target_graph = target_graph\n", "if shacl_graph is None:\n", "shacl_graph = target_graph\n", "assert isinstance(shacl_graph, rdflib.Graph\n ), 'shacl_graph must be a rdflib Graph object'\n", "self.shacl_graph = shacl_graph\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Expr'", "Assign'", "Assert'", "Assign'", "Condition", "Assign'", "Assert'", "Assign'" ]
[ "@retries(5, delay=0.5, backoff=1.5)...\n", "if VAR_11 == 'POST':\n", "return requests.post(FUNC_2(VAR_7), VAR_9=ToUtf8Json(data), headers=_HEADERS)\n", "if VAR_11 == 'GET':\n", "return requests.get(FUNC_2(VAR_7), headers=_HEADERS)\n" ]
[ "@retries(5, delay=0.5, backoff=1.5)...\n", "if method == 'POST':\n", "return requests.post(_BuildUri(handler), data=ToUtf8Json(data), headers=\n _HEADERS)\n", "if method == 'GET':\n", "return requests.get(_BuildUri(handler), headers=_HEADERS)\n" ]
[ 0, 0, 7, 0, 0 ]
[ "Condition", "Condition", "Return'", "Condition", "Return'" ]
[ "def FUNC_13(self):...\n", "\"\"\"docstring\"\"\"\n", "return self._phrase_table_column_names[:]\n" ]
[ "def get_phrase_table_column_names(self):...\n", "\"\"\"docstring\"\"\"\n", "return self._phrase_table_column_names[:]\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_3(self, VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "VAR_19 = 'SELECT name FROM cve'\n", "VAR_19 += ' JOIN errata_cve ON cve_id = cve.id'\n", "VAR_19 += ' WHERE errata_cve.errata_id = %s' % str(VAR_0)\n", "self.cursor.execute(VAR_19)\n", "VAR_20 = self.cursor.fetchall()\n", "VAR_8 = []\n", "for cve_name in VAR_20:\n", "VAR_8.append(cve_name[0])\n", "return VAR_8\n" ]
[ "def get_cve_names_for_erratum_id(self, id):...\n", "\"\"\"docstring\"\"\"\n", "cve_query = 'SELECT name FROM cve'\n", "cve_query += ' JOIN errata_cve ON cve_id = cve.id'\n", "cve_query += ' WHERE errata_cve.errata_id = %s' % str(id)\n", "self.cursor.execute(cve_query)\n", "cve_names = self.cursor.fetchall()\n", "cve_name_list = []\n", "for cve_name in cve_names:\n", "cve_name_list.append(cve_name[0])\n", "return cve_name_list\n" ]
[ 0, 0, 4, 4, 4, 4, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "AugAssign'", "AugAssign'", "Expr'", "Assign'", "Assign'", "For", "Expr'", "Return'" ]
[ "def FUNC_12():...\n", "if not pathlib.Path(VAR_0['syscall_header_file']).exists():\n" ]
[ "def check_syscall_header_file():...\n", "if not pathlib.Path(CONFIG['syscall_header_file']).exists():\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_11(self, VAR_7, VAR_9):...\n", "if isinstance(VAR_9, list):\n", "for v in VAR_9:\n", "self.append(VAR_7, v)\n" ]
[ "def extend(self, key, value):...\n", "if isinstance(value, list):\n", "for v in value:\n", "self.append(key, v)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "For", "Expr'" ]
[ "@VAR_0.route('/create-account', methods=['POST'])...\n", "VAR_10 = request.form['password']\n", "VAR_9 = request.form['email']\n", "VAR_17 = request.form['difficulty']\n", "VAR_19 = CLASS_0(VAR_9=email, VAR_10=pwd, VAR_17=difficulty)\n", "VAR_20 = CLASS_0.query.get(VAR_9)\n", "if VAR_20:\n", "return render_template('login.html', success=False)\n", "VAR_19.authenticated = True\n", "VAR_2.session.add(VAR_19)\n", "VAR_2.session.commit()\n", "login_user(VAR_19, remember=True)\n", "return redirect(VAR_4)\n" ]
[ "@app.route('/create-account', methods=['POST'])...\n", "pwd = request.form['password']\n", "email = request.form['email']\n", "difficulty = request.form['difficulty']\n", "user = User(email=email, pwd=pwd, difficulty=difficulty)\n", "db_user = User.query.get(email)\n", "if db_user:\n", "return render_template('login.html', success=False)\n", "user.authenticated = True\n", "db.session.add(user)\n", "db.session.commit()\n", "login_user(user, remember=True)\n", "return redirect(url_prefix)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_10(self, VAR_3, VAR_4):...\n", "VAR_12 = []\n", "VAR_13, VAR_14 = VAR_3\n", "for end in VAR_4:\n", "VAR_18, VAR_19 = end\n", "if VAR_12 == []:\n", "VAR_9 = False\n", "return None\n", "VAR_15 = VAR_12[0]\n", "if VAR_18 == VAR_13 and VAR_19 == VAR_14:\n", "for VAR_1 in range(1, len(VAR_12)):\n", "return None\n", "VAR_20 = self.stateMap[VAR_18][VAR_19].getTurning()\n", "if VAR_15[1] > VAR_12[VAR_1][1]:\n", "return VAR_15\n", "VAR_21 = self.stateMap[VAR_13][VAR_14].getTurning()\n", "VAR_15 = VAR_12[VAR_1]\n", "for index in range(len(VAR_20)):\n", "if VAR_20[index] != -1 and VAR_21[index] != -1 and VAR_20[index][0] == VAR_21[\n", "if not VAR_9:\n", "VAR_9 = True\n", "VAR_24 = [(neigh[0], self.getDist((VAR_18, VAR_19), neigh[0])) for neigh in\n VAR_20 if neigh != -1]\n", "VAR_12.append([[[VAR_13, VAR_14], [VAR_18, VAR_19]], self.getDist((VAR_13,\n VAR_14), (VAR_18, VAR_19))])\n", "VAR_25 = BinaryHeap()\n", "VAR_26 = self.coordToID(VAR_13, VAR_14)\n", "VAR_27 = 0\n", "VAR_28 = self.getDist((VAR_13, VAR_14), (VAR_18, VAR_19))\n", "VAR_25.pool[VAR_26] = Node(VAR_26, VAR_27, VAR_28, None)\n", "VAR_25.insert(VAR_26)\n", "while not VAR_9 and VAR_25.size >= 0:\n", "VAR_29 = VAR_25.remove()\n", "VAR_30, VAR_31 = self.IDToCoord(VAR_29)\n", "for endTurn in VAR_24:\n", "if endTurn[0][0] == VAR_30 and endTurn[0][1] == VAR_31 and not VAR_9:\n", "if not VAR_9:\n", "VAR_33 = [(VAR_18, VAR_19), endTurn[0]]\n", "VAR_32 = self.stateMap[VAR_30][VAR_31].getTurning()\n", "VAR_34 = VAR_25.pool[VAR_29]\n", "for node in VAR_32:\n", "VAR_35 = endTurn[1]\n", "if node != -1:\n", "while VAR_34.prevID != None:\n", "VAR_26 = self.coordToID(node[0][0], node[0][1])\n", "VAR_36 = self.IDToCoord(VAR_34.id_)\n", "VAR_9 = True\n", "if VAR_26 not in VAR_25.pool.keys():\n", "VAR_34 = VAR_34.prevID\n", "VAR_12.append([VAR_33[::-1], VAR_35])\n", "VAR_27 = VAR_25.pool[VAR_29].g_ + self.getDist((VAR_30, VAR_31), node[0])\n", "VAR_27 = VAR_25.pool[VAR_29].g_ + self.getDist((VAR_30, VAR_31), node[0])\n", "VAR_37 = self.IDToCoord(VAR_34.id_)\n", "VAR_28 = VAR_27 + min(map(lambda VAR_1: self.getDist(node[0], VAR_1[0]) +\n VAR_1[1], VAR_24))\n", "if VAR_27 <= VAR_25.pool[VAR_26].g_:\n", "VAR_33.append(VAR_37)\n", "VAR_25.pool[VAR_26] = Node(VAR_26, VAR_27, VAR_28, VAR_25.pool[VAR_29])\n", "VAR_25.pool[VAR_26].g_ = VAR_27\n", "VAR_35 += self.getDist(VAR_36, VAR_37)\n", "VAR_25.insert(VAR_26)\n", "VAR_25.pool[VAR_26].f_ = VAR_27 + min(map(lambda VAR_1: self.getDist(node[0\n ], VAR_1[0]) + VAR_1[1], VAR_24))\n", "VAR_25.pool[VAR_26].prevID = VAR_25.pool[VAR_29]\n", "VAR_25.insert(VAR_26)\n" ]
[ "def shortPath(self, start_coord, end_coords):...\n", "paths = []\n", "start_x, start_y = start_coord\n", "for end in end_coords:\n", "end_x, end_y = end\n", "if paths == []:\n", "found = False\n", "return None\n", "cur_min = paths[0]\n", "if end_x == start_x and end_y == start_y:\n", "for x in range(1, len(paths)):\n", "return None\n", "end_neighbour = self.stateMap[end_x][end_y].getTurning()\n", "if cur_min[1] > paths[x][1]:\n", "return cur_min\n", "start_neighbour = self.stateMap[start_x][start_y].getTurning()\n", "cur_min = paths[x]\n", "for index in range(len(end_neighbour)):\n", "if end_neighbour[index] != -1 and start_neighbour[index\n", "if not found:\n", "found = True\n", "end_points = [(neigh[0], self.getDist((end_x, end_y), neigh[0])) for neigh in\n end_neighbour if neigh != -1]\n", "paths.append([[[start_x, start_y], [end_x, end_y]], self.getDist((start_x,\n start_y), (end_x, end_y))])\n", "heap = BinaryHeap()\n", "nid = self.coordToID(start_x, start_y)\n", "g_ = 0\n", "f_ = self.getDist((start_x, start_y), (end_x, end_y))\n", "heap.pool[nid] = Node(nid, g_, f_, None)\n", "heap.insert(nid)\n", "while not found and heap.size >= 0:\n", "removed = heap.remove()\n", "cur_x, cur_y = self.IDToCoord(removed)\n", "for endTurn in end_points:\n", "if endTurn[0][0] == cur_x and endTurn[0][1] == cur_y and not found:\n", "if not found:\n", "cur_path = [(end_x, end_y), endTurn[0]]\n", "newTurning = self.stateMap[cur_x][cur_y].getTurning()\n", "cur_elem = heap.pool[removed]\n", "for node in newTurning:\n", "dist = endTurn[1]\n", "if node != -1:\n", "while cur_elem.prevID != None:\n", "nid = self.coordToID(node[0][0], node[0][1])\n", "coord1 = self.IDToCoord(cur_elem.id_)\n", "found = True\n", "if nid not in heap.pool.keys():\n", "cur_elem = cur_elem.prevID\n", "paths.append([cur_path[::-1], dist])\n", "g_ = heap.pool[removed].g_ + self.getDist((cur_x, cur_y), node[0])\n", "g_ = heap.pool[removed].g_ + self.getDist((cur_x, cur_y), node[0])\n", "coord2 = self.IDToCoord(cur_elem.id_)\n", "f_ = g_ + min(map(lambda x: self.getDist(node[0], x[0]) + x[1], end_points))\n", "if g_ <= heap.pool[nid].g_:\n", "cur_path.append(coord2)\n", "heap.pool[nid] = Node(nid, g_, f_, heap.pool[removed])\n", "heap.pool[nid].g_ = g_\n", "dist += self.getDist(coord1, coord2)\n", "heap.insert(nid)\n", "heap.pool[nid].f_ = g_ + min(map(lambda x: self.getDist(node[0], x[0]) + x[\n 1], end_points))\n", "heap.pool[nid].prevID = heap.pool[removed]\n", "heap.insert(nid)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Condition", "For", "Return'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "For", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "Assign'", "For", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "AugAssign'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_5(self, VAR_12):...\n", "\"\"\"docstring\"\"\"\n", "VAR_30 = FUNC_1(None, self.st_uuid_idx)\n", "VAR_31 = len(VAR_12)\n", "VAR_32 = 1\n", "VAR_33 = self.hash_algo(VAR_12).hexdigest()\n", "VAR_34 = self.UniqueFile(VAR_30, VAR_31, VAR_32, VAR_33, VAR_29=self)\n", "VAR_12 = binascii.hexlify(VAR_12).decode('ascii')\n", "self.st_db.execute(\n \"INSERT INTO file_storage (uuid, size, count, hash, content) VALUES ('%s', %d, %d, '%s', E'\\\\x%s');\"\n % (VAR_30, VAR_31, VAR_32, VAR_33, VAR_12))\n", "self.st_uuid_idx[VAR_30] = VAR_34\n", "self.st_hash_idx[VAR_33] = VAR_34\n", "return VAR_30\n" ]
[ "def new_unique_file(self, content):...\n", "\"\"\"docstring\"\"\"\n", "n_uuid = get_new_uuid(None, self.st_uuid_idx)\n", "n_size = len(content)\n", "n_count = 1\n", "n_hash = self.hash_algo(content).hexdigest()\n", "u_fl = self.UniqueFile(n_uuid, n_size, n_count, n_hash, master=self)\n", "content = binascii.hexlify(content).decode('ascii')\n", "self.st_db.execute(\n \"INSERT INTO file_storage (uuid, size, count, hash, content) VALUES ('%s', %d, %d, '%s', E'\\\\x%s');\"\n % (n_uuid, n_size, n_count, n_hash, content))\n", "self.st_uuid_idx[n_uuid] = u_fl\n", "self.st_hash_idx[n_hash] = u_fl\n", "return n_uuid\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_0(VAR_2):...\n", "VAR_6 = VAR_0[VAR_2][0]\n", "VAR_7 = VAR_0[VAR_2][2]\n", "return [join(VAR_6, file) for file in VAR_7]\n" ]
[ "def _expected_files_from_dir(dir_index):...\n", "path = MOCK_PATHS[dir_index][0]\n", "files = MOCK_PATHS[dir_index][2]\n", "return [join(path, file) for file in files]\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_1(self):...\n", "" ]
[ "def close(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_16(self):...\n", "\"\"\"docstring\"\"\"\n", "for host_type in self.config['host_types']:\n", "VAR_49 = self.config['host_types'][host_type](self.address)\n", "self.log_error('Unable to determine host installation. Ignoring node')\n", "VAR_50 = self.read_file(VAR_49.release_file).strip()\n", "VAR_50 = VAR_50.decode('utf-8')\n", "if VAR_49._check_enabled(VAR_50):\n", "self.log_debug('Host installation found to be %s' % VAR_49.distribution)\n", "return VAR_49\n" ]
[ "def determine_host(self):...\n", "\"\"\"docstring\"\"\"\n", "for host_type in self.config['host_types']:\n", "host = self.config['host_types'][host_type](self.address)\n", "self.log_error('Unable to determine host installation. Ignoring node')\n", "rel_string = self.read_file(host.release_file).strip()\n", "rel_string = rel_string.decode('utf-8')\n", "if host._check_enabled(rel_string):\n", "self.log_debug('Host installation found to be %s' % host.distribution)\n", "return host\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "For", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Expr'", "Return'" ]
[ "@pyqtSlot()...\n", "self.status_monitor_queue.put(ExitFlag())\n", "self.status_monitor.update_thread.wait()\n", "self.log_queue.put(ExitFlag())\n", "self.log_thread.wait()\n" ]
[ "@pyqtSlot()...\n", "self.status_monitor_queue.put(ExitFlag())\n", "self.status_monitor.update_thread.wait()\n", "self.log_queue.put(ExitFlag())\n", "self.log_thread.wait()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_4(VAR_3):...\n", "if not VAR_3:\n", "return errors.BAD_USERNAME_CHARS\n", "if len(VAR_3) < 3:\n", "return errors.BAD_USERNAME_SHORT\n", "if len(VAR_3) > 20:\n", "return errors.BAD_USERNAME_LONG\n", "return errors.BAD_USERNAME_CHARS\n" ]
[ "def whyuserbad(x):...\n", "if not x:\n", "return errors.BAD_USERNAME_CHARS\n", "if len(x) < 3:\n", "return errors.BAD_USERNAME_SHORT\n", "if len(x) > 20:\n", "return errors.BAD_USERNAME_LONG\n", "return errors.BAD_USERNAME_CHARS\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_7(self):...\n", "return self.groups\n" ]
[ "def get_groups(self):...\n", "return self.groups\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(self):...\n", "return HttpResponseRedirect(self.failure_url)\n" ]
[ "def login_failure(self):...\n", "return HttpResponseRedirect(self.failure_url)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2():...\n", "return socket.getfqdn() + VAR_4 + '.' + VAR_4\n" ]
[ "def nodes():...\n", "return socket.getfqdn() + LINEBREAK + '.' + LINEBREAK\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@tornado.gen.coroutine...\n", "VAR_17 = 'string'.format(','.join(VAR_4))\n", "VAR_18 = VAR_2, VAR_3\n", "VAR_19 = yield VAR_1.execute(query=sql, VAR_18=params)\n", "VAR_20 = {}\n", "VAR_21 = VAR_19.fetchone()\n", "while VAR_21:\n", "VAR_33 = VAR_21[0]\n", "return VAR_20\n", "VAR_11 = VAR_21[1]\n", "VAR_34 = {}\n", "if VAR_33 in VAR_20:\n", "VAR_34 = VAR_20[VAR_33]\n", "VAR_20[VAR_33] = VAR_34\n", "for VAR_36 in range(2, len(VAR_4) + 2):\n", "VAR_40 = VAR_21[VAR_36]\n", "VAR_21 = VAR_19.fetchone()\n", "VAR_41 = VAR_4[VAR_36 - 2]\n", "VAR_42 = {}\n", "if VAR_41 in VAR_34:\n", "VAR_42 = VAR_34[VAR_41]\n", "VAR_34[VAR_41] = VAR_42\n", "VAR_42[VAR_11] = VAR_40\n" ]
[ "@tornado.gen.coroutine...\n", "sql = (\n 'SELECT boiler_room_id, DAY(date), {} FROM boiler_room_reports JOIN reports ON(report_id = reports.id) WHERE YEAR(date) = %s AND MONTH(date) = %s'\n .format(','.join(columns)))\n", "params = year, month\n", "cursor = yield tx.execute(query=sql, params=params)\n", "boilers = {}\n", "row = cursor.fetchone()\n", "while row:\n", "boiler_id = row[0]\n", "return boilers\n", "day = row[1]\n", "parameters = {}\n", "if boiler_id in boilers:\n", "parameters = boilers[boiler_id]\n", "boilers[boiler_id] = parameters\n", "for i in range(2, len(columns) + 2):\n", "val = row[i]\n", "row = cursor.fetchone()\n", "col = columns[i - 2]\n", "values = {}\n", "if col in parameters:\n", "values = parameters[col]\n", "parameters[col] = values\n", "values[day] = val\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_41(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_32 = list()\n", "for VAR_14 in self.get_columns_list():\n", "if not self.is_pk(VAR_14) and not self.is_fk(VAR_14):\n", "return VAR_32\n", "VAR_32.append(VAR_14)\n" ]
[ "def get_user_columns_list(self):...\n", "\"\"\"docstring\"\"\"\n", "ret_lst = list()\n", "for col_name in self.get_columns_list():\n", "if not self.is_pk(col_name) and not self.is_fk(col_name):\n", "return ret_lst\n", "ret_lst.append(col_name)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Condition", "Return'", "Expr'" ]
[ "def FUNC_9(*VAR_11):...\n", "VAR_12 = ' '.join(str(elem) for elem in VAR_11)\n", "return VAR_12\n" ]
[ "def adb_call(*args):...\n", "arg_str = ' '.join(str(elem) for elem in args)\n", "return arg_str\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_27(self):...\n", "VAR_16 = vimsupport.CurrentFiletypes()\n", "VAR_17 = self._user_options['filetype_specific_completion_to_disable']\n", "return not all([(x in VAR_17) for x in VAR_16])\n" ]
[ "def CurrentFiletypeCompletionEnabled(self):...\n", "filetypes = vimsupport.CurrentFiletypes()\n", "filetype_to_disable = self._user_options[\n 'filetype_specific_completion_to_disable']\n", "return not all([(x in filetype_to_disable) for x in filetypes])\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_4(self, VAR_7):...\n", "super(CLASS_1, self).add_items(VAR_7)\n", "VAR_23 = len(self.data_items)\n", "VAR_24 = len(VAR_7)\n", "for i, VAR_27 in enumerate(VAR_7):\n", "if 'infohash' in VAR_27:\n", "self.infohashes[VAR_27['infohash']] = VAR_23 - VAR_24 + i\n" ]
[ "def add_items(self, new_data_items):...\n", "super(TriblerContentModel, self).add_items(new_data_items)\n", "items_len = len(self.data_items)\n", "new_items_len = len(new_data_items)\n", "for i, item in enumerate(new_data_items):\n", "if 'infohash' in item:\n", "self.infohashes[item['infohash']] = items_len - new_items_len + i\n" ]
[ 0, 0, 0, 0, 0, 4, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "For", "Condition", "Assign'" ]
[ "def FUNC_0(self):...\n", "if not LoginRequest.test_success(self):\n", "return False\n", "if self.redirected_to != self.base_url:\n", "return False\n", "return True\n" ]
[ "def test_success(self):...\n", "if not LoginRequest.test_success(self):\n", "return False\n", "if self.redirected_to != self.base_url:\n", "return False\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_1(self, VAR_1):...\n", "return self.properties\n" ]
[ "def _fake_get_iscsi_properties(self, volume):...\n", "return self.properties\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_3(VAR_5, VAR_6):...\n", "for t in VAR_5:\n", "if FUNC_2(t['os_v_min'], t['os_v_max']) and FUNC_1(t['group'], VAR_6.groups,\n", "FUNC_4(t['set'])\n" ]
[ "def run_batch_mode(tweaks, args):...\n", "for t in tweaks:\n", "if os_supported(t['os_v_min'], t['os_v_max']) and is_executable(t['group'],\n", "run_command(t['set'])\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Expr'" ]
[ "def FUNC_2(self):...\n", "VAR_2 = EngineAuthRequest.blank('/auth/google')\n", "VAR_3 = models.Session.query().count()\n", "self.assertTrue(VAR_3 == 0)\n", "VAR_4 = VAR_2._load_session()\n", "VAR_3 = models.Session.query().count()\n", "self.assertTrue(VAR_3 == 1)\n" ]
[ "def test_load_session_no_session(self):...\n", "req = EngineAuthRequest.blank('/auth/google')\n", "s_count = models.Session.query().count()\n", "self.assertTrue(s_count == 0)\n", "sess = req._load_session()\n", "s_count = models.Session.query().count()\n", "self.assertTrue(s_count == 1)\n" ]
[ 0, 6, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_0(self):...\n", "self.assertTrue(self.client.login(username='autotest', password='password'))\n", "settings.SSH_PRIVATE_KEY = '<some-ssh-private-key>'\n" ]
[ "def setUp(self):...\n", "self.assertTrue(self.client.login(username='autotest', password='password'))\n", "settings.SSH_PRIVATE_KEY = '<some-ssh-private-key>'\n" ]
[ 0, 5, 0 ]
[ "FunctionDef'", "Expr'", "Assign'" ]
[ "def FUNC_6(self, VAR_42, VAR_43):...\n", "if not VAR_101.user_is_loggedin or VAR_101.user.needs_captcha():\n", "if not captcha.valid_solution(VAR_42, VAR_43):\n", "VAR_101.errors.add(errors.BAD_CAPTCHA)\n" ]
[ "def run(self, iden, solution):...\n", "if not c.user_is_loggedin or c.user.needs_captcha():\n", "if not captcha.valid_solution(iden, solution):\n", "c.errors.add(errors.BAD_CAPTCHA)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Expr'" ]
[ "def __init__(self, *VAR_11, **VAR_12):...\n", "super().__init__(*VAR_11, **kwargs)\n", "keepassgtk.config_manager.configure()\n", "self.assemble_window()\n" ]
[ "def __init__(self, *args, **kwargs):...\n", "super().__init__(*args, **kwargs)\n", "keepassgtk.config_manager.configure()\n", "self.assemble_window()\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_7(VAR_3):...\n", "VAR_13 = FUNC_1(VAR_3)\n", "return os.path.isfile(VAR_13)\n" ]
[ "def goal_name_exists(goal_name):...\n", "file_name = get_goal_file_path(goal_name)\n", "return os.path.isfile(file_name)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_13(self):...\n", "return any([FiletypeCompleterExistsForFiletype(x) for x in vimsupport.\n CurrentFiletypes()])\n" ]
[ "def NativeFiletypeCompletionAvailable(self):...\n", "return any([FiletypeCompleterExistsForFiletype(x) for x in vimsupport.\n CurrentFiletypes()])\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(*VAR_0, **VAR_1):...\n", "VAR_2 = requests.Response()\n", "VAR_2.status_code = 200\n", "VAR_2._content_consumed = True\n", "return VAR_2\n" ]
[ "def mock_import_repository_task(*args, **kwargs):...\n", "resp = requests.Response()\n", "resp.status_code = 200\n", "resp._content_consumed = True\n", "return resp\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_4(self):...\n", "g.oidc_id_token = None\n", "self.authenticate_or_redirect()\n" ]
[ "def before_request(self):...\n", "g.oidc_id_token = None\n", "self.authenticate_or_redirect()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_4(VAR_4, VAR_5):...\n", "print(VAR_1)\n", "VAR_12 = VAR_1[0]\n", "print(VAR_12)\n", "print(VAR_4)\n", "for dev in VAR_1:\n", "for ap in dev.AccessPoints:\n", "print(VAR_26)\n", "if ap.Ssid == VAR_4:\n", "VAR_13 = {'802-11-wireless': {'security': '802-11-wireless-security'},\n '802-11-wireless-security': {'key-mgmt': 'wpa-psk', 'psk': VAR_5}}\n", "VAR_26 = ap\n", "VAR_14 = VAR_0.AddAndActivateConnection(VAR_13, VAR_12, VAR_26)\n" ]
[ "def add_newWifiConn(wifiname, wifipass):...\n", "print(wlans)\n", "wlan0 = wlans[0]\n", "print(wlan0)\n", "print(wifiname)\n", "for dev in wlans:\n", "for ap in dev.AccessPoints:\n", "print(currentwifi)\n", "if ap.Ssid == wifiname:\n", "params = {'802-11-wireless': {'security': '802-11-wireless-security'},\n '802-11-wireless-security': {'key-mgmt': 'wpa-psk', 'psk': wifipass}}\n", "currentwifi = ap\n", "conn = nm.AddAndActivateConnection(params, wlan0, currentwifi)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'", "Expr'", "For", "For", "Expr'", "Condition", "Assign'", "Assign'", "Assign'" ]
[ "def __init__(self, VAR_11):...\n", "self.key_name = VAR_11\n" ]
[ "def __init__(self, key_name):...\n", "self.key_name = key_name\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_9(self):...\n", "VAR_14 = self.new_mocked_client(client.RESTClient, url_prefix='api/v1/ports')\n", "VAR_14.update('unique-id', jsonutils.dumps({'name': 'a-new-name'}))\n", "FUNC_1('put', VAR_14, 'https://1.2.3.4/api/v1/ports/unique-id', VAR_8=\n jsonutils.dumps({'name': 'a-new-name'}))\n" ]
[ "def test_client_update(self):...\n", "api = self.new_mocked_client(client.RESTClient, url_prefix='api/v1/ports')\n", "api.update('unique-id', jsonutils.dumps({'name': 'a-new-name'}))\n", "assert_call('put', api, 'https://1.2.3.4/api/v1/ports/unique-id', data=\n jsonutils.dumps({'name': 'a-new-name'}))\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_0(self):...\n", "self._mox = mox.Mox()\n", "self.mock(logging, 'error', lambda *_: None)\n", "self.mock(logging, 'exception', lambda *_: None)\n", "self.mock(logging, 'info', lambda *_: None)\n", "self.mock(logging, 'warning', lambda *_: None)\n", "self._mox.StubOutWithMock(time, 'sleep')\n", "self._mox.StubOutWithMock(urllib2, 'urlopen')\n" ]
[ "def setUp(self):...\n", "self._mox = mox.Mox()\n", "self.mock(logging, 'error', lambda *_: None)\n", "self.mock(logging, 'exception', lambda *_: None)\n", "self.mock(logging, 'info', lambda *_: None)\n", "self.mock(logging, 'warning', lambda *_: None)\n", "self._mox.StubOutWithMock(time, 'sleep')\n", "self._mox.StubOutWithMock(urllib2, 'urlopen')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def __init__(self, VAR_83, VAR_10, VAR_1, VAR_65):...\n", "self.workflow = VAR_83\n", "self.name = VAR_10\n", "self._snakefile = VAR_1\n", "self._workdir = VAR_65\n" ]
[ "def __init__(self, workflow, name, snakefile, workdir):...\n", "self.workflow = workflow\n", "self.name = name\n", "self._snakefile = snakefile\n", "self._workdir = workdir\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_3(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_5 = jc.Contract()\n", "return st.OperationContract(self.agent.make_create_app_operation(VAR_1=self\n .bindings, application=self.TEST_APP), VAR_5=contract)\n" ]
[ "def create_app(self):...\n", "\"\"\"docstring\"\"\"\n", "contract = jc.Contract()\n", "return st.OperationContract(self.agent.make_create_app_operation(bindings=\n self.bindings, application=self.TEST_APP), contract=contract)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_13(VAR_35, VAR_27):...\n", "\"\"\"docstring\"\"\"\n", "return 'string'.format(FUNC_12(VAR_35).replace('\\n', '<br>')).replace('\\n', '')\n" ]
[ "def _output_html(complete_output, categories):...\n", "\"\"\"docstring\"\"\"\n", "return \"\"\"<html>\n <head>\n <title>Automatically generated keywords by bibclassify</title>\n </head>\n <body>\n {0}\n </body>\n </html>\"\"\".format(\n _output_text(complete_output).replace('\\n', '<br>')).replace('\\n', '')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "@decorators.require_cronjob...\n", "service.unregister_builders()\n" ]
[ "@decorators.require_cronjob...\n", "service.unregister_builders()\n" ]
[ 0, 0 ]
[ "Condition", "Expr'" ]
[ "def FUNC_11(self, VAR_5, VAR_6):...\n", "return np.abs(VAR_5[0] - VAR_6[0]) + np.abs(VAR_5[1] - VAR_6[1])\n" ]
[ "def getDist(self, point1, point2):...\n", "return np.abs(point1[0] - point2[0]) + np.abs(point1[1] - point2[1])\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "async def FUNC_6(self, VAR_10):...\n", "VAR_9 = self.controller\n", "for VAR_19 in VAR_9._enabledBehavior:\n", "VAR_39 = getattr(VAR_19, 'on' + VAR_10)\n", "await VAR_9.processItem(VAR_5)\n" ]
[ "async def _runon(self, method):...\n", "controller = self.controller\n", "for b in controller._enabledBehavior:\n", "f = getattr(b, 'on' + method)\n", "await controller.processItem(item)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Assign'", "For", "Assign'", "Expr'" ]
[ "def FUNC_19(self):...\n", "VAR_14 = ['8.9.10.11', '9.10.11.12', '10.11.12.13']\n", "VAR_15 = self.mock_nsx_clustered_api(nsx_api_managers=conf_managers)\n", "VAR_15._validate = mock.Mock()\n", "VAR_20 = list(VAR_15._endpoints.values())\n", "def FUNC_25(VAR_21):...\n", "return [VAR_15._select_endpoint() for i in range(VAR_21)]\n" ]
[ "def test_cluster_round_robin_servicing(self):...\n", "conf_managers = ['8.9.10.11', '9.10.11.12', '10.11.12.13']\n", "api = self.mock_nsx_clustered_api(nsx_api_managers=conf_managers)\n", "api._validate = mock.Mock()\n", "eps = list(api._endpoints.values())\n", "def _get_schedule(num_eps):...\n", "return [api._select_endpoint() for i in range(num_eps)]\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Return'" ]
[ "def FUNC_14(self, VAR_17):...\n", "self.cursor.execute(\n \"SELECT * FROM customer WHERE LOWER(username) = LOWER('\" + VAR_17 + \"');\")\n", "return self.cursor.fetchone()\n" ]
[ "def find_customer(self, username):...\n", "self.cursor.execute(\n \"SELECT * FROM customer WHERE LOWER(username) = LOWER('\" + username + \"');\"\n )\n", "return self.cursor.fetchone()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Return'" ]
[ "def FUNC_26(self, VAR_14):...\n", "" ]
[ "def get_max_length(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def __init__(self, VAR_18, VAR_35=200, **VAR_16):...\n", "CLASS_17.__init__(self, VAR_18, VAR_35=length, VAR_36=errors.NO_TITLE,\n VAR_37=errors.TITLE_TOO_LONG, **kw)\n" ]
[ "def __init__(self, item, length=200, **kw):...\n", "VLength.__init__(self, item, length=length, empty_error=errors.NO_TITLE,\n length_error=errors.TITLE_TOO_LONG, **kw)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_5(self):...\n", "VAR_1 = '/api/apps'\n", "VAR_2 = self.client.post(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 201)\n", "VAR_3 = VAR_2.data['id']\n", "VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_2.status_code, 201)\n", "VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 2)\n", "VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 2)\n", "VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 1)\n", "VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_4 = {'web': 20}\n", "VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_2.status_code, 204)\n", "chaos.CREATE_ERROR_RATE = 0.5\n", "chaos.START_ERROR_RATE = 0.5\n", "VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_4 = {'image': 'autotest/example', 'sha': 'b' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_2.status_code, 503)\n", "VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 2)\n", "VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 20)\n", "VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n", "self.assertEqual(VAR_5, set(['up']))\n" ]
[ "def test_build_chaos(self):...\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 2)\n", "url = '/api/apps/{app_id}/releases'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 2)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 1)\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'web': 20}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 204)\n", "chaos.CREATE_ERROR_RATE = 0.5\n", "chaos.START_ERROR_RATE = 0.5\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example', 'sha': 'b' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 503)\n", "url = '/api/apps/{app_id}/releases'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 2)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 20)\n", "states = set([c['state'] for c in response.data['results']])\n", "self.assertEqual(states, set(['up']))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_1(VAR_1, VAR_6):...\n", "" ]
[ "def directory_index(path, fullpath):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_1(self, VAR_1, VAR_2, VAR_5, VAR_6, VAR_4={}):...\n", "\"\"\"docstring\"\"\"\n", "VAR_22 = self.pool.get('stock.location')\n", "VAR_23 = self.pool.get('account.move')\n", "VAR_24 = self.pool.get('account.move.line')\n", "VAR_25 = VAR_6.get('new_price', 0.0)\n", "VAR_19 = VAR_6.get('stock_output_account', False)\n", "VAR_18 = VAR_6.get('stock_input_account', False)\n", "VAR_20 = VAR_6.get('stock_journal', False)\n", "VAR_17 = self.browse(VAR_1, VAR_2, VAR_5)[0]\n", "VAR_21 = VAR_17.categ_id.property_stock_variation\n", "VAR_26 = VAR_21 and VAR_21.id or False\n", "if not VAR_26:\n", "VAR_27 = []\n", "VAR_28 = VAR_22.search(VAR_1, VAR_2, [('usage', '=', 'internal')])\n", "for rec_id in VAR_5:\n", "for location in VAR_22.browse(VAR_1, VAR_2, VAR_28):\n", "return VAR_27\n", "VAR_49 = VAR_4.copy()\n", "self.write(VAR_1, VAR_2, rec_id, {'standard_price': VAR_25})\n", "VAR_49.update({'location': location.id, 'compute_child': False})\n", "VAR_45 = self.browse(VAR_1, VAR_2, rec_id, VAR_4=c)\n", "VAR_53 = VAR_45.qty_available\n", "VAR_54 = VAR_45.standard_price - VAR_25\n", "if not VAR_54:\n", "if VAR_53:\n", "VAR_56 = location.company_id and location.company_id.id or False\n", "if not VAR_56:\n", "if not VAR_20:\n", "VAR_20 = (VAR_45.categ_id.property_stock_journal and VAR_45.categ_id.\n property_stock_journal.id or False)\n", "if not VAR_20:\n", "VAR_57 = VAR_23.create(VAR_1, VAR_2, {'journal_id': VAR_20, 'company_id':\n VAR_56})\n", "VAR_27.append(VAR_57)\n", "if VAR_54 > 0:\n", "if not VAR_18:\n", "if VAR_54 < 0:\n", "VAR_18 = VAR_45.product_tmpl_id.property_stock_account_input.id\n", "if not VAR_18:\n", "if not VAR_19:\n", "VAR_18 = VAR_45.categ_id.property_stock_account_input_categ.id\n", "if not VAR_18:\n", "VAR_19 = VAR_45.product_tmpl_id.property_stock_account_output.id\n", "if not VAR_19:\n", "VAR_58 = VAR_53 * VAR_54\n", "VAR_19 = VAR_45.categ_id.property_stock_account_output_categ.id\n", "if not VAR_19:\n", "VAR_24.create(VAR_1, VAR_2, {'name': VAR_45.name, 'account_id': VAR_18,\n 'debit': VAR_58, 'move_id': VAR_57})\n", "VAR_58 = VAR_53 * -VAR_54\n", "VAR_24.create(VAR_1, VAR_2, {'name': VAR_45.categ_id.name, 'account_id':\n VAR_26, 'credit': VAR_58, 'move_id': VAR_57})\n", "VAR_24.create(VAR_1, VAR_2, {'name': VAR_45.name, 'account_id': VAR_19,\n 'credit': VAR_58, 'move_id': VAR_57})\n", "VAR_24.create(VAR_1, VAR_2, {'name': VAR_45.categ_id.name, 'account_id':\n VAR_26, 'debit': VAR_58, 'move_id': VAR_57})\n" ]
[ "def do_change_standard_price(self, cr, uid, ids, datas, context={}):...\n", "\"\"\"docstring\"\"\"\n", "location_obj = self.pool.get('stock.location')\n", "move_obj = self.pool.get('account.move')\n", "move_line_obj = self.pool.get('account.move.line')\n", "new_price = datas.get('new_price', 0.0)\n", "stock_output_acc = datas.get('stock_output_account', False)\n", "stock_input_acc = datas.get('stock_input_account', False)\n", "journal_id = datas.get('stock_journal', False)\n", "product_obj = self.browse(cr, uid, ids)[0]\n", "account_variation = product_obj.categ_id.property_stock_variation\n", "account_variation_id = account_variation and account_variation.id or False\n", "if not account_variation_id:\n", "move_ids = []\n", "loc_ids = location_obj.search(cr, uid, [('usage', '=', 'internal')])\n", "for rec_id in ids:\n", "for location in location_obj.browse(cr, uid, loc_ids):\n", "return move_ids\n", "c = context.copy()\n", "self.write(cr, uid, rec_id, {'standard_price': new_price})\n", "c.update({'location': location.id, 'compute_child': False})\n", "product = self.browse(cr, uid, rec_id, context=c)\n", "qty = product.qty_available\n", "diff = product.standard_price - new_price\n", "if not diff:\n", "if qty:\n", "company_id = location.company_id and location.company_id.id or False\n", "if not company_id:\n", "if not journal_id:\n", "journal_id = (product.categ_id.property_stock_journal and product.categ_id.\n property_stock_journal.id or False)\n", "if not journal_id:\n", "move_id = move_obj.create(cr, uid, {'journal_id': journal_id, 'company_id':\n company_id})\n", "move_ids.append(move_id)\n", "if diff > 0:\n", "if not stock_input_acc:\n", "if diff < 0:\n", "stock_input_acc = product.product_tmpl_id.property_stock_account_input.id\n", "if not stock_input_acc:\n", "if not stock_output_acc:\n", "stock_input_acc = product.categ_id.property_stock_account_input_categ.id\n", "if not stock_input_acc:\n", "stock_output_acc = product.product_tmpl_id.property_stock_account_output.id\n", "if not stock_output_acc:\n", "amount_diff = qty * diff\n", "stock_output_acc = product.categ_id.property_stock_account_output_categ.id\n", "if not stock_output_acc:\n", "move_line_obj.create(cr, uid, {'name': product.name, 'account_id':\n stock_input_acc, 'debit': amount_diff, 'move_id': move_id})\n", "amount_diff = qty * -diff\n", "move_line_obj.create(cr, uid, {'name': product.categ_id.name, 'account_id':\n account_variation_id, 'credit': amount_diff, 'move_id': move_id})\n", "move_line_obj.create(cr, uid, {'name': product.name, 'account_id':\n stock_output_acc, 'credit': amount_diff, 'move_id': move_id})\n", "move_line_obj.create(cr, uid, {'name': product.categ_id.name, 'account_id':\n account_variation_id, 'debit': amount_diff, 'move_id': move_id})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "For", "For", "Return'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Condition", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_3(self):...\n", "VAR_8 = FUNC_0()[0]\n", "self.assertEqual(False, is_profitable(VAR_8))\n" ]
[ "def test_is_profitable(self):...\n", "path = expected_paths_small_same_currency()[0]\n", "self.assertEqual(False, is_profitable(path))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_3(self):...\n", "VAR_1 = self.objects.dg.pk\n", "self.objects.doc.matched = True\n", "self.objects.doc.save()\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/')\n", "self.assertIsInstance(VAR_2.context['extract_form'], ExtractionScriptForm,\n 'ExtractForm should be included in the page!')\n", "self.objects.gt.code = 'UN'\n", "self.objects.gt.save()\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/')\n", "self.assertFalse(VAR_2.context['extract_form'],\n 'ExtractForm should not be included in the page!')\n" ]
[ "def test_unidentifed_group_type(self):...\n", "pk = self.objects.dg.pk\n", "self.objects.doc.matched = True\n", "self.objects.doc.save()\n", "response = self.client.get(f'/datagroup/{pk}/')\n", "self.assertIsInstance(response.context['extract_form'],\n ExtractionScriptForm, 'ExtractForm should be included in the page!')\n", "self.objects.gt.code = 'UN'\n", "self.objects.gt.save()\n", "response = self.client.get(f'/datagroup/{pk}/')\n", "self.assertFalse(response.context['extract_form'],\n 'ExtractForm should not be included in the page!')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "@property...\n", "return self._params\n" ]
[ "@property...\n", "return self._params\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_5(VAR_7, VAR_8=True):...\n", "if VAR_8:\n", "VAR_6 = FUNC_4(VAR_7)\n", "sleep(0.02)\n", "if VAR_6:\n", "VAR_41 = get(VAR_7)\n", "return VAR_6, 200\n", "VAR_6 = VAR_41.text\n", "if FUNC_7(VAR_6, VAR_7=url) and VAR_8:\n", "FUNC_3(VAR_7, VAR_6)\n", "return VAR_6, VAR_41.status_code\n" ]
[ "def hit_url(url, load_from_cache=True):...\n", "if load_from_cache:\n", "data = load_pickle_data(url)\n", "sleep(0.02)\n", "if data:\n", "r = get(url)\n", "return data, 200\n", "data = r.text\n", "if is_valid(data, url=url) and load_from_cache:\n", "dump_pickle_data(url, data)\n", "return data, r.status_code\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Return'", "Assign'", "Condition", "Expr'", "Return'" ]
[ "def FUNC_18(VAR_16):...\n", "VAR_40 = ''\n", "VAR_41 = 1\n", "for item in VAR_16:\n", "if not item[0]:\n", "return VAR_40\n", "VAR_40 += '{}. {}\\n'.format(VAR_41, item[0])\n", "VAR_41 += 1\n" ]
[ "def list_to_ordered_str_list(list_of_gadgets):...\n", "string_roaster = ''\n", "index = 1\n", "for item in list_of_gadgets:\n", "if not item[0]:\n", "return string_roaster\n", "string_roaster += '{}. {}\\n'.format(index, item[0])\n", "index += 1\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Condition", "Return'", "AugAssign'", "AugAssign'" ]
[ "def FUNC_8(self, VAR_6):...\n", "\"\"\"docstring\"\"\"\n", "if self.config['become_root']:\n", "return \"su -c '%s'\" % VAR_6\n", "if self.config['need_sudo']:\n", "return 'sudo -S %s' % VAR_6\n", "return VAR_6\n" ]
[ "def _format_cmd(self, cmd):...\n", "\"\"\"docstring\"\"\"\n", "if self.config['become_root']:\n", "return \"su -c '%s'\" % cmd\n", "if self.config['need_sudo']:\n", "return 'sudo -S %s' % cmd\n", "return cmd\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "@classmethod...\n", "if VAR_5 not in VAR_4.VALID_PACKAGE_MANAGER_LIST.keys():\n", "VAR_5 = VAR_4.VALID_PACKAGE_MANAGER_LIST[VAR_5]\n", "return VAR_5\n" ]
[ "@classmethod...\n", "if package_manager not in cls.VALID_PACKAGE_MANAGER_LIST.keys():\n", "package_manager = cls.VALID_PACKAGE_MANAGER_LIST[package_manager]\n", "return package_manager\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Condition", "Assign'", "Return'" ]
[ "def FUNC_14(VAR_7):...\n", "VAR_12 = {(1):\n 'SELECT year,faculty, count(*) as N from hej where faculty=1 group by faculty,year;'\n , (2):\n 'SELECT fulltimecontingent, count(*) from hej where year =2010 group by fulltimecontingent'\n , (3):\n 'SELECT parttimecontingent, count(*) from hej where year =2010 group by parttimecontingent'\n , (4):\n 'SELECT year,count(*) from hej where (tenured=1 or tenured_track=1) group by year;'\n , (5): 'string', (6): 'string'}\n", "VAR_9 = FUNC_15(VAR_12.get(VAR_7, 0))\n", "return VAR_9\n" ]
[ "def demo(n):...\n", "switcher = {(1):\n 'SELECT year,faculty, count(*) as N from hej where faculty=1 group by faculty,year;'\n , (2):\n 'SELECT fulltimecontingent, count(*) from hej where year =2010 group by fulltimecontingent'\n , (3):\n 'SELECT parttimecontingent, count(*) from hej where year =2010 group by parttimecontingent'\n , (4):\n 'SELECT year,count(*) from hej where (tenured=1 or tenured_track=1) group by year;'\n , (5):\n 'SELECT count(*) from hej where (tenured = 1 or tenured_track =1) and (year=2007 or year=2012 or year=2017) group by year'\n , (6):\n 'SELECT count(*) as N, maintable.minimumedurequirements as R from hej,maintable where (hej.jobid=maintable.jobid) and (hej.faculty = 1) and (hej.year>= 2010) group by maintable.minimumedurequirements'\n }\n", "z = queryAll(switcher.get(n, 0))\n", "return z\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_28(self, VAR_18=False):...\n", "\"\"\"docstring\"\"\"\n", "def FUNC_45(VAR_25, VAR_39):...\n", "if self.parentfield:\n", "return '{} #{}: {}: {}'.format(_('Row'), self.idx, _(VAR_25.label), VAR_39)\n", "return '{}: {}'.format(_(VAR_25.label), VAR_39)\n" ]
[ "def get_invalid_links(self, is_submittable=False):...\n", "\"\"\"docstring\"\"\"\n", "def get_msg(df, docname):...\n", "if self.parentfield:\n", "return '{} #{}: {}: {}'.format(_('Row'), self.idx, _(df.label), docname)\n", "return '{}: {}'.format(_(df.label), docname)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_9(self, VAR_17=None):...\n", "\"\"\"docstring\"\"\"\n", "return CLASS_0.generate([api.get_current_identity().to_bytes()], VAR_17)\n" ]
[ "def generate_xsrf_token(self, xsrf_token_data=None):...\n", "\"\"\"docstring\"\"\"\n", "return XSRFToken.generate([api.get_current_identity().to_bytes()],\n xsrf_token_data)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "@classmethod...\n", "VAR_14 = [VAR_6.cmdlineConfig]\n", "while len(VAR_6.docConfig):\n", "for stub in VAR_6.docConfig.pop(0).parsedConfig:\n", "for stub in yaml.load_all(re.sub('(^|\\\\n\\\\.\\\\.\\\\.)[\\\\s\\\\S]*?($|\\\\n---)',\n", "for c in reversed(VAR_14):\n", "VAR_14.append(stub)\n", "if stub:\n", "VAR_6.recursiveUpdate(VAR_6.effectiveConfig, c)\n", "VAR_14.append(stub)\n" ]
[ "@classmethod...\n", "configStubs = [cls.cmdlineConfig]\n", "while len(cls.docConfig):\n", "for stub in cls.docConfig.pop(0).parsedConfig:\n", "for stub in yaml.load_all(re.sub('(^|\\\\n\\\\.\\\\.\\\\.)[\\\\s\\\\S]*?($|\\\\n---)',\n", "for c in reversed(configStubs):\n", "configStubs.append(stub)\n", "if stub:\n", "cls.recursiveUpdate(cls.effectiveConfig, c)\n", "configStubs.append(stub)\n" ]
[ 0, 0, 2, 0, 0, 0, 0, 0, 2, 0 ]
[ "Condition", "Assign'", "Condition", "For", "For", "For", "Expr'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_12(self):...\n", "self.run_test_case(self.scenario.upsert_google_server_group_tags())\n" ]
[ "def Xtest_a_upsert_server_group_tags(self):...\n", "self.run_test_case(self.scenario.upsert_google_server_group_tags())\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_13(self, VAR_3: UserRole):...\n", "\"\"\"docstring\"\"\"\n", "self.role = VAR_3.value\n", "db.session.commit()\n" ]
[ "def set_user_role(self, role: UserRole):...\n", "\"\"\"docstring\"\"\"\n", "self.role = role.value\n", "db.session.commit()\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "def FUNC_9(self, VAR_8):...\n", "\"\"\"docstring\"\"\"\n", "logging.error('Error: %s\\n%s', self._attributes, VAR_8)\n", "self.post_event('bot_error', VAR_8)\n", "logging.exception('post_error(%s) failed.', VAR_8)\n" ]
[ "def post_error(self, message):...\n", "\"\"\"docstring\"\"\"\n", "logging.error('Error: %s\\n%s', self._attributes, message)\n", "self.post_event('bot_error', message)\n", "logging.exception('post_error(%s) failed.', message)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'", "Expr'" ]
[ "@app.route(...\n", "app.logger.debug('duplicate item {}'.format(VAR_9))\n", "VAR_33 = VAR_19.item(VAR_9)\n", "VAR_19.sequence_items.append(VAR_33.duplicate())\n", "return VAR_19.to_map()\n" ]
[ "@app.route(...\n", "app.logger.debug('duplicate item {}'.format(sequence_item_id))\n", "sequence_item = sequence.item(sequence_item_id)\n", "sequence.sequence_items.append(sequence_item.duplicate())\n", "return sequence.to_map()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_0(self, VAR_1, VAR_6, VAR_7, VAR_3):...\n", "VAR_8, VAR_9, VAR_10 = ((VAR_7 or '').split('|') + ['', '', ''])[:3]\n", "return 'string' % (_('Name'), self.text_input(VAR_6 + '.name', VAR_8), _(\n 'Phone'), self.text_input(VAR_6 + '.phone', VAR_9), _('E-mail'), self.\n text_input(VAR_6 + '.email', VAR_10))\n" ]
[ "def make_input(self, version, name, value, attribute):...\n", "contact_name, contact_phone, contact_email = ((value or '').split('|') + [\n '', '', ''])[:3]\n", "return \"\"\"<table>\n <tr><td class=\"label\">%s</td><td>%s</td></tr>\n <tr><td class=\"label\">%s</td><td>%s</td></tr>\n <tr><td class=\"label\">%s</td><td>%s</td></tr>\n </table>\"\"\" % (\n _('Name'), self.text_input(name + '.name', contact_name), _('Phone'),\n self.text_input(name + '.phone', contact_phone), _('E-mail'), self.\n text_input(name + '.email', contact_email))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_1(self, VAR_0):...\n", "" ]
[ "def put(self, user_id):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_4(VAR_2):...\n", "VAR_8 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\users\\\\' + VAR_3 + '.db')\n", "VAR_9 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + '\\\\cf.db')\n", "VAR_10 = VAR_8.cursor()\n", "VAR_11 = VAR_9.cursor()\n", "VAR_11.execute('SELECT * FROM ' + VAR_2)\n", "VAR_18 = list()\n", "VAR_12 = VAR_11.fetchone()\n", "while VAR_12 != None:\n", "VAR_10.execute(\"SELECT * FROM result WHERE problem = '\" + str(VAR_12[0]) +\n \"' AND diff = '\" + str(VAR_12[1]) + \"' AND NOT verdict = 'OK'\")\n", "VAR_8.close()\n", "VAR_30 = VAR_10.fetchone()\n", "VAR_9.close()\n", "if VAR_30 != None and VAR_30 in VAR_4:\n", "return VAR_18\n", "VAR_18.append(VAR_30)\n", "VAR_12 = VAR_11.fetchone()\n" ]
[ "def find_intersection(tag):...\n", "conn = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\users\\\\' + username + '.db')\n", "conn2 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + '\\\\cf.db')\n", "cursor = conn.cursor()\n", "cursor2 = conn2.cursor()\n", "cursor2.execute('SELECT * FROM ' + tag)\n", "a = list()\n", "problem_and_diff = cursor2.fetchone()\n", "while problem_and_diff != None:\n", "cursor.execute(\"SELECT * FROM result WHERE problem = '\" + str(\n problem_and_diff[0]) + \"' AND diff = '\" + str(problem_and_diff[1]) +\n \"' AND NOT verdict = 'OK'\")\n", "conn.close()\n", "problem_and_diff_and_ok = cursor.fetchone()\n", "conn2.close()\n", "if problem_and_diff_and_ok != None and problem_and_diff_and_ok in tasks:\n", "return a\n", "a.append(problem_and_diff_and_ok)\n", "problem_and_diff = cursor2.fetchone()\n" ]
[ 0, 0, 0, 0, 0, 0, 4, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "Assign'", "Expr'", "Condition", "Return'", "Expr'", "Assign'" ]
[ "@pytest.yield_fixture...\n", "config.app = Sanic('test_sanic_app')\n", "config_app()\n", "yield config.app\n" ]
[ "@pytest.yield_fixture...\n", "config.app = Sanic('test_sanic_app')\n", "config_app()\n", "yield config.app\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_22(self, VAR_14):...\n", "if self.is_relation_many_to_one(VAR_14):\n", "VAR_34 = self.get_relation_fk(VAR_14)\n", "return self.list_columns[VAR_14].nullable\n", "return False\n", "return VAR_34.nullable\n" ]
[ "def is_nullable(self, col_name):...\n", "if self.is_relation_many_to_one(col_name):\n", "col = self.get_relation_fk(col_name)\n", "return self.list_columns[col_name].nullable\n", "return False\n", "return col.nullable\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'", "Return'", "Return'" ]
[ "def FUNC_8(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_14 = tempfile.mkdtemp(prefix='sos-collector-', dir='/var/tmp')\n", "self.config['tmp_dir'] = VAR_14\n", "self.config['tmp_dir_created'] = True\n" ]
[ "def create_tmp_dir(self):...\n", "\"\"\"docstring\"\"\"\n", "tmpdir = tempfile.mkdtemp(prefix='sos-collector-', dir='/var/tmp')\n", "self.config['tmp_dir'] = tmpdir\n", "self.config['tmp_dir_created'] = True\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_10(self, VAR_14):...\n", "" ]
[ "def is_text(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_6(self):...\n", "VAR_4 = os.getcwd()\n", "self.assertEqual(tempdir, path)\n", "self.assertEqual(os.path.realpath(tempdir), os.getcwd())\n", "self.assertEqual(VAR_4, os.getcwd())\n", "self.assertEqual(VAR_4, os.getcwd())\n" ]
[ "def test_simple_pushd(self):...\n", "pre_cwd = os.getcwd()\n", "self.assertEqual(tempdir, path)\n", "self.assertEqual(os.path.realpath(tempdir), os.getcwd())\n", "self.assertEqual(pre_cwd, os.getcwd())\n", "self.assertEqual(pre_cwd, os.getcwd())\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_11(self, VAR_1):...\n", "VAR_22 = VAR_1.split('-')\n", "VAR_23 = int(datetime(int(VAR_22[0]), int(VAR_22[1]), int(VAR_22[2]), 0, 0,\n 0, tzinfo=pytz.utc).timestamp())\n", "VAR_24 = int(datetime(int(VAR_22[0]), int(VAR_22[1]), int(VAR_22[2]), 23, \n 59, 59, tzinfo=pytz.utc).timestamp())\n", "return VAR_23, VAR_24\n" ]
[ "def get_epoch_day(self, date):...\n", "s = date.split('-')\n", "epoch_start = int(datetime(int(s[0]), int(s[1]), int(s[2]), 0, 0, 0, tzinfo\n =pytz.utc).timestamp())\n", "epoch_end = int(datetime(int(s[0]), int(s[1]), int(s[2]), 23, 59, 59,\n tzinfo=pytz.utc).timestamp())\n", "return epoch_start, epoch_end\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "\"\"\"string\"\"\"\n", "import copy\n", "from flask import current_app, session\n", "from flask.ext.login import current_user\n", "from invenio.ext.sqlalchemy.utils import session_manager\n", "VAR_0 = dict(title='ORCID', description=\n 'Connecting Research and Researchers.', icon='', authorized_handler=\n 'invenio.modules.oauthclient.handlers:authorized_signup_handler',\n disconnect_handler=\n 'invenio.modules.oauthclient.handlers:disconnect_handler',\n signup_handler=dict(info=\n 'invenio.modules.oauthclient.contrib.orcid:account_info', setup=\n 'invenio.modules.oauthclient.contrib.orcid:account_setup', view=\n 'invenio.modules.oauthclient.handlers:signup_handler'), params=dict(\n request_token_params={'scope': '/authenticate'}, base_url=\n 'https://pub.orcid.com/', request_token_url=None, access_token_url=\n 'https://pub.orcid.org/oauth/token', access_token_method='POST',\n authorize_url='https://orcid.org/oauth/authorize#show_login', app_key=\n 'ORCID_APP_CREDENTIALS', content_type='application/json'))\n", "\"\"\" ORCID Remote Application. \"\"\"\n", "VAR_1 = copy.deepcopy(VAR_0)\n", "\"\"\"ORCID Sandbox Remote Application.\"\"\"\n", "VAR_1['params'].update(dict(base_url='https://api.sandbox.orcid.org/',\n access_token_url='https://api.sandbox.orcid.org/oauth/token',\n authorize_url='https://sandbox.orcid.org/oauth/authorize#show_login'))\n", "def FUNC_0(VAR_2, VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "VAR_5 = dict(external_id=resp.get('orcid'), external_method='orcid')\n", "return VAR_5\n" ]
[ "\"\"\"Pre-configured remote application for enabling sign in/up with ORCID.\n\n**Usage:**\n\n1. Edit your configuration and add:\n\n .. code-block:: python\n\n from invenio.modules.oauthclient.contrib import orcid\n OAUTHCLIENT_REMOTE_APPS = dict(\n orcid=orcid.REMOTE_APP,\n )\n\n ORCID_APP_CREDENTIALS = dict(\n consumer_key=\"changeme\",\n consumer_secret=\"changeme\",\n )\n\n Note, if you want to use the ORCID sandbox, use ``orcid.REMOTE_SANDBOX_APP``\n instead of ``orcid.REMOTE_APP``.\n\n2. Register a new application with ORCID. When registering the\n application ensure that the *Redirect URI* points to:\n ``CFG_SITE_SECURE_URL/oauth/authorized/orcid/`` (note, ORCID does not\n allow localhost to be used, thus testing on development machines is\n somewhat complicated by this).\n\n\n3. Grab the *Client ID* and *Client Secret* after registering the application\n and add them to your instance configuration (``invenio.cfg``):\n\n .. code-block:: python\n\n ORCID_APP_CREDENTIALS = dict(\n consumer_key=\"<CLIENT ID>\",\n consumer_secret=\"<CLIENT SECRET>\",\n )\n\n4. Now go to ``CFG_SITE_SECURE_URL/oauth/login/orcid/`` (e.g.\n http://localhost:4000/oauth/login/orcid/)\n\n5. Also, you should see ORCID listed under Linked accounts:\n http://localhost:4000//account/settings/linkedaccounts/\n\nBy default the ORCID module will try first look if a link already exists\nbetween a ORCID account and a user. If no link is found, the user is asked\nto provide an email address to sign-up.\n\nIn templates you can add a sign in/up link:\n\n.. code-block:: jinja\n\n <a href=\"{{url_for('oauthclient.login', remote_app='orcid')}}\">Sign in with ORCID</a>\n\n\"\"\"\n", "import copy\n", "from flask import current_app, session\n", "from flask.ext.login import current_user\n", "from invenio.ext.sqlalchemy.utils import session_manager\n", "REMOTE_APP = dict(title='ORCID', description=\n 'Connecting Research and Researchers.', icon='', authorized_handler=\n 'invenio.modules.oauthclient.handlers:authorized_signup_handler',\n disconnect_handler=\n 'invenio.modules.oauthclient.handlers:disconnect_handler',\n signup_handler=dict(info=\n 'invenio.modules.oauthclient.contrib.orcid:account_info', setup=\n 'invenio.modules.oauthclient.contrib.orcid:account_setup', view=\n 'invenio.modules.oauthclient.handlers:signup_handler'), params=dict(\n request_token_params={'scope': '/authenticate'}, base_url=\n 'https://pub.orcid.com/', request_token_url=None, access_token_url=\n 'https://pub.orcid.org/oauth/token', access_token_method='POST',\n authorize_url='https://orcid.org/oauth/authorize#show_login', app_key=\n 'ORCID_APP_CREDENTIALS', content_type='application/json'))\n", "\"\"\" ORCID Remote Application. \"\"\"\n", "REMOTE_SANDBOX_APP = copy.deepcopy(REMOTE_APP)\n", "\"\"\"ORCID Sandbox Remote Application.\"\"\"\n", "REMOTE_SANDBOX_APP['params'].update(dict(base_url=\n 'https://api.sandbox.orcid.org/', access_token_url=\n 'https://api.sandbox.orcid.org/oauth/token', authorize_url=\n 'https://sandbox.orcid.org/oauth/authorize#show_login'))\n", "def account_info(remote, resp):...\n", "\"\"\"docstring\"\"\"\n", "account_info = dict(external_id=resp.get('orcid'), external_method='orcid')\n", "return account_info\n" ]
[ 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_6(self, VAR_41):...\n", "if VAR_41:\n", "return self.error()\n", "return Thing._by_fullname(VAR_41, False, data=True)\n" ]
[ "def run(self, fullname):...\n", "if fullname:\n", "return self.error()\n", "return Thing._by_fullname(fullname, False, data=True)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_6(self):...\n", "if not (VAR_101.default_sr or VAR_101.user_is_loggedin and VAR_101.site.\n", "return False\n", "return True\n" ]
[ "def run(self):...\n", "if not (c.default_sr or c.user_is_loggedin and c.site.can_submit(c.user)):\n", "return False\n", "return True\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_12(self, VAR_21):...\n", "\"\"\"docstring\"\"\"\n", "VAR_24 = self.getfile(VAR_21)\n", "return False\n", "return VAR_24[VAR_2] == VAR_11\n" ]
[ "def islink(self, path):...\n", "\"\"\"docstring\"\"\"\n", "f = self.getfile(path)\n", "return False\n", "return f[A_TYPE] == T_LINK\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'", "Return'" ]
[ "def FUNC_4():...\n", "if request.params.get('_csrftoken', None) in VAR_6['modtokens']:\n", "VAR_4 = VAR_6['modtokens']\n", "return False\n", "VAR_4.remove(request.params.get('_csrftoken'))\n", "VAR_6['modtokens'] = VAR_4\n", "VAR_6.save()\n", "return True\n" ]
[ "def has_token():...\n", "if request.params.get('_csrftoken', None) in session['modtokens']:\n", "tokens = session['modtokens']\n", "return False\n", "tokens.remove(request.params.get('_csrftoken'))\n", "session['modtokens'] = tokens\n", "session.save()\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'", "Expr'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_20(self, VAR_6):...\n", "if VAR_6['host'] != 'localhost' and not self.run_on_localhost(VAR_6):\n", "self.logger.debug(\"Starting remote component '%s' on host '%s'\" % (VAR_6[\n 'name'], VAR_6['host']))\n", "VAR_13 = '%s/%s' % (VAR_3, VAR_6['name'])\n", "self.start_remote_component(VAR_6['name'], VAR_6['host'])\n", "VAR_9 = FUNC_6(self.session, VAR_6['name'])\n", "if VAR_9:\n", "self.logger.debug(\"Restarting '%s' in old window\" % VAR_6['name'])\n", "self.logger.info(\"creating window '%s'\" % VAR_6['name'])\n", "FUNC_5(VAR_9, VAR_6['cmd'][0]['start'], VAR_13, VAR_6['name'])\n", "VAR_9 = self.session.new_window(VAR_6['name'])\n", "FUNC_5(VAR_9, VAR_6['cmd'][0]['start'], VAR_13, VAR_6['name'])\n" ]
[ "def start_component_without_deps(self, comp):...\n", "if comp['host'] != 'localhost' and not self.run_on_localhost(comp):\n", "self.logger.debug(\"Starting remote component '%s' on host '%s'\" % (comp[\n 'name'], comp['host']))\n", "log_file = '%s/%s' % (TMP_LOG_PATH, comp['name'])\n", "self.start_remote_component(comp['name'], comp['host'])\n", "window = find_window(self.session, comp['name'])\n", "if window:\n", "self.logger.debug(\"Restarting '%s' in old window\" % comp['name'])\n", "self.logger.info(\"creating window '%s'\" % comp['name'])\n", "start_window(window, comp['cmd'][0]['start'], log_file, comp['name'])\n", "window = self.session.new_window(comp['name'])\n", "start_window(window, comp['cmd'][0]['start'], log_file, comp['name'])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_10(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_21 = self.agent.type_to_payload(\n 'deregisterInstancesFromGoogleLoadBalancerDescription', {\n 'loadBalancerNames': [self.__use_lb_name], 'instanceIds': self.\n use_instance_names[:2], 'region': self.bindings['TEST_GCE_REGION'],\n 'credentials': self.bindings['GCE_CREDENTIALS']})\n", "VAR_20 = gcp.GceContractBuilder(self.gce_observer)\n", "VAR_20.new_clause_builder('Instances not in Target Pool', retryable_for_secs=5\n ).list_resources('target-pools', VAR_35=['--region', self.bindings[\n 'TEST_GCE_REGION']]).excludes_pred_list([jc.PathContainsPredicate(\n 'name', self.__use_lb_tp_name), jc.PathElementsContainPredicate(\n 'instances', self.use_instance_names[0]), jc.\n PathElementsContainPredicate('instances', self.use_instance_names[1])])\n", "return st.OperationContract(self.new_post_operation(title=\n 'deregister_load_balancer_instances', data=payload, VAR_29='ops'),\n contract=builder.build())\n" ]
[ "def deregister_load_balancer_instances(self):...\n", "\"\"\"docstring\"\"\"\n", "payload = self.agent.type_to_payload(\n 'deregisterInstancesFromGoogleLoadBalancerDescription', {\n 'loadBalancerNames': [self.__use_lb_name], 'instanceIds': self.\n use_instance_names[:2], 'region': self.bindings['TEST_GCE_REGION'],\n 'credentials': self.bindings['GCE_CREDENTIALS']})\n", "builder = gcp.GceContractBuilder(self.gce_observer)\n", "builder.new_clause_builder('Instances not in Target Pool', retryable_for_secs=5\n ).list_resources('target-pools', extra_args=['--region', self.bindings[\n 'TEST_GCE_REGION']]).excludes_pred_list([jc.PathContainsPredicate(\n 'name', self.__use_lb_tp_name), jc.PathElementsContainPredicate(\n 'instances', self.use_instance_names[0]), jc.\n PathElementsContainPredicate('instances', self.use_instance_names[1])])\n", "return st.OperationContract(self.new_post_operation(title=\n 'deregister_load_balancer_instances', data=payload, path='ops'),\n contract=builder.build())\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_6():...\n", "VAR_20 = db.Filesystem.get_content(VAR_4)\n", "VAR_6.set_result(VAR_20)\n" ]
[ "def inquire_data_async():...\n", "_tf_data = db.Filesystem.get_content(file_path)\n", "future.set_result(_tf_data)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_10():...\n", "VAR_9 = cfg.get_config()\n", "VAR_10 = sa.create_engine(VAR_9.dsn)\n", "tables.Base.metadata.create_all(VAR_10)\n", "VAR_10.dispose()\n" ]
[ "def create_tables():...\n", "config = cfg.get_config()\n", "engine = sa.create_engine(config.dsn)\n", "tables.Base.metadata.create_all(engine)\n", "engine.dispose()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_10(self, VAR_2=None, VAR_3=None, VAR_4=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_12 = []\n", "def FUNC_31(VAR_1, VAR_16):...\n", "VAR_30 = VAR_1.xsrf_token_data == {'some': 'data'}\n", "VAR_12.append((VAR_16, VAR_30))\n", "@api.public...\n", "self.response.write(self.generate_xsrf_token({'some': 'data'}))\n", "@api.public...\n", "FUNC_31(self, 'POST')\n", "@api.public...\n", "FUNC_31(self, 'PUT')\n", "@api.public...\n", "FUNC_31(self, 'DELETE')\n", "if VAR_2 is not None:\n", "CLASS_7.xsrf_token_enforce_on = VAR_2\n", "if VAR_3 is not None:\n", "CLASS_7.xsrf_token_header = VAR_3\n", "if VAR_4 is not None:\n", "CLASS_7.xsrf_token_request_param = VAR_4\n", "VAR_7 = self.make_test_app('/request', CLASS_7)\n", "return VAR_7, VAR_12\n" ]
[ "def make_xsrf_handling_app(self, xsrf_token_enforce_on=None,...\n", "\"\"\"docstring\"\"\"\n", "calls = []\n", "def record(request_handler, method):...\n", "is_valid = request_handler.xsrf_token_data == {'some': 'data'}\n", "calls.append((method, is_valid))\n", "@api.public...\n", "self.response.write(self.generate_xsrf_token({'some': 'data'}))\n", "@api.public...\n", "record(self, 'POST')\n", "@api.public...\n", "record(self, 'PUT')\n", "@api.public...\n", "record(self, 'DELETE')\n", "if xsrf_token_enforce_on is not None:\n", "Handler.xsrf_token_enforce_on = xsrf_token_enforce_on\n", "if xsrf_token_header is not None:\n", "Handler.xsrf_token_header = xsrf_token_header\n", "if xsrf_token_request_param is not None:\n", "Handler.xsrf_token_request_param = xsrf_token_request_param\n", "app = self.make_test_app('/request', Handler)\n", "return app, calls\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "FunctionDef'", "Assign'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def __init__(self, VAR_10, VAR_11=None):...\n", "if VAR_11 is None:\n", "VAR_11 = 'http://localhost:8888/'\n", "self.browser = VAR_10\n", "self.base_url = VAR_11\n", "self.outcome = None\n", "self.start_time = None\n", "self.stop_time = None\n", "self.duration = None\n", "self.exception_data = None\n", "self.url = None\n", "self.data = None\n", "self.files = None\n", "self.status_code = None\n", "self.response = None\n", "self.res_data = None\n", "self.redirected_to = None\n" ]
[ "def __init__(self, browser, base_url=None):...\n", "if base_url is None:\n", "base_url = 'http://localhost:8888/'\n", "self.browser = browser\n", "self.base_url = base_url\n", "self.outcome = None\n", "self.start_time = None\n", "self.stop_time = None\n", "self.duration = None\n", "self.exception_data = None\n", "self.url = None\n", "self.data = None\n", "self.files = None\n", "self.status_code = None\n", "self.response = None\n", "self.res_data = None\n", "self.redirected_to = None\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_8(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_28 = self.add_argument_group('ParlAI Model Arguments')\n", "VAR_28.add_argument('-m', '--model', default=None, help=\n 'the model class name. can match parlai/agents/<model> for agents in that directory, or can provide a fully specified module for `from X import Y` via `-m X:Y` (e.g. `-m parlai.agents.seq2seq.seq2seq:Seq2SeqAgent`)'\n )\n", "VAR_28.add_argument('-mf', '--model-file', default=None, help=\n 'model file name for loading and saving models')\n", "VAR_28.add_argument('--dict-class', help=\n 'the class of the dictionary agent uses')\n" ]
[ "def add_model_args(self):...\n", "\"\"\"docstring\"\"\"\n", "model_args = self.add_argument_group('ParlAI Model Arguments')\n", "model_args.add_argument('-m', '--model', default=None, help=\n 'the model class name. can match parlai/agents/<model> for agents in that directory, or can provide a fully specified module for `from X import Y` via `-m X:Y` (e.g. `-m parlai.agents.seq2seq.seq2seq:Seq2SeqAgent`)'\n )\n", "model_args.add_argument('-mf', '--model-file', default=None, help=\n 'model file name for loading and saving models')\n", "model_args.add_argument('--dict-class', help=\n 'the class of the dictionary agent uses')\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "import helpers\n", "import ctypes\n", "from symbolicinput import SymbolicInput\n", "from math import copysign\n", "def FUNC_0(VAR_0, VAR_1, VAR_2, VAR_3, VAR_4):...\n", "VAR_8 = VAR_1.pop()\n", "VAR_21 = -1, False\n", "if VAR_0 == 'JUMP':\n", "return FUNC_3(VAR_8, VAR_2)\n", "if VAR_0 == 'JUMPI':\n", "VAR_28 = VAR_1.pop()\n", "return VAR_21\n", "if VAR_28 > 0:\n", "return FUNC_3(VAR_8, VAR_2)\n", "if VAR_28 < 0:\n", "return FUNC_1(VAR_28, VAR_8, VAR_3, VAR_2, VAR_4)\n" ]
[ "import helpers\n", "import ctypes\n", "from symbolicinput import SymbolicInput\n", "from math import copysign\n", "def handleJumpOps(op, stack, items, symbols, symId):...\n", "adr = stack.pop()\n", "out = -1, False\n", "if op == 'JUMP':\n", "return jumpToLoc(adr, items)\n", "if op == 'JUMPI':\n", "cond = stack.pop()\n", "return out\n", "if cond > 0:\n", "return jumpToLoc(adr, items)\n", "if cond < 0:\n", "return symbAdrJump(cond, adr, symbols, items, symId)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "Condition", "Return'", "Condition", "Assign'", "Return'", "Condition", "Return'", "Condition", "Return'" ]
[ "def FUNC_30(self, VAR_21, VAR_23=True):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_21 == '/':\n", "VAR_57 = {VAR_2: VAR_12, VAR_3: 0, VAR_4: 0, VAR_5: 4096, VAR_6: 16877,\n VAR_7: time.time()}\n", "VAR_57 = self.getfile(VAR_21, VAR_23=follow_symlinks)\n", "if VAR_57 == False:\n", "return CLASS_3(VAR_57[VAR_6], 0, 0, 1, VAR_57[VAR_3], VAR_57[VAR_4], VAR_57\n [VAR_5], VAR_57[VAR_7], VAR_57[VAR_7], VAR_57[VAR_7])\n" ]
[ "def stat(self, path, follow_symlinks=True):...\n", "\"\"\"docstring\"\"\"\n", "if path == '/':\n", "p = {A_TYPE: T_DIR, A_UID: 0, A_GID: 0, A_SIZE: 4096, A_MODE: 16877,\n A_CTIME: time.time()}\n", "p = self.getfile(path, follow_symlinks=follow_symlinks)\n", "if p == False:\n", "return _statobj(p[A_MODE], 0, 0, 1, p[A_UID], p[A_GID], p[A_SIZE], p[\n A_CTIME], p[A_CTIME], p[A_CTIME])\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Assign'", "Condition", "Return'" ]
[ "@app.route('/people/', methods=['POST'])...\n", "if not current_user.editor:\n", "return redirect(url_for('error403'))\n", "VAR_2 = NameForm(request.form)\n", "if not VAR_2.validate():\n", "return render_template('people/new.html', VAR_2=form)\n", "VAR_3 = User(VAR_2.name.data, '', '')\n", "db.session().add(VAR_3)\n", "db.session().commit()\n", "VAR_3.add_name(VAR_2.name.data)\n", "return redirect(url_for('people_index'))\n" ]
[ "@app.route('/people/', methods=['POST'])...\n", "if not current_user.editor:\n", "return redirect(url_for('error403'))\n", "form = NameForm(request.form)\n", "if not form.validate():\n", "return render_template('people/new.html', form=form)\n", "u = User(form.name.data, '', '')\n", "db.session().add(u)\n", "db.session().commit()\n", "u.add_name(form.name.data)\n", "return redirect(url_for('people_index'))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Assign'", "Condition", "Return'", "Assign'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def __init__(self, *VAR_4, **VAR_5):...\n", "super(CLASS_0, self).__init__(*VAR_4, **kwargs)\n", "self.configuration.append_config_values(VAR_1)\n", "self._group_ip = None\n", "self.sshpool = None\n" ]
[ "def __init__(self, *args, **kwargs):...\n", "super(DellEQLSanISCSIDriver, self).__init__(*args, **kwargs)\n", "self.configuration.append_config_values(eqlx_opts)\n", "self._group_ip = None\n", "self.sshpool = None\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Assign'", "Assign'" ]
[ "def FUNC_35(VAR_29):...\n", "VAR_40 = VAR_29.group('name')\n", "VAR_16 = VAR_11[VAR_40]\n", "if VAR_15:\n", "if VAR_13 and VAR_16 == VAR_14:\n", "return '{{{}}}'.format(VAR_40)\n", "if VAR_12:\n", "return str(VAR_16)\n", "return VAR_14\n" ]
[ "def format_match(match):...\n", "name = match.group('name')\n", "value = wildcards[name]\n", "if keep_dynamic:\n", "if fail_dynamic and value == dynamic_fill:\n", "return '{{{}}}'.format(name)\n", "if fill_missing:\n", "return str(value)\n", "return dynamic_fill\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Condition", "Return'", "Condition", "Return'", "Return'" ]