lines (sequence, lengths 1–444) | raw_lines (sequence, lengths 1–444) | label (sequence, lengths 1–444) | type (sequence, lengths 1–444) |
---|---|---|---|
[
"\"\"\"string\"\"\"\n",
"import socket\n",
"import MySQLdb\n",
"import sshtunnel\n",
"from photogpsbot import log\n",
"import config\n",
"\"\"\"string\"\"\"\n",
"VAR_0 = None\n",
"VAR_1 = None\n",
"VAR_2 = False\n",
"def FUNC_0(self):...\n",
"\"\"\"docstring\"\"\"\n",
"log.debug(\n 'Establishing SSH tunnel to the server where the database is located...')\n",
"sshtunnel.SSH_TIMEOUT = 5.0\n",
"sshtunnel.TUNNEL_TIMEOUT = 5.0\n",
"self.tunnel = sshtunnel.SSHTunnelForwarder(ssh_address_or_host=config.\n SERVER_ADDRESS, ssh_username=config.SSH_USER, ssh_password=config.\n SSH_PASSWD, ssh_port=22, remote_bind_address=('127.0.0.1', 3306))\n",
"self.tunnel.start()\n",
"self.tunnel_opened = True\n",
"log.debug('SSH tunnel has been established.')\n",
"def FUNC_1(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if socket.gethostname() == config.PROD_HOST_NAME:\n",
"log.info('Connecting to the local database...')\n",
"log.info('Connecting to the database via SSH...')\n",
"VAR_6 = 3306\n",
"if not self.tunnel_opened:\n",
"self.conn = MySQLdb.connect(host='127.0.0.1', user=config.DB_USER, password\n =config.DB_PASSWD, VAR_6=port, database=config.DB_NAME, charset='utf8')\n",
"self._open_ssh_tunnel()\n",
"VAR_6 = self.tunnel.local_bind_port\n",
"log.info('Connected to the database.')\n",
"def FUNC_2(self, VAR_3, VAR_4=None, VAR_5=0):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.conn or not self.conn.open:\n",
"self.connect()\n",
"VAR_7 = self.conn.cursor()\n",
"if e.args[0] in [2006, 2013]:\n",
"return VAR_7\n",
"VAR_7.execute(VAR_3, VAR_4)\n",
"log.info(e)\n",
"log.error(e)\n",
"def FUNC_3(self, VAR_3):...\n",
"self.connect()\n",
"log.error(e)\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_5 > 3:\n",
"self.execute_query(VAR_3)\n",
"log.errror(e)\n",
"def FUNC_4(self):...\n",
"log.error(e)\n",
"VAR_5 += 1\n",
"self.conn.commit()\n",
"\"\"\"docstring\"\"\"\n",
"log.warning('Ran out of limit of trials...')\n",
"log.warning(e)\n",
"if self.conn:\n",
"log.info('Trying execute the query again...')\n",
"self.conn.close()\n",
"if self.tunnel:\n",
"return self.execute_query(VAR_3, VAR_4, VAR_5)\n",
"log.info('Connection to the database has been closed.')\n",
"self.tunnel.stop()\n",
"self.tunnel_opened = False\n",
"log.info('SSH tunnel has been closed.')\n",
"return True\n"
] | [
"\"\"\"\nModule that provides a way to connect to MySQL and reconnect each time\nconnection is lost. It also can automatically set up SSH tunnel thanks to\nsshtunnel module\n\nOriginal way to do it was described at\nhttps://help.pythonanywhere.com/pages/ManagingDatabaseConnections/\n\"\"\"\n",
"import socket\n",
"import MySQLdb\n",
"import sshtunnel\n",
"from photogpsbot import log\n",
"import config\n",
"\"\"\"\n Class that provides method to execute queries and handles connection to\n the MySQL database directly and via ssh if necessary\n \"\"\"\n",
"conn = None\n",
"tunnel = None\n",
"tunnel_opened = False\n",
"def _open_ssh_tunnel(self):...\n",
"\"\"\"docstring\"\"\"\n",
"log.debug(\n 'Establishing SSH tunnel to the server where the database is located...')\n",
"sshtunnel.SSH_TIMEOUT = 5.0\n",
"sshtunnel.TUNNEL_TIMEOUT = 5.0\n",
"self.tunnel = sshtunnel.SSHTunnelForwarder(ssh_address_or_host=config.\n SERVER_ADDRESS, ssh_username=config.SSH_USER, ssh_password=config.\n SSH_PASSWD, ssh_port=22, remote_bind_address=('127.0.0.1', 3306))\n",
"self.tunnel.start()\n",
"self.tunnel_opened = True\n",
"log.debug('SSH tunnel has been established.')\n",
"def connect(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if socket.gethostname() == config.PROD_HOST_NAME:\n",
"log.info('Connecting to the local database...')\n",
"log.info('Connecting to the database via SSH...')\n",
"port = 3306\n",
"if not self.tunnel_opened:\n",
"self.conn = MySQLdb.connect(host='127.0.0.1', user=config.DB_USER, password\n =config.DB_PASSWD, port=port, database=config.DB_NAME, charset='utf8')\n",
"self._open_ssh_tunnel()\n",
"port = self.tunnel.local_bind_port\n",
"log.info('Connected to the database.')\n",
"def execute_query(self, query, parameters=None, trials=0):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.conn or not self.conn.open:\n",
"self.connect()\n",
"cursor = self.conn.cursor()\n",
"if e.args[0] in [2006, 2013]:\n",
"return cursor\n",
"cursor.execute(query, parameters)\n",
"log.info(e)\n",
"log.error(e)\n",
"def add(self, query):...\n",
"self.connect()\n",
"log.error(e)\n",
"\"\"\"docstring\"\"\"\n",
"if trials > 3:\n",
"self.execute_query(query)\n",
"log.errror(e)\n",
"def disconnect(self):...\n",
"log.error(e)\n",
"trials += 1\n",
"self.conn.commit()\n",
"\"\"\"docstring\"\"\"\n",
"log.warning('Ran out of limit of trials...')\n",
"log.warning(e)\n",
"if self.conn:\n",
"log.info('Trying execute the query again...')\n",
"self.conn.close()\n",
"if self.tunnel:\n",
"return self.execute_query(query, parameters, trials)\n",
"log.info('Connection to the database has been closed.')\n",
"self.tunnel.stop()\n",
"self.tunnel_opened = False\n",
"log.info('SSH tunnel has been closed.')\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"Import'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Expr'",
"Expr'",
"Docstring",
"Condition",
"Expr'",
"Expr'",
"FunctionDef'",
"Expr'",
"AugAssign'",
"Expr'",
"Docstring",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"@Json...\n",
"if VAR_2._chk_error(errors.NO_TITLE):\n",
"VAR_2._chk_error(errors.TITLE_TOO_LONG)\n",
"VAR_2._chk_errors((errors.NO_LOCATION, errors.NO_DESCRIPTION, errors.\n INVALID_DATE, errors.NO_DATE))\n",
"VAR_2._focus('title')\n",
"if VAR_2.error:\n",
"return\n",
"VAR_0.title = VAR_4\n",
"VAR_0.description = VAR_8\n",
"VAR_0.location = VAR_9\n",
"VAR_0.latitude = VAR_10\n",
"VAR_0.longitude = VAR_11\n",
"VAR_0.timestamp = VAR_12 / 1000\n",
"VAR_0.tzoffset = VAR_13\n",
"g.rendercache.invalidate_key_group(Meetup.group_cache_key())\n",
"VAR_0._commit()\n",
"VAR_18 = Link._byID(VAR_0.assoc_link)\n",
"VAR_18._load()\n",
"VAR_19 = VAR_18.url\n",
"VAR_18.title = FUNC_1(VAR_0)\n",
"VAR_18.article = FUNC_0(VAR_0)\n",
"VAR_18._commit()\n",
"VAR_18.update_url_cache(VAR_19)\n",
"VAR_2._redirect(url_for(action='show', id=meetup._id36))\n"
] | [
"@Json...\n",
"if res._chk_error(errors.NO_TITLE):\n",
"res._chk_error(errors.TITLE_TOO_LONG)\n",
"res._chk_errors((errors.NO_LOCATION, errors.NO_DESCRIPTION, errors.\n INVALID_DATE, errors.NO_DATE))\n",
"res._focus('title')\n",
"if res.error:\n",
"return\n",
"meetup.title = title\n",
"meetup.description = description\n",
"meetup.location = location\n",
"meetup.latitude = latitude\n",
"meetup.longitude = longitude\n",
"meetup.timestamp = timestamp / 1000\n",
"meetup.tzoffset = tzoffset\n",
"g.rendercache.invalidate_key_group(Meetup.group_cache_key())\n",
"meetup._commit()\n",
"article = Link._byID(meetup.assoc_link)\n",
"article._load()\n",
"article_old_url = article.url\n",
"article.title = meetup_article_title(meetup)\n",
"article.article = meetup_article_text(meetup)\n",
"article._commit()\n",
"article.update_url_cache(article_old_url)\n",
"res._redirect(url_for(action='show', id=meetup._id36))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"VAR_16 = []\n",
"VAR_17 = strftime('%Y-%m-%d %H:%M:%S', localtime())\n",
"VAR_16.append('%s on %s' % (CLASS_0.MON_CONF_GEN_COMMENT, VAR_17))\n",
"if self.etag:\n",
"VAR_16.append('%s%s' % (CLASS_0.ETAG_COMMENT, self.etag))\n",
"if self.mtime:\n",
"VAR_16.append('%s%d' % (CLASS_0.MTIME_COMMMENT, self.mtime))\n",
"return VAR_16\n"
] | [
"def serialize(self):...\n",
"lines = []\n",
"time_string = strftime('%Y-%m-%d %H:%M:%S', localtime())\n",
"lines.append('%s on %s' % (Header.MON_CONF_GEN_COMMENT, time_string))\n",
"if self.etag:\n",
"lines.append('%s%s' % (Header.ETAG_COMMENT, self.etag))\n",
"if self.mtime:\n",
"lines.append('%s%d' % (Header.MTIME_COMMMENT, self.mtime))\n",
"return lines\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Return'"
] |
[
"@app.route('/people/<user_id>/edit', methods=['GET'])...\n",
"VAR_2 = NameForm()\n",
"VAR_4 = ''\n",
"VAR_5 = ''\n",
"VAR_6 = User.query.filter_by(id=user_id).first()\n",
"if VAR_6.username != '':\n",
"VAR_5 = VAR_6.username\n",
"VAR_4 = VAR_6.name\n",
"VAR_7 = list(map(lambda VAR_4: {'name': VAR_4.name, 'id': VAR_4.id}, VAR_6.\n names))\n",
"VAR_8 = {'id': VAR_0, 'name': VAR_4, 'username': VAR_5, 'names': VAR_7}\n",
"return render_template('/people/edit.html', VAR_8=person, VAR_2=form)\n"
] | [
"@app.route('/people/<user_id>/edit', methods=['GET'])...\n",
"form = NameForm()\n",
"name = ''\n",
"username = ''\n",
"prsn = User.query.filter_by(id=user_id).first()\n",
"if prsn.username != '':\n",
"username = prsn.username\n",
"name = prsn.name\n",
"names = list(map(lambda name: {'name': name.name, 'id': name.id}, prsn.names))\n",
"person = {'id': user_id, 'name': name, 'username': username, 'names': names}\n",
"return render_template('/people/edit.html', person=person, form=form)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@login_required...\n",
"VAR_10 = VAR_2.GET['term']\n",
"VAR_30 = FUNC_26(Keyword, VAR_10)\n",
"VAR_30.extend(FUNC_29(VAR_10))\n",
"return JsonResponse({'err': 'nil', 'results': VAR_30})\n"
] | [
"@login_required...\n",
"term = request.GET['term']\n",
"results = _get_previously_used(Keyword, term)\n",
"results.extend(_get_fast_results(term))\n",
"return JsonResponse({'err': 'nil', 'results': results})\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_1(VAR_1):...\n",
"for VAR_15, deps in VAR_1.items():\n",
"print(VAR_15, '->', deps)\n"
] | [
"def print_deps(graph):...\n",
"for c, deps in graph.items():\n",
"print(c, '->', deps)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"For",
"Expr'"
] |
[
"def FUNC_0(self, VAR_3, VAR_4, VAR_5, VAR_6, VAR_7, VAR_8):...\n",
"VAR_22 = 'string' + \"({}, {}, {}, '{}', '{}', '{}', {}, {})\".format(json.\n dumps(VAR_3), json.dumps(VAR_4), json.dumps(VAR_5), VAR_6, datetime.now\n (), VAR_7, json.dumps(VAR_8), 0)\n",
"self.cur.execute(VAR_22)\n",
"self.conn.commit()\n",
"return self.cur.lastrowid\n"
] | [
"def insert_query_log(self, lhash, text, search, qhash, ip, browser):...\n",
"sql = (\n 'INSERT INTO log_query (log_hash, query_text, query_search, query_hash, query_time, client_ip, client_browser, clicked) VALUES'\n + \"({}, {}, {}, '{}', '{}', '{}', {}, {})\".format(json.dumps(lhash),\n json.dumps(text), json.dumps(search), qhash, datetime.now(), ip, json.\n dumps(browser), 0))\n",
"self.cur.execute(sql)\n",
"self.conn.commit()\n",
"return self.cur.lastrowid\n"
] | [
0,
0,
4,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_15(VAR_16):...\n",
"\"\"\"docstring\"\"\"\n",
"if FUNC_12(VAR_16, 'temp'):\n",
"return VAR_19(VAR_16, 'protected')\n"
] | [
"def protected(value):...\n",
"\"\"\"docstring\"\"\"\n",
"if is_flagged(value, 'temp'):\n",
"return flag(value, 'protected')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'"
] |
[
"def FUNC_8(self, VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"return None\n"
] | [
"def set_hostname(self, hostname):...\n",
"\"\"\"docstring\"\"\"\n",
"return None\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._raylet_socket_name\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._raylet_socket_name\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_8(self, VAR_9, VAR_7=False, VAR_10=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_29 = self.output if VAR_7 else self.input\n",
"if isinstance(VAR_9, str):\n",
"if isinstance(VAR_9, _IOFile):\n",
"if callable(VAR_9):\n",
"self.dependencies[VAR_9] = VAR_9.rule\n",
"VAR_44 = IOFile(VAR_9, VAR_16=self)\n",
"if VAR_7:\n",
"VAR_48 = len(VAR_29)\n",
"if is_flagged(VAR_9, 'temp'):\n",
"VAR_29.append(VAR_9)\n",
"for VAR_51 in VAR_9:\n",
"if not VAR_7:\n",
"if is_flagged(VAR_9, 'protected'):\n",
"if VAR_10:\n",
"self._set_inoutput_item(VAR_51, VAR_7=output)\n",
"if VAR_10:\n",
"self.temp_output.add(VAR_44)\n",
"if not VAR_7:\n",
"if is_flagged(VAR_9, 'touch'):\n",
"VAR_29.add_name(VAR_10)\n",
"VAR_29.set_name(VAR_10, VAR_48, end=len(inoutput))\n",
"self.protected_output.add(VAR_44)\n",
"if not VAR_7:\n",
"if is_flagged(VAR_9, 'dynamic'):\n",
"self.touch_output.add(VAR_44)\n",
"if VAR_7:\n",
"if is_flagged(VAR_9, 'subworkflow'):\n",
"self.dynamic_output.add(VAR_44)\n",
"self.dynamic_input.add(VAR_44)\n",
"if VAR_7:\n",
"VAR_29.append(VAR_44)\n",
"self.subworkflow_input[VAR_44] = VAR_9.flags['subworkflow']\n",
"if VAR_10:\n",
"VAR_29.add_name(VAR_10)\n"
] | [
"def _set_inoutput_item(self, item, output=False, name=None):...\n",
"\"\"\"docstring\"\"\"\n",
"inoutput = self.output if output else self.input\n",
"if isinstance(item, str):\n",
"if isinstance(item, _IOFile):\n",
"if callable(item):\n",
"self.dependencies[item] = item.rule\n",
"_item = IOFile(item, rule=self)\n",
"if output:\n",
"start = len(inoutput)\n",
"if is_flagged(item, 'temp'):\n",
"inoutput.append(item)\n",
"for i in item:\n",
"if not output:\n",
"if is_flagged(item, 'protected'):\n",
"if name:\n",
"self._set_inoutput_item(i, output=output)\n",
"if name:\n",
"self.temp_output.add(_item)\n",
"if not output:\n",
"if is_flagged(item, 'touch'):\n",
"inoutput.add_name(name)\n",
"inoutput.set_name(name, start, end=len(inoutput))\n",
"self.protected_output.add(_item)\n",
"if not output:\n",
"if is_flagged(item, 'dynamic'):\n",
"self.touch_output.add(_item)\n",
"if output:\n",
"if is_flagged(item, 'subworkflow'):\n",
"self.dynamic_output.add(_item)\n",
"self.dynamic_input.add(_item)\n",
"if output:\n",
"inoutput.append(_item)\n",
"self.subworkflow_input[_item] = item.flags['subworkflow']\n",
"if name:\n",
"inoutput.add_name(name)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"For",
"Condition",
"Condition",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_3(self, VAR_15, VAR_16):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_20 = gcp.GceContractBuilder(self.gce_observer)\n",
"VAR_22 = VAR_20.new_clause_builder('Instances Deleted', retryable_for_secs=\n 15, strict=True).list_resources('instances')\n",
"for name in VAR_15:\n",
"VAR_40 = jc.PathContainsPredicate('name', name)\n",
"VAR_21 = self.agent.type_to_payload('terminateInstances', {'instanceIds':\n VAR_15, 'zone': VAR_16, 'credentials': self.bindings['GCE_CREDENTIALS']})\n",
"VAR_41 = jc.PathEqPredicate('status', 'STOPPING')\n",
"return st.OperationContract(self.new_post_operation(title=\n 'terminate_instances', data=payload, VAR_29='gce/ops'), contract=\n builder.build())\n",
"VAR_22.add_mapped_constraint(jc.IF(VAR_40, VAR_41))\n"
] | [
"def terminate_instances(self, names, zone):...\n",
"\"\"\"docstring\"\"\"\n",
"builder = gcp.GceContractBuilder(self.gce_observer)\n",
"clause = builder.new_clause_builder('Instances Deleted', retryable_for_secs\n =15, strict=True).list_resources('instances')\n",
"for name in names:\n",
"name_matches_pred = jc.PathContainsPredicate('name', name)\n",
"payload = self.agent.type_to_payload('terminateInstances', {'instanceIds':\n names, 'zone': zone, 'credentials': self.bindings['GCE_CREDENTIALS']})\n",
"is_stopping_pred = jc.PathEqPredicate('status', 'STOPPING')\n",
"return st.OperationContract(self.new_post_operation(title=\n 'terminate_instances', data=payload, path='gce/ops'), contract=builder.\n build())\n",
"clause.add_mapped_constraint(jc.IF(name_matches_pred, is_stopping_pred))\n"
] | [
0,
0,
0,
0,
0,
1,
0,
1,
0,
1
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_0(VAR_0):...\n",
"return binascii.b2a_hex(base64.b64encode(VAR_0.encode('utf-8'))).decode('utf-8'\n )\n"
] | [
"def encode_str_to_hexed_b64(data):...\n",
"return binascii.b2a_hex(base64.b64encode(data.encode('utf-8'))).decode('utf-8')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_7(self):...\n",
"VAR_14 = self.client.get('/get_data/')\n",
"self.assertEqual(VAR_14.status_code, 200)\n",
"self.assertContains(VAR_14, 'Download Uncurated Chemicals')\n",
"VAR_15 = RawChem.objects.filter(dsstox_id__isnull=True).first()\n",
"VAR_14 = self.client.get('/dl_raw_chems/')\n",
"VAR_16 = f'%s,%s,%s,%s\\r\\n' % (VAR_15.id, VAR_15.raw_cas, VAR_15.\n raw_chem_name, VAR_15.rid if VAR_15.rid else '')\n",
"VAR_16 = bytes(VAR_16, 'utf-8')\n",
"self.assertIn(VAR_16, VAR_14.content, 'The non-curated row should appear')\n",
"VAR_16 = f'%s,%s,%s,%s,%s\\r\\n' % (VAR_15.extracted_text.data_document.\n data_group.id, VAR_15.id, VAR_15.raw_cas, VAR_15.raw_chem_name, VAR_15.\n rid if VAR_15.rid else '')\n",
"VAR_16 = bytes(VAR_16, 'utf-8')\n",
"self.assertIn(VAR_16, VAR_14.content,\n 'The data group id should be in the output')\n",
"VAR_15 = RawChem.objects.filter(dsstox_id__isnull=False).first()\n",
"VAR_16 = f'%s,%s,%s,%s\\r\\n' % (VAR_15.id, VAR_15.raw_cas, VAR_15.\n raw_chem_name, VAR_15.sid if VAR_15.sid else '')\n",
"VAR_16 = bytes(VAR_16, 'utf-8')\n",
"self.assertNotIn(VAR_16, VAR_14.content, 'The curated row should not appear')\n"
] | [
"def test_download_raw_chem_button(self):...\n",
"response = self.client.get('/get_data/')\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertContains(response, 'Download Uncurated Chemicals')\n",
"rc = RawChem.objects.filter(dsstox_id__isnull=True).first()\n",
"response = self.client.get('/dl_raw_chems/')\n",
"rc_row = f'%s,%s,%s,%s\\r\\n' % (rc.id, rc.raw_cas, rc.raw_chem_name, rc.rid if\n rc.rid else '')\n",
"rc_row = bytes(rc_row, 'utf-8')\n",
"self.assertIn(rc_row, response.content, 'The non-curated row should appear')\n",
"rc_row = f'%s,%s,%s,%s,%s\\r\\n' % (rc.extracted_text.data_document.\n data_group.id, rc.id, rc.raw_cas, rc.raw_chem_name, rc.rid if rc.rid else\n '')\n",
"rc_row = bytes(rc_row, 'utf-8')\n",
"self.assertIn(rc_row, response.content,\n 'The data group id should be in the output')\n",
"rc = RawChem.objects.filter(dsstox_id__isnull=False).first()\n",
"rc_row = f'%s,%s,%s,%s\\r\\n' % (rc.id, rc.raw_cas, rc.raw_chem_name, rc.sid if\n rc.sid else '')\n",
"rc_row = bytes(rc_row, 'utf-8')\n",
"self.assertNotIn(rc_row, response.content, 'The curated row should not appear')\n"
] | [
0,
0,
0,
6,
6,
6,
6,
6,
6,
6,
0,
6,
6,
6,
0,
6
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_1=None, VAR_2=8888, VAR_3=None, VAR_4=1.0):...\n",
"super(CLASS_0, self).__init__()\n",
"self._addr = Address.from_raw(VAR_1, defaultport=port)\n",
"self._connecttimeout = VAR_4\n",
"self._adapter = None\n",
"self._session = None\n",
"self._breakpoints = VAR_3\n"
] | [
"def __init__(self, addr=None, port=8888, breakpoints=None, connecttimeout=1.0):...\n",
"super(_LifecycleClient, self).__init__()\n",
"self._addr = Address.from_raw(addr, defaultport=port)\n",
"self._connecttimeout = connecttimeout\n",
"self._adapter = None\n",
"self._session = None\n",
"self._breakpoints = breakpoints\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_7(VAR_7, VAR_8):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_7 is None:\n",
"VAR_24 = FUNC_10(VAR_7, VAR_8, VAR_0)\n",
"return VAR_24\n"
] | [
"def decodeRequestToken(token, client_pub_key):...\n",
"\"\"\"docstring\"\"\"\n",
"if token is None:\n",
"decoded_token_data = unpackJWT(token, client_pub_key, SERVER_JWT_PRIVATE_KEY)\n",
"return decoded_token_data\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_18(self):...\n",
"self.run_test_case(self.scenario.delete_load_balancer())\n"
] | [
"def test_z_delete_load_balancer(self):...\n",
"self.run_test_case(self.scenario.delete_load_balancer())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_10(self):...\n",
"self.run_test_case(self.scenario.upsert_load_balancer())\n"
] | [
"def test_b_upsert_load_balancer(self):...\n",
"self.run_test_case(self.scenario.upsert_load_balancer())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"if self.get_dg_folder() != 'no_folder_found':\n",
"return self.get_dg_folder()\n",
"return False\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"if self.get_dg_folder() != 'no_folder_found':\n",
"return self.get_dg_folder()\n",
"return False\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_1(self, VAR_9, VAR_10, VAR_11, VAR_12, VAR_13, VAR_14):...\n",
"VAR_22 = 'string' + \"('%s', '%s', '%s', '%s', '%s', '%s', '%s')\" % (VAR_9,\n datetime.now(), VAR_10, VAR_11, VAR_12, VAR_13, VAR_14)\n",
"self.cur.execute(VAR_22)\n",
"self.conn.commit()\n",
"return self.cur.lastrowid\n"
] | [
"def insert_result_log(self, qid, hoax, fact, unknown, unrelated, conclusion):...\n",
"sql = (\n 'INSERT INTO log_result (id_query, finished_at, hoax_score, fact_score, unknown_score, unrelated_score, conclusion) VALUES'\n + \"('%s', '%s', '%s', '%s', '%s', '%s', '%s')\" % (qid, datetime.now(),\n hoax, fact, unknown, unrelated, conclusion))\n",
"self.cur.execute(sql)\n",
"self.conn.commit()\n",
"return self.cur.lastrowid\n"
] | [
0,
4,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"super(CLASS_0, self).setUp()\n",
"self.mock(net, 'url_open', self._url_open)\n",
"self.mock(net, 'url_read_json', self._url_read_json)\n",
"self.mock(net, 'sleep_before_retry', lambda *VAR_10: None)\n",
"self._lock = threading.Lock()\n",
"self._requests = []\n"
] | [
"def setUp(self):...\n",
"super(TestCase, self).setUp()\n",
"self.mock(net, 'url_open', self._url_open)\n",
"self.mock(net, 'url_read_json', self._url_read_json)\n",
"self.mock(net, 'sleep_before_retry', lambda *_: None)\n",
"self._lock = threading.Lock()\n",
"self._requests = []\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"\"\"\"docstring\"\"\"\n",
"self.common.client_login()\n",
"self.common.terminate_connection(VAR_7, VAR_10['host'], VAR_10['initiator'])\n",
"self.common.client_logout()\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"\"\"\"docstring\"\"\"\n",
"self.common.client_login()\n",
"self.common.terminate_connection(volume, connector['host'], connector[\n 'initiator'])\n",
"self.common.client_logout()\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(self):...\n",
"VAR_8 = 'test_foo.py'\n",
"VAR_7 = BokChoyTestSuite('', test_spec=spec)\n",
"VAR_1 = 'tests/{}'.format(VAR_8)\n",
"self.assertEqual(VAR_7.cmd, self._expected_command(VAR_1=name))\n"
] | [
"def test_suite_spec(self):...\n",
"spec = 'test_foo.py'\n",
"suite = BokChoyTestSuite('', test_spec=spec)\n",
"name = 'tests/{}'.format(spec)\n",
"self.assertEqual(suite.cmd, self._expected_command(name=name))\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_24(self, VAR_21):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_57 = self.getfile(VAR_21, VAR_23=False)\n",
"if VAR_57 == False:\n",
"self.get_path(os.path.dirname(VAR_21)).remove(VAR_57)\n",
"return\n"
] | [
"def remove(self, path):...\n",
"\"\"\"docstring\"\"\"\n",
"p = self.getfile(path, follow_symlinks=False)\n",
"if p == False:\n",
"self.get_path(os.path.dirname(path)).remove(p)\n",
"return\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"VAR_14 = self.get_secure_cookie('user')\n",
"if VAR_14 == None:\n",
"self.render('index.html')\n",
"self.render('postlogin.html')\n",
"return\n",
"return\n"
] | [
"def get(self):...\n",
"cookie_status = self.get_secure_cookie('user')\n",
"if cookie_status == None:\n",
"self.render('index.html')\n",
"self.render('postlogin.html')\n",
"return\n",
"return\n"
] | [
0,
6,
6,
6,
6,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'",
"Return'"
] |
[
"def __init__(self, VAR_11):...\n",
"self.content = VAR_11\n"
] | [
"def __init__(self, content):...\n",
"self.content = content\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_8(VAR_2):...\n",
"return FUNC_9(VAR_2, '')\n"
] | [
"def get_all_full(db):...\n",
"return get_full_sql(db, '')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"super(CLASS_0, VAR_0).initArgumentParser(VAR_2, VAR_3=defaults)\n",
"VAR_3 = VAR_3 or {}\n",
"VAR_2.add_argument('--test_component_detail', default='fe', help=\n 'Refinement for component name to create.')\n"
] | [
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"super(GoogleSmokeTestScenario, cls).initArgumentParser(parser, defaults=\n defaults)\n",
"defaults = defaults or {}\n",
"parser.add_argument('--test_component_detail', default='fe', help=\n 'Refinement for component name to create.')\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_0(self, VAR_6: str):...\n",
""
] | [
"def get(self, survey_prefix: str):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"@mock.patch('requests.post', FUNC_0)...\n",
"VAR_4 = '/api/apps'\n",
"VAR_5 = self.client.post(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_6 = VAR_5.data['id']\n",
"VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_7 = {'image': 'autotest/example'}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_4 = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 1)\n",
"VAR_12 = VAR_5.data['results'][0]\n",
"self.assertEqual(VAR_12['type'], 'cmd')\n",
"self.assertEqual(VAR_12['num'], 1)\n",
"VAR_4 = '/api/apps'\n",
"VAR_5 = self.client.post(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_6 = VAR_5.data['id']\n",
"VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_7 = {'image': 'autotest/example', 'sha': 'a' * 40, 'dockerfile':\n 'FROM scratch'}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_4 = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 1)\n",
"VAR_12 = VAR_5.data['results'][0]\n",
"self.assertEqual(VAR_12['type'], 'cmd')\n",
"self.assertEqual(VAR_12['num'], 1)\n",
"VAR_4 = '/api/apps'\n",
"VAR_5 = self.client.post(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_6 = VAR_5.data['id']\n",
"VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_7 = {'image': 'autotest/example', 'sha': 'a' * 40, 'dockerfile':\n 'FROM scratch', 'procfile': {'worker': 'node worker.js'}}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_4 = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 1)\n",
"VAR_12 = VAR_5.data['results'][0]\n",
"self.assertEqual(VAR_12['type'], 'cmd')\n",
"self.assertEqual(VAR_12['num'], 1)\n",
"VAR_4 = '/api/apps'\n",
"VAR_5 = self.client.post(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_6 = VAR_5.data['id']\n",
"VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_7 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_4 = '/api/apps/{app_id}/containers/web'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 1)\n",
"VAR_12 = VAR_5.data['results'][0]\n",
"self.assertEqual(VAR_12['type'], 'web')\n",
"self.assertEqual(VAR_12['num'], 1)\n"
] | [
"@mock.patch('requests.post', mock_import_repository_task)...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example'}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"container = response.data['results'][0]\n",
"self.assertEqual(container['type'], 'cmd')\n",
"self.assertEqual(container['num'], 1)\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'dockerfile':\n 'FROM scratch'}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"container = response.data['results'][0]\n",
"self.assertEqual(container['type'], 'cmd')\n",
"self.assertEqual(container['num'], 1)\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'dockerfile':\n 'FROM scratch', 'procfile': {'worker': 'node worker.js'}}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"container = response.data['results'][0]\n",
"self.assertEqual(container['type'], 'cmd')\n",
"self.assertEqual(container['num'], 1)\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/containers/web'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"container = response.data['results'][0]\n",
"self.assertEqual(container['type'], 'web')\n",
"self.assertEqual(container['num'], 1)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_19(self):...\n",
"VAR_16 = self.mox.CreateMock(paramiko.SSHClient)\n",
"VAR_12 = self.mox.CreateMock(paramiko.Channel)\n",
"VAR_17 = self.mox.CreateMock(paramiko.Transport)\n",
"self.mox.StubOutWithMock(self.driver, '_get_output')\n",
"self.mox.StubOutWithMock(VAR_12, 'invoke_shell')\n",
"VAR_15 = ['NoError: test run']\n",
"VAR_16.get_transport().AndReturn(VAR_17)\n",
"VAR_17.open_session().AndReturn(VAR_12)\n",
"VAR_12.invoke_shell()\n",
"self.driver._get_output(VAR_12).AndReturn(VAR_15)\n",
"VAR_18 = 'this is dummy command'\n",
"VAR_12.send('stty columns 255' + '\\r')\n",
"self.driver._get_output(VAR_12).AndReturn(VAR_15)\n",
"VAR_12.send(VAR_18 + '\\r')\n",
"self.driver._get_output(VAR_12).AndReturn(VAR_15)\n",
"VAR_12.close()\n",
"self.mox.ReplayAll()\n",
"self.assertEqual(self.driver._ssh_execute(VAR_16, VAR_18), VAR_15)\n"
] | [
"def test_ssh_execute(self):...\n",
"ssh = self.mox.CreateMock(paramiko.SSHClient)\n",
"chan = self.mox.CreateMock(paramiko.Channel)\n",
"transport = self.mox.CreateMock(paramiko.Transport)\n",
"self.mox.StubOutWithMock(self.driver, '_get_output')\n",
"self.mox.StubOutWithMock(chan, 'invoke_shell')\n",
"expected_output = ['NoError: test run']\n",
"ssh.get_transport().AndReturn(transport)\n",
"transport.open_session().AndReturn(chan)\n",
"chan.invoke_shell()\n",
"self.driver._get_output(chan).AndReturn(expected_output)\n",
"cmd = 'this is dummy command'\n",
"chan.send('stty columns 255' + '\\r')\n",
"self.driver._get_output(chan).AndReturn(expected_output)\n",
"chan.send(cmd + '\\r')\n",
"self.driver._get_output(chan).AndReturn(expected_output)\n",
"chan.close()\n",
"self.mox.ReplayAll()\n",
"self.assertEqual(self.driver._ssh_execute(ssh, cmd), expected_output)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_31(self):...\n",
"self.compilation_ko('string')\n"
] | [
"def test_nonopened_tests_list(self):...\n",
"self.compilation_ko(\n \"\"\"\nif anyof header :is \"Sender\" \"[email protected]\",\n header :is \"Sender\" \"[email protected]\") {\n fileinto \"trash\";\n}\n\"\"\"\n )\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"@cached_property...\n",
"\"\"\"docstring\"\"\"\n",
"return [VAR_46 for VAR_46 in self.fields if not VAR_46.is_attribute]\n"
] | [
"@cached_property...\n",
"\"\"\"docstring\"\"\"\n",
"return [f for f in self.fields if not f.is_attribute]\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_10(self, VAR_14):...\n",
""
] | [
"def is_text(self, col_name):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_1(VAR_1, VAR_2, VAR_3):...\n",
"VAR_6 = (\n \"\"\"INSERT INTO dialogUser (dialog_id, user_id, permission)\nVALUES (%d, %d, %d)\"\"\"\n % VAR_2, VAR_1, VAR_3)\n",
"return {'status': 1}\n"
] | [
"def db_addUserInDialog(userID, dialogID, permission):...\n",
"sql = (\n \"\"\"INSERT INTO dialogUser (dialog_id, user_id, permission)\nVALUES (%d, %d, %d)\"\"\"\n % dialogID, userID, permission)\n",
"return {'status': 1}\n"
] | [
0,
4,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def __hash__(self):...\n",
"return self._hash\n"
] | [
"def __hash__(self):...\n",
"return self._hash\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_11(self, VAR_21):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_24 = self.getfile(VAR_21)\n",
"return False\n",
"return VAR_24[VAR_2] == VAR_13\n"
] | [
"def isfile(self, path):...\n",
"\"\"\"docstring\"\"\"\n",
"f = self.getfile(path)\n",
"return False\n",
"return f[A_TYPE] == T_FILE\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_3(self):...\n",
"super(CLASS_1, self).prepare()\n",
"self._ = {'id': '', 'title': '', 'content': '', 'is_private': False,\n 'group': '', 'tmpatts': [], 'atts': [], 'tags': [], 'alert': ''}\n"
] | [
"def prepare(self):...\n",
"super(EditAnnHandler, self).prepare()\n",
"self._ = {'id': '', 'title': '', 'content': '', 'is_private': False,\n 'group': '', 'tmpatts': [], 'atts': [], 'tags': [], 'alert': ''}\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'"
] |
[
"\"\"\"\nCreated on 6 Feb 2018\n\n@author: Teodor Gherasim Nistor\n\n\"\"\"\n",
"import os.path\n",
"import re\n",
"from beamr.lexers import imageLexer\n",
"from beamr.parsers import imageParser\n",
"from beamr.debug import debug, warn\n",
"def __init__(self, VAR_0):...\n",
"self.txt = VAR_0\n",
"def __str__(self):...\n",
"return self.txt\n"
] | [
"\"\"\"\nCreated on 6 Feb 2018\n\n@author: Teodor Gherasim Nistor\n\n\"\"\"\n",
"import os.path\n",
"import re\n",
"from beamr.lexers import imageLexer\n",
"from beamr.parsers import imageParser\n",
"from beamr.debug import debug, warn\n",
"def __init__(self, txt):...\n",
"self.txt = txt\n",
"def __str__(self):...\n",
"return self.txt\n"
] | [
0,
0,
0,
2,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_25(self):...\n",
"return self._picture\n"
] | [
"def picture(self):...\n",
"return self._picture\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@auth.autologin...\n",
"VAR_12 = logscraper._log_request_id(VAR_0)\n",
"if not VAR_12:\n",
"self.abort(404, detail='Request id was not found.')\n",
"self.response.write(template.render('ereporter2/request.html', {'request':\n VAR_12}))\n"
] | [
"@auth.autologin...\n",
"data = logscraper._log_request_id(request_id)\n",
"if not data:\n",
"self.abort(404, detail='Request id was not found.')\n",
"self.response.write(template.render('ereporter2/request.html', {'request':\n data}))\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"return self.groups\n"
] | [
"def get_groups(self):...\n",
"return self.groups\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_29(self, VAR_77):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.priority = VAR_77\n",
"return VAR_101\n"
] | [
"def priority(self, priority):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.priority = priority\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_6(self, VAR_10):...\n",
"return '%s%s' % (self.configuration.iscsi_target_prefix, VAR_10['name'])\n"
] | [
"def _build_iscsi_target_name(self, volume):...\n",
"return '%s%s' % (self.configuration.iscsi_target_prefix, volume['name'])\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_3(self, VAR_3):...\n",
"VAR_20 = VAR_3.value.upper()\n",
"if VAR_3.type == 'ID':\n",
"if not VAR_20 in self.spdx.licenses:\n",
"if VAR_3.type == 'EXC':\n",
"self.lastid = VAR_20\n",
"if VAR_20 not in self.spdx.exceptions:\n",
"if VAR_3.type != 'WITH':\n",
"if self.lastid not in self.spdx.exceptions[VAR_20]:\n",
"self.lastid = None\n",
"self.lastid = None\n"
] | [
"def validate(self, tok):...\n",
"id = tok.value.upper()\n",
"if tok.type == 'ID':\n",
"if not id in self.spdx.licenses:\n",
"if tok.type == 'EXC':\n",
"self.lastid = id\n",
"if id not in self.spdx.exceptions:\n",
"if tok.type != 'WITH':\n",
"if self.lastid not in self.spdx.exceptions[id]:\n",
"self.lastid = None\n",
"self.lastid = None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_1():...\n",
"print('-' * 70)\n"
] | [
"def divider():...\n",
"print('-' * 70)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = '/api/apps'\n",
"VAR_3 = self.client.post(VAR_2)\n",
"self.assertEqual(VAR_3.status_code, 201)\n",
"VAR_4 = VAR_3.data['id']\n",
"self.assertIn('structure', VAR_3.data)\n",
"self.assertEqual(VAR_3.data['structure'], {})\n",
"VAR_7 = App.objects.get(id=app_id)\n",
"VAR_7.structure = {'web': 1}\n",
"VAR_7.save()\n",
"VAR_2 = '/api/apps/{}'.format(VAR_4)\n",
"VAR_3 = self.client.get(VAR_2)\n",
"self.assertIn('structure', VAR_3.data)\n",
"self.assertEqual(VAR_3.data['structure'], {'web': 1})\n"
] | [
"def test_app_structure_is_valid_json(self):...\n",
"\"\"\"docstring\"\"\"\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"self.assertIn('structure', response.data)\n",
"self.assertEqual(response.data['structure'], {})\n",
"app = App.objects.get(id=app_id)\n",
"app.structure = {'web': 1}\n",
"app.save()\n",
"url = '/api/apps/{}'.format(app_id)\n",
"response = self.client.get(url)\n",
"self.assertIn('structure', response.data)\n",
"self.assertEqual(response.data['structure'], {'web': 1})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_13(self, *VAR_16, **VAR_10):...\n",
"if not self.is_group_user(VAR_8):\n",
"return VAR_7(self, *VAR_16, **kwargs)\n"
] | [
"def wrapper(self, *args, **kwargs):...\n",
"if not self.is_group_user(group):\n",
"return method(self, *args, **kwargs)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'"
] |
[
"def FUNC_1(VAR_4, VAR_5=2):...\n",
"VAR_24 = iter(VAR_4)\n",
"VAR_25 = []\n",
"for e in range(0, VAR_5):\n",
"VAR_25.append(next(VAR_24))\n",
"yield VAR_25\n",
"for e in VAR_24:\n",
"VAR_25 = VAR_25[1:] + [e]\n",
"yield VAR_25\n"
] | [
"def window(iterable, size=2):...\n",
"i = iter(iterable)\n",
"win = []\n",
"for e in range(0, size):\n",
"win.append(next(i))\n",
"yield win\n",
"for e in i:\n",
"win = win[1:] + [e]\n",
"yield win\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Expr'",
"For",
"Assign'",
"Expr'"
] |
[
"def FUNC_8(VAR_6):...\n",
"VAR_12 = f\"\"\"\n INSERT INTO {VAR_3} (user_id)\n VALUES ({VAR_6})\n \"\"\"\n",
"VAR_14 = create_connection()\n",
"VAR_14.close()\n",
"VAR_15 = VAR_14.cursor()\n",
"VAR_15.execute(VAR_12)\n",
"VAR_14.commit()\n",
"return 'Ok'\n"
] | [
"def create_invoice(userId):...\n",
"sql_query = f\"\"\"\n INSERT INTO {INVOICES_TABLE} (user_id)\n VALUES ({userId})\n \"\"\"\n",
"connection = create_connection()\n",
"connection.close()\n",
"cursor = connection.cursor()\n",
"cursor.execute(sql_query)\n",
"connection.commit()\n",
"return 'Ok'\n"
] | [
0,
4,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"import sqlite3\n",
"from sqlite3 import Error\n",
"import pandas as pd\n",
"import sys\n",
"import logging\n",
"VAR_0 = logging.getLogger(__name__)\n",
"VAR_1 = logging.StreamHandler(sys.stdout)\n",
"VAR_1.setFormatter(logging.Formatter(\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s - %(funcName)s - line %(lineno)d'\n ))\n",
"VAR_0.addHandler(VAR_1)\n",
"VAR_0.setLevel(logging.INFO)\n",
"def FUNC_0(VAR_2='cert_db.sqlite3'):...\n",
"VAR_5 = sqlite3.connect(VAR_2)\n",
"VAR_0.critical(e)\n",
"return None\n",
"return VAR_5\n"
] | [
"import sqlite3\n",
"from sqlite3 import Error\n",
"import pandas as pd\n",
"import sys\n",
"import logging\n",
"logger = logging.getLogger(__name__)\n",
"log_handler = logging.StreamHandler(sys.stdout)\n",
"log_handler.setFormatter(logging.Formatter(\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s - %(funcName)s - line %(lineno)d'\n ))\n",
"logger.addHandler(log_handler)\n",
"logger.setLevel(logging.INFO)\n",
"def create_connection(db_name='cert_db.sqlite3'):...\n",
"conn = sqlite3.connect(db_name)\n",
"logger.critical(e)\n",
"return None\n",
"return conn\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Assign'",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_19(self):...\n",
"VAR_57 = frappe.get_hooks('permission_query_conditions', {}).get(self.\n doctype, [])\n",
"if VAR_57:\n",
"VAR_33 = []\n",
"for method in VAR_57:\n",
"VAR_71 = frappe.call(frappe.get_attr(method), self.user)\n",
"return ' and '.join(VAR_33) if VAR_33 else None\n",
"if VAR_71:\n",
"VAR_33.append(VAR_71)\n"
] | [
"def get_permission_query_conditions(self):...\n",
"condition_methods = frappe.get_hooks('permission_query_conditions', {}).get(\n self.doctype, [])\n",
"if condition_methods:\n",
"conditions = []\n",
"for method in condition_methods:\n",
"c = frappe.call(frappe.get_attr(method), self.user)\n",
"return ' and '.join(conditions) if conditions else None\n",
"if c:\n",
"conditions.append(c)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"For",
"Assign'",
"Return'",
"Condition",
"Expr'"
] |
[
"def __init__(self):...\n",
"super().__init__('Worker was interrupted at runtime')\n"
] | [
"def __init__(self):...\n",
"super().__init__('Worker was interrupted at runtime')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def __init__(self, **VAR_1):...\n",
"super(CLASS_0, self).__init__(**kwargs)\n",
"self.paths = dict()\n"
] | [
"def __init__(self, **kwargs):...\n",
"super(Dispatcher, self).__init__(**kwargs)\n",
"self.paths = dict()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'"
] |
[
"def FUNC_11(self):...\n",
"self.run_test_case(self.scenario.create_instances())\n"
] | [
"def test_b_create_server_group(self):...\n",
"self.run_test_case(self.scenario.create_instances())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_0():...\n",
"pygame.init()\n",
"VAR_0 = 1360\n",
"VAR_1 = 760\n",
"VAR_2 = pygame.display.set_mode((VAR_0, VAR_1))\n",
"VAR_2.fill((0, 0, 0))\n",
"VAR_3 = Graph(34, 19)\n",
"VAR_4 = GraphVisual(VAR_3, 40, VAR_2)\n",
"VAR_5 = pygame.rect.Rect(3, 363, 36, 36)\n",
"VAR_6 = pygame.rect.Rect(1323, 363, 36, 36)\n",
"VAR_7 = Node(Vector2(0, 9))\n",
"VAR_8 = Node(Vector2(33, 9))\n",
"VAR_9 = AStar(VAR_3, VAR_7, VAR_8)\n",
"VAR_10 = []\n",
"VAR_11 = []\n",
"VAR_12 = False\n",
"VAR_13 = False\n",
"VAR_14 = False\n",
"VAR_15 = False\n",
"while True:\n",
"VAR_2.fill((0, 0, 0))\n",
"VAR_4 = GraphVisual(VAR_3, 40, VAR_2)\n",
"for event in pygame.event.get():\n",
"if event.type == pygame.QUIT:\n",
"pygame.event.pump()\n",
"return\n",
"if event.type == pygame.MOUSEBUTTONDOWN:\n",
"VAR_15 = False\n",
"if event.type == pygame.MOUSEBUTTONUP:\n",
"if event.button == 1:\n",
"VAR_14 = False\n",
"if event.type == pygame.MOUSEMOTION:\n",
"if VAR_5.collidepoint(event.pos):\n",
"if pygame.key.get_pressed()[pygame.K_RETURN]:\n",
"if event.button == 1:\n",
"if VAR_12:\n",
"VAR_12 = True\n",
"if VAR_6.collidepoint(event.pos):\n",
"VAR_15 = True\n",
"if VAR_15:\n",
"VAR_16 = 0\n",
"VAR_20, VAR_21 = event.pos\n",
"if VAR_13:\n",
"VAR_20, VAR_21 = event.pos\n",
"VAR_13 = True\n",
"VAR_16 = 0\n",
"if not VAR_11:\n",
"VAR_16 = 0\n",
"pygame.draw.rect(VAR_2, (0, 255, 0), VAR_5)\n",
"if VAR_12 is True or VAR_13 is True:\n",
"VAR_5.x = VAR_20 + VAR_22\n",
"VAR_20, VAR_21 = event.pos\n",
"if VAR_14:\n",
"VAR_22 = VAR_5.x - VAR_20\n",
"VAR_20, VAR_21 = event.pos\n",
"for node in VAR_4.node_visual_colliders:\n",
"VAR_11 = VAR_9.find_path()\n",
"VAR_9 = AStar(VAR_3, VAR_7, VAR_8)\n",
"VAR_17 = 1\n",
"pygame.draw.rect(VAR_2, (255, 0, 0), VAR_6)\n",
"for collider in VAR_4.node_visual_colliders:\n",
"VAR_5.y = VAR_21 + VAR_23\n",
"VAR_6.x = VAR_20 + VAR_22\n",
"VAR_16 = 0\n",
"VAR_23 = VAR_5.y - VAR_21\n",
"VAR_22 = VAR_6.x - VAR_20\n",
"if node.collidepoint(event.pos) and VAR_14 is False:\n",
"VAR_11 = VAR_9.find_path()\n",
"while VAR_17 <= len(VAR_11) - 1:\n",
"VAR_16 = 0\n",
"if VAR_5.colliderect(collider):\n",
"VAR_9 = AStar(VAR_3, VAR_7, VAR_8)\n",
"VAR_6.y = VAR_21 + VAR_23\n",
"for node in VAR_4.node_visual_colliders:\n",
"VAR_23 = VAR_6.y - VAR_21\n",
"VAR_24 = VAR_3.nodes[VAR_16].is_traversable\n",
"VAR_16 += 1\n",
"VAR_18 = Vector2(VAR_11[VAR_16].get_x() * 40, VAR_11[VAR_16].get_y() * 40)\n",
"for node in VAR_3.nodes:\n",
"VAR_5.left = VAR_4.node_visual_colliders[VAR_16].left\n",
"if VAR_6.colliderect(collider):\n",
"if node.collidepoint(event.pos) and VAR_3.nodes[VAR_16\n",
"VAR_14 = True\n",
"VAR_19 = Vector2(VAR_11[VAR_17].get_x() * 40, VAR_11[VAR_17].get_y() * 40)\n",
"if node.is_traversable is False:\n",
"pygame.display.flip()\n",
"VAR_5.top = VAR_4.node_visual_colliders[VAR_16].top\n",
"VAR_6.left = VAR_4.node_visual_colliders[VAR_16].left\n",
"VAR_16 += 1\n",
"VAR_3.nodes[VAR_16].toggle_state('wall')\n",
"VAR_16 += 1\n",
"VAR_10.append(Line(VAR_2, (0, 0, 255), Vector2(VAR_18.x_pos + 20, VAR_18.\n y_pos + 20), Vector2(VAR_19.x_pos + 20, VAR_19.y_pos + 20), 5))\n",
"pygame.draw.rect(VAR_2, (0, 0, 0), VAR_4.node_visual_colliders[VAR_16])\n",
"VAR_16 += 1\n",
"VAR_12 = False\n",
"VAR_6.top = VAR_4.node_visual_colliders[VAR_16].top\n",
"VAR_16 += 1\n",
"VAR_7 = Node(Vector2(VAR_4.node_visuals[VAR_16].node.get_x(), VAR_4.\n node_visuals[VAR_16].node.get_y()))\n",
"VAR_13 = False\n",
"VAR_17 += 1\n",
"VAR_8 = Node(Vector2(VAR_4.node_visuals[VAR_16].node.get_x(), VAR_4.\n node_visuals[VAR_16].node.get_y()))\n"
] | [
"def main():...\n",
"pygame.init()\n",
"screen_width = 1360\n",
"screen_height = 760\n",
"screen = pygame.display.set_mode((screen_width, screen_height))\n",
"screen.fill((0, 0, 0))\n",
"grid = Graph(34, 19)\n",
"visual_graph = GraphVisual(grid, 40, screen)\n",
"start_square = pygame.rect.Rect(3, 363, 36, 36)\n",
"goal_square = pygame.rect.Rect(1323, 363, 36, 36)\n",
"start_node = Node(Vector2(0, 9))\n",
"goal_node = Node(Vector2(33, 9))\n",
"astar = AStar(grid, start_node, goal_node)\n",
"drawn_path = []\n",
"path = []\n",
"dragging_start = False\n",
"dragging_goal = False\n",
"mouse_is_down = False\n",
"pressed_enter = False\n",
"while True:\n",
"screen.fill((0, 0, 0))\n",
"visual_graph = GraphVisual(grid, 40, screen)\n",
"for event in pygame.event.get():\n",
"if event.type == pygame.QUIT:\n",
"pygame.event.pump()\n",
"return\n",
"if event.type == pygame.MOUSEBUTTONDOWN:\n",
"pressed_enter = False\n",
"if event.type == pygame.MOUSEBUTTONUP:\n",
"if event.button == 1:\n",
"mouse_is_down = False\n",
"if event.type == pygame.MOUSEMOTION:\n",
"if start_square.collidepoint(event.pos):\n",
"if pygame.key.get_pressed()[pygame.K_RETURN]:\n",
"if event.button == 1:\n",
"if dragging_start:\n",
"dragging_start = True\n",
"if goal_square.collidepoint(event.pos):\n",
"pressed_enter = True\n",
"if pressed_enter:\n",
"count = 0\n",
"mouse_x, mouse_y = event.pos\n",
"if dragging_goal:\n",
"mouse_x, mouse_y = event.pos\n",
"dragging_goal = True\n",
"count = 0\n",
"if not path:\n",
"count = 0\n",
"pygame.draw.rect(screen, (0, 255, 0), start_square)\n",
"if dragging_start is True or dragging_goal is True:\n",
"start_square.x = mouse_x + offset_x\n",
"mouse_x, mouse_y = event.pos\n",
"if mouse_is_down:\n",
"offset_x = start_square.x - mouse_x\n",
"mouse_x, mouse_y = event.pos\n",
"for node in visual_graph.node_visual_colliders:\n",
"path = astar.find_path()\n",
"astar = AStar(grid, start_node, goal_node)\n",
"count_two = 1\n",
"pygame.draw.rect(screen, (255, 0, 0), goal_square)\n",
"for collider in visual_graph.node_visual_colliders:\n",
"start_square.y = mouse_y + offset_y\n",
"goal_square.x = mouse_x + offset_x\n",
"count = 0\n",
"offset_y = start_square.y - mouse_y\n",
"offset_x = goal_square.x - mouse_x\n",
"if node.collidepoint(event.pos) and mouse_is_down is False:\n",
"path = astar.find_path()\n",
"while count_two <= len(path) - 1:\n",
"count = 0\n",
"if start_square.colliderect(collider):\n",
"astar = AStar(grid, start_node, goal_node)\n",
"goal_square.y = mouse_y + offset_y\n",
"for node in visual_graph.node_visual_colliders:\n",
"offset_y = goal_square.y - mouse_y\n",
"current_state = grid.nodes[count].is_traversable\n",
"count += 1\n",
"line_start = Vector2(path[count].get_x() * 40, path[count].get_y() * 40)\n",
"for node in grid.nodes:\n",
"start_square.left = visual_graph.node_visual_colliders[count].left\n",
"if goal_square.colliderect(collider):\n",
"if node.collidepoint(event.pos) and grid.nodes[count\n",
"mouse_is_down = True\n",
"line_end = Vector2(path[count_two].get_x() * 40, path[count_two].get_y() * 40)\n",
"if node.is_traversable is False:\n",
"pygame.display.flip()\n",
"start_square.top = visual_graph.node_visual_colliders[count].top\n",
"goal_square.left = visual_graph.node_visual_colliders[count].left\n",
"count += 1\n",
"grid.nodes[count].toggle_state('wall')\n",
"count += 1\n",
"drawn_path.append(Line(screen, (0, 0, 255), Vector2(line_start.x_pos + 20, \n line_start.y_pos + 20), Vector2(line_end.x_pos + 20, line_end.y_pos + \n 20), 5))\n",
"pygame.draw.rect(screen, (0, 0, 0), visual_graph.node_visual_colliders[count])\n",
"count += 1\n",
"dragging_start = False\n",
"goal_square.top = visual_graph.node_visual_colliders[count].top\n",
"count += 1\n",
"start_node = Node(Vector2(visual_graph.node_visuals[count].node.get_x(),\n visual_graph.node_visuals[count].node.get_y()))\n",
"dragging_goal = False\n",
"count_two += 1\n",
"goal_node = Node(Vector2(visual_graph.node_visuals[count].node.get_x(),\n visual_graph.node_visuals[count].node.get_y()))\n"
] | [
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"For",
"Condition",
"Expr'",
"Return'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"AugAssign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"AugAssign'",
"Expr'",
"AugAssign'",
"Expr'",
"Expr'",
"AugAssign'",
"Assign'",
"Assign'",
"AugAssign'",
"Assign'",
"Assign'",
"AugAssign'",
"Assign'"
] |
[
"import copy\n",
"import logging\n",
"import os\n",
"import sflock\n",
"from cuckoo.common.config import emit_options\n",
"from cuckoo.common.exceptions import CuckooOperationalError\n",
"from cuckoo.common.files import Folders, Files, Storage\n",
"from cuckoo.common.utils import validate_url, validate_hash\n",
"from cuckoo.common.virustotal import VirusTotalAPI\n",
"from cuckoo.core.database import Database\n",
"VAR_0 = logging.getLogger(__name__)\n",
"VAR_1 = Database()\n",
"def FUNC_0(self, VAR_2, VAR_3, VAR_4):...\n",
"if not VAR_4:\n",
"return\n",
"if validate_hash(VAR_4):\n",
"if validate_url(VAR_4):\n",
"VAR_21 = VirusTotalAPI().hash_fetch(VAR_4)\n",
"VAR_2['errors'].append('Error retrieving file hash: %s' % e)\n",
"VAR_18 = Files.create(VAR_3, VAR_4, VAR_21)\n",
"VAR_2['data'].append({'type': 'url', 'data': VAR_4})\n",
"VAR_2['errors'].append(\"'%s' was neither a valid hash or url\" % VAR_4)\n",
"return\n",
"VAR_2['data'].append({'type': 'file', 'data': VAR_18})\n",
"return\n",
"def FUNC_1(self, VAR_5, VAR_6):...\n",
"return\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_5 not in ('strings', 'files'):\n",
"VAR_0.error(\"Bad parameter '%s' for submit_type\", VAR_5)\n",
"VAR_13 = Folders.create_temp()\n",
"return False\n",
"VAR_14 = {'data': [], 'errors': []}\n",
"if VAR_5 == 'strings':\n",
"for VAR_4 in VAR_6:\n",
"if VAR_5 == 'files':\n",
"self._handle_string(VAR_14, VAR_13, VAR_4)\n",
"for entry in VAR_6:\n",
"return Database().add_submit(VAR_13, VAR_5, VAR_14)\n",
"VAR_22 = Storage.get_filename_from_path(entry['name'])\n",
"VAR_18 = Files.create(VAR_13, VAR_22, entry['data'])\n",
"VAR_14['data'].append({'type': 'file', 'data': VAR_18})\n"
] | [
"import copy\n",
"import logging\n",
"import os\n",
"import sflock\n",
"from cuckoo.common.config import emit_options\n",
"from cuckoo.common.exceptions import CuckooOperationalError\n",
"from cuckoo.common.files import Folders, Files, Storage\n",
"from cuckoo.common.utils import validate_url, validate_hash\n",
"from cuckoo.common.virustotal import VirusTotalAPI\n",
"from cuckoo.core.database import Database\n",
"log = logging.getLogger(__name__)\n",
"db = Database()\n",
"def _handle_string(self, submit, tmppath, line):...\n",
"if not line:\n",
"return\n",
"if validate_hash(line):\n",
"if validate_url(line):\n",
"filedata = VirusTotalAPI().hash_fetch(line)\n",
"submit['errors'].append('Error retrieving file hash: %s' % e)\n",
"filepath = Files.create(tmppath, line, filedata)\n",
"submit['data'].append({'type': 'url', 'data': line})\n",
"submit['errors'].append(\"'%s' was neither a valid hash or url\" % line)\n",
"return\n",
"submit['data'].append({'type': 'file', 'data': filepath})\n",
"return\n",
"def pre(self, submit_type, data):...\n",
"return\n",
"\"\"\"docstring\"\"\"\n",
"if submit_type not in ('strings', 'files'):\n",
"log.error(\"Bad parameter '%s' for submit_type\", submit_type)\n",
"path_tmp = Folders.create_temp()\n",
"return False\n",
"submit_data = {'data': [], 'errors': []}\n",
"if submit_type == 'strings':\n",
"for line in data:\n",
"if submit_type == 'files':\n",
"self._handle_string(submit_data, path_tmp, line)\n",
"for entry in data:\n",
"return Database().add_submit(path_tmp, submit_type, submit_data)\n",
"filename = Storage.get_filename_from_path(entry['name'])\n",
"filepath = Files.create(path_tmp, filename, entry['data'])\n",
"submit_data['data'].append({'type': 'file', 'data': filepath})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Return'",
"Expr'",
"Return'",
"FunctionDef'",
"Return'",
"Docstring",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Condition",
"For",
"Condition",
"Expr'",
"For",
"Return'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"\"\"\"\nDownload and prepare training and fixtures data \nfrom various leagues.\n\"\"\"\n",
"from os.path import join\n",
"from itertools import product\n",
"from difflib import SequenceMatcher\n",
"from sqlite3 import connect\n",
"from argparse import ArgumentParser, RawDescriptionHelpFormatter\n",
"from scipy.stats import hmean\n",
"import numpy as np\n",
"import pandas as pd\n",
"from sportsbet import SOCCER_PATH\n",
"from sportsbet.soccer import TARGET_TYPES_MAPPING\n",
"VAR_0 = connect(join(SOCCER_PATH, 'soccer.db'))\n",
"VAR_1 = {'E0': 'Barclays Premier League', 'B1': 'Belgian Jupiler League',\n 'N1': 'Dutch Eredivisie', 'E1': 'English League Championship', 'E2':\n 'English League One', 'E3': 'English League Two', 'F1':\n 'French Ligue 1', 'F2': 'French Ligue 2', 'D1': 'German Bundesliga',\n 'D2': 'German 2. Bundesliga', 'G1': 'Greek Super League', 'I1':\n 'Italy Serie A', 'I2': 'Italy Serie B', 'P1': 'Portuguese Liga', 'SC0':\n 'Scottish Premiership', 'SP1': 'Spanish Primera Division', 'SP2':\n 'Spanish Segunda Division', 'T1': 'Turkish Turkcell Super Lig'}\n",
"def FUNC_0(VAR_2, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = 1 / pd.concat([(1 / VAR_2[target_type]) for target_type in VAR_3],\n axis=1).sum(axis=1)\n",
"VAR_5.name = '+'.join(VAR_3)\n",
"return pd.concat([VAR_2, VAR_5], axis=1)\n"
] | [
"\"\"\"\nDownload and prepare training and fixtures data \nfrom various leagues.\n\"\"\"\n",
"from os.path import join\n",
"from itertools import product\n",
"from difflib import SequenceMatcher\n",
"from sqlite3 import connect\n",
"from argparse import ArgumentParser, RawDescriptionHelpFormatter\n",
"from scipy.stats import hmean\n",
"import numpy as np\n",
"import pandas as pd\n",
"from sportsbet import SOCCER_PATH\n",
"from sportsbet.soccer import TARGET_TYPES_MAPPING\n",
"DB_CONNECTION = connect(join(SOCCER_PATH, 'soccer.db'))\n",
"LEAGUES_MAPPING = {'E0': 'Barclays Premier League', 'B1':\n 'Belgian Jupiler League', 'N1': 'Dutch Eredivisie', 'E1':\n 'English League Championship', 'E2': 'English League One', 'E3':\n 'English League Two', 'F1': 'French Ligue 1', 'F2': 'French Ligue 2',\n 'D1': 'German Bundesliga', 'D2': 'German 2. Bundesliga', 'G1':\n 'Greek Super League', 'I1': 'Italy Serie A', 'I2': 'Italy Serie B',\n 'P1': 'Portuguese Liga', 'SC0': 'Scottish Premiership', 'SP1':\n 'Spanish Primera Division', 'SP2': 'Spanish Segunda Division', 'T1':\n 'Turkish Turkcell Super Lig'}\n",
"def combine_odds(odds, target_types):...\n",
"\"\"\"docstring\"\"\"\n",
"combined_odds = 1 / pd.concat([(1 / odds[target_type]) for target_type in\n target_types], axis=1).sum(axis=1)\n",
"combined_odds.name = '+'.join(target_types)\n",
"return pd.concat([odds, combined_odds], axis=1)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_3(VAR_3, VAR_2, VAR_4, VAR_5=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_5 is not None and len(VAR_5) > 0:\n",
"VAR_27 = '/'.join(VAR_5)\n",
"VAR_5 = []\n",
"VAR_28 = '/' + VAR_4 + '/' + VAR_27 + '/'\n",
"VAR_27 = ''\n",
"VAR_29 = VAR_2 + '/' + VAR_27\n",
"print('[Setup][FTP] Error: Could not change to: {}'.format(VAR_28))\n",
"VAR_3.cwd(VAR_28)\n",
"VAR_28 = '/' + VAR_4 + '/'\n",
"os.mkdir(VAR_29)\n",
"VAR_18 = VAR_3.nlst()\n",
"print('[Setup][FTP] Created local folder: {}'.format(VAR_29))\n",
"VAR_19 = 1\n",
"VAR_20 = len(VAR_18)\n",
"for file in VAR_18:\n",
"VAR_30 = VAR_2 + '/' + VAR_27 + '/' + file\n",
"if not os.path.isfile(VAR_30):\n",
"print('[Setup][FTP] ({}/{}) File already exists. Skipping: {}'.format(\n VAR_19, VAR_20, VAR_30))\n",
"VAR_3.cwd(VAR_28 + file)\n",
"VAR_49 = VAR_3.nlst()\n",
"VAR_19 = VAR_19 + 1\n",
"print('[Setup][FTP] Switching to directory: {}'.format(VAR_27 + '/' + file))\n",
"if not os.path.isfile(VAR_30):\n",
"VAR_48 = VAR_5.copy()\n",
"VAR_3.retrbinary('RETR {}'.format(file), VAR_7.write)\n",
"VAR_48.append(file)\n",
"print('[Setup][FTP] ({}/{}) File downloaded: {}'.format(VAR_19, VAR_20, VAR_30)\n )\n",
"FUNC_3(VAR_3=ftp, VAR_2=local_directory, VAR_4=remote_directory, VAR_5=\n new_remote_subdirs_list)\n",
"VAR_3.cwd(VAR_28)\n"
] | [
"def fetch_data_via_ftp_recursive(ftp, local_directory, remote_directory,...\n",
"\"\"\"docstring\"\"\"\n",
"if remote_subdirs_list is not None and len(remote_subdirs_list) > 0:\n",
"remote_path_relative = '/'.join(remote_subdirs_list)\n",
"remote_subdirs_list = []\n",
"remote_path_absolute = ('/' + remote_directory + '/' + remote_path_relative +\n '/')\n",
"remote_path_relative = ''\n",
"local_path = local_directory + '/' + remote_path_relative\n",
"print('[Setup][FTP] Error: Could not change to: {}'.format(\n remote_path_absolute))\n",
"ftp.cwd(remote_path_absolute)\n",
"remote_path_absolute = '/' + remote_directory + '/'\n",
"os.mkdir(local_path)\n",
"file_list = ftp.nlst()\n",
"print('[Setup][FTP] Created local folder: {}'.format(local_path))\n",
"file_counter = 1\n",
"file_list_total = len(file_list)\n",
"for file in file_list:\n",
"file_path_local = local_directory + '/' + remote_path_relative + '/' + file\n",
"if not os.path.isfile(file_path_local):\n",
"print('[Setup][FTP] ({}/{}) File already exists. Skipping: {}'.format(\n file_counter, file_list_total, file_path_local))\n",
"ftp.cwd(remote_path_absolute + file)\n",
"temp = ftp.nlst()\n",
"file_counter = file_counter + 1\n",
"print('[Setup][FTP] Switching to directory: {}'.format(remote_path_relative +\n '/' + file))\n",
"if not os.path.isfile(file_path_local):\n",
"new_remote_subdirs_list = remote_subdirs_list.copy()\n",
"ftp.retrbinary('RETR {}'.format(file), local_file.write)\n",
"new_remote_subdirs_list.append(file)\n",
"print('[Setup][FTP] ({}/{}) File downloaded: {}'.format(file_counter,\n file_list_total, file_path_local))\n",
"fetch_data_via_ftp_recursive(ftp=ftp, local_directory=local_directory,\n remote_directory=remote_directory, remote_subdirs_list=\n new_remote_subdirs_list)\n",
"ftp.cwd(remote_path_absolute)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.check_image_exists()\n",
"if not self.security_groups:\n",
"self.security_groups[self.tenant_id] = self.verify(25, self.\n _create_security_group, 1, 'Security group can not be created.',\n 'security group creation', self.compute_client)\n",
"VAR_5 = rand_name('ost1_test-server-smoke-')\n",
"VAR_6 = [self.security_groups[self.tenant_id].name]\n",
"VAR_7 = self.verify(250, self._create_server, 2,\n 'Server can not be created.', 'server creation', self.compute_client,\n VAR_5, VAR_6)\n",
"VAR_8 = self.verify(20, self._create_floating_ip, 3,\n 'Floating IP can not be created.', 'floating IP creation')\n",
"self.verify(20, self._assign_floating_ip_to_instance, 4,\n 'Floating IP can not be assigned.', 'floating IP assignment', self.\n compute_client, VAR_7, VAR_8)\n",
"self.floating_ips.append(VAR_8)\n",
"VAR_9 = VAR_8.ip\n",
"VAR_0.info('is address is {0}'.format(VAR_9))\n",
"VAR_0.debug(VAR_9)\n",
"self.verify(600, self._check_vm_connectivity, 5,\n 'VM connectivity doesn`t function properly.',\n 'VM connectivity checking', VAR_9, 30, (9, 60))\n",
"self.verify(600, self._check_connectivity_from_vm, 6,\n 'Connectivity to 8.8.8.8 from the VM doesn`t function properly.',\n 'public connectivity checking from VM', VAR_9, 30, (9, 60))\n",
"self.verify(20, self.compute_client.servers.remove_floating_ip, 7,\n 'Floating IP cannot be removed.', 'removing floating IP', VAR_7, VAR_8)\n",
"self.verify(20, self.compute_client.floating_ips.delete, 8,\n 'Floating IP cannot be deleted.', 'floating IP deletion', VAR_8)\n",
"if self.floating_ips:\n",
"self.floating_ips.remove(VAR_8)\n",
"self.verify(30, self._delete_server, 9, 'Server can not be deleted. ',\n 'server deletion', VAR_7)\n"
] | [
"def test_008_check_public_instance_connectivity_from_instance(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.check_image_exists()\n",
"if not self.security_groups:\n",
"self.security_groups[self.tenant_id] = self.verify(25, self.\n _create_security_group, 1, 'Security group can not be created.',\n 'security group creation', self.compute_client)\n",
"name = rand_name('ost1_test-server-smoke-')\n",
"security_groups = [self.security_groups[self.tenant_id].name]\n",
"server = self.verify(250, self._create_server, 2,\n 'Server can not be created.', 'server creation', self.compute_client,\n name, security_groups)\n",
"floating_ip = self.verify(20, self._create_floating_ip, 3,\n 'Floating IP can not be created.', 'floating IP creation')\n",
"self.verify(20, self._assign_floating_ip_to_instance, 4,\n 'Floating IP can not be assigned.', 'floating IP assignment', self.\n compute_client, server, floating_ip)\n",
"self.floating_ips.append(floating_ip)\n",
"ip_address = floating_ip.ip\n",
"LOG.info('is address is {0}'.format(ip_address))\n",
"LOG.debug(ip_address)\n",
"self.verify(600, self._check_vm_connectivity, 5,\n 'VM connectivity doesn`t function properly.',\n 'VM connectivity checking', ip_address, 30, (9, 60))\n",
"self.verify(600, self._check_connectivity_from_vm, 6,\n 'Connectivity to 8.8.8.8 from the VM doesn`t function properly.',\n 'public connectivity checking from VM', ip_address, 30, (9, 60))\n",
"self.verify(20, self.compute_client.servers.remove_floating_ip, 7,\n 'Floating IP cannot be removed.', 'removing floating IP', server,\n floating_ip)\n",
"self.verify(20, self.compute_client.floating_ips.delete, 8,\n 'Floating IP cannot be deleted.', 'floating IP deletion', floating_ip)\n",
"if self.floating_ips:\n",
"self.floating_ips.remove(floating_ip)\n",
"self.verify(30, self._delete_server, 9, 'Server can not be deleted. ',\n 'server deletion', server)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"VAR_17 = self.args.nw\n",
"VAR_18 = []\n",
"VAR_19 = time.time()\n",
"for nfile, (p_field, p_file, p_data) in enumerate(self.parser.gen):\n",
"VAR_37 = os.devnull\n",
"VAR_20 = time.time() - VAR_19\n",
"if not VAR_17:\n",
"VAR_21 = sum(x[0] for x in VAR_18)\n",
"VAR_37 = sanitize_fn(p_file)\n",
"self.log('writing to {0}'.format(VAR_37))\n",
"VAR_22 = VAR_21 / VAR_20 / (1024 * 1024)\n",
"if os.path.exists(VAR_37):\n",
"VAR_23, VAR_39 = hashcopy(self.conn, p_data, f)\n",
"VAR_4 = 'OK'\n",
"VAR_37 += '.{:.6f}'.format(time.time())\n",
"if VAR_23 == 0:\n",
"if not self.ok:\n",
"VAR_18.append([VAR_23, VAR_39])\n",
"VAR_4 = 'ERROR'\n",
"VAR_2 = u'{0} // {1} bytes // {2:.3f} MiB/s\\n'.format(VAR_4, VAR_21, VAR_22)\n",
"for VAR_23, VAR_39 in VAR_18:\n",
"VAR_2 += u'sha512: {0} // {1} bytes\\n'.format(VAR_39[:56], VAR_23)\n",
"self.loud_reply(VAR_2)\n",
"if not VAR_17:\n",
"VAR_38 = 'up.{:.6f}.txt'.format(VAR_19)\n",
"f.write((u'\\n'.join(VAR_0(x) for x in [u':'.join(VAR_0(x) for x in self.\n addr), VAR_2.rstrip()]) + '\\n').encode('utf-8'))\n"
] | [
"def handle_plain_upload(self):...\n",
"nullwrite = self.args.nw\n",
"files = []\n",
"t0 = time.time()\n",
"for nfile, (p_field, p_file, p_data) in enumerate(self.parser.gen):\n",
"fn = os.devnull\n",
"td = time.time() - t0\n",
"if not nullwrite:\n",
"sz_total = sum(x[0] for x in files)\n",
"fn = sanitize_fn(p_file)\n",
"self.log('writing to {0}'.format(fn))\n",
"spd = sz_total / td / (1024 * 1024)\n",
"if os.path.exists(fn):\n",
"sz, sha512 = hashcopy(self.conn, p_data, f)\n",
"status = 'OK'\n",
"fn += '.{:.6f}'.format(time.time())\n",
"if sz == 0:\n",
"if not self.ok:\n",
"files.append([sz, sha512])\n",
"status = 'ERROR'\n",
"msg = u'{0} // {1} bytes // {2:.3f} MiB/s\\n'.format(status, sz_total, spd)\n",
"for sz, sha512 in files:\n",
"msg += u'sha512: {0} // {1} bytes\\n'.format(sha512[:56], sz)\n",
"self.loud_reply(msg)\n",
"if not nullwrite:\n",
"log_fn = 'up.{:.6f}.txt'.format(t0)\n",
"f.write((u'\\n'.join(unicode(x) for x in [u':'.join(unicode(x) for x in self\n .addr), msg.rstrip()]) + '\\n').encode('utf-8'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"AugAssign'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"For",
"AugAssign'",
"Expr'",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_7(VAR_24):...\n",
"for VAR_27 in VAR_24:\n",
"if VAR_27['submittable']:\n",
"VAR_27.update({'submission_count': 0, 'submissions': [], 'best_submission':\n None, 'points': 0, 'passed': VAR_27['points_to_pass'] == 0, 'graded': \n False, 'unofficial': False})\n",
"FUNC_7(VAR_27.get('children'))\n"
] | [
"def r_augment(children):...\n",
"for entry in children:\n",
"if entry['submittable']:\n",
"entry.update({'submission_count': 0, 'submissions': [], 'best_submission':\n None, 'points': 0, 'passed': entry['points_to_pass'] == 0, 'graded': \n False, 'unofficial': False})\n",
"r_augment(entry.get('children'))\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_6(self, VAR_10):...\n",
"return '%s%s' % (self.configuration.iscsi_target_prefix, VAR_10['name'])\n"
] | [
"def _build_iscsi_target_name(self, volume):...\n",
"return '%s%s' % (self.configuration.iscsi_target_prefix, volume['name'])\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"VAR_23 = self.common.create_volume(VAR_7)\n",
"self.common.client_logout()\n",
"return {'metadata': VAR_23}\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"metadata = self.common.create_volume(volume)\n",
"self.common.client_logout()\n",
"return {'metadata': metadata}\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def __init__(self, VAR_18, VAR_19):...\n",
"\"\"\"docstring\"\"\"\n",
"self.auth = VAR_18\n",
"self.data = None\n",
"self.station_data = None\n",
"self.station = VAR_19\n"
] | [
"def __init__(self, auth, station):...\n",
"\"\"\"docstring\"\"\"\n",
"self.auth = auth\n",
"self.data = None\n",
"self.station_data = None\n",
"self.station = station\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_1(self, **VAR_3):...\n",
"VAR_13 = super().get_context_data(**kwargs)\n",
"VAR_13['articles'] = self.object.article_set.all()\n",
"VAR_13['form'] = CLASS_1()\n",
"return VAR_13\n"
] | [
"def get_context_data(self, **kwargs):...\n",
"context = super().get_context_data(**kwargs)\n",
"context['articles'] = self.object.article_set.all()\n",
"context['form'] = CommentForm()\n",
"return context\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1():...\n",
"VAR_3 = Context().list_devices(ID_VENDOR_ID='04b4', ID_MODEL_ID='0008')\n",
"VAR_2 = {}\n",
"for VAR_33 in VAR_3:\n",
"if 'DEVLINKS' in VAR_33:\n",
"return VAR_2\n",
"VAR_2[VAR_33['DEVNAME']] = VAR_33['ID_SERIAL_SHORT']\n"
] | [
"def find_uart_serials():...\n",
"uarts = Context().list_devices(ID_VENDOR_ID='04b4', ID_MODEL_ID='0008')\n",
"serials = {}\n",
"for uart in uarts:\n",
"if 'DEVLINKS' in uart:\n",
"return serials\n",
"serials[uart['DEVNAME']] = uart['ID_SERIAL_SHORT']\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Condition",
"Return'",
"Assign'"
] |
[
"def __bool__(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return True\n"
] | [
"def __bool__(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return True\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_1(self, VAR_2, VAR_4=None, *VAR_5, **VAR_6):...\n",
"VAR_2 = db.get_dataset(VAR_2)\n",
"if VAR_4:\n",
"VAR_0 = VAR_2.versions.where(db.DatasetVersion.version == VAR_4).get()\n",
"VAR_0 = VAR_2.current_version.get()\n",
"VAR_12 = []\n",
"for f in VAR_0.files:\n",
"VAR_12.append(db.build_dict_from_row(f))\n",
"self.finish({'files': VAR_12})\n"
] | [
"def get(self, dataset, version=None, *args, **kwargs):...\n",
"dataset = db.get_dataset(dataset)\n",
"if version:\n",
"dataset_version = dataset.versions.where(db.DatasetVersion.version == version\n ).get()\n",
"dataset_version = dataset.current_version.get()\n",
"ret = []\n",
"for f in dataset_version.files:\n",
"ret.append(db.build_dict_from_row(f))\n",
"self.finish({'files': ret})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Expr'"
] |
[
"def FUNC_4(self, VAR_7):...\n",
"\"\"\"docstring\"\"\"\n",
"return self._relationships[VAR_7.entity_attr]\n"
] | [
"def get_relationship(self, attribute):...\n",
"\"\"\"docstring\"\"\"\n",
"return self._relationships[attribute.entity_attr]\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"@integration_synonym_api...\n",
"FUNC_2(VAR_2)\n",
"FUNC_3(VAR_2, 'KLASS', VAR_7='1')\n",
"FUNC_5(VAR_4, VAR_5, VAR_11='CLASS', VAR_10=[{'name': '----CLASS'}, {'name':\n 'KLASS'}])\n"
] | [
"@integration_synonym_api...\n",
"clean_database(solr)\n",
"seed_database_with(solr, 'KLASS', id='1')\n",
"verify_results(client, jwt, query='CLASS', expected=[{'name': '----CLASS'},\n {'name': 'KLASS'}])\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def __init__(self, *VAR_3, **VAR_4):...\n",
"super(CLASS_1, self).__init__(*VAR_3, **kwargs)\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"super(SanISCSIDriver, self).__init__(*args, **kwargs)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_4(self, VAR_2):...\n",
"if self == VAR_2:\n",
"if VAR_2 not in self.child_groups:\n",
"self.child_groups.append(VAR_2)\n",
"VAR_2.depth = max([self.depth + 1, VAR_2.depth])\n",
"VAR_2._check_children_depth()\n",
"if self.name not in [VAR_12.name for VAR_12 in VAR_2.parent_groups]:\n",
"VAR_2.parent_groups.append(self)\n",
"self.clear_hosts_cache()\n",
"for h in VAR_2.get_hosts():\n",
"h.populate_ancestors()\n"
] | [
"def add_child_group(self, group):...\n",
"if self == group:\n",
"if group not in self.child_groups:\n",
"self.child_groups.append(group)\n",
"group.depth = max([self.depth + 1, group.depth])\n",
"group._check_children_depth()\n",
"if self.name not in [g.name for g in group.parent_groups]:\n",
"group.parent_groups.append(self)\n",
"self.clear_hosts_cache()\n",
"for h in group.get_hosts():\n",
"h.populate_ancestors()\n"
] | [
0,
0,
0,
0,
1,
1,
0,
0,
0,
0,
1
] | [
"FunctionDef'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"For",
"Expr'",
"Expr'",
"For",
"Expr'"
] |
[
"def FUNC_25(VAR_6):...\n",
"return FUNC_8(VAR_6, 'must-revalidate')\n"
] | [
"def validate_cache_must_revalidate(page):...\n",
"return validate_cache_control_header(page, 'must-revalidate')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_15(self, VAR_17, VAR_18):...\n",
"if VAR_6 in VAR_18.bookmarks:\n",
"VAR_29 = ((x, None) if x not in VAR_17.collections else (x, VAR_17.\n collections[x]) for x in VAR_17.parameters.keys())\n",
"self._store.append(dict(VAR_29))\n"
] | [
"def test(self, pathctx, levelctx):...\n",
"if bookmark in levelctx.bookmarks:\n",
"found = ((x, None) if x not in pathctx.collections else (x, pathctx.\n collections[x]) for x in pathctx.parameters.keys())\n",
"self._store.append(dict(found))\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Expr'"
] |
[
"from flask import Flask, request\n",
"from db import Database\n",
"from datetime import datetime, timedelta\n",
"from log import Logger\n",
"import sql_queries\n",
"import simplejson\n",
"VAR_0 = Logger().logger\n",
"VAR_1 = Flask(__name__)\n",
"VAR_2 = 40327\n",
"VAR_3 = Database()\n",
"VAR_4 = {}\n",
"VAR_5 = {}\n",
"VAR_6 = timedelta(days=1)\n",
"VAR_7 = 0.12\n",
"@VAR_1.before_request...\n",
"return\n"
] | [
"from flask import Flask, request\n",
"from db import Database\n",
"from datetime import datetime, timedelta\n",
"from log import Logger\n",
"import sql_queries\n",
"import simplejson\n",
"logger = Logger().logger\n",
"app = Flask(__name__)\n",
"port_number = 40327\n",
"database = Database()\n",
"cuisine_discovery_cache = {}\n",
"unique_ingredients_cache = {}\n",
"cache_persistence_time = timedelta(days=1)\n",
"geodist = 0.12\n",
"@app.before_request...\n",
"return\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Return'"
] |
[
"def FUNC_3(self):...\n",
"\"\"\"docstring\"\"\"\n",
"db.session.add(self)\n",
"db.session.commit()\n"
] | [
"def save(self):...\n",
"\"\"\"docstring\"\"\"\n",
"db.session.add(self)\n",
"db.session.commit()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'"
] |
[
"@gen.coroutine...\n",
"VAR_17 = VAR_4.find()\n",
"VAR_17.sort([('date', -1)]).limit(1)\n",
"VAR_18 = None\n",
"while (yield VAR_17.fetch_next):\n",
"VAR_18 = VAR_17.next_object()\n",
"if VAR_18:\n",
"VAR_2 = VAR_18['date']\n",
"VAR_26 = FUNC_4()\n",
"VAR_26 = FUNC_4(VAR_3=date)\n",
"if len(VAR_26) > 0:\n",
"FUNC_5(VAR_4, VAR_26)\n"
] | [
"@gen.coroutine...\n",
"cursor = collection.find()\n",
"cursor.sort([('date', -1)]).limit(1)\n",
"document = None\n",
"while (yield cursor.fetch_next):\n",
"document = cursor.next_object()\n",
"if document:\n",
"date = document['date']\n",
"articles = build_json_from_raw_data()\n",
"articles = build_json_from_raw_data(ch_date=date)\n",
"if len(articles) > 0:\n",
"bulk_insert(collection, articles)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'"
] |
[
"@memoized_method...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_16 = Java.global_instance()\n",
"VAR_17 = ScalaPlatform.global_instance()\n",
"def FUNC_16(VAR_18, VAR_19):...\n",
"VAR_25 = VAR_18.options_scope\n",
"return VAR_18.tool_classpath_from_products(self._products, VAR_19, VAR_25=scope\n )\n"
] | [
"@memoized_method...\n",
"\"\"\"docstring\"\"\"\n",
"java_options_src = Java.global_instance()\n",
"scala_options_src = ScalaPlatform.global_instance()\n",
"def cp(instance, toolname):...\n",
"scope = instance.options_scope\n",
"return instance.tool_classpath_from_products(self._products, toolname,\n scope=scope)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_42(self):...\n",
"VAR_54 = 0\n",
"for VAR_40, VAR_41 in sorted(self._names.items(), VAR_44=lambda item: item[\n",
"VAR_57, VAR_42 = VAR_41\n",
"for item in self[VAR_54:]:\n",
"if VAR_42 is None:\n",
"yield None, item\n",
"VAR_42 = VAR_57 + 1\n",
"if VAR_57 > VAR_54:\n",
"for item in self[VAR_54:VAR_57]:\n",
"yield VAR_40, getattr(self, VAR_40)\n",
"yield None, item\n",
"VAR_54 = VAR_42\n"
] | [
"def allitems(self):...\n",
"next = 0\n",
"for name, index in sorted(self._names.items(), key=lambda item: item[1][0]):\n",
"start, end = index\n",
"for item in self[next:]:\n",
"if end is None:\n",
"yield None, item\n",
"end = start + 1\n",
"if start > next:\n",
"for item in self[next:start]:\n",
"yield name, getattr(self, name)\n",
"yield None, item\n",
"next = end\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"For",
"Condition",
"Expr'",
"Assign'",
"Condition",
"For",
"Expr'",
"Expr'",
"Assign'"
] |
[
"def FUNC_4(self, VAR_8=False):...\n",
"self.install_prerequisites()\n",
"self.port = FUNC_2()\n",
"if VAR_8:\n",
"VAR_23 = None\n",
"VAR_23 = subprocess.PIPE\n",
"VAR_24 = None\n",
"VAR_24 = subprocess.PIPE\n",
"VAR_17 = os.environ.copy()\n",
"VAR_17['LANGUAGE'] = 'en'\n",
"VAR_18, self.tmp_db = tempfile.mkstemp(prefix='local_gae')\n",
"os.close(VAR_18)\n",
"VAR_13 = [sys.executable, os.path.join(VAR_1, 'dev_appserver.py'), self.\n base_dir, '--port', str(self.port), '--datastore_path', self.tmp_db,\n '--datastore_consistency_policy', 'consistent', '--skip_sdk_update_check']\n",
"if VAR_8:\n",
"VAR_13.extend(['--log_level', 'debug'])\n",
"self.test_server = subprocess.Popen(VAR_13, VAR_23=stdout, VAR_24=stderr,\n VAR_17=env)\n",
"while not FUNC_1(self.port):\n",
"self.test_server.poll()\n",
"self.url = 'http://localhost:%d/' % self.port\n",
"if self.test_server.returncode is not None:\n",
"time.sleep(0.001)\n"
] | [
"def start_server(self, verbose=False):...\n",
"self.install_prerequisites()\n",
"self.port = find_free_port()\n",
"if verbose:\n",
"stdout = None\n",
"stdout = subprocess.PIPE\n",
"stderr = None\n",
"stderr = subprocess.PIPE\n",
"env = os.environ.copy()\n",
"env['LANGUAGE'] = 'en'\n",
"h, self.tmp_db = tempfile.mkstemp(prefix='local_gae')\n",
"os.close(h)\n",
"cmd = [sys.executable, os.path.join(GAE_SDK, 'dev_appserver.py'), self.\n base_dir, '--port', str(self.port), '--datastore_path', self.tmp_db,\n '--datastore_consistency_policy', 'consistent', '--skip_sdk_update_check']\n",
"if verbose:\n",
"cmd.extend(['--log_level', 'debug'])\n",
"self.test_server = subprocess.Popen(cmd, stdout=stdout, stderr=stderr, env=env)\n",
"while not test_port(self.port):\n",
"self.test_server.poll()\n",
"self.url = 'http://localhost:%d/' % self.port\n",
"if self.test_server.returncode is not None:\n",
"time.sleep(0.001)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'"
] |
[
"import fnmatch\n",
"import logging\n",
"from flask import abort, request, g\n",
"from flask import url_for\n",
"from functools import wraps\n",
"from confidant import keymanager\n",
"from confidant.app import app\n",
"from confidant.utils import stats\n",
"from confidant.authnz.errors import UserUnknownError, TokenVersionError, AuthenticationError, NotAuthorized\n",
"from confidant.authnz import userauth\n",
"VAR_0 = {'user': ['*'], 'service': ['get_service']}\n",
"VAR_1 = userauth.init_user_auth_class()\n",
"def FUNC_0():...\n",
"\"\"\"docstring\"\"\"\n",
"if hasattr(g, 'username'):\n",
"return g.username\n",
"if VAR_1.is_authenticated():\n",
"return VAR_1.current_email()\n",
"def FUNC_1(VAR_2):...\n",
"if not app.config.get('USE_AUTH'):\n",
"return True\n",
"if VAR_2 == g.user_type:\n",
"return True\n",
"return False\n"
] | [
"import fnmatch\n",
"import logging\n",
"from flask import abort, request, g\n",
"from flask import url_for\n",
"from functools import wraps\n",
"from confidant import keymanager\n",
"from confidant.app import app\n",
"from confidant.utils import stats\n",
"from confidant.authnz.errors import UserUnknownError, TokenVersionError, AuthenticationError, NotAuthorized\n",
"from confidant.authnz import userauth\n",
"PRIVILEGES = {'user': ['*'], 'service': ['get_service']}\n",
"user_mod = userauth.init_user_auth_class()\n",
"def get_logged_in_user():...\n",
"\"\"\"docstring\"\"\"\n",
"if hasattr(g, 'username'):\n",
"return g.username\n",
"if user_mod.is_authenticated():\n",
"return user_mod.current_email()\n",
"def user_is_user_type(user_type):...\n",
"if not app.config.get('USE_AUTH'):\n",
"return True\n",
"if user_type == g.user_type:\n",
"return True\n",
"return False\n"
] | [
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Condition",
"Return'",
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_40(VAR_63=None):...\n",
"VAR_81 = VAR_20.getWrittenData()\n",
"if VAR_81:\n",
"return FUNC_39(VAR_81)\n"
] | [
"def get_request_written_data(_=None):...\n",
"written_data = request.getWrittenData()\n",
"if written_data:\n",
"return get_str(written_data)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_0(VAR_0):...\n",
"return text_type(VAR_0).translate({ord(u'\"'): u'\"\"', ord(u\"'\"): u\"''\"})\n"
] | [
"def sanitize_for_fts(text):...\n",
"return text_type(text).translate({ord(u'\"'): u'\"\"', ord(u\"'\"): u\"''\"})\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_25(VAR_1, **VAR_6):...\n",
"import json\n",
"import sql\n",
"VAR_45 = sql.get_setting('haproxy_sock_port')\n",
"VAR_15 = 'echo \"show backend\" |nc %s %s' % (VAR_1, VAR_45)\n",
"VAR_41, VAR_42 = FUNC_24(VAR_15)\n",
"VAR_61 = ''\n",
"for line in VAR_41:\n",
"if '#' in line or 'stats' in line:\n",
"if VAR_6.get('ret'):\n",
"if line != '':\n",
"return VAR_61\n",
"VAR_80 = json.dumps(line).split('\"')\n",
"if VAR_6.get('ret'):\n",
"VAR_61 += VAR_80[1]\n",
"print(VAR_80[1], end='<br>')\n",
"VAR_61 += '<br />'\n"
] | [
"def show_backends(serv, **kwargs):...\n",
"import json\n",
"import sql\n",
"haproxy_sock_port = sql.get_setting('haproxy_sock_port')\n",
"cmd = 'echo \"show backend\" |nc %s %s' % (serv, haproxy_sock_port)\n",
"output, stderr = subprocess_execute(cmd)\n",
"ret = ''\n",
"for line in output:\n",
"if '#' in line or 'stats' in line:\n",
"if kwargs.get('ret'):\n",
"if line != '':\n",
"return ret\n",
"back = json.dumps(line).split('\"')\n",
"if kwargs.get('ret'):\n",
"ret += back[1]\n",
"print(back[1], end='<br>')\n",
"ret += '<br />'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Import'",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Condition",
"Condition",
"Return'",
"Assign'",
"Condition",
"AugAssign'",
"Expr'",
"AugAssign'"
] |
[
"def FUNC_17(*VAR_20, **VAR_7):...\n",
"return DebugAdapter.start_wrapper_script(VAR_10, *VAR_20, **kwargs)\n"
] | [
"def start(*args, **kwargs):...\n",
"return DebugAdapter.start_wrapper_script(script, *args, **kwargs)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_10(self):...\n",
"return self.current_user()['email'].lower()\n"
] | [
"def current_email(self):...\n",
"return self.current_user()['email'].lower()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_34(self, VAR_64):...\n",
"return CLASS_1(VAR_64)\n"
] | [
"def run(self, func):...\n",
"return RuleInfo(func)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@property...\n",
"return self.contexts[-1]\n"
] | [
"@property...\n",
"return self.contexts[-1]\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"@staticmethod...\n",
"def FUNC_13(VAR_9, VAR_7, VAR_11, VAR_10):...\n",
"if VAR_11 == 'POST':\n",
"return CLASS_0.session.post(FUNC_2(VAR_7), VAR_9=ToUtf8Json(data), headers=\n _HEADERS, VAR_10=timeout)\n",
"if VAR_11 == 'GET':\n",
"return CLASS_0.session.get(FUNC_2(VAR_7), headers=_HEADERS, VAR_10=timeout)\n",
"@retries(5, delay=0.5, backoff=1.5)...\n",
"if VAR_11 == 'POST':\n",
"return requests.post(FUNC_2(VAR_7), VAR_9=ToUtf8Json(data), headers=_HEADERS)\n",
"if VAR_11 == 'GET':\n",
"return requests.get(FUNC_2(VAR_7), headers=_HEADERS)\n",
"if not FUNC_3():\n",
"return VAR_1.submit(FUNC_14, VAR_9, VAR_7, VAR_11)\n",
"return FUNC_13(VAR_9, VAR_7, VAR_11, VAR_10)\n"
] | [
"@staticmethod...\n",
"def SendRequest(data, handler, method, timeout):...\n",
"if method == 'POST':\n",
"return BaseRequest.session.post(_BuildUri(handler), data=ToUtf8Json(data),\n headers=_HEADERS, timeout=timeout)\n",
"if method == 'GET':\n",
"return BaseRequest.session.get(_BuildUri(handler), headers=_HEADERS,\n timeout=timeout)\n",
"@retries(5, delay=0.5, backoff=1.5)...\n",
"if method == 'POST':\n",
"return requests.post(_BuildUri(handler), data=ToUtf8Json(data), headers=\n _HEADERS)\n",
"if method == 'GET':\n",
"return requests.get(_BuildUri(handler), headers=_HEADERS)\n",
"if not _CheckServerIsHealthyWithCache():\n",
"return _EXECUTOR.submit(DelayedSendRequest, data, handler, method)\n",
"return SendRequest(data, handler, method, timeout)\n"
] | [
0,
0,
0,
7,
0,
7,
0,
0,
7,
0,
0,
0,
0,
0
] | [
"Condition",
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"def __enter__(self):...\n",
"return CLASS_0()\n"
] | [
"def __enter__(self):...\n",
"return DBase()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@staticmethod...\n",
"VAR_22 = CLASS_1.get_user_by_username(VAR_4)\n",
"VAR_27 = UserStatsDTO()\n",
"VAR_28 = TaskHistory.query.filter(TaskHistory.user_id == VAR_22.id, \n TaskHistory.action_text != '').all()\n",
"VAR_29 = TaskHistory.query.filter(TaskHistory.user_id == VAR_22.id, \n TaskHistory.action_text == 'MAPPED').count()\n",
"VAR_30 = TaskHistory.query.filter(TaskHistory.user_id == VAR_22.id, \n TaskHistory.action_text == 'VALIDATED').count()\n",
"VAR_31 = TaskHistory.query.filter(TaskHistory.user_id == VAR_22.id, \n TaskHistory.action == 'STATE_CHANGE').distinct(TaskHistory.project_id\n ).count()\n",
"VAR_27.tasks_mapped = VAR_29\n",
"VAR_27.tasks_validated = VAR_30\n",
"VAR_27.projects_mapped = VAR_31\n",
"VAR_27.total_time_spent = 0\n",
"VAR_27.time_spent_mapping = 0\n",
"VAR_27.time_spent_validating = 0\n",
"VAR_32 = 'string'.format(VAR_22.id)\n",
"VAR_33 = db.engine.execute(VAR_32)\n",
"for time in VAR_33:\n",
"VAR_33 = time[0]\n",
"VAR_32 = 'string'.format(VAR_22.id)\n",
"if VAR_33:\n",
"VAR_34 = db.engine.execute(VAR_32)\n",
"VAR_27.time_spent_validating = VAR_33.total_seconds()\n",
"for time in VAR_34:\n",
"VAR_27.total_time_spent += VAR_27.time_spent_validating\n",
"VAR_34 = time[0]\n",
"return VAR_27\n",
"if VAR_34:\n",
"VAR_27.time_spent_mapping = VAR_34.total_seconds()\n",
"VAR_27.total_time_spent += VAR_27.time_spent_mapping\n"
] | [
"@staticmethod...\n",
"user = UserService.get_user_by_username(username)\n",
"stats_dto = UserStatsDTO()\n",
"actions = TaskHistory.query.filter(TaskHistory.user_id == user.id, \n TaskHistory.action_text != '').all()\n",
"tasks_mapped = TaskHistory.query.filter(TaskHistory.user_id == user.id, \n TaskHistory.action_text == 'MAPPED').count()\n",
"tasks_validated = TaskHistory.query.filter(TaskHistory.user_id == user.id, \n TaskHistory.action_text == 'VALIDATED').count()\n",
"projects_mapped = TaskHistory.query.filter(TaskHistory.user_id == user.id, \n TaskHistory.action == 'STATE_CHANGE').distinct(TaskHistory.project_id\n ).count()\n",
"stats_dto.tasks_mapped = tasks_mapped\n",
"stats_dto.tasks_validated = tasks_validated\n",
"stats_dto.projects_mapped = projects_mapped\n",
"stats_dto.total_time_spent = 0\n",
"stats_dto.time_spent_mapping = 0\n",
"stats_dto.time_spent_validating = 0\n",
"sql = (\n \"\"\"SELECT SUM(TO_TIMESTAMP(action_text, 'HH24:MI:SS')::TIME) FROM task_history\n WHERE action='LOCKED_FOR_VALIDATION'\n and user_id = {0};\"\"\"\n .format(user.id))\n",
"total_validation_time = db.engine.execute(sql)\n",
"for time in total_validation_time:\n",
"total_validation_time = time[0]\n",
"sql = (\n \"\"\"SELECT SUM(TO_TIMESTAMP(action_text, 'HH24:MI:SS')::TIME) FROM task_history\n WHERE action='LOCKED_FOR_MAPPING'\n and user_id = {0};\"\"\"\n .format(user.id))\n",
"if total_validation_time:\n",
"total_mapping_time = db.engine.execute(sql)\n",
"stats_dto.time_spent_validating = total_validation_time.total_seconds()\n",
"for time in total_mapping_time:\n",
"stats_dto.total_time_spent += stats_dto.time_spent_validating\n",
"total_mapping_time = time[0]\n",
"return stats_dto\n",
"if total_mapping_time:\n",
"stats_dto.time_spent_mapping = total_mapping_time.total_seconds()\n",
"stats_dto.total_time_spent += stats_dto.time_spent_mapping\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"AugAssign'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"AugAssign'"
] |
[
"def FUNC_11(self, VAR_3):...\n",
"VAR_3 = VAR_3.lower().split(', ')\n",
"VAR_8 = sqlite3.connect('data/ed.db').cursor()\n",
"if len(VAR_3) == 1:\n",
"VAR_9 = VAR_8.execute(\n f\"select * from commodities where lower(name)='{VAR_3[0]}'\")\n",
"if len(VAR_3) < 4:\n",
"VAR_7 = VAR_9.fetchone()\n",
"VAR_9 = VAR_8.execute(\n f\"select id from commodities where lower(name)='{VAR_3[0]}'\")\n",
"return 'Too many commas. What does that even mean.'\n",
"if VAR_7:\n",
"VAR_7 = VAR_9.fetchone()\n",
"VAR_12 = tuple(i[0] for i in VAR_9.description)\n",
"if not VAR_7:\n",
"return '\\n'.join(f\"{key.replace('_', ' ').title()}: {field}\" for key, field in\n zip(VAR_12[1:], VAR_7[1:]))\n",
"return 'Commodity not found.'\n",
"VAR_13 = VAR_7[0]\n",
"VAR_11 = f\"select id from stations where lower(name)='{VAR_3[1]}'\"\n",
"if len(VAR_3) == 3:\n",
"VAR_9 = VAR_8.execute(f\"select id from systems where lower(name)='{VAR_3[2]}'\")\n",
"VAR_9 = VAR_8.execute(VAR_11)\n",
"VAR_7 = VAR_9.fetchone()\n",
"VAR_7 = VAR_9.fetchall()\n",
"if not VAR_7:\n",
"if not VAR_7:\n",
"return 'System not found.'\n",
"VAR_16 = VAR_7[0]\n",
"return 'Station not found.'\n",
"if len(VAR_7) > 1:\n",
"VAR_11 += f' and system_id={VAR_16}'\n",
"return 'Multiple stations found, please specify system.'\n",
"VAR_14 = VAR_7[0][0]\n",
"VAR_9 = VAR_8.execute(\n f'select * from listings where station_id={VAR_14} and commodity_id={VAR_13}'\n )\n",
"VAR_7 = VAR_9.fetchone()\n",
"if not VAR_7:\n",
"return 'Commodity not available to be bought or sold at station.'\n",
"VAR_12 = (i[0] for i in VAR_9.description)\n",
"VAR_7 = {k: v for k, v in zip(VAR_12, VAR_7)}\n",
"VAR_7.pop('station_id')\n",
"VAR_7.pop('commodity_id')\n",
"VAR_7.pop('id')\n",
"VAR_15 = f'Commodity: {VAR_3[0].title()}\\n'\n",
"if len(VAR_3) > 1:\n",
"VAR_15 += f'Station: {VAR_3[1].title()}\\n'\n",
"if len(VAR_3) > 2:\n",
"VAR_15 += f'System: {VAR_3[2].title()}\\n'\n",
"return VAR_15 + '\\n'.join(f\"{key.replace('_', ' ').title()}: {field}\" for \n key, field in VAR_7.items())\n"
] | [
"def commodity_search(self, search):...\n",
"search = search.lower().split(', ')\n",
"conn = sqlite3.connect('data/ed.db').cursor()\n",
"if len(search) == 1:\n",
"table = conn.execute(\n f\"select * from commodities where lower(name)='{search[0]}'\")\n",
"if len(search) < 4:\n",
"result = table.fetchone()\n",
"table = conn.execute(\n f\"select id from commodities where lower(name)='{search[0]}'\")\n",
"return 'Too many commas. What does that even mean.'\n",
"if result:\n",
"result = table.fetchone()\n",
"keys = tuple(i[0] for i in table.description)\n",
"if not result:\n",
"return '\\n'.join(f\"{key.replace('_', ' ').title()}: {field}\" for key, field in\n zip(keys[1:], result[1:]))\n",
"return 'Commodity not found.'\n",
"commodity_id = result[0]\n",
"query = f\"select id from stations where lower(name)='{search[1]}'\"\n",
"if len(search) == 3:\n",
"table = conn.execute(f\"select id from systems where lower(name)='{search[2]}'\")\n",
"table = conn.execute(query)\n",
"result = table.fetchone()\n",
"result = table.fetchall()\n",
"if not result:\n",
"if not result:\n",
"return 'System not found.'\n",
"system_id = result[0]\n",
"return 'Station not found.'\n",
"if len(result) > 1:\n",
"query += f' and system_id={system_id}'\n",
"return 'Multiple stations found, please specify system.'\n",
"station_id = result[0][0]\n",
"table = conn.execute(\n f'select * from listings where station_id={station_id} and commodity_id={commodity_id}'\n )\n",
"result = table.fetchone()\n",
"if not result:\n",
"return 'Commodity not available to be bought or sold at station.'\n",
"keys = (i[0] for i in table.description)\n",
"result = {k: v for k, v in zip(keys, result)}\n",
"result.pop('station_id')\n",
"result.pop('commodity_id')\n",
"result.pop('id')\n",
"ret = f'Commodity: {search[0].title()}\\n'\n",
"if len(search) > 1:\n",
"ret += f'Station: {search[1].title()}\\n'\n",
"if len(search) > 2:\n",
"ret += f'System: {search[2].title()}\\n'\n",
"return ret + '\\n'.join(f\"{key.replace('_', ' ').title()}: {field}\" for key,\n field in result.items())\n"
] | [
0,
0,
0,
0,
4,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
4,
4,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Return'",
"Assign'",
"Return'",
"Condition",
"AugAssign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"AugAssign'",
"Condition",
"AugAssign'",
"Return'"
] |
[
"def FUNC_24(self, VAR_39):...\n",
"for r in VAR_39:\n",
"if not r.name:\n",
"r._comment_count = 0\n",
"if '_comments' in r:\n",
"r._comment_count = len(json.loads(r._comments or '[]'))\n"
] | [
"def add_comment_count(self, result):...\n",
"for r in result:\n",
"if not r.name:\n",
"r._comment_count = 0\n",
"if '_comments' in r:\n",
"r._comment_count = len(json.loads(r._comments or '[]'))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Assign'",
"Condition",
"Assign'"
] |
[
"from django.http import HttpResponse, HttpResponseRedirect\n",
"from django.shortcuts import get_object_or_404, render\n",
"from django.views.generic import ListView, DetailView\n",
"from pure_pagination.mixins import PaginationMixin\n",
"from django.db.models import Q\n",
"from .models import Event, Attribute, Org, Tag, Object, ObjectReference\n",
"from .forms import EventSearchForm, AttributeSearchForm\n",
"from datetime import datetime, timezone, timedelta\n",
"VAR_0 = Event\n",
"VAR_1 = 'threat/event_list.html'\n",
"VAR_2 = 'events'\n",
"VAR_3 = 30\n",
"def FUNC_0(self, **VAR_4):...\n",
"VAR_5 = super().get_context_data(**kwargs)\n",
"VAR_5['count'] = self.object_list.count()\n",
"VAR_5['alltag'] = Tag.objects.order_by('id')\n",
"VAR_6 = self.request.GET.getlist('tag')\n",
"VAR_5['tags'] = Tag.objects.filter(id__in=taglist)\n",
"VAR_7 = EventSearchForm(self.request.GET)\n",
"VAR_5['search_form'] = VAR_7\n",
"VAR_5['30_day_labels'] = self.thirty_day_labels()\n",
"VAR_5['30_day_data'] = self.thirty_day_data()\n",
"return VAR_5\n"
] | [
"from django.http import HttpResponse, HttpResponseRedirect\n",
"from django.shortcuts import get_object_or_404, render\n",
"from django.views.generic import ListView, DetailView\n",
"from pure_pagination.mixins import PaginationMixin\n",
"from django.db.models import Q\n",
"from .models import Event, Attribute, Org, Tag, Object, ObjectReference\n",
"from .forms import EventSearchForm, AttributeSearchForm\n",
"from datetime import datetime, timezone, timedelta\n",
"model = Event\n",
"template_name = 'threat/event_list.html'\n",
"context_object_name = 'events'\n",
"paginate_by = 30\n",
"def get_context_data(self, **kwargs):...\n",
"context = super().get_context_data(**kwargs)\n",
"context['count'] = self.object_list.count()\n",
"context['alltag'] = Tag.objects.order_by('id')\n",
"taglist = self.request.GET.getlist('tag')\n",
"context['tags'] = Tag.objects.filter(id__in=taglist)\n",
"search_form = EventSearchForm(self.request.GET)\n",
"context['search_form'] = search_form\n",
"context['30_day_labels'] = self.thirty_day_labels()\n",
"context['30_day_data'] = self.thirty_day_data()\n",
"return context\n"
] | [
6,
6,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_3(self):...\n",
"return self.data['notifications']\n"
] | [
"def notifications(self):...\n",
"return self.data['notifications']\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"\"\"\"Volume driver for Dell EqualLogic Storage.\"\"\"\n",
"import functools\n",
"import random\n",
"import eventlet\n",
"from eventlet import greenthread\n",
"import greenlet\n",
"from oslo.config import cfg\n",
"from cinder import exception\n",
"from cinder.openstack.common import excutils\n",
"from cinder.openstack.common import log as logging\n",
"from cinder.openstack.common import processutils\n",
"from cinder import utils\n",
"from cinder.volume.drivers.san import SanISCSIDriver\n",
"VAR_0 = logging.getLogger(__name__)\n",
"VAR_1 = [cfg.StrOpt('eqlx_group_name', default='group-0', help=\n 'Group name to use for creating volumes'), cfg.IntOpt(\n 'eqlx_cli_timeout', default=30, help=\n 'Timeout for the Group Manager cli command execution'), cfg.IntOpt(\n 'eqlx_cli_max_retries', default=5, help=\n 'Maximum retry count for reconnection'), cfg.BoolOpt('eqlx_use_chap',\n default=False, help='Use CHAP authentication for targets?'), cfg.StrOpt\n ('eqlx_chap_login', default='admin', help='Existing CHAP account name'),\n cfg.StrOpt('eqlx_chap_password', default='password', help=\n 'Password for specified CHAP account name', secret=True), cfg.StrOpt(\n 'eqlx_pool', default='default', help=\n 'Pool in which volumes will be created')]\n",
"VAR_2 = cfg.CONF\n",
"VAR_2.register_opts(VAR_1)\n",
"def FUNC_0(VAR_3):...\n",
"@functools.wraps(VAR_3)...\n",
"VAR_23 = VAR_5.pop('timeout', None)\n",
"VAR_24 = eventlet.spawn(VAR_3, self, *VAR_4, **kwargs)\n",
"if VAR_23 is None:\n",
"return VAR_24.wait()\n",
"VAR_36 = eventlet.spawn_after(VAR_23, VAR_24.kill)\n",
"VAR_48 = VAR_24.wait()\n",
"VAR_36.cancel()\n",
"return FUNC_1\n",
"return VAR_48\n"
] | [
"\"\"\"Volume driver for Dell EqualLogic Storage.\"\"\"\n",
"import functools\n",
"import random\n",
"import eventlet\n",
"from eventlet import greenthread\n",
"import greenlet\n",
"from oslo.config import cfg\n",
"from cinder import exception\n",
"from cinder.openstack.common import excutils\n",
"from cinder.openstack.common import log as logging\n",
"from cinder.openstack.common import processutils\n",
"from cinder import utils\n",
"from cinder.volume.drivers.san import SanISCSIDriver\n",
"LOG = logging.getLogger(__name__)\n",
"eqlx_opts = [cfg.StrOpt('eqlx_group_name', default='group-0', help=\n 'Group name to use for creating volumes'), cfg.IntOpt(\n 'eqlx_cli_timeout', default=30, help=\n 'Timeout for the Group Manager cli command execution'), cfg.IntOpt(\n 'eqlx_cli_max_retries', default=5, help=\n 'Maximum retry count for reconnection'), cfg.BoolOpt('eqlx_use_chap',\n default=False, help='Use CHAP authentication for targets?'), cfg.StrOpt\n ('eqlx_chap_login', default='admin', help='Existing CHAP account name'),\n cfg.StrOpt('eqlx_chap_password', default='password', help=\n 'Password for specified CHAP account name', secret=True), cfg.StrOpt(\n 'eqlx_pool', default='default', help=\n 'Pool in which volumes will be created')]\n",
"CONF = cfg.CONF\n",
"CONF.register_opts(eqlx_opts)\n",
"def with_timeout(f):...\n",
"@functools.wraps(f)...\n",
"timeout = kwargs.pop('timeout', None)\n",
"gt = eventlet.spawn(f, self, *args, **kwargs)\n",
"if timeout is None:\n",
"return gt.wait()\n",
"kill_thread = eventlet.spawn_after(timeout, gt.kill)\n",
"res = gt.wait()\n",
"kill_thread.cancel()\n",
"return __inner\n",
"return res\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Return'"
] |
[
"@VAR_0.route('/clear')...\n",
""
] | [
"@app.route('/clear')...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_7(self):...\n",
"self.assertEquals('123456 test course: Fall 2011 day 1', str(self.\n current_course_instance))\n",
"self.assertEquals('123456 test course: Secret super course', str(self.\n hidden_course_instance))\n"
] | [
"def test_course_instance_unicode_string(self):...\n",
"self.assertEquals('123456 test course: Fall 2011 day 1', str(self.\n current_course_instance))\n",
"self.assertEquals('123456 test course: Secret super course', str(self.\n hidden_course_instance))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._unit_of_measurement\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._unit_of_measurement\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |