lines (sequence, lengths 1–444) | raw_lines (sequence, lengths 1–444) | label (sequence, lengths 1–444) | type (sequence, lengths 1–444) |
---|---|---|---|
[
"def __init__(self, VAR_11):...\n",
"self.content = VAR_11\n"
] | [
"def __init__(self, content):...\n",
"self.content = content\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
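Each record in this table is four index-aligned sequences: `lines` (identifier-normalized source, with names rewritten to `VAR_n`/`FUNC_n`/`CLASS_n`), `raw_lines` (the original source), per-line integer `label`s, and per-line AST node `type` tags. Below is a minimal sketch of walking one record, using the first record above verbatim; the dict layout is only an assumption about how a loaded row would present these four columns, not a documented API of any particular loader:

```python
# Minimal sketch: inspecting one record of this dump, assuming each row holds
# four parallel per-line sequences (normalized lines, raw lines, integer
# labels, and AST node-type tags). The literal values below are copied from
# the first record above; the dict layout itself is an assumption.
row = {
    "lines": ["def __init__(self, VAR_11):...\n", "self.content = VAR_11\n"],
    "raw_lines": ["def __init__(self, content):...\n", "self.content = content\n"],
    "label": [0, 0],
    "type": ["FunctionDef'", "Assign'"],
}

# The four sequences are index-aligned, so zipping them pairs every
# normalized line with its raw counterpart, its label, and its node type.
for norm, raw, lab, node in zip(row["lines"], row["raw_lines"],
                                row["label"], row["type"]):
    print(f"label={lab} type={node:<14} {norm.rstrip()}  <-  {raw.rstrip()}")
```

Zipping is safe because each record keeps all four sequences the same length (two lines here; up to 444 per the header).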
[
"def FUNC_1(self, VAR_2, VAR_6, VAR_7, VAR_4, VAR_3):...\n",
"if VAR_7:\n",
"VAR_7 = int(float(VAR_7))\n",
"VAR_7 = None\n",
"setattr(VAR_2, VAR_6, VAR_7)\n"
] | [
"def parse_input(self, report, name, value, request, attribute):...\n",
"if value:\n",
"value = int(float(value))\n",
"value = None\n",
"setattr(report, name, value)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_4(self, VAR_2, VAR_3):...\n",
"VAR_4 = []\n",
"for item in event.objects.filter(result__campaign_id=campaign).values_list(\n",
"if item is not None:\n",
"return sorted(VAR_4, key=fix_sort_list)\n",
"VAR_4.append((item, item))\n"
] | [
"def event_choices(self, campaign, attribute):...\n",
"choices = []\n",
"for item in event.objects.filter(result__campaign_id=campaign).values_list(\n",
"if item is not None:\n",
"return sorted(choices, key=fix_sort_list)\n",
"choices.append((item, item))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Condition",
"Return'",
"Expr'"
] |
[
"import sublime\n",
"import sublime_plugin\n",
"import os\n",
"import re\n",
"import Urtext.datestimes\n",
"import Urtext.meta\n",
"import sys\n",
"sys.path.append(os.path.join(os.path.dirname(__file__)))\n",
"from anytree import Node, RenderTree\n",
"import codecs\n",
"import logging\n",
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_0.project_data():\n",
"VAR_2 = VAR_0.project_data()['urtext_path']\n",
"VAR_2 = '.'\n",
"return VAR_2\n"
] | [
"import sublime\n",
"import sublime_plugin\n",
"import os\n",
"import re\n",
"import Urtext.datestimes\n",
"import Urtext.meta\n",
"import sys\n",
"sys.path.append(os.path.join(os.path.dirname(__file__)))\n",
"from anytree import Node, RenderTree\n",
"import codecs\n",
"import logging\n",
"def get_path(window):...\n",
"\"\"\"docstring\"\"\"\n",
"if window.project_data():\n",
"path = window.project_data()['urtext_path']\n",
"path = '.'\n",
"return path\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Expr'",
"ImportFrom'",
"Import'",
"Import'",
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_0():...\n",
"system('cls')\n"
] | [
"def clear():...\n",
"system('cls')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_1():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10 = str(sys.modules[__name__].__file__)\n",
"VAR_11 = os.path.dirname(os.path.dirname(VAR_10))\n",
"return os.path.join(VAR_11, 'data_url.json')\n"
] | [
"def get_data_url_path():...\n",
"\"\"\"docstring\"\"\"\n",
"path_file = str(sys.modules[__name__].__file__)\n",
"url_path = os.path.dirname(os.path.dirname(path_file))\n",
"return os.path.join(url_path, 'data_url.json')\n"
] | [
0,
0,
1,
1,
1
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(self, VAR_0):...\n",
"return VAR_0.imageupload_set.count()\n"
] | [
"def get_images_count(self, obj):...\n",
"return obj.imageupload_set.count()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@require_http_methods(['POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_17 = BadRequestRateLimiter()\n",
"if VAR_17.is_rate_limit_exceeded(VAR_3):\n",
"VAR_0.warning('Password reset rate limit exceeded')\n",
"VAR_12 = VAR_3.user\n",
"return HttpResponseForbidden()\n",
"VAR_18 = VAR_12.email if VAR_12.is_authenticated() else VAR_3.POST.get('email')\n",
"if VAR_18:\n",
"return HttpResponseBadRequest(_('No email address provided.'))\n",
"request_password_change(VAR_18, VAR_3.is_secure())\n",
"VAR_0.info('Invalid password reset attempt')\n",
"return HttpResponse(status=200)\n",
"VAR_12 = VAR_12 if VAR_12.is_authenticated() else VAR_2.objects.get(VAR_18=\n email)\n",
"VAR_17.tick_bad_request_counter(VAR_3)\n",
"destroy_oauth_tokens(VAR_12)\n"
] | [
"@require_http_methods(['POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"limiter = BadRequestRateLimiter()\n",
"if limiter.is_rate_limit_exceeded(request):\n",
"AUDIT_LOG.warning('Password reset rate limit exceeded')\n",
"user = request.user\n",
"return HttpResponseForbidden()\n",
"email = user.email if user.is_authenticated() else request.POST.get('email')\n",
"if email:\n",
"return HttpResponseBadRequest(_('No email address provided.'))\n",
"request_password_change(email, request.is_secure())\n",
"AUDIT_LOG.info('Invalid password reset attempt')\n",
"return HttpResponse(status=200)\n",
"user = user if user.is_authenticated() else User.objects.get(email=email)\n",
"limiter.tick_bad_request_counter(request)\n",
"destroy_oauth_tokens(user)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Expr'",
"Return'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_49(self):...\n",
"for VAR_14 in self.list_columns.keys():\n",
"if self.is_pk(VAR_14):\n",
"return VAR_14\n"
] | [
"def get_pk_name(self):...\n",
"for col_name in self.list_columns.keys():\n",
"if self.is_pk(col_name):\n",
"return col_name\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Return'"
] |
[
"@tornado.web.authenticated...\n",
"self.render('create.html')\n"
] | [
"@tornado.web.authenticated...\n",
"self.render('create.html')\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"\"\"\"string\"\"\"\n",
"__revision__ = '$Rev$'\n",
"__date__ = '$Date$'\n",
"__author__ = '$Author$'\n",
"from django.db import connection, models, transaction\n",
"def FUNC_0(self, **VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = ['UPDATE', connection.ops.quote_name(self._meta.db_table), 'SET']\n",
"for field_name in VAR_0:\n",
"setattr(self, field_name, VAR_0[field_name])\n",
"VAR_1.pop(-1)\n",
"VAR_2 = self._meta.get_field(field_name)\n",
"VAR_1.extend(['WHERE', 'id', '=', str(self.id)])\n",
"VAR_3 = VAR_2.get_db_prep_save(VAR_0[field_name])\n",
"VAR_1 = ' '.join(VAR_1)\n",
"if isinstance(VAR_3, basestring):\n",
"connection.cursor().execute(VAR_1)\n",
"VAR_3 = \"'%s'\" % VAR_3.encode('utf-8').replace('\\\\', '\\\\\\\\')\n",
"if isinstance(VAR_3, models.Model):\n",
"transaction.commit_unless_managed()\n",
"VAR_1.extend((connection.ops.quote_name(VAR_2.column), '=', VAR_3, ','))\n",
"VAR_3 = str(VAR_3.id)\n",
"if VAR_3 is None:\n",
"FUNC_0.alters_data = True\n",
"VAR_3 = 'NULL'\n",
"VAR_3 = str(VAR_3)\n"
] | [
"\"\"\"\nUpdate only selected fields of a model.\n\nThe problem with model.save() is that it also overwrites all other\nfields with possibly stale data.\n\"\"\"\n",
"__revision__ = '$Rev$'\n",
"__date__ = '$Date$'\n",
"__author__ = '$Author$'\n",
"from django.db import connection, models, transaction\n",
"def update_fields(self, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"sql = ['UPDATE', connection.ops.quote_name(self._meta.db_table), 'SET']\n",
"for field_name in kwargs:\n",
"setattr(self, field_name, kwargs[field_name])\n",
"sql.pop(-1)\n",
"field = self._meta.get_field(field_name)\n",
"sql.extend(['WHERE', 'id', '=', str(self.id)])\n",
"value = field.get_db_prep_save(kwargs[field_name])\n",
"sql = ' '.join(sql)\n",
"if isinstance(value, basestring):\n",
"connection.cursor().execute(sql)\n",
"value = \"'%s'\" % value.encode('utf-8').replace('\\\\', '\\\\\\\\')\n",
"if isinstance(value, models.Model):\n",
"transaction.commit_unless_managed()\n",
"sql.extend((connection.ops.quote_name(field.column), '=', value, ','))\n",
"value = str(value.id)\n",
"if value is None:\n",
"update_fields.alters_data = True\n",
"value = 'NULL'\n",
"value = str(value)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
4,
4,
4,
4,
0,
4,
4,
4,
0,
4,
4
] | [
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def __eq__(self, VAR_11):...\n",
"if VAR_11 is None:\n",
"return False\n",
"return self.rule == VAR_11.rule and (self.dynamic_output or self.\n wildcards_dict == VAR_11.wildcards_dict)\n"
] | [
"def __eq__(self, other):...\n",
"if other is None:\n",
"return False\n",
"return self.rule == other.rule and (self.dynamic_output or self.\n wildcards_dict == other.wildcards_dict)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_5(self):...\n",
"self.uut.executable = 'echo'\n",
"self.uut.arguments = 'hello'\n",
"self.uut.use_stdin = False\n",
"self.uut.use_stderr = True\n",
"self.uut.process_output = lambda output, filename, file: output\n",
"VAR_1 = self.uut.lint('unused_filename')\n",
"self.assertEqual((), VAR_1)\n",
"self.uut.use_stderr = False\n",
"VAR_1 = self.uut.lint('unused_filename')\n",
"self.assertEqual(('hello\\n',), VAR_1)\n",
"def FUNC_9(VAR_2):...\n",
"assert VAR_2 == 'hello'\n",
"VAR_3 = self.uut.warn\n",
"self.uut.warn = FUNC_9\n",
"self.uut._print_errors(['hello', '\\n'])\n",
"self.uut.warn = VAR_3\n"
] | [
"def test_stderr_output(self):...\n",
"self.uut.executable = 'echo'\n",
"self.uut.arguments = 'hello'\n",
"self.uut.use_stdin = False\n",
"self.uut.use_stderr = True\n",
"self.uut.process_output = lambda output, filename, file: output\n",
"out = self.uut.lint('unused_filename')\n",
"self.assertEqual((), out)\n",
"self.uut.use_stderr = False\n",
"out = self.uut.lint('unused_filename')\n",
"self.assertEqual(('hello\\n',), out)\n",
"def assert_warn(line):...\n",
"assert line == 'hello'\n",
"old_warn = self.uut.warn\n",
"self.uut.warn = assert_warn\n",
"self.uut._print_errors(['hello', '\\n'])\n",
"self.uut.warn = old_warn\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assert'",
"Assign'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"@defer.inlineCallbacks...\n",
"VAR_70 = yield self.mail_store.get_mailbox_mail_ids(VAR_42)\n",
"VAR_58 = yield self.mail_store.get_mails(VAR_70)\n",
"defer.returnValue(VAR_58)\n"
] | [
"@defer.inlineCallbacks...\n",
"mail_ids = yield self.mail_store.get_mailbox_mail_ids(mbox_name)\n",
"mails = yield self.mail_store.get_mails(mail_ids)\n",
"defer.returnValue(mails)\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_26(self):...\n",
"FUNC_4()\n"
] | [
"def setUp(self):...\n",
"create_dummy_data()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_5(self, VAR_1):...\n",
"VAR_5 = dict()\n",
"VAR_17, VAR_18 = self.get_epoch_month(VAR_1)\n",
"VAR_5['interval'] = {'from': self.convert_local_ts_to_utc(VAR_17, self.\n local_timezone), 'to': self.convert_local_ts_to_utc(VAR_18, self.\n local_timezone)}\n",
"VAR_19 = 0\n",
"VAR_12 = 'string'\n",
"VAR_5['data'] = list()\n",
"for VAR_13 in self.c.execute(VAR_12 % (VAR_17, VAR_18)):\n",
"VAR_5['data'].append({'time': self.convert_local_ts_to_utc(VAR_13[0], self.\n local_timezone), 'power': VAR_13[1]})\n",
"VAR_5['total'] = VAR_19\n",
"VAR_19 += VAR_13[1]\n",
"VAR_12 = 'string'\n",
"self.c.execute(VAR_12)\n",
"VAR_14, VAR_15 = self.c.fetchone()\n",
"if VAR_14:\n",
"VAR_5['hasPrevious'] = VAR_14 < VAR_17\n",
"VAR_5['hasPrevious'] = False\n",
"if VAR_15:\n",
"VAR_5['hasNext'] = VAR_15 > VAR_18\n",
"VAR_5['hasNext'] = False\n",
"return VAR_5\n"
] | [
"def get_requested_month(self, date):...\n",
"data = dict()\n",
"month_start, month_end = self.get_epoch_month(date)\n",
"data['interval'] = {'from': self.convert_local_ts_to_utc(month_start, self.\n local_timezone), 'to': self.convert_local_ts_to_utc(month_end, self.\n local_timezone)}\n",
"month_total = 0\n",
"query = \"\"\"\n SELECT TimeStamp, SUM(DayYield) AS Power \n FROM MonthData \n WHERE TimeStamp BETWEEN %s AND %s\n GROUP BY TimeStamp\n \"\"\"\n",
"data['data'] = list()\n",
"for row in self.c.execute(query % (month_start, month_end)):\n",
"data['data'].append({'time': self.convert_local_ts_to_utc(row[0], self.\n local_timezone), 'power': row[1]})\n",
"data['total'] = month_total\n",
"month_total += row[1]\n",
"query = \"\"\"\n SELECT MIN(TimeStamp) as Min, MAX(TimeStamp) as Max \n FROM ( SELECT TimeStamp FROM MonthData GROUP BY TimeStamp );\n \"\"\"\n",
"self.c.execute(query)\n",
"first_data, last_data = self.c.fetchone()\n",
"if first_data:\n",
"data['hasPrevious'] = first_data < month_start\n",
"data['hasPrevious'] = False\n",
"if last_data:\n",
"data['hasNext'] = last_data > month_end\n",
"data['hasNext'] = False\n",
"return data\n"
] | [
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Assign'",
"AugAssign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_1, VAR_3=None):...\n",
"FUNC_0(self)\n",
"self.list_columns = dict()\n",
"self.list_properties = dict()\n",
"self.session = VAR_3\n",
"for VAR_36 in sa.orm.class_mapper(VAR_1).iterate_properties:\n",
"if type(VAR_36) != SynonymProperty:\n",
"for VAR_14 in VAR_1.__mapper__.columns.keys():\n",
"self.list_properties[VAR_36.key] = VAR_36\n",
"if VAR_14 in self.list_properties:\n",
"super(CLASS_0, self).__init__(VAR_1)\n",
"self.list_columns[VAR_14] = VAR_1.__mapper__.columns[VAR_14]\n"
] | [
"def __init__(self, obj, session=None):...\n",
"_include_filters(self)\n",
"self.list_columns = dict()\n",
"self.list_properties = dict()\n",
"self.session = session\n",
"for prop in sa.orm.class_mapper(obj).iterate_properties:\n",
"if type(prop) != SynonymProperty:\n",
"for col_name in obj.__mapper__.columns.keys():\n",
"self.list_properties[prop.key] = prop\n",
"if col_name in self.list_properties:\n",
"super(SQLAInterface, self).__init__(obj)\n",
"self.list_columns[col_name] = obj.__mapper__.columns[col_name]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"For",
"Assign'",
"Condition",
"Expr'",
"Assign'"
] |
[
"def FUNC_19(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.config['cluster_type']:\n",
"VAR_26 = self.config['cluster']._get_nodes()\n",
"self.log_debug('Node list: %s' % VAR_26)\n",
"return VAR_26\n"
] | [
"def get_nodes_from_cluster(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.config['cluster_type']:\n",
"nodes = self.config['cluster']._get_nodes()\n",
"self.log_debug('Node list: %s' % nodes)\n",
"return nodes\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_5(self):...\n",
"self._check_duplicate_key(VAR_0)\n"
] | [
"def test_rsa_duplicate_key(self):...\n",
"self._check_duplicate_key(RSA_PUBKEY)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_10(self, VAR_11):...\n",
"VAR_11.update({'source': VAR_11.get('source') or self.metadata('source'),\n 'source_url': VAR_11.get('source_url') or self.metadata('source_url'),\n 'license': VAR_11.get('license') or self.metadata('license'),\n 'license_url': VAR_11.get('license_url') or self.metadata('license_url'\n ), 'about': VAR_11.get('about') or self.metadata('about'), 'about_url':\n VAR_11.get('about_url') or self.metadata('about_url')})\n"
] | [
"def update_with_inherited_metadata(self, metadata):...\n",
"metadata.update({'source': metadata.get('source') or self.metadata('source'\n ), 'source_url': metadata.get('source_url') or self.metadata(\n 'source_url'), 'license': metadata.get('license') or self.metadata(\n 'license'), 'license_url': metadata.get('license_url') or self.metadata\n ('license_url'), 'about': metadata.get('about') or self.metadata(\n 'about'), 'about_url': metadata.get('about_url') or self.metadata(\n 'about_url')})\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_15(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.navloc.shutdown(self.rate)\n",
"Tester.shutdown(self)\n"
] | [
"def shutdown(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.navloc.shutdown(self.rate)\n",
"Tester.shutdown(self)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'"
] |
[
"def FUNC_10(VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_8 = FUNC_3(VAR_2)['id']\n",
"VAR_9 = [1, VAR_8]\n",
"VAR_1.execute('UPDATE files SET verified=%s WHERE id=%s;', VAR_9)\n",
"VAR_0.commit()\n",
"return\n"
] | [
"def db_verify_file_integrity(path):...\n",
"\"\"\"docstring\"\"\"\n",
"file_id = db_get_file_details(path)['id']\n",
"params = [1, file_id]\n",
"cur.execute('UPDATE files SET verified=%s WHERE id=%s;', params)\n",
"db.commit()\n",
"return\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_28(VAR_38):...\n",
"return [item for item in VAR_10 if item['command'] == VAR_38]\n"
] | [
"def get_command(command_name):...\n",
"return [item for item in commands if item['command'] == command_name]\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"import psycopg2\n",
"def FUNC_0(VAR_0, VAR_1, VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_4 = 'string'.format(VAR_1, VAR_2)\n",
"VAR_6 = FUNC_3(VAR_0, VAR_4)\n",
"if VAR_6 is not None:\n",
"VAR_8 = open('top_articles_report.txt', 'w')\n",
"return False\n",
"for row in VAR_6:\n",
"VAR_8.write('\"{}\" - {} views \\n'.format(row[0], row[1]))\n",
"VAR_8.close()\n",
"return True\n"
] | [
"import psycopg2\n",
"def get_top_articles(cur, order, limit):...\n",
"\"\"\"docstring\"\"\"\n",
"query = (\n \"\"\"SELECT articles.title, COUNT(*) as views\n FROM log, articles\n WHERE log.path LIKE '%'||articles.slug AND\n log.method = 'GET'\n GROUP BY articles.title\n ORDER BY views {}\n LIMIT {}\"\"\"\n .format(order, limit))\n",
"rows = get_data(cur, query)\n",
"if rows is not None:\n",
"file = open('top_articles_report.txt', 'w')\n",
"return False\n",
"for row in rows:\n",
"file.write('\"{}\" - {} views \\n'.format(row[0], row[1]))\n",
"file.close()\n",
"return True\n"
] | [
0,
0,
0,
4,
4,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"For",
"Expr'",
"Expr'",
"Return'"
] |
[
"\"\"\"Simple Markdown browser for a Git checkout.\"\"\"\n",
"from __future__ import print_function\n",
"import SimpleHTTPServer\n",
"import SocketServer\n",
"import argparse\n",
"import codecs\n",
"import os\n",
"import re\n",
"import socket\n",
"import sys\n",
"import threading\n",
"import time\n",
"import webbrowser\n",
"from xml.etree import ElementTree\n",
"VAR_0 = os.path.realpath(os.path.dirname(__file__))\n",
"VAR_1 = os.path.dirname(os.path.dirname(VAR_0))\n",
"sys.path.insert(0, os.path.join(VAR_1, 'third_party', 'Python-Markdown'))\n",
"import markdown\n",
"def FUNC_0(VAR_2):...\n",
"VAR_6 = argparse.ArgumentParser(prog='md_browser')\n",
"VAR_6.add_argument('-p', '--port', type=int, default=8080, help=\n 'port to run on (default = %(default)s)')\n",
"VAR_6.add_argument('-d', '--directory', type=str, default=SRC_DIR)\n",
"VAR_6.add_argument('-e', '--external', action='store_true', help=\n 'whether to bind to external port')\n",
"VAR_6.add_argument('file', nargs='?', help='open file in browser')\n",
"VAR_7 = VAR_6.parse_args(VAR_2)\n",
"VAR_8 = os.path.realpath(VAR_7.directory)\n",
"VAR_9 = '0.0.0.0' if VAR_7.external else 'localhost'\n",
"VAR_10 = VAR_9, VAR_7.port\n",
"VAR_11 = CLASS_0(VAR_10, VAR_8)\n",
"VAR_12 = 'http://' + VAR_9\n",
"if VAR_7.port != 80:\n",
"VAR_12 += ':%s' % VAR_7.port\n",
"print('Listening on %s/' % VAR_12)\n",
"VAR_13 = None\n",
"if VAR_7.file:\n",
"VAR_15 = os.path.realpath(VAR_7.file)\n",
"if os.path.isfile(os.path.join(VAR_8, 'docs', 'README.md')):\n",
"if not VAR_15.startswith(VAR_8):\n",
"print(' Try loading %s/docs/README.md' % VAR_12)\n",
"if os.path.isfile(os.path.join(VAR_7.directory, 'README.md')):\n",
"print('%s is not under %s' % (VAR_7.file, VAR_7.directory))\n",
"VAR_25 = os.path.relpath(VAR_15, VAR_8)\n",
"VAR_14 = 1\n",
"print(' Try loading %s/README.md' % VAR_12)\n",
"return 1\n",
"VAR_3 = '%s/%s' % (VAR_12, VAR_25)\n",
"VAR_11.serve_forever()\n",
"VAR_14 = 130\n",
"VAR_11.shutdown()\n",
"print('Opening %s' % VAR_3)\n",
"print('Exception raised: %s' % str(e))\n",
"if VAR_13:\n",
"VAR_13 = threading.Thread(target=_open_url, VAR_7=(url,))\n",
"VAR_13.join()\n",
"return VAR_14\n",
"VAR_13.start()\n"
] | [
"\"\"\"Simple Markdown browser for a Git checkout.\"\"\"\n",
"from __future__ import print_function\n",
"import SimpleHTTPServer\n",
"import SocketServer\n",
"import argparse\n",
"import codecs\n",
"import os\n",
"import re\n",
"import socket\n",
"import sys\n",
"import threading\n",
"import time\n",
"import webbrowser\n",
"from xml.etree import ElementTree\n",
"THIS_DIR = os.path.realpath(os.path.dirname(__file__))\n",
"SRC_DIR = os.path.dirname(os.path.dirname(THIS_DIR))\n",
"sys.path.insert(0, os.path.join(SRC_DIR, 'third_party', 'Python-Markdown'))\n",
"import markdown\n",
"def main(argv):...\n",
"parser = argparse.ArgumentParser(prog='md_browser')\n",
"parser.add_argument('-p', '--port', type=int, default=8080, help=\n 'port to run on (default = %(default)s)')\n",
"parser.add_argument('-d', '--directory', type=str, default=SRC_DIR)\n",
"parser.add_argument('-e', '--external', action='store_true', help=\n 'whether to bind to external port')\n",
"parser.add_argument('file', nargs='?', help='open file in browser')\n",
"args = parser.parse_args(argv)\n",
"top_level = os.path.realpath(args.directory)\n",
"hostname = '0.0.0.0' if args.external else 'localhost'\n",
"server_address = hostname, args.port\n",
"s = Server(server_address, top_level)\n",
"origin = 'http://' + hostname\n",
"if args.port != 80:\n",
"origin += ':%s' % args.port\n",
"print('Listening on %s/' % origin)\n",
"thread = None\n",
"if args.file:\n",
"path = os.path.realpath(args.file)\n",
"if os.path.isfile(os.path.join(top_level, 'docs', 'README.md')):\n",
"if not path.startswith(top_level):\n",
"print(' Try loading %s/docs/README.md' % origin)\n",
"if os.path.isfile(os.path.join(args.directory, 'README.md')):\n",
"print('%s is not under %s' % (args.file, args.directory))\n",
"rpath = os.path.relpath(path, top_level)\n",
"retcode = 1\n",
"print(' Try loading %s/README.md' % origin)\n",
"return 1\n",
"url = '%s/%s' % (origin, rpath)\n",
"s.serve_forever()\n",
"retcode = 130\n",
"s.shutdown()\n",
"print('Opening %s' % url)\n",
"print('Exception raised: %s' % str(e))\n",
"if thread:\n",
"thread = threading.Thread(target=_open_url, args=(url,))\n",
"thread.join()\n",
"return retcode\n",
"thread.start()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"Assign'",
"Assign'",
"Expr'",
"Import'",
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"AugAssign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Return'",
"Expr'"
] |
[
"@eqlx.with_timeout...\n",
"time.sleep(1)\n"
] | [
"@eqlx.with_timeout...\n",
"time.sleep(1)\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"def FUNC_21(VAR_38, VAR_20):...\n",
"VAR_63, VAR_64 = os.path.split(VAR_20)\n",
"FUNC_19(VAR_63)\n",
"shutil.copy2(VAR_38, VAR_20)\n"
] | [
"def cp(src, dst):...\n",
"r, f = os.path.split(dst)\n",
"mkdir_p(r)\n",
"shutil.copy2(src, dst)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def __repr__(self):...\n",
"return 'FlattenedColumn({}, {}, {})'.format(repr(self.base_name), repr(self\n .name), repr(self.type))\n"
] | [
"def __repr__(self):...\n",
"return 'FlattenedColumn({}, {}, {})'.format(repr(self.base_name), repr(self\n .name), repr(self.type))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@classmethod...\n",
"super(CLASS_0, VAR_1).setUpClass()\n",
"if VAR_1.manager.clients_initialized:\n",
"VAR_1.tenant_id = VAR_1.manager._get_identity_client(VAR_1.config.identity.\n admin_username, VAR_1.config.identity.admin_password, VAR_1.config.\n identity.admin_tenant_name).tenant_id\n",
"VAR_1.keypairs = {}\n",
"VAR_1.security_groups = {}\n",
"VAR_1.network = []\n",
"VAR_1.servers = []\n",
"VAR_1.floating_ips = []\n"
] | [
"@classmethod...\n",
"super(TestNovaNetwork, cls).setUpClass()\n",
"if cls.manager.clients_initialized:\n",
"cls.tenant_id = cls.manager._get_identity_client(cls.config.identity.\n admin_username, cls.config.identity.admin_password, cls.config.identity\n .admin_tenant_name).tenant_id\n",
"cls.keypairs = {}\n",
"cls.security_groups = {}\n",
"cls.network = []\n",
"cls.servers = []\n",
"cls.floating_ips = []\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return gate.new_agent(VAR_1)\n"
] | [
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return gate.new_agent(bindings)\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_0():...\n",
"import socket\n",
"import getpass\n",
"return '%s in %s on %s' % (ansi.foreground(ansi.GREEN, env.project), ansi.\n foreground(env.color, env.name), ansi.foreground(ansi.CYAN, getpass.\n getuser() + '@' + socket.gethostname()))\n"
] | [
"def banner():...\n",
"import socket\n",
"import getpass\n",
"return '%s in %s on %s' % (ansi.foreground(ansi.GREEN, env.project), ansi.\n foreground(env.color, env.name), ansi.foreground(ansi.CYAN, getpass.\n getuser() + '@' + socket.gethostname()))\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Import'",
"Import'",
"Return'"
] |
[
"def FUNC_11(VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10 = VAR_3.user\n",
"VAR_33 = [(unicode(year), unicode(year)) for year in UserProfile.VALID_YEARS]\n",
"VAR_26 = FUNC_8(VAR_10)\n",
"VAR_1.exception('Error fetching order history from Otto.')\n",
"VAR_5 = {'auth': {}, 'duplicate_provider': None, 'nav_hidden': True,\n 'fields': {'country': {'options': list(countries)}, 'gender': {\n 'options': [(choice[0], _(choice[1])) for choice in UserProfile.\n GENDER_CHOICES]}, 'language': {'options': released_languages()},\n 'level_of_education': {'options': [(choice[0], _(choice[1])) for choice in\n UserProfile.LEVEL_OF_EDUCATION_CHOICES]}, 'password': {'url': reverse(\n 'password_reset')}, 'year_of_birth': {'options': VAR_33},\n 'preferred_language': {'options': all_languages()}, 'time_zone': {\n 'options': TIME_ZONE_CHOICES}}, 'platform_name': configuration_helpers.\n get_value('PLATFORM_NAME', settings.PLATFORM_NAME),\n 'password_reset_support_link': configuration_helpers.get_value(\n 'PASSWORD_RESET_SUPPORT_LINK', settings.PASSWORD_RESET_SUPPORT_LINK) or\n settings.SUPPORT_SITE_LINK, 'user_accounts_api_url': reverse(\n 'accounts_api', kwargs={'username': user.username}),\n 'user_preferences_api_url': reverse('preferences_api', kwargs={\n 'username': user.username}), 'disable_courseware_js': True,\n 'show_program_listing': ProgramsApiConfig.is_enabled(), 'order_history':\n VAR_26}\n",
"VAR_26 = []\n",
"if third_party_auth.is_enabled():\n",
"VAR_5['duplicate_provider'] = pipeline.get_duplicate_provider(messages.\n get_messages(VAR_3))\n",
"return VAR_5\n",
"VAR_37 = pipeline.get_provider_user_states(VAR_10)\n",
"VAR_5['auth']['providers'] = [{'id': state.provider.provider_id, 'name':\n state.provider.name, 'connected': state.has_account, 'connect_url':\n pipeline.get_login_url(state.provider.provider_id, pipeline.\n AUTH_ENTRY_ACCOUNT_SETTINGS, redirect_url=reverse('account_settings')),\n 'accepts_logins': state.provider.accepts_logins, 'disconnect_url':\n pipeline.get_disconnect_url(state.provider.provider_id, state.\n association_id)} for state in VAR_37 if state.provider.\n display_for_login or state.has_account]\n"
] | [
"def account_settings_context(request):...\n",
"\"\"\"docstring\"\"\"\n",
"user = request.user\n",
"year_of_birth_options = [(unicode(year), unicode(year)) for year in\n UserProfile.VALID_YEARS]\n",
"user_orders = get_user_orders(user)\n",
"log.exception('Error fetching order history from Otto.')\n",
"context = {'auth': {}, 'duplicate_provider': None, 'nav_hidden': True,\n 'fields': {'country': {'options': list(countries)}, 'gender': {\n 'options': [(choice[0], _(choice[1])) for choice in UserProfile.\n GENDER_CHOICES]}, 'language': {'options': released_languages()},\n 'level_of_education': {'options': [(choice[0], _(choice[1])) for choice in\n UserProfile.LEVEL_OF_EDUCATION_CHOICES]}, 'password': {'url': reverse(\n 'password_reset')}, 'year_of_birth': {'options': year_of_birth_options},\n 'preferred_language': {'options': all_languages()}, 'time_zone': {\n 'options': TIME_ZONE_CHOICES}}, 'platform_name': configuration_helpers.\n get_value('PLATFORM_NAME', settings.PLATFORM_NAME),\n 'password_reset_support_link': configuration_helpers.get_value(\n 'PASSWORD_RESET_SUPPORT_LINK', settings.PASSWORD_RESET_SUPPORT_LINK) or\n settings.SUPPORT_SITE_LINK, 'user_accounts_api_url': reverse(\n 'accounts_api', kwargs={'username': user.username}),\n 'user_preferences_api_url': reverse('preferences_api', kwargs={\n 'username': user.username}), 'disable_courseware_js': True,\n 'show_program_listing': ProgramsApiConfig.is_enabled(), 'order_history':\n user_orders}\n",
"user_orders = []\n",
"if third_party_auth.is_enabled():\n",
"context['duplicate_provider'] = pipeline.get_duplicate_provider(messages.\n get_messages(request))\n",
"return context\n",
"auth_states = pipeline.get_provider_user_states(user)\n",
"context['auth']['providers'] = [{'id': state.provider.provider_id, 'name':\n state.provider.name, 'connected': state.has_account, 'connect_url':\n pipeline.get_login_url(state.provider.provider_id, pipeline.\n AUTH_ENTRY_ACCOUNT_SETTINGS, redirect_url=reverse('account_settings')),\n 'accepts_logins': state.provider.accepts_logins, 'disconnect_url':\n pipeline.get_disconnect_url(state.provider.provider_id, state.\n association_id)} for state in auth_states if state.provider.\n display_for_login or state.has_account]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'"
] |
[
"def __iter__(self):...\n",
"return self.order.__iter__()\n"
] | [
"def __iter__(self):...\n",
"return self.order.__iter__()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_2(self, VAR_1):...\n",
"VAR_2 = self.connect()\n",
"VAR_3 = \"INSERT INTO crimes (description) VALUES('{}');\".format(VAR_1)\n",
"VAR_2.close()\n",
"cursor.execute(VAR_3)\n",
"VAR_2.commit()\n"
] | [
"def add_input(self, data):...\n",
"connection = self.connect()\n",
"query = \"INSERT INTO crimes (description) VALUES('{}');\".format(data)\n",
"connection.close()\n",
"cursor.execute(query)\n",
"connection.commit()\n"
] | [
0,
0,
4,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"from flask import Blueprint, request, make_response, jsonify\n",
"from flask.views import MethodView\n",
"from ...models import Table\n",
"from ....utils import jwt_required\n",
"VAR_0 = Blueprint('comments', __name__)\n",
"\"\"\" Update Instance api resource \"\"\"\n",
"@jwt_required...\n",
"VAR_3 = request.get_json(force=True)\n",
"VAR_4 = Table.save(VAR_2, data=post_data)\n",
"if VAR_4:\n",
"VAR_5 = {'status': 'success', 'message': 'Your comment was successful'}\n",
"VAR_5 = {'status': 'fail', 'message': 'Some error occurred. Please try again.'}\n",
"return make_response(jsonify(VAR_5)), 201\n",
"return make_response(jsonify(VAR_5)), 400\n"
] | [
"from flask import Blueprint, request, make_response, jsonify\n",
"from flask.views import MethodView\n",
"from ...models import Table\n",
"from ....utils import jwt_required\n",
"comments_blueprint = Blueprint('comments', __name__)\n",
"\"\"\" Update Instance api resource \"\"\"\n",
"@jwt_required...\n",
"post_data = request.get_json(force=True)\n",
"response = Table.save(answer_id, data=post_data)\n",
"if response:\n",
"response_object = {'status': 'success', 'message':\n 'Your comment was successful'}\n",
"response_object = {'status': 'fail', 'message':\n 'Some error occurred. Please try again.'}\n",
"return make_response(jsonify(response_object)), 201\n",
"return make_response(jsonify(response_object)), 400\n"
] | [
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_0(self, VAR_1, VAR_6, VAR_7, VAR_3):...\n",
"VAR_12 = []\n",
"if VAR_7 is None:\n",
"VAR_7 = ''\n",
"for choice in ([''] + VAR_3.values):\n",
"VAR_18 = get_message(VAR_1, 'attribute_value', choice)\n",
"return '<select name=\"%s\">%s</select>' % (html_escape(VAR_6), ''.join(VAR_12))\n",
"VAR_19 = html_escape(VAR_18 or _('(unspecified)'))\n",
"VAR_17 = VAR_7 == choice and 'selected' or ''\n",
"VAR_12.append('<option value=\"%s\" %s>%s</option>' % (choice, VAR_17, VAR_19))\n"
] | [
"def make_input(self, version, name, value, attribute):...\n",
"options = []\n",
"if value is None:\n",
"value = ''\n",
"for choice in ([''] + attribute.values):\n",
"message = get_message(version, 'attribute_value', choice)\n",
"return '<select name=\"%s\">%s</select>' % (html_escape(name), ''.join(options))\n",
"title = html_escape(message or _('(unspecified)'))\n",
"selected = value == choice and 'selected' or ''\n",
"options.append('<option value=\"%s\" %s>%s</option>' % (choice, selected, title))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"For",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_42(self, VAR_12):...\n",
"VAR_39 = rfm.RegressionTest()\n",
"VAR_39.name = VAR_12\n",
"VAR_39.valid_systems = ['*']\n",
"VAR_39.valid_prog_environs = ['*']\n",
"VAR_39.executable = 'echo'\n",
"VAR_39.executable_opts = [VAR_12]\n",
"return VAR_39\n"
] | [
"def create_test(self, name):...\n",
"test = rfm.RegressionTest()\n",
"test.name = name\n",
"test.valid_systems = ['*']\n",
"test.valid_prog_environs = ['*']\n",
"test.executable = 'echo'\n",
"test.executable_opts = [name]\n",
"return test\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_4(self, VAR_11='', VAR_12=None, VAR_13=None, VAR_6=None):...\n",
"VAR_5 = self.session.query(self.obj)\n",
"VAR_5 = self._get_base_query(VAR_5=query, VAR_6=filters)\n",
"VAR_25 = VAR_5.all()\n",
"VAR_26 = GroupByCol(VAR_11, 'Group by')\n",
"return VAR_26.apply(VAR_25)\n"
] | [
"def query_simple_group(self, group_by='', aggregate_func=None,...\n",
"query = self.session.query(self.obj)\n",
"query = self._get_base_query(query=query, filters=filters)\n",
"query_result = query.all()\n",
"group = GroupByCol(group_by, 'Group by')\n",
"return group.apply(query_result)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_16(self, VAR_2, VAR_3, VAR_4, *VAR_5):...\n",
"self.write_data({'type': 'market_bid_timeout', 'event': VAR_5[0]})\n"
] | [
"def on_market_bid_timeout(self, subject, changetype, objectID, *args):...\n",
"self.write_data({'type': 'market_bid_timeout', 'event': args[0]})\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_15(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.opt:\n",
"self.parse_args(VAR_16=False)\n",
"VAR_35 = {}\n",
"for VAR_39, VAR_0 in self.opt.items():\n",
"VAR_35[VAR_38(VAR_39)] = VAR_38(VAR_0)\n",
"for group in self._action_groups:\n",
"VAR_40 = {a.dest: getattr(self.args, a.dest, None) for a in group.\n _group_actions}\n",
"VAR_14 = argparse.Namespace(**group_dict)\n",
"VAR_41 = 0\n",
"for VAR_39 in VAR_14.__dict__:\n",
"if VAR_39 in VAR_35:\n",
"if VAR_41 == 0:\n",
"print('[ ' + group.title + ': ] ')\n",
"VAR_41 += 1\n",
"print('[ ' + VAR_39 + ': ' + VAR_35[VAR_39] + ' ]')\n"
] | [
"def print_args(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.opt:\n",
"self.parse_args(print_args=False)\n",
"values = {}\n",
"for key, value in self.opt.items():\n",
"values[str(key)] = str(value)\n",
"for group in self._action_groups:\n",
"group_dict = {a.dest: getattr(self.args, a.dest, None) for a in group.\n _group_actions}\n",
"namespace = argparse.Namespace(**group_dict)\n",
"count = 0\n",
"for key in namespace.__dict__:\n",
"if key in values:\n",
"if count == 0:\n",
"print('[ ' + group.title + ': ] ')\n",
"count += 1\n",
"print('[ ' + key + ': ' + values[key] + ' ]')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"Assign'",
"For",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Condition",
"Expr'",
"AugAssign'",
"Expr'"
] |
[
"def FUNC_1(self):...\n",
"VAR_4 = self.redis_client.pubsub()\n",
"VAR_4.subscribe('__keyspace@0__:Exports')\n",
"VAR_5 = 0\n",
"VAR_9 = self.redis_client.lrange('Exports', 0, -1)\n",
"for VAR_2 in VAR_9:\n",
"VAR_5 += 1\n",
"for msg in VAR_4.listen():\n",
"self._process_key(VAR_2)\n",
"if msg['type'] == 'subscribe':\n",
"assert msg['data'] == b'rpush'\n",
"VAR_12 = self.redis_client.llen('Exports')\n",
"assert VAR_12 >= VAR_5\n",
"for i in range(VAR_5, VAR_12):\n",
"VAR_5 += 1\n",
"VAR_2 = self.redis_client.lindex('Exports', i)\n",
"self._process_key(VAR_2)\n"
] | [
"def _run(self):...\n",
"import_pubsub_client = self.redis_client.pubsub()\n",
"import_pubsub_client.subscribe('__keyspace@0__:Exports')\n",
"num_imported = 0\n",
"export_keys = self.redis_client.lrange('Exports', 0, -1)\n",
"for key in export_keys:\n",
"num_imported += 1\n",
"for msg in import_pubsub_client.listen():\n",
"self._process_key(key)\n",
"if msg['type'] == 'subscribe':\n",
"assert msg['data'] == b'rpush'\n",
"num_imports = self.redis_client.llen('Exports')\n",
"assert num_imports >= num_imported\n",
"for i in range(num_imported, num_imports):\n",
"num_imported += 1\n",
"key = self.redis_client.lindex('Exports', i)\n",
"self._process_key(key)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
6,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"AugAssign'",
"For",
"Expr'",
"Condition",
"Assert'",
"Assign'",
"Assert'",
"For",
"AugAssign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_5():...\n",
"\"\"\"docstring\"\"\"\n",
"print('On which days did more than 1% of requests lead to errors?\\n')\n",
"VAR_8 = '%s - % 6.2f%% errors\\n'\n",
"VAR_7 = ''.join(VAR_8 % (logdate, err_pct) for logdate, err_pct in FUNC_2())\n",
"print(VAR_7)\n"
] | [
"def print_top_errors():...\n",
"\"\"\"docstring\"\"\"\n",
"print('On which days did more than 1% of requests lead to errors?\\n')\n",
"top_authors = '%s - % 6.2f%% errors\\n'\n",
"results = ''.join(top_authors % (logdate, err_pct) for logdate, err_pct in\n get_most_error_day())\n",
"print(results)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_4(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = FUNC_0()\n",
"VAR_4 = VAR_3.cursor()\n",
"VAR_4.execute(\"INSERT INTO players (playerName) VALUES ('{}')\".format(re.\n sub(\"\\\\'\", '', VAR_0)))\n",
"VAR_3.commit()\n",
"VAR_3.close()\n"
] | [
"def registerPlayer(name):...\n",
"\"\"\"docstring\"\"\"\n",
"conn = connect()\n",
"c = conn.cursor()\n",
"c.execute(\"INSERT INTO players (playerName) VALUES ('{}')\".format(re.sub(\n \"\\\\'\", '', name)))\n",
"conn.commit()\n",
"conn.close()\n"
] | [
0,
0,
0,
0,
4,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = {'TEST_APP': 'awskatotest' + CLASS_0.DEFAULT_TEST_ID}\n",
"return st.ScenarioTestRunner.main(CLASS_0, default_binding_overrides=\n defaults, test_case_list=[AwsKatoIntegrationTest])\n"
] | [
"def main():...\n",
"\"\"\"docstring\"\"\"\n",
"defaults = {'TEST_APP': 'awskatotest' + AwsKatoTestScenario.DEFAULT_TEST_ID}\n",
"return st.ScenarioTestRunner.main(AwsKatoTestScenario,\n default_binding_overrides=defaults, test_case_list=[AwsKatoIntegrationTest]\n )\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_23(self, VAR_19):...\n",
"if isinstance(VAR_19, Failure):\n",
"VAR_42 = VAR_19.type\n",
"VAR_42, VAR_43, VAR_44 = sys.exc_info()\n",
"VAR_43 = VAR_19.value\n",
"if isinstance(VAR_19, (HTTPError, HTTPAuthenticationRequired)):\n",
"VAR_44 = VAR_19.getTracebackObject()\n",
"if GLSetting.http_log and VAR_19.log_message:\n",
"log.err('Uncaught exception %s %s %s' % (VAR_42, VAR_43, VAR_44))\n",
"VAR_19 = VAR_19.value\n",
"VAR_52 = '%d %s: ' + VAR_19.log_message\n",
"if VAR_19.status_code not in httplib.responses:\n",
"if GLSetting.http_log:\n",
"VAR_12 = [VAR_19.status_code, self._request_summary()] + list(VAR_19.args)\n",
"log.msg('Bad HTTP status code: %d' % VAR_19.status_code)\n",
"return self.send_error(VAR_19.status_code, VAR_24=e)\n",
"log.msg(VAR_19)\n",
"mail_exception(VAR_42, VAR_43, VAR_44)\n",
"VAR_53 = lambda *VAR_12: VAR_52 % VAR_12\n",
"return self.send_error(500, VAR_24=e)\n",
"return self.send_error(500, VAR_24=e)\n",
"log.msg(VAR_53(*VAR_12))\n"
] | [
"def _handle_request_exception(self, e):...\n",
"if isinstance(e, Failure):\n",
"exc_type = e.type\n",
"exc_type, exc_value, exc_tb = sys.exc_info()\n",
"exc_value = e.value\n",
"if isinstance(e, (HTTPError, HTTPAuthenticationRequired)):\n",
"exc_tb = e.getTracebackObject()\n",
"if GLSetting.http_log and e.log_message:\n",
"log.err('Uncaught exception %s %s %s' % (exc_type, exc_value, exc_tb))\n",
"e = e.value\n",
"string_format = '%d %s: ' + e.log_message\n",
"if e.status_code not in httplib.responses:\n",
"if GLSetting.http_log:\n",
"args = [e.status_code, self._request_summary()] + list(e.args)\n",
"log.msg('Bad HTTP status code: %d' % e.status_code)\n",
"return self.send_error(e.status_code, exception=e)\n",
"log.msg(e)\n",
"mail_exception(exc_type, exc_value, exc_tb)\n",
"msg = lambda *args: string_format % args\n",
"return self.send_error(500, exception=e)\n",
"return self.send_error(500, exception=e)\n",
"log.msg(msg(*args))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Return'",
"Expr'",
"Expr'",
"Assign'",
"Return'",
"Return'",
"Expr'"
] |
[
"def FUNC_2(self):...\n",
"self.clear_cookie('username')\n",
"self.redirect('/', permanent=true)\n"
] | [
"def get(self):...\n",
"self.clear_cookie('username')\n",
"self.redirect('/', permanent=true)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"@auth.require(acl.is_bot)...\n",
"VAR_4 = self.parse_body()\n",
"VAR_38 = FUNC_2(self.ACCEPTED_KEYS, self.REQUIRED_KEYS, VAR_4, self.request,\n 'bot', 'keys')\n",
"if VAR_38:\n",
"self.abort_with_error(400, error=msg)\n",
"VAR_14 = VAR_4['id']\n",
"VAR_39 = VAR_4['cost_usd']\n",
"VAR_17 = VAR_4['task_id']\n",
"VAR_40 = VAR_4.get('bot_overhead')\n",
"VAR_41 = VAR_4.get('duration')\n",
"VAR_42 = VAR_4.get('exit_code')\n",
"VAR_43 = VAR_4.get('hard_timeout')\n",
"VAR_44 = VAR_4.get('io_timeout')\n",
"VAR_45 = VAR_4.get('isolated_stats')\n",
"VAR_46 = VAR_4.get('output')\n",
"VAR_47 = VAR_4.get('output_chunk_start')\n",
"VAR_48 = VAR_4.get('outputs_ref')\n",
"if bool(VAR_45) != (VAR_40 is not None):\n",
"ereporter2.log_request(VAR_4=self.request, VAR_5='server', category=\n 'task_failure', VAR_8='Failed to update task: %s' % task_id)\n",
"VAR_13 = task_pack.unpack_run_result_key(VAR_17)\n",
"self.abort_with_error(400, error=\n \"\"\"Both bot_overhead and isolated_stats must be set simultaneously\nbot_overhead: %s\nisolated_stats: %s\"\"\"\n % (bot_overhead, isolated_stats))\n",
"VAR_49 = None\n",
"if VAR_45:\n",
"VAR_55 = VAR_45['download']\n",
"if VAR_46 is not None:\n",
"VAR_56 = VAR_45['upload']\n",
"if VAR_48:\n",
"VAR_46 = base64.b64decode(VAR_46)\n",
"logging.error(\"\"\"Failed to decode output\n%s\n%r\"\"\", e, VAR_46)\n",
"VAR_49 = task_result.PerformanceStats(VAR_40=bot_overhead,\n isolated_download=task_result.IsolatedOperation(duration=download[\n 'duration'], initial_number_items=download['initial_number_items'],\n initial_size=download['initial_size'], items_cold=base64.b64decode(\n download['items_cold']), items_hot=base64.b64decode(download[\n 'items_hot'])), isolated_upload=task_result.IsolatedOperation(duration=\n upload['duration'], items_cold=base64.b64decode(upload['items_cold']),\n items_hot=base64.b64decode(upload['items_hot'])))\n",
"VAR_48 = task_request.FilesRef(**outputs_ref)\n",
"VAR_25 = task_scheduler.bot_update_task(VAR_13=run_result_key, VAR_14=\n bot_id, VAR_46=output, VAR_47=output_chunk_start, VAR_42=exit_code,\n VAR_41=duration, VAR_43=hard_timeout, VAR_44=io_timeout, VAR_39=\n cost_usd, VAR_48=outputs_ref, VAR_49=performance_stats)\n",
"ereporter2.log_request(VAR_4=self.request, VAR_5='server', category=\n 'task_failure', VAR_8='Failed to update task: %s' % e)\n",
"self.send_response({'ok': True})\n",
"VAR_46 = VAR_46.encode('ascii', 'replace')\n",
"if not VAR_25:\n",
"self.abort_with_error(400, error=str(e))\n",
"logging.error(\"\"\"Failed to decode output\n%s\n%r\"\"\", e, VAR_46)\n",
"logging.info('Failed to update, please retry')\n",
"if VAR_25 in (task_result.State.COMPLETED, task_result.State.TIMED_OUT):\n",
"logging.exception('Internal error: %s', e)\n",
"self.abort_with_error(500, error='Failed to update, please retry')\n",
"VAR_30 = 'task_completed'\n",
"assert VAR_25 == task_result.State.RUNNING, VAR_25\n",
"self.abort_with_error(500, error=str(e))\n",
"bot_management.bot_event(VAR_31=action, VAR_14=bot_id, external_ip=self.\n request.remote_addr, VAR_24=None, VAR_25=None, VAR_9=None, VAR_16=None,\n VAR_17=task_id, VAR_32=None)\n",
"VAR_30 = 'task_update'\n"
] | [
"@auth.require(acl.is_bot)...\n",
"request = self.parse_body()\n",
"msg = log_unexpected_subset_keys(self.ACCEPTED_KEYS, self.REQUIRED_KEYS,\n request, self.request, 'bot', 'keys')\n",
"if msg:\n",
"self.abort_with_error(400, error=msg)\n",
"bot_id = request['id']\n",
"cost_usd = request['cost_usd']\n",
"task_id = request['task_id']\n",
"bot_overhead = request.get('bot_overhead')\n",
"duration = request.get('duration')\n",
"exit_code = request.get('exit_code')\n",
"hard_timeout = request.get('hard_timeout')\n",
"io_timeout = request.get('io_timeout')\n",
"isolated_stats = request.get('isolated_stats')\n",
"output = request.get('output')\n",
"output_chunk_start = request.get('output_chunk_start')\n",
"outputs_ref = request.get('outputs_ref')\n",
"if bool(isolated_stats) != (bot_overhead is not None):\n",
"ereporter2.log_request(request=self.request, source='server', category=\n 'task_failure', message='Failed to update task: %s' % task_id)\n",
"run_result_key = task_pack.unpack_run_result_key(task_id)\n",
"self.abort_with_error(400, error=\n \"\"\"Both bot_overhead and isolated_stats must be set simultaneously\nbot_overhead: %s\nisolated_stats: %s\"\"\"\n % (bot_overhead, isolated_stats))\n",
"performance_stats = None\n",
"if isolated_stats:\n",
"download = isolated_stats['download']\n",
"if output is not None:\n",
"upload = isolated_stats['upload']\n",
"if outputs_ref:\n",
"output = base64.b64decode(output)\n",
"logging.error(\"\"\"Failed to decode output\n%s\n%r\"\"\", e, output)\n",
"performance_stats = task_result.PerformanceStats(bot_overhead=bot_overhead,\n isolated_download=task_result.IsolatedOperation(duration=download[\n 'duration'], initial_number_items=download['initial_number_items'],\n initial_size=download['initial_size'], items_cold=base64.b64decode(\n download['items_cold']), items_hot=base64.b64decode(download[\n 'items_hot'])), isolated_upload=task_result.IsolatedOperation(duration=\n upload['duration'], items_cold=base64.b64decode(upload['items_cold']),\n items_hot=base64.b64decode(upload['items_hot'])))\n",
"outputs_ref = task_request.FilesRef(**outputs_ref)\n",
"state = task_scheduler.bot_update_task(run_result_key=run_result_key,\n bot_id=bot_id, output=output, output_chunk_start=output_chunk_start,\n exit_code=exit_code, duration=duration, hard_timeout=hard_timeout,\n io_timeout=io_timeout, cost_usd=cost_usd, outputs_ref=outputs_ref,\n performance_stats=performance_stats)\n",
"ereporter2.log_request(request=self.request, source='server', category=\n 'task_failure', message='Failed to update task: %s' % e)\n",
"self.send_response({'ok': True})\n",
"output = output.encode('ascii', 'replace')\n",
"if not state:\n",
"self.abort_with_error(400, error=str(e))\n",
"logging.error(\"\"\"Failed to decode output\n%s\n%r\"\"\", e, output)\n",
"logging.info('Failed to update, please retry')\n",
"if state in (task_result.State.COMPLETED, task_result.State.TIMED_OUT):\n",
"logging.exception('Internal error: %s', e)\n",
"self.abort_with_error(500, error='Failed to update, please retry')\n",
"action = 'task_completed'\n",
"assert state == task_result.State.RUNNING, state\n",
"self.abort_with_error(500, error=str(e))\n",
"bot_management.bot_event(event_type=action, bot_id=bot_id, external_ip=self\n .request.remote_addr, dimensions=None, state=None, version=None,\n quarantined=None, task_id=task_id, task_name=None)\n",
"action = 'task_update'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assert'",
"Expr'",
"Expr'",
"Assign'"
] |
[
"@classmethod...\n",
"super(CLASS_0.Factory, VAR_12).register_options(VAR_13)\n",
"VAR_23 = '1.0.3'\n",
"VAR_24 = [Shader.exclude_package('scala', recursive=True), Shader.\n exclude_package('xsbt', recursive=True), Shader.exclude_package('xsbti',\n recursive=True), Shader.exclude_package('org.apache.logging.log4j',\n recursive=True)]\n",
"VAR_12.register_jvm_tool(VAR_13, CLASS_0.ZINC_COMPILER_TOOL_NAME, classpath\n =[JarDependency('org.pantsbuild', 'zinc-compiler_2.11', '0.0.7')], main\n =Zinc.ZINC_COMPILE_MAIN, custom_rules=shader_rules)\n",
"VAR_12.register_jvm_tool(VAR_13, 'compiler-bridge', classpath=[\n ScalaJarDependency(org='org.scala-sbt', name='compiler-bridge', rev=\n zinc_rev, classifier='sources', intransitive=True)])\n",
"VAR_12.register_jvm_tool(VAR_13, 'compiler-interface', classpath=[\n JarDependency(org='org.scala-sbt', name='compiler-interface', rev=\n zinc_rev)], main='no.such.main.Main', custom_rules=shader_rules)\n",
"VAR_12.register_jvm_tool(VAR_13, CLASS_0.ZINC_EXTRACTOR_TOOL_NAME,\n classpath=[JarDependency('org.pantsbuild', 'zinc-extractor_2.11', '0.0.4')]\n )\n"
] | [
"@classmethod...\n",
"super(Zinc.Factory, cls).register_options(register)\n",
"zinc_rev = '1.0.3'\n",
"shader_rules = [Shader.exclude_package('scala', recursive=True), Shader.\n exclude_package('xsbt', recursive=True), Shader.exclude_package('xsbti',\n recursive=True), Shader.exclude_package('org.apache.logging.log4j',\n recursive=True)]\n",
"cls.register_jvm_tool(register, Zinc.ZINC_COMPILER_TOOL_NAME, classpath=[\n JarDependency('org.pantsbuild', 'zinc-compiler_2.11', '0.0.7')], main=\n Zinc.ZINC_COMPILE_MAIN, custom_rules=shader_rules)\n",
"cls.register_jvm_tool(register, 'compiler-bridge', classpath=[\n ScalaJarDependency(org='org.scala-sbt', name='compiler-bridge', rev=\n zinc_rev, classifier='sources', intransitive=True)])\n",
"cls.register_jvm_tool(register, 'compiler-interface', classpath=[\n JarDependency(org='org.scala-sbt', name='compiler-interface', rev=\n zinc_rev)], main='no.such.main.Main', custom_rules=shader_rules)\n",
"cls.register_jvm_tool(register, Zinc.ZINC_EXTRACTOR_TOOL_NAME, classpath=[\n JarDependency('org.pantsbuild', 'zinc-extractor_2.11', '0.0.4')])\n"
] | [
0,
0,
0,
0,
7,
0,
0,
7
] | [
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"from threading import Thread, Lock\n",
"import logging\n",
"import sys\n",
"import time\n",
"import hyperion.lib.util.config as config\n",
"from os import system\n",
"from subprocess import call\n",
"from psutil import Process, NoSuchProcess\n",
"VAR_0 = sys.version[0] == '2'\n",
"if VAR_0:\n",
"import Queue as Queue\n",
"import queue as Queue\n",
"\"\"\"Abstract class that represents a component monitoring job (local or remote).\"\"\"\n",
"def __init__(self, VAR_1, VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"self.pid = VAR_1\n",
"self.comp_name = VAR_2\n",
"def FUNC_0(self):...\n",
"\"\"\"docstring\"\"\"\n",
"\"\"\"Class that represents a local component monitoring job.\"\"\"\n",
"def __init__(self, VAR_1, VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"super(CLASS_1, self).__init__(VAR_1, VAR_2)\n",
"def FUNC_0(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_8 = Process(self.pid)\n",
"return CLASS_4(self.comp_name)\n",
"if VAR_8.is_running():\n",
"return True\n"
] | [
"from threading import Thread, Lock\n",
"import logging\n",
"import sys\n",
"import time\n",
"import hyperion.lib.util.config as config\n",
"from os import system\n",
"from subprocess import call\n",
"from psutil import Process, NoSuchProcess\n",
"is_py2 = sys.version[0] == '2'\n",
"if is_py2:\n",
"import Queue as Queue\n",
"import queue as Queue\n",
"\"\"\"Abstract class that represents a component monitoring job (local or remote).\"\"\"\n",
"def __init__(self, pid, comp_name):...\n",
"\"\"\"docstring\"\"\"\n",
"self.pid = pid\n",
"self.comp_name = comp_name\n",
"def run_check(self):...\n",
"\"\"\"docstring\"\"\"\n",
"\"\"\"Class that represents a local component monitoring job.\"\"\"\n",
"def __init__(self, pid, comp_name):...\n",
"\"\"\"docstring\"\"\"\n",
"super(LocalComponentMonitoringJob, self).__init__(pid, comp_name)\n",
"def run_check(self):...\n",
"\"\"\"docstring\"\"\"\n",
"proc = Process(self.pid)\n",
"return CrashEvent(self.comp_name)\n",
"if proc.is_running():\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Condition",
"Import'",
"Import'",
"Expr'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Expr'",
"FunctionDef'",
"Docstring",
"Expr'",
"FunctionDef'",
"Docstring",
"Assign'",
"Return'",
"Condition",
"Return'"
] |
[
"import os\n",
"import sys\n",
"from curtin.util import ProcessExecutionError, get_architecture, install_packages, is_uefi_bootable, lsb_release, which\n",
"VAR_0 = [('import yaml', 'python-yaml', 'python3-yaml')]\n",
"VAR_1 = [('file', 'file'), ('lvcreate', 'lvm2'), ('mdadm', 'mdadm'), (\n 'mkfs.vfat', 'dosfstools'), ('mkfs.btrfs', 'btrfs-tools'), ('mkfs.ext4',\n 'e2fsprogs'), ('mkfs.xfs', 'xfsprogs'), ('partprobe', 'parted'), (\n 'sgdisk', 'gdisk'), ('udevadm', 'udev'), ('make-bcache', 'bcache-tools'\n ), ('iscsiadm', 'open-iscsi')]\n",
"if lsb_release()['codename'] == 'precise':\n",
"VAR_0.append(('import oauth.oauth', 'python-oauth', None))\n",
"VAR_0.append(('import oauthlib.oauth1', 'python-oauthlib', 'python3-oauthlib'))\n",
"if not is_uefi_bootable() and 'arm' in get_architecture():\n",
"VAR_1.append(('flash-kernel', 'flash-kernel'))\n",
"def __init__(self, VAR_5, VAR_12):...\n",
"self.message = VAR_5\n",
"if isinstance(VAR_12, str) or VAR_12 is None:\n",
"VAR_12 = [VAR_12]\n",
"self.deps = [d for d in VAR_12 if d is not None]\n",
"self.fatal = None in VAR_12\n",
"def __str__(self):...\n",
"if self.fatal:\n",
"if not len(self.deps):\n",
"return self.message + ' Install packages: %s' % ' '.join(self.deps)\n",
"return self.message + ' Unresolvable.'\n",
"return self.message + ' Unresolvable. Partially resolvable with packages: %s' % ' '.join(\n self.deps)\n"
] | [
"import os\n",
"import sys\n",
"from curtin.util import ProcessExecutionError, get_architecture, install_packages, is_uefi_bootable, lsb_release, which\n",
"REQUIRED_IMPORTS = [('import yaml', 'python-yaml', 'python3-yaml')]\n",
"REQUIRED_EXECUTABLES = [('file', 'file'), ('lvcreate', 'lvm2'), ('mdadm',\n 'mdadm'), ('mkfs.vfat', 'dosfstools'), ('mkfs.btrfs', 'btrfs-tools'), (\n 'mkfs.ext4', 'e2fsprogs'), ('mkfs.xfs', 'xfsprogs'), ('partprobe',\n 'parted'), ('sgdisk', 'gdisk'), ('udevadm', 'udev'), ('make-bcache',\n 'bcache-tools'), ('iscsiadm', 'open-iscsi')]\n",
"if lsb_release()['codename'] == 'precise':\n",
"REQUIRED_IMPORTS.append(('import oauth.oauth', 'python-oauth', None))\n",
"REQUIRED_IMPORTS.append(('import oauthlib.oauth1', 'python-oauthlib',\n 'python3-oauthlib'))\n",
"if not is_uefi_bootable() and 'arm' in get_architecture():\n",
"REQUIRED_EXECUTABLES.append(('flash-kernel', 'flash-kernel'))\n",
"def __init__(self, message, deps):...\n",
"self.message = message\n",
"if isinstance(deps, str) or deps is None:\n",
"deps = [deps]\n",
"self.deps = [d for d in deps if d is not None]\n",
"self.fatal = None in deps\n",
"def __str__(self):...\n",
"if self.fatal:\n",
"if not len(self.deps):\n",
"return self.message + ' Install packages: %s' % ' '.join(self.deps)\n",
"return self.message + ' Unresolvable.'\n",
"return self.message + ' Unresolvable. Partially resolvable with packages: %s' % ' '.join(\n self.deps)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"ImportFrom'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Condition",
"Condition",
"Return'",
"Return'",
"Return'"
] |
[
"def FUNC_9(VAR_2):...\n",
"assert VAR_2 == 'hello'\n"
] | [
"def assert_warn(line):...\n",
"assert line == 'hello'\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assert'"
] |
[
"def __init__(self, VAR_0):...\n",
"self.active = False\n",
"self.bytes_buffer = io.BytesIO()\n",
"self.redirection_source = VAR_0\n",
"self.last_write_time = 0\n",
"super(CLASS_0, self).__init__(buffer=self.bytes_buffer, encoding=\n redirection_source.encoding, write_through=True)\n"
] | [
"def __init__(self, redirection_source):...\n",
"self.active = False\n",
"self.bytes_buffer = io.BytesIO()\n",
"self.redirection_source = redirection_source\n",
"self.last_write_time = 0\n",
"super(RedirectBuffer, self).__init__(buffer=self.bytes_buffer, encoding=\n redirection_source.encoding, write_through=True)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@VAR_0.assignment_tag...\n",
"if not isinstance(VAR_10.grading_data, dict):\n",
"return ''\n",
"VAR_22 = VAR_10.grading_data.get('grading_data')\n",
"if not isinstance(VAR_22, str):\n",
"return ''\n",
"if VAR_22.startswith('<pre>'):\n",
"return VAR_22[5:-6]\n",
"return json.loads(VAR_22).get('errors', '')\n",
"return ''\n"
] | [
"@register.assignment_tag...\n",
"if not isinstance(submission.grading_data, dict):\n",
"return ''\n",
"grading_data = submission.grading_data.get('grading_data')\n",
"if not isinstance(grading_data, str):\n",
"return ''\n",
"if grading_data.startswith('<pre>'):\n",
"return grading_data[5:-6]\n",
"return json.loads(grading_data).get('errors', '')\n",
"return ''\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'",
"Return'"
] |
[
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = discord.Embed(VAR_2='Updating B9S Guide', VAR_3=discord.Color(13506590)\n )\n",
"VAR_7.set_author(name='Plailect', url='https://3ds.guide/updating-b9s')\n",
"VAR_7.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n",
"VAR_7.url = 'https://3ds.guide/updating-b9s'\n",
"VAR_7.description = 'A guide for updating to new B9S versions.'\n",
"await self.bot.say('', VAR_7=embed)\n"
] | [
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"embed = discord.Embed(title='Updating B9S Guide', color=discord.Color(13506590)\n )\n",
"embed.set_author(name='Plailect', url='https://3ds.guide/updating-b9s')\n",
"embed.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n",
"embed.url = 'https://3ds.guide/updating-b9s'\n",
"embed.description = 'A guide for updating to new B9S versions.'\n",
"await self.bot.say('', embed=embed)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_16):...\n",
"self.key_name = VAR_16\n"
] | [
"def __init__(self, key_name):...\n",
"self.key_name = key_name\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"\"\"\"URL routing module.\"\"\"\n",
"from django.conf import urls\n",
"import site_settings\n",
"import views.admin.statistics\n",
"VAR_0 = [('global/admin/statistics/?', views.admin.statistics.\n AdminStatisticsView.as_view)]\n",
"VAR_1 = [urls.url('^(%s)$' % path_exp, view_func()) for path_exp, view_func in\n VAR_0]\n",
"if site_settings.OPTIONAL_PATH_PREFIX:\n",
"VAR_1 += [urls.url('^(%s)/(%s)$' % (site_settings.OPTIONAL_PATH_PREFIX,\n path_exp), view_func()) for path_exp, view_func in VAR_0]\n"
] | [
"\"\"\"URL routing module.\"\"\"\n",
"from django.conf import urls\n",
"import site_settings\n",
"import views.admin.statistics\n",
"_BASE_URL_PATTERNS = [('global/admin/statistics/?', views.admin.statistics.\n AdminStatisticsView.as_view)]\n",
"urlpatterns = [urls.url('^(%s)$' % path_exp, view_func()) for path_exp,\n view_func in _BASE_URL_PATTERNS]\n",
"if site_settings.OPTIONAL_PATH_PREFIX:\n",
"urlpatterns += [urls.url('^(%s)/(%s)$' % (site_settings.\n OPTIONAL_PATH_PREFIX, path_exp), view_func()) for path_exp, view_func in\n _BASE_URL_PATTERNS]\n"
] | [
0,
0,
0,
0,
5,
5,
0,
5
] | [
"Expr'",
"ImportFrom'",
"Import'",
"Import'",
"Assign'",
"Assign'",
"Condition",
"AugAssign'"
] |
[
"def FUNC_9():...\n",
"if VAR_4 == VAR_5:\n",
"VAR_14 = 'string' % (VAR_4, VAR_5, VAR_5, VAR_4)\n",
"VAR_7.execute(VAR_14)\n",
"if VAR_7.fetchone()[0] > 0:\n"
] | [
"def _checkPairing():...\n",
"if winner == loser:\n",
"q = (\n \"\"\"\n SELECT COUNT(*) FROM matches\n WHERE (matches.winner_id = %s AND matches.loser_id = %s)\n OR (matches.winner_id = %s AND matches.loser_id = %s);\n \"\"\"\n % (winner, loser, loser, winner))\n",
"cur.execute(q)\n",
"if cur.fetchone()[0] > 0:\n"
] | [
0,
0,
4,
4,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Expr'",
"Condition"
] |
[
"@scenario('../features/pods_alive.feature', 'Expected Pods')...\n",
""
] | [
"@scenario('../features/pods_alive.feature', 'Expected Pods')...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_11(self, VAR_10):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.connection:\n",
"self.cursor.execute(\n 'insert into itemtranslation (itemid, itemlanguageid, translation) values (\"%s\", \"%s\", \"%s\")'\n % (VAR_10[0], VAR_10[1], VAR_10[2]))\n",
"self.connection.commit()\n"
] | [
"def add_translation(self, trid):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.connection:\n",
"self.cursor.execute(\n 'insert into itemtranslation (itemid, itemlanguageid, translation) values (\"%s\", \"%s\", \"%s\")'\n % (trid[0], trid[1], trid[2]))\n",
"self.connection.commit()\n"
] | [
0,
0,
0,
4,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"Expr'"
] |
[
"@VAR_1.route(VAR_0 + '/create', methods=['GET'])...\n",
"return ujson.dumps({'success': True})\n"
] | [
"@user.route(BASE_URL + '/create', methods=['GET'])...\n",
"return ujson.dumps({'success': True})\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_16(self):...\n",
"self._test_strtype('varchar', '')\n"
] | [
"def test_text_upperlatin(self):...\n",
"self._test_strtype('varchar', '')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_0(VAR_0, VAR_1, VAR_2):...\n",
"VAR_4 = VAR_0.cursor()\n",
"VAR_5 = 'ALTER TABLE Presets DROP COLUMN id;'\n",
"VAR_4.execute(VAR_5)\n",
"VAR_5 = (\n 'ALTER TABLE Presets ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY NOT NULL FIRST;'\n )\n",
"VAR_4.execute(VAR_5)\n",
"VAR_6 = Table('Presets')\n",
"VAR_7 = MySQLQuery.into(VAR_6).columns('querval', 'description').insert(VAR_1,\n VAR_2)\n",
"print(VAR_7)\n",
"VAR_5 = str(VAR_7)\n",
"VAR_4.execute(VAR_5)\n",
"VAR_5 = 'ALTER TABLE Presets DROP COLUMN id;'\n",
"VAR_4.execute(VAR_5)\n",
"VAR_5 = (\n 'ALTER TABLE Presets ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY NOT NULL FIRST;'\n )\n",
"VAR_4.execute(VAR_5)\n"
] | [
"def write_preset(conn, queryin, descriptin):...\n",
"cursor = conn.cursor()\n",
"quer = 'ALTER TABLE Presets DROP COLUMN id;'\n",
"cursor.execute(quer)\n",
"quer = (\n 'ALTER TABLE Presets ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY NOT NULL FIRST;'\n )\n",
"cursor.execute(quer)\n",
"extable = Table('Presets')\n",
"q = MySQLQuery.into(extable).columns('querval', 'description').insert(queryin,\n descriptin)\n",
"print(q)\n",
"quer = str(q)\n",
"cursor.execute(quer)\n",
"quer = 'ALTER TABLE Presets DROP COLUMN id;'\n",
"cursor.execute(quer)\n",
"quer = (\n 'ALTER TABLE Presets ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY NOT NULL FIRST;'\n )\n",
"cursor.execute(quer)\n"
] | [
0,
4,
4,
4,
4,
0,
4,
4,
4,
4,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"await self.simple_embed('string', VAR_3=discord.Color.red())\n"
] | [
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"await self.simple_embed(\n \"\"\"If you have lost the contents of your SD card with CFW, you will need in SD root:\n-Homebrew launcher executable [here](https://smealum.github.io/ninjhax2/boot.3dsx)\n-`boot.firm` from [luma3ds latest release 7z](https://github.com/AuroraWright/Luma3DS/releases/latest)\nThen repeat the [finalizing setup](https://3ds.guide/finalizing-setup) page.\"\"\"\n , color=discord.Color.red())\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'"
] |
[
"def FUNC_2(self, VAR_5):...\n",
"if self.table_view.verticalScrollBar().value(\n",
"self.load_torrents()\n"
] | [
"def _on_list_scroll(self, event):...\n",
"if self.table_view.verticalScrollBar().value(\n",
"self.load_torrents()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'"
] |
[
"def FUNC_5(VAR_0, VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"FUNC_0(VAR_0, VAR_1).close()\n"
] | [
"def shutdown(app, context):...\n",
"\"\"\"docstring\"\"\"\n",
"get_db(app, context).close()\n"
] | [
0,
0,
4
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"def FUNC_1(self):...\n",
"return self.extracted_text\n"
] | [
"def get_extractedtext(self):...\n",
"return self.extracted_text\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_39(self):...\n",
"\"\"\"docstring\"\"\"\n",
"for VAR_40, VAR_41 in self._names.items():\n",
"yield VAR_40, VAR_41\n"
] | [
"def get_names(self):...\n",
"\"\"\"docstring\"\"\"\n",
"for name, index in self._names.items():\n",
"yield name, index\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Expr'"
] |
[
"def __iter__(self):...\n",
"\"\"\"docstring\"\"\"\n",
"while True:\n",
"VAR_41 = self.token()\n",
"yield VAR_41\n",
"if VAR_41.ttype == CLASS_1.EOF:\n"
] | [
"def __iter__(self):...\n",
"\"\"\"docstring\"\"\"\n",
"while True:\n",
"token = self.token()\n",
"yield token\n",
"if token.ttype == TokenType.EOF:\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Expr'",
"Condition"
] |
[
"def FUNC_6(self, VAR_7):...\n",
"if VAR_7 == 'ro.build.id':\n",
"return 'AB42'\n",
"if VAR_7 == 'ro.build.type':\n",
"return 'userdebug'\n",
"if VAR_7 == 'ro.build.product' or VAR_7 == 'ro.product.name':\n",
"return 'FakeModel'\n",
"if VAR_7 == 'sys.boot_completed':\n",
"return '1'\n"
] | [
"def getprop(self, params):...\n",
"if params == 'ro.build.id':\n",
"return 'AB42'\n",
"if params == 'ro.build.type':\n",
"return 'userdebug'\n",
"if params == 'ro.build.product' or params == 'ro.product.name':\n",
"return 'FakeModel'\n",
"if params == 'sys.boot_completed':\n",
"return '1'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'"
] |
[
"def FUNC_0(VAR_0, VAR_1, VAR_2=None, VAR_3=False):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = posixpath.normpath(VAR_1)\n",
"VAR_1 = VAR_1.lstrip('/')\n",
"VAR_10 = ''\n",
"for VAR_18 in VAR_1.split('/'):\n",
"if not VAR_18:\n",
"if VAR_10 and VAR_1 != VAR_10:\n",
"VAR_17, VAR_18 = os.path.splitdrive(VAR_18)\n",
"return HttpResponseRedirect(VAR_10)\n",
"VAR_6 = os.path.join(VAR_2, VAR_10)\n",
"VAR_19, VAR_18 = os.path.split(VAR_18)\n",
"if os.path.isdir(VAR_6):\n",
"if VAR_18 in (os.curdir, os.pardir):\n",
"if VAR_3:\n",
"if not os.path.exists(VAR_6):\n",
"VAR_10 = os.path.join(VAR_10, VAR_18).replace('\\\\', '/')\n",
"return FUNC_1(VAR_10, VAR_6)\n",
"VAR_11 = os.stat(VAR_6)\n",
"if not FUNC_2(VAR_0.META.get('HTTP_IF_MODIFIED_SINCE'), VAR_11.st_mtime,\n",
"return HttpResponseNotModified()\n",
"VAR_12, VAR_13 = mimetypes.guess_type(VAR_6)\n",
"VAR_12 = VAR_12 or 'application/octet-stream'\n",
"VAR_14 = FileResponse(open(VAR_6, 'rb'), VAR_12=content_type)\n",
"VAR_14['Last-Modified'] = http_date(VAR_11.st_mtime)\n",
"if stat.S_ISREG(VAR_11.st_mode):\n",
"VAR_14['Content-Length'] = VAR_11.st_size\n",
"if VAR_13:\n",
"VAR_14['Content-Encoding'] = VAR_13\n",
"return VAR_14\n"
] | [
"def serve(request, path, document_root=None, show_indexes=False):...\n",
"\"\"\"docstring\"\"\"\n",
"path = posixpath.normpath(path)\n",
"path = path.lstrip('/')\n",
"newpath = ''\n",
"for part in path.split('/'):\n",
"if not part:\n",
"if newpath and path != newpath:\n",
"drive, part = os.path.splitdrive(part)\n",
"return HttpResponseRedirect(newpath)\n",
"fullpath = os.path.join(document_root, newpath)\n",
"head, part = os.path.split(part)\n",
"if os.path.isdir(fullpath):\n",
"if part in (os.curdir, os.pardir):\n",
"if show_indexes:\n",
"if not os.path.exists(fullpath):\n",
"newpath = os.path.join(newpath, part).replace('\\\\', '/')\n",
"return directory_index(newpath, fullpath)\n",
"statobj = os.stat(fullpath)\n",
"if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),\n",
"return HttpResponseNotModified()\n",
"content_type, encoding = mimetypes.guess_type(fullpath)\n",
"content_type = content_type or 'application/octet-stream'\n",
"response = FileResponse(open(fullpath, 'rb'), content_type=content_type)\n",
"response['Last-Modified'] = http_date(statobj.st_mtime)\n",
"if stat.S_ISREG(statobj.st_mode):\n",
"response['Content-Length'] = statobj.st_size\n",
"if encoding:\n",
"response['Content-Encoding'] = encoding\n",
"return response\n"
] | [
0,
0,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
0,
6,
0,
0,
6,
6,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_6(self):...\n",
"if is_banned_IP(VAR_102.ip):\n",
"VAR_101.errors.add(errors.BANNED_IP)\n",
"return VAR_102.ip\n"
] | [
"def run(self):...\n",
"if is_banned_IP(request.ip):\n",
"c.errors.add(errors.BANNED_IP)\n",
"return request.ip\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_6(self, VAR_74):...\n",
"if VAR_74 and (not self.domain_re.match(VAR_74) or VAR_74.endswith(\n",
"VAR_101.errors.add(errors.BAD_CNAME)\n",
"if VAR_74:\n",
"return VAR_100(VAR_74).lower()\n",
"VAR_101.errors.add(errors.BAD_CNAME)\n"
] | [
"def run(self, domain):...\n",
"if domain and (not self.domain_re.match(domain) or domain.endswith(\n",
"c.errors.add(errors.BAD_CNAME)\n",
"if domain:\n",
"return str(domain).lower()\n",
"c.errors.add(errors.BAD_CNAME)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Condition",
"Return'",
"Expr'"
] |
[
"import math\n",
"from random import shuffle\n",
"from django.db import models\n",
"from django.urls import reverse\n",
"from django.core.validators import URLValidator, MaxValueValidator, MinValueValidator\n",
"from .common_info import CommonInfo\n",
"from .data_document import DataDocument\n",
"VAR_0 = ('DL', 'download'), ('EX', 'extraction'), ('PC',\n 'product categorization'), ('DC', 'data cleaning')\n",
"VAR_1 = 0.2\n",
"VAR_2 = models.CharField(max_length=50)\n",
"VAR_3 = models.CharField(max_length=100, null=True, blank=True, validators=\n [URLValidator()])\n",
"VAR_4 = models.BooleanField(default=False)\n",
"VAR_5 = models.CharField(max_length=2, choices=TYPE_CHOICES, blank=False,\n default='EX')\n",
"VAR_6 = models.PositiveSmallIntegerField('Confidence', blank=True,\n validators=[MaxValueValidator(100), MinValueValidator(1)], default=1)\n",
"def __str__(self):...\n",
"return str(self.title)\n"
] | [
"import math\n",
"from random import shuffle\n",
"from django.db import models\n",
"from django.urls import reverse\n",
"from django.core.validators import URLValidator, MaxValueValidator, MinValueValidator\n",
"from .common_info import CommonInfo\n",
"from .data_document import DataDocument\n",
"TYPE_CHOICES = ('DL', 'download'), ('EX', 'extraction'), ('PC',\n 'product categorization'), ('DC', 'data cleaning')\n",
"QA_COMPLETE_PERCENTAGE = 0.2\n",
"title = models.CharField(max_length=50)\n",
"url = models.CharField(max_length=100, null=True, blank=True, validators=[\n URLValidator()])\n",
"qa_begun = models.BooleanField(default=False)\n",
"script_type = models.CharField(max_length=2, choices=TYPE_CHOICES, blank=\n False, default='EX')\n",
"confidence = models.PositiveSmallIntegerField('Confidence', blank=True,\n validators=[MaxValueValidator(100), MinValueValidator(1)], default=1)\n",
"def __str__(self):...\n",
"return str(self.title)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
6,
0,
0,
0,
0,
0
] | [
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"@api.public...\n",
"self.response.write(api.get_current_identity().to_bytes())\n"
] | [
"@api.public...\n",
"self.response.write(api.get_current_identity().to_bytes())\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"def FUNC_2(VAR_5, VAR_6, VAR_7, VAR_8, VAR_3, VAR_9):...\n",
"VAR_14 = WfAnalysis.Get(VAR_5, VAR_6, VAR_7)\n",
"if not VAR_14:\n",
"return\n",
"VAR_17 = {'triage_timestamp': time_util.GetUTCNowTimestamp(), 'user_name':\n VAR_9, 'cl_status': VAR_3, 'version': VAR_14.version, 'triaged_cl': VAR_8}\n",
"if not VAR_14.triage_history:\n",
"VAR_14.triage_history = []\n",
"VAR_14.triage_history.append(VAR_17)\n",
"VAR_14.triage_email_obscured = False\n",
"VAR_14.triage_record_last_add = time_util.GetUTCNow()\n",
"VAR_14.put()\n"
] | [
"def _AppendTriageHistoryRecord(master_name, builder_name, build_number,...\n",
"analysis = WfAnalysis.Get(master_name, builder_name, build_number)\n",
"if not analysis:\n",
"return\n",
"triage_record = {'triage_timestamp': time_util.GetUTCNowTimestamp(),\n 'user_name': user_name, 'cl_status': cl_status, 'version': analysis.\n version, 'triaged_cl': cl_info}\n",
"if not analysis.triage_history:\n",
"analysis.triage_history = []\n",
"analysis.triage_history.append(triage_record)\n",
"analysis.triage_email_obscured = False\n",
"analysis.triage_record_last_add = time_util.GetUTCNow()\n",
"analysis.put()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_13(self, VAR_4):...\n",
"return self.iden_reqid_map.get_values(VAR_4)\n"
] | [
"def get_reqids(self, iden):...\n",
"return self.iden_reqid_map.get_values(iden)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __str__(self):...\n",
"return super().__str__() + 'Backend: ' + self.backend + '\\n'\n"
] | [
"def __str__(self):...\n",
"return super().__str__() + 'Backend: ' + self.backend + '\\n'\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_1(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"if os.path.exists(VAR_0):\n",
"shutil.rmtree(VAR_0, ignore_errors=True)\n"
] | [
"def remove_directory_tree(path):...\n",
"\"\"\"docstring\"\"\"\n",
"if os.path.exists(path):\n",
"shutil.rmtree(path, ignore_errors=True)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'"
] |
[
"def FUNC_0():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = {'TEST_STACK': str(CLASS_0.DEFAULT_TEST_ID), 'TEST_APP': \n 'gcpsmoketest' + CLASS_0.DEFAULT_TEST_ID}\n",
"return st.ScenarioTestRunner.main(CLASS_0, default_binding_overrides=\n defaults, test_case_list=[GoogleSmokeTest])\n"
] | [
"def main():...\n",
"\"\"\"docstring\"\"\"\n",
"defaults = {'TEST_STACK': str(GoogleSmokeTestScenario.DEFAULT_TEST_ID),\n 'TEST_APP': 'gcpsmoketest' + GoogleSmokeTestScenario.DEFAULT_TEST_ID}\n",
"return st.ScenarioTestRunner.main(GoogleSmokeTestScenario,\n default_binding_overrides=defaults, test_case_list=[GoogleSmokeTest])\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"from flask import Flask, redirect, url_for, render_template, request, session, flash\n",
"from flask.ext.sqlalchemy import SQLAlchemy\n",
"from oauth import OAuthSignIn\n",
"from subprocess import check_output, STDOUT, CalledProcessError\n",
"from werkzeug import generate_password_hash, check_password_hash, secure_filename\n",
"from database.database_create import Base, User\n",
"from database.database_insert import insert_user, insert_social_user\n",
"from database.database_query import query_user, query_social_user, number_of_users\n",
"import base64\n",
"import json\n",
"import os\n",
"import shutil\n",
"import tempfile\n",
"import parser\n",
"VAR_0 = True\n",
"VAR_1 = Flask(__name__)\n",
"VAR_1.config['SECRET_KEY'] = 'secret'\n",
"VAR_1.config['OAUTH_CREDENTIALS'] = {'facebook': {'id': '604820106335654',\n 'secret': '5eb3f15f84c722df9cbc577206557cc8'}, 'twitter': {'id':\n 'cGFr2WV93py7an7FrGXXNDS6p', 'secret':\n 'U9ufkrhicVHrj5CGojmQ7ZCxSwytoShSgM0t9WCq0HbqcfKwL8'}}\n",
"VAR_1.secret_key = 'fe2917b485cc985c47071f3e38273348'\n",
"VAR_1.config['UPLOAD_FOLDER'] = 'userFiles/'\n",
"VAR_1.config['ALLOWED_EXTENSIONS'] = set(['pml'])\n",
"def FUNC_0(VAR_2, VAR_3='utf-8'):...\n",
"return f.read().decode(VAR_3)\n"
] | [
"from flask import Flask, redirect, url_for, render_template, request, session, flash\n",
"from flask.ext.sqlalchemy import SQLAlchemy\n",
"from oauth import OAuthSignIn\n",
"from subprocess import check_output, STDOUT, CalledProcessError\n",
"from werkzeug import generate_password_hash, check_password_hash, secure_filename\n",
"from database.database_create import Base, User\n",
"from database.database_insert import insert_user, insert_social_user\n",
"from database.database_query import query_user, query_social_user, number_of_users\n",
"import base64\n",
"import json\n",
"import os\n",
"import shutil\n",
"import tempfile\n",
"import parser\n",
"DEBUG = True\n",
"app = Flask(__name__)\n",
"app.config['SECRET_KEY'] = 'secret'\n",
"app.config['OAUTH_CREDENTIALS'] = {'facebook': {'id': '604820106335654',\n 'secret': '5eb3f15f84c722df9cbc577206557cc8'}, 'twitter': {'id':\n 'cGFr2WV93py7an7FrGXXNDS6p', 'secret':\n 'U9ufkrhicVHrj5CGojmQ7ZCxSwytoShSgM0t9WCq0HbqcfKwL8'}}\n",
"app.secret_key = 'fe2917b485cc985c47071f3e38273348'\n",
"app.config['UPLOAD_FOLDER'] = 'userFiles/'\n",
"app.config['ALLOWED_EXTENSIONS'] = set(['pml'])\n",
"def get_resource_as_string(name, charset='utf-8'):...\n",
"return f.read().decode(charset)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
6,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_0(self):...\n",
"self.user = User(username='[email protected]', email='', first_name='Matti',\n last_name='Sukunimi')\n",
"self.user.set_unusable_password()\n",
"self.user.save()\n",
"self.user.userprofile.student_id = '000'\n",
"self.user.userprofile.save()\n",
"self.login_url = reverse('shibboleth-login')\n"
] | [
"def setUp(self):...\n",
"self.user = User(username='[email protected]', email='', first_name='Matti',\n last_name='Sukunimi')\n",
"self.user.set_unusable_password()\n",
"self.user.save()\n",
"self.user.userprofile.student_id = '000'\n",
"self.user.userprofile.save()\n",
"self.login_url = reverse('shibboleth-login')\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def FUNC_22(self, VAR_17, VAR_16):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"def remove_export(self, context, volume):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"FunctionDef'",
"Docstring"
] |
[
"def FUNC_10(self, VAR_8):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = pathexpr.SearcherNotExists(self, VAR_8)\n",
"VAR_10 = ds.PathTraversalContext({}, {}, self._root, {}, None, None, None)\n",
"VAR_11 = self._doc['rules']['ROOT']\n",
"ds._traverse(VAR_5, VAR_11, VAR_10, self)\n",
"return VAR_5._store\n"
] | [
"def depict_paths(self, createexpr):...\n",
"\"\"\"docstring\"\"\"\n",
"searcher = pathexpr.SearcherNotExists(self, createexpr)\n",
"ctx = ds.PathTraversalContext({}, {}, self._root, {}, None, None, None)\n",
"rule = self._doc['rules']['ROOT']\n",
"ds._traverse(searcher, rule, ctx, self)\n",
"return searcher._store\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_7(self, VAR_10=None):...\n",
"VAR_25 = VAR_20.path.join(self.parlai_home, 'downloads')\n",
"VAR_26 = self.add_argument_group('Main ParlAI Arguments')\n",
"VAR_26.add_argument('-t', '--task', help=\n 'ParlAI task(s), e.g. \"babi:Task1\" or \"babi,cbt\"')\n",
"VAR_26.add_argument('--download-path', default=default_downloads_path, help\n =\n 'path for non-data dependencies to store any needed files.defaults to {parlai_dir}/downloads'\n )\n",
"VAR_26.add_argument('-dt', '--datatype', default='train', choices=['train',\n 'train:stream', 'train:ordered', 'train:ordered:stream',\n 'train:stream:ordered', 'valid', 'valid:stream', 'test', 'test:stream'],\n help=\n 'choose from: train, train:ordered, valid, test. to stream data add \":stream\" to any option (e.g., train:stream). by default: train is random with replacement, valid is ordered, test is ordered.'\n )\n",
"VAR_26.add_argument('-im', '--image-mode', default='raw', type=str, help=\n 'image preprocessor to use. default is \"raw\". set to \"none\" to skip image loading.'\n )\n",
"VAR_26.add_argument('-nt', '--numthreads', default=1, type=int, help=\n 'number of threads. If batchsize set to 1, used for hogwild; otherwise, used for number of threads in threadpool loading, e.g. in vqa'\n )\n",
"VAR_26.add_argument('--hide-labels', default=False, type='bool', help=\n 'default (False) moves labels in valid and test sets to the eval_labels field. If True, they are hidden completely.'\n )\n",
"VAR_27 = self.add_argument_group('Batching Arguments')\n",
"VAR_27.add_argument('-bs', '--batchsize', default=1, type=int, help=\n 'batch size for minibatch training schemes')\n",
"VAR_27.add_argument('-bsrt', '--batch-sort', default=True, type='bool',\n help=\n 'If enabled (default True), create batches by flattening all episodes to have exactly one utterance exchange and then sorting all the examples according to their length. This dramatically reduces the amount of padding present after examples have been parsed, speeding up training.'\n )\n",
"VAR_27.add_argument('-clen', '--context-length', default=-1, type=int, help\n =\n 'Number of past utterances to remember when building flattened batches of data in multi-example episodes.'\n )\n",
"VAR_27.add_argument('-incl', '--include-labels', default=True, type='bool',\n help=\n 'Specifies whether or not to include labels as past utterances when building flattened batches of data in multi-example episodes.'\n )\n",
"self.add_parlai_data_path(VAR_26)\n"
] | [
"def add_parlai_args(self, args=None):...\n",
"default_downloads_path = os.path.join(self.parlai_home, 'downloads')\n",
"parlai = self.add_argument_group('Main ParlAI Arguments')\n",
"parlai.add_argument('-t', '--task', help=\n 'ParlAI task(s), e.g. \"babi:Task1\" or \"babi,cbt\"')\n",
"parlai.add_argument('--download-path', default=default_downloads_path, help\n =\n 'path for non-data dependencies to store any needed files.defaults to {parlai_dir}/downloads'\n )\n",
"parlai.add_argument('-dt', '--datatype', default='train', choices=['train',\n 'train:stream', 'train:ordered', 'train:ordered:stream',\n 'train:stream:ordered', 'valid', 'valid:stream', 'test', 'test:stream'],\n help=\n 'choose from: train, train:ordered, valid, test. to stream data add \":stream\" to any option (e.g., train:stream). by default: train is random with replacement, valid is ordered, test is ordered.'\n )\n",
"parlai.add_argument('-im', '--image-mode', default='raw', type=str, help=\n 'image preprocessor to use. default is \"raw\". set to \"none\" to skip image loading.'\n )\n",
"parlai.add_argument('-nt', '--numthreads', default=1, type=int, help=\n 'number of threads. If batchsize set to 1, used for hogwild; otherwise, used for number of threads in threadpool loading, e.g. in vqa'\n )\n",
"parlai.add_argument('--hide-labels', default=False, type='bool', help=\n 'default (False) moves labels in valid and test sets to the eval_labels field. If True, they are hidden completely.'\n )\n",
"batch = self.add_argument_group('Batching Arguments')\n",
"batch.add_argument('-bs', '--batchsize', default=1, type=int, help=\n 'batch size for minibatch training schemes')\n",
"batch.add_argument('-bsrt', '--batch-sort', default=True, type='bool', help\n =\n 'If enabled (default True), create batches by flattening all episodes to have exactly one utterance exchange and then sorting all the examples according to their length. This dramatically reduces the amount of padding present after examples have been parsed, speeding up training.'\n )\n",
"batch.add_argument('-clen', '--context-length', default=-1, type=int, help=\n 'Number of past utterances to remember when building flattened batches of data in multi-example episodes.'\n )\n",
"batch.add_argument('-incl', '--include-labels', default=True, type='bool',\n help=\n 'Specifies whether or not to include labels as past utterances when building flattened batches of data in multi-example episodes.'\n )\n",
"self.add_parlai_data_path(parlai)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_6():...\n",
"VAR_13.throw(_('Invalid Search Field'), VAR_13.DataError)\n"
] | [
"def _raise_exception():...\n",
"frappe.throw(_('Invalid Search Field'), frappe.DataError)\n"
] | [
0,
4
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_3(self):...\n",
"VAR_1 = ['DTXSID9022528', 'DTXSID1020273', 'DTXSID6026296', 'DTXSID2021781']\n",
"VAR_2 = stats_by_dtxsids(VAR_1)\n",
"for e in VAR_2:\n",
"if e['sid'] == 'DTXSID9022528':\n",
"self.assertEqual(1, VAR_3['dds_wf_n'],\n 'There should be 1 extracted chemical with weight fraction data associated with ethylparaben'\n )\n",
"VAR_3 = e\n",
"VAR_12 = ExtractedChemical.objects.get(rawchem_ptr_id=73)\n",
"VAR_12.raw_min_comp = 0.1\n",
"VAR_12.save()\n",
"VAR_2 = stats_by_dtxsids(VAR_1)\n",
"for e in VAR_2:\n",
"if e['sid'] == 'DTXSID9022528':\n",
"self.assertEqual(2, VAR_3['dds_wf_n'], 'string')\n",
"VAR_3 = e\n"
] | [
"def test_dtxsid_dds_wf_n(self):...\n",
"dtxs = ['DTXSID9022528', 'DTXSID1020273', 'DTXSID6026296', 'DTXSID2021781']\n",
"stats = stats_by_dtxsids(dtxs)\n",
"for e in stats:\n",
"if e['sid'] == 'DTXSID9022528':\n",
"self.assertEqual(1, ethylparaben_stats['dds_wf_n'],\n 'There should be 1 extracted chemical with weight fraction data associated with ethylparaben'\n )\n",
"ethylparaben_stats = e\n",
"ec = ExtractedChemical.objects.get(rawchem_ptr_id=73)\n",
"ec.raw_min_comp = 0.1\n",
"ec.save()\n",
"stats = stats_by_dtxsids(dtxs)\n",
"for e in stats:\n",
"if e['sid'] == 'DTXSID9022528':\n",
"self.assertEqual(2, ethylparaben_stats['dds_wf_n'],\n 'There should be 2 extracted chemicals with weight fraction data associated with ethylparaben'\n )\n",
"ethylparaben_stats = e\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Condition",
"Expr'",
"Assign'"
] |
[
"def FUNC_0(VAR_4, VAR_5=None, VAR_6=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_6 is None:\n",
"VAR_6 = ray.worker.global_worker\n",
"return CLASS_2(VAR_6.profiler, VAR_4, VAR_5=extra_data)\n"
] | [
"def profile(event_type, extra_data=None, worker=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if worker is None:\n",
"worker = ray.worker.global_worker\n",
"return RayLogSpanRaylet(worker.profiler, event_type, extra_data=extra_data)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Return'"
] |
[
"@VAR_0.route('/entry', VAR_5=['POST', 'GET'])...\n",
"if request.method == 'POST':\n",
"VAR_8 = {}\n",
"return render_template('make_entry.html', parameters=['pH', 'TDS',\n 'Turbidity', 'Temperature'])\n",
"VAR_8['study'] = 'test'\n",
"VAR_8['timestamp'] = dt.now()\n",
"print(request.form)\n",
"for VAR_10, val in request.form.items():\n",
"VAR_8[VAR_10] = val\n",
"print(VAR_8)\n",
"VAR_9 = 'string'\n",
"VAR_7.execute(VAR_9, tuple(VAR_8[k] for k in VAR_8.keys()))\n",
"VAR_6.commit()\n"
] | [
"@app.route('/entry', methods=['POST', 'GET'])...\n",
"if request.method == 'POST':\n",
"dict = {}\n",
"return render_template('make_entry.html', parameters=['pH', 'TDS',\n 'Turbidity', 'Temperature'])\n",
"dict['study'] = 'test'\n",
"dict['timestamp'] = dt.now()\n",
"print(request.form)\n",
"for item, val in request.form.items():\n",
"dict[item] = val\n",
"print(dict)\n",
"sql = \"\"\"INSERT INTO observations (study, pH, TDS, Turbidity, Temperature, timestamp)\n VALUES(?,?,?,?,?,?)\"\"\"\n",
"cur.execute(sql, tuple(dict[k] for k in dict.keys()))\n",
"db.commit()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(VAR_0, VAR_1, VAR_2=VAR_2()):...\n",
"\"\"\"docstring\"\"\"\n",
"return True\n"
] | [
"def is_executable(tweak_group, groups, is_admin=is_admin()):...\n",
"\"\"\"docstring\"\"\"\n",
"return True\n"
] | [
0,
0,
2
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"@defer.inlineCallbacks...\n",
"VAR_68 = yield self.mail_store.add_mail('INBOX', VAR_29.raw)\n",
"defer.returnValue(VAR_68)\n"
] | [
"@defer.inlineCallbacks...\n",
"mail = yield self.mail_store.add_mail('INBOX', input_mail.raw)\n",
"defer.returnValue(mail)\n"
] | [
0,
0,
0
] | [
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_17(self, *VAR_67):...\n",
"self._ruleorder.add(*VAR_67)\n"
] | [
"def ruleorder(self, *rulenames):...\n",
"self._ruleorder.add(*rulenames)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_13(self):...\n",
"def FUNC_22(VAR_2, VAR_18, VAR_19):...\n",
"if VAR_18.provider.id == '8.9.10.11':\n",
"self._test_health(FUNC_22, cluster.ClusterHealth.ORANGE)\n"
] | [
"def test_orange_health(self):...\n",
"def _validate(cluster_api, endpoint, conn):...\n",
"if endpoint.provider.id == '8.9.10.11':\n",
"self._test_health(_validate, cluster.ClusterHealth.ORANGE)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Condition",
"Expr'"
] |
[
"def FUNC_31(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_43 = ''\n",
"VAR_43 += self.config['cluster'].get_node_label(self)\n",
"if self.config['label']:\n",
"VAR_43 += '%s' % self.config['label'] if not VAR_43 else '-%s' % self.config[\n 'label']\n",
"if not VAR_43:\n",
"return None\n",
"self.log_debug('Label for sosreport set to %s' % VAR_43)\n",
"if self.check_sos_version('3.6'):\n",
"VAR_61 = '--label'\n",
"VAR_61 = '--name'\n",
"return '%s=%s' % (VAR_61, VAR_43)\n",
"VAR_43 = '%s-%s' % (self.address.split('.')[0], VAR_43)\n"
] | [
"def determine_sos_label(self):...\n",
"\"\"\"docstring\"\"\"\n",
"label = ''\n",
"label += self.config['cluster'].get_node_label(self)\n",
"if self.config['label']:\n",
"label += '%s' % self.config['label'] if not label else '-%s' % self.config[\n 'label']\n",
"if not label:\n",
"return None\n",
"self.log_debug('Label for sosreport set to %s' % label)\n",
"if self.check_sos_version('3.6'):\n",
"lcmd = '--label'\n",
"lcmd = '--name'\n",
"return '%s=%s' % (lcmd, label)\n",
"label = '%s-%s' % (self.address.split('.')[0], label)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"AugAssign'",
"Condition",
"AugAssign'",
"Condition",
"Return'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Assign'"
] |
[
"def FUNC_14(VAR_27):...\n",
"if VAR_27.body[0] == ' ':\n",
"VAR_28 = str(VAR_27.body[1:]).lower().replace('\\\\', '').split('\\n')[0].split(\n ' ')\n",
"VAR_28 = str(VAR_27.body).lower().replace('\\\\', '').split('\\n')[0].split(' ')\n",
"print(VAR_28)\n",
"print(len(VAR_28))\n",
"VAR_39 = FUNC_10(VAR_27, VAR_28, 'comment')\n",
"VAR_27.reply(VAR_39 + VAR_0)\n"
] | [
"def handle_comment(message):...\n",
"if message.body[0] == ' ':\n",
"parsed_text = str(message.body[1:]).lower().replace('\\\\', '').split('\\n')[0\n ].split(' ')\n",
"parsed_text = str(message.body).lower().replace('\\\\', '').split('\\n')[0].split(\n ' ')\n",
"print(parsed_text)\n",
"print(len(parsed_text))\n",
"response = handle_send_nano(message, parsed_text, 'comment')\n",
"message.reply(response + comment_footer)\n"
] | [
0,
4,
4,
0,
4,
4,
4,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_1(VAR_3, VAR_4):...\n",
"VAR_5 = (\n \"UPDATE twitter_bot_vac_last_replied_id SET item_id = '${0}' WHERE name = '${1}'\"\n .format(VAR_4, VAR_3))\n",
"VAR_1 = psycopg2.connect(VAR_0)\n",
"print('Error %s' % e)\n",
"if VAR_1:\n",
"VAR_6 = VAR_1.cursor()\n",
"VAR_1.close()\n",
"VAR_6.execute(VAR_5)\n",
"VAR_1.commit()\n",
"VAR_6.close()\n"
] | [
"def setLastReplied(messageType, itemId):...\n",
"QUERY = (\n \"UPDATE twitter_bot_vac_last_replied_id SET item_id = '${0}' WHERE name = '${1}'\"\n .format(itemId, messageType))\n",
"conn = psycopg2.connect(connectionString)\n",
"print('Error %s' % e)\n",
"if conn:\n",
"cur = conn.cursor()\n",
"conn.close()\n",
"cur.execute(QUERY)\n",
"conn.commit()\n",
"cur.close()\n"
] | [
0,
4,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_36(self, VAR_25):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.file_exists(VAR_25):\n",
"self.log_debug('Failed to remove %s: %s' % (VAR_25, VAR_68))\n",
"self.log_debug('Removing file %s' % VAR_25)\n",
"self.log_debug(\n 'Attempting to remove remote file %s, but it does not exist on filesystem'\n % VAR_25)\n",
"return False\n",
"if self.local or self.config['become_root'] or self.config['need_sudo']:\n",
"return False\n",
"VAR_6 = 'rm -f %s' % VAR_25\n",
"self.sftp.remove(VAR_25)\n",
"VAR_33 = self.run_command(VAR_6, VAR_19=True)\n",
"return True\n"
] | [
"def remove_file(self, path):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.file_exists(path):\n",
"self.log_debug('Failed to remove %s: %s' % (path, e))\n",
"self.log_debug('Removing file %s' % path)\n",
"self.log_debug(\n 'Attempting to remove remote file %s, but it does not exist on filesystem'\n % path)\n",
"return False\n",
"if self.local or self.config['become_root'] or self.config['need_sudo']:\n",
"return False\n",
"cmd = 'rm -f %s' % path\n",
"self.sftp.remove(path)\n",
"res = self.run_command(cmd, need_root=True)\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_0():...\n",
"VAR_3 = argparse.ArgumentParser(description='Post ads on Kijiji')\n",
"VAR_3.add_argument('-u', '--username', help='username of your kijiji account')\n",
"VAR_3.add_argument('-p', '--password', help='password of your kijiji account')\n",
"VAR_4 = VAR_3.add_subparsers(help='sub-command help')\n",
"VAR_5 = VAR_4.add_parser('post', help='post a new ad')\n",
"VAR_5.add_argument('inf_file', type=str, help=\n '.inf file containing posting details')\n",
"VAR_5.set_defaults(function=post_ad)\n",
"VAR_6 = VAR_4.add_parser('folder', help='post ad from folder')\n",
"VAR_6.add_argument('folderName', type=str, help='folder containing ad details')\n",
"VAR_6.set_defaults(function=post_folder)\n",
"VAR_7 = VAR_4.add_parser('repost_folder', help='post ad from folder')\n",
"VAR_7.add_argument('folderName', type=str, help='folder containing ad details')\n",
"VAR_7.set_defaults(function=repost_folder)\n",
"VAR_8 = VAR_4.add_parser('show', help='show currently listed ads')\n",
"VAR_8.set_defaults(function=show_ads)\n",
"VAR_9 = VAR_4.add_parser('delete', help='delete a listed ad')\n",
"VAR_9.add_argument('id', type=str, help='id of the ad you wish to delete')\n",
"VAR_9.set_defaults(function=delete_ad)\n",
"VAR_10 = VAR_4.add_parser('nuke', help='delete all ads')\n",
"VAR_10.set_defaults(function=nuke)\n",
"VAR_11 = VAR_4.add_parser('check_ad', help='check if ad is active')\n",
"VAR_11.add_argument('folderName', type=str, help='folder containing ad details'\n )\n",
"VAR_11.set_defaults(function=check_ad)\n",
"VAR_12 = VAR_4.add_parser('repost', help='repost an existing ad')\n",
"VAR_12.add_argument('inf_file', type=str, help=\n '.inf file containing posting details')\n",
"VAR_12.set_defaults(function=repost_ad)\n",
"VAR_13 = VAR_4.add_parser('build_ad', help=\n 'Generates the item.inf file for a new ad')\n",
"VAR_13.set_defaults(function=generate_inf_file)\n",
"VAR_0 = VAR_3.parse_args()\n",
"VAR_0.function(VAR_0)\n",
"VAR_3.print_help()\n"
] | [
"def main():...\n",
"parser = argparse.ArgumentParser(description='Post ads on Kijiji')\n",
"parser.add_argument('-u', '--username', help='username of your kijiji account')\n",
"parser.add_argument('-p', '--password', help='password of your kijiji account')\n",
"subparsers = parser.add_subparsers(help='sub-command help')\n",
"postParser = subparsers.add_parser('post', help='post a new ad')\n",
"postParser.add_argument('inf_file', type=str, help=\n '.inf file containing posting details')\n",
"postParser.set_defaults(function=post_ad)\n",
"folderParser = subparsers.add_parser('folder', help='post ad from folder')\n",
"folderParser.add_argument('folderName', type=str, help=\n 'folder containing ad details')\n",
"folderParser.set_defaults(function=post_folder)\n",
"repostFolderParser = subparsers.add_parser('repost_folder', help=\n 'post ad from folder')\n",
"repostFolderParser.add_argument('folderName', type=str, help=\n 'folder containing ad details')\n",
"repostFolderParser.set_defaults(function=repost_folder)\n",
"showParser = subparsers.add_parser('show', help='show currently listed ads')\n",
"showParser.set_defaults(function=show_ads)\n",
"deleteParser = subparsers.add_parser('delete', help='delete a listed ad')\n",
"deleteParser.add_argument('id', type=str, help=\n 'id of the ad you wish to delete')\n",
"deleteParser.set_defaults(function=delete_ad)\n",
"nukeParser = subparsers.add_parser('nuke', help='delete all ads')\n",
"nukeParser.set_defaults(function=nuke)\n",
"checkParser = subparsers.add_parser('check_ad', help='check if ad is active')\n",
"checkParser.add_argument('folderName', type=str, help=\n 'folder containing ad details')\n",
"checkParser.set_defaults(function=check_ad)\n",
"repostParser = subparsers.add_parser('repost', help='repost an existing ad')\n",
"repostParser.add_argument('inf_file', type=str, help=\n '.inf file containing posting details')\n",
"repostParser.set_defaults(function=repost_ad)\n",
"buildParser = subparsers.add_parser('build_ad', help=\n 'Generates the item.inf file for a new ad')\n",
"buildParser.set_defaults(function=generate_inf_file)\n",
"args = parser.parse_args()\n",
"args.function(args)\n",
"parser.print_help()\n"
] | [
0,
0,
0,
0,
0,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
5,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_7(self, VAR_17):...\n",
"\"\"\"docstring\"\"\"\n",
"return self._adb.shell(VAR_17).decode('utf-8')\n",
"if e.ret_code == 1 and not e.stdout and not e.stderr:\n",
"return False\n"
] | [
"def _adb_grep_wrapper(self, adb_shell_cmd):...\n",
"\"\"\"docstring\"\"\"\n",
"return self._adb.shell(adb_shell_cmd).decode('utf-8')\n",
"if e.ret_code == 1 and not e.stdout and not e.stderr:\n",
"return False\n"
] | [
0,
0,
2,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'",
"Condition",
"Return'"
] |
[
"def FUNC_2(self, VAR_4, **VAR_5):...\n",
"self._handler.render_response(VAR_4, **context)\n"
] | [
"def render_response(self, _template, **context):...\n",
"self._handler.render_response(_template, **context)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_0():...\n",
"VAR_32 = hashlib.sha1()\n",
"VAR_32.update(uuid.uuid4().bytes)\n",
"VAR_33 = VAR_32.digest()\n",
"assert len(VAR_33) == ray_constants.ID_SIZE\n",
"return VAR_33\n"
] | [
"def _random_string():...\n",
"id_hash = hashlib.sha1()\n",
"id_hash.update(uuid.uuid4().bytes)\n",
"id_bytes = id_hash.digest()\n",
"assert len(id_bytes) == ray_constants.ID_SIZE\n",
"return id_bytes\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Assert'",
"Return'"
] |