lines: sequencelengths [1, 444]
raw_lines: sequencelengths [1, 444]
label: sequencelengths [1, 444]
type: sequencelengths [1, 444]
[ "@wraps(VAR_7)...\n", "" ]
[ "@wraps(f)...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "def FUNC_26():...\n", "frappe.throw(_('Use of sub-query or function is restricted'), frappe.DataError)\n" ]
[ "def _raise_exception():...\n", "frappe.throw(_('Use of sub-query or function is restricted'), frappe.DataError)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_6(self, VAR_7=None, VAR_3=None, VAR_4=None, VAR_8=None):...\n", "if VAR_7:\n", "if isinstance(VAR_7, dict):\n", "return self.__dict__\n", "return FUNC_1(self.get_all_children(), VAR_7, VAR_4=limit)\n", "if VAR_3:\n", "if isinstance(VAR_3, dict):\n", "VAR_9 = self.__dict__.get(VAR_7, VAR_8)\n", "VAR_9 = FUNC_1(self.__dict__.get(VAR_7, []), VAR_3, VAR_4=limit)\n", "VAR_8 = VAR_3\n", "if VAR_9 is None and VAR_7 not in self.ignore_in_getter and VAR_7 in (VAR_6\n", "VAR_3 = None\n", "self.set(VAR_7, [])\n", "return VAR_9\n", "VAR_9 = self.__dict__.get(VAR_7, VAR_8)\n", "VAR_9 = self.__dict__.get(VAR_7)\n" ]
[ "def get(self, key=None, filters=None, limit=None, default=None):...\n", "if key:\n", "if isinstance(key, dict):\n", "return self.__dict__\n", "return _filter(self.get_all_children(), key, limit=limit)\n", "if filters:\n", "if isinstance(filters, dict):\n", "value = self.__dict__.get(key, default)\n", "value = _filter(self.__dict__.get(key, []), filters, limit=limit)\n", "default = filters\n", "if value is None and key not in self.ignore_in_getter and key in (d.\n", "filters = None\n", "self.set(key, [])\n", "return value\n", "value = self.__dict__.get(key, default)\n", "value = self.__dict__.get(key)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Return'", "Return'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Return'", "Assign'", "Assign'" ]
[ "import pymysql.cursors\n", "from datetime import date, datetime\n", "import json\n", "import config\n", "def __init__(self):...\n", "self.conn = pymysql.connect(user=config.mysql_credentials['user'], password\n =config.mysql_credentials['password'], host=config.mysql_credentials[\n 'host'], db=config.mysql_credentials['database'], cursorclass=pymysql.\n cursors.DictCursor)\n", "self.cur = self.conn.cursor()\n", "def __enter__(self):...\n", "return DBase()\n" ]
[ "import pymysql.cursors\n", "from datetime import date, datetime\n", "import json\n", "import config\n", "def __init__(self):...\n", "self.conn = pymysql.connect(user=config.mysql_credentials['user'], password\n =config.mysql_credentials['password'], host=config.mysql_credentials[\n 'host'], db=config.mysql_credentials['database'], cursorclass=pymysql.\n cursors.DictCursor)\n", "self.cur = self.conn.cursor()\n", "def __enter__(self):...\n", "return DBase()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "Import'", "Import'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Return'" ]
[ "def FUNC_4(VAR_2):...\n", "VAR_43 = VAR_2[0]\n", "VAR_44 = VAR_43.__class__\n", "if VAR_44 not in [LearningUnitYear, EducationGroupYear]:\n", "if any(obj for obj in VAR_2 if obj.__class__ != VAR_44):\n" ]
[ "def _raise_if_incorrect_instance(objects):...\n", "first_obj = objects[0]\n", "obj_class = first_obj.__class__\n", "if obj_class not in [LearningUnitYear, EducationGroupYear]:\n", "if any(obj for obj in objects if obj.__class__ != obj_class):\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "For" ]
[ "@VAR_0.route('/signup/<remote_app>/', methods=['GET', 'POST'])...\n", "\"\"\"docstring\"\"\"\n", "if VAR_1 not in signup_handlers:\n", "return abort(404)\n", "VAR_3 = signup_handlers[VAR_1]['view']()\n", "return abort(404) if VAR_3 is None else VAR_3\n" ]
[ "@blueprint.route('/signup/<remote_app>/', methods=['GET', 'POST'])...\n", "\"\"\"docstring\"\"\"\n", "if remote_app not in signup_handlers:\n", "return abort(404)\n", "res = signup_handlers[remote_app]['view']()\n", "return abort(404) if res is None else res\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Return'", "Assign'", "Return'" ]
[ "def FUNC_30(self, VAR_78):...\n", "def FUNC_39(VAR_101):...\n", "VAR_101.version = VAR_78\n", "return VAR_101\n" ]
[ "def version(self, version):...\n", "def decorate(ruleinfo):...\n", "ruleinfo.version = version\n", "return ruleinfo\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_14(self, VAR_10, VAR_11):...\n", "VAR_24 = self.common._cli_run('createhost -add %s %s' % (VAR_10, ' '.join(\n VAR_11)), None)\n" ]
[ "def _modify_3par_fibrechan_host(self, hostname, wwn):...\n", "out = self.common._cli_run('createhost -add %s %s' % (hostname, ' '.join(\n wwn)), None)\n" ]
[ 0, 2 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_15(self):...\n", "" ]
[ "def testDownloadFileDownloadError(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_20(self, VAR_17, VAR_16):...\n", "\"\"\"docstring\"\"\"\n" ]
[ "def create_export(self, context, volume):...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Docstring" ]
[ "def FUNC_2(VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "return VAR_3.splitlines()\n" ]
[ "def splitlines(text):...\n", "\"\"\"docstring\"\"\"\n", "return text.splitlines()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_0(VAR_0, VAR_1=None, VAR_2=False):...\n", "VAR_3 = None\n", "if conf.eString and conf.eString in VAR_0:\n", "VAR_4 = VAR_0.index(conf.eString)\n", "if conf.eRegexp:\n", "VAR_5 = len(conf.eString)\n", "VAR_3 = re.findall(conf.eRegexp, VAR_0, re.I | re.M)\n", "if conf.string:\n", "VAR_6 = VAR_0[:VAR_4]\n", "if VAR_3:\n", "if conf.string in VAR_0:\n", "if conf.regexp:\n", "VAR_6 += VAR_0[VAR_4 + VAR_5:]\n", "for regExpResult in VAR_3:\n", "return True\n", "return False\n", "if re.search(conf.regexp, VAR_0, re.I | re.M):\n", "conf.seqMatcher.set_seq2(VAR_0)\n", "VAR_0 = VAR_6\n", "VAR_4 = VAR_0.index(regExpResult)\n", "return True\n", "return False\n", "if VAR_2:\n", "VAR_5 = len(regExpResult)\n", "return round(conf.seqMatcher.ratio(), 5)\n", "if round(conf.seqMatcher.ratio(), 5) >= MATCH_RATIO:\n", "VAR_7 = VAR_0[:VAR_4]\n", "return True\n", "return False\n", "VAR_7 += VAR_0[VAR_4 + VAR_5:]\n", "VAR_0 = VAR_7\n" ]
[ "def comparison(page, headers=None, getSeqMatcher=False):...\n", "regExpResults = None\n", "if conf.eString and conf.eString in page:\n", "index = page.index(conf.eString)\n", "if conf.eRegexp:\n", "length = len(conf.eString)\n", "regExpResults = re.findall(conf.eRegexp, page, re.I | re.M)\n", "if conf.string:\n", "pageWithoutString = page[:index]\n", "if regExpResults:\n", "if conf.string in page:\n", "if conf.regexp:\n", "pageWithoutString += page[index + length:]\n", "for regExpResult in regExpResults:\n", "return True\n", "return False\n", "if re.search(conf.regexp, page, re.I | re.M):\n", "conf.seqMatcher.set_seq2(page)\n", "page = pageWithoutString\n", "index = page.index(regExpResult)\n", "return True\n", "return False\n", "if getSeqMatcher:\n", "length = len(regExpResult)\n", "return round(conf.seqMatcher.ratio(), 5)\n", "if round(conf.seqMatcher.ratio(), 5) >= MATCH_RATIO:\n", "pageWithoutRegExp = page[:index]\n", "return True\n", "return False\n", "pageWithoutRegExp += page[index + length:]\n", "page = pageWithoutRegExp\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Condition", "AugAssign'", "For", "Return'", "Return'", "Condition", "Expr'", "Assign'", "Assign'", "Return'", "Return'", "Condition", "Assign'", "Return'", "Condition", "Assign'", "Return'", "Return'", "AugAssign'", "Assign'" ]
[ "def FUNC_6(self, VAR_59, VAR_60):...\n", "if self.remember:\n", "VAR_96 = '%s_%s' % (VAR_60, self.nav.get_param)\n", "if VAR_59 not in self.nav.options:\n", "VAR_97 = copy(VAR_101.user.sort_options) if VAR_101.user else {}\n", "VAR_59 = self.nav.default\n", "if self.remember and VAR_101.user_is_loggedin and VAR_59 != VAR_98:\n", "VAR_98 = VAR_97.get(VAR_96)\n", "VAR_97[VAR_96] = VAR_59\n", "return VAR_59\n", "if not VAR_59:\n", "VAR_101.user.sort_options = VAR_97\n", "VAR_59 = VAR_98\n", "VAR_82 = VAR_101.user\n", "utils.worker.do(lambda : VAR_82._commit())\n" ]
[ "def run(self, sort, where):...\n", "if self.remember:\n", "pref = '%s_%s' % (where, self.nav.get_param)\n", "if sort not in self.nav.options:\n", "user_prefs = copy(c.user.sort_options) if c.user else {}\n", "sort = self.nav.default\n", "if self.remember and c.user_is_loggedin and sort != user_pref:\n", "user_pref = user_prefs.get(pref)\n", "user_prefs[pref] = sort\n", "return sort\n", "if not sort:\n", "c.user.sort_options = user_prefs\n", "sort = user_pref\n", "user = c.user\n", "utils.worker.do(lambda : user._commit())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_2(self):...\n", "\"\"\"docstring\"\"\"\n", "if os.path.exists(self.cache_dir):\n", "shutil.rmtree(self.cache_dir)\n" ]
[ "def clear(self):...\n", "\"\"\"docstring\"\"\"\n", "if os.path.exists(self.cache_dir):\n", "shutil.rmtree(self.cache_dir)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Expr'" ]
[ "def FUNC_5(self, VAR_5):...\n", "if VAR_5 in self.groups:\n", "self.groups.remove(VAR_5)\n", "for oldg in VAR_5.get_ancestors():\n", "if oldg.name != 'all':\n", "for childg in self.groups:\n", "if oldg in childg.get_ancestors():\n" ]
[ "def remove_group(self, group):...\n", "if group in self.groups:\n", "self.groups.remove(group)\n", "for oldg in group.get_ancestors():\n", "if oldg.name != 'all':\n", "for childg in self.groups:\n", "if oldg in childg.get_ancestors():\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "For", "Condition", "For", "Condition" ]
[ "def FUNC_41(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = {'SEVERITY': {'HIGH': 4, 'MEDIUM': 0, 'LOW': 10}, 'CONFIDENCE': {\n 'MEDIUM': 5, 'HIGH': 9}}\n", "self.check_example('wildcard-injection.py', VAR_2)\n" ]
[ "def test_wildcard_injection(self):...\n", "\"\"\"docstring\"\"\"\n", "expect = {'SEVERITY': {'HIGH': 4, 'MEDIUM': 0, 'LOW': 10}, 'CONFIDENCE': {\n 'MEDIUM': 5, 'HIGH': 9}}\n", "self.check_example('wildcard-injection.py', expect)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "def FUNC_4(VAR_5, VAR_6='csrf_token', VAR_7=VAR_1, VAR_8=True):...\n", "VAR_10 = VAR_5.params.get(VAR_6, VAR_5.headers.get(VAR_7))\n", "if VAR_10 != VAR_5.session.get_csrf_token():\n", "if VAR_8:\n", "return True\n", "return False\n" ]
[ "def check_csrf_token(request, token='csrf_token', header=HEADER_NAME,...\n", "supplied_token = request.params.get(token, request.headers.get(header))\n", "if supplied_token != request.session.get_csrf_token():\n", "if raises:\n", "return True\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_3(self):...\n", "VAR_13 = tweet.objects.order_by('-datetime')\n", "VAR_14 = self.request.GET.get('keyword')\n", "if VAR_14 is not None:\n", "VAR_13 = VAR_13.filter(Q(text__icontains=keyword)).order_by('-datetime')\n", "return VAR_13\n" ]
[ "def get_queryset(self):...\n", "query = tweet.objects.order_by('-datetime')\n", "keyword = self.request.GET.get('keyword')\n", "if keyword is not None:\n", "query = query.filter(Q(text__icontains=keyword)).order_by('-datetime')\n", "return query\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_21(self, VAR_21, VAR_38, VAR_39):...\n", "\"\"\"docstring\"\"\"\n", "VAR_57 = self.getfile(VAR_21)\n", "if VAR_57 == False:\n", "VAR_57[VAR_7] = VAR_39\n" ]
[ "def utime(self, path, atime, mtime):...\n", "\"\"\"docstring\"\"\"\n", "p = self.getfile(path)\n", "if p == False:\n", "p[A_CTIME] = mtime\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'" ]
[ "def FUNC_3(VAR_5):...\n", "" ]
[ "def sendIndexReq(nGram):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_7(self):...\n", "" ]
[ "def checkout(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_5(VAR_5):...\n", "VAR_13 = FUNC_6()\n", "conn.execute(f'DROP DATABASE {VAR_13}')\n" ]
[ "def _drop_database(system_engine):...\n", "test_db_name = _get_test_db_name()\n", "conn.execute(f'DROP DATABASE {test_db_name}')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_6(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = self.client.get('/api/apps')\n", "self.assertEqual(VAR_6.status_code, 200)\n", "self.assertEqual(len(VAR_6.data['results']), 2)\n" ]
[ "def test_admin_can_list(self):...\n", "\"\"\"docstring\"\"\"\n", "response = self.client.get('/api/apps')\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 2)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_17(self):...\n", "" ]
[ "def touch(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_2(VAR_6, VAR_7):...\n", "VAR_10 = settings.DYNAMIC_LINK\n", "if len(VAR_10) > 0:\n", "VAR_11 = '{}{}/{}'.format(settings.PUBLIC_DOMAIN, VAR_2, VAR_7)\n", "VAR_12 = '{}{}/{}'.format(settings.APP_DEEPLINK_DOMAIN, VAR_4, VAR_7)\n", "VAR_12 = VAR_10.format(VAR_11)\n", "VAR_9 = HttpResponse('', status=302)\n", "VAR_13 = create_get_experience_interactor()\n", "VAR_9['Location'] = VAR_12\n", "VAR_14 = VAR_13.set_params(VAR_7=experience_share_id, logged_person_id='-1'\n ).execute()\n", "return VAR_9\n", "VAR_15 = VAR_14.description[:77] + '...' if len(VAR_14.description\n ) > 77 else VAR_14.description\n", "VAR_16 = {'st': VAR_14.title, 'sd': VAR_15, 'si': VAR_14.picture.small_url}\n", "VAR_17 = urlencode(VAR_16, quote_via=quote_plus)\n", "VAR_12 = '{}&{}'.format(VAR_12, VAR_17)\n" ]
[ "def experience_redirect(request, experience_share_id):...\n", "dynamic_link = settings.DYNAMIC_LINK\n", "if len(dynamic_link) > 0:\n", "real_link = '{}{}/{}'.format(settings.PUBLIC_DOMAIN, EXPERIENCE_PATH,\n experience_share_id)\n", "link = '{}{}/{}'.format(settings.APP_DEEPLINK_DOMAIN,\n EXPERIENCE_DEEPLINK_PATH, experience_share_id)\n", "link = dynamic_link.format(real_link)\n", "response = HttpResponse('', status=302)\n", "get_experience_interactor = create_get_experience_interactor()\n", "response['Location'] = link\n", "experience = get_experience_interactor.set_params(experience_share_id=\n experience_share_id, logged_person_id='-1').execute()\n", "return response\n", "desc = experience.description[:77] + '...' if len(experience.description\n ) > 77 else experience.description\n", "preview_content = {'st': experience.title, 'sd': desc, 'si': experience.\n picture.small_url}\n", "preview_encoded = urlencode(preview_content, quote_via=quote_plus)\n", "link = '{}&{}'.format(link, preview_encoded)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "@decorators.require_cronjob...\n", "config.cron_update_buckets()\n" ]
[ "@decorators.require_cronjob...\n", "config.cron_update_buckets()\n" ]
[ 0, 0 ]
[ "Condition", "Expr'" ]
[ "def FUNC_9(self, VAR_8):...\n", "return {key: value for key, value in VAR_8.items() if key in self.\n _backend_kwargs}\n" ]
[ "def filter_kwargs_for_backend(self, kwargs):...\n", "return {key: value for key, value in kwargs.items() if key in self.\n _backend_kwargs}\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "from .value_constraints import ClassConstraintComponent\n", "VAR_0 = [ClassConstraintComponent]\n", "VAR_1 = {p: c for c in VAR_0 for p in c.constraint_parameters()}\n", "VAR_2 = list(VAR_1.keys())\n" ]
[ "from .value_constraints import ClassConstraintComponent\n", "ALL_CONSTRAINT_COMPONENTS = [ClassConstraintComponent]\n", "CONSTRAINT_PARAMETERS_MAP = {p: c for c in ALL_CONSTRAINT_COMPONENTS for p in\n c.constraint_parameters()}\n", "ALL_CONSTRAINT_PARAMETERS = list(CONSTRAINT_PARAMETERS_MAP.keys())\n" ]
[ 1, 1, 0, 0 ]
[ "ImportFrom'", "Assign'", "Assign'", "Assign'" ]
[ "@app.route('/metric/api/v1.0/metric', methods=['POST'])...\n", "if not request.json or not 'Origin' in request.json or not 'Key' in request.json or not 'Value' in request.json:\n", "abort(400)\n", "VAR_1 = database.cursor()\n", "VAR_1.execute(\n 'INSERT INTO log_metric (Time, Origin, Key, Value) VALUES (%s,%s,%s,%s);',\n (time.time(), request.json['Origin'], request.json['Key'], request.json\n ['Value']))\n", "VAR_1.close()\n", "database.commit()\n", "return \"{'message': 'OK'}\"\n" ]
[ "@app.route('/metric/api/v1.0/metric', methods=['POST'])...\n", "if not request.json or not 'Origin' in request.json or not 'Key' in request.json or not 'Value' in request.json:\n", "abort(400)\n", "cursor = database.cursor()\n", "cursor.execute(\n 'INSERT INTO log_metric (Time, Origin, Key, Value) VALUES (%s,%s,%s,%s);',\n (time.time(), request.json['Origin'], request.json['Key'], request.json\n ['Value']))\n", "cursor.close()\n", "database.commit()\n", "return \"{'message': 'OK'}\"\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_0(VAR_0, VAR_1, VAR_2):...\n", "def FUNC_1(VAR_3):...\n", "return Task('nfs/{}'.format(VAR_0), VAR_1, VAR_2, partial(\n serialize_dashboard_exception, include_http_status=True))(VAR_3)\n" ]
[ "def NfsTask(name, metadata, wait_for):...\n", "def composed_decorator(func):...\n", "return Task('nfs/{}'.format(name), metadata, wait_for, partial(\n serialize_dashboard_exception, include_http_status=True))(func)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Return'" ]
[ "def FUNC_1(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "if ':' not in VAR_0:\n", "VAR_4 = VAR_0.split(':')\n", "VAR_5 = importlib.import_module(VAR_4[0])\n", "return getattr(VAR_5, VAR_4[1])\n" ]
[ "def str2class(value):...\n", "\"\"\"docstring\"\"\"\n", "if ':' not in value:\n", "name = value.split(':')\n", "module = importlib.import_module(name[0])\n", "return getattr(module, name[1])\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Assign'", "Return'" ]
[ "from datetime import datetime\n", "import pyodbc\n", "import sys\n", "import subprocess as sp\n", "from os import system\n", "def FUNC_0():...\n", "system('cls')\n", "def FUNC_1():...\n", "print('-' * 70)\n", "VAR_0 = 'laserInv'\n", "VAR_1 = False\n", "VAR_2 = None\n", "VAR_3 = False\n", "VAR_4 = False\n", "VAR_5 = None\n", "VAR_6 = datetime.now()\n", "while VAR_3 == False:\n", "sys.exit()\n", "FUNC_0()\n", "if VAR_1 == True:\n", "while VAR_2 != 'yes':\n", "VAR_9.close()\n", "print('Update the key inventory by entering the order information below.')\n", "VAR_2 = 'No'\n", "VAR_1 = False\n", "FUNC_1()\n", "VAR_7 = int(VAR_7)\n", "if VAR_4 == False:\n", "VAR_8 = int(VAR_8)\n", "VAR_15 = input('Order #: ')\n", "VAR_7 = input('Key used (i.e. #29): #')\n", "print('Connecting to database...')\n", "VAR_8 = input('# of keys lased: ')\n", "VAR_9 = pyodbc.connect(Driver='{SQL Server Native Client 11.0}', Server=\n '(LocalDB)\\\\LocalDB Laser', Database='laserInv', trusted_connection='yes')\n", "FUNC_0()\n", "VAR_1 = True\n", "FUNC_1()\n", "VAR_10 = VAR_9.cursor()\n", "print(\"\"\"{} \n Order #: {} \n Key #: {} \n # of keys lased: {}\"\"\".format(VAR_6,\n VAR_15, VAR_7, VAR_8))\n", "VAR_10.execute(\"SELECT invCount FROM keyInventory WHERE keyNum = '%s';\" % VAR_7\n )\n", "FUNC_1()\n", "VAR_14 = VAR_10.fetchall()[0][0]\n", "FUNC_1()\n", "VAR_6 = datetime.now()\n", "VAR_2 = input('Is the information above correct? ')\n", "print(\n \"ERROR: The key number you entered doesn't exist in the keyInventory table.\"\n )\n", "VAR_11 = VAR_14 - VAR_8\n", "if VAR_2 == 'yes':\n", "print(\n \"TIP: If you know you've typed it correctly, you'll have to add it to the Database with newKey.py\"\n )\n", "VAR_12 = VAR_9.cursor()\n", "FUNC_0()\n", "if VAR_2 == 'no':\n", "FUNC_1()\n", "VAR_12.execute('string', (VAR_6, VAR_15, VAR_7, VAR_8, VAR_14, VAR_11))\n", "FUNC_0()\n", "FUNC_0()\n", "input('Press Enter to close...')\n", "VAR_12.commit()\n", "print('Re-enter the information. \\n')\n", "print(\"Must answer yes or no, it's case sensitive because I'm lazy! \\n\")\n", "if VAR_1 == True:\n", "VAR_13 = VAR_9.cursor()\n", "VAR_9.close()\n", "sys.exit()\n", "VAR_13.execute('UPDATE keyInventory SET invCount = ? WHERE keyNum = ?;', (\n VAR_11, VAR_7))\n", "VAR_1 = False\n", "if VAR_1 == True:\n", "VAR_13.commit()\n", "VAR_9.close()\n", "sys.exit()\n", "FUNC_0()\n", "VAR_1 = False\n", "print('Success! Database has been updated.')\n", "FUNC_1()\n", "VAR_5 = None\n", "FUNC_1()\n", "while VAR_5 != 'yes' and VAR_5 != 'no':\n", "input('Press Enter to close...')\n", "VAR_5 = input('Are there more keys on this order? ')\n", "if VAR_5 == 'yes':\n", "VAR_4 = True\n", "if VAR_5 == 'no':\n", "if VAR_1 == True:\n", "VAR_3 = True\n", "FUNC_0()\n", "VAR_9.close()\n", "print('Okay, bye!')\n", "print(\"Must answer yes or no, it's case sensitive because I'm lazy!\")\n", "VAR_1 = False\n" ]
[ "from datetime import datetime\n", "import pyodbc\n", "import sys\n", "import subprocess as sp\n", "from os import system\n", "def clear():...\n", "system('cls')\n", "def divider():...\n", "print('-' * 70)\n", "DBNAME = 'laserInv'\n", "openConn = False\n", "confirmed = None\n", "orderComplete = False\n", "multiKeyOrder = False\n", "addMore = None\n", "u_date = datetime.now()\n", "while orderComplete == False:\n", "sys.exit()\n", "clear()\n", "if openConn == True:\n", "while confirmed != 'yes':\n", "db.close()\n", "print('Update the key inventory by entering the order information below.')\n", "confirmed = 'No'\n", "openConn = False\n", "divider()\n", "u_keyNum = int(u_keyNum)\n", "if multiKeyOrder == False:\n", "u_keysUsed = int(u_keysUsed)\n", "u_orderNum = input('Order #: ')\n", "u_keyNum = input('Key used (i.e. #29): #')\n", "print('Connecting to database...')\n", "u_keysUsed = input('# of keys lased: ')\n", "db = pyodbc.connect(Driver='{SQL Server Native Client 11.0}', Server=\n '(LocalDB)\\\\LocalDB Laser', Database='laserInv', trusted_connection='yes')\n", "clear()\n", "openConn = True\n", "divider()\n", "c1 = db.cursor()\n", "print(\"\"\"{} \n Order #: {} \n Key #: {} \n # of keys lased: {}\"\"\".format(\n u_date, u_orderNum, u_keyNum, u_keysUsed))\n", "c1.execute(\"SELECT invCount FROM keyInventory WHERE keyNum = '%s';\" % u_keyNum)\n", "divider()\n", "u_preCount = c1.fetchall()[0][0]\n", "divider()\n", "u_date = datetime.now()\n", "confirmed = input('Is the information above correct? ')\n", "print(\n \"ERROR: The key number you entered doesn't exist in the keyInventory table.\"\n )\n", "u_postCount = u_preCount - u_keysUsed\n", "if confirmed == 'yes':\n", "print(\n \"TIP: If you know you've typed it correctly, you'll have to add it to the Database with newKey.py\"\n )\n", "c2 = db.cursor()\n", "clear()\n", "if confirmed == 'no':\n", "divider()\n", "c2.execute(\n 'INSERT INTO ordersFilled (submit_time, orderNum, keyNum, keysUsed, preCount, postCount) VALUES (?, ?, ?, ?, ?, ?);'\n , (u_date, u_orderNum, u_keyNum, u_keysUsed, u_preCount, u_postCount))\n", "clear()\n", "clear()\n", "input('Press Enter to close...')\n", "c2.commit()\n", "print('Re-enter the information. \\n')\n", "print(\"Must answer yes or no, it's case sensitive because I'm lazy! \\n\")\n", "if openConn == True:\n", "c3 = db.cursor()\n", "db.close()\n", "sys.exit()\n", "c3.execute('UPDATE keyInventory SET invCount = ? WHERE keyNum = ?;', (\n u_postCount, u_keyNum))\n", "openConn = False\n", "if openConn == True:\n", "c3.commit()\n", "db.close()\n", "sys.exit()\n", "clear()\n", "openConn = False\n", "print('Success! Database has been updated.')\n", "divider()\n", "addMore = None\n", "divider()\n", "while addMore != 'yes' and addMore != 'no':\n", "input('Press Enter to close...')\n", "addMore = input('Are there more keys on this order? ')\n", "if addMore == 'yes':\n", "multiKeyOrder = True\n", "if addMore == 'no':\n", "if openConn == True:\n", "orderComplete = True\n", "clear()\n", "db.close()\n", "print('Okay, bye!')\n", "print(\"Must answer yes or no, it's case sensitive because I'm lazy!\")\n", "openConn = False\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "Import'", "Import'", "ImportFrom'", "FunctionDef'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "Condition", "Condition", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Condition", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'" ]
[ "def FUNC_6(self, VAR_51, VAR_44):...\n", "VAR_51 = FUNC_3(VAR_51)\n", "VAR_82 = None\n", "if VAR_51:\n", "VAR_82 = valid_login(VAR_51, VAR_44)\n", "if not VAR_82:\n", "return self.error()\n", "return VAR_82\n" ]
[ "def run(self, user_name, password):...\n", "user_name = chkuser(user_name)\n", "user = None\n", "if user_name:\n", "user = valid_login(user_name, password)\n", "if not user:\n", "return self.error()\n", "return user\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Return'", "Return'" ]
[ "def __init__(self, VAR_0):...\n", "self.doctype = VAR_0\n", "self.tables = []\n", "self.conditions = []\n", "self.or_conditions = []\n", "self.fields = None\n", "self.user = None\n", "self.ignore_ifnull = False\n", "self.flags = frappe._dict()\n" ]
[ "def __init__(self, doctype):...\n", "self.doctype = doctype\n", "self.tables = []\n", "self.conditions = []\n", "self.or_conditions = []\n", "self.fields = None\n", "self.user = None\n", "self.ignore_ifnull = False\n", "self.flags = frappe._dict()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_9(self, VAR_11):...\n", "\"\"\"docstring\"\"\"\n", "VAR_29 = get_agent_module(VAR_11)\n", "if hasattr(VAR_29, 'add_cmdline_args'):\n", "if hasattr(VAR_29, 'dictionary_class'):\n", "VAR_29.add_cmdline_args(self)\n", "VAR_6 = FUNC_2(VAR_29.dictionary_class())\n", "self.set_defaults(dict_class=s)\n" ]
[ "def add_model_subargs(self, model):...\n", "\"\"\"docstring\"\"\"\n", "agent = get_agent_module(model)\n", "if hasattr(agent, 'add_cmdline_args'):\n", "if hasattr(agent, 'dictionary_class'):\n", "agent.add_cmdline_args(self)\n", "s = class2str(agent.dictionary_class())\n", "self.set_defaults(dict_class=s)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Condition", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_0(self):...\n", "self.objects = load_model_objects()\n", "self.client.login(username='Karyn', password='specialP@55word')\n" ]
[ "def setUp(self):...\n", "self.objects = load_model_objects()\n", "self.client.login(username='Karyn', password='specialP@55word')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_3():...\n", "VAR_3 = {'foo': 'bar'}\n", "VAR_1 = json.dumps(VAR_3)\n", "VAR_4 = tempfile.NamedTemporaryFile(mode='w')\n", "VAR_4.write(VAR_1)\n", "VAR_4.file.seek(0)\n", "VAR_2 = load_source(json_file)\n", "assert VAR_2 == VAR_3\n" ]
[ "def test_json_file_object():...\n", "native = {'foo': 'bar'}\n", "source = json.dumps(native)\n", "tmp_file = tempfile.NamedTemporaryFile(mode='w')\n", "tmp_file.write(source)\n", "tmp_file.file.seek(0)\n", "result = load_source(json_file)\n", "assert result == native\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assert'" ]
[ "def FUNC_4(self):...\n", "VAR_14 = self.cleaned_data\n", "if not VAR_14.get('is_business'):\n", "VAR_14['company'] = ''\n", "if self.event.settings.invoice_address_required:\n", "if VAR_14.get('is_business') and not VAR_14.get('company'):\n", "if 'vat_id' in self.changed_data or not VAR_14.get('vat_id'):\n", "if not VAR_14.get('is_business') and not VAR_14.get('name_parts'):\n", "self.instance.vat_id_validated = False\n", "self.instance.name_parts = VAR_14.get('name_parts')\n", "if self.validate_vat_id and self.instance.vat_id_validated and 'vat_id' not in self.changed_data:\n", "if self.validate_vat_id and VAR_14.get('is_business') and VAR_14.get('country'\n", "if VAR_14.get('vat_id')[:2] != str(VAR_14.get('country')):\n", "self.instance.vat_id_validated = False\n", "VAR_43 = vat_moss.id.validate(VAR_14.get('vat_id'))\n", "VAR_0.exception('VAT ID checking failed for country {}'.format(VAR_14.get(\n 'country')))\n", "if VAR_43:\n", "self.instance.vat_id_validated = False\n", "VAR_44, VAR_45, VAR_46 = VAR_43\n", "if self.request and self.vat_warning:\n", "self.instance.vat_id_validated = True\n", "messages.warning(self.request, _('string'))\n", "VAR_0.exception('VAT ID checking failed for country {}'.format(VAR_14.get(\n 'country')))\n", "self.instance.vat_id = VAR_45\n", "self.instance.vat_id_validated = False\n", "if self.request and self.vat_warning:\n", "messages.warning(self.request, _('string'))\n" ]
[ "def clean(self):...\n", "data = self.cleaned_data\n", "if not data.get('is_business'):\n", "data['company'] = ''\n", "if self.event.settings.invoice_address_required:\n", "if data.get('is_business') and not data.get('company'):\n", "if 'vat_id' in self.changed_data or not data.get('vat_id'):\n", "if not data.get('is_business') and not data.get('name_parts'):\n", "self.instance.vat_id_validated = False\n", "self.instance.name_parts = data.get('name_parts')\n", "if self.validate_vat_id and self.instance.vat_id_validated and 'vat_id' not in self.changed_data:\n", "if self.validate_vat_id and data.get('is_business') and data.get('country'\n", "if data.get('vat_id')[:2] != str(data.get('country')):\n", "self.instance.vat_id_validated = False\n", "result = vat_moss.id.validate(data.get('vat_id'))\n", "logger.exception('VAT ID checking failed for country {}'.format(data.get(\n 'country')))\n", "if result:\n", "self.instance.vat_id_validated = False\n", "country_code, normalized_id, company_name = result\n", "if self.request and self.vat_warning:\n", "self.instance.vat_id_validated = True\n", "messages.warning(self.request, _(\n 'Your VAT ID could not be checked, as the VAT checking service of your country is currently not available. We will therefore need to charge VAT on your invoice. You can get the tax amount back via the VAT reimbursement process.'\n ))\n", "logger.exception('VAT ID checking failed for country {}'.format(data.get(\n 'country')))\n", "self.instance.vat_id = normalized_id\n", "self.instance.vat_id_validated = False\n", "if self.request and self.vat_warning:\n", "messages.warning(self.request, _(\n 'Your VAT ID could not be checked, as the VAT checking service of your country returned an incorrect result. We will therefore need to charge VAT on your invoice. Please contact support to resolve this manually.'\n ))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Condition", "Expr'" ]
[ "def FUNC_1(self, VAR_4):...\n", "VAR_4 = np.float32(VAR_4)\n", "VAR_4 /= 255.0\n", "VAR_4 = np.expand_dims(VAR_4, axis=0)\n", "VAR_4 = np.expand_dims(VAR_4, axis=3)\n", "self.ball_detector_interpreter.set_tensor(self.ball_detector_input_details[\n 0]['index'], VAR_4)\n", "self.ball_detector_interpreter.invoke()\n", "return np.squeeze(self.ball_detector_interpreter.get_tensor(self.\n ball_detector_output_details[0]['index']))\n" ]
[ "def getBallPosition(self, image):...\n", "image = np.float32(image)\n", "image /= 255.0\n", "image = np.expand_dims(image, axis=0)\n", "image = np.expand_dims(image, axis=3)\n", "self.ball_detector_interpreter.set_tensor(self.ball_detector_input_details[\n 0]['index'], image)\n", "self.ball_detector_interpreter.invoke()\n", "return np.squeeze(self.ball_detector_interpreter.get_tensor(self.\n ball_detector_output_details[0]['index']))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "AugAssign'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_5(self):...\n", "VAR_15 = self.data_group.group_type\n", "VAR_16 = DocumentType.objects.filter(group_type=this_type)\n", "if not self.document_type in VAR_16:\n" ]
[ "def clean(self):...\n", "this_type = self.data_group.group_type\n", "doc_types = DocumentType.objects.filter(group_type=this_type)\n", "if not self.document_type in doc_types:\n" ]
[ 0, 0, 6, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition" ]
[ "@VAR_0.route('/level-2/search', methods=['POST'])...\n", "VAR_26 = str(request.form['term'])\n", "VAR_21 = psycopg2.connect('dbname=unlock user=ubuntu')\n", "VAR_22 = VAR_21.cursor()\n", "VAR_22.execute('SELECT * FROM nile_items;')\n", "VAR_23 = VAR_22.fetchall()\n", "VAR_22.close()\n", "VAR_21.close()\n", "VAR_25 = [dict() for x in range(len(VAR_23))]\n", "for VAR_27 in range(len(VAR_23) - 1, -1, -1):\n", "if VAR_26 in VAR_23[VAR_27][0] or VAR_26 in VAR_23[VAR_27][3]:\n", "return str(VAR_25)\n", "VAR_25[VAR_27]['name'] = VAR_23[VAR_27][0]\n", "VAR_25[VAR_27]['price'] = VAR_23[VAR_27][1]\n", "VAR_25[VAR_27]['image'] = VAR_23[VAR_27][2]\n" ]
[ "@app.route('/level-2/search', methods=['POST'])...\n", "term = str(request.form['term'])\n", "conn = psycopg2.connect('dbname=unlock user=ubuntu')\n", "cur = conn.cursor()\n", "cur.execute('SELECT * FROM nile_items;')\n", "res = cur.fetchall()\n", "cur.close()\n", "conn.close()\n", "items = [dict() for x in range(len(res))]\n", "for i in range(len(res) - 1, -1, -1):\n", "if term in res[i][0] or term in res[i][3]:\n", "return str(items)\n", "items[i]['name'] = res[i][0]\n", "items[i]['price'] = res[i][1]\n", "items[i]['image'] = res[i][2]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "For", "Condition", "Return'", "Assign'", "Assign'", "Assign'" ]
[ "def __init__(self, VAR_0, VAR_1=False):...\n", "self.settings = VAR_0\n", "self.sensors = VAR_0['sensors']\n" ]
[ "def __init__(self, settings, debug=False):...\n", "self.settings = settings\n", "self.sensors = settings['sensors']\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "@staticmethod...\n", "if VAR_6.last_run_at:\n", "return VAR_6.schedule.remaining_estimate(last_run_at=obj.last_run_at)\n", "VAR_23, VAR_24 = VAR_6.schedule.is_due(last_run_at=datetime.now(pytz.utc))\n", "VAR_25 = datetime.now(pytz.utc) + timedelta(seconds=y)\n", "return VAR_25\n" ]
[ "@staticmethod...\n", "if obj.last_run_at:\n", "return obj.schedule.remaining_estimate(last_run_at=obj.last_run_at)\n", "z, y = obj.schedule.is_due(last_run_at=datetime.now(pytz.utc))\n", "date = datetime.now(pytz.utc) + timedelta(seconds=y)\n", "return date\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_28(self, *VAR_76, **VAR_52):...\n", "def FUNC_39(VAR_101):...\n", "VAR_101.resources = VAR_76, VAR_52\n", "return VAR_101\n" ]
[ "def resources(self, *args, **resources):...\n", "def decorate(ruleinfo):...\n", "ruleinfo.resources = args, resources\n", "return ruleinfo\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_2(self, VAR_9):...\n", "VAR_9 = u\"'{0}'\".format(VAR_9)\n", "return VAR_9\n" ]
[ "def insert_format(self, value):...\n", "value = u\"'{0}'\".format(value)\n", "return value\n" ]
[ 0, 4, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_10(VAR_32, VAR_34):...\n", "logging.info('Got signal %s', VAR_32)\n" ]
[ "def handler(sig, _):...\n", "logging.info('Got signal %s', sig)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_6(self):...\n", "VAR_1 = 'http://my.url.com'\n", "VAR_2 = 'True'\n", "url_helper.urllib2.urlopen(mox.IsA(urllib2.Request), timeout=mox.IgnoreArg()\n ).AndReturn(StringIO.StringIO(VAR_2))\n", "self._mox.ReplayAll()\n", "self.assertEqual(url_helper.UrlOpen(VAR_1, method='POSTFORM'), VAR_2)\n", "self._mox.VerifyAll()\n" ]
[ "def testUrlOpenPOSTFORMSuccess(self):...\n", "url = 'http://my.url.com'\n", "response = 'True'\n", "url_helper.urllib2.urlopen(mox.IsA(urllib2.Request), timeout=mox.IgnoreArg()\n ).AndReturn(StringIO.StringIO(response))\n", "self._mox.ReplayAll()\n", "self.assertEqual(url_helper.UrlOpen(url, method='POSTFORM'), response)\n", "self._mox.VerifyAll()\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_48(self, VAR_22, VAR_6=None):...\n", "if VAR_6:\n", "VAR_5 = VAR_5 = self.session.query(self.obj)\n", "return self.session.query(self.obj).get(VAR_22)\n", "VAR_35 = VAR_6.copy()\n", "VAR_35.add_filter(self.get_pk_name(), self.FilterEqual, VAR_22)\n", "VAR_5 = self._get_base_query(VAR_5=query, VAR_6=_filters)\n", "return VAR_5.first()\n" ]
[ "def get(self, id, filters=None):...\n", "if filters:\n", "query = query = self.session.query(self.obj)\n", "return self.session.query(self.obj).get(id)\n", "_filters = filters.copy()\n", "_filters.add_filter(self.get_pk_name(), self.FilterEqual, id)\n", "query = self._get_base_query(query=query, filters=_filters)\n", "return query.first()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "def __init__(self, VAR_0, VAR_1, VAR_2=None):...\n", "self.base_directory_path = VAR_1\n", "self.directory_filter, self.file_filter = VAR_0.filters\n", "if VAR_2 is None:\n", "VAR_2 = os_walk\n", "self.walk_fn = VAR_2\n" ]
[ "def __init__(self, loader, base_directory_path, walk_fn=None):...\n", "self.base_directory_path = base_directory_path\n", "self.directory_filter, self.file_filter = loader.filters\n", "if walk_fn is None:\n", "walk_fn = os_walk\n", "self.walk_fn = walk_fn\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'" ]
[ "\"\"\"This module is the video repository in charge of all database requests.\"\"\"\n", "def FUNC_0(VAR_0, VAR_1):...\n", "VAR_1.execute('string'.format(VAR_0=playlist_id))\n", "VAR_8 = VAR_1.fetchall()\n", "return VAR_8\n" ]
[ "\"\"\"This module is the video repository in charge of all database requests.\"\"\"\n", "def retrieve_videos_from_playlist(playlist_id, db):...\n", "db.execute(\n 'SELECT id, title, thumbnail, position from video WHERE playlist_id={playlist_id} ORDER BY position ASC;'\n .format(playlist_id=playlist_id))\n", "rows = db.fetchall()\n", "return rows\n" ]
[ 0, 0, 4, 0, 0 ]
[ "Expr'", "FunctionDef'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_7(VAR_17, *VAR_18, **VAR_19):...\n", "\"\"\"docstring\"\"\"\n", "VAR_20 = VAR_17.POST\n", "VAR_21 = VAR_20.get('new_status')\n", "if not VAR_21:\n", "if not VAR_8 or not callable(VAR_8):\n", "if not 'fields' in VAR_20:\n", "return error_handler.logErrorAndReturnOK(\n 'No valid status can be set by the manageModelStatus.')\n", "error_handler.logErrorAndReturnOK(\n 'No fields to filter on found for manageModelStatus.')\n", "VAR_22 = pickle.loads(str(VAR_20['fields']))\n", "VAR_23 = VAR_7.getForFields(VAR_22, limit=BATCH_SIZE)\n", "for VAR_9 in VAR_23:\n", "if VAR_21:\n", "db.put(VAR_23)\n", "VAR_25 = VAR_21\n", "VAR_25 = VAR_8(VAR_9)\n", "if len(VAR_23) == VAR_4:\n", "VAR_9.status = VAR_25\n", "VAR_24 = VAR_20.copy()\n", "return responses.terminateTask()\n", "return responses.startTask(VAR_17.path, VAR_24=context)\n" ]
[ "def manageModelsStatus(request, *args, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "post_dict = request.POST\n", "new_status = post_dict.get('new_status')\n", "if not new_status:\n", "if not status_retriever or not callable(status_retriever):\n", "if not 'fields' in post_dict:\n", "return error_handler.logErrorAndReturnOK(\n 'No valid status can be set by the manageModelStatus.')\n", "error_handler.logErrorAndReturnOK(\n 'No fields to filter on found for manageModelStatus.')\n", "fields = pickle.loads(str(post_dict['fields']))\n", "entities = entity_logic.getForFields(fields, limit=BATCH_SIZE)\n", "for entity in entities:\n", "if new_status:\n", "db.put(entities)\n", "status = new_status\n", "status = status_retriever(entity)\n", "if len(entities) == BATCH_SIZE:\n", "entity.status = status\n", "context = post_dict.copy()\n", "return responses.terminateTask()\n", "return responses.startTask(request.path, context=context)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Condition", "Condition", "Return'", "Expr'", "Assign'", "Assign'", "For", "Condition", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Return'" ]
[ "def FUNC_16(self, VAR_23):...\n", "\"\"\"docstring\"\"\"\n", "if not VAR_23:\n", "return VAR_23\n", "VAR_39 = '\\\\\"password\\\\\": [^,}]*'\n", "return re.sub(VAR_39, '\"password\": \"********\"', VAR_23)\n" ]
[ "def _mask_password(self, json):...\n", "\"\"\"docstring\"\"\"\n", "if not json:\n", "return json\n", "pattern = '\\\\\"password\\\\\": [^,}]*'\n", "return re.sub(pattern, '\"password\": \"********\"', json)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Assign'", "Return'" ]
[ "def FUNC_1(self):...\n", "VAR_2 = self.connect()\n", "VAR_3 = 'SELECT description FROM crimes;'\n", "VAR_2.close()\n", "cursor.execute(VAR_3)\n", "return cursor.fetchall()\n" ]
[ "def get_all_inputs(self):...\n", "connection = self.connect()\n", "query = 'SELECT description FROM crimes;'\n", "connection.close()\n", "cursor.execute(query)\n", "return cursor.fetchall()\n" ]
[ 0, 0, 0, 0, 4, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def __init__(self, *VAR_5, **VAR_6):...\n", "VAR_31 = VAR_6.get('null', False)\n", "VAR_29 = VAR_6.get('default', None)\n", "self.encoder = VAR_6.get('encoder', None)\n", "if not VAR_31 and VAR_29 is None:\n", "VAR_6['default'] = '{}'\n", "if isinstance(VAR_29, (list, dict)):\n", "VAR_6['default'] = json_encode(VAR_29, cls=self.encoder, sort_keys=True)\n", "models.Field.__init__(self, *VAR_5, **kwargs)\n" ]
[ "def __init__(self, *args, **kwargs):...\n", "null = kwargs.get('null', False)\n", "default = kwargs.get('default', None)\n", "self.encoder = kwargs.get('encoder', None)\n", "if not null and default is None:\n", "kwargs['default'] = '{}'\n", "if isinstance(default, (list, dict)):\n", "kwargs['default'] = json_encode(default, cls=self.encoder, sort_keys=True)\n", "models.Field.__init__(self, *args, **kwargs)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Expr'" ]
[ "def FUNC_8(self):...\n", "VAR_14 = []\n", "for n in self.nodes:\n", "if len(self.get_dependents(n['node_object'])) < 1:\n", "return VAR_14\n", "VAR_14.append(n)\n" ]
[ "def get_root_nodes(self):...\n", "roots = []\n", "for n in self.nodes:\n", "if len(self.get_dependents(n['node_object'])) < 1:\n", "return roots\n", "roots.append(n)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Condition", "Return'", "Expr'" ]
[ "def __str__(self):...\n", "VAR_26 = list()\n", "if self.forced:\n", "VAR_26.append('Forced execution')\n", "if self.noio:\n", "VAR_26 = '; '.join(VAR_26)\n", "VAR_26.append('Rules with neither input nor output files are always executed.')\n", "if self.nooutput:\n", "return VAR_26\n", "VAR_26.append(\n 'Rules with a run or shell declaration but no output are always executed.')\n", "if self.missing_output:\n", "VAR_26.append('Missing output files: {}'.format(', '.join(self.missing_output))\n )\n", "if self.incomplete_output:\n", "VAR_26.append('Incomplete output files: {}'.format(', '.join(self.\n incomplete_output)))\n", "VAR_30 = self.updated_input - self.updated_input_run\n", "if VAR_30:\n", "VAR_26.append('Updated input files: {}'.format(', '.join(VAR_30)))\n", "if self.updated_input_run:\n", "VAR_26.append('Input files updated by another job: {}'.format(', '.join(\n self.updated_input_run)))\n" ]
[ "def __str__(self):...\n", "s = list()\n", "if self.forced:\n", "s.append('Forced execution')\n", "if self.noio:\n", "s = '; '.join(s)\n", "s.append('Rules with neither input nor output files are always executed.')\n", "if self.nooutput:\n", "return s\n", "s.append(\n 'Rules with a run or shell declaration but no output are always executed.')\n", "if self.missing_output:\n", "s.append('Missing output files: {}'.format(', '.join(self.missing_output)))\n", "if self.incomplete_output:\n", "s.append('Incomplete output files: {}'.format(', '.join(self.\n incomplete_output)))\n", "updated_input = self.updated_input - self.updated_input_run\n", "if updated_input:\n", "s.append('Updated input files: {}'.format(', '.join(updated_input)))\n", "if self.updated_input_run:\n", "s.append('Input files updated by another job: {}'.format(', '.join(self.\n updated_input_run)))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Expr'", "Condition", "Assign'", "Expr'", "Condition", "Return'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Assign'", "Condition", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_8(VAR_2, VAR_5, VAR_7, VAR_1):...\n", "VAR_1.execute('string'.format(VAR_5=position, VAR_7=next_position))\n" ]
[ "def update_video_position(id, position, next_position, db):...\n", "db.execute(\n 'UPDATE video SET position = Case position When {position} Then {next_position} Else position + 1 End WHERE position BETWEEN {next_position} AND {position};'\n .format(position=position, next_position=next_position))\n" ]
[ 0, 4 ]
[ "FunctionDef'", "Expr'" ]
[ "def __init__(self, VAR_10, VAR_11={}):...\n", "self._leap_provider = VAR_10\n", "self._credentials = VAR_11.copy()\n" ]
[ "def __init__(self, leap_provider, credentials={}):...\n", "self._leap_provider = leap_provider\n", "self._credentials = credentials.copy()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_9(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_0.debug(_('Updating volume stats'))\n", "VAR_34 = {}\n", "VAR_35 = 'eqlx'\n", "if self.configuration:\n", "VAR_35 = self.configuration.safe_get('volume_backend_name')\n", "VAR_34['volume_backend_name'] = VAR_35 or 'eqlx'\n", "VAR_34['vendor_name'] = 'Dell'\n", "VAR_34['driver_version'] = self.VERSION\n", "VAR_34['storage_protocol'] = 'iSCSI'\n", "VAR_34['reserved_percentage'] = 0\n", "VAR_34['QoS_support'] = False\n", "VAR_34['total_capacity_gb'] = 'infinite'\n", "VAR_34['free_capacity_gb'] = 'infinite'\n", "for line in self._eql_execute('pool', 'select', self.configuration.\n", "if line.startswith('TotalCapacity:'):\n", "self._stats = VAR_34\n", "VAR_50 = line.rstrip().partition(' ')\n", "if line.startswith('FreeSpace:'):\n", "VAR_34['total_capacity_gb'] = self._get_space_in_gb(VAR_50[-1])\n", "VAR_50 = line.rstrip().partition(' ')\n", "VAR_34['free_capacity_gb'] = self._get_space_in_gb(VAR_50[-1])\n" ]
[ "def _update_volume_stats(self):...\n", "\"\"\"docstring\"\"\"\n", "LOG.debug(_('Updating volume stats'))\n", "data = {}\n", "backend_name = 'eqlx'\n", "if self.configuration:\n", "backend_name = self.configuration.safe_get('volume_backend_name')\n", "data['volume_backend_name'] = backend_name or 'eqlx'\n", "data['vendor_name'] = 'Dell'\n", "data['driver_version'] = self.VERSION\n", "data['storage_protocol'] = 'iSCSI'\n", "data['reserved_percentage'] = 0\n", "data['QoS_support'] = False\n", "data['total_capacity_gb'] = 'infinite'\n", "data['free_capacity_gb'] = 'infinite'\n", "for line in self._eql_execute('pool', 'select', self.configuration.\n", "if line.startswith('TotalCapacity:'):\n", "self._stats = data\n", "out_tup = line.rstrip().partition(' ')\n", "if line.startswith('FreeSpace:'):\n", "data['total_capacity_gb'] = self._get_space_in_gb(out_tup[-1])\n", "out_tup = line.rstrip().partition(' ')\n", "data['free_capacity_gb'] = self._get_space_in_gb(out_tup[-1])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_20(self, VAR_4):...\n", "" ]
[ "def on_task_failure(self, task):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_0(self):...\n", "super(CLASS_0, self).setUp()\n" ]
[ "def setUp(self):...\n", "super(TestAuthMiddleware, self).setUp()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_2(self):...\n", "subprocess.Popen([sys.executable, '-c',\n 'import os; print(\"HORK\" in os.environ)'], stdout=output).wait()\n", "VAR_18.seek(0)\n", "self.assertEqual('False\\n', VAR_18.read())\n" ]
[ "def test_environment_negation(self):...\n", "subprocess.Popen([sys.executable, '-c',\n 'import os; print(\"HORK\" in os.environ)'], stdout=output).wait()\n", "output.seek(0)\n", "self.assertEqual('False\\n', output.read())\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_3(self, VAR_8, VAR_9):...\n", "for line in VAR_8:\n", "if line.startswith(VAR_9):\n", "return\n", "return line[len(VAR_9):]\n" ]
[ "def _get_prefixed_value(self, lines, prefix):...\n", "for line in lines:\n", "if line.startswith(prefix):\n", "return\n", "return line[len(prefix):]\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Return'", "Return'" ]
[ "def FUNC_6(self, *VAR_17, **VAR_18):...\n", "super(CLASS_0, self).setUp()\n", "FUNC_0()\n", "if self.use_client_cert_auth():\n", "VAR_54 = FUNC_2()\n", "VAR_54 = FUNC_1()\n", "self.nsxlib = v3.NsxLib(VAR_54)\n", "self.maxDiff = None\n" ]
[ "def setUp(self, *args, **kwargs):...\n", "super(NsxLibTestCase, self).setUp()\n", "_mock_nsxlib()\n", "if self.use_client_cert_auth():\n", "nsxlib_config = get_nsxlib_config_with_client_cert()\n", "nsxlib_config = get_default_nsxlib_config()\n", "self.nsxlib = v3.NsxLib(nsxlib_config)\n", "self.maxDiff = None\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_6(VAR_12):...\n", "if isinstance(VAR_12, set):\n", "return list(VAR_12)\n", "if hasattr(VAR_12, '__str__'):\n", "return str(VAR_12)\n", "log.msg('RENDERING ERROR, cannot json serialize %s' % VAR_12, system='httprest'\n )\n" ]
[ "def render(obj):...\n", "if isinstance(obj, set):\n", "return list(obj)\n", "if hasattr(obj, '__str__'):\n", "return str(obj)\n", "log.msg('RENDERING ERROR, cannot json serialize %s' % obj, system='httprest')\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Return'", "Expr'" ]
[ "def FUNC_34(self):...\n", "if JvmPlatform.global_instance().get_options().compiler == 'zinc':\n", "return super(CLASS_1, self).execute()\n" ]
[ "def execute(self):...\n", "if JvmPlatform.global_instance().get_options().compiler == 'zinc':\n", "return super(ZincCompile, self).execute()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "For", "Return'" ]
[ "def FUNC_6(self, VAR_51, VAR_44):...\n", "VAR_51 = FUNC_3(VAR_51)\n", "VAR_82 = None\n", "if VAR_51:\n", "VAR_82 = valid_login(VAR_51, VAR_44)\n", "if not VAR_82:\n", "return self.error()\n", "return VAR_82\n" ]
[ "def run(self, user_name, password):...\n", "user_name = chkuser(user_name)\n", "user = None\n", "if user_name:\n", "user = valid_login(user_name, password)\n", "if not user:\n", "return self.error()\n", "return user\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_5(*VAR_17):...\n", "VAR_18 = VAR_8.replace('_', '-')\n", "VAR_9 = ' '.join(str(elem) for elem in VAR_17)\n", "return self._exec_adb_cmd(VAR_18, VAR_9)\n" ]
[ "def adb_call(*args):...\n", "clean_name = name.replace('_', '-')\n", "arg_str = ' '.join(str(elem) for elem in args)\n", "return self._exec_adb_cmd(clean_name, arg_str)\n" ]
[ 0, 0, 2, 2 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_2(self, VAR_5, VAR_6, VAR_7, VAR_8=False):...\n", "\"\"\"docstring\"\"\"\n", "VAR_14 = self.read_metadata(VAR_7)\n", "VAR_15, VAR_16, VAR_17 = self.workspace_changes(VAR_5, VAR_14, update_meta=\n False)\n", "VAR_18, VAR_12 = refpath_to_mfs(Path(f'@{VAR_7}'))\n", "for path in VAR_15:\n", "os.remove(path)\n", "for path in (VAR_16 | VAR_17):\n", "VAR_29 = self.get_mfs_path(VAR_5, VAR_6, branch_info=mfs_refpath / path.\n relative_to(fs_repo_root))\n", "VAR_30 = VAR_14[str(path)]['timestamp']\n", "f.write(self.ipfs.files_read(VAR_29))\n", "os.utime(path, ns=(timestamp, timestamp))\n" ]
[ "def _load_ref_into_repo(self, fs_repo_root, branch, ref, without_timestamps...\n", "\"\"\"docstring\"\"\"\n", "metadata = self.read_metadata(ref)\n", "added, removed, modified = self.workspace_changes(fs_repo_root, metadata,\n update_meta=False)\n", "mfs_refpath, _ = refpath_to_mfs(Path(f'@{ref}'))\n", "for path in added:\n", "os.remove(path)\n", "for path in (removed | modified):\n", "mfs_path = self.get_mfs_path(fs_repo_root, branch, branch_info=mfs_refpath /\n path.relative_to(fs_repo_root))\n", "timestamp = metadata[str(path)]['timestamp']\n", "f.write(self.ipfs.files_read(mfs_path))\n", "os.utime(path, ns=(timestamp, timestamp))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "For", "Expr'", "For", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_14(self, VAR_41, VAR_42=None):...\n", "" ]
[ "def get(self, key, default=None):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_0(self, VAR_0='crimemap'):...\n", "return pymysql.connect(host='localhost', user=dbconfig.db_user, passwd=\n dbconfig.db_password, db=database)\n" ]
[ "def connect(self, database='crimemap'):...\n", "return pymysql.connect(host='localhost', user=dbconfig.db_user, passwd=\n dbconfig.db_password, db=database)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_8(self, VAR_26):...\n", "\"\"\"docstring\"\"\"\n", "VAR_21 = self.resolve_path(VAR_26, os.path.dirname(VAR_26))\n", "if not VAR_21 or not self.exists(VAR_21):\n", "VAR_24 = self.getfile(VAR_21)\n", "if VAR_24[VAR_2] == VAR_12:\n", "if VAR_24[VAR_2] == VAR_13 and VAR_24[VAR_10]:\n", "return FUNC_14(VAR_24[VAR_10], 'rb').read()\n", "if VAR_24[VAR_2] == VAR_13 and VAR_24[VAR_5] == 0:\n", "return ''\n" ]
[ "def file_contents(self, target):...\n", "\"\"\"docstring\"\"\"\n", "path = self.resolve_path(target, os.path.dirname(target))\n", "if not path or not self.exists(path):\n", "f = self.getfile(path)\n", "if f[A_TYPE] == T_DIR:\n", "if f[A_TYPE] == T_FILE and f[A_REALFILE]:\n", "return open(f[A_REALFILE], 'rb').read()\n", "if f[A_TYPE] == T_FILE and f[A_SIZE] == 0:\n", "return ''\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Return'", "Condition", "Return'" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "if not request.json:\n", "abort(400)\n", "VAR_7 = request.json.get('Note', '')\n", "VAR_8 = request.json.get('Attributter', {})\n", "VAR_9 = request.json.get('Tilstande', {})\n", "VAR_10 = request.json.get('Relationer', {})\n", "if not db.object_exists(VAR_2.__name__, VAR_5):\n", "VAR_17 = db.create_or_import_object(VAR_2.__name__, VAR_7, VAR_8, VAR_9,\n VAR_10, VAR_5)\n", "\"\"\"Edit or passivate.\"\"\"\n", "return FUNC_0(u'Importeret {0}: {1}'.format(VAR_2.__name__, VAR_5)), 200\n", "if request.json.get('livscyklus', '').lower() == 'passiv':\n", "db.passivate_object(VAR_2.__name__, VAR_7, VAR_5)\n", "VAR_17 = db.update_object(VAR_2.__name__, VAR_7, VAR_8, VAR_9, VAR_10, VAR_5)\n", "return FUNC_0(u'Passiveret {0}: {1}'.format(VAR_2.__name__, VAR_5)), 200\n", "return FUNC_0(u'Opdateret {0}: {1}'.format(VAR_2.__name__, VAR_5)), 200\n" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "if not request.json:\n", "abort(400)\n", "note = request.json.get('Note', '')\n", "attributes = request.json.get('Attributter', {})\n", "states = request.json.get('Tilstande', {})\n", "relations = request.json.get('Relationer', {})\n", "if not db.object_exists(cls.__name__, uuid):\n", "result = db.create_or_import_object(cls.__name__, note, attributes, states,\n relations, uuid)\n", "\"\"\"Edit or passivate.\"\"\"\n", "return j(u'Importeret {0}: {1}'.format(cls.__name__, uuid)), 200\n", "if request.json.get('livscyklus', '').lower() == 'passiv':\n", "db.passivate_object(cls.__name__, note, uuid)\n", "result = db.update_object(cls.__name__, note, attributes, states, relations,\n uuid)\n", "return j(u'Passiveret {0}: {1}'.format(cls.__name__, uuid)), 200\n", "return j(u'Opdateret {0}: {1}'.format(cls.__name__, uuid)), 200\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Return'", "Condition", "Expr'", "Assign'", "Return'", "Return'" ]
[ "def FUNC_2(VAR_0, VAR_1):...\n", "VAR_8 = get_object_or_404(Hunt, id=pk)\n", "if VAR_8.notice == True:\n", "VAR_8.setNoticeFalse()\n", "VAR_8.setNoticeTrue()\n", "VAR_8.run()\n", "return redirect('threat_hunter:index')\n" ]
[ "def hunt_switch_notice(request, pk):...\n", "hunt = get_object_or_404(Hunt, id=pk)\n", "if hunt.notice == True:\n", "hunt.setNoticeFalse()\n", "hunt.setNoticeTrue()\n", "hunt.run()\n", "return redirect('threat_hunter:index')\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "\"\"\"\nThis module contains ...\n\"\"\"\n", "from __future__ import division, absolute_import\n", "import cPickle as pickle\n", "import pickle\n", "import os\n", "import time\n", "import fnmatch\n", "import hashlib\n", "import re\n", "import stat\n", "import errno\n", "from twisted.python import log\n", "from cowrie.core.config import CONFIG\n", "VAR_0 = pickle.load(FUNC_14(CONFIG.get('honeypot', 'filesystem_file'), 'rb'))\n", "VAR_1, VAR_2, VAR_3, VAR_4, VAR_5, VAR_6, VAR_7, VAR_8, VAR_9, VAR_10 = list(\n range(0, 10))\n", "VAR_11, VAR_12, VAR_13, VAR_14, VAR_15, VAR_16, VAR_17 = list(range(0, 7))\n", "\"\"\"string\"\"\"\n", "\"\"\"\n raise OSError(errno.ENOENT, os.strerror(errno.ENOENT))\n \"\"\"\n", "\"\"\"\n \"\"\"\n", "def __init__(self, VAR_18, VAR_19):...\n", "self.fs = VAR_18\n", "self.cfg = VAR_19\n", "self.tempfiles = {}\n", "self.filenames = {}\n", "self.newcount = 0\n", "self.init_honeyfs(self.cfg.get('honeypot', 'contents_path'))\n", "def FUNC_0(self, VAR_20):...\n", "\"\"\"docstring\"\"\"\n", "for VAR_21, directories, filenames in os.walk(VAR_20):\n", "for VAR_32 in filenames:\n", "def FUNC_1(self, VAR_21, VAR_22):...\n", "VAR_72 = os.path.join(VAR_21, VAR_32)\n", "\"\"\"docstring\"\"\"\n", "VAR_73 = '/' + os.path.relpath(VAR_72, VAR_20)\n", "VAR_55 = VAR_21.rstrip('/').split('/')\n", "VAR_24 = self.getfile(VAR_73, VAR_23=False)\n", "if VAR_21[0] == '/':\n", "if VAR_24 and VAR_24[VAR_2] == VAR_13:\n", "VAR_22 = []\n", "VAR_22 = [x for x in VAR_22.split('/') if len(x) and x is not None]\n", "self.update_realfile(VAR_24, VAR_72)\n", "while 1:\n", "if not len(VAR_55):\n", "VAR_66 = VAR_55.pop(0)\n", "return '/%s' % ('/'.join(VAR_22),)\n", "if VAR_66 == '..':\n", "if len(VAR_22):\n", "if VAR_66 in ('.', ''):\n", "VAR_22.pop()\n", "VAR_22.append(VAR_66)\n" ]
[ "\"\"\"\nThis module contains ...\n\"\"\"\n", "from __future__ import division, absolute_import\n", "import cPickle as pickle\n", "import pickle\n", "import os\n", "import time\n", "import fnmatch\n", "import hashlib\n", "import re\n", "import stat\n", "import errno\n", "from twisted.python import log\n", "from cowrie.core.config import CONFIG\n", "PICKLE = pickle.load(open(CONFIG.get('honeypot', 'filesystem_file'), 'rb'))\n", "(A_NAME, A_TYPE, A_UID, A_GID, A_SIZE, A_MODE, A_CTIME, A_CONTENTS,\n A_TARGET, A_REALFILE) = list(range(0, 10))\n", "T_LINK, T_DIR, T_FILE, T_BLK, T_CHR, T_SOCK, T_FIFO = list(range(0, 7))\n", "\"\"\"\n 62 ELOOP Too many levels of symbolic links. A path name lookup involved more than 8 symbolic links.\n raise OSError(errno.ELOOP, os.strerror(errno.ENOENT))\n \"\"\"\n", "\"\"\"\n raise OSError(errno.ENOENT, os.strerror(errno.ENOENT))\n \"\"\"\n", "\"\"\"\n \"\"\"\n", "def __init__(self, fs, cfg):...\n", "self.fs = fs\n", "self.cfg = cfg\n", "self.tempfiles = {}\n", "self.filenames = {}\n", "self.newcount = 0\n", "self.init_honeyfs(self.cfg.get('honeypot', 'contents_path'))\n", "def init_honeyfs(self, honeyfs_path):...\n", "\"\"\"docstring\"\"\"\n", "for path, directories, filenames in os.walk(honeyfs_path):\n", "for filename in filenames:\n", "def resolve_path(self, path, cwd):...\n", "realfile_path = os.path.join(path, filename)\n", "\"\"\"docstring\"\"\"\n", "virtual_path = '/' + os.path.relpath(realfile_path, honeyfs_path)\n", "pieces = path.rstrip('/').split('/')\n", "f = self.getfile(virtual_path, follow_symlinks=False)\n", "if path[0] == '/':\n", "if f and f[A_TYPE] == T_FILE:\n", "cwd = []\n", "cwd = [x for x in cwd.split('/') if len(x) and x is not None]\n", "self.update_realfile(f, realfile_path)\n", "while 1:\n", "if not len(pieces):\n", "piece = pieces.pop(0)\n", "return '/%s' % ('/'.join(cwd),)\n", "if piece == '..':\n", "if len(cwd):\n", "if piece in ('.', ''):\n", "cwd.pop()\n", "cwd.append(piece)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Docstring", "For", "For", "FunctionDef'", "Assign'", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Condition", "Assign'", "Return'", "Condition", "Condition", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_5(VAR_9, VAR_12, VAR_13, VAR_14):...\n", "FUNC_8(VAR_9, VAR_13, VAR_14)\n", "VAR_9.cmd('send-keys', VAR_12, 'Enter')\n" ]
[ "def start_window(window, cmd, log_file, comp_name):...\n", "setup_log(window, log_file, comp_name)\n", "window.cmd('send-keys', cmd, 'Enter')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'" ]
[ "def __hash__(self):...\n", "return hash(self.value)\n" ]
[ "def __hash__(self):...\n", "return hash(self.value)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@VAR_0.filter...\n", "return int(VAR_5 * 100)\n" ]
[ "@register.filter...\n", "return int(decimal * 100)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@commands.command(pass_context=True)...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = VAR_6.lower()\n", "if VAR_6 == '3ds' or VAR_6 == 'auto' and 'wiiu' not in VAR_4.message.channel.name:\n", "VAR_7 = discord.Embed(VAR_2='Guide', VAR_3=discord.Color(13506590))\n", "if (VAR_6 == 'wiiu' or VAR_6 == 'wii u'\n", "VAR_7.set_author(name='Plailect', url='https://3ds.guide/')\n", "VAR_7 = discord.Embed(VAR_2='Guide', VAR_3=discord.Color(39623))\n", "VAR_7.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "VAR_7.set_author(name='FlimFlam69 & Plailect', url='https://wiiu.guide/')\n", "VAR_7.url = 'https://3ds.guide/'\n", "VAR_7.set_thumbnail(url='http://i.imgur.com/CpF12I4.png')\n", "VAR_7.description = (\n 'A complete guide to 3DS custom firmware, from stock to boot9strap.')\n", "VAR_7.url = 'https://wiiu.guide/'\n", "await self.bot.say('', VAR_7=embed)\n", "VAR_7.description = (\n \"FlimFlam69 and Plailect's Wii U custom firmware + coldboothax guide\")\n", "await self.bot.say('', VAR_7=embed)\n" ]
[ "@commands.command(pass_context=True)...\n", "\"\"\"docstring\"\"\"\n", "console = console.lower()\n", "if console == '3ds' or console == 'auto' and 'wiiu' not in ctx.message.channel.name:\n", "embed = discord.Embed(title='Guide', color=discord.Color(13506590))\n", "if (console == 'wiiu' or console == 'wii u'\n", "embed.set_author(name='Plailect', url='https://3ds.guide/')\n", "embed = discord.Embed(title='Guide', color=discord.Color(39623))\n", "embed.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "embed.set_author(name='FlimFlam69 & Plailect', url='https://wiiu.guide/')\n", "embed.url = 'https://3ds.guide/'\n", "embed.set_thumbnail(url='http://i.imgur.com/CpF12I4.png')\n", "embed.description = (\n 'A complete guide to 3DS custom firmware, from stock to boot9strap.')\n", "embed.url = 'https://wiiu.guide/'\n", "await self.bot.say('', embed=embed)\n", "embed.description = (\n \"FlimFlam69 and Plailect's Wii U custom firmware + coldboothax guide\")\n", "await self.bot.say('', embed=embed)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_0():...\n", "VAR_0 = sqlite3.connect(CFG('dbname'))\n", "return VAR_0, VAR_0.cursor()\n" ]
[ "def connectDB():...\n", "conn = sqlite3.connect(CFG('dbname'))\n", "return conn, conn.cursor()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_6(self, VAR_59, VAR_60):...\n", "if self.remember:\n", "VAR_96 = '%s_%s' % (VAR_60, self.nav.get_param)\n", "if VAR_59 not in self.nav.options:\n", "VAR_97 = copy(VAR_101.user.sort_options) if VAR_101.user else {}\n", "VAR_59 = self.nav.default\n", "if self.remember and VAR_101.user_is_loggedin and VAR_59 != VAR_98:\n", "VAR_98 = VAR_97.get(VAR_96)\n", "VAR_97[VAR_96] = VAR_59\n", "return VAR_59\n", "if not VAR_59:\n", "VAR_101.user.sort_options = VAR_97\n", "VAR_59 = VAR_98\n", "VAR_82 = VAR_101.user\n", "utils.worker.do(lambda : VAR_82._commit())\n" ]
[ "def run(self, sort, where):...\n", "if self.remember:\n", "pref = '%s_%s' % (where, self.nav.get_param)\n", "if sort not in self.nav.options:\n", "user_prefs = copy(c.user.sort_options) if c.user else {}\n", "sort = self.nav.default\n", "if self.remember and c.user_is_loggedin and sort != user_pref:\n", "user_pref = user_prefs.get(pref)\n", "user_prefs[pref] = sort\n", "return sort\n", "if not sort:\n", "c.user.sort_options = user_prefs\n", "sort = user_pref\n", "user = c.user\n", "utils.worker.do(lambda : user._commit())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "@property...\n", "return self.get_object()\n" ]
[ "@property...\n", "return self.get_object()\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_4(self):...\n", "VAR_3 = self.get_root_nodes()\n", "VAR_4 = copy.copy(VAR_3)\n", "VAR_8 = []\n", "VAR_9 = set()\n", "for index, n in enumerate(VAR_4):\n", "VAR_10 = n['node_object']\n", "return [n['node_object'] for n in VAR_8]\n", "if VAR_10.id in VAR_9:\n", "VAR_9.add(VAR_10.id)\n", "VAR_11 = VAR_10.job\n", "if not VAR_11 and VAR_10.do_not_run is False and n not in VAR_3:\n", "VAR_17 = [p['node_object'] for p in self.get_dependents(VAR_10)]\n", "if VAR_10.do_not_run:\n", "VAR_18 = True\n", "VAR_12 = self.get_dependencies(VAR_10, 'success_nodes')\n", "if VAR_11 and VAR_11.status == 'failed':\n", "for p in VAR_17:\n", "VAR_13 = self.get_dependencies(VAR_10, 'failure_nodes')\n", "VAR_13 = self.get_dependencies(VAR_10, 'success_nodes')\n", "if VAR_11 and VAR_11.status == 'successful':\n", "if not p.job and p.do_not_run is False:\n", "if VAR_18:\n", "VAR_14 = self.get_dependencies(VAR_10, 'always_nodes')\n", "VAR_4.extend(VAR_13)\n", "VAR_12 = self.get_dependencies(VAR_10, 'failure_nodes')\n", "VAR_18 = False\n", "VAR_10.do_not_run = True\n", "VAR_16 = VAR_13 + VAR_14\n", "VAR_4.extend(VAR_12)\n", "VAR_8.append(n)\n", "VAR_4.extend(VAR_16)\n" ]
[ "def mark_dnr_nodes(self):...\n", "root_nodes = self.get_root_nodes()\n", "nodes = copy.copy(root_nodes)\n", "nodes_marked_do_not_run = []\n", "node_ids_visited = set()\n", "for index, n in enumerate(nodes):\n", "obj = n['node_object']\n", "return [n['node_object'] for n in nodes_marked_do_not_run]\n", "if obj.id in node_ids_visited:\n", "node_ids_visited.add(obj.id)\n", "job = obj.job\n", "if not job and obj.do_not_run is False and n not in root_nodes:\n", "parent_nodes = [p['node_object'] for p in self.get_dependents(obj)]\n", "if obj.do_not_run:\n", "all_parents_dnr = True\n", "children_success = self.get_dependencies(obj, 'success_nodes')\n", "if job and job.status == 'failed':\n", "for p in parent_nodes:\n", "children_failed = self.get_dependencies(obj, 'failure_nodes')\n", "children_failed = self.get_dependencies(obj, 'success_nodes')\n", "if job and job.status == 'successful':\n", "if not p.job and p.do_not_run is False:\n", "if all_parents_dnr:\n", "children_always = self.get_dependencies(obj, 'always_nodes')\n", "nodes.extend(children_failed)\n", "children_success = self.get_dependencies(obj, 'failure_nodes')\n", "all_parents_dnr = False\n", "obj.do_not_run = True\n", "children_all = children_failed + children_always\n", "nodes.extend(children_success)\n", "nodes_marked_do_not_run.append(n)\n", "nodes.extend(children_all)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "For", "Assign'", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_5(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "VAR_20 = kijiji_api.KijijiApi()\n", "VAR_20.login(VAR_0.username, VAR_0.password)\n", "[print(\"{} '{}'\".format(adId, adName)) for adName, adId in VAR_20.get_all_ads()\n ]\n" ]
[ "def show_ads(args):...\n", "\"\"\"docstring\"\"\"\n", "api = kijiji_api.KijijiApi()\n", "api.login(args.username, args.password)\n", "[print(\"{} '{}'\".format(adId, adName)) for adName, adId in api.get_all_ads()]\n" ]
[ 0, 0, 0, 0, 5 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_17(self):...\n", "if not self._IsServerAlive():\n", "return\n", "VAR_5 = {}\n", "FUNC_1(VAR_5)\n", "SendEventNotificationAsync('BufferVisit', VAR_5)\n" ]
[ "def OnBufferVisit(self):...\n", "if not self._IsServerAlive():\n", "return\n", "extra_data = {}\n", "_AddUltiSnipsDataIfNeeded(extra_data)\n", "SendEventNotificationAsync('BufferVisit', extra_data)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_3(self, VAR_6=None, VAR_7='', VAR_8='', VAR_9=None, VAR_10=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_5 = self.session.query(self.obj)\n", "if len(VAR_7.split('.')) >= 2:\n", "VAR_33 = ''\n", "VAR_23 = self.session.query(func.count('*')).select_from(self.obj)\n", "for join_relation in VAR_7.split('.')[:-1]:\n", "VAR_23 = self._get_base_query(VAR_5=query_count, VAR_6=filters)\n", "VAR_37 = self.get_related_model(join_relation)\n", "VAR_7 = VAR_33 + VAR_7.split('.')[-1]\n", "VAR_5 = self._get_base_query(VAR_5=query, VAR_6=filters, VAR_7=order_column,\n VAR_8=order_direction)\n", "VAR_5 = VAR_5.join(VAR_37)\n", "VAR_24 = VAR_23.scalar()\n", "VAR_33 = VAR_33 + VAR_37.__tablename__ + '.'\n", "if VAR_9:\n", "VAR_5 = VAR_5.offset(VAR_9 * VAR_10)\n", "if VAR_10:\n", "VAR_5 = VAR_5.limit(VAR_10)\n", "return VAR_24, VAR_5.all()\n" ]
[ "def query(self, filters=None, order_column='', order_direction='', page=...\n", "\"\"\"docstring\"\"\"\n", "query = self.session.query(self.obj)\n", "if len(order_column.split('.')) >= 2:\n", "tmp_order_column = ''\n", "query_count = self.session.query(func.count('*')).select_from(self.obj)\n", "for join_relation in order_column.split('.')[:-1]:\n", "query_count = self._get_base_query(query=query_count, filters=filters)\n", "model_relation = self.get_related_model(join_relation)\n", "order_column = tmp_order_column + order_column.split('.')[-1]\n", "query = self._get_base_query(query=query, filters=filters, order_column=\n order_column, order_direction=order_direction)\n", "query = query.join(model_relation)\n", "count = query_count.scalar()\n", "tmp_order_column = tmp_order_column + model_relation.__tablename__ + '.'\n", "if page:\n", "query = query.offset(page * page_size)\n", "if page_size:\n", "query = query.limit(page_size)\n", "return count, query.all()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Return'" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "VAR_21 = {}\n", "VAR_22 = {}\n", "VAR_23 = f'{VAR_7}, {VAR_8}'\n", "log.debug('Getting address from coordinates %s...', VAR_23)\n", "VAR_24 = Nominatim()\n", "VAR_33 = VAR_24.reverse(VAR_23, language='en')\n", "log.error('Getting address has failed!')\n", "VAR_21['en-US'] = VAR_33.address\n", "log.error(e)\n", "VAR_22['en-US'] = VAR_33.raw['address']['country']\n", "VAR_34 = VAR_24.reverse(VAR_23, language='ru')\n", "VAR_21['ru-RU'] = VAR_34.address\n", "VAR_22['ru-RU'] = VAR_34.raw['address']['country']\n", "return VAR_21, VAR_22\n" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "address = {}\n", "country = {}\n", "coordinates = f'{latitude}, {longitude}'\n", "log.debug('Getting address from coordinates %s...', coordinates)\n", "geolocator = Nominatim()\n", "location = geolocator.reverse(coordinates, language='en')\n", "log.error('Getting address has failed!')\n", "address['en-US'] = location.address\n", "log.error(e)\n", "country['en-US'] = location.raw['address']['country']\n", "location2 = geolocator.reverse(coordinates, language='ru')\n", "address['ru-RU'] = location2.address\n", "country['ru-RU'] = location2.raw['address']['country']\n", "return address, country\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_22(VAR_21):...\n", "\"\"\"docstring\"\"\"\n", "VAR_45 = FUNC_21(VAR_21)\n", "if not isinstance(VAR_45, dict):\n", "return VAR_45\n" ]
[ "def load_configfile(configpath):...\n", "\"\"\"docstring\"\"\"\n", "config = _load_configfile(configpath)\n", "if not isinstance(config, dict):\n", "return config\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Return'" ]
[ "@click.command()...\n", "" ]
[ "@click.command()...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "def FUNC_6(self, VAR_41):...\n", "if VAR_41:\n", "VAR_95 = Thing._by_fullname(VAR_41, False, data=True)\n", "abort(403, 'forbidden')\n", "if isinstance(VAR_95, Message):\n", "return VAR_95\n", "VAR_52 = VAR_95.subreddit_slow\n", "if VAR_101.user_is_loggedin and VAR_52.can_comment(VAR_101.user):\n", "return VAR_95\n" ]
[ "def run(self, fullname):...\n", "if fullname:\n", "parent = Thing._by_fullname(fullname, False, data=True)\n", "abort(403, 'forbidden')\n", "if isinstance(parent, Message):\n", "return parent\n", "sr = parent.subreddit_slow\n", "if c.user_is_loggedin and sr.can_comment(c.user):\n", "return parent\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Expr'", "Condition", "Return'", "Assign'", "Condition", "Return'" ]
[ "def FUNC_3(VAR_3, VAR_4):...\n", "VAR_7 = '(i) ' + VAR_3 + ' -> ' + VAR_4.__str__()\n", "SCAN_ERRORS.append(VAR_7)\n" ]
[ "def ErrorLogger(url, error):...\n", "con = '(i) ' + url + ' -> ' + error.__str__()\n", "SCAN_ERRORS.append(con)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_29(self, VAR_77):...\n", "def FUNC_39(VAR_101):...\n", "VAR_101.priority = VAR_77\n", "return VAR_101\n" ]
[ "def priority(self, priority):...\n", "def decorate(ruleinfo):...\n", "ruleinfo.priority = priority\n", "return ruleinfo\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Assign'", "Return'" ]
[ "def __repr__(self):...\n", "return 'UInt({})'.format(self.size)\n" ]
[ "def __repr__(self):...\n", "return 'UInt({})'.format(self.size)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(VAR_87):...\n", "return map(self._rules.__getitem__, filter(self.is_rule, VAR_87))\n" ]
[ "def rules(items):...\n", "return map(self._rules.__getitem__, filter(self.is_rule, items))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "VAR_9 = {}\n", "if not VAR_5:\n", "VAR_5 = CLASS_0.get_report(VAR_1)['analysis']\n", "VAR_15 = CLASS_0.behavior_get_processes(VAR_1, VAR_5)\n", "for proc in VAR_15['data']:\n", "VAR_6 = proc['pid']\n", "return VAR_9\n", "VAR_22 = proc['process_name']\n", "VAR_23 = None\n", "for p in VAR_5['behavior']['generic']:\n", "if p['pid'] == VAR_6:\n", "if not VAR_23:\n", "VAR_23 = p\n", "VAR_24 = CLASS_0.behavior_get_watchers(VAR_1, VAR_6=pid, VAR_5=report)\n", "for VAR_30, events in VAR_24.iteritems():\n", "if not VAR_9.has_key(VAR_30):\n", "VAR_9[VAR_30] = {}\n", "if not VAR_9[VAR_30].has_key(VAR_6):\n", "VAR_9[VAR_30][VAR_22] = {'pid': VAR_6, 'process_name': VAR_22, 'events': {}}\n", "for VAR_31 in events:\n", "if not VAR_9[VAR_30][VAR_22]['events'].has_key(VAR_31):\n", "VAR_9[VAR_30][VAR_22]['events'][VAR_31] = []\n", "for _event in VAR_23['summary'][VAR_31]:\n", "VAR_9[VAR_30][VAR_22]['events'][VAR_31].append(_event)\n" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "data = {}\n", "if not report:\n", "report = AnalysisController.get_report(task_id)['analysis']\n", "procs = AnalysisController.behavior_get_processes(task_id, report)\n", "for proc in procs['data']:\n", "pid = proc['pid']\n", "return data\n", "pname = proc['process_name']\n", "pdetails = None\n", "for p in report['behavior']['generic']:\n", "if p['pid'] == pid:\n", "if not pdetails:\n", "pdetails = p\n", "watchers = AnalysisController.behavior_get_watchers(task_id, pid=pid,\n report=report)\n", "for category, events in watchers.iteritems():\n", "if not data.has_key(category):\n", "data[category] = {}\n", "if not data[category].has_key(pid):\n", "data[category][pname] = {'pid': pid, 'process_name': pname, 'events': {}}\n", "for event in events:\n", "if not data[category][pname]['events'].has_key(event):\n", "data[category][pname]['events'][event] = []\n", "for _event in pdetails['summary'][event]:\n", "data[category][pname]['events'][event].append(_event)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "For", "Assign'", "Return'", "Assign'", "Assign'", "For", "Condition", "Condition", "Assign'", "Assign'", "For", "Condition", "Assign'", "Condition", "Assign'", "For", "Condition", "Assign'", "For", "Expr'" ]
[ "def FUNC_10(self, VAR_9):...\n", "\"\"\"docstring\"\"\"\n", "if self.connection:\n", "self.cursor.execute('insert into itemlanguage (language) values (\"%s\")' %\n VAR_9[0])\n", "self.connection.commit()\n" ]
[ "def add_language(self, language):...\n", "\"\"\"docstring\"\"\"\n", "if self.connection:\n", "self.cursor.execute('insert into itemlanguage (language) values (\"%s\")' %\n language[0])\n", "self.connection.commit()\n" ]
[ 0, 0, 0, 4, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Expr'", "Expr'" ]
[ "@VAR_2.route('/')...\n", "VAR_4 = 'SELECT img_path, img_name\\t\\t\\tFROM imageTable'\n", "VAR_1.execute(VAR_4)\n", "return render_template('index.html', images=cursor)\n" ]
[ "@app.route('/')...\n", "query = 'SELECT img_path, img_name\\t\\t\\tFROM imageTable'\n", "cursor.execute(query)\n", "return render_template('index.html', images=cursor)\n" ]
[ 0, 4, 4, 4 ]
[ "Condition", "Assign'", "Expr'", "Return'" ]
[ "@VAR_0.route('/login', methods=['GET', 'POST'])...\n", "if request.method == 'POST':\n", "VAR_5 = CLASS_0(request.form['email'], request.form['password'])\n", "return render_template('login.html')\n", "VAR_1.session.add(VAR_5)\n", "VAR_1.session.commit()\n", "return redirect(url_for('tables'))\n" ]
[ "@app.route('/login', methods=['GET', 'POST'])...\n", "if request.method == 'POST':\n", "user = User(request.form['email'], request.form['password'])\n", "return render_template('login.html')\n", "db.session.add(user)\n", "db.session.commit()\n", "return redirect(url_for('tables'))\n" ]
[ 0, 0, 4, 0, 4, 4, 4 ]
[ "Condition", "Condition", "Assign'", "Return'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_8(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_1 = self.bindings\n", "VAR_11 = '{app}-{stack}-v000'.format(app=self.TEST_APP, stack=bindings[\n 'TEST_STACK'])\n", "VAR_9 = self.agent.make_json_payload_from_kwargs(job=[{'cloudProvider':\n 'gce', 'serverGroupName': group_name, 'region': bindings[\n 'TEST_GCE_REGION'], 'zone': bindings['TEST_GCE_ZONE'], 'asgName':\n group_name, 'type': 'destroyServerGroup', 'regions': [bindings[\n 'TEST_GCE_REGION']], 'zones': [bindings['TEST_GCE_ZONE']],\n 'credentials': bindings['GCE_CREDENTIALS'], 'user': '[anonymous]'}],\n application=self.TEST_APP, description='DestroyServerGroup: ' + group_name)\n", "VAR_10 = gcp.GceContractBuilder(self.gce_observer)\n", "VAR_10.new_clause_builder('Managed Instance Group Removed').inspect_resource(\n 'managed-instance-groups', VAR_11, no_resource_ok=True).contains_path_eq(\n 'targetSize', 0)\n", "VAR_10.new_clause_builder('Instances Are Removed', retryable_for_secs=30\n ).list_resources('instances').excludes_path_value('name', VAR_11)\n", "return st.OperationContract(self.new_post_operation(title=\n 'delete_server_group', data=payload, path='tasks'), VAR_5=builder.build())\n" ]
[ "def delete_server_group(self):...\n", "\"\"\"docstring\"\"\"\n", "bindings = self.bindings\n", "group_name = '{app}-{stack}-v000'.format(app=self.TEST_APP, stack=bindings[\n 'TEST_STACK'])\n", "payload = self.agent.make_json_payload_from_kwargs(job=[{'cloudProvider':\n 'gce', 'serverGroupName': group_name, 'region': bindings[\n 'TEST_GCE_REGION'], 'zone': bindings['TEST_GCE_ZONE'], 'asgName':\n group_name, 'type': 'destroyServerGroup', 'regions': [bindings[\n 'TEST_GCE_REGION']], 'zones': [bindings['TEST_GCE_ZONE']],\n 'credentials': bindings['GCE_CREDENTIALS'], 'user': '[anonymous]'}],\n application=self.TEST_APP, description='DestroyServerGroup: ' + group_name)\n", "builder = gcp.GceContractBuilder(self.gce_observer)\n", "builder.new_clause_builder('Managed Instance Group Removed').inspect_resource(\n 'managed-instance-groups', group_name, no_resource_ok=True\n ).contains_path_eq('targetSize', 0)\n", "builder.new_clause_builder('Instances Are Removed', retryable_for_secs=30\n ).list_resources('instances').excludes_path_value('name', group_name)\n", "return st.OperationContract(self.new_post_operation(title=\n 'delete_server_group', data=payload, path='tasks'), contract=builder.\n build())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_6(self, VAR_3):...\n", "if VAR_3.name not in self.host_names:\n", "self.hosts.append(VAR_3)\n", "self._hosts.add(VAR_3.name)\n", "VAR_3.add_group(self)\n", "self.clear_hosts_cache()\n" ]
[ "def add_host(self, host):...\n", "if host.name not in self.host_names:\n", "self.hosts.append(host)\n", "self._hosts.add(host.name)\n", "host.add_group(self)\n", "self.clear_hosts_cache()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'" ]