lines: sequencelengths [1, 444]
raw_lines: sequencelengths [1, 444]
label: sequencelengths [1, 444]
type: sequencelengths [1, 444]
[ "def FUNC_3(self):...\n", "" ]
[ "def tearDown(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def __str__(self):...\n", "return str(self.nickname())\n" ]
[ "def __str__(self):...\n", "return str(self.nickname())\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@handled_slot(bool)...\n", "print(f\"{'Locking' if VAR_4 else 'Unlocking'} fast piezo.\")\n", "self.lock_fast_piezo_action.setChecked(not VAR_4)\n", "self.matisse.set_piezo_etalon_lock(VAR_4)\n", "self.lock_fast_piezo_action.setChecked(VAR_4)\n" ]
[ "@handled_slot(bool)...\n", "print(f\"{'Locking' if checked else 'Unlocking'} fast piezo.\")\n", "self.lock_fast_piezo_action.setChecked(not checked)\n", "self.matisse.set_piezo_etalon_lock(checked)\n", "self.lock_fast_piezo_action.setChecked(checked)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_8(VAR_3, VAR_2):...\n", "VAR_10 = {}\n", "for measure_value in VAR_3:\n", "VAR_32 = measure_value.measure_id\n", "return VAR_10.values()\n", "VAR_33 = {'date': measure_value.month, 'numerator': measure_value.numerator,\n 'denominator': measure_value.denominator, 'calc_value': measure_value.\n calc_value, 'percentile': measure_value.percentile, 'cost_savings':\n measure_value.cost_savings}\n", "if VAR_2 == 'practice':\n", "if measure_value.practice_id:\n", "if VAR_2 == 'ccg':\n", "VAR_33.update({'practice_id': measure_value.practice_id, 'practice_name':\n measure_value.practice.name})\n", "if VAR_32 in VAR_10:\n", "if measure_value.pct_id:\n", "if VAR_2 == 'stp':\n", "VAR_10[VAR_32]['data'].append(VAR_33)\n", "VAR_13 = measure_value.measure\n", "VAR_33.update({'pct_id': measure_value.pct_id, 'pct_name': measure_value.\n pct.name})\n", "if measure_value.stp_id:\n", "if VAR_2 == 'regional_team':\n", "VAR_10[VAR_32] = {'id': VAR_32, 'name': VAR_13.name, 'title': VAR_13.title,\n 'description': VAR_13.description, 'why_it_matters': VAR_13.\n why_it_matters, 'numerator_short': VAR_13.numerator_short,\n 'denominator_short': VAR_13.denominator_short, 'url': VAR_13.url,\n 'is_cost_based': VAR_13.is_cost_based, 'is_percentage': VAR_13.\n is_percentage, 'low_is_good': VAR_13.low_is_good, 'tags': FUNC_9(VAR_13\n .tags), 'data': [VAR_33]}\n", "VAR_33.update({'stp_id': measure_value.stp_id, 'stp_name': measure_value.\n stp.name})\n", "if measure_value.regional_team_id:\n", "assert False\n", "VAR_33.update({'regional_team_id': measure_value.regional_team_id,\n 'regional_team_name': measure_value.regional_team.name})\n" ]
[ "def _roll_up_measure_values(measure_values, org_type):...\n", "rolled = {}\n", "for measure_value in measure_values:\n", "measure_id = measure_value.measure_id\n", "return rolled.values()\n", "measure_value_data = {'date': measure_value.month, 'numerator':\n measure_value.numerator, 'denominator': measure_value.denominator,\n 'calc_value': measure_value.calc_value, 'percentile': measure_value.\n percentile, 'cost_savings': measure_value.cost_savings}\n", "if org_type == 'practice':\n", "if measure_value.practice_id:\n", "if org_type == 'ccg':\n", "measure_value_data.update({'practice_id': measure_value.practice_id,\n 'practice_name': measure_value.practice.name})\n", "if measure_id in rolled:\n", "if measure_value.pct_id:\n", "if org_type == 'stp':\n", "rolled[measure_id]['data'].append(measure_value_data)\n", "measure = measure_value.measure\n", "measure_value_data.update({'pct_id': measure_value.pct_id, 'pct_name':\n measure_value.pct.name})\n", "if measure_value.stp_id:\n", "if org_type == 'regional_team':\n", "rolled[measure_id] = {'id': measure_id, 'name': measure.name, 'title':\n measure.title, 'description': measure.description, 'why_it_matters':\n measure.why_it_matters, 'numerator_short': measure.numerator_short,\n 'denominator_short': measure.denominator_short, 'url': measure.url,\n 'is_cost_based': measure.is_cost_based, 'is_percentage': measure.\n is_percentage, 'low_is_good': measure.low_is_good, 'tags':\n _hydrate_tags(measure.tags), 'data': [measure_value_data]}\n", "measure_value_data.update({'stp_id': measure_value.stp_id, 'stp_name':\n measure_value.stp.name})\n", "if measure_value.regional_team_id:\n", "assert False\n", "measure_value_data.update({'regional_team_id': measure_value.\n regional_team_id, 'regional_team_name': measure_value.regional_team.name})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Return'", "Assign'", "Condition", "Condition", "Condition", "Expr'", "Condition", "Condition", "Condition", "Expr'", "Assign'", "Expr'", "Condition", "Condition", "Assign'", "Expr'", "Condition", "Assert'", "Expr'" ]
[ "def FUNC_17(self, VAR_18):...\n", "\"\"\"docstring\"\"\"\n", "VAR_31 = VAR_18[0],\n", "self.cursor.execute('insert into store (name) values (?)', VAR_31)\n", "self.connection.commit()\n" ]
[ "def add_store(self, store):...\n", "\"\"\"docstring\"\"\"\n", "t = store[0],\n", "self.cursor.execute('insert into store (name) values (?)', t)\n", "self.connection.commit()\n" ]
[ 0, 0, 4, 4, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Expr'" ]
[ "def __init__(self):...\n", "self.conn = pymysql.connect(user=config.mysql_credentials['user'], password\n =config.mysql_credentials['password'], host=config.mysql_credentials[\n 'host'], db=config.mysql_credentials['database'], cursorclass=pymysql.\n cursors.DictCursor)\n", "self.cur = self.conn.cursor()\n" ]
[ "def __init__(self):...\n", "self.conn = pymysql.connect(user=config.mysql_credentials['user'], password\n =config.mysql_credentials['password'], host=config.mysql_credentials[\n 'host'], db=config.mysql_credentials['database'], cursorclass=pymysql.\n cursors.DictCursor)\n", "self.cur = self.conn.cursor()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_27(self, VAR_43, VAR_44):...\n", "\"\"\"docstring\"\"\"\n", "VAR_63 = self.getfile(VAR_43)\n", "if VAR_63 == False:\n", "VAR_64 = self.getfile(VAR_44)\n", "if VAR_64 != False:\n", "self.get_path(os.path.dirname(VAR_43)).remove(VAR_63)\n", "VAR_63[VAR_1] = os.path.basename(VAR_44)\n", "self.get_path(os.path.dirname(VAR_44)).append(VAR_63)\n", "return\n" ]
[ "def rename(self, oldpath, newpath):...\n", "\"\"\"docstring\"\"\"\n", "old = self.getfile(oldpath)\n", "if old == False:\n", "new = self.getfile(newpath)\n", "if new != False:\n", "self.get_path(os.path.dirname(oldpath)).remove(old)\n", "old[A_NAME] = os.path.basename(newpath)\n", "self.get_path(os.path.dirname(newpath)).append(old)\n", "return\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_7(self, **VAR_10):...\n", "return super().formfield(**{'form_class': forms.JSONField, **kwargs})\n" ]
[ "def formfield(self, **kwargs):...\n", "return super().formfield(**{'form_class': forms.JSONField, **kwargs})\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_9(self):...\n", "self._hosts_cache = None\n", "for VAR_12 in self.parent_groups:\n", "VAR_12.clear_hosts_cache()\n" ]
[ "def clear_hosts_cache(self):...\n", "self._hosts_cache = None\n", "for g in self.parent_groups:\n", "g.clear_hosts_cache()\n" ]
[ 0, 0, 1, 1 ]
[ "FunctionDef'", "Assign'", "For", "Expr'" ]
[ "def FUNC_12(self, VAR_5):...\n", "VAR_20 = VAR_5.args['username'][0]\n", "VAR_21 = VAR_5.args['password'][0]\n", "return credentials.UsernamePassword(VAR_20, VAR_21)\n" ]
[ "def _get_creds_from(self, request):...\n", "username = request.args['username'][0]\n", "password = request.args['password'][0]\n", "return credentials.UsernamePassword(username, password)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "\"\"\"string\"\"\"\n", "from __future__ import unicode_literals\n", "import mimetypes\n", "import os\n", "import posixpath\n", "import re\n", "import stat\n", "from django.http import FileResponse, Http404, HttpResponse, HttpResponseNotModified, HttpResponseRedirect\n", "from django.template import Context, Engine, TemplateDoesNotExist, loader\n", "from django.utils.http import http_date, parse_http_date\n", "from django.utils.six.moves.urllib.parse import unquote\n", "from django.utils.translation import ugettext as _, ugettext_lazy\n", "def FUNC_0(VAR_0, VAR_1, VAR_2=None, VAR_3=False):...\n", "\"\"\"docstring\"\"\"\n", "VAR_1 = posixpath.normpath(unquote(VAR_1))\n", "VAR_1 = VAR_1.lstrip('/')\n", "VAR_10 = ''\n", "for VAR_18 in VAR_1.split('/'):\n", "if not VAR_18:\n", "if VAR_10 and VAR_1 != VAR_10:\n", "VAR_17, VAR_18 = os.path.splitdrive(VAR_18)\n", "return HttpResponseRedirect(VAR_10)\n", "VAR_6 = os.path.join(VAR_2, VAR_10)\n", "VAR_19, VAR_18 = os.path.split(VAR_18)\n", "if os.path.isdir(VAR_6):\n", "if VAR_18 in (os.curdir, os.pardir):\n", "if VAR_3:\n", "if not os.path.exists(VAR_6):\n", "VAR_10 = os.path.join(VAR_10, VAR_18).replace('\\\\', '/')\n", "return FUNC_1(VAR_10, VAR_6)\n", "VAR_11 = os.stat(VAR_6)\n", "if not FUNC_2(VAR_0.META.get('HTTP_IF_MODIFIED_SINCE'), VAR_11.st_mtime,\n", "return HttpResponseNotModified()\n", "VAR_12, VAR_13 = mimetypes.guess_type(VAR_6)\n", "VAR_12 = VAR_12 or 'application/octet-stream'\n", "VAR_14 = FileResponse(open(VAR_6, 'rb'), VAR_12=content_type)\n", "VAR_14['Last-Modified'] = http_date(VAR_11.st_mtime)\n", "if stat.S_ISREG(VAR_11.st_mode):\n", "VAR_14['Content-Length'] = VAR_11.st_size\n", "if VAR_13:\n", "VAR_14['Content-Encoding'] = VAR_13\n", "return VAR_14\n" ]
[ "\"\"\"\nViews and functions for serving static files. These are only to be used\nduring development, and SHOULD NOT be used in a production setting.\n\"\"\"\n", "from __future__ import unicode_literals\n", "import mimetypes\n", "import os\n", "import posixpath\n", "import re\n", "import stat\n", "from django.http import FileResponse, Http404, HttpResponse, HttpResponseNotModified, HttpResponseRedirect\n", "from django.template import Context, Engine, TemplateDoesNotExist, loader\n", "from django.utils.http import http_date, parse_http_date\n", "from django.utils.six.moves.urllib.parse import unquote\n", "from django.utils.translation import ugettext as _, ugettext_lazy\n", "def serve(request, path, document_root=None, show_indexes=False):...\n", "\"\"\"docstring\"\"\"\n", "path = posixpath.normpath(unquote(path))\n", "path = path.lstrip('/')\n", "newpath = ''\n", "for part in path.split('/'):\n", "if not part:\n", "if newpath and path != newpath:\n", "drive, part = os.path.splitdrive(part)\n", "return HttpResponseRedirect(newpath)\n", "fullpath = os.path.join(document_root, newpath)\n", "head, part = os.path.split(part)\n", "if os.path.isdir(fullpath):\n", "if part in (os.curdir, os.pardir):\n", "if show_indexes:\n", "if not os.path.exists(fullpath):\n", "newpath = os.path.join(newpath, part).replace('\\\\', '/')\n", "return directory_index(newpath, fullpath)\n", "statobj = os.stat(fullpath)\n", "if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),\n", "return HttpResponseNotModified()\n", "content_type, encoding = mimetypes.guess_type(fullpath)\n", "content_type = content_type or 'application/octet-stream'\n", "response = FileResponse(open(fullpath, 'rb'), content_type=content_type)\n", "response['Last-Modified'] = http_date(statobj.st_mtime)\n", "if stat.S_ISREG(statobj.st_mode):\n", "response['Content-Length'] = statobj.st_size\n", "if encoding:\n", "response['Content-Encoding'] = encoding\n", "return response\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 0, 6, 0, 0, 6, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "For", "Condition", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Condition", "Condition", "Condition", "Condition", "Assign'", "Return'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_0(VAR_0):...\n", "print(VAR_0, file=sys.stderr)\n" ]
[ "def log_error(error):...\n", "print(error, file=sys.stderr)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_13(self, VAR_17, VAR_16):...\n", "\"\"\"docstring\"\"\"\n", "return False\n" ]
[ "def immutable_file_test(self, path, url):...\n", "\"\"\"docstring\"\"\"\n", "return False\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_24(self, VAR_14):...\n", "" ]
[ "def is_pk(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_18(self, VAR_2, VAR_3, VAR_4, *VAR_5):...\n", "self.write_data({'type': 'market_payment_received', 'event': VAR_5[0]})\n" ]
[ "def on_market_payment_received(self, subject, changetype, objectID, *args):...\n", "self.write_data({'type': 'market_payment_received', 'event': args[0]})\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_20(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_34 = []\n", "for value in self.iscsi_ips.values():\n", "VAR_34.append(value['nsp'])\n", "return VAR_34\n" ]
[ "def _get_iscsi_nsps(self):...\n", "\"\"\"docstring\"\"\"\n", "nsps = []\n", "for value in self.iscsi_ips.values():\n", "nsps.append(value['nsp'])\n", "return nsps\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Expr'", "Return'" ]
[ "def __init__(self):...\n", "self.content = VAR_2\n" ]
[ "def __init__(self):...\n", "self.content = content\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_9(self, VAR_12):...\n", "print('Test type: %s' % self.__class__.__name__, file=fd)\n", "print('Execution start time: %s' % datetime.datetime.fromtimestamp(self.\n start_time).strftime('%d/%m/%Y %H:%M:%S.%f'), file=fd)\n", "print('Execution stop time: %s' % datetime.datetime.fromtimestamp(self.\n stop_time).strftime('%d/%m/%Y %H:%M:%S.%f'), file=fd)\n", "print('Duration: %f seconds' % self.duration, file=fd)\n", "print('Outcome: %s' % self.outcome, file=fd)\n", "VAR_12.write(self.specific_info())\n", "if self.exception_data is not None:\n", "print('', file=fd)\n", "print('EXCEPTION CASTED', file=fd)\n", "VAR_12.write(unicode(self.exception_data))\n" ]
[ "def store_to_file(self, fd):...\n", "print('Test type: %s' % self.__class__.__name__, file=fd)\n", "print('Execution start time: %s' % datetime.datetime.fromtimestamp(self.\n start_time).strftime('%d/%m/%Y %H:%M:%S.%f'), file=fd)\n", "print('Execution stop time: %s' % datetime.datetime.fromtimestamp(self.\n stop_time).strftime('%d/%m/%Y %H:%M:%S.%f'), file=fd)\n", "print('Duration: %f seconds' % self.duration, file=fd)\n", "print('Outcome: %s' % self.outcome, file=fd)\n", "fd.write(self.specific_info())\n", "if self.exception_data is not None:\n", "print('', file=fd)\n", "print('EXCEPTION CASTED', file=fd)\n", "fd.write(unicode(self.exception_data))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def __gt__(self, VAR_11):...\n", "return self.rule.__gt__(VAR_11.rule)\n" ]
[ "def __gt__(self, other):...\n", "return self.rule.__gt__(other.rule)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(self):...\n", "VAR_1 = bot.Bot(None, {'dimensions': {'foo': 'bar'}},\n 'https://localhost:1/', '1234-1a2b3c4-tainted-joe', 'base_dir', None)\n", "self.assertEqual({'foo': 'bar'}, VAR_1.dimensions)\n", "self.assertEqual(os.path.join(os.path.dirname(VAR_0), 'swarming_bot.zip'),\n VAR_1.swarming_bot_zip)\n", "self.assertEqual('1234-1a2b3c4-tainted-joe', VAR_1.server_version)\n", "self.assertEqual('base_dir', VAR_1.base_dir)\n" ]
[ "def test_bot(self):...\n", "obj = bot.Bot(None, {'dimensions': {'foo': 'bar'}}, 'https://localhost:1/',\n '1234-1a2b3c4-tainted-joe', 'base_dir', None)\n", "self.assertEqual({'foo': 'bar'}, obj.dimensions)\n", "self.assertEqual(os.path.join(os.path.dirname(THIS_FILE),\n 'swarming_bot.zip'), obj.swarming_bot_zip)\n", "self.assertEqual('1234-1a2b3c4-tainted-joe', obj.server_version)\n", "self.assertEqual('base_dir', obj.base_dir)\n" ]
[ 0, 5, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_25(VAR_14, VAR_27, VAR_28):...\n", "VAR_26.append((VAR_14, VAR_27.splitlines()[0], VAR_28))\n" ]
[ "def post_error_task(botobj, msg, task_id):...\n", "posted.append((botobj, msg.splitlines()[0], task_id))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "from .common_info import CommonInfo\n", "from .data_source import DataSource\n", "from .group_type import GroupType\n", "from .data_group import DataGroup\n", "from .document_type import DocumentType\n", "from .data_document import DataDocument\n", "from .ingredient import Ingredient\n", "from .product import Product\n", "from .source_category import SourceCategory\n", "from .product_document import ProductDocument\n", "from .extracted_text import ExtractedText\n", "from .extracted_cpcat import ExtractedCPCat\n", "from .extracted_chemical import ExtractedChemical\n", "from .extracted_functional_use import ExtractedFunctionalUse\n", "from .extracted_habits_and_practices import ExtractedHabitsAndPractices\n", "from .extracted_list_presence import ExtractedListPresence\n", "from .extracted_hhdoc import ExtractedHHDoc\n", "from .extracted_hhrec import ExtractedHHRec\n", "from .script import Script\n", "from .dsstox_lookup import DSSToxLookup\n", "from .qa_group import QAGroup\n", "from .unit_type import UnitType\n", "from .weight_fraction_type import WeightFractionType\n", "from .PUC import PUC, PUCToTag, PUCTag\n", "from .product_to_tag import ProductToTag\n", "from .product_to_puc import ProductToPUC\n", "from .extracted_habits_and_practices_to_puc import ExtractedHabitsAndPracticesToPUC\n", "from .qa_notes import QANotes\n", "from .raw_chem import RawChem\n", "from .taxonomy import Taxonomy\n", "from .taxonomy_source import TaxonomySource\n", "from .taxonomy_to_PUC import TaxonomyToPUC\n" ]
[ "from .common_info import CommonInfo\n", "from .data_source import DataSource\n", "from .group_type import GroupType\n", "from .data_group import DataGroup\n", "from .document_type import DocumentType\n", "from .data_document import DataDocument\n", "from .ingredient import Ingredient\n", "from .product import Product\n", "from .source_category import SourceCategory\n", "from .product_document import ProductDocument\n", "from .extracted_text import ExtractedText\n", "from .extracted_cpcat import ExtractedCPCat\n", "from .extracted_chemical import ExtractedChemical\n", "from .extracted_functional_use import ExtractedFunctionalUse\n", "from .extracted_habits_and_practices import ExtractedHabitsAndPractices\n", "from .extracted_list_presence import ExtractedListPresence\n", "from .extracted_hhdoc import ExtractedHHDoc\n", "from .extracted_hhrec import ExtractedHHRec\n", "from .script import Script\n", "from .dsstox_lookup import DSSToxLookup\n", "from .qa_group import QAGroup\n", "from .unit_type import UnitType\n", "from .weight_fraction_type import WeightFractionType\n", "from .PUC import PUC, PUCToTag, PUCTag\n", "from .product_to_tag import ProductToTag\n", "from .product_to_puc import ProductToPUC\n", "from .extracted_habits_and_practices_to_puc import ExtractedHabitsAndPracticesToPUC\n", "from .qa_notes import QANotes\n", "from .raw_chem import RawChem\n", "from .taxonomy import Taxonomy\n", "from .taxonomy_source import TaxonomySource\n", "from .taxonomy_to_PUC import TaxonomyToPUC\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'" ]
[ "def FUNC_9(self, VAR_7):...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = {}\n", "for VAR_3 in VAR_7:\n", "VAR_11[VAR_3] = self.get_tags(VAR_3)\n", "return VAR_11\n" ]
[ "def get_tags_dict(self, tids):...\n", "\"\"\"docstring\"\"\"\n", "tags_dict = {}\n", "for tid in tids:\n", "tags_dict[tid] = self.get_tags(tid)\n", "return tags_dict\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Assign'", "Return'" ]
[ "def __init__(self, VAR_9):...\n", "self.controller = VAR_9\n", "self._loaded = False\n" ]
[ "def __init__(self, controller):...\n", "self.controller = controller\n", "self._loaded = False\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_8(self, VAR_14):...\n", "" ]
[ "def is_file(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_11(self):...\n", "VAR_10 = self.objects.dg.pk\n", "VAR_11 = str(self.objects.ds.pk)\n", "VAR_12 = str(self.objects.gt.pk)\n", "VAR_13 = {'name': ['Changed Name'], 'group_type': [VAR_12], 'downloaded_by':\n [str(User.objects.get(username='Karyn').pk)], 'downloaded_at': [\n '08/20/2017'], 'data_source': [VAR_11]}\n", "VAR_2 = self.client.post(f'/datagroup/edit/{VAR_10}/', VAR_13=data)\n", "self.assertEqual(VAR_2.status_code, 302, 'User is redirected to detail page.')\n", "self.assertEqual(VAR_2.url, f'/datagroup/{VAR_10}/',\n 'Should go to detail page.')\n" ]
[ "def test_edit_redirect(self):...\n", "dgpk = self.objects.dg.pk\n", "dspk = str(self.objects.ds.pk)\n", "gtpk = str(self.objects.gt.pk)\n", "data = {'name': ['Changed Name'], 'group_type': [gtpk], 'downloaded_by': [\n str(User.objects.get(username='Karyn').pk)], 'downloaded_at': [\n '08/20/2017'], 'data_source': [dspk]}\n", "response = self.client.post(f'/datagroup/edit/{dgpk}/', data=data)\n", "self.assertEqual(response.status_code, 302,\n 'User is redirected to detail page.')\n", "self.assertEqual(response.url, f'/datagroup/{dgpk}/',\n 'Should go to detail page.')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_0(self, VAR_1, VAR_4):...\n", "" ]
[ "def run(self, args, headers):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_10(self, VAR_8):...\n", "\"\"\"docstring\"\"\"\n", "VAR_21 = VAR_8\n", "VAR_25 = CLASS_1(VAR_8)\n", "if VAR_25.get_tag('pulled from'):\n", "VAR_21 = VAR_25.get_tag('pulled from')[0].split(' |')[0].strip(' ->')\n", "return VAR_21\n", "return self.find_oldest_node(VAR_21)\n" ]
[ "def find_oldest_node(self, filename):...\n", "\"\"\"docstring\"\"\"\n", "oldest_known_filename = filename\n", "this_meta = NodeMetadata(filename)\n", "if this_meta.get_tag('pulled from'):\n", "oldest_known_filename = this_meta.get_tag('pulled from')[0].split(' |')[0\n ].strip(' ->')\n", "return oldest_known_filename\n", "return self.find_oldest_node(oldest_known_filename)\n" ]
[ 0, 0, 0, 1, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "Return'", "Return'" ]
[ "\"\"\"SchoolCMS-handler-init.\n\nroute.\n\"\"\"\n", "from __future__ import absolute_import\n", "from __future__ import print_function\n", "from __future__ import unicode_literals\n", "from .. import version as system_version\n", "from ..db import SQL_Session, User, GroupList, Login_Session\n", "from ..util import webassets_react\n", "import functools\n", "import os\n", "from webassets import Environment, Bundle\n", "import tornado.web\n", "from tornado.escape import json_encode\n", "from tornado.options import options\n", "def FUNC_0(self, VAR_1=True):...\n", "self.is_api = VAR_1\n", "self.assets = Environment(os.path.join(os.path.dirname(__file__),\n '../static'), '/static')\n", "VAR_11 = Bundle('css/bootstrap.min.css', 'css/material.min.css', Bundle(\n 'css/schoolcms.css', 'css/dropdown.css', filters='cssmin'),\n 'outdatedbrowser/outdatedbrowser.min.css', output='dict/plugin.min.css')\n", "VAR_12 = Bundle(Bundle('outdatedbrowser/outdatedbrowser.min.js',\n 'react-0.13.2/react-with-addons.min.js', 'js/jquery-2.1.3.min.js',\n 'js/bootstrap.min.js', 'js/react-bootstrap.min.js',\n 'js/react-mini-router.min.js', 'js/marked.min.js', 'js/material.min.js',\n 'js/isMobile.min.js', 'js/moment-with-locales.min.js', 'js/dropdown.js',\n filters='jsmin'), Bundle('schoolcms/init.jsx', 'schoolcms/mixin/*.jsx',\n 'schoolcms/component/*.jsx', 'schoolcms/page/*.jsx', filters=('react',\n 'jsmin')), output='dict/plugin.min.js')\n", "self.assets.register('css_all', VAR_11)\n", "self.assets.register('js_all', VAR_12)\n", "def FUNC_1(self):...\n", "\"\"\"docstring\"\"\"\n", "self.sql_session = SQL_Session()\n", "def FUNC_2(self):...\n", "\"\"\"docstring\"\"\"\n", "self.sql_session.close()\n", "def FUNC_3(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_13 = self.get_secure_cookie('session_key')\n", "if not VAR_13:\n", "return None\n", "VAR_14 = Login_Session.get_by_key(VAR_13, self.sql_session)\n", "if not VAR_14:\n", "return None\n", "return User.by_key(VAR_14.userkey, self.sql_session).scalar()\n" ]
[ "\"\"\"SchoolCMS-handler-init.\n\nroute.\n\"\"\"\n", "from __future__ import absolute_import\n", "from __future__ import print_function\n", "from __future__ import unicode_literals\n", "from .. import version as system_version\n", "from ..db import SQL_Session, User, GroupList, Login_Session\n", "from ..util import webassets_react\n", "import functools\n", "import os\n", "from webassets import Environment, Bundle\n", "import tornado.web\n", "from tornado.escape import json_encode\n", "from tornado.options import options\n", "def initialize(self, is_api=True):...\n", "self.is_api = is_api\n", "self.assets = Environment(os.path.join(os.path.dirname(__file__),\n '../static'), '/static')\n", "css_all = Bundle('css/bootstrap.min.css', 'css/material.min.css', Bundle(\n 'css/schoolcms.css', 'css/dropdown.css', filters='cssmin'),\n 'outdatedbrowser/outdatedbrowser.min.css', output='dict/plugin.min.css')\n", "js_all = Bundle(Bundle('outdatedbrowser/outdatedbrowser.min.js',\n 'react-0.13.2/react-with-addons.min.js', 'js/jquery-2.1.3.min.js',\n 'js/bootstrap.min.js', 'js/react-bootstrap.min.js',\n 'js/react-mini-router.min.js', 'js/marked.min.js', 'js/material.min.js',\n 'js/isMobile.min.js', 'js/moment-with-locales.min.js', 'js/dropdown.js',\n filters='jsmin'), Bundle('schoolcms/init.jsx', 'schoolcms/mixin/*.jsx',\n 'schoolcms/component/*.jsx', 'schoolcms/page/*.jsx', filters=('react',\n 'jsmin')), output='dict/plugin.min.js')\n", "self.assets.register('css_all', css_all)\n", "self.assets.register('js_all', js_all)\n", "def prepare(self):...\n", "\"\"\"docstring\"\"\"\n", "self.sql_session = SQL_Session()\n", "def on_finish(self):...\n", "\"\"\"docstring\"\"\"\n", "self.sql_session.close()\n", "def get_current_user(self):...\n", "\"\"\"docstring\"\"\"\n", "session_key = self.get_secure_cookie('session_key')\n", "if not session_key:\n", "return None\n", "login_session = Login_Session.get_by_key(session_key, self.sql_session)\n", "if not login_session:\n", "return None\n", "return User.by_key(login_session.userkey, self.sql_session).scalar()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 5, 5, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Docstring", "Assign'", "FunctionDef'", "Docstring", "Expr'", "FunctionDef'", "Docstring", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_1(self, VAR_21, VAR_22):...\n", "\"\"\"docstring\"\"\"\n", "VAR_55 = VAR_21.rstrip('/').split('/')\n", "if VAR_21[0] == '/':\n", "VAR_22 = []\n", "VAR_22 = [x for x in VAR_22.split('/') if len(x) and x is not None]\n", "while 1:\n", "if not len(VAR_55):\n", "VAR_66 = VAR_55.pop(0)\n", "return '/%s' % ('/'.join(VAR_22),)\n", "if VAR_66 == '..':\n", "if len(VAR_22):\n", "if VAR_66 in ('.', ''):\n", "VAR_22.pop()\n", "VAR_22.append(VAR_66)\n" ]
[ "def resolve_path(self, path, cwd):...\n", "\"\"\"docstring\"\"\"\n", "pieces = path.rstrip('/').split('/')\n", "if path[0] == '/':\n", "cwd = []\n", "cwd = [x for x in cwd.split('/') if len(x) and x is not None]\n", "while 1:\n", "if not len(pieces):\n", "piece = pieces.pop(0)\n", "return '/%s' % ('/'.join(cwd),)\n", "if piece == '..':\n", "if len(cwd):\n", "if piece in ('.', ''):\n", "cwd.pop()\n", "cwd.append(piece)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Return'", "Condition", "Condition", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_19(VAR_36):...\n", "if VAR_36.strip() != '' and not os.path.exists(VAR_36):\n", "os.makedirs(VAR_36)\n" ]
[ "def mkdir_p(path):...\n", "if path.strip() != '' and not os.path.exists(path):\n", "os.makedirs(path)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'" ]
[ "def __str__(self):...\n", "return '{} - {}'.format(self.parent, self.child)\n" ]
[ "def __str__(self):...\n", "return '{} - {}'.format(self.parent, self.child)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0():...\n", "\"\"\"docstring\"\"\"\n", "return VAR_3\n" ]
[ "def getDjangoURLPatterns():...\n", "\"\"\"docstring\"\"\"\n", "return ROLE_MODELS_URL_PATTERNS\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_4(VAR_11, VAR_12=False):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_1['VERBOSITY'] >= 1:\n", "VAR_0.info('$ ' + ' '.join(VAR_11))\n", "VAR_19 = dict()\n", "if VAR_1['VERBOSITY'] >= 3:\n", "VAR_19['stdout'] = io.open(os.devnull, 'wb')\n", "VAR_32 = subprocess.call(VAR_11, **kwargs)\n", "VAR_19['stderr'] = subprocess.STDOUT\n", "if not VAR_12 and VAR_32 != 0:\n" ]
[ "def sh(cmdline, ignore_failure=False):...\n", "\"\"\"docstring\"\"\"\n", "if CONFIG['VERBOSITY'] >= 1:\n", "logger.info('$ ' + ' '.join(cmdline))\n", "kwargs = dict()\n", "if CONFIG['VERBOSITY'] >= 3:\n", "kwargs['stdout'] = io.open(os.devnull, 'wb')\n", "ret = subprocess.call(cmdline, **kwargs)\n", "kwargs['stderr'] = subprocess.STDOUT\n", "if not ignore_failure and ret != 0:\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition" ]
[ "def FUNC_1(self, VAR_2):...\n", "print('remove_frame: ' + str(VAR_2))\n", "self.frames.pop(VAR_2, None)\n" ]
[ "def remove_frame(self, frame):...\n", "print('remove_frame: ' + str(frame))\n", "self.frames.pop(frame, None)\n" ]
[ 0, 4, 4 ]
[ "FunctionDef'", "Expr'", "Expr'" ]
[ "def FUNC_6(self):...\n", "self._check_duplicate_key(VAR_1)\n" ]
[ "def test_ecdsa_duplicate_key(self):...\n", "self._check_duplicate_key(ECDSA_PUBKEY)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_6(self, VAR_10):...\n", "return '%s%s' % (self.configuration.iscsi_target_prefix, VAR_10['name'])\n" ]
[ "def _build_iscsi_target_name(self, volume):...\n", "return '%s%s' % (self.configuration.iscsi_target_prefix, volume['name'])\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_27(self, VAR_8=True):...\n", "\"\"\"docstring\"\"\"\n", "logging.debug('Processing SAML logout response')\n", "VAR_11 = self._saml_auth()\n", "VAR_32 = []\n", "VAR_11.process_slo()\n", "VAR_32 = VAR_11.get_errors()\n", "if VAR_32:\n", "if VAR_8:\n", "logging.info('SAML SLO request was successful')\n", "self.clear_session()\n", "return self._render_saml_errors_json(VAR_11)\n", "self.clear_session()\n", "return self.redirect_to_goodbye()\n" ]
[ "def log_out_callback(self, clear_session_on_errors=True):...\n", "\"\"\"docstring\"\"\"\n", "logging.debug('Processing SAML logout response')\n", "auth = self._saml_auth()\n", "errors = []\n", "auth.process_slo()\n", "errors = auth.get_errors()\n", "if errors:\n", "if clear_session_on_errors:\n", "logging.info('SAML SLO request was successful')\n", "self.clear_session()\n", "return self._render_saml_errors_json(auth)\n", "self.clear_session()\n", "return self.redirect_to_goodbye()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Condition", "Expr'", "Expr'", "Return'", "Expr'", "Return'" ]
[ "def FUNC_0(VAR_0):...\n", "VAR_12 = ['select', 'delete', 'drop', 'update', 'case', 'and', 'or', 'like']\n", "def FUNC_6():...\n", "VAR_13.throw(_('Invalid Search Field'), VAR_13.DataError)\n", "if len(VAR_0) >= 3:\n", "if '=' in VAR_0:\n", "FUNC_6()\n", "if ' --' in VAR_0:\n", "FUNC_6()\n", "if any(' {0} '.format(keyword) in VAR_0.split() for keyword in VAR_12):\n", "FUNC_6()\n", "if any(keyword in VAR_0.split() for keyword in VAR_12):\n", "FUNC_6()\n" ]
[ "def sanitize_searchfield(searchfield):...\n", "blacklisted_keywords = ['select', 'delete', 'drop', 'update', 'case', 'and',\n 'or', 'like']\n", "def _raise_exception():...\n", "frappe.throw(_('Invalid Search Field'), frappe.DataError)\n", "if len(searchfield) >= 3:\n", "if '=' in searchfield:\n", "_raise_exception()\n", "if ' --' in searchfield:\n", "_raise_exception()\n", "if any(' {0} '.format(keyword) in searchfield.split() for keyword in\n", "_raise_exception()\n", "if any(keyword in searchfield.split() for keyword in blacklisted_keywords):\n", "_raise_exception()\n" ]
[ 0, 0, 0, 4, 0, 0, 4, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "FunctionDef'", "Expr'", "Condition", "Condition", "Expr'", "Condition", "Expr'", "For", "Expr'", "For", "Expr'" ]
[ "@staticmethod...\n", "if tuple(VAR_17) != tuple(VAR_12):\n", "VAR_31 = Diff.from_string_arrays(VAR_12, VAR_17)\n", "for diff in VAR_31.split_diff():\n", "yield diff\n" ]
[ "@staticmethod...\n", "if tuple(new_file) != tuple(file):\n", "wholediff = Diff.from_string_arrays(file, new_file)\n", "for diff in wholediff.split_diff():\n", "yield diff\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Assign'", "For", "Expr'" ]
[ "def FUNC_16(self, VAR_34, VAR_35):...\n", "\"\"\"docstring\"\"\"\n", "return os.write(VAR_34, VAR_35)\n" ]
[ "def write(self, fd, string):...\n", "\"\"\"docstring\"\"\"\n", "return os.write(fd, string)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "\"\"\"string\"\"\"\n", "from __future__ import unicode_literals\n", "from django.conf import settings\n", "from django.conf.urls import include\n", "from django.conf.urls import patterns\n", "from django.conf.urls import url\n", "from api import routers\n", "from api import views\n", "VAR_0 = routers.ApiRouter()\n", "VAR_1 = patterns('', url('^', include(VAR_0.urls)), url(\n '^apps/(?P<id>{})/config/?'.format(settings.APP_URL_REGEX), views.\n AppConfigViewSet.as_view({'get': 'retrieve', 'post': 'create'})), url(\n '^apps/(?P<id>{})/builds/(?P<uuid>[-_\\\\w]+)/?'.format(settings.\n APP_URL_REGEX), views.AppBuildViewSet.as_view({'get': 'retrieve'})),\n url('^apps/(?P<id>{})/builds/?'.format(settings.APP_URL_REGEX), views.\n AppBuildViewSet.as_view({'get': 'list', 'post': 'create'})), url(\n '^apps/(?P<id>{})/releases/v(?P<version>[0-9]+)/?'.format(settings.\n APP_URL_REGEX), views.AppReleaseViewSet.as_view({'get': 'retrieve'})),\n url('^apps/(?P<id>{})/releases/rollback/?'.format(settings.\n APP_URL_REGEX), views.AppReleaseViewSet.as_view({'post': 'rollback'})),\n url('^apps/(?P<id>{})/releases/?'.format(settings.APP_URL_REGEX), views\n .AppReleaseViewSet.as_view({'get': 'list'})), url(\n '^apps/(?P<id>{})/containers/(?P<type>[-_\\\\w]+)/(?P<num>[-_\\\\w]+)/?'.\n format(settings.APP_URL_REGEX), views.AppContainerViewSet.as_view({\n 'get': 'retrieve'})), url(\n '^apps/(?P<id>{})/containers/(?P<type>[-_\\\\w.]+)/?'.format(settings.\n APP_URL_REGEX), views.AppContainerViewSet.as_view({'get': 'list'})),\n url('^apps/(?P<id>{})/containers/?'.format(settings.APP_URL_REGEX),\n views.AppContainerViewSet.as_view({'get': 'list'})), url(\n '^apps/(?P<id>{})/domains/(?P<domain>[-\\\\._\\\\w]+)/?'.format(settings.\n APP_URL_REGEX), views.DomainViewSet.as_view({'delete': 'destroy'})),\n url('^apps/(?P<id>{})/domains/?'.format(settings.APP_URL_REGEX), views.\n DomainViewSet.as_view({'post': 'create', 'get': 'list'})), url(\n '^apps/(?P<id>{})/scale/?'.format(settings.APP_URL_REGEX), views.\n AppViewSet.as_view({'post': 'scale'})), url('^apps/(?P<id>{})/logs/?'.\n format(settings.APP_URL_REGEX), views.AppViewSet.as_view({'get': 'logs'\n })), url('^apps/(?P<id>{})/run/?'.format(settings.APP_URL_REGEX), views\n .AppViewSet.as_view({'post': 'run'})), url(\n '^apps/(?P<id>{})/perms/(?P<username>[-_\\\\w]+)/?'.format(settings.\n APP_URL_REGEX), views.AppPermsViewSet.as_view({'delete': 'destroy'})),\n url('^apps/(?P<id>{})/perms/?'.format(settings.APP_URL_REGEX), views.\n AppPermsViewSet.as_view({'get': 'list', 'post': 'create'})), url(\n '^apps/(?P<id>{})/?'.format(settings.APP_URL_REGEX), views.AppViewSet.\n as_view({'get': 'retrieve', 'delete': 'destroy'})), url('^apps/?',\n views.AppViewSet.as_view({'get': 'list', 'post': 'create'})), url(\n '^keys/(?P<id>.+)/?', views.KeyViewSet.as_view({'get': 'retrieve',\n 'delete': 'destroy'})), url('^keys/?', views.KeyViewSet.as_view({'get':\n 'list', 'post': 'create'})), url('^hooks/push/?', views.PushHookViewSet\n .as_view({'post': 'create'})), url('^hooks/build/?', views.\n BuildHookViewSet.as_view({'post': 'create'})), url('^hooks/config/?',\n views.ConfigHookViewSet.as_view({'post': 'create'})), url(\n '^auth/register/?', views.UserRegistrationView.as_view({'post':\n 'create'})), url('^auth/cancel/?', views.UserCancellationView.as_view({\n 'delete': 'destroy'})), url('^auth/', include('rest_framework.urls',\n namespace='rest_framework')), url('^generate-api-key/',\n 'rest_framework.authtoken.views.obtain_auth_token'), url(\n 
'^admin/perms/(?P<username>[-_\\\\w]+)/?', views.AdminPermsViewSet.\n as_view({'delete': 'destroy'})), url('^admin/perms/?', views.\n AdminPermsViewSet.as_view({'get': 'list', 'post': 'create'})))\n" ]
[ "\"\"\"\nRESTful URL patterns and routing for the Deis API app.\n\n\nApplications\n============\n\n.. http:get:: /api/apps/(string:id)/\n\n Retrieve a :class:`~api.models.App` by its `id`.\n\n.. http:delete:: /api/apps/(string:id)/\n\n Destroy a :class:`~api.models.App` by its `id`.\n\n.. http:get:: /api/apps/\n\n List all :class:`~api.models.App`\\\\s.\n\n.. http:post:: /api/apps/\n\n Create a new :class:`~api.models.App`.\n\n\nApplication Release Components\n------------------------------\n\n.. http:get:: /api/apps/(string:id)/config/\n\n List all :class:`~api.models.Config`\\\\s.\n\n.. http:post:: /api/apps/(string:id)/config/\n\n Create a new :class:`~api.models.Config`.\n\n.. http:get:: /api/apps/(string:id)/builds/(string:uuid)/\n\n Retrieve a :class:`~api.models.Build` by its `uuid`.\n\n.. http:get:: /api/apps/(string:id)/builds/\n\n List all :class:`~api.models.Build`\\\\s.\n\n.. http:post:: /api/apps/(string:id)/builds/\n\n Create a new :class:`~api.models.Build`.\n\n.. http:get:: /api/apps/(string:id)/releases/(int:version)/\n\n Retrieve a :class:`~api.models.Release` by its `version`.\n\n.. http:get:: /api/apps/(string:id)/releases/\n\n List all :class:`~api.models.Release`\\\\s.\n\n.. http:post:: /api/apps/(string:id)/releases/rollback/\n\n Rollback to a previous :class:`~api.models.Release`.\n\n\nApplication Infrastructure\n--------------------------\n\n.. http:get:: /api/apps/(string:id)/containers/(string:type)/(int:num)/\n\n List all :class:`~api.models.Container`\\\\s.\n\n.. http:get:: /api/apps/(string:id)/containers/(string:type)/\n\n List all :class:`~api.models.Container`\\\\s.\n\n.. http:get:: /api/apps/(string:id)/containers/\n\n List all :class:`~api.models.Container`\\\\s.\n\n\nApplication Domains\n-------------------\n\n\n.. http:delete:: /api/apps/(string:id)/domains/(string:hostname)\n\n Destroy a :class:`~api.models.Domain` by its `hostname`\n\n.. http:get:: /api/apps/(string:id)/domains/\n\n List all :class:`~api.models.Domain`\\\\s.\n\n.. http:post:: /api/apps/(string:id)/domains/\n\n Create a new :class:`~api.models.Domain`\\\\s.\n\n\nApplication Actions\n-------------------\n\n.. http:post:: /api/apps/(string:id)/scale/\n\n See also\n :meth:`AppViewSet.scale() <api.views.AppViewSet.scale>`\n\n.. http:get:: /api/apps/(string:id)/logs/\n\n See also\n :meth:`AppViewSet.logs() <api.views.AppViewSet.logs>`\n\n.. http:post:: /api/apps/(string:id)/run/\n\n See also\n :meth:`AppViewSet.run() <api.views.AppViewSet.run>`\n\n\nApplication Sharing\n===================\n\n.. http:delete:: /api/apps/(string:id)/perms/(string:username)/\n\n Destroy an app permission by its `username`.\n\n.. http:get:: /api/apps/(string:id)/perms/\n\n List all permissions granted to this app.\n\n.. http:post:: /api/apps/(string:id)/perms/\n\n Create a new app permission.\n\n\nKeys\n====\n\n.. http:get:: /api/keys/(string:id)/\n\n Retrieve a :class:`~api.models.Key` by its `id`.\n\n.. http:delete:: /api/keys/(string:id)/\n\n Destroy a :class:`~api.models.Key` by its `id`.\n\n.. http:get:: /api/keys/\n\n List all :class:`~api.models.Key`\\\\s.\n\n.. http:post:: /api/keys/\n\n Create a new :class:`~api.models.Key`.\n\n\nAPI Hooks\n=========\n\n.. http:post:: /api/hooks/push/\n\n Create a new :class:`~api.models.Push`.\n\n.. http:post:: /api/hooks/build/\n\n Create a new :class:`~api.models.Build`.\n\n.. http:post:: /api/hooks/config/\n\n Retrieve latest application :class:`~api.models.Config`.\n\n\nAuth\n====\n\n.. http:post:: /api/auth/register/\n\n Create a new User.\n\n.. 
http:delete:: /api/auth/register/\n\n Destroy the logged-in User.\n\n.. http:post:: /api/auth/login\n\n Authenticate for the REST framework.\n\n.. http:post:: /api/auth/logout\n\n Clear authentication for the REST framework.\n\n.. http:get:: /api/generate-api-key/\n\n Generate an API key.\n\n\nAdmin Sharing\n=============\n\n.. http:delete:: /api/admin/perms/(string:username)/\n\n Destroy an admin permission by its `username`.\n\n.. http:get:: /api/admin/perms/\n\n List all admin permissions granted.\n\n.. http:post:: /api/admin/perms/\n\n Create a new admin permission.\n\n\"\"\"\n", "from __future__ import unicode_literals\n", "from django.conf import settings\n", "from django.conf.urls import include\n", "from django.conf.urls import patterns\n", "from django.conf.urls import url\n", "from api import routers\n", "from api import views\n", "router = routers.ApiRouter()\n", "urlpatterns = patterns('', url('^', include(router.urls)), url(\n '^apps/(?P<id>{})/config/?'.format(settings.APP_URL_REGEX), views.\n AppConfigViewSet.as_view({'get': 'retrieve', 'post': 'create'})), url(\n '^apps/(?P<id>{})/builds/(?P<uuid>[-_\\\\w]+)/?'.format(settings.\n APP_URL_REGEX), views.AppBuildViewSet.as_view({'get': 'retrieve'})),\n url('^apps/(?P<id>{})/builds/?'.format(settings.APP_URL_REGEX), views.\n AppBuildViewSet.as_view({'get': 'list', 'post': 'create'})), url(\n '^apps/(?P<id>{})/releases/v(?P<version>[0-9]+)/?'.format(settings.\n APP_URL_REGEX), views.AppReleaseViewSet.as_view({'get': 'retrieve'})),\n url('^apps/(?P<id>{})/releases/rollback/?'.format(settings.\n APP_URL_REGEX), views.AppReleaseViewSet.as_view({'post': 'rollback'})),\n url('^apps/(?P<id>{})/releases/?'.format(settings.APP_URL_REGEX), views\n .AppReleaseViewSet.as_view({'get': 'list'})), url(\n '^apps/(?P<id>{})/containers/(?P<type>[-_\\\\w]+)/(?P<num>[-_\\\\w]+)/?'.\n format(settings.APP_URL_REGEX), views.AppContainerViewSet.as_view({\n 'get': 'retrieve'})), url(\n '^apps/(?P<id>{})/containers/(?P<type>[-_\\\\w.]+)/?'.format(settings.\n APP_URL_REGEX), views.AppContainerViewSet.as_view({'get': 'list'})),\n url('^apps/(?P<id>{})/containers/?'.format(settings.APP_URL_REGEX),\n views.AppContainerViewSet.as_view({'get': 'list'})), url(\n '^apps/(?P<id>{})/domains/(?P<domain>[-\\\\._\\\\w]+)/?'.format(settings.\n APP_URL_REGEX), views.DomainViewSet.as_view({'delete': 'destroy'})),\n url('^apps/(?P<id>{})/domains/?'.format(settings.APP_URL_REGEX), views.\n DomainViewSet.as_view({'post': 'create', 'get': 'list'})), url(\n '^apps/(?P<id>{})/scale/?'.format(settings.APP_URL_REGEX), views.\n AppViewSet.as_view({'post': 'scale'})), url('^apps/(?P<id>{})/logs/?'.\n format(settings.APP_URL_REGEX), views.AppViewSet.as_view({'get': 'logs'\n })), url('^apps/(?P<id>{})/run/?'.format(settings.APP_URL_REGEX), views\n .AppViewSet.as_view({'post': 'run'})), url(\n '^apps/(?P<id>{})/perms/(?P<username>[-_\\\\w]+)/?'.format(settings.\n APP_URL_REGEX), views.AppPermsViewSet.as_view({'delete': 'destroy'})),\n url('^apps/(?P<id>{})/perms/?'.format(settings.APP_URL_REGEX), views.\n AppPermsViewSet.as_view({'get': 'list', 'post': 'create'})), url(\n '^apps/(?P<id>{})/?'.format(settings.APP_URL_REGEX), views.AppViewSet.\n as_view({'get': 'retrieve', 'delete': 'destroy'})), url('^apps/?',\n views.AppViewSet.as_view({'get': 'list', 'post': 'create'})), url(\n '^keys/(?P<id>.+)/?', views.KeyViewSet.as_view({'get': 'retrieve',\n 'delete': 'destroy'})), url('^keys/?', views.KeyViewSet.as_view({'get':\n 'list', 'post': 'create'})), url('^hooks/push/?', 
views.PushHookViewSet\n .as_view({'post': 'create'})), url('^hooks/build/?', views.\n BuildHookViewSet.as_view({'post': 'create'})), url('^hooks/config/?',\n views.ConfigHookViewSet.as_view({'post': 'create'})), url(\n '^auth/register/?', views.UserRegistrationView.as_view({'post':\n 'create'})), url('^auth/cancel/?', views.UserCancellationView.as_view({\n 'delete': 'destroy'})), url('^auth/', include('rest_framework.urls',\n namespace='rest_framework')), url('^generate-api-key/',\n 'rest_framework.authtoken.views.obtain_auth_token'), url(\n '^admin/perms/(?P<username>[-_\\\\w]+)/?', views.AdminPermsViewSet.\n as_view({'delete': 'destroy'})), url('^admin/perms/?', views.\n AdminPermsViewSet.as_view({'get': 'list', 'post': 'create'})))\n" ]
[ 5, 0, 0, 0, 0, 0, 0, 0, 0, 5 ]
[ "Expr'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'" ]
[ "def __init__(self, VAR_14):...\n", "self.is_authenticated = False\n", "self.is_active = False\n", "self.is_anonymous = True\n", "self.username = None\n", "self.user_id = VAR_14\n" ]
[ "def __init__(self, uid):...\n", "self.is_authenticated = False\n", "self.is_active = False\n", "self.is_anonymous = True\n", "self.username = None\n", "self.user_id = uid\n" ]
[ 0, 4, 4, 4, 4, 4 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_0(self):...\n", "return Locality.objects.raw(VAR_0)\n" ]
[ "def get_queryset(self):...\n", "return Locality.objects.raw(locality_list_raw_query)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_1():...\n", "task(name='interpreter', action=SelectInterpreter).install('pyprep')\n", "task(name='build-local-dists', action=BuildLocalPythonDistributions).install(\n 'pyprep')\n", "task(name='requirements', action=ResolveRequirements).install('pyprep')\n", "task(name='sources', action=GatherSources).install('pyprep')\n", "task(name='py', action=PythonRun).install('run')\n", "task(name='pytest-prep', action=PytestPrep).install('test')\n", "task(name='pytest', action=PytestRun).install('test')\n", "task(name='py', action=PythonRepl).install('repl')\n", "task(name='setup-py', action=SetupPy).install()\n", "task(name='py', action=PythonBinaryCreate).install('binary')\n", "task(name='py-wheels', action=LocalPythonDistributionArtifact).install('binary'\n )\n", "task(name='isort-prep', action=IsortPrep).install('fmt')\n", "task(name='isort', action=IsortRun).install('fmt')\n", "task(name='py', action=PythonBundle).install('bundle')\n", "task(name='unpack-wheels', action=UnpackWheels).install()\n" ]
[ "def register_goals():...\n", "task(name='interpreter', action=SelectInterpreter).install('pyprep')\n", "task(name='build-local-dists', action=BuildLocalPythonDistributions).install(\n 'pyprep')\n", "task(name='requirements', action=ResolveRequirements).install('pyprep')\n", "task(name='sources', action=GatherSources).install('pyprep')\n", "task(name='py', action=PythonRun).install('run')\n", "task(name='pytest-prep', action=PytestPrep).install('test')\n", "task(name='pytest', action=PytestRun).install('test')\n", "task(name='py', action=PythonRepl).install('repl')\n", "task(name='setup-py', action=SetupPy).install()\n", "task(name='py', action=PythonBinaryCreate).install('binary')\n", "task(name='py-wheels', action=LocalPythonDistributionArtifact).install('binary'\n )\n", "task(name='isort-prep', action=IsortPrep).install('fmt')\n", "task(name='isort', action=IsortRun).install('fmt')\n", "task(name='py', action=PythonBundle).install('bundle')\n", "task(name='unpack-wheels', action=UnpackWheels).install()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "VAR_15 = set()\n", "for VAR_28, VAR_27 in zip(self.output, self.rule.output):\n", "if VAR_28 in self.dynamic_output:\n", "VAR_16 = defaultdict(list)\n", "for VAR_28, w in self.expand_dynamic(VAR_27, VAR_13=self.wildcards, VAR_14=\n", "for combination in VAR_15:\n", "VAR_15.add(tuple(w.items()))\n", "for name, value in combination:\n", "return VAR_16\n", "VAR_16[name].append(value)\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "combinations = set()\n", "for f, f_ in zip(self.output, self.rule.output):\n", "if f in self.dynamic_output:\n", "wildcards = defaultdict(list)\n", "for f, w in self.expand_dynamic(f_, restriction=self.wildcards, omit_value=\n", "for combination in combinations:\n", "combinations.add(tuple(w.items()))\n", "for name, value in combination:\n", "return wildcards\n", "wildcards[name].append(value)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "For", "Condition", "Assign'", "For", "For", "Expr'", "For", "Return'", "Expr'" ]
[ "def __init__(self, VAR_1=None, VAR_2=5):...\n", "self.size = VAR_2\n", "self.data = VAR_1 or []\n" ]
[ "def __init__(self, list=None, size=5):...\n", "self.size = size\n", "self.data = list or []\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def __lt__(self, VAR_11):...\n", "return self.rule.__lt__(VAR_11.rule)\n" ]
[ "def __lt__(self, other):...\n", "return self.rule.__lt__(other.rule)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = '.yml'\n", "VAR_3 = os.path.basename(VAR_0)\n", "if os.path.isdir(VAR_0):\n", "VAR_4 = {}\n", "if os.path.isfile(VAR_0):\n", "for entry in os.listdir(VAR_0):\n", "if os.path.abspath(VAR_0) == os.path.abspath(sys.argv[0]):\n", "VAR_5 = os.path.join(VAR_0, entry)\n", "return VAR_3, VAR_4\n", "return None, None\n", "if VAR_0.endswith(VAR_2):\n", "VAR_6, VAR_7 = FUNC_0(VAR_5)\n", "VAR_3 = VAR_3[:-len(VAR_2)]\n", "return None, None\n", "if not VAR_6:\n", "return VAR_3, yaml.load(open(VAR_0))\n", "return VAR_3, None\n", "VAR_4[VAR_6] = VAR_7\n" ]
[ "def _load_yml_filedir(path):...\n", "\"\"\"docstring\"\"\"\n", "YML_FILE_SUFFIX = '.yml'\n", "bpath = os.path.basename(path)\n", "if os.path.isdir(path):\n", "result = {}\n", "if os.path.isfile(path):\n", "for entry in os.listdir(path):\n", "if os.path.abspath(path) == os.path.abspath(sys.argv[0]):\n", "epath = os.path.join(path, entry)\n", "return bpath, result\n", "return None, None\n", "if path.endswith(YML_FILE_SUFFIX):\n", "key, value = _load_yml_filedir(epath)\n", "bpath = bpath[:-len(YML_FILE_SUFFIX)]\n", "return None, None\n", "if not key:\n", "return bpath, yaml.load(open(path))\n", "return bpath, None\n", "result[key] = value\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "For", "Condition", "Assign'", "Return'", "Return'", "Condition", "Assign'", "Assign'", "Return'", "Condition", "Return'", "Return'", "Assign'" ]
[ "def __init__(VAR_39, VAR_40, VAR_41, VAR_42, VAR_43, VAR_44, VAR_45, VAR_46):...\n", "VAR_39.returncode = None\n", "VAR_6 = [sys.executable, bot_main.THIS_FILE, 'run_isolated']\n", "self.assertEqual(VAR_6, VAR_40[:len(VAR_6)])\n", "self.assertEqual(True, VAR_41)\n", "self.assertEqual(subprocess42.PIPE, VAR_43)\n", "self.assertEqual(subprocess42.STDOUT, VAR_44)\n", "self.assertEqual(subprocess42.PIPE, VAR_45)\n", "self.assertEqual(sys.platform != 'win32', VAR_46)\n" ]
[ "def __init__(self2, cmd, detached, cwd, stdout, stderr, stdin, close_fds):...\n", "self2.returncode = None\n", "expected = [sys.executable, bot_main.THIS_FILE, 'run_isolated']\n", "self.assertEqual(expected, cmd[:len(expected)])\n", "self.assertEqual(True, detached)\n", "self.assertEqual(subprocess42.PIPE, stdout)\n", "self.assertEqual(subprocess42.STDOUT, stderr)\n", "self.assertEqual(subprocess42.PIPE, stdin)\n", "self.assertEqual(sys.platform != 'win32', close_fds)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_1(VAR_2, *VAR_3, VAR_4=None, VAR_5=True, VAR_6=False, **VAR_7):...\n", "VAR_2 = FUNC_0(VAR_2)\n", "if VAR_4 is not None:\n", "VAR_4 = FUNC_0(VAR_4)\n", "VAR_15 = CLASS_0(VAR_2, VAR_4, VAR_10={'inference': inference,\n 'abort_on_error': abort_on_error})\n", "return VAR_15.run()\n" ]
[ "def validate(target_graph, *args, shacl_graph=None, inference=True,...\n", "target_graph = _load_into_graph(target_graph)\n", "if shacl_graph is not None:\n", "shacl_graph = _load_into_graph(shacl_graph)\n", "validator = Validator(target_graph, shacl_graph, options={'inference':\n inference, 'abort_on_error': abort_on_error})\n", "return validator.run()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def __init__(self, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5=5432):...\n", "self.db_conn = psycopg2.connect(VAR_4=host, VAR_1=dbname, VAR_2=user, VAR_3\n =password, VAR_5=port)\n", "self.cursor = self.db_conn.cursor()\n" ]
[ "def __init__(self, dbname, user, password, host, port=5432):...\n", "self.db_conn = psycopg2.connect(host=host, dbname=dbname, user=user,\n password=password, port=port)\n", "self.cursor = self.db_conn.cursor()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "@VAR_0.route('/loadValues/<protocole>', methods=['GET'])...\n", "VAR_8 = getConnexion()\n", "VAR_9 = 'SELECT * FROM ' + VAR_5\n", "VAR_8.cur.execute(VAR_9)\n", "VAR_11 = VAR_8.cur.fetchall()\n", "VAR_13 = dict()\n", "for VAR_19 in VAR_11:\n", "VAR_18 = ast.literal_eval(VAR_19[3])\n", "return Response(flask.json.dumps(VAR_13), mimetype='application/json')\n", "VAR_13[VAR_19[2]] = VAR_18['values']\n" ]
[ "@addObs.route('/loadValues/<protocole>', methods=['GET'])...\n", "db = getConnexion()\n", "sql = 'SELECT * FROM ' + protocole\n", "db.cur.execute(sql)\n", "res = db.cur.fetchall()\n", "finalDict = dict()\n", "for r in res:\n", "dictValues = ast.literal_eval(r[3])\n", "return Response(flask.json.dumps(finalDict), mimetype='application/json')\n", "finalDict[r[2]] = dictValues['values']\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Assign'" ]
[ "def FUNC_20(VAR_36):...\n", "VAR_63 = {}\n", "for VAR_62, info in VAR_36.items():\n", "if VAR_62.core:\n", "return VAR_63\n", "VAR_63[VAR_62] = info\n" ]
[ "def _filter_core_keywors(keywords):...\n", "matches = {}\n", "for kw, info in keywords.items():\n", "if kw.core:\n", "return matches\n", "matches[kw] = info\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Condition", "Return'", "Assign'" ]
[ "@classmethod...\n", "VAR_31 = lambda _request: None\n", "return [VAR_31, VAR_31]\n" ]
[ "@classmethod...\n", "non_applicable = lambda _request: None\n", "return [non_applicable, non_applicable]\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "def FUNC_4(VAR_7, **VAR_6):...\n", "import telebot\n", "from telebot import apihelper\n", "import sql\n", "VAR_23 = sql.get_telegram_by_ip(VAR_6.get('ip'))\n", "VAR_24 = sql.get_setting('proxy')\n", "for telegram in VAR_23:\n", "VAR_69 = telegram[1]\n", "if VAR_24 is not None:\n", "VAR_70 = telegram[2]\n", "apihelper.proxy = {'https': VAR_24}\n", "VAR_71 = telebot.TeleBot(token=token_bot)\n", "print(\n \"Fatal: Can't send message. Add Telegram chanel before use alerting at this servers group\"\n )\n", "VAR_71.send_message(chat_id=channel_name, VAR_14=mess)\n", "sys.exit()\n" ]
[ "def telegram_send_mess(mess, **kwargs):...\n", "import telebot\n", "from telebot import apihelper\n", "import sql\n", "telegrams = sql.get_telegram_by_ip(kwargs.get('ip'))\n", "proxy = sql.get_setting('proxy')\n", "for telegram in telegrams:\n", "token_bot = telegram[1]\n", "if proxy is not None:\n", "channel_name = telegram[2]\n", "apihelper.proxy = {'https': proxy}\n", "bot = telebot.TeleBot(token=token_bot)\n", "print(\n \"Fatal: Can't send message. Add Telegram chanel before use alerting at this servers group\"\n )\n", "bot.send_message(chat_id=channel_name, text=mess)\n", "sys.exit()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Import'", "ImportFrom'", "Import'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_11(self, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "self.mock(handler.api, 'get_current_identity', lambda : VAR_5)\n" ]
[ "def mock_get_current_identity(self, ident):...\n", "\"\"\"docstring\"\"\"\n", "self.mock(handler.api, 'get_current_identity', lambda : ident)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'" ]
[ "def FUNC_21(self, VAR_21, VAR_38, VAR_39):...\n", "\"\"\"docstring\"\"\"\n", "VAR_57 = self.getfile(VAR_21)\n", "if VAR_57 == False:\n", "VAR_57[VAR_7] = VAR_39\n" ]
[ "def utime(self, path, atime, mtime):...\n", "\"\"\"docstring\"\"\"\n", "p = self.getfile(path)\n", "if p == False:\n", "p[A_CTIME] = mtime\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'" ]
[ "def FUNC_11(self):...\n", "VAR_9 = []\n", "VAR_10 = {}\n", "for kid in self.child_groups:\n", "VAR_13 = kid.get_hosts()\n", "for VAR_14 in self.hosts:\n", "for VAR_15 in VAR_13:\n", "if VAR_14 not in VAR_10:\n", "return VAR_9\n", "if VAR_15 not in VAR_10:\n", "VAR_10[VAR_14] = 1\n", "VAR_10[VAR_15] = 1\n", "if self.name == 'all' and VAR_14.implicit:\n", "if self.name == 'all' and VAR_15.implicit:\n", "VAR_9.append(VAR_14)\n", "VAR_9.append(VAR_15)\n" ]
[ "def _get_hosts(self):...\n", "hosts = []\n", "seen = {}\n", "for kid in self.child_groups:\n", "kid_hosts = kid.get_hosts()\n", "for mine in self.hosts:\n", "for kk in kid_hosts:\n", "if mine not in seen:\n", "return hosts\n", "if kk not in seen:\n", "seen[mine] = 1\n", "seen[kk] = 1\n", "if self.name == 'all' and mine.implicit:\n", "if self.name == 'all' and kk.implicit:\n", "hosts.append(mine)\n", "hosts.append(kk)\n" ]
[ 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Assign'", "For", "For", "Condition", "Return'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_3(self):...\n", "VAR_6 = BeautifulSoup(self.res_data, 'html.parser')\n", "VAR_7 = {}\n", "VAR_8 = VAR_6.findAll(id='user_test_status')[0]\n", "VAR_7['status'] = VAR_8.text.strip()\n", "VAR_9 = VAR_6.findAll(id='compilation')\n", "if VAR_9:\n", "VAR_12 = VAR_9[0]\n", "VAR_7['compile_output'] = None\n", "VAR_7['compile_output'] = VAR_12.pre.text.strip()\n", "return VAR_7\n" ]
[ "def get_user_test_info(self):...\n", "soup = BeautifulSoup(self.res_data, 'html.parser')\n", "info = {}\n", "tag = soup.findAll(id='user_test_status')[0]\n", "info['status'] = tag.text.strip()\n", "tags = soup.findAll(id='compilation')\n", "if tags:\n", "content = tags[0]\n", "info['compile_output'] = None\n", "info['compile_output'] = content.pre.text.strip()\n", "return info\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "from __future__ import unicode_literals\n", "from six.moves import urllib_parse as urlparse\n", "import os\n", "import collections\n", "import requests\n", "import six\n", "import json\n", "import yaml\n", "from flex.context_managers import ErrorDict\n", "from flex.exceptions import ValidationError\n", "from flex.loading.definitions import definitions_validator\n", "from flex.loading.schema import swagger_schema_validator\n", "from flex.loading.schema.paths.path_item.operation.responses.single.schema import schema_validator\n", "from flex.http import normalize_request, normalize_response\n", "from flex.validation.common import validate_object\n", "from flex.validation.request import validate_request\n", "from flex.validation.response import validate_response\n", "def FUNC_0(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "if isinstance(VAR_0, collections.Mapping):\n", "return VAR_0\n", "if hasattr(VAR_0, 'read') and callable(VAR_0.read):\n", "VAR_13 = VAR_0.read()\n", "if os.path.exists(os.path.expanduser(str(VAR_0))):\n", "def FUNC_1(VAR_1):...\n", "return json.loads(VAR_13)\n", "return yaml.load(VAR_13)\n", "VAR_13 = source_file.read()\n", "if isinstance(VAR_0, six.string_types):\n", "VAR_8 = {'deferred_references': set()}\n", "VAR_14 = urlparse.urlparse(VAR_0)\n", "VAR_9 = definitions_validator(VAR_1, VAR_8=context)\n", "if VAR_14.scheme and VAR_14.netloc:\n", "VAR_10 = swagger_schema_validator(VAR_1, VAR_8=swagger_definitions)\n", "VAR_12 = requests.get(VAR_0)\n", "VAR_13 = VAR_0\n", "return VAR_10\n", "if isinstance(VAR_12.content, six.binary_type):\n", "VAR_13 = six.text_type(VAR_12.content, encoding='utf-8')\n", "VAR_13 = VAR_12.content\n" ]
[ "from __future__ import unicode_literals\n", "from six.moves import urllib_parse as urlparse\n", "import os\n", "import collections\n", "import requests\n", "import six\n", "import json\n", "import yaml\n", "from flex.context_managers import ErrorDict\n", "from flex.exceptions import ValidationError\n", "from flex.loading.definitions import definitions_validator\n", "from flex.loading.schema import swagger_schema_validator\n", "from flex.loading.schema.paths.path_item.operation.responses.single.schema import schema_validator\n", "from flex.http import normalize_request, normalize_response\n", "from flex.validation.common import validate_object\n", "from flex.validation.request import validate_request\n", "from flex.validation.response import validate_response\n", "def load_source(source):...\n", "\"\"\"docstring\"\"\"\n", "if isinstance(source, collections.Mapping):\n", "return source\n", "if hasattr(source, 'read') and callable(source.read):\n", "raw_source = source.read()\n", "if os.path.exists(os.path.expanduser(str(source))):\n", "def parse(raw_schema):...\n", "return json.loads(raw_source)\n", "return yaml.load(raw_source)\n", "raw_source = source_file.read()\n", "if isinstance(source, six.string_types):\n", "context = {'deferred_references': set()}\n", "parts = urlparse.urlparse(source)\n", "swagger_definitions = definitions_validator(raw_schema, context=context)\n", "if parts.scheme and parts.netloc:\n", "swagger_schema = swagger_schema_validator(raw_schema, context=\n swagger_definitions)\n", "response = requests.get(source)\n", "raw_source = source\n", "return swagger_schema\n", "if isinstance(response.content, six.binary_type):\n", "raw_source = six.text_type(response.content, encoding='utf-8')\n", "raw_source = response.content\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Docstring", "Condition", "Return'", "Condition", "Assign'", "Condition", "FunctionDef'", "Return'", "Return'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Return'", "Condition", "Assign'", "Assign'" ]
[ "def FUNC_2():...\n", "\"\"\"docstring\"\"\"\n", "VAR_8 = subprocess.Popen('nmcli -t -f SSID,BARS device wifi list', shell=\n True, stdout=subprocess.PIPE).communicate()[0]\n", "VAR_9 = VAR_8.split('\\n')\n", "VAR_10 = []\n", "for row in VAR_9:\n", "VAR_20 = row.split(':')\n", "return VAR_10\n", "print(VAR_20)\n", "VAR_10.append(VAR_20)\n" ]
[ "def get_allAPs():...\n", "\"\"\"docstring\"\"\"\n", "ps = subprocess.Popen('nmcli -t -f SSID,BARS device wifi list', shell=True,\n stdout=subprocess.PIPE).communicate()[0]\n", "wifirows = ps.split('\\n')\n", "wifi = []\n", "for row in wifirows:\n", "entry = row.split(':')\n", "return wifi\n", "print(entry)\n", "wifi.append(entry)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Expr'", "Expr'" ]
[ "def FUNC_12(self):...\n", "\"\"\"docstring\"\"\"\n" ]
[ "def _attribute_iterator(self):...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Docstring" ]
[ "def FUNC_26():...\n", "return self.closed\n" ]
[ "def stop():...\n", "return self.closed\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_11(self, VAR_17):...\n", "\"\"\"docstring\"\"\"\n", "VAR_44 = 'confirmation', 'paging', 'events', 'formatoutput'\n", "VAR_0.error(_('Failed to setup the Dell EqualLogic driver'))\n", "for feature in VAR_44:\n", "self._eql_execute('cli-settings', feature, 'off')\n", "for line in self._eql_execute('grpparams', 'show'):\n", "if line.startswith('Group-Ipaddress:'):\n", "VAR_0.info(_('EQL-driver: Setup is complete, group IP is %s'), self._group_ip)\n", "VAR_50 = line.rstrip().partition(' ')\n", "self._group_ip = VAR_50[-1]\n" ]
[ "def do_setup(self, context):...\n", "\"\"\"docstring\"\"\"\n", "disabled_cli_features = 'confirmation', 'paging', 'events', 'formatoutput'\n", "LOG.error(_('Failed to setup the Dell EqualLogic driver'))\n", "for feature in disabled_cli_features:\n", "self._eql_execute('cli-settings', feature, 'off')\n", "for line in self._eql_execute('grpparams', 'show'):\n", "if line.startswith('Group-Ipaddress:'):\n", "LOG.info(_('EQL-driver: Setup is complete, group IP is %s'), self._group_ip)\n", "out_tup = line.rstrip().partition(' ')\n", "self._group_ip = out_tup[-1]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "For", "Expr'", "For", "Condition", "Expr'", "Assign'", "Assign'" ]
[ "def FUNC_18(*VAR_20, **VAR_11):...\n", "\"\"\"docstring\"\"\"\n", "VAR_32 = VAR_20[0]\n", "if len(VAR_20) == 1:\n", "VAR_53 = product\n", "if len(VAR_20) == 2:\n", "if isinstance(VAR_32, str):\n", "VAR_53 = VAR_20[1]\n", "VAR_32 = [VAR_32]\n", "def FUNC_36(VAR_11):...\n", "for VAR_51, VAR_58 in VAR_11.items():\n", "if isinstance(VAR_58, str) or not isinstance(VAR_58, Iterable):\n", "return [VAR_10.format(**comb) for comb in map(dict, VAR_53(*FUNC_36(VAR_11)\n )) for VAR_10 in VAR_32]\n", "VAR_58 = [VAR_58]\n", "yield [(VAR_51, VAR_16) for VAR_16 in VAR_58]\n" ]
[ "def expand(*args, **wildcards):...\n", "\"\"\"docstring\"\"\"\n", "filepatterns = args[0]\n", "if len(args) == 1:\n", "combinator = product\n", "if len(args) == 2:\n", "if isinstance(filepatterns, str):\n", "combinator = args[1]\n", "filepatterns = [filepatterns]\n", "def flatten(wildcards):...\n", "for wildcard, values in wildcards.items():\n", "if isinstance(values, str) or not isinstance(values, Iterable):\n", "return [filepattern.format(**comb) for comb in map(dict, combinator(*\n flatten(wildcards))) for filepattern in filepatterns]\n", "values = [values]\n", "yield [(wildcard, value) for value in values]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "FunctionDef'", "For", "Condition", "Return'", "Assign'", "Expr'" ]
[ "def __init__(self, VAR_5='table.db', VAR_6=None, VAR_7=None):...\n", "self._phrase_table_column_names = ['id', 'mlen', 'clen', 'input_phrase',\n 'phrase', 'freq', 'user_freq']\n", "self.old_phrases = []\n", "self._conf_file_path = '/usr/share/ibus-typing-booster/hunspell-tables/'\n", "self.ime_properties = CLASS_0(self._conf_file_path + VAR_7)\n", "self._mlen = int(self.ime_properties.get('max_key_length'))\n", "self._m17ndb = 'm17n'\n", "self._m17n_mim_name = ''\n", "self.lang_chars = self.ime_properties.get('lang_chars')\n", "if self.lang_chars != None:\n", "self.lang_chars = self.lang_chars.decode('utf8')\n", "self.lang_chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'\n", "self.encoding = self.ime_properties.get('encoding')\n", "self.hunspell_obj = hunspell_suggest.Hunspell(lang=self.ime_properties.get(\n 'languages'), dict_name=self.ime_properties.get('hunspell_dict'),\n aff_name=self.ime_properties.get('hunspell_dict').replace('.dic',\n '.aff'), encoding=self.encoding, lang_chars=self.lang_chars)\n", "self.startchars = self.get_start_chars()\n", "VAR_6 = self.ime_properties.get('name') + '-user.db'\n", "if VAR_6 != None:\n", "VAR_46 = os.getenv('HOME')\n", "VAR_6 = ':memory:'\n", "VAR_47 = path.join(VAR_46, '.local/share/.ibus', 'hunspell-tables')\n", "sys.stderr.write('Connect to the database %(name)s.\\n' % {'name': VAR_6})\n", "sys.stderr.write('Could not open the database %(name)s.\\n' % {'name': VAR_6})\n", "self.create_tables('user_db')\n", "if not path.isdir(VAR_47):\n", "self.db = sqlite3.connect(VAR_6)\n", "VAR_56 = '%s.%d' % (VAR_6, os.getpid())\n", "if self.old_phrases:\n", "os.makedirs(VAR_47)\n", "VAR_6 = path.join(VAR_47, VAR_6)\n", "self.db.execute('PRAGMA page_size = 8192; ')\n", "sys.stderr.write('Renaming the incompatible database to \"%(name)s\".\\n' % {\n 'name': VAR_56})\n", "VAR_48 = filter(lambda x: x[0] > 1, self.old_phrases)\n", "self.create_indexes('user_db', VAR_13=False)\n", "if not path.exists(VAR_6):\n", "self.db.execute('PRAGMA cache_size = 20000; ')\n", "os.rename(VAR_6, VAR_56)\n", "VAR_48 = map(lambda x: [x[1]] + list(x[1:]), VAR_48)\n", "self.generate_userdb_desc()\n", "sys.stderr.write('The user database %(udb)s does not exist yet.\\n' % {'udb':\n VAR_6})\n", "VAR_51 = self.get_database_desc(VAR_6)\n", "import traceback\n", "self.db.execute('PRAGMA temp_store = MEMORY; ')\n", "sys.stderr.write('Creating a new, empty database \"%(name)s\".\\n' % {'name':\n VAR_6})\n", "map(self.u_add_phrase, VAR_48)\n", "VAR_21 = ':memory:'\n", "if VAR_51 == None or VAR_51['version'\n", "traceback.print_exc()\n", "self.db.execute('PRAGMA synchronous = OFF; ')\n", "self.init_user_db(VAR_6)\n", "self.db.commit()\n", "self.db.execute('ATTACH DATABASE \"%s\" AS mudb;' % VAR_21)\n", "sys.stderr.write('The user database %(udb)s seems to be incompatible.\\n' %\n {'udb': VAR_6})\n", "sys.stderr.write('Compatible database %(db)s found.\\n' % {'db': VAR_6})\n", "self.db.execute('ATTACH DATABASE \"%s\" AS user_db;' % VAR_6)\n", "self.db.execute('ATTACH DATABASE \"%s\" AS user_db;' % VAR_6)\n", "self.create_tables('mudb')\n", "if VAR_51 == None:\n", "sys.stderr.write('There is no version information in the database.\\n')\n", "if VAR_51['version'] != VAR_0:\n", "sys.stderr.write(\n 'Trying to recover the phrases from the old, incompatible database.\\n')\n", "sys.stderr.write(\n 'The version of the database does not match (too old or too new?).\\n')\n", "if self.get_number_of_columns_of_phrase_table(VAR_6) != len(self.\n", "self.old_phrases = 
self.extract_user_phrases(VAR_6)\n", "sys.stderr.write('ibus-typing-booster wants version=%s\\n' % VAR_0)\n", "sys.stderr.write('The number of columns of the database does not match.\\n')\n", "VAR_56 = '%s.%d' % (VAR_6, os.getpid())\n", "sys.stderr.write('But the database actually has version=%s\\n' % VAR_51[\n 'version'])\n", "sys.stderr.write('ibus-typing-booster expects %(col)s columns.\\n' % {'col':\n len(self._phrase_table_column_names)})\n", "sys.stderr.write('Renaming the incompatible database to \"%(name)s\".\\n' % {\n 'name': VAR_56})\n", "sys.stderr.write('But the database actually has %(col)s columns.\\n' % {\n 'col': self.get_number_of_columns_of_phrase_table(VAR_6)})\n", "os.rename(VAR_6, VAR_56)\n", "sys.stderr.write('Creating a new, empty database \"%(name)s\".\\n' % {'name':\n VAR_6})\n", "self.init_user_db(VAR_6)\n", "sys.stderr.write('If user phrases were successfully recovered from the old,\\n')\n", "sys.stderr.write(\n 'incompatible database, they will be used to initialize the new database.\\n'\n )\n" ]
[ "def __init__(self, name='table.db', user_db=None, filename=None):...\n", "self._phrase_table_column_names = ['id', 'mlen', 'clen', 'input_phrase',\n 'phrase', 'freq', 'user_freq']\n", "self.old_phrases = []\n", "self._conf_file_path = '/usr/share/ibus-typing-booster/hunspell-tables/'\n", "self.ime_properties = ImeProperties(self._conf_file_path + filename)\n", "self._mlen = int(self.ime_properties.get('max_key_length'))\n", "self._m17ndb = 'm17n'\n", "self._m17n_mim_name = ''\n", "self.lang_chars = self.ime_properties.get('lang_chars')\n", "if self.lang_chars != None:\n", "self.lang_chars = self.lang_chars.decode('utf8')\n", "self.lang_chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'\n", "self.encoding = self.ime_properties.get('encoding')\n", "self.hunspell_obj = hunspell_suggest.Hunspell(lang=self.ime_properties.get(\n 'languages'), dict_name=self.ime_properties.get('hunspell_dict'),\n aff_name=self.ime_properties.get('hunspell_dict').replace('.dic',\n '.aff'), encoding=self.encoding, lang_chars=self.lang_chars)\n", "self.startchars = self.get_start_chars()\n", "user_db = self.ime_properties.get('name') + '-user.db'\n", "if user_db != None:\n", "home_path = os.getenv('HOME')\n", "user_db = ':memory:'\n", "tables_path = path.join(home_path, '.local/share/.ibus', 'hunspell-tables')\n", "sys.stderr.write('Connect to the database %(name)s.\\n' % {'name': user_db})\n", "sys.stderr.write('Could not open the database %(name)s.\\n' % {'name': user_db})\n", "self.create_tables('user_db')\n", "if not path.isdir(tables_path):\n", "self.db = sqlite3.connect(user_db)\n", "new_name = '%s.%d' % (user_db, os.getpid())\n", "if self.old_phrases:\n", "os.makedirs(tables_path)\n", "user_db = path.join(tables_path, user_db)\n", "self.db.execute('PRAGMA page_size = 8192; ')\n", "sys.stderr.write('Renaming the incompatible database to \"%(name)s\".\\n' % {\n 'name': new_name})\n", "phrases = filter(lambda x: x[0] > 1, self.old_phrases)\n", "self.create_indexes('user_db', commit=False)\n", "if not path.exists(user_db):\n", "self.db.execute('PRAGMA cache_size = 20000; ')\n", "os.rename(user_db, new_name)\n", "phrases = map(lambda x: [x[1]] + list(x[1:]), phrases)\n", "self.generate_userdb_desc()\n", "sys.stderr.write('The user database %(udb)s does not exist yet.\\n' % {'udb':\n user_db})\n", "desc = self.get_database_desc(user_db)\n", "import traceback\n", "self.db.execute('PRAGMA temp_store = MEMORY; ')\n", "sys.stderr.write('Creating a new, empty database \"%(name)s\".\\n' % {'name':\n user_db})\n", "map(self.u_add_phrase, phrases)\n", "mudb = ':memory:'\n", "if desc == None or desc['version'\n", "traceback.print_exc()\n", "self.db.execute('PRAGMA synchronous = OFF; ')\n", "self.init_user_db(user_db)\n", "self.db.commit()\n", "self.db.execute('ATTACH DATABASE \"%s\" AS mudb;' % mudb)\n", "sys.stderr.write('The user database %(udb)s seems to be incompatible.\\n' %\n {'udb': user_db})\n", "sys.stderr.write('Compatible database %(db)s found.\\n' % {'db': user_db})\n", "self.db.execute('ATTACH DATABASE \"%s\" AS user_db;' % user_db)\n", "self.db.execute('ATTACH DATABASE \"%s\" AS user_db;' % user_db)\n", "self.create_tables('mudb')\n", "if desc == None:\n", "sys.stderr.write('There is no version information in the database.\\n')\n", "if desc['version'] != user_database_version:\n", "sys.stderr.write(\n 'Trying to recover the phrases from the old, incompatible database.\\n')\n", "sys.stderr.write(\n 'The version of the database does not match (too old or too new?).\\n')\n", "if 
self.get_number_of_columns_of_phrase_table(user_db) != len(self.\n", "self.old_phrases = self.extract_user_phrases(user_db)\n", "sys.stderr.write('ibus-typing-booster wants version=%s\\n' %\n user_database_version)\n", "sys.stderr.write('The number of columns of the database does not match.\\n')\n", "new_name = '%s.%d' % (user_db, os.getpid())\n", "sys.stderr.write('But the database actually has version=%s\\n' % desc[\n 'version'])\n", "sys.stderr.write('ibus-typing-booster expects %(col)s columns.\\n' % {'col':\n len(self._phrase_table_column_names)})\n", "sys.stderr.write('Renaming the incompatible database to \"%(name)s\".\\n' % {\n 'name': new_name})\n", "sys.stderr.write('But the database actually has %(col)s columns.\\n' % {\n 'col': self.get_number_of_columns_of_phrase_table(user_db)})\n", "os.rename(user_db, new_name)\n", "sys.stderr.write('Creating a new, empty database \"%(name)s\".\\n' % {'name':\n user_db})\n", "self.init_user_db(user_db)\n", "sys.stderr.write('If user phrases were successfully recovered from the old,\\n')\n", "sys.stderr.write(\n 'incompatible database, they will be used to initialize the new database.\\n'\n )\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Condition", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Import'", "Expr'", "Expr'", "Expr'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_7(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_21 = config.ensure_configured()\n", "VAR_40 = api.reinitialize_request_cache()\n", "self.response.headers['Content-Security-Policy'] = 'string'\n", "self.response.headers['Strict-Transport-Security'\n ] = 'max-age=31536000; includeSubDomains; preload'\n", "if self.frame_options:\n", "self.response.headers['X-Frame-Options'] = self.frame_options\n", "VAR_41 = None\n", "for VAR_49 in self.get_auth_methods(VAR_21):\n", "self.auth_method = VAR_49\n", "VAR_41 = VAR_49(self.request)\n", "self.authentication_error(err)\n", "VAR_41 = VAR_41 or model.Anonymous\n", "if VAR_41:\n", "return\n", "VAR_42 = VAR_49 in (FUNC_4, FUNC_5)\n", "VAR_43 = self.request.headers.get(host_token.HTTP_HEADER)\n", "if VAR_43:\n", "VAR_50 = host_token.validate_host_token(VAR_43)\n", "assert self.request.remote_addr\n", "if VAR_50:\n", "VAR_44 = ipaddr.ip_from_string(self.request.remote_addr)\n", "VAR_40.peer_host = VAR_50\n", "VAR_40.peer_ip = VAR_44\n", "VAR_40.peer_identity = api.verify_ip_whitelisted(VAR_41, VAR_44, self.\n request.headers)\n", "self.authorization_error(err)\n", "VAR_45 = self.request.headers.get(delegation.HTTP_HEADER)\n", "return\n", "if VAR_45:\n", "VAR_40.current_identity = VAR_40.peer_identity\n", "VAR_40.current_identity = delegation.check_delegation_token(VAR_45, VAR_40.\n peer_identity)\n", "self.authorization_error(api.AuthorizationError('Bad delegation token: %s' %\n exc))\n", "VAR_51 = not VAR_42 and self.request.method in self.xsrf_token_enforce_on\n", "self.authorization_error(err)\n", "VAR_54 = \"\"\"Transient error while validating delegation token.\n%s\"\"\" % exc\n", "if VAR_51 and self.xsrf_token is None:\n", "logging.error(VAR_54)\n", "self.xsrf_token_data = {}\n", "self.abort(500, detail=msg)\n", "if self.xsrf_token is not None:\n", "self.xsrf_token_data = self.verify_xsrf_token()\n", "super(CLASS_2, self).dispatch()\n" ]
[ "def dispatch(self):...\n", "\"\"\"docstring\"\"\"\n", "conf = config.ensure_configured()\n", "auth_context = api.reinitialize_request_cache()\n", "self.response.headers['Content-Security-Policy'] = (\n \"default-src https: 'self' 'unsafe-inline' https://www.google.com https://www.google-analytics.com 'unsafe-eval'\"\n )\n", "self.response.headers['Strict-Transport-Security'\n ] = 'max-age=31536000; includeSubDomains; preload'\n", "if self.frame_options:\n", "self.response.headers['X-Frame-Options'] = self.frame_options\n", "identity = None\n", "for method_func in self.get_auth_methods(conf):\n", "self.auth_method = method_func\n", "identity = method_func(self.request)\n", "self.authentication_error(err)\n", "identity = identity or model.Anonymous\n", "if identity:\n", "return\n", "using_headers_auth = method_func in (oauth_authentication,\n service_to_service_authentication)\n", "host_tok = self.request.headers.get(host_token.HTTP_HEADER)\n", "if host_tok:\n", "validated_host = host_token.validate_host_token(host_tok)\n", "assert self.request.remote_addr\n", "if validated_host:\n", "ip = ipaddr.ip_from_string(self.request.remote_addr)\n", "auth_context.peer_host = validated_host\n", "auth_context.peer_ip = ip\n", "auth_context.peer_identity = api.verify_ip_whitelisted(identity, ip, self.\n request.headers)\n", "self.authorization_error(err)\n", "delegation_tok = self.request.headers.get(delegation.HTTP_HEADER)\n", "return\n", "if delegation_tok:\n", "auth_context.current_identity = auth_context.peer_identity\n", "auth_context.current_identity = delegation.check_delegation_token(\n delegation_tok, auth_context.peer_identity)\n", "self.authorization_error(api.AuthorizationError('Bad delegation token: %s' %\n exc))\n", "need_xsrf_token = (not using_headers_auth and self.request.method in self.\n xsrf_token_enforce_on)\n", "self.authorization_error(err)\n", "msg = \"\"\"Transient error while validating delegation token.\n%s\"\"\" % exc\n", "if need_xsrf_token and self.xsrf_token is None:\n", "logging.error(msg)\n", "self.xsrf_token_data = {}\n", "self.abort(500, detail=msg)\n", "if self.xsrf_token is not None:\n", "self.xsrf_token_data = self.verify_xsrf_token()\n", "super(AuthenticatingHandler, self).dispatch()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Assign'", "Assert'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Condition", "Assign'", "Expr'" ]
[ "def FUNC_8(self, VAR_26):...\n", "\"\"\"docstring\"\"\"\n", "VAR_21 = self.resolve_path(VAR_26, os.path.dirname(VAR_26))\n", "if not VAR_21 or not self.exists(VAR_21):\n", "VAR_24 = self.getfile(VAR_21)\n", "if VAR_24[VAR_2] == VAR_12:\n", "if VAR_24[VAR_2] == VAR_13 and VAR_24[VAR_10]:\n", "return FUNC_14(VAR_24[VAR_10], 'rb').read()\n", "if VAR_24[VAR_2] == VAR_13 and VAR_24[VAR_5] == 0:\n", "return ''\n" ]
[ "def file_contents(self, target):...\n", "\"\"\"docstring\"\"\"\n", "path = self.resolve_path(target, os.path.dirname(target))\n", "if not path or not self.exists(path):\n", "f = self.getfile(path)\n", "if f[A_TYPE] == T_DIR:\n", "if f[A_TYPE] == T_FILE and f[A_REALFILE]:\n", "return open(f[A_REALFILE], 'rb').read()\n", "if f[A_TYPE] == T_FILE and f[A_SIZE] == 0:\n", "return ''\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Return'", "Condition", "Return'" ]
[ "def FUNC_3():...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = {}\n", "for arg in request.args:\n", "VAR_8 = re.findall('_oc_(.*)', arg)\n", "return VAR_7\n", "if VAR_8:\n", "VAR_7[VAR_8[0]] = request.args.get(arg), request.args.get('_od_' + VAR_8[0])\n" ]
[ "def get_order_args():...\n", "\"\"\"docstring\"\"\"\n", "orders = {}\n", "for arg in request.args:\n", "re_match = re.findall('_oc_(.*)', arg)\n", "return orders\n", "if re_match:\n", "orders[re_match[0]] = request.args.get(arg), request.args.get('_od_' +\n re_match[0])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 4 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Assign'", "Return'", "Condition", "Assign'" ]
[ "def FUNC_6(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_15 = {}\n", "for idx, VAR_25 in enumerate(self.nodes):\n", "VAR_15[VAR_25] = idx\n", "return VAR_15\n" ]
[ "def getNodeNameMap(self):...\n", "\"\"\"docstring\"\"\"\n", "name_map = {}\n", "for idx, v in enumerate(self.nodes):\n", "name_map[v] = idx\n", "return name_map\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Assign'", "Return'" ]
[ "def FUNC_5(VAR_4, *VAR_5, **VAR_6):...\n", "" ]
[ "def dummy_handler(remote, *args, **kargs):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_1(VAR_10):...\n", "if VAR_9 is None:\n", "VAR_9 = Browser()\n", "return VAR_9\n", "VAR_54 = VAR_3[VAR_10]['username']\n", "VAR_55 = VAR_3[VAR_10]['password']\n", "VAR_53 = CWSLoginRequest(VAR_9, VAR_54, VAR_55, base_url=CWS_BASE_URL)\n", "VAR_9.login(VAR_53)\n" ]
[ "def get_cws_browser(user_id):...\n", "if cws_browser is None:\n", "cws_browser = Browser()\n", "return cws_browser\n", "username = created_users[user_id]['username']\n", "password = created_users[user_id]['password']\n", "lr = CWSLoginRequest(cws_browser, username, password, base_url=CWS_BASE_URL)\n", "cws_browser.login(lr)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_0(self):...\n", "VAR_8 = self.request.query_params.get('search', '')\n", "VAR_9 = ' & '.join(VAR_8.split())\n", "return Locality.objects.raw(VAR_1.format(VAR_9=tokens))\n" ]
[ "def get_queryset(self):...\n", "search = self.request.query_params.get('search', '')\n", "tokens = ' & '.join(search.split())\n", "return Locality.objects.raw(locality_list_search_query.format(tokens=tokens))\n" ]
[ 0, 0, 0, 4 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_7(self):...\n", "url_helper.urllib2.urlopen(mox.IgnoreArg(), mox.IgnoreArg(), timeout=mox.\n IgnoreArg()).AndRaise(urllib2.URLError('url'))\n", "time.sleep(mox.IgnoreArg())\n", "VAR_2 = 'True'\n", "url_helper.urllib2.urlopen(mox.IgnoreArg(), mox.IgnoreArg(), timeout=mox.\n IgnoreArg()).AndReturn(StringIO.StringIO(VAR_2))\n", "self._mox.ReplayAll()\n", "self.assertEqual(url_helper.UrlOpen('url', max_tries=2), VAR_2)\n", "self._mox.VerifyAll()\n" ]
[ "def testUrlOpenSuccessAfterFailure(self):...\n", "url_helper.urllib2.urlopen(mox.IgnoreArg(), mox.IgnoreArg(), timeout=mox.\n IgnoreArg()).AndRaise(urllib2.URLError('url'))\n", "time.sleep(mox.IgnoreArg())\n", "response = 'True'\n", "url_helper.urllib2.urlopen(mox.IgnoreArg(), mox.IgnoreArg(), timeout=mox.\n IgnoreArg()).AndReturn(StringIO.StringIO(response))\n", "self._mox.ReplayAll()\n", "self.assertEqual(url_helper.UrlOpen('url', max_tries=2), response)\n", "self._mox.VerifyAll()\n" ]
[ 0, 5, 0, 0, 5, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_6(self, VAR_33):...\n", "return VAR_33 != 'off' and bool(VAR_33)\n" ]
[ "def run(self, val):...\n", "return val != 'off' and bool(val)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __hash__(self):...\n", "return self.name.__hash__()\n" ]
[ "def __hash__(self):...\n", "return self.name.__hash__()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_14(self):...\n", "self.run_test_case(self.scenario.delete_load_balancer(VAR_5=True),\n max_retries=5)\n" ]
[ "def test_y_delete_load_balancer_vpc(self):...\n", "self.run_test_case(self.scenario.delete_load_balancer(use_vpc=True),\n max_retries=5)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def __init__(self, VAR_4, VAR_5):...\n", "self.prompts = ['P2020>']\n", "self.targets = devices['p2020']\n", "super().__init__(VAR_4, VAR_5)\n" ]
[ "def __init__(self, database, options):...\n", "self.prompts = ['P2020>']\n", "self.targets = devices['p2020']\n", "super().__init__(database, options)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_41(self):...\n", "for VAR_40 in self._names:\n", "yield VAR_40, getattr(self, VAR_40)\n" ]
[ "def items(self):...\n", "for name in self._names:\n", "yield name, getattr(self, name)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "For", "Expr'" ]
[ "def FUNC_4(self):...\n", "VAR_0 = Library(id='org/repo', metadata=\n '{\"full_name\": \"NSS Bob\", \"stargazers_count\": 420, \"subscribers_count\": 419, \"forks\": 418, \"updated_at\": \"2011-8-10T13:47:12Z\"}'\n , contributor_count=417)\n", "VAR_2 = Version(parent=library.key, id='v1.0.0', sha='lol')\n", "VAR_0.put()\n", "VAR_2.put()\n", "self.respond_to('https://raw.githubusercontent.com/org/repo/v1.0.0/README.md',\n 'README')\n", "self.respond_to('https://raw.githubusercontent.com/org/repo/v1.0.0/bower.json',\n '{}')\n", "self.respond_to_github('https://api.github.com/markdown', '<html>README</html>'\n )\n", "VAR_3 = self.app.get(util.ingest_version_task('org', 'repo', 'v1.0.0'))\n", "self.assertEqual(VAR_3.status_int, 200)\n", "VAR_2 = VAR_2.key.get()\n", "self.assertIsNone(VAR_2.error)\n", "VAR_4 = ndb.Key(Library, 'org/repo', Version, 'v1.0.0', Content, 'readme').get(\n )\n", "self.assertEqual(VAR_4.content, 'README')\n", "VAR_5 = ndb.Key(Library, 'org/repo', Version, 'v1.0.0', Content, 'readme.html'\n ).get()\n", "self.assertEqual(VAR_5.content, '<html>README</html>')\n", "VAR_6 = ndb.Key(Library, 'org/repo', Version, 'v1.0.0', Content, 'bower').get()\n", "self.assertEqual(VAR_6.content, '{}')\n" ]
[ "def test_ingest_version(self):...\n", "library = Library(id='org/repo', metadata=\n '{\"full_name\": \"NSS Bob\", \"stargazers_count\": 420, \"subscribers_count\": 419, \"forks\": 418, \"updated_at\": \"2011-8-10T13:47:12Z\"}'\n , contributor_count=417)\n", "version = Version(parent=library.key, id='v1.0.0', sha='lol')\n", "library.put()\n", "version.put()\n", "self.respond_to('https://raw.githubusercontent.com/org/repo/v1.0.0/README.md',\n 'README')\n", "self.respond_to('https://raw.githubusercontent.com/org/repo/v1.0.0/bower.json',\n '{}')\n", "self.respond_to_github('https://api.github.com/markdown', '<html>README</html>'\n )\n", "response = self.app.get(util.ingest_version_task('org', 'repo', 'v1.0.0'))\n", "self.assertEqual(response.status_int, 200)\n", "version = version.key.get()\n", "self.assertIsNone(version.error)\n", "readme = ndb.Key(Library, 'org/repo', Version, 'v1.0.0', Content, 'readme'\n ).get()\n", "self.assertEqual(readme.content, 'README')\n", "readme_html = ndb.Key(Library, 'org/repo', Version, 'v1.0.0', Content,\n 'readme.html').get()\n", "self.assertEqual(readme_html.content, '<html>README</html>')\n", "bower = ndb.Key(Library, 'org/repo', Version, 'v1.0.0', Content, 'bower').get()\n", "self.assertEqual(bower.content, '{}')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "import sqlobject\n", "import vdm.sqlobject.base as vdmbase\n", "import vdm.base as vdmbase\n", "VAR_16 = 'name'\n", "from vdm.sqlobject.base import State\n", "from vdm.base import State\n", "VAR_0 = sqlobject.UnicodeCol(alternateID=True)\n", "VAR_1 = sqlobject.MultipleJoin('Package')\n", "VAR_2 = sqlobject.ForeignKey('Package', cascade=True)\n", "VAR_3 = sqlobject.UnicodeCol(default=None)\n", "VAR_4 = sqlobject.UnicodeCol(default=None)\n", "VAR_5 = sqlobject.UnicodeCol(default=None)\n", "VAR_6 = sqlobject.ForeignKey('License', default=None)\n", "VAR_7 = sqlobject.UnicodeCol(default=None)\n", "VAR_2 = sqlobject.ForeignKey('Tag', cascade=True)\n", "VAR_2 = sqlobject.ForeignKey('PackageTag', cascade=True)\n", "VAR_8 = CLASS_1\n", "VAR_9 = vdmbase.get_attribute_names(VAR_8)\n", "VAR_0 = sqlobject.UnicodeCol(alternateID=True)\n", "VAR_10 = [('tags', 'ckan.models.package', 'Tag', 'PackageTag')]\n", "def FUNC_0(self, VAR_11):...\n", "VAR_14 = self.revision.model.tags.get(VAR_11)\n", "VAR_14 = self.transaction.model.tags.create(VAR_0=tagname)\n", "self.tags.create(VAR_14=tag)\n", "VAR_8 = CLASS_2\n", "VAR_0 = sqlobject.UnicodeCol(alternateID=True)\n", "VAR_9 = vdmbase.get_attribute_names(VAR_8)\n", "VAR_10 = [('packages', 'ckan.models.package', 'Package', 'PackageTag')]\n", "@classmethod...\n", "VAR_17 = str(VAR_12)\n", "VAR_18 = \"UPPER(tag.name) LIKE UPPER('%%%s%%')\" % VAR_17\n", "return self.select(VAR_18)\n" ]
[ "import sqlobject\n", "import vdm.sqlobject.base as vdmbase\n", "import vdm.base as vdmbase\n", "_defaultOrder = 'name'\n", "from vdm.sqlobject.base import State\n", "from vdm.base import State\n", "name = sqlobject.UnicodeCol(alternateID=True)\n", "packages = sqlobject.MultipleJoin('Package')\n", "base = sqlobject.ForeignKey('Package', cascade=True)\n", "title = sqlobject.UnicodeCol(default=None)\n", "url = sqlobject.UnicodeCol(default=None)\n", "download_url = sqlobject.UnicodeCol(default=None)\n", "license = sqlobject.ForeignKey('License', default=None)\n", "notes = sqlobject.UnicodeCol(default=None)\n", "base = sqlobject.ForeignKey('Tag', cascade=True)\n", "base = sqlobject.ForeignKey('PackageTag', cascade=True)\n", "sqlobj_version_class = PackageRevision\n", "versioned_attributes = vdmbase.get_attribute_names(sqlobj_version_class)\n", "name = sqlobject.UnicodeCol(alternateID=True)\n", "m2m = [('tags', 'ckan.models.package', 'Tag', 'PackageTag')]\n", "def add_tag_by_name(self, tagname):...\n", "tag = self.revision.model.tags.get(tagname)\n", "tag = self.transaction.model.tags.create(name=tagname)\n", "self.tags.create(tag=tag)\n", "sqlobj_version_class = TagRevision\n", "name = sqlobject.UnicodeCol(alternateID=True)\n", "versioned_attributes = vdmbase.get_attribute_names(sqlobj_version_class)\n", "m2m = [('packages', 'ckan.models.package', 'Package', 'PackageTag')]\n", "@classmethod...\n", "text_query_str = str(text_query)\n", "sql_query = \"UPPER(tag.name) LIKE UPPER('%%%s%%')\" % text_query_str\n", "return self.select(sql_query)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4 ]
[ "Import'", "Import'", "Import'", "Assign'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_1(VAR_2, VAR_3):...\n", "VAR_15 = set()\n", "for level in VAR_2:\n", "VAR_24 = level[0]\n", "return VAR_15\n", "VAR_11 = level[1]\n", "VAR_15 |= VAR_0[VAR_24].get_bookmarks(VAR_11, VAR_3)\n" ]
[ "def get_rule_bookmarks(levellist, doc):...\n", "ret = set()\n", "for level in levellist:\n", "leveltype = level[0]\n", "return ret\n", "levelfields = level[1]\n", "ret |= FnLevel[leveltype].get_bookmarks(levelfields, doc)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Return'", "Assign'", "AugAssign'" ]
[ "def FUNC_6(self, VAR_47):...\n", "if VAR_101.user_is_admin:\n", "return True\n", "if VAR_101.user_is_loggedin:\n", "VAR_18 = Thing._by_fullname(VAR_47, data=True)\n", "abort(403, 'forbidden')\n", "VAR_109 = VAR_18.subreddit_slow\n", "if VAR_109.can_ban(VAR_101.user):\n", "return True\n" ]
[ "def run(self, thing_name):...\n", "if c.user_is_admin:\n", "return True\n", "if c.user_is_loggedin:\n", "item = Thing._by_fullname(thing_name, data=True)\n", "abort(403, 'forbidden')\n", "subreddit = item.subreddit_slow\n", "if subreddit.can_ban(c.user):\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Assign'", "Expr'", "Assign'", "Condition", "Return'" ]
[ "def FUNC_35(self, VAR_14):...\n", "VAR_20 = self.get_related_model(VAR_14)\n", "return self.session.query(VAR_20).all()\n" ]
[ "def query_model_relation(self, col_name):...\n", "model = self.get_related_model(col_name)\n", "return self.session.query(model).all()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_17(VAR_5):...\n", "VAR_10 = 'https://api.github.com/repos/{}/git/refs/heads'\n", "VAR_10 = VAR_10.format(VAR_5['fork_fullname'])\n", "VAR_8 = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN']}\n", "VAR_9 = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n", "VAR_41 = None\n", "VAR_11 = requests.get(VAR_10, VAR_8=headers, VAR_9=auth)\n", "for ref in VAR_11.json():\n", "if ref['ref'].split('/')[-1] == VAR_5['target_repo_branch']:\n", "VAR_10 = 'https://api.github.com/repos/{}/git/refs'\n", "VAR_41 = ref['object']['sha']\n", "VAR_10 = VAR_10.format(VAR_5['fork_fullname'])\n", "VAR_5['new_branch'] = '{}-pep8-patch'.format(VAR_5['target_repo_branch'])\n", "VAR_40 = {'ref': 'refs/heads/{}'.format(VAR_5['new_branch']), 'sha': VAR_41}\n", "VAR_11 = requests.post(VAR_10, json=request_json, VAR_8=headers, VAR_9=auth)\n", "if VAR_11.status_code != 200:\n", "VAR_5['error'] = 'Could not create new branch in the fork'\n" ]
[ "def create_new_branch(data):...\n", "url = 'https://api.github.com/repos/{}/git/refs/heads'\n", "url = url.format(data['fork_fullname'])\n", "headers = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN']}\n", "auth = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n", "sha = None\n", "r = requests.get(url, headers=headers, auth=auth)\n", "for ref in r.json():\n", "if ref['ref'].split('/')[-1] == data['target_repo_branch']:\n", "url = 'https://api.github.com/repos/{}/git/refs'\n", "sha = ref['object']['sha']\n", "url = url.format(data['fork_fullname'])\n", "data['new_branch'] = '{}-pep8-patch'.format(data['target_repo_branch'])\n", "request_json = {'ref': 'refs/heads/{}'.format(data['new_branch']), 'sha': sha}\n", "r = requests.post(url, json=request_json, headers=headers, auth=auth)\n", "if r.status_code != 200:\n", "data['error'] = 'Could not create new branch in the fork'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'" ]
[ "def FUNC_1():...\n", "return frappe.db.sql('string', as_dict=1)\n" ]
[ "def get_children():...\n", "return frappe.db.sql(\n \"\"\"select route as name,\n\t\ttitle from `tabBlog Category`\n\t\twhere published = 1\n\t\tand exists (select name from `tabBlog Post`\n\t\t\twhere `tabBlog Post`.blog_category=`tabBlog Category`.name and published=1)\n\t\torder by title asc\"\"\"\n , as_dict=1)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_5(self):...\n", "super().halt_dut('halt', ['target state: halted'] * 2)\n" ]
[ "def halt_dut(self):...\n", "super().halt_dut('halt', ['target state: halted'] * 2)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_5(self):...\n", "VAR_24 = None, None, None\n", "for i, step in enumerate(self._r_steps_, 1):\n", "VAR_31, VAR_3 = step\n", "return VAR_24\n", "if VAR_3 != VAR_0:\n", "VAR_24 = i, VAR_31, VAR_3\n" ]
[ "def _last_found(self):...\n", "last_found_step = None, None, None\n", "for i, step in enumerate(self._r_steps_, 1):\n", "desc, roamer = step\n", "return last_found_step\n", "if roamer != MISSING:\n", "last_found_step = i, desc, roamer\n" ]
[ 0, 1, 0, 1, 0, 1, 1 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Return'", "Condition", "Assign'" ]
[ "def FUNC_21(VAR_14, VAR_15):...\n", "if not VAR_15 in VAR_1:\n", "VAR_1[VAR_15] = CachedPoints(VAR_13, VAR_14, VAR_1['content'])\n", "return VAR_1[VAR_15]\n" ]
[ "def points(user, key):...\n", "if not key in context:\n", "context[key] = CachedPoints(instance, user, context['content'])\n", "return context[key]\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_13(self, VAR_64):...\n", "self._onsuccess = VAR_64\n" ]
[ "def onsuccess(self, func):...\n", "self._onsuccess = func\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_3(self):...\n", "self.client.login(username=self.tester.username, password='password')\n", "VAR_1 = self.client.get(reverse('core-views-index'))\n", "self.assertRedirects(VAR_1, reverse('tcms-recent', args=[self.tester.\n username]), target_status_code=HTTPStatus.OK)\n" ]
[ "def test_when_logged_in_index_page_redirects_to_dashboard(self):...\n", "self.client.login(username=self.tester.username, password='password')\n", "response = self.client.get(reverse('core-views-index'))\n", "self.assertRedirects(response, reverse('tcms-recent', args=[self.tester.\n username]), target_status_code=HTTPStatus.OK)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'" ]
[ "@VAR_19.route('/test')...\n", "return 'OK'\n" ]
[ "@test_views.route('/test')...\n", "return 'OK'\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_21(VAR_21):...\n", "\"\"\"docstring\"\"\"\n", "return json.load(VAR_0)\n", "VAR_0.seek(0)\n", "import yaml\n", "return yaml.load(VAR_0)\n" ]
[ "def _load_configfile(configpath):...\n", "\"\"\"docstring\"\"\"\n", "return json.load(f)\n", "f.seek(0)\n", "import yaml\n", "return yaml.load(f)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'", "Expr'", "Import'", "Return'" ]
[ "def FUNC_13(VAR_9, VAR_7, VAR_11, VAR_10):...\n", "if VAR_11 == 'POST':\n", "return CLASS_0.session.post(FUNC_2(VAR_7), VAR_9=ToUtf8Json(data), headers=\n _HEADERS, VAR_10=timeout)\n", "if VAR_11 == 'GET':\n", "return CLASS_0.session.get(FUNC_2(VAR_7), headers=_HEADERS, VAR_10=timeout)\n" ]
[ "def SendRequest(data, handler, method, timeout):...\n", "if method == 'POST':\n", "return BaseRequest.session.post(_BuildUri(handler), data=ToUtf8Json(data),\n headers=_HEADERS, timeout=timeout)\n", "if method == 'GET':\n", "return BaseRequest.session.get(_BuildUri(handler), headers=_HEADERS,\n timeout=timeout)\n" ]
[ 0, 0, 7, 0, 7 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Return'" ]
[ "from __future__ import absolute_import, division, print_function\n", "__metaclass__ = type\n", "from ansible.inventory.group import Group\n", "from ansible.utils.vars import combine_vars, get_unique_id\n", "__all__ = ['Host']\n", "\"\"\" a single ansible host \"\"\"\n", "def __getstate__(self):...\n", "return self.serialize()\n" ]
[ "from __future__ import absolute_import, division, print_function\n", "__metaclass__ = type\n", "from ansible.inventory.group import Group\n", "from ansible.utils.vars import combine_vars, get_unique_id\n", "__all__ = ['Host']\n", "\"\"\" a single ansible host \"\"\"\n", "def __getstate__(self):...\n", "return self.serialize()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Assign'", "ImportFrom'", "ImportFrom'", "Assign'", "Expr'", "FunctionDef'", "Return'" ]
[ "def FUNC_31(self):...\n", "if VAR_54.flags.in_install:\n", "return\n", "if self.meta.issingle:\n", "return\n", "VAR_43 = 'varchar', 'int', 'bigint'\n", "for VAR_16, VAR_9 in iteritems(self.get_valid_dict()):\n", "VAR_25 = self.meta.get_field(VAR_16)\n", "if not VAR_25 or VAR_25.fieldtype == 'Check':\n", "VAR_58 = type_map[VAR_25.fieldtype][0] or None\n", "VAR_59 = type_map[VAR_25.fieldtype][1] or None\n", "if VAR_25 and VAR_25.fieldtype in type_map and VAR_58 in VAR_43:\n", "VAR_69 = cint(VAR_25.get('length')) or cint(VAR_59)\n", "if len(cstr(VAR_9)) > VAR_69:\n", "if self.parentfield and self.idx:\n", "VAR_73 = _('{0}, Row {1}').format(_(self.doctype), self.idx)\n", "VAR_73 = '{0} {1}'.format(_(self.doctype), self.name)\n", "VAR_54.throw(_(\n \"{0}: '{1}' ({3}) will get truncated, as max characters allowed is {2}\"\n ).format(VAR_73, _(VAR_25.label), VAR_69, VAR_9), VAR_54.\n CharacterLengthExceededError, title=_('Value too big'))\n" ]
[ "def _validate_length(self):...\n", "if frappe.flags.in_install:\n", "return\n", "if self.meta.issingle:\n", "return\n", "column_types_to_check_length = 'varchar', 'int', 'bigint'\n", "for fieldname, value in iteritems(self.get_valid_dict()):\n", "df = self.meta.get_field(fieldname)\n", "if not df or df.fieldtype == 'Check':\n", "column_type = type_map[df.fieldtype][0] or None\n", "default_column_max_length = type_map[df.fieldtype][1] or None\n", "if df and df.fieldtype in type_map and column_type in column_types_to_check_length:\n", "max_length = cint(df.get('length')) or cint(default_column_max_length)\n", "if len(cstr(value)) > max_length:\n", "if self.parentfield and self.idx:\n", "reference = _('{0}, Row {1}').format(_(self.doctype), self.idx)\n", "reference = '{0} {1}'.format(_(self.doctype), self.name)\n", "frappe.throw(_(\n \"{0}: '{1}' ({3}) will get truncated, as max characters allowed is {2}\"\n ).format(reference, _(df.label), max_length, value), frappe.\n CharacterLengthExceededError, title=_('Value too big'))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Return'", "Assign'", "For", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Expr'" ]
[ "@classmethod...\n", "" ]
[ "@classmethod...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "def FUNC_32(VAR_39, VAR_49=None):...\n", "VAR_39.returncode = VAR_1\n", "json.dump(VAR_22, VAR_32)\n", "return 0\n" ]
[ "def wait(self2, timeout=None):...\n", "self2.returncode = returncode\n", "json.dump(result, f)\n", "return 0\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_3(VAR_4, VAR_5):...\n", "if VAR_4.get('token'):\n", "return FUNC_0(VAR_4.get('token'))\n", "if VAR_5:\n", "return FUNC_1(VAR_5.email()) or FUNC_2(VAR_5.user_id())\n" ]
[ "def check_request(request, user):...\n", "if request.get('token'):\n", "return check_token(request.get('token'))\n", "if user:\n", "return check_email(user.email()) or check_user_id(user.user_id())\n" ]
[ 0, 5, 5, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Return'" ]