lines: sequencelengths [ 1, 444 ]
raw_lines: sequencelengths [ 1, 444 ]
label: sequencelengths [ 1, 444 ]
type: sequencelengths [ 1, 444 ]
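Each record below holds four parallel, index-aligned sequences: lines (identifier-obfuscated source lines), raw_lines (the original lines), label (what appears to be a per-line integer flag), and type (a per-line AST node-type tag). A minimal Python sketch of how a consumer might walk one record follows; the sample record is hypothetical and only illustrates the alignment, it is not drawn from the data.

# Minimal sketch, assuming the four fields named in the header above are
# index-aligned per record. The sample record is hypothetical, not taken
# from the dataset; real records hold 1-444 entries per field.
record = {
    "lines": ["def FUNC_0(VAR_0):...\n", "return VAR_0\n"],
    "raw_lines": ["def identity(x):...\n", "return x\n"],
    "label": [0, 0],
    "type": ["FunctionDef'", "Return'"],
}

# Because the sequences are index-aligned, zip() recovers per-line tuples
# of (obfuscated line, raw line, label, node type).
for obf, raw, label, node_type in zip(
        record["lines"], record["raw_lines"], record["label"], record["type"]):
    print(f"{node_type:<14} label={label} {raw.rstrip()!r} <- {obf.rstrip()!r}")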
[ "def __init__(self):...\n", "self.license_files = 0\n", "self.exception_files = 0\n", "self.licenses = []\n", "self.exceptions = {}\n" ]
[ "def __init__(self):...\n", "self.license_files = 0\n", "self.exception_files = 0\n", "self.licenses = []\n", "self.exceptions = {}\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_3(VAR_9, VAR_10):...\n", "VAR_15 = VAR_9.search_fields and VAR_9.search_fields.split(',') or []\n", "VAR_16 = [VAR_9.title_field\n ] if VAR_9.title_field and VAR_9.title_field not in VAR_15 else []\n", "VAR_15 = ['name'] + VAR_15 + VAR_16\n", "if not VAR_10 in VAR_15:\n", "VAR_15 = VAR_15 + [VAR_10]\n", "return VAR_15\n" ]
[ "def get_std_fields_list(meta, key):...\n", "sflist = meta.search_fields and meta.search_fields.split(',') or []\n", "title_field = [meta.title_field\n ] if meta.title_field and meta.title_field not in sflist else []\n", "sflist = ['name'] + sflist + title_field\n", "if not key in sflist:\n", "sflist = sflist + [key]\n", "return sflist\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_13():...\n", "\"\"\"docstring\"\"\"\n", "log.info('Stopping worker(s)')\n", "for pid in VAR_20:\n", "if pid is not None:\n", "os.kill(pid, VAR_7.SIGTERM)\n" ]
[ "def kill_children():...\n", "\"\"\"docstring\"\"\"\n", "log.info('Stopping worker(s)')\n", "for pid in child_pids:\n", "if pid is not None:\n", "os.kill(pid, signal.SIGTERM)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "For", "Condition", "Expr'" ]
[ "def FUNC_31():...\n", "VAR_24 = vim.eval('tagfiles()')\n", "VAR_25 = VAR_0.getcwd()\n", "return [VAR_0.path.join(VAR_25, x) for x in VAR_24]\n" ]
[ "def GetTagFiles():...\n", "tag_files = vim.eval('tagfiles()')\n", "current_working_directory = os.getcwd()\n", "return [os.path.join(current_working_directory, x) for x in tag_files]\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "from dal import autocomplete\n", "from bootstrap_datepicker_plus import DatePickerInput\n", "from django import forms\n", "from django.forms import BaseInlineFormSet\n", "from django.utils.translation import ugettext_lazy as _\n", "from dashboard.models import *\n", "from django.db.models import F\n", "from dashboard.utils import get_extracted_models\n", "VAR_5 = 'required'\n", "VAR_22 = DataGroup\n", "VAR_27 = ['name', 'description', 'url', 'group_type', 'downloaded_by',\n 'downloaded_at', 'download_script', 'data_source', 'csv']\n", "VAR_28 = {'downloaded_at': DatePickerInput()}\n", "VAR_29 = {'csv': _('Register Records CSV File'), 'url': _('URL')}\n", "def __init__(self, *VAR_6, **VAR_7):...\n", "VAR_30 = Script.objects.filter(script_type='DL')\n", "self.user = VAR_7.pop('user', None)\n", "super(CLASS_0, self).__init__(*VAR_6, **kwargs)\n", "self.fields['csv'].widget.attrs.update({'accept': '.csv'})\n", "self.fields['download_script'].queryset = VAR_30\n", "VAR_5 = 'required'\n", "VAR_8 = forms.ModelChoiceField(queryset=Script.objects.filter(script_type=\n 'EX'), label='Extraction Script')\n", "VAR_9 = forms.ModelChoiceField(queryset=WeightFractionType.objects.all(),\n label='Weight Fraction Type', initial='1')\n", "VAR_10 = forms.FileField(label='Extracted Text CSV File')\n", "def __init__(self, *VAR_6, **VAR_7):...\n", "self.dg_type = VAR_7.pop('dg_type', 0)\n", "self.user = VAR_7.pop('user', None)\n", "super(CLASS_1, self).__init__(*VAR_6, **kwargs)\n", "self.fields['weight_fraction_type'].widget.attrs.update({'style':\n 'height:2.75rem; !important'})\n", "self.fields['script_selection'].widget.attrs.update({'style':\n 'height:2.75rem; !important'})\n", "self.fields['extract_file'].widget.attrs.update({'accept': '.csv'})\n", "if self.dg_type in ['FU', 'CP']:\n", "self.collapsed = True\n", "VAR_5 = 'required'\n", "VAR_8 = forms.ModelChoiceField(queryset=Script.objects.filter(script_type=\n 'DC'), label='Data Cleaning Script', required=True)\n", "VAR_11 = forms.FileField(label='Clean Composition Data CSV File', required=True\n )\n", "def __init__(self, *VAR_6, **VAR_7):...\n", "super(CLASS_2, self).__init__(*VAR_6, **kwargs)\n", "self.fields['script_selection'].widget.attrs.update({'style':\n 'height:2.75rem; !important'})\n", "self.fields['clean_comp_data_file'].widget.attrs.update({'accept': '.csv'})\n", "self.collapsed = True\n", "VAR_5 = 'required'\n", "VAR_22 = DataSource\n", "VAR_27 = ['title', 'url', 'estimated_records', 'state', 'priority',\n 'description']\n", "VAR_22 = DataSource\n", "VAR_27 = ['priority']\n", "def __init__(self, *VAR_6, **VAR_7):...\n", "super(CLASS_4, self).__init__(*VAR_6, **kwargs)\n", "self.fields['priority'].label = ''\n", "self.fields['priority'].widget.attrs.update({'onchange': 'form.submit();'})\n", "VAR_22 = QANotes\n", "VAR_27 = ['qa_notes']\n", "VAR_28 = {'qa_notes': forms.Textarea}\n", "VAR_29 = {'qa_notes': _('QA Notes (required if approving edited records)')}\n", "VAR_5 = 'required'\n", "VAR_22 = ExtractedText\n", "VAR_27 = ['prod_name', 'data_document', 'qa_checked']\n", "VAR_5 = 'required'\n", "VAR_12 = forms.ModelChoiceField(queryset=DocumentType.objects.all(), label=\n 'Data Document Type', required=True)\n", "VAR_13 = forms.CharField()\n", "VAR_22 = Product\n", "VAR_27 = ['title', 'manufacturer', 'brand_name', 'upc', 'size', 'color']\n", "def __init__(self, *VAR_6, **VAR_7):...\n", "super(CLASS_7, self).__init__(*VAR_6, **kwargs)\n", "self.fields['return_url'].widget = forms.HiddenInput()\n", "VAR_5 = 'required'\n", "VAR_22 = 
Product\n", "VAR_27 = ['title', 'manufacturer', 'brand_name', 'size', 'color',\n 'model_number', 'short_description', 'long_description']\n", "VAR_4 = 'title', 'long_description'\n", "def __init__(self, *VAR_6, **VAR_7):...\n", "super(CLASS_8, self).__init__(*VAR_6, **kwargs)\n", "for f in self.fields:\n", "self.fields[f].disabled = True\n", "VAR_14 = forms.ModelChoiceField(queryset=PUC.objects.all(), label=\n 'Category', widget=autocomplete.ModelSelect2(url='puc-autocomplete',\n attrs={'data-minimum-input-length': 3}))\n", "VAR_22 = ProductToPUC\n", "VAR_27 = ['puc']\n", "VAR_22 = ExtractedHabitsAndPracticesToPUC\n", "VAR_27 = ['puc']\n", "VAR_15 = forms.CharField(label='Product Titles', widget=forms.HiddenInput(),\n required=True)\n", "VAR_22 = ProductToPUC\n", "VAR_27 = ['puc', 'id_pks']\n", "VAR_22 = ProductToPUC\n", "VAR_27 = ['puc']\n", "def __init__(self, *VAR_6, **VAR_7):...\n", "super(CLASS_14, self).__init__(*VAR_6, **kwargs)\n", "VAR_31 = 'Select PUC for Attribute to Assign to Selected Products'\n", "self.fields['puc'].label = VAR_31\n", "self.fields['puc'].widget.attrs['onchange'] = 'form.submit();'\n", "VAR_5 = 'required'\n", "VAR_16 = forms.ModelChoiceField(queryset=PUCTag.objects.none(), label=\n 'Attribute')\n", "VAR_15 = forms.CharField(label='Product Titles', widget=forms.HiddenInput())\n", "VAR_22 = ProductToPUC\n", "VAR_27 = ['tag', 'id_pks']\n", "def __init__(self, *VAR_6, **VAR_7):...\n", "super(CLASS_15, self).__init__(*VAR_6, **kwargs)\n", "VAR_31 = 'Select Attribute to Assign to Selected Products'\n", "self.fields['tag'].label = VAR_31\n", "VAR_22 = ExtractedText\n", "VAR_27 = ['prod_name', 'doc_date', 'rev_num']\n", "VAR_28 = {'data_document': forms.HiddenInput(), 'extraction_script': forms.\n HiddenInput()}\n", "VAR_22 = ExtractedCPCat\n", "VAR_27 = ['doc_date', 'cat_code', 'description_cpcat', 'cpcat_sourcetype']\n", "VAR_27 = CLASS_17.Meta.fields + ['prod_name', 'doc_date', 'rev_num',\n 'cpcat_code']\n", "VAR_22 = ExtractedHHDoc\n", "VAR_27 = ['hhe_report_number', 'study_location', 'naics_code',\n 'sampling_date', 'population_gender', 'population_age',\n 'population_other', 'occupation', 'facility']\n", "VAR_27 = CLASS_19.Meta.fields + ['prod_name', 'doc_date', 'rev_num']\n", "VAR_22 = DataDocument\n", "VAR_27 = ['document_type']\n", "def __init__(self, *VAR_6, **VAR_7):...\n", "super(CLASS_21, self).__init__(*VAR_6, **kwargs)\n", "self.fields['document_type'].label = ''\n", "self.fields['document_type'].widget.attrs.update({'onchange': 'form.submit();'}\n )\n", "def FUNC_0(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "if not VAR_0.type in ['FU', 'CO', 'CP']:\n", "return False\n", "if VAR_0.all_matched() and not VAR_0.all_extracted():\n", "return CLASS_1(dg_type=dg.type)\n", "return False\n" ]
[ "from dal import autocomplete\n", "from bootstrap_datepicker_plus import DatePickerInput\n", "from django import forms\n", "from django.forms import BaseInlineFormSet\n", "from django.utils.translation import ugettext_lazy as _\n", "from dashboard.models import *\n", "from django.db.models import F\n", "from dashboard.utils import get_extracted_models\n", "required_css_class = 'required'\n", "model = DataGroup\n", "fields = ['name', 'description', 'url', 'group_type', 'downloaded_by',\n 'downloaded_at', 'download_script', 'data_source', 'csv']\n", "widgets = {'downloaded_at': DatePickerInput()}\n", "labels = {'csv': _('Register Records CSV File'), 'url': _('URL')}\n", "def __init__(self, *args, **kwargs):...\n", "qs = Script.objects.filter(script_type='DL')\n", "self.user = kwargs.pop('user', None)\n", "super(DataGroupForm, self).__init__(*args, **kwargs)\n", "self.fields['csv'].widget.attrs.update({'accept': '.csv'})\n", "self.fields['download_script'].queryset = qs\n", "required_css_class = 'required'\n", "script_selection = forms.ModelChoiceField(queryset=Script.objects.filter(\n script_type='EX'), label='Extraction Script')\n", "weight_fraction_type = forms.ModelChoiceField(queryset=WeightFractionType.\n objects.all(), label='Weight Fraction Type', initial='1')\n", "extract_file = forms.FileField(label='Extracted Text CSV File')\n", "def __init__(self, *args, **kwargs):...\n", "self.dg_type = kwargs.pop('dg_type', 0)\n", "self.user = kwargs.pop('user', None)\n", "super(ExtractionScriptForm, self).__init__(*args, **kwargs)\n", "self.fields['weight_fraction_type'].widget.attrs.update({'style':\n 'height:2.75rem; !important'})\n", "self.fields['script_selection'].widget.attrs.update({'style':\n 'height:2.75rem; !important'})\n", "self.fields['extract_file'].widget.attrs.update({'accept': '.csv'})\n", "if self.dg_type in ['FU', 'CP']:\n", "self.collapsed = True\n", "required_css_class = 'required'\n", "script_selection = forms.ModelChoiceField(queryset=Script.objects.filter(\n script_type='DC'), label='Data Cleaning Script', required=True)\n", "clean_comp_data_file = forms.FileField(label=\n 'Clean Composition Data CSV File', required=True)\n", "def __init__(self, *args, **kwargs):...\n", "super(CleanCompDataForm, self).__init__(*args, **kwargs)\n", "self.fields['script_selection'].widget.attrs.update({'style':\n 'height:2.75rem; !important'})\n", "self.fields['clean_comp_data_file'].widget.attrs.update({'accept': '.csv'})\n", "self.collapsed = True\n", "required_css_class = 'required'\n", "model = DataSource\n", "fields = ['title', 'url', 'estimated_records', 'state', 'priority',\n 'description']\n", "model = DataSource\n", "fields = ['priority']\n", "def __init__(self, *args, **kwargs):...\n", "super(PriorityForm, self).__init__(*args, **kwargs)\n", "self.fields['priority'].label = ''\n", "self.fields['priority'].widget.attrs.update({'onchange': 'form.submit();'})\n", "model = QANotes\n", "fields = ['qa_notes']\n", "widgets = {'qa_notes': forms.Textarea}\n", "labels = {'qa_notes': _('QA Notes (required if approving edited records)')}\n", "required_css_class = 'required'\n", "model = ExtractedText\n", "fields = ['prod_name', 'data_document', 'qa_checked']\n", "required_css_class = 'required'\n", "document_type = forms.ModelChoiceField(queryset=DocumentType.objects.all(),\n label='Data Document Type', required=True)\n", "return_url = forms.CharField()\n", "model = Product\n", "fields = ['title', 'manufacturer', 'brand_name', 'upc', 'size', 'color']\n", "def __init__(self, *args, 
**kwargs):...\n", "super(ProductLinkForm, self).__init__(*args, **kwargs)\n", "self.fields['return_url'].widget = forms.HiddenInput()\n", "required_css_class = 'required'\n", "model = Product\n", "fields = ['title', 'manufacturer', 'brand_name', 'size', 'color',\n 'model_number', 'short_description', 'long_description']\n", "exclude = 'title', 'long_description'\n", "def __init__(self, *args, **kwargs):...\n", "super(ProductForm, self).__init__(*args, **kwargs)\n", "for f in self.fields:\n", "self.fields[f].disabled = True\n", "puc = forms.ModelChoiceField(queryset=PUC.objects.all(), label='Category',\n widget=autocomplete.ModelSelect2(url='puc-autocomplete', attrs={\n 'data-minimum-input-length': 3}))\n", "model = ProductToPUC\n", "fields = ['puc']\n", "model = ExtractedHabitsAndPracticesToPUC\n", "fields = ['puc']\n", "id_pks = forms.CharField(label='Product Titles', widget=forms.HiddenInput(),\n required=True)\n", "model = ProductToPUC\n", "fields = ['puc', 'id_pks']\n", "model = ProductToPUC\n", "fields = ['puc']\n", "def __init__(self, *args, **kwargs):...\n", "super(BulkPUCForm, self).__init__(*args, **kwargs)\n", "lbl = 'Select PUC for Attribute to Assign to Selected Products'\n", "self.fields['puc'].label = lbl\n", "self.fields['puc'].widget.attrs['onchange'] = 'form.submit();'\n", "required_css_class = 'required'\n", "tag = forms.ModelChoiceField(queryset=PUCTag.objects.none(), label='Attribute')\n", "id_pks = forms.CharField(label='Product Titles', widget=forms.HiddenInput())\n", "model = ProductToPUC\n", "fields = ['tag', 'id_pks']\n", "def __init__(self, *args, **kwargs):...\n", "super(BulkProductTagForm, self).__init__(*args, **kwargs)\n", "lbl = 'Select Attribute to Assign to Selected Products'\n", "self.fields['tag'].label = lbl\n", "model = ExtractedText\n", "fields = ['prod_name', 'doc_date', 'rev_num']\n", "widgets = {'data_document': forms.HiddenInput(), 'extraction_script': forms\n .HiddenInput()}\n", "model = ExtractedCPCat\n", "fields = ['doc_date', 'cat_code', 'description_cpcat', 'cpcat_sourcetype']\n", "fields = ExtractedCPCatForm.Meta.fields + ['prod_name', 'doc_date',\n 'rev_num', 'cpcat_code']\n", "model = ExtractedHHDoc\n", "fields = ['hhe_report_number', 'study_location', 'naics_code',\n 'sampling_date', 'population_gender', 'population_age',\n 'population_other', 'occupation', 'facility']\n", "fields = ExtractedHHDocForm.Meta.fields + ['prod_name', 'doc_date', 'rev_num']\n", "model = DataDocument\n", "fields = ['document_type']\n", "def __init__(self, *args, **kwargs):...\n", "super(DocumentTypeForm, self).__init__(*args, **kwargs)\n", "self.fields['document_type'].label = ''\n", "self.fields['document_type'].widget.attrs.update({'onchange': 'form.submit();'}\n )\n", "def include_extract_form(dg):...\n", "\"\"\"docstring\"\"\"\n", "if not dg.type in ['FU', 'CO', 'CP']:\n", "return False\n", "if dg.all_matched() and not dg.all_extracted():\n", "return ExtractionScriptForm(dg_type=dg.type)\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Docstring", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_24(VAR_15):...\n", "import subprocess\n", "VAR_60 = subprocess.Popen(VAR_15, VAR_12=subprocess.PIPE, VAR_42=subprocess\n .PIPE, shell=True, universal_newlines=True)\n", "VAR_12, VAR_42 = VAR_60.communicate()\n", "VAR_41 = VAR_12.splitlines()\n", "return VAR_41, VAR_42\n" ]
[ "def subprocess_execute(cmd):...\n", "import subprocess\n", "p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n shell=True, universal_newlines=True)\n", "stdout, stderr = p.communicate()\n", "output = stdout.splitlines()\n", "return output, stderr\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Import'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_35(self, VAR_14):...\n", "VAR_20 = self.get_related_model(VAR_14)\n", "return self.session.query(VAR_20).all()\n" ]
[ "def query_model_relation(self, col_name):...\n", "model = self.get_related_model(col_name)\n", "return self.session.query(model).all()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_17(self, *VAR_67):...\n", "self._ruleorder.add(*VAR_67)\n" ]
[ "def ruleorder(self, *rulenames):...\n", "self._ruleorder.add(*rulenames)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "from flask import Flask, jsonify, make_response, request, g\n", "from flask_restful import Api\n", "from celery import Celery\n", "from sqlalchemy.ext.automap import automap_base\n", "from sqlalchemy import create_engine\n", "from sqlalchemy.orm import Session\n", "from common.utils import unauthorized, headers, not_found\n", "from config import load_env_variables, DevelopmentConfig, ProdConfig\n", "load_env_variables()\n", "VAR_0 = Flask(__name__)\n", "VAR_0.config.from_object(DevelopmentConfig)\n", "VAR_1 = Api(VAR_0)\n", "print('Reflecting classes...')\n", "VAR_2 = automap_base()\n", "VAR_3 = create_engine(VAR_0.config['SQLALCHEMY_DATABASE_URI'], pool_size=20,\n max_overflow=20, pool_pre_ping=True)\n", "VAR_2.prepare(VAR_3, reflect=True)\n", "print('Classes reflected...')\n", "@VAR_0.before_request...\n", "\"\"\"docstring\"\"\"\n", "g.session = Session(VAR_3)\n", "g.Base = VAR_2\n", "@VAR_0.after_request...\n", "\"\"\"docstring\"\"\"\n", "g.session.commit()\n", "g.session.close()\n", "return VAR_4\n" ]
[ "from flask import Flask, jsonify, make_response, request, g\n", "from flask_restful import Api\n", "from celery import Celery\n", "from sqlalchemy.ext.automap import automap_base\n", "from sqlalchemy import create_engine\n", "from sqlalchemy.orm import Session\n", "from common.utils import unauthorized, headers, not_found\n", "from config import load_env_variables, DevelopmentConfig, ProdConfig\n", "load_env_variables()\n", "app = Flask(__name__)\n", "app.config.from_object(DevelopmentConfig)\n", "api = Api(app)\n", "print('Reflecting classes...')\n", "Base = automap_base()\n", "engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'], pool_size=20,\n max_overflow=20, pool_pre_ping=True)\n", "Base.prepare(engine, reflect=True)\n", "print('Classes reflected...')\n", "@app.before_request...\n", "\"\"\"docstring\"\"\"\n", "g.session = Session(engine)\n", "g.Base = Base\n", "@app.after_request...\n", "\"\"\"docstring\"\"\"\n", "g.session.commit()\n", "g.session.close()\n", "return resp\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "For", "Docstring", "Assign'", "Assign'", "Condition", "Docstring", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_8(self):...\n", "\"\"\"docstring\"\"\"\n", "if self.eng is not None and self.backend == 'Simulator' or self.backend == 'IBMBackend':\n", "self.eng.flush()\n", "self.eng.backend.collapse_wavefunction(self.reg, [(0) for i in range(len(\n self.reg))])\n" ]
[ "def _deallocate3(self):...\n", "\"\"\"docstring\"\"\"\n", "if self.eng is not None and self.backend == 'Simulator' or self.backend == 'IBMBackend':\n", "self.eng.flush()\n", "self.eng.backend.collapse_wavefunction(self.reg, [(0) for i in range(len(\n self.reg))])\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Expr'", "Expr'" ]
[ "@VAR_0.route('/signup/<remote_app>/', methods=['GET', 'POST'])...\n", "\"\"\"docstring\"\"\"\n", "if VAR_1 not in signup_handlers:\n", "return abort(404)\n", "VAR_3 = signup_handlers[VAR_1]['view']()\n", "return abort(404) if VAR_3 is None else VAR_3\n" ]
[ "@blueprint.route('/signup/<remote_app>/', methods=['GET', 'POST'])...\n", "\"\"\"docstring\"\"\"\n", "if remote_app not in signup_handlers:\n", "return abort(404)\n", "res = signup_handlers[remote_app]['view']()\n", "return abort(404) if res is None else res\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Return'", "Assign'", "Return'" ]
[ "def FUNC_3(self, VAR_9, VAR_10):...\n", "\"\"\"docstring\"\"\"\n" ]
[ "def create_export(self, context, volume):...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Docstring" ]
[ "def __init__(self, VAR_11, VAR_4, VAR_5=None):...\n", "\"\"\"docstring\"\"\"\n", "self.profiler = VAR_11\n", "self.event_type = VAR_4\n", "self.extra_data = VAR_5 if VAR_5 is not None else {}\n" ]
[ "def __init__(self, profiler, event_type, extra_data=None):...\n", "\"\"\"docstring\"\"\"\n", "self.profiler = profiler\n", "self.event_type = event_type\n", "self.extra_data = extra_data if extra_data is not None else {}\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return quote(self.path)\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return quote(self.path)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_10(self, VAR_6):...\n", "VAR_22 = \"SELECT * FROM article_reference WHERE query_hash = '%s'\" % VAR_6\n", "self.cur.execute(VAR_22)\n", "self.conn.commit()\n", "VAR_20 = []\n", "if self.cur.rowcount > 0:\n", "for VAR_30 in self.cur.fetchall():\n", "return VAR_20\n", "VAR_29 = {}\n", "VAR_29['hash'] = VAR_30['article_hash']\n", "VAR_29['date'] = VAR_30['article_date']\n", "VAR_29['url'] = VAR_30['article_url']\n", "VAR_29['content'] = VAR_30['article_content']\n", "VAR_20.append(VAR_29)\n" ]
[ "def get_reference_by_qhash(self, qhash):...\n", "sql = \"SELECT * FROM article_reference WHERE query_hash = '%s'\" % qhash\n", "self.cur.execute(sql)\n", "self.conn.commit()\n", "articles = []\n", "if self.cur.rowcount > 0:\n", "for row in self.cur.fetchall():\n", "return articles\n", "article = {}\n", "article['hash'] = row['article_hash']\n", "article['date'] = row['article_date']\n", "article['url'] = row['article_url']\n", "article['content'] = row['article_content']\n", "articles.append(article)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Assign'", "Condition", "For", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def __init__(self, VAR_3, VAR_4, VAR_5=None, VAR_6=None):...\n", "self.rule = VAR_3\n", "self.dag = VAR_4\n", "self.targetfile = VAR_5\n", "self.wildcards_dict = self.rule.get_wildcards(VAR_5)\n", "self.wildcards = Wildcards(fromdict=self.wildcards_dict)\n", "self._format_wildcards = self.wildcards if VAR_6 is None else Wildcards(\n fromdict=format_wildcards)\n", "(self.input, self.output, self.params, self.log, self.benchmark, self.\n ruleio, self.dependencies) = VAR_3.expand_wildcards(self.wildcards_dict)\n", "self.resources_dict = {name: min(self.rule.workflow.global_resources.get(\n name, res), res) for name, res in VAR_3.resources.items()}\n", "self.threads = self.resources_dict['_cores']\n", "self.resources = Resources(fromdict=self.resources_dict)\n", "self._inputsize = None\n", "self.dynamic_output, self.dynamic_input = set(), set()\n", "self.temp_output, self.protected_output = set(), set()\n", "self.touch_output = set()\n", "self.subworkflow_input = dict()\n", "for VAR_28 in self.output:\n", "VAR_27 = self.ruleio[VAR_28]\n", "for VAR_28 in self.input:\n", "if VAR_27 in self.rule.dynamic_output:\n", "VAR_27 = self.ruleio[VAR_28]\n", "self._hash = self.rule.__hash__()\n", "self.dynamic_output.add(VAR_28)\n", "if VAR_27 in self.rule.temp_output:\n", "if VAR_27 in self.rule.dynamic_input:\n", "if True or not self.dynamic_output:\n", "self.temp_output.add(VAR_28)\n", "if VAR_27 in self.rule.protected_output:\n", "self.dynamic_input.add(VAR_28)\n", "if VAR_27 in self.rule.subworkflow_input:\n", "for o in self.output:\n", "self.protected_output.add(VAR_28)\n", "if VAR_27 in self.rule.touch_output:\n", "self.subworkflow_input[VAR_28] = self.rule.subworkflow_input[VAR_27]\n", "self._hash ^= o.__hash__()\n", "self.touch_output.add(VAR_28)\n" ]
[ "def __init__(self, rule, dag, targetfile=None, format_wildcards=None):...\n", "self.rule = rule\n", "self.dag = dag\n", "self.targetfile = targetfile\n", "self.wildcards_dict = self.rule.get_wildcards(targetfile)\n", "self.wildcards = Wildcards(fromdict=self.wildcards_dict)\n", "self._format_wildcards = (self.wildcards if format_wildcards is None else\n Wildcards(fromdict=format_wildcards))\n", "(self.input, self.output, self.params, self.log, self.benchmark, self.\n ruleio, self.dependencies) = rule.expand_wildcards(self.wildcards_dict)\n", "self.resources_dict = {name: min(self.rule.workflow.global_resources.get(\n name, res), res) for name, res in rule.resources.items()}\n", "self.threads = self.resources_dict['_cores']\n", "self.resources = Resources(fromdict=self.resources_dict)\n", "self._inputsize = None\n", "self.dynamic_output, self.dynamic_input = set(), set()\n", "self.temp_output, self.protected_output = set(), set()\n", "self.touch_output = set()\n", "self.subworkflow_input = dict()\n", "for f in self.output:\n", "f_ = self.ruleio[f]\n", "for f in self.input:\n", "if f_ in self.rule.dynamic_output:\n", "f_ = self.ruleio[f]\n", "self._hash = self.rule.__hash__()\n", "self.dynamic_output.add(f)\n", "if f_ in self.rule.temp_output:\n", "if f_ in self.rule.dynamic_input:\n", "if True or not self.dynamic_output:\n", "self.temp_output.add(f)\n", "if f_ in self.rule.protected_output:\n", "self.dynamic_input.add(f)\n", "if f_ in self.rule.subworkflow_input:\n", "for o in self.output:\n", "self.protected_output.add(f)\n", "if f_ in self.rule.touch_output:\n", "self.subworkflow_input[f] = self.rule.subworkflow_input[f_]\n", "self._hash ^= o.__hash__()\n", "self.touch_output.add(f)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "For", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Condition", "Condition", "Expr'", "Condition", "Expr'", "Condition", "For", "Expr'", "Condition", "Assign'", "AugAssign'", "Expr'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "if len(self.nodes) > 0:\n", "VAR_5 = self.nodes[-1].proxy\n", "VAR_5 = None\n", "return VAR_5\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "if len(self.nodes) > 0:\n", "parent = self.nodes[-1].proxy\n", "parent = None\n", "return parent\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_2(VAR_3, VAR_2=None, VAR_4=True, VAR_5=True):...\n", "\"\"\"docstring\"\"\"\n", "if isinstance(VAR_3, dict):\n", "return FUNC_1(VAR_3, VAR_2, VAR_4, VAR_5)\n", "if isinstance(VAR_3, list):\n", "return [FUNC_2(o, VAR_2, VAR_4, VAR_5) for o in VAR_3]\n", "return VAR_3\n" ]
[ "def build_object_graph(d, resource=None, full_clean=True, copy_dict=True):...\n", "\"\"\"docstring\"\"\"\n", "if isinstance(d, dict):\n", "return create_resource_from_dict(d, resource, full_clean, copy_dict)\n", "if isinstance(d, list):\n", "return [build_object_graph(o, resource, full_clean, copy_dict) for o in d]\n", "return d\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_5():...\n", "VAR_6 = VAR_14.url_for('main.home')\n", "VAR_7 = VAR_14.request.args.get('next') or VAR_14.request.referrer or VAR_6\n", "if VAR_7 == VAR_14.request.url:\n", "return VAR_6\n", "return VAR_7\n" ]
[ "def redirect_url():...\n", "home_url = flask.url_for('main.home')\n", "url = flask.request.args.get('next') or flask.request.referrer or home_url\n", "if url == flask.request.url:\n", "return home_url\n", "return url\n" ]
[ 0, 6, 6, 6, 6, 6 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def __init__(self):...\n", "super(CLASS_0, self).__init__()\n", "self._scsi_disks_timeout_set = False\n" ]
[ "def __init__(self):...\n", "super(FreeBSDOSUtil, self).__init__()\n", "self._scsi_disks_timeout_set = False\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'" ]
[ "def FUNC_8(VAR_17):...\n", "\"\"\"docstring\"\"\"\n", "VAR_34 = VAR_17.replace(',', '\" OR \"')\n", "print(VAR_34)\n", "VAR_18 = sql.connect('./resources/comments.db')\n", "VAR_33 = []\n", "VAR_20 = (\"\"\" SELECT * FROM comments\n WHERE id = \"{0}\";\n \"\"\"\n .format(VAR_34))\n", "VAR_21 = VAR_18.cursor()\n", "print(''.join(traceback.format_exception(etype=type(e), value=e, tb=e.\n __traceback__)))\n", "VAR_18.close()\n", "VAR_21.execute(VAR_20)\n", "VAR_18.rollback()\n", "return VAR_33\n", "VAR_17 = VAR_21.fetchall()\n", "VAR_33 = {'error': 'error getting comments'}\n", "print(str(VAR_17))\n", "if VAR_21.rowcount() != 0:\n", "for VAR_15 in VAR_17:\n", "VAR_33 = None\n", "VAR_33.append({'id': VAR_15[0], 'posterId': VAR_15[1], 'posterUsername':\n VAR_15[2], 'posterFirstname': VAR_15[3], 'posterFastname': VAR_15[4],\n 'comment': VAR_15[5], 'timePosted': VAR_15[6]})\n" ]
[ "def __get_comments(comments):...\n", "\"\"\"docstring\"\"\"\n", "comment_id_input = comments.replace(',', '\" OR \"')\n", "print(comment_id_input)\n", "con = sql.connect('./resources/comments.db')\n", "return_obj = []\n", "cmd = (\"\"\" SELECT * FROM comments\n WHERE id = \"{0}\";\n \"\"\".\n format(comment_id_input))\n", "cur = con.cursor()\n", "print(''.join(traceback.format_exception(etype=type(e), value=e, tb=e.\n __traceback__)))\n", "con.close()\n", "cur.execute(cmd)\n", "con.rollback()\n", "return return_obj\n", "comments = cur.fetchall()\n", "return_obj = {'error': 'error getting comments'}\n", "print(str(comments))\n", "if cur.rowcount() != 0:\n", "for comment in comments:\n", "return_obj = None\n", "return_obj.append({'id': comment[0], 'posterId': comment[1],\n 'posterUsername': comment[2], 'posterFirstname': comment[3],\n 'posterFastname': comment[4], 'comment': comment[5], 'timePosted':\n comment[6]})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Return'", "Assign'", "Assign'", "Expr'", "Condition", "For", "Assign'", "Expr'" ]
[ "def FUNC_6(VAR_10, VAR_11):...\n", "if not VAR_10:\n", "return FUNC_5\n", "VAR_18 = dict(point=0, line=0, polygon=0)\n", "for format_ext, format_cfg in VAR_10.items():\n", "VAR_28 = format_cfg.get('layer', {}).get(VAR_11)\n", "if VAR_18['point'] == VAR_18['line'] == VAR_18['polygon'] == 0:\n", "VAR_29 = format_cfg.get('geometry', {})\n", "return FUNC_5\n", "def FUNC_9(VAR_8, VAR_9):...\n", "if VAR_28:\n", "VAR_30 = {}\n", "for VAR_36, buffer_size in VAR_28.items():\n", "if VAR_29:\n", "for VAR_36 in ('point', 'line', 'polygon'):\n", "VAR_18[VAR_36] = max(VAR_18[VAR_36], buffer_size)\n", "for VAR_36, buffer_size in VAR_29.items():\n", "VAR_35 = VAR_9 * VAR_18[VAR_36]\n", "return VAR_30\n", "VAR_18[VAR_36] = max(VAR_18[VAR_36], buffer_size)\n", "VAR_30[VAR_36] = bounds_buffer(VAR_8, VAR_35)\n" ]
[ "def create_query_bounds_pad_fn(buffer_cfg, layer_name):...\n", "if not buffer_cfg:\n", "return _bounds_pad_no_buf\n", "buf_by_type = dict(point=0, line=0, polygon=0)\n", "for format_ext, format_cfg in buffer_cfg.items():\n", "format_layer_cfg = format_cfg.get('layer', {}).get(layer_name)\n", "if buf_by_type['point'] == buf_by_type['line'] == buf_by_type['polygon'] == 0:\n", "format_geometry_cfg = format_cfg.get('geometry', {})\n", "return _bounds_pad_no_buf\n", "def bounds_pad(bounds, meters_per_pixel_dim):...\n", "if format_layer_cfg:\n", "buffered_by_type = {}\n", "for geometry_type, buffer_size in format_layer_cfg.items():\n", "if format_geometry_cfg:\n", "for geometry_type in ('point', 'line', 'polygon'):\n", "buf_by_type[geometry_type] = max(buf_by_type[geometry_type], buffer_size)\n", "for geometry_type, buffer_size in format_geometry_cfg.items():\n", "offset = meters_per_pixel_dim * buf_by_type[geometry_type]\n", "return buffered_by_type\n", "buf_by_type[geometry_type] = max(buf_by_type[geometry_type], buffer_size)\n", "buffered_by_type[geometry_type] = bounds_buffer(bounds, offset)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "For", "Assign'", "Condition", "Assign'", "Return'", "FunctionDef'", "Condition", "Assign'", "For", "Condition", "For", "Assign'", "For", "Assign'", "Return'", "Assign'", "Assign'" ]
[ "def FUNC_13(VAR_16):...\n", "\"\"\"docstring\"\"\"\n", "if FUNC_12(VAR_16, 'protected'):\n", "return VAR_19(VAR_16, 'temp')\n" ]
[ "def temp(value):...\n", "\"\"\"docstring\"\"\"\n", "if is_flagged(value, 'protected'):\n", "return flag(value, 'temp')\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'" ]
[ "def FUNC_2(self, VAR_11='', VAR_12=None, VAR_13=False):...\n", "return self.url_list(VAR_11, VAR_12=headers, VAR_13=silent)\n" ]
[ "def list(self, resource='', headers=None, silent=False):...\n", "return self.url_list(resource, headers=headers, silent=silent)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_3(self):...\n", "VAR_37 = os.path.join(self._leap_home, self._uuid)\n", "shutil.rmtree(VAR_37)\n" ]
[ "def cleanup(self):...\n", "soledad_test_folder = os.path.join(self._leap_home, self._uuid)\n", "shutil.rmtree(soledad_test_folder)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "@staticmethod...\n", "return users.create_logout_url(VAR_23)\n" ]
[ "@staticmethod...\n", "return users.create_logout_url(dest_url)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "from flask import Blueprint, request, render_template, send_from_directory\n", "from player_web import get_web\n", "import json\n", "from database_writer import get_db\n", "import constants\n", "import bracket_utils\n", "import requests\n", "import logger\n", "VAR_0 = None\n", "VAR_1 = 'https://localhost:5000'\n", "VAR_2 = Blueprint('endpoints', __name__)\n", "VAR_3 = logger.logger(__name__)\n", "@VAR_2.route('/')...\n", "if VAR_0 == None:\n", "FUNC_16()\n", "VAR_5 = request.args.get('tag', default='christmasmike')\n", "VAR_7 = get_web(VAR_0=db)\n", "return render_template('libraries/html/web.html', VAR_7=data, VAR_5=tag)\n" ]
[ "from flask import Blueprint, request, render_template, send_from_directory\n", "from player_web import get_web\n", "import json\n", "from database_writer import get_db\n", "import constants\n", "import bracket_utils\n", "import requests\n", "import logger\n", "db = None\n", "BASE_URL = 'https://localhost:5000'\n", "endpoints = Blueprint('endpoints', __name__)\n", "LOG = logger.logger(__name__)\n", "@endpoints.route('/')...\n", "if db == None:\n", "init()\n", "tag = request.args.get('tag', default='christmasmike')\n", "data = get_web(db=db)\n", "return render_template('libraries/html/web.html', data=data, tag=tag)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_3(self):...\n", "auth.login(self.request, self.user)\n", "VAR_5 = import_from_settings('OIDC_RENEW_ID_TOKEN_EXPIRY_SECONDS', 60 * 15)\n", "self.request.session['oidc_id_token_expiration'] = time.time() + VAR_5\n", "return HttpResponseRedirect(self.success_url)\n" ]
[ "def login_success(self):...\n", "auth.login(self.request, self.user)\n", "expiration_interval = import_from_settings('OIDC_RENEW_ID_TOKEN_EXPIRY_SECONDS'\n , 60 * 15)\n", "self.request.session['oidc_id_token_expiration'] = time.time(\n ) + expiration_interval\n", "return HttpResponseRedirect(self.success_url)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Return'" ]
[ "\"\"\" Easily traverse nested Python data structures \"\"\"\n", "__version__ = '0.2'\n", "import re\n", "\"\"\" Falsey class used to flag item \"missing\" from traversal path \"\"\"\n", "def __bool__(self):...\n", "return False\n" ]
[ "\"\"\" Easily traverse nested Python data structures \"\"\"\n", "__version__ = '0.2'\n", "import re\n", "\"\"\" Falsey class used to flag item \"missing\" from traversal path \"\"\"\n", "def __bool__(self):...\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Assign'", "Import'", "Expr'", "FunctionDef'", "Return'" ]
[ "def FUNC_33(self):...\n", "def FUNC_39(VAR_101):...\n", "VAR_101.norun = True\n", "return VAR_101\n" ]
[ "def norun(self):...\n", "def decorate(ruleinfo):...\n", "ruleinfo.norun = True\n", "return ruleinfo\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Assign'", "Return'" ]
[ "@mock.patch('requests.post', FUNC_0)...\n", "VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_6 = VAR_5.data['id']\n", "VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_7 = {'image': 'autotest/example'}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_4 = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 1)\n", "VAR_12 = VAR_5.data['results'][0]\n", "self.assertEqual(VAR_12['type'], 'cmd')\n", "self.assertEqual(VAR_12['num'], 1)\n", "VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_6 = VAR_5.data['id']\n", "VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_7 = {'image': 'autotest/example', 'sha': 'a' * 40, 'dockerfile':\n 'FROM scratch'}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_4 = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 1)\n", "VAR_12 = VAR_5.data['results'][0]\n", "self.assertEqual(VAR_12['type'], 'cmd')\n", "self.assertEqual(VAR_12['num'], 1)\n", "VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_6 = VAR_5.data['id']\n", "VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_7 = {'image': 'autotest/example', 'sha': 'a' * 40, 'dockerfile':\n 'FROM scratch', 'procfile': {'worker': 'node worker.js'}}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_4 = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 1)\n", "VAR_12 = VAR_5.data['results'][0]\n", "self.assertEqual(VAR_12['type'], 'cmd')\n", "self.assertEqual(VAR_12['num'], 1)\n", "VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_6 = VAR_5.data['id']\n", "VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_7 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_4 = '/api/apps/{app_id}/containers/web'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 1)\n", "VAR_12 = VAR_5.data['results'][0]\n", "self.assertEqual(VAR_12['type'], 'web')\n", "self.assertEqual(VAR_12['num'], 1)\n" ]
[ "@mock.patch('requests.post', mock_import_repository_task)...\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example'}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "url = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 1)\n", "container = response.data['results'][0]\n", "self.assertEqual(container['type'], 'cmd')\n", "self.assertEqual(container['num'], 1)\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example', 'sha': 'a' * 40, 'dockerfile':\n 'FROM scratch'}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "url = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 1)\n", "container = response.data['results'][0]\n", "self.assertEqual(container['type'], 'cmd')\n", "self.assertEqual(container['num'], 1)\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example', 'sha': 'a' * 40, 'dockerfile':\n 'FROM scratch', 'procfile': {'worker': 'node worker.js'}}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "url = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 1)\n", "container = response.data['results'][0]\n", "self.assertEqual(container['type'], 'cmd')\n", "self.assertEqual(container['num'], 1)\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "url = '/api/apps/{app_id}/containers/web'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 1)\n", "container = response.data['results'][0]\n", "self.assertEqual(container['type'], 'web')\n", "self.assertEqual(container['num'], 1)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "@login_required...\n", "VAR_23 = ['filename', 'title', 'document_type', 'url', 'organization']\n", "VAR_5 = DataGroup.objects.filter(VAR_2=pk).first()\n", "if VAR_5:\n", "VAR_23.insert(0, 'id')\n", "VAR_41 = DataDocument.objects.filter(data_group_id=0).values(*VAR_23)\n", "VAR_41 = DataDocument.objects.filter(data_group_id=pk).values(*VAR_23)\n", "return render_to_csv_response(VAR_41, filename='registered_records.csv',\n use_verbose_names=False)\n", "return render_to_csv_response(VAR_41, filename=dg.get_name_as_slug() +\n '_registered_records.csv', field_header_map={'id': 'DataDocument_id'},\n use_verbose_names=False)\n" ]
[ "@login_required...\n", "columnlist = ['filename', 'title', 'document_type', 'url', 'organization']\n", "dg = DataGroup.objects.filter(pk=pk).first()\n", "if dg:\n", "columnlist.insert(0, 'id')\n", "qs = DataDocument.objects.filter(data_group_id=0).values(*columnlist)\n", "qs = DataDocument.objects.filter(data_group_id=pk).values(*columnlist)\n", "return render_to_csv_response(qs, filename='registered_records.csv',\n use_verbose_names=False)\n", "return render_to_csv_response(qs, filename=dg.get_name_as_slug() +\n '_registered_records.csv', field_header_map={'id': 'DataDocument_id'},\n use_verbose_names=False)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Return'", "Return'" ]
[ "@classmethod...\n", "super(CLASS_1, VAR_3).register_options(VAR_4)\n", "VAR_4('--setuptools-version', advanced=True, fingerprint=True, default=\n '40.6.3', help=\n 'The setuptools version to use when executing `setup.py` scripts.')\n", "VAR_4('--wheel-version', advanced=True, fingerprint=True, default='0.32.3',\n help='The wheel version to use when executing `setup.py` scripts.')\n" ]
[ "@classmethod...\n", "super(BuildSetupRequiresPex, cls).register_options(register)\n", "register('--setuptools-version', advanced=True, fingerprint=True, default=\n '40.6.3', help=\n 'The setuptools version to use when executing `setup.py` scripts.')\n", "register('--wheel-version', advanced=True, fingerprint=True, default=\n '0.32.3', help=\n 'The wheel version to use when executing `setup.py` scripts.')\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_15(self):...\n", "VAR_28 = sqlite3.connect(':memory:')\n", "self.prepare_connection(VAR_28)\n", "VAR_43 = VAR_28.execute('select sqlite_version()').fetchone()[0]\n", "VAR_12 = {}\n", "for VAR_69, testsql, hasversion in (('json1', \"SELECT json('{}')\", False),\n", "VAR_44 = []\n", "VAR_63 = VAR_28.execute(testsql)\n", "for fts in ('FTS5', 'FTS4', 'FTS3'):\n", "if hasversion:\n", "VAR_45 = {'version': __version__}\n", "VAR_28.execute('CREATE VIRTUAL TABLE v{fts} USING {fts} (data)'.format(fts=fts)\n )\n", "VAR_12[VAR_69] = VAR_63.fetchone()[0]\n", "VAR_12[VAR_69] = None\n", "if self.version_note:\n", "VAR_44.append(fts)\n", "VAR_45['note'] = self.version_note\n", "return {'python': {'version': '.'.join(map(str, sys.version_info[:3])),\n 'full': sys.version}, 'datasette': VAR_45, 'sqlite': {'version': VAR_43,\n 'fts_versions': VAR_44, 'extensions': VAR_12, 'compile_options': [r[0] for\n r in VAR_28.execute('pragma compile_options;').fetchall()]}}\n" ]
[ "def versions(self):...\n", "conn = sqlite3.connect(':memory:')\n", "self.prepare_connection(conn)\n", "sqlite_version = conn.execute('select sqlite_version()').fetchone()[0]\n", "sqlite_extensions = {}\n", "for extension, testsql, hasversion in (('json1', \"SELECT json('{}')\", False\n", "fts_versions = []\n", "result = conn.execute(testsql)\n", "for fts in ('FTS5', 'FTS4', 'FTS3'):\n", "if hasversion:\n", "datasette_version = {'version': __version__}\n", "conn.execute('CREATE VIRTUAL TABLE v{fts} USING {fts} (data)'.format(fts=fts))\n", "sqlite_extensions[extension] = result.fetchone()[0]\n", "sqlite_extensions[extension] = None\n", "if self.version_note:\n", "fts_versions.append(fts)\n", "datasette_version['note'] = self.version_note\n", "return {'python': {'version': '.'.join(map(str, sys.version_info[:3])),\n 'full': sys.version}, 'datasette': datasette_version, 'sqlite': {\n 'version': sqlite_version, 'fts_versions': fts_versions, 'extensions':\n sqlite_extensions, 'compile_options': [r[0] for r in conn.execute(\n 'pragma compile_options;').fetchall()]}}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "For", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Return'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return set(VAR_28 for VAR_28 in self.input if not VAR_28.exists and not \n VAR_28 in self.subworkflow_input)\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return set(f for f in self.input if not f.exists and not f in self.\n subworkflow_input)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "import time\n", "import sys\n", "import citest.gcp_testing as gcp\n", "import citest.json_contract as jc\n", "import citest.service_testing as st\n", "import spinnaker_testing as sk\n", "import spinnaker_testing.gate as gate\n", "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "return gate.new_agent(VAR_1)\n" ]
[ "import time\n", "import sys\n", "import citest.gcp_testing as gcp\n", "import citest.json_contract as jc\n", "import citest.service_testing as st\n", "import spinnaker_testing as sk\n", "import spinnaker_testing.gate as gate\n", "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "return gate.new_agent(bindings)\n" ]
[ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Condition", "Docstring", "Return'" ]
[ "def __iter__(self):...\n", "return self.order.__iter__()\n" ]
[ "def __iter__(self):...\n", "return self.order.__iter__()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __call__(self, VAR_13, VAR_14):...\n", "VAR_17 = decode_path_info(VAR_13.get('PATH_INFO', ''))\n", "if self.autorefresh:\n", "VAR_15 = self.find_file(VAR_17)\n", "VAR_15 = self.files.get(VAR_17)\n", "if VAR_15 is None:\n", "return self.application(VAR_13, VAR_14)\n", "return self.serve(VAR_15, VAR_13, VAR_14)\n" ]
[ "def __call__(self, environ, start_response):...\n", "path = decode_path_info(environ.get('PATH_INFO', ''))\n", "if self.autorefresh:\n", "static_file = self.find_file(path)\n", "static_file = self.files.get(path)\n", "if static_file is None:\n", "return self.application(environ, start_response)\n", "return self.serve(static_file, environ, start_response)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_31():...\n", "VAR_24 = vim.eval('tagfiles()')\n", "VAR_25 = VAR_0.getcwd()\n", "return [VAR_0.path.join(VAR_25, x) for x in VAR_24]\n" ]
[ "def GetTagFiles():...\n", "tag_files = vim.eval('tagfiles()')\n", "current_working_directory = os.getcwd()\n", "return [os.path.join(current_working_directory, x) for x in tag_files]\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "@utils.synchronized('3par', external=True)...\n", "\"\"\"docstring\"\"\"\n", "self.common.client_login()\n", "VAR_20 = self._create_host(VAR_6, VAR_9)\n", "VAR_21 = self.common.create_vlun(VAR_6, VAR_20)\n", "VAR_22 = self.common.get_ports()\n", "self.common.client_logout()\n", "VAR_23 = {'driver_volume_type': 'fibre_channel', 'data': {'target_lun':\n VAR_21['lun'], 'target_discovered': True, 'target_wwn': VAR_22['FC']}}\n", "return VAR_23\n" ]
[ "@utils.synchronized('3par', external=True)...\n", "\"\"\"docstring\"\"\"\n", "self.common.client_login()\n", "host = self._create_host(volume, connector)\n", "vlun = self.common.create_vlun(volume, host)\n", "ports = self.common.get_ports()\n", "self.common.client_logout()\n", "info = {'driver_volume_type': 'fibre_channel', 'data': {'target_lun': vlun[\n 'lun'], 'target_discovered': True, 'target_wwn': ports['FC']}}\n", "return info\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_2(self):...\n", "" ]
[ "def tearDown(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_0(VAR_0, VAR_1=None, VAR_2=None, VAR_3='name', VAR_4=None):...\n", "if not VAR_4:\n", "VAR_4 = 'Entry with that name already exists.'\n", "def FUNC_2(VAR_6, VAR_7):...\n", "VAR_8 = VAR_1 and VAR_2\n", "VAR_9 = 'SELECT COALESCE(COUNT(' + VAR_0 + '.' + VAR_3 + '), 0) FROM ' + VAR_0\n", "if VAR_8:\n", "VAR_9 += ' LEFT JOIN ' + VAR_1\n", "VAR_9 += ' WHERE (' + VAR_0 + '.' + VAR_3 + ' = :x'\n", "if VAR_8:\n", "VAR_9 += ' AND ' + VAR_1 + '.id = ' + str(VAR_6[VAR_2].data)\n", "VAR_9 += ');'\n", "VAR_10 = text(VAR_9)\n", "VAR_11 = db.engine.execute(VAR_10, x=field.data)\n", "for row in VAR_11:\n", "if row[0] > 0:\n", "return FUNC_2\n" ]
[ "def unique(table, subtable=None, subname=None, name='name', message=None):...\n", "if not message:\n", "message = 'Entry with that name already exists.'\n", "def _unique(form, field):...\n", "sub = subtable and subname\n", "query = 'SELECT COALESCE(COUNT(' + table + '.' + name + '), 0) FROM ' + table\n", "if sub:\n", "query += ' LEFT JOIN ' + subtable\n", "query += ' WHERE (' + table + '.' + name + ' = :x'\n", "if sub:\n", "query += ' AND ' + subtable + '.id = ' + str(form[subname].data)\n", "query += ');'\n", "stmt = text(query)\n", "res = db.engine.execute(stmt, x=field.data)\n", "for row in res:\n", "if row[0] > 0:\n", "return _unique\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 4, 0, 0, 4, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Condition", "AugAssign'", "AugAssign'", "Condition", "AugAssign'", "AugAssign'", "Assign'", "Assign'", "For", "Condition", "Return'" ]
[ "def FUNC_3(VAR_7, VAR_8, VAR_2, VAR_9, VAR_10=0):...\n", "VAR_3 = []\n", "VAR_12 = VAR_2 // len(VAR_9)\n", "VAR_13 = {k: globals()[k] for k in VAR_0 if k in globals()}\n", "VAR_13['__name__'] = VAR_8\n", "VAR_13['__builtins__'] = {k: __builtins__.__dict__[k] for k in VAR_1 if k in\n __builtins__.__dict__}\n", "VAR_14 = {}\n", "exec(VAR_7, VAR_13, VAR_14)\n", "if VAR_10 >= 1:\n", "for i, test_case_obj in enumerate(VAR_9):\n", "print(repr(e), file=sys.stderr)\n", "FUNC_1(VAR_3, VAR_2, 'unable to execute function')\n", "return VAR_3\n", "VAR_21 = eval(test_case_obj, VAR_13, VAR_14)\n", "if VAR_10 >= 1:\n", "return VAR_3\n", "if not VAR_21:\n", "print(repr(e), file=sys.stderr)\n", "FUNC_1(VAR_3, VAR_12, 'exception during test case %d' % i)\n", "FUNC_1(VAR_3, VAR_12, 'failed test case %d' % i)\n" ]
[ "def grade(code_obj, name, points, test_case_objs, vlevel=0):...\n", "deductions = []\n", "points_per_case = points // len(test_case_objs)\n", "instr_globals = {k: globals()[k] for k in global_whitelist if k in globals()}\n", "instr_globals['__name__'] = name\n", "instr_globals['__builtins__'] = {k: __builtins__.__dict__[k] for k in\n builtins_whitelist if k in __builtins__.__dict__}\n", "instr_locals = {}\n", "exec(code_obj, instr_globals, instr_locals)\n", "if vlevel >= 1:\n", "for i, test_case_obj in enumerate(test_case_objs):\n", "print(repr(e), file=sys.stderr)\n", "dock_points(deductions, points, 'unable to execute function')\n", "return deductions\n", "result = eval(test_case_obj, instr_globals, instr_locals)\n", "if vlevel >= 1:\n", "return deductions\n", "if not result:\n", "print(repr(e), file=sys.stderr)\n", "dock_points(deductions, points_per_case, 'exception during test case %d' % i)\n", "dock_points(deductions, points_per_case, 'failed test case %d' % i)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Condition", "For", "Expr'", "Expr'", "Return'", "Assign'", "Condition", "Return'", "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_5(self):...\n", "\"\"\"docstring\"\"\"\n", "if request.endpoint in frozenset(['oidc_callback', 'oidc_error']):\n", "return None\n", "VAR_6 = self.get_cookie_id_token()\n", "if VAR_6 is None:\n", "return self.redirect_to_auth_server(request.url)\n", "if self.time() >= VAR_6['exp']:\n", "g.oidc_id_token = VAR_6\n", "VAR_22 = self.credentials_store[VAR_6['sub']]\n", "VAR_0.debug('Expired ID token, credentials missing', exc_info=True)\n", "VAR_22.refresh(self.http)\n", "VAR_0.debug(\"Expired ID token, can't refresh credentials\", exc_info=True)\n", "return None\n", "return self.redirect_to_auth_server(request.url)\n", "VAR_6 = VAR_22.id_token\n", "return self.redirect_to_auth_server(request.url)\n", "self.credentials_store[VAR_6['sub']] = VAR_22\n", "self.set_cookie_id_token(VAR_6)\n" ]
[ "def authenticate_or_redirect(self):...\n", "\"\"\"docstring\"\"\"\n", "if request.endpoint in frozenset(['oidc_callback', 'oidc_error']):\n", "return None\n", "id_token = self.get_cookie_id_token()\n", "if id_token is None:\n", "return self.redirect_to_auth_server(request.url)\n", "if self.time() >= id_token['exp']:\n", "g.oidc_id_token = id_token\n", "credentials = self.credentials_store[id_token['sub']]\n", "logger.debug('Expired ID token, credentials missing', exc_info=True)\n", "credentials.refresh(self.http)\n", "logger.debug(\"Expired ID token, can't refresh credentials\", exc_info=True)\n", "return None\n", "return self.redirect_to_auth_server(request.url)\n", "id_token = credentials.id_token\n", "return self.redirect_to_auth_server(request.url)\n", "self.credentials_store[id_token['sub']] = credentials\n", "self.set_cookie_id_token(id_token)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Assign'", "Condition", "Return'", "Condition", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Return'", "Return'", "Assign'", "Return'", "Assign'", "Expr'" ]
[ "import sqlite3\n", "import operator\n", "from AddScoresToDatabase import getTitle\n", "from AddScoresToDatabase import getDate\n", "from InitDatabase import getRedditInstance\n", "def FUNC_0(VAR_0):...\n", "VAR_1 = sqlite3.connect('database.db')\n", "VAR_2 = VAR_1.cursor()\n", "VAR_3 = set()\n", "for row in VAR_2.execute(\n", "for val in row:\n", "VAR_4 = [name for name in VAR_3]\n", "if val is not '':\n", "VAR_5 = [[name, 0, 0, 0] for name in VAR_4]\n", "for author in val.split('|'):\n", "for i in range(1, 4):\n", "VAR_3.add(author)\n", "for row in VAR_2.execute('SELECT Place' + str(i) +\n", "VAR_5.sort(reverse=True, key=operator.itemgetter(1, 2, 3))\n", "for val in row:\n", "VAR_1.close()\n", "if val is not '':\n", "return VAR_5\n", "for author in val.split('|'):\n", "VAR_5[VAR_4.index(author)][i] += 1\n" ]
[ "import sqlite3\n", "import operator\n", "from AddScoresToDatabase import getTitle\n", "from AddScoresToDatabase import getDate\n", "from InitDatabase import getRedditInstance\n", "def getRankingsFromDatabase(submission):...\n", "database = sqlite3.connect('database.db')\n", "cursor = database.cursor()\n", "nameSet = set()\n", "for row in cursor.execute(\n", "for val in row:\n", "nameList = [name for name in nameSet]\n", "if val is not '':\n", "table = [[name, 0, 0, 0] for name in nameList]\n", "for author in val.split('|'):\n", "for i in range(1, 4):\n", "nameSet.add(author)\n", "for row in cursor.execute('SELECT Place' + str(i) +\n", "table.sort(reverse=True, key=operator.itemgetter(1, 2, 3))\n", "for val in row:\n", "database.close()\n", "if val is not '':\n", "return table\n", "for author in val.split('|'):\n", "table[nameList.index(author)][i] += 1\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "For", "Assign'", "Condition", "Assign'", "For", "For", "Expr'", "For", "Expr'", "For", "Expr'", "Condition", "Return'", "For", "AugAssign'" ]
[ "def FUNC_7(self):...\n", "vimsupport.PostVimMessage('Restarting ycmd server...')\n", "self._user_notified_about_crash = False\n", "self._ServerCleanup()\n", "self._SetupServer()\n" ]
[ "def RestartServer(self):...\n", "vimsupport.PostVimMessage('Restarting ycmd server...')\n", "self._user_notified_about_crash = False\n", "self._ServerCleanup()\n", "self._SetupServer()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_23(VAR_11):...\n", "return '%s> ' % self.configuration.eqlx_group_name\n" ]
[ "def _fake_recv(ignore_arg):...\n", "return '%s> ' % self.configuration.eqlx_group_name\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@login_required()...\n", "VAR_13 = get_object_or_404(DataSource, VAR_2=pk)\n", "VAR_14 = DataGroup.objects.filter(data_source=datasource).count() + 1\n", "VAR_15 = '{} {}'.format(VAR_13.title, VAR_14)\n", "VAR_16 = 'Create New Data Group For Data Source \"' + str(VAR_13) + '\"'\n", "VAR_17 = {'downloaded_by': VAR_0.user, 'name': VAR_15, 'data_source': VAR_13}\n", "if VAR_0.method == 'POST':\n", "VAR_18 = DataGroupForm(VAR_0.POST, VAR_0.FILES, user=request.user, initial=\n initial_values)\n", "VAR_19 = GroupType.objects.all()\n", "if VAR_18.is_valid():\n", "for group in VAR_19:\n", "VAR_3 = VAR_18.save()\n", "VAR_12 = {'form': VAR_18, 'header': VAR_16, 'datasource': VAR_13, 'groups':\n VAR_19}\n", "group.codes = DocumentType.objects.filter(group_type=group)\n", "VAR_18 = DataGroupForm(user=request.user, initial=initial_values)\n", "VAR_48 = [x.decode('ascii', 'ignore') for x in VAR_3.csv.readlines()]\n", "return render(VAR_0, VAR_1, VAR_12)\n", "VAR_49 = csv.DictReader(VAR_48)\n", "VAR_55 = ['filename', 'title', 'document_type', 'url', 'organization']\n", "if not VAR_49.fieldnames == VAR_55:\n", "VAR_3.csv.close()\n", "VAR_56 = ['DataDocument_id,' + ','.join(VAR_49.fieldnames) + '\\n']\n", "VAR_3.delete()\n", "VAR_57 = []\n", "return render(VAR_0, VAR_1, {'field_error': VAR_49.fieldnames,\n 'good_fields': VAR_55, 'form': VAR_18})\n", "VAR_58 = []\n", "VAR_59 = 0\n", "for VAR_78 in VAR_49:\n", "VAR_59 += 1\n", "if VAR_57:\n", "VAR_65 = DocumentType.objects.get(VAR_2=1)\n", "VAR_3.csv.close()\n", "VAR_3.save()\n", "VAR_66 = VAR_78['document_type']\n", "VAR_3.delete()\n", "VAR_67 = File(VAR_43)\n", "if VAR_78['filename'] == '':\n", "return render(VAR_0, VAR_1, {'line_errors': VAR_57, 'form': VAR_18})\n", "VAR_67.write(''.join(VAR_56))\n", "VAR_57.append([VAR_59, \"Filename can't be empty!\"])\n", "if len(VAR_78['filename']) > 255:\n", "VAR_60 = Path(settings.MEDIA_URL + '/' + str(VAR_3.fs_id) + '/' + str(VAR_3\n .fs_id) + '.zip')\n", "VAR_57.append([VAR_59, 'Filename too long!'])\n", "if VAR_78['filename'] in VAR_58:\n", "VAR_61 = Path(settings.MEDIA_ROOT + '/' + str(VAR_3.fs_id) + '/' + str(\n VAR_3.fs_id) + '.zip')\n", "VAR_57.append([VAR_59, 'Duplicate filename found in csv'])\n", "if VAR_78['title'] == '':\n", "VAR_34 = zipfile.ZipFile(str(VAR_61), 'w', zipfile.ZIP_DEFLATED)\n", "VAR_78['title'] = VAR_78['filename'].split('.')[0]\n", "if VAR_66 == '':\n", "VAR_3.zip_file = VAR_60\n", "VAR_57.append([VAR_59, \"'document_type' field can't be empty\"])\n", "if DocumentType.objects.filter(group_type=datagroup.group_type, VAR_66=code\n", "VAR_34.close()\n", "VAR_65 = DocumentType.objects.get(group_type=datagroup.group_type, VAR_66=code)\n", "VAR_57.append([VAR_59, \"DocumentType code doesn't exist.\"])\n", "VAR_3.save()\n", "VAR_58.append(VAR_78['filename'])\n", "return redirect('data_group_detail', VAR_2=datagroup.id)\n", "VAR_24 = DataDocument(filename=line['filename'], title=line['title'],\n document_type=doc_type, url=line['url'], organization=line[\n 'organization'], data_group=datagroup)\n", "VAR_24.save()\n", "VAR_56.append(str(VAR_24.pk) + ',' + ','.join(VAR_78.values()) + '\\n')\n" ]
[ "@login_required()...\n", "datasource = get_object_or_404(DataSource, pk=pk)\n", "group_key = DataGroup.objects.filter(data_source=datasource).count() + 1\n", "default_name = '{} {}'.format(datasource.title, group_key)\n", "header = 'Create New Data Group For Data Source \"' + str(datasource) + '\"'\n", "initial_values = {'downloaded_by': request.user, 'name': default_name,\n 'data_source': datasource}\n", "if request.method == 'POST':\n", "form = DataGroupForm(request.POST, request.FILES, user=request.user,\n initial=initial_values)\n", "groups = GroupType.objects.all()\n", "if form.is_valid():\n", "for group in groups:\n", "datagroup = form.save()\n", "context = {'form': form, 'header': header, 'datasource': datasource,\n 'groups': groups}\n", "group.codes = DocumentType.objects.filter(group_type=group)\n", "form = DataGroupForm(user=request.user, initial=initial_values)\n", "info = [x.decode('ascii', 'ignore') for x in datagroup.csv.readlines()]\n", "return render(request, template_name, context)\n", "table = csv.DictReader(info)\n", "good_fields = ['filename', 'title', 'document_type', 'url', 'organization']\n", "if not table.fieldnames == good_fields:\n", "datagroup.csv.close()\n", "text = ['DataDocument_id,' + ','.join(table.fieldnames) + '\\n']\n", "datagroup.delete()\n", "errors = []\n", "return render(request, template_name, {'field_error': table.fieldnames,\n 'good_fields': good_fields, 'form': form})\n", "filenames = []\n", "count = 0\n", "for line in table:\n", "count += 1\n", "if errors:\n", "doc_type = DocumentType.objects.get(pk=1)\n", "datagroup.csv.close()\n", "datagroup.save()\n", "code = line['document_type']\n", "datagroup.delete()\n", "myfile = File(f)\n", "if line['filename'] == '':\n", "return render(request, template_name, {'line_errors': errors, 'form': form})\n", "myfile.write(''.join(text))\n", "errors.append([count, \"Filename can't be empty!\"])\n", "if len(line['filename']) > 255:\n", "new_zip_name = Path(settings.MEDIA_URL + '/' + str(datagroup.fs_id) + '/' +\n str(datagroup.fs_id) + '.zip')\n", "errors.append([count, 'Filename too long!'])\n", "if line['filename'] in filenames:\n", "new_zip_path = Path(settings.MEDIA_ROOT + '/' + str(datagroup.fs_id) + '/' +\n str(datagroup.fs_id) + '.zip')\n", "errors.append([count, 'Duplicate filename found in csv'])\n", "if line['title'] == '':\n", "zf = zipfile.ZipFile(str(new_zip_path), 'w', zipfile.ZIP_DEFLATED)\n", "line['title'] = line['filename'].split('.')[0]\n", "if code == '':\n", "datagroup.zip_file = new_zip_name\n", "errors.append([count, \"'document_type' field can't be empty\"])\n", "if DocumentType.objects.filter(group_type=datagroup.group_type, code=code\n", "zf.close()\n", "doc_type = DocumentType.objects.get(group_type=datagroup.group_type, code=code)\n", "errors.append([count, \"DocumentType code doesn't exist.\"])\n", "datagroup.save()\n", "filenames.append(line['filename'])\n", "return redirect('data_group_detail', pk=datagroup.id)\n", "doc = DataDocument(filename=line['filename'], title=line['title'],\n document_type=doc_type, url=line['url'], organization=line[\n 'organization'], data_group=datagroup)\n", "doc.save()\n", "text.append(str(doc.pk) + ',' + ','.join(line.values()) + '\\n')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Assign'", "Return'", "Assign'", "Assign'", "For", "AugAssign'", "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Condition", "Return'", "Expr'", "Expr'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Return'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_0(VAR_0, VAR_1='_', VAR_2=-2, VAR_3=-1):...\n", "\"\"\"docstring\"\"\"\n", "import pandas as pd\n", "VAR_18 = [int(fn.split('.')[0].split(VAR_1)[VAR_2]) for fn in VAR_0]\n", "VAR_19 = [int(fn.split('.')[0].split(VAR_1)[VAR_3]) for fn in VAR_0]\n", "VAR_20 = pd.DataFrame({'fn': VAR_0, 'month': VAR_18, 'year': VAR_19})\n", "VAR_21 = VAR_20.sort_values(['year', 'month'])\n", "return VAR_21.fn.tolist()\n" ]
[ "def sort_files(files, split_on='_', elem_month=-2, elem_year=-1):...\n", "\"\"\"docstring\"\"\"\n", "import pandas as pd\n", "months = [int(fn.split('.')[0].split(split_on)[elem_month]) for fn in files]\n", "years = [int(fn.split('.')[0].split(split_on)[elem_year]) for fn in files]\n", "df = pd.DataFrame({'fn': files, 'month': months, 'year': years})\n", "df_sorted = df.sort_values(['year', 'month'])\n", "return df_sorted.fn.tolist()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "from __future__ import unicode_literals\n", "import frappe, json\n", "from frappe import _\n", "from frappe.utils import cint, formatdate\n", "@frappe.whitelist(allow_guest=True)...\n", "from frappe.www.contact import send_message as website_send_message\n", "VAR_4 = VAR_5 = None\n", "website_send_message(VAR_0, VAR_1, VAR_2)\n", "VAR_5 = frappe.db.sql('string'.format(email_id=sender))\n", "if not VAR_5:\n", "VAR_4 = frappe.db.get_value('Lead', dict(email_id=sender))\n", "VAR_6 = frappe.get_doc(dict(doctype='Opportunity', enquiry_from='Customer' if\n customer else 'Lead', VAR_3='Open', title=subject, contact_email=sender,\n to_discuss=message))\n", "if not VAR_4:\n", "if VAR_5:\n", "VAR_8 = frappe.get_doc(dict(doctype='Lead', email_id=sender, lead_name=\n sender.split('@')[0].title())).insert(ignore_permissions=True)\n", "VAR_6.customer = VAR_5[0][0]\n", "if VAR_4:\n", "VAR_6.insert(ignore_permissions=True)\n", "VAR_6.lead = VAR_4\n", "VAR_6.lead = VAR_8.name\n", "VAR_7 = frappe.get_doc({'doctype': 'Communication', 'subject': VAR_0,\n 'content': VAR_1, 'sender': VAR_2, 'sent_or_received': 'Received',\n 'reference_doctype': 'Opportunity', 'reference_name': VAR_6.name})\n", "VAR_7.insert(ignore_permissions=True)\n", "return 'okay'\n" ]
[ "from __future__ import unicode_literals\n", "import frappe, json\n", "from frappe import _\n", "from frappe.utils import cint, formatdate\n", "@frappe.whitelist(allow_guest=True)...\n", "from frappe.www.contact import send_message as website_send_message\n", "lead = customer = None\n", "website_send_message(subject, message, sender)\n", "customer = frappe.db.sql(\n \"\"\"select distinct dl.link_name from `tabDynamic Link` dl\n\t\tleft join `tabContact` c on dl.parent=c.name where dl.link_doctype='Customer'\n\t\tand c.email_id='{email_id}'\"\"\"\n .format(email_id=sender))\n", "if not customer:\n", "lead = frappe.db.get_value('Lead', dict(email_id=sender))\n", "opportunity = frappe.get_doc(dict(doctype='Opportunity', enquiry_from=\n 'Customer' if customer else 'Lead', status='Open', title=subject,\n contact_email=sender, to_discuss=message))\n", "if not lead:\n", "if customer:\n", "new_lead = frappe.get_doc(dict(doctype='Lead', email_id=sender, lead_name=\n sender.split('@')[0].title())).insert(ignore_permissions=True)\n", "opportunity.customer = customer[0][0]\n", "if lead:\n", "opportunity.insert(ignore_permissions=True)\n", "opportunity.lead = lead\n", "opportunity.lead = new_lead.name\n", "comm = frappe.get_doc({'doctype': 'Communication', 'subject': subject,\n 'content': message, 'sender': sender, 'sent_or_received': 'Received',\n 'reference_doctype': 'Opportunity', 'reference_name': opportunity.name})\n", "comm.insert(ignore_permissions=True)\n", "return 'okay'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "Condition", "ImportFrom'", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "\"\"\"string\"\"\"\n", "import random\n", "from eventlet import greenthread\n", "from oslo.config import cfg\n", "from cinder import exception\n", "from cinder.openstack.common import excutils\n", "from cinder.openstack.common import log as logging\n", "from cinder import utils\n", "from cinder.volume import driver\n", "VAR_0 = logging.getLogger(__name__)\n", "VAR_1 = [cfg.BoolOpt('san_thin_provision', default=True, help=\n 'Use thin provisioning for SAN volumes?'), cfg.StrOpt('san_ip', default\n ='', help='IP address of SAN controller'), cfg.StrOpt('san_login',\n default='admin', help='Username for SAN controller'), cfg.StrOpt(\n 'san_password', default='', help='Password for SAN controller', secret=\n True), cfg.StrOpt('san_private_key', default='', help=\n 'Filename of private key to use for SSH authentication'), cfg.StrOpt(\n 'san_clustername', default='', help=\n 'Cluster name to use for creating volumes'), cfg.IntOpt('san_ssh_port',\n default=22, help='SSH port to use with SAN'), cfg.BoolOpt(\n 'san_is_local', default=False, help=\n 'Execute commands locally instead of over SSH; use if the volume service is running on the SAN device'\n ), cfg.IntOpt('ssh_conn_timeout', default=30, help=\n 'SSH connection timeout in seconds'), cfg.IntOpt('ssh_min_pool_conn',\n default=1, help='Minimum ssh connections in the pool'), cfg.IntOpt(\n 'ssh_max_pool_conn', default=5, help='Maximum ssh connections in the pool')\n ]\n", "VAR_2 = cfg.CONF\n", "VAR_2.register_opts(VAR_1)\n", "\"\"\"string\"\"\"\n", "def __init__(self, *VAR_3, **VAR_4):...\n", "VAR_11 = VAR_4.pop('execute', self.san_execute)\n", "super(CLASS_0, self).__init__(*VAR_3, VAR_11=execute, **kwargs)\n", "self.configuration.append_config_values(VAR_1)\n", "self.run_local = self.configuration.san_is_local\n", "self.sshpool = None\n", "def FUNC_0(self, *VAR_5, **VAR_4):...\n", "if self.run_local:\n", "return utils.execute(*VAR_5, **kwargs)\n", "VAR_7 = VAR_4.pop('check_exit_code', None)\n", "VAR_6 = ' '.join(VAR_5)\n", "return self._run_ssh(VAR_6, VAR_7)\n" ]
[ "\"\"\"\nDefault Driver for san-stored volumes.\n\nThe unique thing about a SAN is that we don't expect that we can run the volume\ncontroller on the SAN hardware. We expect to access it over SSH or some API.\n\"\"\"\n", "import random\n", "from eventlet import greenthread\n", "from oslo.config import cfg\n", "from cinder import exception\n", "from cinder.openstack.common import excutils\n", "from cinder.openstack.common import log as logging\n", "from cinder import utils\n", "from cinder.volume import driver\n", "LOG = logging.getLogger(__name__)\n", "san_opts = [cfg.BoolOpt('san_thin_provision', default=True, help=\n 'Use thin provisioning for SAN volumes?'), cfg.StrOpt('san_ip', default\n ='', help='IP address of SAN controller'), cfg.StrOpt('san_login',\n default='admin', help='Username for SAN controller'), cfg.StrOpt(\n 'san_password', default='', help='Password for SAN controller', secret=\n True), cfg.StrOpt('san_private_key', default='', help=\n 'Filename of private key to use for SSH authentication'), cfg.StrOpt(\n 'san_clustername', default='', help=\n 'Cluster name to use for creating volumes'), cfg.IntOpt('san_ssh_port',\n default=22, help='SSH port to use with SAN'), cfg.BoolOpt(\n 'san_is_local', default=False, help=\n 'Execute commands locally instead of over SSH; use if the volume service is running on the SAN device'\n ), cfg.IntOpt('ssh_conn_timeout', default=30, help=\n 'SSH connection timeout in seconds'), cfg.IntOpt('ssh_min_pool_conn',\n default=1, help='Minimum ssh connections in the pool'), cfg.IntOpt(\n 'ssh_max_pool_conn', default=5, help='Maximum ssh connections in the pool')\n ]\n", "CONF = cfg.CONF\n", "CONF.register_opts(san_opts)\n", "\"\"\"Base class for SAN-style storage volumes\n\n A SAN-style storage value is 'different' because the volume controller\n probably won't run on it, so we need to access is over SSH or another\n remote protocol.\n \"\"\"\n", "def __init__(self, *args, **kwargs):...\n", "execute = kwargs.pop('execute', self.san_execute)\n", "super(SanDriver, self).__init__(*args, execute=execute, **kwargs)\n", "self.configuration.append_config_values(san_opts)\n", "self.run_local = self.configuration.san_is_local\n", "self.sshpool = None\n", "def san_execute(self, *cmd, **kwargs):...\n", "if self.run_local:\n", "return utils.execute(*cmd, **kwargs)\n", "check_exit_code = kwargs.pop('check_exit_code', None)\n", "command = ' '.join(cmd)\n", "return self._run_ssh(command, check_exit_code)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "FunctionDef'", "Condition", "Return'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_11(self):...\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "VAR_1 = {'name': self.volume_name}\n", "self.stubs.Set(self.driver, '_get_iscsi_properties', self.\n _fake_get_iscsi_properties)\n", "self.driver._eql_execute('volume', 'select', VAR_1['name'], 'access',\n 'create', 'initiator', self.connector['initiator'], 'authmethod chap',\n 'username', self.configuration.eqlx_chap_login)\n", "self.mox.ReplayAll()\n", "VAR_8 = self.driver.initialize_connection(VAR_1, self.connector)\n", "self.assertEqual(VAR_8['data'], self._fake_get_iscsi_properties(VAR_1))\n" ]
[ "def test_initialize_connection(self):...\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "volume = {'name': self.volume_name}\n", "self.stubs.Set(self.driver, '_get_iscsi_properties', self.\n _fake_get_iscsi_properties)\n", "self.driver._eql_execute('volume', 'select', volume['name'], 'access',\n 'create', 'initiator', self.connector['initiator'], 'authmethod chap',\n 'username', self.configuration.eqlx_chap_login)\n", "self.mox.ReplayAll()\n", "iscsi_properties = self.driver.initialize_connection(volume, self.connector)\n", "self.assertEqual(iscsi_properties['data'], self._fake_get_iscsi_properties(\n volume))\n" ]
[ 0, 0, 0, 0, 2, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "@VAR_0.route('/search', methods=['POST'])...\n", "VAR_10 = request.form.get('search')\n", "return redirect('/%s' % VAR_10)\n" ]
[ "@app.route('/search', methods=['POST'])...\n", "search = request.form.get('search')\n", "return redirect('/%s' % search)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "import hashlib\n", "from django import forms\n", "from django.contrib.auth import authenticate, login, logout\n", "from django.contrib.auth.forms import UserCreationForm, PasswordChangeForm\n", "from django.contrib.auth.models import User\n", "from django.contrib.auth.decorators import login_required\n", "from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin\n", "from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator\n", "from django.http import HttpResponse\n", "from django.urls import reverse\n", "from django.template import RequestContext\n", "from django.shortcuts import Http404, redirect, render, render_to_response\n", "from django.views.generic import ListView, DetailView\n", "from django.views.generic.edit import FormView, CreateView, DeleteView, UpdateView, FormMixin\n", "from markdown import markdown\n", "from .models import Article, Category, Comment\n", "VAR_1 = Article\n", "VAR_12 = ['title', 'category', 'content']\n", "VAR_1 = Comment\n", "VAR_12 = ['content']\n", "VAR_1 = User\n", "VAR_2 = 'user.html'\n", "def FUNC_1(self, **VAR_3):...\n", "VAR_13 = super().get_context_data(**kwargs)\n", "VAR_13['articles'] = self.object.article_set.all()\n", "VAR_13['form'] = CLASS_1()\n", "return VAR_13\n" ]
[ "import hashlib\n", "from django import forms\n", "from django.contrib.auth import authenticate, login, logout\n", "from django.contrib.auth.forms import UserCreationForm, PasswordChangeForm\n", "from django.contrib.auth.models import User\n", "from django.contrib.auth.decorators import login_required\n", "from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin\n", "from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator\n", "from django.http import HttpResponse\n", "from django.urls import reverse\n", "from django.template import RequestContext\n", "from django.shortcuts import Http404, redirect, render, render_to_response\n", "from django.views.generic import ListView, DetailView\n", "from django.views.generic.edit import FormView, CreateView, DeleteView, UpdateView, FormMixin\n", "from markdown import markdown\n", "from .models import Article, Category, Comment\n", "model = Article\n", "fields = ['title', 'category', 'content']\n", "model = Comment\n", "fields = ['content']\n", "model = User\n", "template_name = 'user.html'\n", "def get_context_data(self, **kwargs):...\n", "context = super().get_context_data(**kwargs)\n", "context['articles'] = self.object.article_set.all()\n", "context['form'] = CommentForm()\n", "return context\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_8(self):...\n", "VAR_13 = self.cnxn.getinfo(pyodbc.SQL_CATALOG_NAME_SEPARATOR)\n", "self.assertTrue(isinstance(VAR_13, str))\n" ]
[ "def test_getinfo_string(self):...\n", "value = self.cnxn.getinfo(pyodbc.SQL_CATALOG_NAME_SEPARATOR)\n", "self.assertTrue(isinstance(value, str))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_13(self, VAR_21):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_21 == '/':\n", "return True\n", "VAR_58 = self.getfile(VAR_21)\n", "VAR_58 = None\n", "if VAR_58 is None or VAR_58 is False:\n", "return False\n", "if VAR_58[VAR_2] == VAR_12:\n", "return True\n", "return False\n" ]
[ "def isdir(self, path):...\n", "\"\"\"docstring\"\"\"\n", "if path == '/':\n", "return True\n", "dir = self.getfile(path)\n", "dir = None\n", "if dir is None or dir is False:\n", "return False\n", "if dir[A_TYPE] == T_DIR:\n", "return True\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_1(self, VAR_0):...\n", "self.__init__(VAR_4=False)\n", "self.name = VAR_0.get('name')\n", "self.vars = VAR_0.get('vars', dict())\n", "self.address = VAR_0.get('address', '')\n", "self._uuid = VAR_0.get('uuid', None)\n", "self.implicit = VAR_0.get('implicit', False)\n", "VAR_8 = VAR_0.get('groups', [])\n", "for group_data in VAR_8:\n", "VAR_10 = Group()\n", "VAR_10.deserialize(group_data)\n", "self.groups.append(VAR_10)\n" ]
[ "def deserialize(self, data):...\n", "self.__init__(gen_uuid=False)\n", "self.name = data.get('name')\n", "self.vars = data.get('vars', dict())\n", "self.address = data.get('address', '')\n", "self._uuid = data.get('uuid', None)\n", "self.implicit = data.get('implicit', False)\n", "groups = data.get('groups', [])\n", "for group_data in groups:\n", "g = Group()\n", "g.deserialize(group_data)\n", "self.groups.append(g)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Expr'", "Expr'" ]
[ "import pytest\n", "import bottle\n", "import webtest\n", "import MySQLdb\n", "import os\n", "from logging import getLogger\n", "from bottle_mysql import Plugin\n", "from video import video_api\n", "from playlist import playlist_api\n", "from database import populate_test_database\n", "VAR_0 = getLogger()\n", "VAR_1 = bottle.default_app()\n", "VAR_2 = Plugin(dbuser=os.environ['USER'], dbpass=os.environ['PASSWORD'],\n dbname='test')\n", "VAR_1.install(VAR_2)\n", "VAR_3 = webtest.TestApp(VAR_1)\n", "def FUNC_0(VAR_4, VAR_5, VAR_6, VAR_7):...\n", "VAR_9 = FUNC_2()\n", "VAR_10 = VAR_9.cursor()\n", "VAR_10.execute('string'.format(VAR_4=playlist_id, VAR_5=title, VAR_6=\n thumbnail, VAR_7=position))\n", "VAR_9.commit()\n", "VAR_9.close()\n", "def FUNC_1(VAR_8):...\n", "VAR_9 = FUNC_2()\n", "VAR_10 = VAR_9.cursor()\n", "VAR_10.execute(\n \"INSERT INTO playlist (name, video_position) VALUES('{name}', 0);\".\n format(VAR_8=name))\n", "VAR_9.commit()\n", "VAR_9.close()\n", "def FUNC_2():...\n", "VAR_9 = MySQLdb.connect('localhost', 'root', os.environ['PASSWORD'], 'test')\n", "return VAR_9\n" ]
[ "import pytest\n", "import bottle\n", "import webtest\n", "import MySQLdb\n", "import os\n", "from logging import getLogger\n", "from bottle_mysql import Plugin\n", "from video import video_api\n", "from playlist import playlist_api\n", "from database import populate_test_database\n", "logger = getLogger()\n", "app = bottle.default_app()\n", "plugin = Plugin(dbuser=os.environ['USER'], dbpass=os.environ['PASSWORD'],\n dbname='test')\n", "app.install(plugin)\n", "test_app = webtest.TestApp(app)\n", "def create_video(playlist_id, title, thumbnail, position):...\n", "db = connect_to_database()\n", "cursor = db.cursor()\n", "cursor.execute(\n \"INSERT INTO video (playlist_id, title, thumbnail, position) VALUES('{playlist_id}', '{title}', '{thumbnail}', '{position}');\"\n .format(playlist_id=playlist_id, title=title, thumbnail=thumbnail,\n position=position))\n", "db.commit()\n", "db.close()\n", "def create_playlist(name):...\n", "db = connect_to_database()\n", "cursor = db.cursor()\n", "cursor.execute(\n \"INSERT INTO playlist (name, video_position) VALUES('{name}', 0);\".\n format(name=name))\n", "db.commit()\n", "db.close()\n", "def connect_to_database():...\n", "db = MySQLdb.connect('localhost', 'root', os.environ['PASSWORD'], 'test')\n", "return db\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_7(self):...\n", "VAR_4 = os.getcwd()\n", "self.assertEqual(os.path.realpath(tempdir1), os.getcwd())\n", "self.assertEqual(os.path.realpath(tempdir2), os.getcwd())\n", "self.assertEqual(os.path.realpath(tempdir1), os.getcwd())\n", "self.assertEqual(os.path.realpath(tempdir1), os.getcwd())\n", "self.assertEqual(VAR_4, os.getcwd())\n", "self.assertEqual(VAR_4, os.getcwd())\n" ]
[ "def test_nested_pushd(self):...\n", "pre_cwd = os.getcwd()\n", "self.assertEqual(os.path.realpath(tempdir1), os.getcwd())\n", "self.assertEqual(os.path.realpath(tempdir2), os.getcwd())\n", "self.assertEqual(os.path.realpath(tempdir1), os.getcwd())\n", "self.assertEqual(os.path.realpath(tempdir1), os.getcwd())\n", "self.assertEqual(pre_cwd, os.getcwd())\n", "self.assertEqual(pre_cwd, os.getcwd())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_14(self, VAR_26: MappingLevel):...\n", "\"\"\"docstring\"\"\"\n", "self.mapping_level = VAR_26.value\n", "db.session.commit()\n" ]
[ "def set_mapping_level(self, level: MappingLevel):...\n", "\"\"\"docstring\"\"\"\n", "self.mapping_level = level.value\n", "db.session.commit()\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "def FUNC_6(self, VAR_44=None):...\n", "if not VAR_101.user_is_loggedin:\n", "if VAR_44 is not None and not valid_password(VAR_101.user, VAR_44):\n", "VAR_101.errors.add(errors.WRONG_PASSWORD)\n" ]
[ "def run(self, password=None):...\n", "if not c.user_is_loggedin:\n", "if password is not None and not valid_password(c.user, password):\n", "c.errors.add(errors.WRONG_PASSWORD)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Expr'" ]
[ "def FUNC_11(self):...\n", "return self.current_user()['first_name']\n" ]
[ "def current_first_name(self):...\n", "return self.current_user()['first_name']\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_26(self):...\n", "return self.account\n" ]
[ "def get_id(self):...\n", "return self.account\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_20(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = {'SEVERITY': {'LOW': 2}, 'CONFIDENCE': {'HIGH': 2}}\n", "self.check_example('imports.py', VAR_2)\n" ]
[ "def test_imports(self):...\n", "\"\"\"docstring\"\"\"\n", "expect = {'SEVERITY': {'LOW': 2}, 'CONFIDENCE': {'HIGH': 2}}\n", "self.check_example('imports.py', expect)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "@staticmethod...\n", "if not VAR_1 or not VAR_7 or not VAR_6:\n", "if not VAR_5:\n", "VAR_5 = CLASS_0.get_report(VAR_1)['analysis']\n", "VAR_16 = VAR_5['behavior']['generic']\n", "VAR_17 = [z for z in VAR_16 if z['pid'] == VAR_6]\n", "if not VAR_17:\n", "VAR_17 = VAR_17[0]\n", "VAR_18 = VAR_17['summary']\n", "if VAR_7 not in VAR_18:\n", "if VAR_3:\n", "VAR_18[VAR_7] = VAR_18[VAR_7][VAR_3:]\n", "if VAR_2:\n", "VAR_18[VAR_7] = VAR_18[VAR_7][:VAR_2]\n", "return VAR_18[VAR_7]\n" ]
[ "@staticmethod...\n", "if not task_id or not watcher or not pid:\n", "if not report:\n", "report = AnalysisController.get_report(task_id)['analysis']\n", "behavior_generic = report['behavior']['generic']\n", "process = [z for z in behavior_generic if z['pid'] == pid]\n", "if not process:\n", "process = process[0]\n", "summary = process['summary']\n", "if watcher not in summary:\n", "if offset:\n", "summary[watcher] = summary[watcher][offset:]\n", "if limit:\n", "summary[watcher] = summary[watcher][:limit]\n", "return summary[watcher]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Return'" ]
[ "@VAR_0.route('/token-login', methods=['POST'])...\n", "current_app.logger.debug('Starting token login')\n", "VAR_20 = current_app.config.get('TOKEN_LOGIN_FAILURE_REDIRECT_URL')\n", "VAR_21 = current_app.config.get('TOKEN_LOGIN_SUCCESS_REDIRECT_URL')\n", "VAR_13 = request.form.get('eppn')\n", "VAR_22 = request.form.get('token')\n", "VAR_23 = request.form.get('nonce')\n", "VAR_24 = request.form.get('ts')\n", "VAR_8 = get_loa(current_app.config.get('AVAILABLE_LOA'), None)\n", "if verify_auth_token(VAR_13=eppn, VAR_22=token, VAR_23=nonce, VAR_24=timestamp\n", "current_app.logger.info('Token login failed, redirecting user to {}'.format\n (VAR_20))\n", "VAR_12 = current_app.central_userdb.get_user_by_eppn(VAR_13)\n", "current_app.logger.error('No user with eduPersonPrincipalName = {} found'.\n format(VAR_13))\n", "return redirect(VAR_20)\n", "if VAR_12.locked_identity.count > 0:\n", "current_app.logger.error(\n 'There are more than one user with eduPersonPrincipalName = {}'.format(\n VAR_13))\n", "current_app.logger.error('Not new user {} tried to log in using token login'\n .format(VAR_12))\n", "VAR_3['eduPersonPrincipalName'] = VAR_12.eppn\n", "return redirect(VAR_20)\n", "VAR_3['user_eppn'] = VAR_12.eppn\n", "VAR_3['eduPersonAssurance'] = VAR_8\n", "VAR_3.persist()\n", "VAR_26 = redirect(VAR_21)\n", "VAR_3.set_cookie(VAR_26)\n", "current_app.logger.info('Successful token login, redirecting user {} to {}'\n .format(VAR_12, VAR_21))\n", "return VAR_26\n" ]
[ "@authn_views.route('/token-login', methods=['POST'])...\n", "current_app.logger.debug('Starting token login')\n", "location_on_fail = current_app.config.get('TOKEN_LOGIN_FAILURE_REDIRECT_URL')\n", "location_on_success = current_app.config.get('TOKEN_LOGIN_SUCCESS_REDIRECT_URL'\n )\n", "eppn = request.form.get('eppn')\n", "token = request.form.get('token')\n", "nonce = request.form.get('nonce')\n", "timestamp = request.form.get('ts')\n", "loa = get_loa(current_app.config.get('AVAILABLE_LOA'), None)\n", "if verify_auth_token(eppn=eppn, token=token, nonce=nonce, timestamp=timestamp):\n", "current_app.logger.info('Token login failed, redirecting user to {}'.format\n (location_on_fail))\n", "user = current_app.central_userdb.get_user_by_eppn(eppn)\n", "current_app.logger.error('No user with eduPersonPrincipalName = {} found'.\n format(eppn))\n", "return redirect(location_on_fail)\n", "if user.locked_identity.count > 0:\n", "current_app.logger.error(\n 'There are more than one user with eduPersonPrincipalName = {}'.format(\n eppn))\n", "current_app.logger.error('Not new user {} tried to log in using token login'\n .format(user))\n", "session['eduPersonPrincipalName'] = user.eppn\n", "return redirect(location_on_fail)\n", "session['user_eppn'] = user.eppn\n", "session['eduPersonAssurance'] = loa\n", "session.persist()\n", "response = redirect(location_on_success)\n", "session.set_cookie(response)\n", "current_app.logger.info('Successful token login, redirecting user {} to {}'\n .format(user, location_on_success))\n", "return response\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Return'", "Condition", "Expr'", "Expr'", "Assign'", "Return'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_1(self) ->str:...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = self.bytes_buffer.getvalue()\n", "return 'Redirect Buffer Error: {}'.format(err)\n", "if VAR_6 is None:\n", "return ''\n", "return VAR_6.decode(self.source_encoding)\n" ]
[ "def read_all(self) ->str:...\n", "\"\"\"docstring\"\"\"\n", "buffered_bytes = self.bytes_buffer.getvalue()\n", "return 'Redirect Buffer Error: {}'.format(err)\n", "if buffered_bytes is None:\n", "return ''\n", "return buffered_bytes.decode(self.source_encoding)\n" ]
[ 0, 0, 0, 0, 0, 0, 6 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'", "Condition", "Return'", "Return'" ]
[ "def __init__(self, VAR_5, VAR_4):...\n", "self.el = VAR_5\n", "self.txt = VAR_4\n" ]
[ "def __init__(self, el, txt):...\n", "self.el = el\n", "self.txt = txt\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "import json\n", "from django.http import JsonResponse\n", "from django.shortcuts import redirect\n", "from django.views.decorators.csrf import csrf_exempt\n", "from django.views.decorators.http import require_http_methods\n", "from cuckoo.common.config import config\n", "from cuckoo.core.submit import SubmitManager\n", "from cuckoo.web.bin.utils import api_post, JsonSerialize, json_error_response\n", "VAR_0 = SubmitManager()\n", "def FUNC_0():...\n", "VAR_1 = config('cuckoo:cuckoo:machinery')\n", "if config('routing:vpn:enabled'):\n", "VAR_4 = config('routing:vpn:vpns')\n", "VAR_4 = []\n", "return {'machine': config('%s:%s:machines' % (VAR_1, VAR_1)), 'package':\n None, 'priority': 2, 'timeout': config('cuckoo:timeouts:default'),\n 'routing': {'route': config('routing:routing:route'), 'inetsim': config\n ('routing:inetsim:enabled'), 'tor': config('routing:tor:enabled'),\n 'vpns': VAR_4}, 'options': {'enable-services': False, 'enforce-timeout':\n False, 'full-memory-dump': config('cuckoo:cuckoo:memory_dump'),\n 'no-injection': False, 'process-memory-dump': True,\n 'simulated-human-interaction': True}}\n" ]
[ "import json\n", "from django.http import JsonResponse\n", "from django.shortcuts import redirect\n", "from django.views.decorators.csrf import csrf_exempt\n", "from django.views.decorators.http import require_http_methods\n", "from cuckoo.common.config import config\n", "from cuckoo.core.submit import SubmitManager\n", "from cuckoo.web.bin.utils import api_post, JsonSerialize, json_error_response\n", "submit_manager = SubmitManager()\n", "def defaults():...\n", "machinery = config('cuckoo:cuckoo:machinery')\n", "if config('routing:vpn:enabled'):\n", "vpns = config('routing:vpn:vpns')\n", "vpns = []\n", "return {'machine': config('%s:%s:machines' % (machinery, machinery)),\n 'package': None, 'priority': 2, 'timeout': config(\n 'cuckoo:timeouts:default'), 'routing': {'route': config(\n 'routing:routing:route'), 'inetsim': config('routing:inetsim:enabled'),\n 'tor': config('routing:tor:enabled'), 'vpns': vpns}, 'options': {\n 'enable-services': False, 'enforce-timeout': False, 'full-memory-dump':\n config('cuckoo:cuckoo:memory_dump'), 'no-injection': False,\n 'process-memory-dump': True, 'simulated-human-interaction': True}}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_9(self):...\n", "for resource in set(resource for VAR_13 in self.rules for resource in\n", "if resource not in '_cores _nodes'.split():\n", "logger.info(resource)\n" ]
[ "def list_resources(self):...\n", "for resource in set(resource for rule in self.rules for resource in rule.\n", "if resource not in '_cores _nodes'.split():\n", "logger.info(resource)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Expr'" ]
[ "def FUNC_0(self):...\n", "for _ in range(3):\n", "VAR_2 = ''.join([chr(random.randint(65, 90)) for _ in range(9)])\n", "VAR_3 = {'first_name': VAR_2, 'second_name': VAR_2, 'login': VAR_2,\n 'password': VAR_2}\n", "print(VAR_3)\n", "VAR_0 = requests.post('http://127.0.0.1:5000/register', json=data)\n", "print(VAR_0.text)\n", "VAR_4 = json.loads(VAR_0.text)\n", "self.assertEqual(VAR_0.status_code, 200)\n", "self.assertEqual(VAR_4['status'], 1)\n", "print('/register test_add_user: {}'.format(VAR_0.text))\n" ]
[ "def test_add_user(self):...\n", "for _ in range(3):\n", "string = ''.join([chr(random.randint(65, 90)) for _ in range(9)])\n", "data = {'first_name': string, 'second_name': string, 'login': string,\n 'password': string}\n", "print(data)\n", "resp = requests.post('http://127.0.0.1:5000/register', json=data)\n", "print(resp.text)\n", "response = json.loads(resp.text)\n", "self.assertEqual(resp.status_code, 200)\n", "self.assertEqual(response['status'], 1)\n", "print('/register test_add_user: {}'.format(resp.text))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self._out\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self._out\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_9(self):...\n", "return combine_vars(self.vars, self.get_magic_vars())\n" ]
[ "def get_vars(self):...\n", "return combine_vars(self.vars, self.get_magic_vars())\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_3(VAR_10, VAR_11):...\n", "VAR_7 = VAR_10.find_where({'session_name': VAR_11})\n", "VAR_7.kill_session()\n" ]
[ "def kill_session_by_name(server, name):...\n", "session = server.find_where({'session_name': name})\n", "session.kill_session()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_13(self, VAR_15, *VAR_9, **VAR_7):...\n", "if self.closed:\n", "if self._adapter is not None:\n", "assert self._session is None\n", "VAR_9 = [VAR_15] + list(VAR_9)\n", "if VAR_7.pop('nodebug', False):\n", "VAR_9.insert(0, '--nodebug')\n", "self._launch(VAR_9, **kwargs)\n", "return self._adapter, self._session\n" ]
[ "def launch_script(self, filename, *argv, **kwargs):...\n", "if self.closed:\n", "if self._adapter is not None:\n", "assert self._session is None\n", "argv = [filename] + list(argv)\n", "if kwargs.pop('nodebug', False):\n", "argv.insert(0, '--nodebug')\n", "self._launch(argv, **kwargs)\n", "return self._adapter, self._session\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Assert'", "Assign'", "Condition", "Expr'", "Expr'", "Return'" ]
[ "@session_manager...\n", "\"\"\"docstring\"\"\"\n", "from invenio.modules.accounts.models import User, UserEXT\n", "from invenio.ext.sqlalchemy import db\n", "from ..handlers import token_session_key\n", "from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound\n", "import json\n", "import requests\n", "VAR_6 = session.get(token_session_key(VAR_2.name) + '_account_info').get(\n 'external_id')\n", "VAR_7 = {'orcid': VAR_6}\n", "VAR_4.remote_account.extra_data = VAR_7\n", "VAR_8 = User.query.join(UserEXT).filter_by(id=orcid, method='orcid').one()\n", "current_app.logger.exception('No user entry in userEXT.')\n", "if VAR_8 and not any([VAR_8.given_names, VAR_8.family_name]):\n", "VAR_9 = 'http://orcid.org/{0}/orcid-bio'.format(VAR_6)\n", "VAR_10 = {'Accept': 'application/orcid+json'}\n", "VAR_11 = requests.get(VAR_9, VAR_10=headers)\n", "VAR_12 = VAR_11.status_code\n", "if VAR_12 == requests.codes.ok:\n", "VAR_13 = json.loads(VAR_11.content)\n", "current_app.logger.exception('Not valid JSON response from ' +\n 'ORCID:\\n {0}'.format(repr(VAR_13)))\n", "VAR_14 = VAR_13['orcid-profile']['orcid-bio']['personal-details']\n", "current_app.logger.exception('Unexpected return format ' +\n 'from ORCID:\\n {0}'.format(repr(VAR_13)))\n", "db.session.add(VAR_8)\n", "return\n", "VAR_8.given_names = VAR_14['given-names']['value']\n", "return\n", "current_user.reload()\n", "VAR_8.family_name = VAR_14['family-name']['value']\n" ]
[ "@session_manager...\n", "\"\"\"docstring\"\"\"\n", "from invenio.modules.accounts.models import User, UserEXT\n", "from invenio.ext.sqlalchemy import db\n", "from ..handlers import token_session_key\n", "from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound\n", "import json\n", "import requests\n", "orcid = session.get(token_session_key(remote.name) + '_account_info').get(\n 'external_id')\n", "extra_data = {'orcid': orcid}\n", "token.remote_account.extra_data = extra_data\n", "user = User.query.join(UserEXT).filter_by(id=orcid, method='orcid').one()\n", "current_app.logger.exception('No user entry in userEXT.')\n", "if user and not any([user.given_names, user.family_name]):\n", "request_url = 'http://orcid.org/{0}/orcid-bio'.format(orcid)\n", "headers = {'Accept': 'application/orcid+json'}\n", "response = requests.get(request_url, headers=headers)\n", "code = response.status_code\n", "if code == requests.codes.ok:\n", "orcid_bio = json.loads(response.content)\n", "current_app.logger.exception('Not valid JSON response from ' +\n 'ORCID:\\n {0}'.format(repr(orcid_bio)))\n", "name = orcid_bio['orcid-profile']['orcid-bio']['personal-details']\n", "current_app.logger.exception('Unexpected return format ' +\n 'from ORCID:\\n {0}'.format(repr(orcid_bio)))\n", "db.session.add(user)\n", "return\n", "user.given_names = name['given-names']['value']\n", "return\n", "current_user.reload()\n", "user.family_name = name['family-name']['value']\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Return'", "Assign'", "Return'", "Expr'", "Assign'" ]
[ "def FUNC_6(self, VAR_7):...\n", "VAR_76 = CLASS_6.run(self, VAR_7)\n", "if VAR_76 and not (VAR_101.user_is_loggedin and VAR_76.can_edit(VAR_101.\n", "abort(403, 'forbidden')\n", "return VAR_76\n" ]
[ "def run(self, param):...\n", "meetup = VMeetup.run(self, param)\n", "if meetup and not (c.user_is_loggedin and meetup.can_edit(c.user, c.\n", "abort(403, 'forbidden')\n", "return meetup\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Expr'", "Return'" ]
[ "def FUNC_6(self):...\n", "if self._IsServerAlive():\n", "self._server_popen.terminate()\n", "utils.RemoveIfExists(self._temp_options_filename)\n" ]
[ "def _ServerCleanup(self):...\n", "if self._IsServerAlive():\n", "self._server_popen.terminate()\n", "utils.RemoveIfExists(self._temp_options_filename)\n" ]
[ 0, 0, 0, 7 ]
[ "FunctionDef'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_12(VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "VAR_12 = VAR_3.user\n", "VAR_38 = [(unicode(year), unicode(year)) for year in UserProfile.VALID_YEARS]\n", "VAR_31 = FUNC_9(VAR_12)\n", "VAR_1.exception('Error fetching order history from Otto.')\n", "VAR_5 = {'auth': {}, 'duplicate_provider': None, 'nav_hidden': True,\n 'fields': {'country': {'options': list(countries)}, 'gender': {\n 'options': [(choice[0], _(choice[1])) for choice in UserProfile.\n GENDER_CHOICES]}, 'language': {'options': released_languages()},\n 'level_of_education': {'options': [(choice[0], _(choice[1])) for choice in\n UserProfile.LEVEL_OF_EDUCATION_CHOICES]}, 'password': {'url': reverse(\n 'password_reset')}, 'year_of_birth': {'options': VAR_38},\n 'preferred_language': {'options': all_languages()}, 'time_zone': {\n 'options': TIME_ZONE_CHOICES}}, 'platform_name': configuration_helpers.\n get_value('PLATFORM_NAME', settings.PLATFORM_NAME),\n 'password_reset_support_link': configuration_helpers.get_value(\n 'PASSWORD_RESET_SUPPORT_LINK', settings.PASSWORD_RESET_SUPPORT_LINK) or\n settings.SUPPORT_SITE_LINK, 'user_accounts_api_url': reverse(\n 'accounts_api', VAR_28={'username': user.username}),\n 'user_preferences_api_url': reverse('preferences_api', VAR_28={\n 'username': user.username}), 'disable_courseware_js': True,\n 'show_program_listing': ProgramsApiConfig.is_enabled(), 'order_history':\n VAR_31}\n", "VAR_31 = []\n", "if third_party_auth.is_enabled():\n", "VAR_5['duplicate_provider'] = pipeline.get_duplicate_provider(messages.\n get_messages(VAR_3))\n", "return VAR_5\n", "VAR_42 = pipeline.get_provider_user_states(VAR_12)\n", "VAR_5['auth']['providers'] = [{'id': state.provider.provider_id, 'name':\n state.provider.name, 'connected': state.has_account, 'connect_url':\n pipeline.get_login_url(state.provider.provider_id, pipeline.\n AUTH_ENTRY_ACCOUNT_SETTINGS, redirect_url=reverse('account_settings')),\n 'accepts_logins': state.provider.accepts_logins, 'disconnect_url':\n pipeline.get_disconnect_url(state.provider.provider_id, state.\n association_id)} for state in VAR_42 if state.provider.\n display_for_login or state.has_account]\n" ]
[ "def account_settings_context(request):...\n", "\"\"\"docstring\"\"\"\n", "user = request.user\n", "year_of_birth_options = [(unicode(year), unicode(year)) for year in\n UserProfile.VALID_YEARS]\n", "user_orders = get_user_orders(user)\n", "log.exception('Error fetching order history from Otto.')\n", "context = {'auth': {}, 'duplicate_provider': None, 'nav_hidden': True,\n 'fields': {'country': {'options': list(countries)}, 'gender': {\n 'options': [(choice[0], _(choice[1])) for choice in UserProfile.\n GENDER_CHOICES]}, 'language': {'options': released_languages()},\n 'level_of_education': {'options': [(choice[0], _(choice[1])) for choice in\n UserProfile.LEVEL_OF_EDUCATION_CHOICES]}, 'password': {'url': reverse(\n 'password_reset')}, 'year_of_birth': {'options': year_of_birth_options},\n 'preferred_language': {'options': all_languages()}, 'time_zone': {\n 'options': TIME_ZONE_CHOICES}}, 'platform_name': configuration_helpers.\n get_value('PLATFORM_NAME', settings.PLATFORM_NAME),\n 'password_reset_support_link': configuration_helpers.get_value(\n 'PASSWORD_RESET_SUPPORT_LINK', settings.PASSWORD_RESET_SUPPORT_LINK) or\n settings.SUPPORT_SITE_LINK, 'user_accounts_api_url': reverse(\n 'accounts_api', kwargs={'username': user.username}),\n 'user_preferences_api_url': reverse('preferences_api', kwargs={\n 'username': user.username}), 'disable_courseware_js': True,\n 'show_program_listing': ProgramsApiConfig.is_enabled(), 'order_history':\n user_orders}\n", "user_orders = []\n", "if third_party_auth.is_enabled():\n", "context['duplicate_provider'] = pipeline.get_duplicate_provider(messages.\n get_messages(request))\n", "return context\n", "auth_states = pipeline.get_provider_user_states(user)\n", "context['auth']['providers'] = [{'id': state.provider.provider_id, 'name':\n state.provider.name, 'connected': state.has_account, 'connect_url':\n pipeline.get_login_url(state.provider.provider_id, pipeline.\n AUTH_ENTRY_ACCOUNT_SETTINGS, redirect_url=reverse('account_settings')),\n 'accepts_logins': state.provider.accepts_logins, 'disconnect_url':\n pipeline.get_disconnect_url(state.provider.provider_id, state.\n association_id)} for state in auth_states if state.provider.\n display_for_login or state.has_account]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Assign'" ]
[ "@VAR_0.route('/disconnect/<remote_app>/')...\n", "\"\"\"docstring\"\"\"\n", "if VAR_1 not in disconnect_handlers:\n", "return abort(404)\n", "return disconnect_handlers[VAR_1]()\n" ]
[ "@blueprint.route('/disconnect/<remote_app>/')...\n", "\"\"\"docstring\"\"\"\n", "if remote_app not in disconnect_handlers:\n", "return abort(404)\n", "return disconnect_handlers[remote_app]()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Return'", "Return'" ]
[ "def FUNC_3(self, VAR_9, VAR_7, *VAR_10):...\n", "if VAR_9 not in self._gates:\n", "VAR_17 = VAR_0[VAR_9](*VAR_10)\n", "if isinstance(VAR_7, int):\n", "VAR_17 | self.reg[VAR_7]\n", "VAR_17 | tuple([self.reg[i] for i in VAR_7])\n" ]
[ "def apply(self, gate_name, wires, *par):...\n", "if gate_name not in self._gates:\n", "gate = operator_map[gate_name](*par)\n", "if isinstance(wires, int):\n", "gate | self.reg[wires]\n", "gate | tuple([self.reg[i] for i in wires])\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_3(self, VAR_2):...\n", "self.reply(VAR_2, \"yeah, right, I'll go and rob a die factory\")\n" ]
[ "def _too_much(self, msg):...\n", "self.reply(msg, \"yeah, right, I'll go and rob a die factory\")\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_0(self, VAR_2, VAR_3, VAR_4, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = []\n", "VAR_7 = self.pool.get('ir.model.data')\n", "VAR_8 = self.pool.get('res.users').browse(VAR_2, VAR_3, VAR_3).company_id.id\n", "VAR_9 = self.pool.get('account.bank.statement')\n", "VAR_10 = self.pool.get('ir.sequence')\n", "VAR_11 = self.pool.get('account.journal')\n", "VAR_2.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % VAR_3)\n", "VAR_12 = map(lambda x1: x1[0], VAR_2.fetchall())\n", "VAR_2.execute('string' % ','.join(map(lambda x: \"'\" + str(x) + \"'\", VAR_12)))\n", "VAR_13 = map(lambda x1: x1[0], VAR_2.fetchall())\n", "for journal in VAR_11.browse(VAR_2, VAR_3, VAR_13):\n", "VAR_4 = VAR_9.search(VAR_2, VAR_3, [('state', '!=', 'confirm'), ('user_id',\n '=', VAR_3), ('journal_id', '=', journal.id)])\n", "VAR_14 = self.pool.get('ir.model.data')\n", "if len(VAR_4):\n", "VAR_15 = VAR_14._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_tree')\n", "VAR_17 = ''\n", "VAR_16 = VAR_14._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_form2')\n", "if journal.sequence_id:\n", "if VAR_15:\n", "VAR_17 = VAR_10.get_id(VAR_2, VAR_3, journal.sequence_id.id)\n", "VAR_17 = VAR_10.get(VAR_2, VAR_3, 'account.bank.statement')\n", "VAR_15 = VAR_14.browse(VAR_2, VAR_3, VAR_15, VAR_5=context).res_id\n", "if VAR_16:\n", "VAR_18 = VAR_9.create(VAR_2, VAR_3, {'journal_id': journal.id, 'company_id':\n VAR_8, 'user_id': VAR_3, 'state': 'open', 'name': VAR_17,\n 'starting_details_ids': VAR_9._get_cash_close_box_lines(VAR_2, VAR_3, [])})\n", "VAR_16 = VAR_14.browse(VAR_2, VAR_3, VAR_16, VAR_5=context).res_id\n", "return {'domain': \"[('state','=','open')]\", 'name': 'Open Statement',\n 'view_type': 'form', 'view_mode': 'tree,form', 'res_model':\n 'account.bank.statement', 'views': [(VAR_15, 'tree'), (VAR_16, 'form')],\n 'type': 'ir.actions.act_window'}\n", "VAR_9.button_open(VAR_2, VAR_3, [VAR_18], VAR_5)\n" ]
[ "def open_statement(self, cr, uid, ids, context):...\n", "\"\"\"docstring\"\"\"\n", "list_statement = []\n", "mod_obj = self.pool.get('ir.model.data')\n", "company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id\n", "statement_obj = self.pool.get('account.bank.statement')\n", "sequence_obj = self.pool.get('ir.sequence')\n", "journal_obj = self.pool.get('account.journal')\n", "cr.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % uid)\n", "j_ids = map(lambda x1: x1[0], cr.fetchall())\n", "cr.execute(\n \"\"\" select id from account_journal\n where auto_cash='True' and type='cash'\n and id in (%s)\"\"\"\n % ','.join(map(lambda x: \"'\" + str(x) + \"'\", j_ids)))\n", "journal_ids = map(lambda x1: x1[0], cr.fetchall())\n", "for journal in journal_obj.browse(cr, uid, journal_ids):\n", "ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id',\n '=', uid), ('journal_id', '=', journal.id)])\n", "data_obj = self.pool.get('ir.model.data')\n", "if len(ids):\n", "id2 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_tree')\n", "number = ''\n", "id3 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_form2')\n", "if journal.sequence_id:\n", "if id2:\n", "number = sequence_obj.get_id(cr, uid, journal.sequence_id.id)\n", "number = sequence_obj.get(cr, uid, 'account.bank.statement')\n", "id2 = data_obj.browse(cr, uid, id2, context=context).res_id\n", "if id3:\n", "statement_id = statement_obj.create(cr, uid, {'journal_id': journal.id,\n 'company_id': company_id, 'user_id': uid, 'state': 'open', 'name':\n number, 'starting_details_ids': statement_obj._get_cash_close_box_lines\n (cr, uid, [])})\n", "id3 = data_obj.browse(cr, uid, id3, context=context).res_id\n", "return {'domain': \"[('state','=','open')]\", 'name': 'Open Statement',\n 'view_type': 'form', 'view_mode': 'tree,form', 'res_model':\n 'account.bank.statement', 'views': [(id2, 'tree'), (id3, 'form')],\n 'type': 'ir.actions.act_window'}\n", "statement_obj.button_open(cr, uid, [statement_id], context)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "For", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Expr'" ]
[ "def __init__(self, VAR_8):...\n", "self.spdx = VAR_8\n", "self.lasttok = None\n", "self.lastid = None\n", "self.lexer = lex.lex(module=self, reflags=re.UNICODE)\n", "self.parser = yacc.yacc(module=self, write_tables=False, debug=False)\n", "self.lines_checked = 0\n", "self.checked = 0\n", "self.spdx_valid = 0\n", "self.spdx_errors = 0\n", "self.curline = 0\n", "self.deepest = 0\n" ]
[ "def __init__(self, spdx):...\n", "self.spdx = spdx\n", "self.lasttok = None\n", "self.lastid = None\n", "self.lexer = lex.lex(module=self, reflags=re.UNICODE)\n", "self.parser = yacc.yacc(module=self, write_tables=False, debug=False)\n", "self.lines_checked = 0\n", "self.checked = 0\n", "self.spdx_valid = 0\n", "self.spdx_errors = 0\n", "self.curline = 0\n", "self.deepest = 0\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "@property...\n", "self.validate()\n", "return self._is_jdk\n" ]
[ "@property...\n", "self.validate()\n", "return self._is_jdk\n" ]
[ 0, 0, 0 ]
[ "Condition", "Expr'", "Return'" ]
[ "def FUNC_0(self, VAR_1, VAR_2=False):...\n", "if not VAR_1.get('database'):\n", "VAR_1['database'] = self._model.sqlDatabaseName()\n" ]
[ "def augmentDatabaseArgs(self, args, pool=False):...\n", "if not args.get('database'):\n", "args['database'] = self._model.sqlDatabaseName()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'" ]
[ "def FUNC_0(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "return '%s_%s' % (cfg['OAUTHCLIENT_SESSION_KEY_PREFIX'], VAR_0)\n" ]
[ "def token_session_key(remote_app):...\n", "\"\"\"docstring\"\"\"\n", "return '%s_%s' % (cfg['OAUTHCLIENT_SESSION_KEY_PREFIX'], remote_app)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "@rest_utils.ajax()...\n", "\"\"\"docstring\"\"\"\n", "VAR_25 = VAR_1.DATA['updated']\n", "VAR_26 = VAR_1.DATA['removed']\n", "if VAR_25:\n", "api.nova.server_metadata_update(VAR_1, VAR_3, VAR_25)\n", "if VAR_26:\n", "api.nova.server_metadata_delete(VAR_1, VAR_3, VAR_26)\n" ]
[ "@rest_utils.ajax()...\n", "\"\"\"docstring\"\"\"\n", "updated = request.DATA['updated']\n", "removed = request.DATA['removed']\n", "if updated:\n", "api.nova.server_metadata_update(request, server_id, updated)\n", "if removed:\n", "api.nova.server_metadata_delete(request, server_id, removed)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Expr'" ]
[ "@VAR_2.route('/ranks')...\n", "if VAR_0 == None:\n", "FUNC_16()\n", "VAR_14 = request.args.get('scene', default='austin')\n", "VAR_19 = request.args.get('date')\n", "if VAR_19 == None:\n", "VAR_8 = (\n \"SELECT distinct date FROM ranks WHERE scene='{}' ORDER BY date DESC LIMIT 1;\"\n .format(VAR_14))\n", "VAR_8 = \"SELECT * FROM ranks WHERE scene = '{}' and date='{}'\".format(VAR_14,\n VAR_19)\n", "VAR_12 = VAR_0.exec(VAR_8)\n", "VAR_12 = VAR_0.exec(VAR_8)\n", "VAR_19 = VAR_12[0][0]\n", "VAR_20 = {}\n", "for r in VAR_12:\n", "VAR_5 = r[1]\n", "VAR_21, VAR_22, VAR_23 = VAR_19.split('-')\n", "VAR_13 = r[2]\n", "VAR_24 = bracket_utils.get_previous_month(VAR_19)\n", "VAR_20[VAR_5] = VAR_13\n", "VAR_8 = \"SELECT * FROM ranks WHERE scene = '{}' and date='{}'\".format(VAR_14,\n VAR_24)\n", "VAR_12 = VAR_0.exec(VAR_8)\n", "VAR_25 = {}\n", "for r in VAR_12:\n", "VAR_5 = r[1]\n", "return render_template('libraries/html/ranks.html', VAR_20=cur_ranks,\n VAR_25=prev_ranks, VAR_14=scene, VAR_19=date)\n", "VAR_13 = r[2]\n", "VAR_25[VAR_5] = VAR_13\n" ]
[ "@endpoints.route('/ranks')...\n", "if db == None:\n", "init()\n", "scene = request.args.get('scene', default='austin')\n", "date = request.args.get('date')\n", "if date == None:\n", "sql = (\n \"SELECT distinct date FROM ranks WHERE scene='{}' ORDER BY date DESC LIMIT 1;\"\n .format(scene))\n", "sql = \"SELECT * FROM ranks WHERE scene = '{}' and date='{}'\".format(scene, date\n )\n", "res = db.exec(sql)\n", "res = db.exec(sql)\n", "date = res[0][0]\n", "cur_ranks = {}\n", "for r in res:\n", "tag = r[1]\n", "y, m, d = date.split('-')\n", "rank = r[2]\n", "prev_date = bracket_utils.get_previous_month(date)\n", "cur_ranks[tag] = rank\n", "sql = \"SELECT * FROM ranks WHERE scene = '{}' and date='{}'\".format(scene,\n prev_date)\n", "res = db.exec(sql)\n", "prev_ranks = {}\n", "for r in res:\n", "tag = r[1]\n", "return render_template('libraries/html/ranks.html', cur_ranks=cur_ranks,\n prev_ranks=prev_ranks, scene=scene, date=date)\n", "rank = r[2]\n", "prev_ranks[tag] = rank\n" ]
[ 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Assign'", "Assign'" ]
[ "def FUNC_5():...\n", "if VAR_11 and VAR_11.is_alive():\n", "return\n", "VAR_10, VAR_19 = multiprocessing.Pipe()\n", "VAR_11 = multiprocessing.Process(target=redditUserImageScraper.\n runLikedSavedDownloader, args=(childConnection,))\n", "VAR_11.start()\n" ]
[ "def startScript():...\n", "if scriptProcess and scriptProcess.is_alive():\n", "return\n", "scriptPipeConnection, childConnection = multiprocessing.Pipe()\n", "scriptProcess = multiprocessing.Process(target=redditUserImageScraper.\n runLikedSavedDownloader, args=(childConnection,))\n", "scriptProcess.start()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_1(VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "VAR_5 = re.findall('[^a-zA-Z0-9:_-]', VAR_1)\n", "if len(VAR_5) > 0:\n", "VAR_0.info('Command: {0} contains invalid characters: {1}'.format(VAR_1, VAR_5)\n )\n", "return True\n", "return False\n" ]
[ "def _valid_command(string):...\n", "\"\"\"docstring\"\"\"\n", "invalid_characters = re.findall('[^a-zA-Z0-9:_-]', string)\n", "if len(invalid_characters) > 0:\n", "log.info('Command: {0} contains invalid characters: {1}'.format(string,\n invalid_characters))\n", "return True\n", "return False\n" ]
[ 0, 0, 2, 2, 2, 2, 2 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Expr'", "Return'", "Return'" ]
[ "def FUNC_7(self):...\n", "return \"\"\"Username: %s\nPassword: %s\n\"\"\" % (self.username, self.password\n ) + CLASS_1.specific_info(self)\n" ]
[ "def specific_info(self):...\n", "return \"\"\"Username: %s\nPassword: %s\n\"\"\" % (self.username, self.password\n ) + GenericRequest.specific_info(self)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(self, VAR_2, VAR_3, VAR_4=False):...\n", "VAR_6 = os.path.join(self.namespace, VAR_2)\n", "self.etcd.write(VAR_6, VAR_3, prevExist=replace)\n", "FUNC_0('Error storing key %s: [%r]' % (VAR_2, repr(err)))\n" ]
[ "def set(self, key, value, replace=False):...\n", "path = os.path.join(self.namespace, key)\n", "self.etcd.write(path, value, prevExist=replace)\n", "log_error('Error storing key %s: [%r]' % (key, repr(err)))\n" ]
[ 0, 1, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "@utils.synchronized('3par', external=True)...\n", "self.common.client_login()\n", "self.common.delete_snapshot(VAR_9)\n", "self.common.client_logout()\n" ]
[ "@utils.synchronized('3par', external=True)...\n", "self.common.client_login()\n", "self.common.delete_snapshot(snapshot)\n", "self.common.client_logout()\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_3(self):...\n", "self.assertTrue(self.client.login(VAR_1='autotest-1', VAR_2='password'))\n", "VAR_6 = self.client.get('/api/apps')\n", "VAR_8 = VAR_6.data['results'][0]['id']\n", "VAR_5 = '/api/apps/{}/perms'.format(VAR_8)\n", "VAR_7 = {'username': 'autotest-2'}\n", "VAR_6 = self.client.post(VAR_5, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_6.status_code, 201)\n", "self.assertTrue(self.client.login(VAR_1='autotest-2', VAR_2='password'))\n", "VAR_6 = self.client.get('/api/apps')\n", "self.assertEqual(VAR_6.status_code, 200)\n", "self.assertEqual(len(VAR_6.data['results']), 2)\n", "VAR_5 = '/api/apps/{}/perms/{}'.format(VAR_8, 'autotest-2')\n", "VAR_6 = self.client.delete(VAR_5, content_type='application/json')\n", "self.assertEqual(VAR_6.status_code, 403)\n", "self.assertIsNone(VAR_6.data)\n", "self.assertTrue(self.client.login(VAR_1='autotest-1', VAR_2='password'))\n", "VAR_6 = self.client.delete(VAR_5, content_type='application/json')\n", "self.assertEqual(VAR_6.status_code, 204)\n", "self.assertIsNone(VAR_6.data)\n", "self.assertTrue(self.client.login(VAR_1='autotest-2', VAR_2='password'))\n", "VAR_6 = self.client.get('/api/apps')\n", "self.assertEqual(len(VAR_6.data['results']), 1)\n", "self.assertTrue(self.client.login(VAR_1='autotest-1', VAR_2='password'))\n", "VAR_6 = self.client.delete(VAR_5, content_type='application/json')\n", "self.assertEqual(VAR_6.status_code, 404)\n" ]
[ "def test_delete(self):...\n", "self.assertTrue(self.client.login(username='autotest-1', password='password'))\n", "response = self.client.get('/api/apps')\n", "app_id = response.data['results'][0]['id']\n", "url = '/api/apps/{}/perms'.format(app_id)\n", "body = {'username': 'autotest-2'}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "self.assertTrue(self.client.login(username='autotest-2', password='password'))\n", "response = self.client.get('/api/apps')\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 2)\n", "url = '/api/apps/{}/perms/{}'.format(app_id, 'autotest-2')\n", "response = self.client.delete(url, content_type='application/json')\n", "self.assertEqual(response.status_code, 403)\n", "self.assertIsNone(response.data)\n", "self.assertTrue(self.client.login(username='autotest-1', password='password'))\n", "response = self.client.delete(url, content_type='application/json')\n", "self.assertEqual(response.status_code, 204)\n", "self.assertIsNone(response.data)\n", "self.assertTrue(self.client.login(username='autotest-2', password='password'))\n", "response = self.client.get('/api/apps')\n", "self.assertEqual(len(response.data['results']), 1)\n", "self.assertTrue(self.client.login(username='autotest-1', password='password'))\n", "response = self.client.delete(url, content_type='application/json')\n", "self.assertEqual(response.status_code, 404)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "@Throttle(VAR_4)...\n", "\"\"\"docstring\"\"\"\n", "import pyatmo\n", "self.station_data = pyatmo.WeatherStationData(self.auth)\n", "if self.station is not None:\n", "self.data = self.station_data.lastData(VAR_19=self.station, exclude=3600)\n", "self.data = self.station_data.lastData(exclude=3600)\n" ]
[ "@Throttle(MIN_TIME_BETWEEN_UPDATES)...\n", "\"\"\"docstring\"\"\"\n", "import pyatmo\n", "self.station_data = pyatmo.WeatherStationData(self.auth)\n", "if self.station is not None:\n", "self.data = self.station_data.lastData(station=self.station, exclude=3600)\n", "self.data = self.station_data.lastData(exclude=3600)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Import'", "Assign'", "Condition", "Assign'", "Assign'" ]
[ "def FUNC_22(self):...\n", "return self._email\n" ]
[ "def nickname(self):...\n", "return self._email\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_1(VAR_0):...\n", "return base64.b64decode(binascii.unhexlify(VAR_0.encode('utf-8'))).decode(\n 'utf-8')\n" ]
[ "def decode_hexed_b64_to_str(data):...\n", "return base64.b64decode(binascii.unhexlify(data.encode('utf-8'))).decode(\n 'utf-8')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_11(self):...\n", "VAR_9 = []\n", "VAR_10 = {}\n", "for kid in self.child_groups:\n", "VAR_13 = kid.get_hosts()\n", "for VAR_14 in self.hosts:\n", "for VAR_15 in VAR_13:\n", "if VAR_14 not in VAR_10:\n", "return VAR_9\n", "if VAR_15 not in VAR_10:\n", "VAR_10[VAR_14] = 1\n", "VAR_10[VAR_15] = 1\n", "if self.name == 'all' and VAR_14.implicit:\n", "if self.name == 'all' and VAR_15.implicit:\n", "VAR_9.append(VAR_14)\n", "VAR_9.append(VAR_15)\n" ]
[ "def _get_hosts(self):...\n", "hosts = []\n", "seen = {}\n", "for kid in self.child_groups:\n", "kid_hosts = kid.get_hosts()\n", "for mine in self.hosts:\n", "for kk in kid_hosts:\n", "if mine not in seen:\n", "return hosts\n", "if kk not in seen:\n", "seen[mine] = 1\n", "seen[kk] = 1\n", "if self.name == 'all' and mine.implicit:\n", "if self.name == 'all' and kk.implicit:\n", "hosts.append(mine)\n", "hosts.append(kk)\n" ]
[ 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Assign'", "For", "For", "Condition", "Return'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Expr'" ]
[ "\"\"\"\nAll queries and commands that the user makes in the retail application\n\"\"\"\n", "import psycopg2\n", "VAR_0 = 1\n", "def __init__(self, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5=5432):...\n", "self.db_conn = psycopg2.connect(VAR_4=host, VAR_1=dbname, VAR_2=user, VAR_3\n =password, VAR_5=port)\n", "self.cursor = self.db_conn.cursor()\n", "def FUNC_0(self):...\n", "self.db_conn.commit()\n", "def __del__(self):...\n", "self.cursor.close()\n", "self.db_conn.close()\n", "def FUNC_1(self, VAR_6):...\n", "self.customer_id = VAR_6\n", "def FUNC_2(self, VAR_7):...\n", "self.cursor.execute(\n \"SELECT upc, name, weight, description FROM product WHERE id = '%s';\",\n (VAR_7,))\n", "return self.cursor.fetchall()\n" ]
[ "\"\"\"\nAll queries and commands that the user makes in the retail application\n\"\"\"\n", "import psycopg2\n", "ONLINE_STORE_ID = 1\n", "def __init__(self, dbname, user, password, host, port=5432):...\n", "self.db_conn = psycopg2.connect(host=host, dbname=dbname, user=user,\n password=password, port=port)\n", "self.cursor = self.db_conn.cursor()\n", "def commit(self):...\n", "self.db_conn.commit()\n", "def __del__(self):...\n", "self.cursor.close()\n", "self.db_conn.close()\n", "def login(self, customer_id):...\n", "self.customer_id = customer_id\n", "def find_product_by_id(self, product_id):...\n", "self.cursor.execute(\n \"SELECT upc, name, weight, description FROM product WHERE id = '%s';\",\n (product_id,))\n", "return self.cursor.fetchall()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "FunctionDef'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "FunctionDef'", "Expr'", "Return'" ]