lines: sequencelengths [1, 383]
raw_lines: sequencelengths [1, 383]
label: sequencelengths [1, 383]
type: sequencelengths [1, 383]
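The four features above are parallel, per-record arrays: for every sample, `lines` holds an identifier-anonymized rendering of the code (`VAR_n`/`FUNC_n` placeholders, docstrings collapsed to `"""docstring"""`), `raw_lines` the original source, `label` a per-line integer label, and `type` a per-line AST node category. A minimal loading sketch, assuming the dump is stored as JSON-lines with these four keys (the file name is a placeholder, not part of the dataset):

import json

# Hypothetical dump file; each record carries four equal-length arrays.
with open("samples.jsonl") as fh:
    for raw in fh:
        record = json.loads(raw)
        lines, raw_lines = record["lines"], record["raw_lines"]
        labels, types = record["label"], record["type"]
        # The arrays align index-by-index: lines[i] is the anonymized
        # form of raw_lines[i], labels[i] its per-line label, and
        # types[i] its AST node kind.
        assert len(lines) == len(raw_lines) == len(labels) == len(types)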
[ "@VAR_0.route('/bGVhdmVfcmlnaHRfbm93', methods=['POST'])...\n", "if not request.json or 'image' not in request.json:\n", "print('No data sent or no image provided. Aborting with 400.')\n", "VAR_13 = request.json['image']\n", "abort(400)\n", "VAR_14 = base64.b64decode(VAR_13.encode('utf-8'))\n", "VAR_14, VAR_15 = FUNC_1(VAR_14)\n", "if not VAR_15:\n", "return escape({'entry': 'False'})\n", "VAR_16 = Image.open(io.BytesIO(VAR_14))\n", "VAR_17 = VAR_16.format\n", "print(f'File has filetype {VAR_17}.')\n", "if VAR_17 == 'JPEG':\n", "VAR_17 = '.jpg'\n", "VAR_17 = '.png'\n", "VAR_18 = 100000000\n", "VAR_19 = 999999999\n", "VAR_20 = None\n", "VAR_3 = open('all_files', 'r')\n", "VAR_21 = ast.literal_eval(VAR_3.read())\n", "VAR_3.close()\n", "VAR_22 = 0\n", "while VAR_20 is None or VAR_20 in VAR_21:\n", "if VAR_22 <= 1000:\n", "print(f'Successful file name: {VAR_20}')\n", "VAR_20 = random.randint(VAR_18, VAR_19)\n", "VAR_22 = 0\n", "VAR_23 = request.json['title']\n", "VAR_20 = base64.b64encode(str(VAR_20).encode('utf-8')).decode('utf-8')\n", "VAR_18 += 100000\n", "if VAR_23[:9] == '[PAUSED] ':\n", "print(f'Trying new file name: {VAR_20}')\n", "VAR_19 += 1000000\n", "VAR_23 = VAR_23[9:]\n", "VAR_24 = request.json['singer']\n", "while VAR_18 >= VAR_19:\n", "VAR_25 = request.json['album']\n", "VAR_18 -= 10000\n", "VAR_18 -= 10000\n", "VAR_26 = [{'title': VAR_23, 'singer': VAR_24, 'album': VAR_25}, VAR_20, VAR_17]\n", "print(f'New db entry: {VAR_26}')\n", "VAR_21.append(VAR_26)\n", "VAR_27, VAR_11, VAR_28 = FUNC_0()\n", "VAR_29 = len(VAR_21)\n", "while VAR_29 > VAR_27:\n", "VAR_31 = VAR_21[0][1] + VAR_21[0][2]\n", "VAR_3 = open('all_files', 'w')\n", "remove(VAR_31)\n", "VAR_3.write(str(VAR_21))\n", "VAR_29 = len(VAR_21)\n", "VAR_3.close()\n", "VAR_20 = VAR_20 + VAR_17\n", "VAR_16.save(VAR_20)\n", "print(f'Saved {VAR_20} from {VAR_26}.')\n", "print(f'Returning {VAR_26}.')\n", "return escape(str({'entry': VAR_26}))\n" ]
[ "@app.route('/bGVhdmVfcmlnaHRfbm93', methods=['POST'])...\n", "if not request.json or 'image' not in request.json:\n", "print('No data sent or no image provided. Aborting with 400.')\n", "im_b64 = request.json['image']\n", "abort(400)\n", "img_bytes = base64.b64decode(im_b64.encode('utf-8'))\n", "img_bytes, valid = allowed_file(img_bytes)\n", "if not valid:\n", "return escape({'entry': 'False'})\n", "img = Image.open(io.BytesIO(img_bytes))\n", "file_ending = img.format\n", "print(f'File has filetype {file_ending}.')\n", "if file_ending == 'JPEG':\n", "file_ending = '.jpg'\n", "file_ending = '.png'\n", "one_hundred_million = 100000000\n", "lots_of_nine = 999999999\n", "file_name = None\n", "f = open('all_files', 'r')\n", "all_files = ast.literal_eval(f.read())\n", "f.close()\n", "attempt = 0\n", "while file_name is None or file_name in all_files:\n", "if attempt <= 1000:\n", "print(f'Successful file name: {file_name}')\n", "file_name = random.randint(one_hundred_million, lots_of_nine)\n", "attempt = 0\n", "title = request.json['title']\n", "file_name = base64.b64encode(str(file_name).encode('utf-8')).decode('utf-8')\n", "one_hundred_million += 100000\n", "if title[:9] == '[PAUSED] ':\n", "print(f'Trying new file name: {file_name}')\n", "lots_of_nine += 1000000\n", "title = title[9:]\n", "singer = request.json['singer']\n", "while one_hundred_million >= lots_of_nine:\n", "album = request.json['album']\n", "one_hundred_million -= 10000\n", "one_hundred_million -= 10000\n", "file_db_entry = [{'title': title, 'singer': singer, 'album': album},\n file_name, file_ending]\n", "print(f'New db entry: {file_db_entry}')\n", "all_files.append(file_db_entry)\n", "cache, x, y = get_config()\n", "length = len(all_files)\n", "while length > cache:\n", "filename = all_files[0][1] + all_files[0][2]\n", "f = open('all_files', 'w')\n", "remove(filename)\n", "f.write(str(all_files))\n", "length = len(all_files)\n", "f.close()\n", "file_name = file_name + file_ending\n", "img.save(file_name)\n", "print(f'Saved {file_name} from {file_db_entry}.')\n", "print(f'Returning {file_db_entry}.')\n", "return escape(str({'entry': file_db_entry}))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "AugAssign'", "Condition", "Expr'", "AugAssign'", "Assign'", "Assign'", "Condition", "Assign'", "AugAssign'", "AugAssign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "\"\"\"string\"\"\"\n", "import base64\n", "from functools import reduce\n", "from gluon._compat import pickle, thread, urllib2, Cookie, StringIO, urlencode\n", "from gluon._compat import configparser, MIMEBase, MIMEMultipart, MIMEText, Header\n", "from gluon._compat import Encoders, Charset, long, urllib_quote, iteritems\n", "from gluon._compat import to_bytes, to_native, add_charset, string_types\n", "from gluon._compat import charset_QP, basestring, unicodeT, to_unicode\n", "from gluon._compat import urllib2, urlopen\n", "import datetime\n", "import logging\n", "import sys\n", "import glob\n", "import os\n", "import re\n", "import time\n", "import fnmatch\n", "import traceback\n", "import smtplib\n", "import email.utils\n", "import random\n", "import hmac\n", "import hashlib\n", "import json\n", "from email import message_from_string\n", "from gluon.authapi import AuthAPI\n", "from gluon.contenttype import contenttype\n", "from gluon.storage import Storage, StorageList, Settings, Messages\n", "from gluon.utils import web2py_uuid, compare\n", "from gluon.fileutils import read_file, check_credentials\n", "from gluon import *\n", "from gluon.contrib.autolinks import expand_one\n", "from gluon.contrib.markmin.markmin2html import replace_at_urls\n", "from gluon.contrib.markmin.markmin2html import replace_autolinks\n", "from gluon.contrib.markmin.markmin2html import replace_components\n", "from pydal.objects import Row, Set, Query\n", "import gluon.serializers as serializers\n", "VAR_0 = DAL.Table\n", "VAR_1 = DAL.Field\n", "__all__ = ['Mail', 'Auth', 'Recaptcha2', 'Crud', 'Service', 'Wiki',\n 'PluginManager', 'fetch', 'geocode', 'reverse_geocode', 'prettydate']\n", "VAR_2 = logging.getLogger('web2py')\n", "VAR_3 = lambda : None\n", "def FUNC_0(VAR_4, VAR_5=None):...\n", "VAR_11 = VAR_263.request.args\n", "if VAR_4 < 0 and len(VAR_11) >= -VAR_4:\n", "return VAR_11[VAR_4]\n", "if VAR_4 >= 0 and len(VAR_11) > VAR_4:\n", "return VAR_11[VAR_4]\n", "return VAR_5\n" ]
[ "\"\"\"\n| This file is part of the web2py Web Framework\n| Copyrighted by Massimo Di Pierro <[email protected]>\n| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)\n\nAuth, Mail, PluginManager and various utilities\n------------------------------------------------\n\"\"\"\n", "import base64\n", "from functools import reduce\n", "from gluon._compat import pickle, thread, urllib2, Cookie, StringIO, urlencode\n", "from gluon._compat import configparser, MIMEBase, MIMEMultipart, MIMEText, Header\n", "from gluon._compat import Encoders, Charset, long, urllib_quote, iteritems\n", "from gluon._compat import to_bytes, to_native, add_charset, string_types\n", "from gluon._compat import charset_QP, basestring, unicodeT, to_unicode\n", "from gluon._compat import urllib2, urlopen\n", "import datetime\n", "import logging\n", "import sys\n", "import glob\n", "import os\n", "import re\n", "import time\n", "import fnmatch\n", "import traceback\n", "import smtplib\n", "import email.utils\n", "import random\n", "import hmac\n", "import hashlib\n", "import json\n", "from email import message_from_string\n", "from gluon.authapi import AuthAPI\n", "from gluon.contenttype import contenttype\n", "from gluon.storage import Storage, StorageList, Settings, Messages\n", "from gluon.utils import web2py_uuid, compare\n", "from gluon.fileutils import read_file, check_credentials\n", "from gluon import *\n", "from gluon.contrib.autolinks import expand_one\n", "from gluon.contrib.markmin.markmin2html import replace_at_urls\n", "from gluon.contrib.markmin.markmin2html import replace_autolinks\n", "from gluon.contrib.markmin.markmin2html import replace_components\n", "from pydal.objects import Row, Set, Query\n", "import gluon.serializers as serializers\n", "Table = DAL.Table\n", "Field = DAL.Field\n", "__all__ = ['Mail', 'Auth', 'Recaptcha2', 'Crud', 'Service', 'Wiki',\n 'PluginManager', 'fetch', 'geocode', 'reverse_geocode', 'prettydate']\n", "logger = logging.getLogger('web2py')\n", "DEFAULT = lambda : None\n", "def getarg(position, default=None):...\n", "args = current.request.args\n", "if position < 0 and len(args) >= -position:\n", "return args[position]\n", "if position >= 0 and len(args) > position:\n", "return args[position]\n", "return default\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_60(self):...\n", "\"\"\"docstring\"\"\"\n", "return self._blitzcon.getUserId()\n" ]
[ "def getId(self):...\n", "\"\"\"docstring\"\"\"\n", "return self._blitzcon.getUserId()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "@pytest.fixture...\n", "VAR_3 = tempfile.mkdtemp()\n", "VAR_4 = os.path.join(VAR_3, 'yet_another_%s.txt' % VAR_0.node.name)\n", "f.write(VAR_0.node.name)\n", "return VAR_4\n" ]
[ "@pytest.fixture...\n", "path = tempfile.mkdtemp()\n", "file_name = os.path.join(path, 'yet_another_%s.txt' % request.node.name)\n", "f.write(request.node.name)\n", "return file_name\n" ]
[ 1, 1, 1, 0, 1 ]
[ "Condition", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "async def FUNC_12(self, VAR_23: str, VAR_16: Optional[QueryParams]=None,...\n", "\"\"\"docstring\"\"\"\n", "if VAR_16:\n", "VAR_39 = urllib.parse.urlencode(VAR_16, True)\n", "VAR_46 = {b'User-Agent': [self.user_agent]}\n", "VAR_23 = '%s?%s' % (VAR_23, VAR_39)\n", "if VAR_24:\n", "VAR_46.update(VAR_24)\n", "VAR_13 = await self.request('GET', VAR_23, VAR_24=Headers(actual_headers))\n", "VAR_47 = await make_deferred_yieldable(readBody(VAR_13))\n", "if 200 <= VAR_13.code < 300:\n", "return VAR_47\n" ]
[ "async def get_raw(self, uri: str, args: Optional[QueryParams]=None, headers:...\n", "\"\"\"docstring\"\"\"\n", "if args:\n", "query_str = urllib.parse.urlencode(args, True)\n", "actual_headers = {b'User-Agent': [self.user_agent]}\n", "uri = '%s?%s' % (uri, query_str)\n", "if headers:\n", "actual_headers.update(headers)\n", "response = await self.request('GET', uri, headers=Headers(actual_headers))\n", "body = await make_deferred_yieldable(readBody(response))\n", "if 200 <= response.code < 300:\n", "return body\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Condition", "Return'" ]
[ "def __call__(self, VAR_9):...\n", "if not re.compile('^\\\\w+$').match(VAR_9):\n", "return VAR_9, T('Invalid application name')\n", "if not request.vars.overwrite and os.path.exists(os.path.join(apath(VAR_122\n", "return VAR_9, T('Application exists already')\n", "return VAR_9, None\n" ]
[ "def __call__(self, value):...\n", "if not re.compile('^\\\\w+$').match(value):\n", "return value, T('Invalid application name')\n", "if not request.vars.overwrite and os.path.exists(os.path.join(apath(r=\n", "return value, T('Application exists already')\n", "return value, None\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "VAR_19 = VAR_2.session['connector'].server_id\n", "VAR_136 = {}\n", "VAR_1.debug(traceback.format_exc())\n", "VAR_136['format'] = 'video/' + VAR_2.GET.get('format', 'quicktime')\n", "VAR_136['fps'] = int(VAR_2.GET.get('fps', 4))\n", "VAR_136['minsize'] = 512, 512, 'Black'\n", "VAR_210 = '.avi'\n", "VAR_79 = '%s-%s-%s-%d-%s-%s' % (VAR_6, VAR_26, VAR_27, VAR_136['fps'],\n FUNC_11(VAR_2), VAR_2.GET.get('format', 'quicktime'))\n", "VAR_27 = int(VAR_27)\n", "VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_19=server_id, VAR_8=conn)\n", "if VAR_64 is None:\n", "VAR_92, VAR_16 = VAR_64\n", "VAR_206, VAR_207, VAR_208 = webgateway_tempfile.new(VAR_92.getName() +\n VAR_210, VAR_79=key)\n", "VAR_1.debug(VAR_206, VAR_207, VAR_208)\n", "if VAR_208 is True:\n", "return HttpResponseRedirect(settings.STATIC_URL + 'webgateway/tfiles/' +\n VAR_207)\n", "if 'optsCB' in VAR_9:\n", "VAR_136.update(VAR_9['optsCB'](VAR_92))\n", "VAR_136.update(VAR_9.get('opts', {}))\n", "VAR_1.debug('rendering movie for img %s with axis %s, pos %i and opts %s' %\n (VAR_6, VAR_26, VAR_27, VAR_136))\n", "if VAR_206 is None:\n", "VAR_276, VAR_277 = tempfile.mkstemp()\n", "VAR_277 = VAR_206\n", "if VAR_26.lower() == 'z':\n", "VAR_278, VAR_279 = VAR_92.createMovie(VAR_277, 0, VAR_92.getSizeZ() - 1, \n VAR_27 - 1, VAR_27 - 1, VAR_136)\n", "VAR_278, VAR_279 = VAR_92.createMovie(VAR_277, VAR_27 - 1, VAR_27 - 1, 0, \n VAR_92.getSizeT() - 1, VAR_136)\n", "if VAR_278 is None and VAR_279 is None:\n", "if VAR_206 is None:\n", "VAR_280 = open(VAR_277).read()\n", "VAR_208.close()\n", "os.close(VAR_276)\n", "return HttpResponseRedirect(settings.STATIC_URL + 'webgateway/tfiles/' +\n VAR_207)\n", "VAR_61 = HttpResponse(VAR_280, content_type=mimetype)\n", "VAR_61['Content-Disposition'] = 'attachment; filename=\"%s\"' % (VAR_92.\n getName() + VAR_210)\n", "VAR_61['Content-Length'] = len(VAR_280)\n", "return VAR_61\n" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "server_id = request.session['connector'].server_id\n", "opts = {}\n", "logger.debug(traceback.format_exc())\n", "opts['format'] = 'video/' + request.GET.get('format', 'quicktime')\n", "opts['fps'] = int(request.GET.get('fps', 4))\n", "opts['minsize'] = 512, 512, 'Black'\n", "ext = '.avi'\n", "key = '%s-%s-%s-%d-%s-%s' % (iid, axis, pos, opts['fps'],\n _get_signature_from_request(request), request.GET.get('format',\n 'quicktime'))\n", "pos = int(pos)\n", "pi = _get_prepared_image(request, iid, server_id=server_id, conn=conn)\n", "if pi is None:\n", "img, compress_quality = pi\n", "fpath, rpath, fobj = webgateway_tempfile.new(img.getName() + ext, key=key)\n", "logger.debug(fpath, rpath, fobj)\n", "if fobj is True:\n", "return HttpResponseRedirect(settings.STATIC_URL + 'webgateway/tfiles/' + rpath)\n", "if 'optsCB' in kwargs:\n", "opts.update(kwargs['optsCB'](img))\n", "opts.update(kwargs.get('opts', {}))\n", "logger.debug('rendering movie for img %s with axis %s, pos %i and opts %s' %\n (iid, axis, pos, opts))\n", "if fpath is None:\n", "fo, fn = tempfile.mkstemp()\n", "fn = fpath\n", "if axis.lower() == 'z':\n", "dext, mimetype = img.createMovie(fn, 0, img.getSizeZ() - 1, pos - 1, pos - \n 1, opts)\n", "dext, mimetype = img.createMovie(fn, pos - 1, pos - 1, 0, img.getSizeT() - \n 1, opts)\n", "if dext is None and mimetype is None:\n", "if fpath is None:\n", "movie = open(fn).read()\n", "fobj.close()\n", "os.close(fo)\n", "return HttpResponseRedirect(settings.STATIC_URL + 'webgateway/tfiles/' + rpath)\n", "rsp = HttpResponse(movie, content_type=mimetype)\n", "rsp['Content-Disposition'] = 'attachment; filename=\"%s\"' % (img.getName() + ext\n )\n", "rsp['Content-Length'] = len(movie)\n", "return rsp\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Return'", "Condition", "Expr'", "Expr'", "Expr'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Expr'", "Return'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_18(self, VAR_81):...\n", "VAR_84 = to_bytes(self.secret_key)\n", "if self.salt:\n", "if callable(self.salt):\n", "VAR_235 = self.cached_b64h\n", "VAR_84 = '%s$%s' % (VAR_84, self.salt(VAR_81))\n", "VAR_84 = '%s$%s' % (VAR_84, self.salt)\n", "VAR_236 = self.jwt_b64e(serializers.json(VAR_81))\n", "if isinstance(VAR_84, unicodeT):\n", "VAR_237 = VAR_235 + b'.' + VAR_236\n", "VAR_84 = VAR_84.encode('ascii', 'ignore')\n", "VAR_238 = hmac.new(VAR_199=secret, msg=jbody, digestmod=self.digestmod)\n", "VAR_239 = self.jwt_b64e(VAR_238.digest())\n", "return to_native(VAR_237 + b'.' + VAR_239)\n" ]
[ "def generate_token(self, payload):...\n", "secret = to_bytes(self.secret_key)\n", "if self.salt:\n", "if callable(self.salt):\n", "b64h = self.cached_b64h\n", "secret = '%s$%s' % (secret, self.salt(payload))\n", "secret = '%s$%s' % (secret, self.salt)\n", "b64p = self.jwt_b64e(serializers.json(payload))\n", "if isinstance(secret, unicodeT):\n", "jbody = b64h + b'.' + b64p\n", "secret = secret.encode('ascii', 'ignore')\n", "mauth = hmac.new(key=secret, msg=jbody, digestmod=self.digestmod)\n", "jsign = self.jwt_b64e(mauth.digest())\n", "return to_native(jbody + b'.' + jsign)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_11(self):...\n", "VAR_25 = 'inputs=[{\"text\":[\"foo\"], \"bytes\":[b\"bar\"]}]'\n", "VAR_23 = saved_model_cli.preprocess_input_examples_arg_string(VAR_25)\n", "VAR_26 = example_pb2.Example.FromString(VAR_23['inputs'][0])\n", "self.assertProtoEquals('string', VAR_26)\n" ]
[ "def testInputPreProcessExamplesWithStrAndBytes(self):...\n", "input_examples_str = 'inputs=[{\"text\":[\"foo\"], \"bytes\":[b\"bar\"]}]'\n", "input_dict = saved_model_cli.preprocess_input_examples_arg_string(\n input_examples_str)\n", "feature = example_pb2.Example.FromString(input_dict['inputs'][0])\n", "self.assertProtoEquals(\n \"\"\"\n features {\n feature {\n key: \"bytes\"\n value {\n bytes_list {\n value: \"bar\"\n }\n }\n }\n feature {\n key: \"text\"\n value {\n bytes_list {\n value: \"foo\"\n }\n }\n }\n }\n \"\"\"\n , feature)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "@login_required(doConnectionCleanup=False)...\n", "\"\"\"docstring\"\"\"\n", "VAR_150 = []\n", "VAR_151 = []\n", "VAR_150 = VAR_2.GET.getlist('image')\n", "VAR_151 = VAR_2.GET.getlist('well')\n", "if VAR_6 is None:\n", "if len(VAR_150) == 0 and len(VAR_151) == 0:\n", "VAR_150 = [VAR_6]\n", "return HttpResponseServerError(\n 'No images or wells specified in request. Use ?image=123 or ?well=123')\n", "VAR_152 = list()\n", "VAR_153 = list()\n", "if VAR_150:\n", "VAR_152 = list(VAR_8.getObjects('Image', VAR_150))\n", "if VAR_151:\n", "if len(VAR_152) == 0:\n", "VAR_319 = int(VAR_2.GET.get('index', 0))\n", "VAR_319 = 0\n", "VAR_153 = VAR_8.getObjects('Well', VAR_151)\n", "VAR_236 = (\n 'Cannot download archived file because Images not found (ids: %s)' %\n VAR_150)\n", "for ob in VAR_153:\n", "for VAR_10 in VAR_153:\n", "VAR_1.debug(VAR_236)\n", "if hasattr(ob, 'canDownload'):\n", "for ob in VAR_152:\n", "VAR_152.append(VAR_10.getWellSample(VAR_319).image())\n", "return HttpResponseServerError(VAR_236)\n", "if not ob.canDownload():\n", "VAR_119 = None\n", "VAR_154 = {}\n", "return HttpResponseNotFound()\n", "VAR_119 = ob.getParent().getParent()\n", "if hasattr(ob, 'canDownload'):\n", "if VAR_119 and isinstance(VAR_119, omero.gateway.WellWrapper):\n", "for VAR_15 in VAR_152:\n", "if not ob.canDownload():\n", "if hasattr(VAR_119, 'canDownload'):\n", "for VAR_28 in VAR_15.getImportedImageFiles():\n", "VAR_155 = list(VAR_154.values())\n", "return HttpResponseNotFound()\n", "if not VAR_119.canDownload():\n", "VAR_154[VAR_28.getId()] = VAR_28\n", "if len(VAR_155) == 0:\n", "return HttpResponseNotFound()\n", "VAR_236 = 'Tried downloading archived files from image with no files archived.'\n", "if len(VAR_155) == 1:\n", "VAR_1.debug(VAR_236)\n", "VAR_237 = VAR_155[0]\n", "VAR_239 = sum(VAR_28.size for VAR_28 in VAR_155)\n", "return HttpResponseServerError(VAR_236)\n", "VAR_61 = ConnCleaningHttpResponse(VAR_237.getFileInChunks(buf=settings.\n CHUNK_SIZE))\n", "if VAR_239 > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:\n", "VAR_61.conn = VAR_8\n", "VAR_236 = (\n 'Total size of files %d is larger than %d. Try requesting fewer files.' %\n (VAR_239, settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE))\n", "VAR_232 = tempfile.NamedTemporaryFile(suffix='.archive')\n", "VAR_61['Content-Length'] = VAR_237.getSize()\n", "VAR_1.warn(VAR_236)\n", "VAR_240 = VAR_2.GET.get('zipname', VAR_15.getName())\n", "VAR_238 = VAR_237.getName().replace(' ', '_').replace(',', '.')\n", "return HttpResponseForbidden(VAR_236)\n", "VAR_240 = zip_archived_files(VAR_152, VAR_232, VAR_240, buf=settings.CHUNK_SIZE\n )\n", "VAR_232.close()\n", "VAR_61['Content-Type'] = 'application/force-download'\n", "VAR_61['Content-Disposition'] = 'attachment; filename=%s' % VAR_238\n", "VAR_295 = FileWrapper(VAR_232)\n", "VAR_236 = 'Cannot download file (id:%s)' % VAR_6\n", "return VAR_61\n", "VAR_61 = ConnCleaningHttpResponse(VAR_295)\n", "VAR_1.error(VAR_236, exc_info=True)\n", "VAR_61.conn = VAR_8\n", "return HttpResponseServerError(VAR_236)\n", "VAR_61['Content-Length'] = VAR_232.tell()\n", "VAR_61['Content-Disposition'] = 'attachment; filename=%s' % VAR_240\n", "VAR_232.seek(0)\n" ]
[ "@login_required(doConnectionCleanup=False)...\n", "\"\"\"docstring\"\"\"\n", "imgIds = []\n", "wellIds = []\n", "imgIds = request.GET.getlist('image')\n", "wellIds = request.GET.getlist('well')\n", "if iid is None:\n", "if len(imgIds) == 0 and len(wellIds) == 0:\n", "imgIds = [iid]\n", "return HttpResponseServerError(\n 'No images or wells specified in request. Use ?image=123 or ?well=123')\n", "images = list()\n", "wells = list()\n", "if imgIds:\n", "images = list(conn.getObjects('Image', imgIds))\n", "if wellIds:\n", "if len(images) == 0:\n", "index = int(request.GET.get('index', 0))\n", "index = 0\n", "wells = conn.getObjects('Well', wellIds)\n", "message = (\n 'Cannot download archived file because Images not found (ids: %s)' % imgIds\n )\n", "for ob in wells:\n", "for w in wells:\n", "logger.debug(message)\n", "if hasattr(ob, 'canDownload'):\n", "for ob in images:\n", "images.append(w.getWellSample(index).image())\n", "return HttpResponseServerError(message)\n", "if not ob.canDownload():\n", "well = None\n", "fileMap = {}\n", "return HttpResponseNotFound()\n", "well = ob.getParent().getParent()\n", "if hasattr(ob, 'canDownload'):\n", "if well and isinstance(well, omero.gateway.WellWrapper):\n", "for image in images:\n", "if not ob.canDownload():\n", "if hasattr(well, 'canDownload'):\n", "for f in image.getImportedImageFiles():\n", "files = list(fileMap.values())\n", "return HttpResponseNotFound()\n", "if not well.canDownload():\n", "fileMap[f.getId()] = f\n", "if len(files) == 0:\n", "return HttpResponseNotFound()\n", "message = 'Tried downloading archived files from image with no files archived.'\n", "if len(files) == 1:\n", "logger.debug(message)\n", "orig_file = files[0]\n", "total_size = sum(f.size for f in files)\n", "return HttpResponseServerError(message)\n", "rsp = ConnCleaningHttpResponse(orig_file.getFileInChunks(buf=settings.\n CHUNK_SIZE))\n", "if total_size > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:\n", "rsp.conn = conn\n", "message = (\n 'Total size of files %d is larger than %d. Try requesting fewer files.' %\n (total_size, settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE))\n", "temp = tempfile.NamedTemporaryFile(suffix='.archive')\n", "rsp['Content-Length'] = orig_file.getSize()\n", "logger.warn(message)\n", "zipName = request.GET.get('zipname', image.getName())\n", "fname = orig_file.getName().replace(' ', '_').replace(',', '.')\n", "return HttpResponseForbidden(message)\n", "zipName = zip_archived_files(images, temp, zipName, buf=settings.CHUNK_SIZE)\n", "temp.close()\n", "rsp['Content-Type'] = 'application/force-download'\n", "rsp['Content-Disposition'] = 'attachment; filename=%s' % fname\n", "archivedFile_data = FileWrapper(temp)\n", "message = 'Cannot download file (id:%s)' % iid\n", "return rsp\n", "rsp = ConnCleaningHttpResponse(archivedFile_data)\n", "logger.error(message, exc_info=True)\n", "rsp.conn = conn\n", "return HttpResponseServerError(message)\n", "rsp['Content-Length'] = temp.tell()\n", "rsp['Content-Disposition'] = 'attachment; filename=%s' % zipName\n", "temp.seek(0)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "For", "For", "Expr'", "Condition", "For", "Expr'", "Return'", "Condition", "Assign'", "Assign'", "Return'", "Assign'", "Condition", "Condition", "For", "Condition", "Condition", "For", "Assign'", "Return'", "Condition", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Return'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Return'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'", "Assign'", "Expr'", "Assign'", "Return'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_13(self):...\n", "VAR_62 = self.useroptions.forUser(self.getUserId())\n", "VAR_63 = VAR_62.getChangableOptions()\n", "if VAR_53.session['admin']:\n", "VAR_63['media'].update({'may_download': True})\n", "VAR_63['media'].update({'may_download': VAR_62.getOptionValue(\n 'media.may_download')})\n", "return VAR_63\n" ]
[ "def api_getuseroptions(self):...\n", "uo = self.useroptions.forUser(self.getUserId())\n", "uco = uo.getChangableOptions()\n", "if cherrypy.session['admin']:\n", "uco['media'].update({'may_download': True})\n", "uco['media'].update({'may_download': uo.getOptionValue('media.may_download')})\n", "return uco\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_14(self):...\n", "VAR_5 = self.get_counts('json', period='this-month')\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertJSONEqual(VAR_5.content.decode(), VAR_0)\n" ]
[ "def test_counts_view_this_month(self):...\n", "response = self.get_counts('json', period='this-month')\n", "self.assertEqual(response.status_code, 200)\n", "self.assertJSONEqual(response.content.decode(), COUNTS_DATA)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_32(self):...\n", "VAR_74 = VAR_53.session.get('playlist', [])\n", "return VAR_74\n" ]
[ "def api_restoreplaylist(self):...\n", "session_playlist = cherrypy.session.get('playlist', [])\n", "return session_playlist\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "@log_function...\n", "\"\"\"docstring\"\"\"\n", "if VAR_33:\n", "VAR_2 = FUNC_1('/groups/%s/summary/categories/%s/rooms/%s', VAR_30, VAR_33,\n VAR_6)\n", "VAR_2 = FUNC_1('/groups/%s/summary/rooms/%s', VAR_30, VAR_6)\n", "return self.client.post_json(VAR_5=destination, VAR_2=path, VAR_3={\n 'requester_user_id': user_id}, VAR_39=content, VAR_15=True)\n" ]
[ "@log_function...\n", "\"\"\"docstring\"\"\"\n", "if category_id:\n", "path = _create_v1_path('/groups/%s/summary/categories/%s/rooms/%s',\n group_id, category_id, room_id)\n", "path = _create_v1_path('/groups/%s/summary/rooms/%s', group_id, room_id)\n", "return self.client.post_json(destination=destination, path=path, args={\n 'requester_user_id': user_id}, data=content, ignore_backoff=True)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_39(self):...\n", "if not test.is_built_with_xla():\n", "self.skipTest('Skipping test because XLA is not compiled in.')\n", "VAR_6 = 'all'\n", "VAR_7 = 'func2'\n", "VAR_14 = os.path.join(test.get_temp_dir(), 'dummy_model')\n", "VAR_15 = self.AOTCompileDummyModel()\n", "VAR_7 = getattr(VAR_15, VAR_7)\n", "self.evaluate(VAR_15.var.initializer)\n", "self.evaluate(VAR_15.write_var.initializer)\n", "save.save(VAR_15, VAR_14, signatures={'func': func})\n", "self.parser = saved_model_cli.create_parser()\n", "VAR_51 = os.path.join(test.get_temp_dir(), 'aot_compile_cpu_dir/out')\n", "VAR_11 = ['freeze_model', '--dir', VAR_14, '--tag_set', 'serve',\n '--signature_def_key', 'func', '--output_prefix', VAR_51,\n '--variables_to_feed', VAR_6]\n", "VAR_11 = self.parser.parse_args(VAR_11)\n", "saved_model_cli.freeze_model(VAR_11)\n", "self.assertTrue(file_io.file_exists(os.path.join(VAR_51, 'frozen_graph.pb')))\n", "self.assertTrue(file_io.file_exists(os.path.join(VAR_51, 'config.pbtxt')))\n" ]
[ "def testFreezeModel(self):...\n", "if not test.is_built_with_xla():\n", "self.skipTest('Skipping test because XLA is not compiled in.')\n", "variables_to_feed = 'all'\n", "func = 'func2'\n", "saved_model_dir = os.path.join(test.get_temp_dir(), 'dummy_model')\n", "dummy_model = self.AOTCompileDummyModel()\n", "func = getattr(dummy_model, func)\n", "self.evaluate(dummy_model.var.initializer)\n", "self.evaluate(dummy_model.write_var.initializer)\n", "save.save(dummy_model, saved_model_dir, signatures={'func': func})\n", "self.parser = saved_model_cli.create_parser()\n", "output_prefix = os.path.join(test.get_temp_dir(), 'aot_compile_cpu_dir/out')\n", "args = ['freeze_model', '--dir', saved_model_dir, '--tag_set', 'serve',\n '--signature_def_key', 'func', '--output_prefix', output_prefix,\n '--variables_to_feed', variables_to_feed]\n", "args = self.parser.parse_args(args)\n", "saved_model_cli.freeze_model(args)\n", "self.assertTrue(file_io.file_exists(os.path.join(output_prefix,\n 'frozen_graph.pb')))\n", "self.assertTrue(file_io.file_exists(os.path.join(output_prefix,\n 'config.pbtxt')))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_87():...\n", "VAR_364 = ','.join(VAR_301.get('columns'))\n", "yield VAR_364\n", "for rows in VAR_301.get('lazy_rows'):\n", "yield '\\n' + '\\n'.join([','.join([VAR_345(VAR_362) for VAR_362 in VAR_378]) for\n VAR_378 in rows])\n" ]
[ "def csv_gen():...\n", "csv_cols = ','.join(table_data.get('columns'))\n", "yield csv_cols\n", "for rows in table_data.get('lazy_rows'):\n", "yield '\\n' + '\\n'.join([','.join([str(d) for d in row]) for row in rows])\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "For", "Expr'" ]
[ "@pytest.mark.linux...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = ['--temp-basedir'] + FUNC_0(VAR_4.config)\n", "VAR_6.start(VAR_11, VAR_16={'LC_ALL': 'C'})\n", "VAR_6.set_setting('url.auto_search', 'never')\n", "VAR_6.send_cmd(':open {}'.format(VAR_7))\n", "if not VAR_4.config.webengine:\n", "VAR_18 = VAR_6.wait_for(message=\n 'Error while loading *: Error opening /*: No such file or directory')\n", "VAR_6.wait_for(message=\n \"load status for <* tab_id=* url='*/f%C3%B6%C3%B6.html'>: LoadStatus.error\"\n )\n", "VAR_18.expected = True\n" ]
[ "@pytest.mark.linux...\n", "\"\"\"docstring\"\"\"\n", "args = ['--temp-basedir'] + _base_args(request.config)\n", "quteproc_new.start(args, env={'LC_ALL': 'C'})\n", "quteproc_new.set_setting('url.auto_search', 'never')\n", "quteproc_new.send_cmd(':open {}'.format(url))\n", "if not request.config.webengine:\n", "line = quteproc_new.wait_for(message=\n 'Error while loading *: Error opening /*: No such file or directory')\n", "quteproc_new.wait_for(message=\n \"load status for <* tab_id=* url='*/f%C3%B6%C3%B6.html'>: LoadStatus.error\"\n )\n", "line.expected = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Expr'", "Expr'", "Expr'", "Condition", "Assign'", "Expr'", "Assign'" ]
[ "def FUNC_74(self, VAR_2, VAR_176=None, VAR_177=None):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_176 is None and VAR_177 is not None:\n", "VAR_303 = []\n", "if VAR_176 is None:\n", "for VAR_33 in VAR_177:\n", "VAR_176 = 'Login failed. Reason unknown.'\n", "return JsonResponse({'message': VAR_176}, VAR_315=403)\n", "for VAR_318 in VAR_33.errors:\n", "VAR_176 = ' '.join(VAR_303)\n", "VAR_303.append('%s: %s' % (VAR_33.label, VAR_318))\n" ]
[ "def handle_not_logged_in(self, request, error=None, form=None):...\n", "\"\"\"docstring\"\"\"\n", "if error is None and form is not None:\n", "formErrors = []\n", "if error is None:\n", "for field in form:\n", "error = 'Login failed. Reason unknown.'\n", "return JsonResponse({'message': error}, status=403)\n", "for e in field.errors:\n", "error = ' '.join(formErrors)\n", "formErrors.append('%s: %s' % (field.label, e))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "For", "Assign'", "Return'", "For", "Assign'", "Expr'" ]
[ "def FUNC_5(VAR_4, VAR_13, VAR_3):...\n", "if '%(key)s' in VAR_4:\n", "VAR_4 = VAR_4.replace('%(key)s', VAR_13)\n", "if '%s' in VAR_4:\n", "VAR_4 = VAR_4.replace('%s', (VAR_3 or '') + '%')\n", "return VAR_4\n" ]
[ "def scrub_custom_query(query, key, txt):...\n", "if '%(key)s' in query:\n", "query = query.replace('%(key)s', key)\n", "if '%s' in query:\n", "query = query.replace('%s', (txt or '') + '%')\n", "return query\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_9(self):...\n", "VAR_5 = self.get_credits('rst')\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(VAR_5.content.decode(),\n \"\"\"\n\n* Czech\n\n * Weblate Test <[email protected]> (1)\n\n\"\"\")\n" ]
[ "def test_credits_view_rst(self):...\n", "response = self.get_credits('rst')\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(response.content.decode(),\n \"\"\"\n\n* Czech\n\n * Weblate Test <[email protected]> (1)\n\n\"\"\")\n" ]
[ 0, 0, 0, 2 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_19(self):...\n", "VAR_5 = self._makeContext()\n", "VAR_5.beginScope()\n", "VAR_5.setRepeat('loop', 'python:[1,2,3]')\n", "self.assertTrue(VAR_5.evaluate(\"python:repeat['loop'].odd()\"))\n", "VAR_5.endScope()\n" ]
[ "def test_access_iterator_from_python_expression(self):...\n", "ec = self._makeContext()\n", "ec.beginScope()\n", "ec.setRepeat('loop', 'python:[1,2,3]')\n", "self.assertTrue(ec.evaluate(\"python:repeat['loop'].odd()\"))\n", "ec.endScope()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "\"\"\"A tornado based Jupyter notebook server.\"\"\"\n", "from __future__ import absolute_import, print_function\n", "import base64\n", "import datetime\n", "import errno\n", "import importlib\n", "import io\n", "import json\n", "import logging\n", "import os\n", "import random\n", "import re\n", "import select\n", "import signal\n", "import socket\n", "import ssl\n", "import sys\n", "import threading\n", "import webbrowser\n", "from jinja2 import Environment, FileSystemLoader\n", "from zmq.eventloop import ioloop\n", "ioloop.install()\n", "VAR_0 = 'The Jupyter Notebook requires tornado >= 4.0'\n", "import tornado\n", "VAR_10 = tornado.version_info\n", "if VAR_10 < (4, 0):\n", "from tornado import httpserver\n", "from tornado import web\n", "from tornado.log import LogFormatter, app_log, access_log, gen_log\n", "from notebook import DEFAULT_STATIC_FILES_PATH, DEFAULT_TEMPLATE_PATH_LIST, __version__\n", "from .base.handlers import Template404\n", "from .log import log_request\n", "from .services.kernels.kernelmanager import MappingKernelManager\n", "from .services.config import ConfigManager\n", "from .services.contents.manager import ContentsManager\n", "from .services.contents.filemanager import FileContentsManager\n", "from .services.sessions.sessionmanager import SessionManager\n", "from .auth.login import LoginHandler\n", "from .auth.logout import LogoutHandler\n", "from .base.handlers import FileFindHandler, IPythonHandler\n", "from traitlets.config import Config\n", "from traitlets.config.application import catch_config_error, boolean_flag\n", "from jupyter_core.application import JupyterApp, base_flags, base_aliases\n", "from jupyter_client import KernelManager\n", "from jupyter_client.kernelspec import KernelSpecManager, NoSuchKernel, NATIVE_KERNEL_NAME\n", "from jupyter_client.session import Session\n", "from nbformat.sign import NotebookNotary\n", "from traitlets import Dict, Unicode, Integer, List, Bool, Bytes, Instance, TraitError, Type\n", "from ipython_genutils import py3compat\n", "from IPython.paths import get_ipython_dir\n", "from jupyter_core.paths import jupyter_runtime_dir, jupyter_path\n", "from notebook._sysinfo import get_sys_info\n", "from .utils import url_path_join, check_pid\n", "VAR_1 = 'string'\n", "def FUNC_0(VAR_2, VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "for i in range(min(5, VAR_3)):\n", "yield VAR_2 + i\n", "for i in range(VAR_3 - 5):\n", "yield max(1, VAR_2 + random.randint(-2 * VAR_3, 2 * VAR_3))\n", "def FUNC_1(VAR_4):...\n", "\"\"\"docstring\"\"\"\n", "VAR_4 = 'notebook.' + VAR_4\n", "VAR_11 = __import__(VAR_4, fromlist=['default_handlers'])\n", "return VAR_11.default_handlers\n" ]
[ "\"\"\"A tornado based Jupyter notebook server.\"\"\"\n", "from __future__ import absolute_import, print_function\n", "import base64\n", "import datetime\n", "import errno\n", "import importlib\n", "import io\n", "import json\n", "import logging\n", "import os\n", "import random\n", "import re\n", "import select\n", "import signal\n", "import socket\n", "import ssl\n", "import sys\n", "import threading\n", "import webbrowser\n", "from jinja2 import Environment, FileSystemLoader\n", "from zmq.eventloop import ioloop\n", "ioloop.install()\n", "msg = 'The Jupyter Notebook requires tornado >= 4.0'\n", "import tornado\n", "version_info = tornado.version_info\n", "if version_info < (4, 0):\n", "from tornado import httpserver\n", "from tornado import web\n", "from tornado.log import LogFormatter, app_log, access_log, gen_log\n", "from notebook import DEFAULT_STATIC_FILES_PATH, DEFAULT_TEMPLATE_PATH_LIST, __version__\n", "from .base.handlers import Template404\n", "from .log import log_request\n", "from .services.kernels.kernelmanager import MappingKernelManager\n", "from .services.config import ConfigManager\n", "from .services.contents.manager import ContentsManager\n", "from .services.contents.filemanager import FileContentsManager\n", "from .services.sessions.sessionmanager import SessionManager\n", "from .auth.login import LoginHandler\n", "from .auth.logout import LogoutHandler\n", "from .base.handlers import FileFindHandler, IPythonHandler\n", "from traitlets.config import Config\n", "from traitlets.config.application import catch_config_error, boolean_flag\n", "from jupyter_core.application import JupyterApp, base_flags, base_aliases\n", "from jupyter_client import KernelManager\n", "from jupyter_client.kernelspec import KernelSpecManager, NoSuchKernel, NATIVE_KERNEL_NAME\n", "from jupyter_client.session import Session\n", "from nbformat.sign import NotebookNotary\n", "from traitlets import Dict, Unicode, Integer, List, Bool, Bytes, Instance, TraitError, Type\n", "from ipython_genutils import py3compat\n", "from IPython.paths import get_ipython_dir\n", "from jupyter_core.paths import jupyter_runtime_dir, jupyter_path\n", "from notebook._sysinfo import get_sys_info\n", "from .utils import url_path_join, check_pid\n", "_examples = \"\"\"\njupyter notebook # start the notebook\njupyter notebook --certfile=mycert.pem # use SSL/TLS certificate\n\"\"\"\n", "def random_ports(port, n):...\n", "\"\"\"docstring\"\"\"\n", "for i in range(min(5, n)):\n", "yield port + i\n", "for i in range(n - 5):\n", "yield max(1, port + random.randint(-2 * n, 2 * n))\n", "def load_handlers(name):...\n", "\"\"\"docstring\"\"\"\n", "name = 'notebook.' + name\n", "mod = __import__(name, fromlist=['default_handlers'])\n", "return mod.default_handlers\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "Expr'", "Assign'", "Import'", "Assign'", "Condition", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Docstring", "For", "Expr'", "For", "Expr'", "FunctionDef'", "Docstring", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_5(self) ->Sequence[GeneratorError]:...\n", "\"\"\"docstring\"\"\"\n", "if not self.package_dir.is_dir():\n", "print(f'Updating {self.project_name}')\n", "shutil.rmtree(self.package_dir)\n", "self._create_package()\n", "self._build_models()\n", "self._build_api()\n", "self._reformat()\n", "return self._get_errors()\n" ]
[ "def update(self) ->Sequence[GeneratorError]:...\n", "\"\"\"docstring\"\"\"\n", "if not self.package_dir.is_dir():\n", "print(f'Updating {self.project_name}')\n", "shutil.rmtree(self.package_dir)\n", "self._create_package()\n", "self._build_models()\n", "self._build_api()\n", "self._reformat()\n", "return self._get_errors()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_81(VAR_19):...\n", "if self.parentfield:\n", "return '{} {} #{}: {} {}'.format(frappe.bold(_(self.doctype)), _('Row'),\n self.idx, _('Value cannot be negative for'), frappe.bold(_(VAR_19.label)))\n", "return _('Value cannot be negative for {0}: {1}').format(_(VAR_19.parent),\n frappe.bold(_(VAR_19.label)))\n" ]
[ "def get_msg(df):...\n", "if self.parentfield:\n", "return '{} {} #{}: {} {}'.format(frappe.bold(_(self.doctype)), _('Row'),\n self.idx, _('Value cannot be negative for'), frappe.bold(_(df.label)))\n", "return _('Value cannot be negative for {0}: {1}').format(_(df.parent),\n frappe.bold(_(df.label)))\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_14(self, VAR_18, VAR_19=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_48 = []\n", "if not VAR_19:\n", "VAR_19 = self.meta.get_field(VAR_18)\n", "for VAR_21 in self.get(VAR_19.fieldname):\n", "VAR_21.db_update()\n", "if VAR_19.options in (self.flags.ignore_children_type or []):\n", "VAR_48.append(VAR_21.name)\n", "return\n", "if VAR_48:\n", "VAR_83 = frappe.db.sql('string'.format(VAR_19.options, ','.join(['%s'] *\n len(VAR_48))), [self.name, self.doctype, VAR_18] + VAR_48)\n", "frappe.db.sql(\n \"\"\"delete from `tab{0}` where parent=%s\n\t\t\t\tand parenttype=%s and parentfield=%s\"\"\"\n .format(VAR_19.options), (self.name, self.doctype, VAR_18))\n", "if len(VAR_83) > 0:\n", "frappe.db.sql('delete from `tab{0}` where name in ({1})'.format(VAR_19.\n options, ','.join(['%s'] * len(VAR_83))), tuple(row[0] for row in VAR_83))\n" ]
[ "def update_child_table(self, fieldname, df=None):...\n", "\"\"\"docstring\"\"\"\n", "rows = []\n", "if not df:\n", "df = self.meta.get_field(fieldname)\n", "for d in self.get(df.fieldname):\n", "d.db_update()\n", "if df.options in (self.flags.ignore_children_type or []):\n", "rows.append(d.name)\n", "return\n", "if rows:\n", "deleted_rows = frappe.db.sql(\n \"\"\"select name from `tab{0}` where parent=%s\n\t\t\t\tand parenttype=%s and parentfield=%s\n\t\t\t\tand name not in ({1})\"\"\"\n .format(df.options, ','.join(['%s'] * len(rows))), [self.name, self.\n doctype, fieldname] + rows)\n", "frappe.db.sql(\n \"\"\"delete from `tab{0}` where parent=%s\n\t\t\t\tand parenttype=%s and parentfield=%s\"\"\"\n .format(df.options), (self.name, self.doctype, fieldname))\n", "if len(deleted_rows) > 0:\n", "frappe.db.sql('delete from `tab{0}` where name in ({1})'.format(df.options,\n ','.join(['%s'] * len(deleted_rows))), tuple(row[0] for row in\n deleted_rows))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "For", "Expr'", "Condition", "Expr'", "Return'", "Condition", "Assign'", "Expr'", "Condition", "Expr'" ]
[ "import json\n", "import os\n", "import sqlite3\n", "from flask import Flask, render_template, redirect, g, request, url_for, Response\n", "from sqlalchemy import desc, func\n", "from sqlalchemy.exc import IntegrityError\n", "import yaml\n", "from . import worker, output\n", "from .connections import Connections\n", "from .login import auth\n", "from .models.user import UserGroup\n", "from .models.query import Query\n", "from .models.queryrevision import QueryRevision\n", "from .models.queryrun import QueryRun\n", "from .models.star import Star\n", "from .redissession import RedisSessionInterface\n", "from .results import SQLiteResultReader\n", "from .user import user_blueprint, get_user, get_preferences\n", "from .utils import json_formatter\n", "from .utils import monkey as _unused\n", "from .utils.pagination import RangeBasedPagination\n", "from .health import health_blueprint\n", "from .webhelpers import templatehelpers\n", "__dir__ = os.path.dirname(__file__)\n", "VAR_0 = Flask(__name__)\n", "VAR_0.config.update(yaml.load(open(os.path.join(__dir__,\n '../default_config.yaml'))))\n", "VAR_0.config.update(yaml.load(open(os.path.join(__dir__, '../config.yaml'))))\n", "VAR_0.register_blueprint(auth)\n", "VAR_0.register_blueprint(health_blueprint)\n", "VAR_0.register_blueprint(user_blueprint)\n", "VAR_0.register_blueprint(templatehelpers)\n", "VAR_1 = Connections(VAR_0.config)\n", "VAR_0.session_interface = RedisSessionInterface(VAR_1.redis)\n", "def FUNC_20(self, VAR_12, VAR_13):...\n", "VAR_32 = dict(request.args)\n", "VAR_32.update({'from': VAR_12, 'limit': VAR_13})\n", "return url_for('query_runs_all', **dict([(key, value) for key, value in\n list(get_params.items())]))\n" ]
[ "import json\n", "import os\n", "import sqlite3\n", "from flask import Flask, render_template, redirect, g, request, url_for, Response\n", "from sqlalchemy import desc, func\n", "from sqlalchemy.exc import IntegrityError\n", "import yaml\n", "from . import worker, output\n", "from .connections import Connections\n", "from .login import auth\n", "from .models.user import UserGroup\n", "from .models.query import Query\n", "from .models.queryrevision import QueryRevision\n", "from .models.queryrun import QueryRun\n", "from .models.star import Star\n", "from .redissession import RedisSessionInterface\n", "from .results import SQLiteResultReader\n", "from .user import user_blueprint, get_user, get_preferences\n", "from .utils import json_formatter\n", "from .utils import monkey as _unused\n", "from .utils.pagination import RangeBasedPagination\n", "from .health import health_blueprint\n", "from .webhelpers import templatehelpers\n", "__dir__ = os.path.dirname(__file__)\n", "app = Flask(__name__)\n", "app.config.update(yaml.load(open(os.path.join(__dir__,\n '../default_config.yaml'))))\n", "app.config.update(yaml.load(open(os.path.join(__dir__, '../config.yaml'))))\n", "app.register_blueprint(auth)\n", "app.register_blueprint(health_blueprint)\n", "app.register_blueprint(user_blueprint)\n", "app.register_blueprint(templatehelpers)\n", "global_conn = Connections(app.config)\n", "app.session_interface = RedisSessionInterface(global_conn.redis)\n", "def get_page_link(self, page_key, limit):...\n", "get_params = dict(request.args)\n", "get_params.update({'from': page_key, 'limit': limit})\n", "return url_for('query_runs_all', **dict([(key, value) for key, value in\n list(get_params.items())]))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Expr'", "Return'" ]
[ "@VAR_25.whitelist()...\n", "VAR_15 = None\n", "if VAR_25.form_dict.get('from_form'):\n", "VAR_25.errprint(VAR_25.utils.get_traceback())\n", "return VAR_15\n", "if VAR_25.form_dict.get('method'):\n", "VAR_15 = VAR_25.get_doc({'doctype': 'File', 'attached_to_name': VAR_25.\n form_dict.docname, 'attached_to_doctype': VAR_25.form_dict.doctype,\n 'attached_to_field': VAR_25.form_dict.docfield, 'file_url': VAR_25.\n form_dict.file_url, 'file_name': VAR_25.form_dict.filename,\n 'is_private': VAR_25.utils.cint(VAR_25.form_dict.is_private), 'content':\n VAR_25.form_dict.filedata, 'decode': True})\n", "VAR_15 = None\n", "VAR_25.response['http_status_code'] = 500\n", "VAR_3 = VAR_25.get_attr(VAR_25.form_dict.method)\n", "VAR_15.save()\n", "VAR_25.db.rollback()\n", "VAR_15 = None\n", "FUNC_3(VAR_3)\n", "VAR_15 = VAR_3()\n" ]
[ "@frappe.whitelist()...\n", "ret = None\n", "if frappe.form_dict.get('from_form'):\n", "frappe.errprint(frappe.utils.get_traceback())\n", "return ret\n", "if frappe.form_dict.get('method'):\n", "ret = frappe.get_doc({'doctype': 'File', 'attached_to_name': frappe.\n form_dict.docname, 'attached_to_doctype': frappe.form_dict.doctype,\n 'attached_to_field': frappe.form_dict.docfield, 'file_url': frappe.\n form_dict.file_url, 'file_name': frappe.form_dict.filename,\n 'is_private': frappe.utils.cint(frappe.form_dict.is_private), 'content':\n frappe.form_dict.filedata, 'decode': True})\n", "ret = None\n", "frappe.response['http_status_code'] = 500\n", "method = frappe.get_attr(frappe.form_dict.method)\n", "ret.save()\n", "frappe.db.rollback()\n", "ret = None\n", "is_whitelisted(method)\n", "ret = method()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "For", "Expr'", "Return'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'" ]
[ "def FUNC_47(self):...\n", "VAR_157 = StringIO(self.list_values)\n", "VAR_158 = [[item.strip(), item.strip()] for item in VAR_157.readlines()]\n", "VAR_157.close()\n", "return VAR_158\n" ]
[ "def _choices_as_array(self):...\n", "valuebuffer = StringIO(self.list_values)\n", "choices = [[item.strip(), item.strip()] for item in valuebuffer.readlines()]\n", "valuebuffer.close()\n", "return choices\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_8(self):...\n", "VAR_5 = {'not_types': ['m.room.message', 'org.matrix.foo.bar']}\n", "VAR_6 = FUNC_0(sender='@foo:bar', type='m.room.message', room_id='!foo:bar')\n", "self.assertFalse(Filter(VAR_5).check(VAR_6))\n" ]
[ "def test_definition_not_types_works_with_literals(self):...\n", "definition = {'not_types': ['m.room.message', 'org.matrix.foo.bar']}\n", "event = MockEvent(sender='@foo:bar', type='m.room.message', room_id='!foo:bar')\n", "self.assertFalse(Filter(definition).check(event))\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_8(self):...\n", "VAR_15 = self._makeOne()\n", "VAR_16 = CLASS_1()\n", "VAR_17 = FauxRequest(RESPONSE=response)\n", "self.assertEqual(VAR_15.extractCredentials(VAR_17), {})\n" ]
[ "def test_extractCredentials_no_creds(self):...\n", "helper = self._makeOne()\n", "response = FauxCookieResponse()\n", "request = FauxRequest(RESPONSE=response)\n", "self.assertEqual(helper.extractCredentials(request), {})\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_15(self):...\n", "return getattr(self, '_doc_before_save', None)\n" ]
[ "def get_doc_before_save(self):...\n", "return getattr(self, '_doc_before_save', None)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(VAR_8: Iterable[T]) ->List[VAR_1]:...\n", "VAR_33 = set()\n", "VAR_34 = []\n", "for item in VAR_8:\n", "if item not in VAR_33:\n", "return VAR_34\n", "VAR_33.add(item)\n", "VAR_34.append(item)\n" ]
[ "def deduped_ordered_list(it: Iterable[T]) ->List[T]:...\n", "seen = set()\n", "ret = []\n", "for item in it:\n", "if item not in seen:\n", "return ret\n", "seen.add(item)\n", "ret.append(item)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Condition", "Return'", "Expr'", "Expr'" ]
[ "def FUNC_40(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_93 = self.log.info\n", "VAR_93('interrupted')\n", "print(self.notebook_info())\n", "sys.stdout.write('Shutdown this notebook server (y/[n])? ')\n", "sys.stdout.flush()\n", "VAR_94, VAR_95, VAR_96 = select.select([sys.stdin], [], [], 5)\n", "if VAR_94:\n", "VAR_104 = sys.stdin.readline()\n", "print('No answer for 5s:', end=' ')\n", "if VAR_104.lower().startswith('y') and 'n' not in VAR_104.lower():\n", "print('resuming operation...')\n", "self.log.critical('Shutdown confirmed')\n", "ioloop.IOLoop.current().add_callback(self._restore_sigint_handler)\n", "ioloop.IOLoop.current().stop()\n", "return\n" ]
[ "def _confirm_exit(self):...\n", "\"\"\"docstring\"\"\"\n", "info = self.log.info\n", "info('interrupted')\n", "print(self.notebook_info())\n", "sys.stdout.write('Shutdown this notebook server (y/[n])? ')\n", "sys.stdout.flush()\n", "r, w, x = select.select([sys.stdin], [], [], 5)\n", "if r:\n", "line = sys.stdin.readline()\n", "print('No answer for 5s:', end=' ')\n", "if line.lower().startswith('y') and 'n' not in line.lower():\n", "print('resuming operation...')\n", "self.log.critical('Shutdown confirmed')\n", "ioloop.IOLoop.current().add_callback(self._restore_sigint_handler)\n", "ioloop.IOLoop.current().stop()\n", "return\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_22(VAR_9, VAR_5, VAR_10):...\n", "VAR_13 = calibre_db.session.query(db.Series).filter(db.Series.id == VAR_5\n ).first()\n", "if VAR_13:\n", "VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0, db.Books, db.\n Books.series.any(db.Series.id == VAR_5), [VAR_10[0]])\n", "abort(404)\n", "return render_title_template('index.html', VAR_68=random, VAR_65=pagination,\n VAR_63=entries, id=book_id, VAR_149=_(u'Series: %(serie)s', serie=name.\n name), VAR_9='series')\n" ]
[ "def render_series_books(page, book_id, order):...\n", "name = calibre_db.session.query(db.Series).filter(db.Series.id == book_id\n ).first()\n", "if name:\n", "entries, random, pagination = calibre_db.fill_indexpage(page, 0, db.Books,\n db.Books.series.any(db.Series.id == book_id), [order[0]])\n", "abort(404)\n", "return render_title_template('index.html', random=random, pagination=\n pagination, entries=entries, id=book_id, title=_(u'Series: %(serie)s',\n serie=name.name), page='series')\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_4(self, VAR_5, VAR_9, VAR_6):...\n", "VAR_13 = settings.PING_INTERVAL\n", "if VAR_13 > 0:\n", "VAR_9['ping_interval'] = VAR_13\n", "VAR_14 = settings.TOP_LINKS\n", "VAR_15 = []\n", "for tl in VAR_14:\n", "if len(tl) < 2:\n", "VAR_9['ome']['top_links'] = VAR_15\n", "VAR_21 = {}\n", "if settings.TOP_LOGO:\n", "VAR_21['label'] = tl[0]\n", "VAR_9['ome']['logo_src'] = settings.TOP_LOGO\n", "if settings.TOP_LOGO_LINK:\n", "VAR_22 = tl[1]\n", "VAR_9['ome']['logo_href'] = settings.TOP_LOGO_LINK\n", "VAR_16 = settings.METADATA_PANES\n", "VAR_21['link'] = reverse_with_params(**link_id)\n", "if len(tl) > 2:\n", "VAR_21['link'] = reverse(VAR_22)\n", "VAR_21['link'] = VAR_22\n", "VAR_9['ome']['metadata_panes'] = VAR_16\n", "VAR_21['attrs'] = tl[2]\n", "VAR_15.append(VAR_21)\n", "VAR_17 = settings.RIGHT_PLUGINS\n", "VAR_18 = []\n", "for rt in VAR_17:\n", "VAR_23 = rt[0]\n", "VAR_9['ome']['right_plugins'] = VAR_18\n", "VAR_24 = rt[1]\n", "VAR_19 = settings.CENTER_PLUGINS\n", "VAR_25 = rt[2]\n", "VAR_20 = []\n", "VAR_18.append({'label': VAR_23, 'include': VAR_24, 'plugin_id': VAR_25})\n", "for cp in VAR_19:\n", "VAR_23 = cp[0]\n", "VAR_9['ome']['center_plugins'] = VAR_20\n", "VAR_24 = cp[1]\n", "VAR_9['ome']['user_dropdown'] = settings.USER_DROPDOWN\n", "VAR_25 = cp[2]\n", "VAR_20.append({'label': VAR_23, 'include': VAR_24, 'plugin_id': VAR_25})\n" ]
[ "def load_settings(self, request, context, conn):...\n", "ping_interval = settings.PING_INTERVAL\n", "if ping_interval > 0:\n", "context['ping_interval'] = ping_interval\n", "top_links = settings.TOP_LINKS\n", "links = []\n", "for tl in top_links:\n", "if len(tl) < 2:\n", "context['ome']['top_links'] = links\n", "link = {}\n", "if settings.TOP_LOGO:\n", "link['label'] = tl[0]\n", "context['ome']['logo_src'] = settings.TOP_LOGO\n", "if settings.TOP_LOGO_LINK:\n", "link_id = tl[1]\n", "context['ome']['logo_href'] = settings.TOP_LOGO_LINK\n", "metadata_panes = settings.METADATA_PANES\n", "link['link'] = reverse_with_params(**link_id)\n", "if len(tl) > 2:\n", "link['link'] = reverse(link_id)\n", "link['link'] = link_id\n", "context['ome']['metadata_panes'] = metadata_panes\n", "link['attrs'] = tl[2]\n", "links.append(link)\n", "right_plugins = settings.RIGHT_PLUGINS\n", "r_plugins = []\n", "for rt in right_plugins:\n", "label = rt[0]\n", "context['ome']['right_plugins'] = r_plugins\n", "include = rt[1]\n", "center_plugins = settings.CENTER_PLUGINS\n", "plugin_id = rt[2]\n", "c_plugins = []\n", "r_plugins.append({'label': label, 'include': include, 'plugin_id': plugin_id})\n", "for cp in center_plugins:\n", "label = cp[0]\n", "context['ome']['center_plugins'] = c_plugins\n", "include = cp[1]\n", "context['ome']['user_dropdown'] = settings.USER_DROPDOWN\n", "plugin_id = cp[2]\n", "c_plugins.append({'label': label, 'include': include, 'plugin_id': plugin_id})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "For", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_35():...\n", "return FUNC_0(VAR_6=request.url_root, VAR_7=additional_unless)\n" ]
[ "def unless():...\n", "return _preemptive_unless(base_url=request.url_root, additional_unless=\n additional_unless)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "async def FUNC_8(self):...\n", "if not hasattr(self, 'resolved_ref'):\n", "self.resolved_ref = await self.get_resolved_ref()\n", "return f'https://{self.hostname}/{self.namespace}/tree/{self.resolved_ref}'\n" ]
[ "async def get_resolved_ref_url(self):...\n", "if not hasattr(self, 'resolved_ref'):\n", "self.resolved_ref = await self.get_resolved_ref()\n", "return f'https://{self.hostname}/{self.namespace}/tree/{self.resolved_ref}'\n" ]
[ 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_120():...\n", "VAR_214 = VAR_1.db.get_value('DocType', VAR_62, 'module')\n", "return VAR_1.module_app[FUNC_56(VAR_214)]\n" ]
[ "def _get_doctype_app():...\n", "doctype_module = local.db.get_value('DocType', doctype, 'module')\n", "return local.module_app[scrub(doctype_module)]\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "@VAR_2.route('/admin/view')...\n", "VAR_34 = updater_thread.get_current_version_info()\n", "if VAR_34 is False:\n", "VAR_98 = _(u'Unknown')\n", "if 'datetime' in VAR_34:\n", "VAR_35 = ub.session.query(ub.User).all()\n", "VAR_98 = VAR_34['datetime']\n", "VAR_98 = VAR_34['version']\n", "VAR_36 = config.get_mail_settings()\n", "VAR_117 = timedelta(seconds=time.timezone if time.localtime().tm_isdst == 0\n else time.altzone)\n", "VAR_25 = VAR_1['kobo'] and config.config_kobo_sync\n", "VAR_118 = datetime.strptime(VAR_98[:19], '%Y-%m-%dT%H:%M:%S')\n", "return render_title_template('admin.html', VAR_35=allUser, email=\n email_settings, config=config, VAR_98=commit, VAR_1=feature_support,\n VAR_25=kobo_support, title=_(u'Admin page'), page='admin')\n", "if len(VAR_98) > 19:\n", "if VAR_98[19] == '+':\n", "VAR_98 = format_datetime(VAR_118 - VAR_117, format='short', VAR_58=get_locale()\n )\n", "VAR_118 -= timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))\n", "if VAR_98[19] == '-':\n", "VAR_118 += timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))\n" ]
[ "@admi.route('/admin/view')...\n", "version = updater_thread.get_current_version_info()\n", "if version is False:\n", "commit = _(u'Unknown')\n", "if 'datetime' in version:\n", "allUser = ub.session.query(ub.User).all()\n", "commit = version['datetime']\n", "commit = version['version']\n", "email_settings = config.get_mail_settings()\n", "tz = timedelta(seconds=time.timezone if time.localtime().tm_isdst == 0 else\n time.altzone)\n", "kobo_support = feature_support['kobo'] and config.config_kobo_sync\n", "form_date = datetime.strptime(commit[:19], '%Y-%m-%dT%H:%M:%S')\n", "return render_title_template('admin.html', allUser=allUser, email=\n email_settings, config=config, commit=commit, feature_support=\n feature_support, kobo_support=kobo_support, title=_(u'Admin page'),\n page='admin')\n", "if len(commit) > 19:\n", "if commit[19] == '+':\n", "commit = format_datetime(form_date - tz, format='short', locale=get_locale())\n", "form_date -= timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))\n", "if commit[19] == '-':\n", "form_date += timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'", "Condition", "Condition", "Assign'", "AugAssign'", "Condition", "AugAssign'" ]
[ "def FUNC_7(self):...\n", "self.assert_expected(self.folder.laf, 'TeeShopLAF.html')\n" ]
[ "def test_1(self):...\n", "self.assert_expected(self.folder.laf, 'TeeShopLAF.html')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def __str__(self):...\n", "return 'Preferences for %s' % self.user\n" ]
[ "def __str__(self):...\n", "return 'Preferences for %s' % self.user\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_161():...\n", "return URL.verify(VAR_263.request, user_signature=True, VAR_151=hash_vars,\n VAR_152=True)\n" ]
[ "def verify():...\n", "return URL.verify(current.request, user_signature=True, hash_vars=hash_vars,\n hash_extension=True)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@VAR_0.route('/note_attachment/<filename>')...\n", "\"\"\"docstring\"\"\"\n", "VAR_32 = os.path.join(PATH_NOTE_ATTACHMENTS, VAR_3)\n", "if VAR_32 is not None:\n", "return send_file(VAR_32, as_attachment=True)\n", "VAR_1.exception('Send note attachment')\n" ]
[ "@blueprint.route('/note_attachment/<filename>')...\n", "\"\"\"docstring\"\"\"\n", "file_path = os.path.join(PATH_NOTE_ATTACHMENTS, filename)\n", "if file_path is not None:\n", "return send_file(file_path, as_attachment=True)\n", "logger.exception('Send note attachment')\n" ]
[ 0, 0, 0, 0, 1, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Return'", "Expr'" ]
[ "def FUNC_30(self):...\n", "self.parser = saved_model_cli.create_parser()\n", "VAR_9 = test.test_src_dir_path(VAR_0)\n", "VAR_41 = os.path.join(test.get_temp_dir(), 'new_dir')\n", "VAR_10 = self.parser.parse_args(['run', '--dir', VAR_9, '--tag_set',\n 'serve', '--signature_def', 'regress_x_to_y', '--input_examples',\n 'inputs=[{\"x\":[[1],[2]]}]', '--outdir', VAR_41])\n", "saved_model_cli.run(VAR_10)\n" ]
[ "def testRunCommandInputExamplesFeatureBadType(self):...\n", "self.parser = saved_model_cli.create_parser()\n", "base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n", "output_dir = os.path.join(test.get_temp_dir(), 'new_dir')\n", "args = self.parser.parse_args(['run', '--dir', base_path, '--tag_set',\n 'serve', '--signature_def', 'regress_x_to_y', '--input_examples',\n 'inputs=[{\"x\":[[1],[2]]}]', '--outdir', output_dir])\n", "saved_model_cli.run(args)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_4(self, VAR_0, VAR_1, VAR_3):...\n", "self.handler = VAR_3.get_directory_handler()\n", "self.admin_user = self.register_user('admin', 'pass', admin=True)\n", "self.admin_user_tok = self.login('admin', 'pass')\n", "self.room_id = self.helper.create_room_as(self.admin_user, tok=self.\n admin_user_tok)\n", "self.test_alias = '#test:test'\n", "self.room_alias = RoomAlias.from_string(self.test_alias)\n", "self.test_user = self.register_user('user', 'pass', admin=False)\n", "self.test_user_tok = self.login('user', 'pass')\n", "self.helper.join(room=self.room_id, VAR_4=self.test_user, tok=self.\n test_user_tok)\n" ]
[ "def prepare(self, reactor, clock, hs):...\n", "self.handler = hs.get_directory_handler()\n", "self.admin_user = self.register_user('admin', 'pass', admin=True)\n", "self.admin_user_tok = self.login('admin', 'pass')\n", "self.room_id = self.helper.create_room_as(self.admin_user, tok=self.\n admin_user_tok)\n", "self.test_alias = '#test:test'\n", "self.room_alias = RoomAlias.from_string(self.test_alias)\n", "self.test_user = self.register_user('user', 'pass', admin=False)\n", "self.test_user_tok = self.login('user', 'pass')\n", "self.helper.join(room=self.room_id, user=self.test_user, tok=self.test_user_tok\n )\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_22(self):...\n", "VAR_5 = self._makeContext()\n", "self.assertEqual(VAR_5.evaluate('path:'), None)\n" ]
[ "def test_empty_path_expression_explicit(self):...\n", "ec = self._makeContext()\n", "self.assertEqual(ec.evaluate('path:'), None)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_4(self, VAR_0, VAR_1, VAR_3):...\n", "VAR_17 = {}\n", "VAR_17['alias_creation_rules'] = [{'user_id': '*', 'alias': '#unofficial_*',\n 'action': 'allow'}]\n", "VAR_17['room_list_publication_rules'] = []\n", "VAR_18 = RoomDirectoryConfig()\n", "VAR_18.read_config(VAR_17)\n", "self.hs.config.is_alias_creation_allowed = VAR_18.is_alias_creation_allowed\n", "return VAR_3\n" ]
[ "def prepare(self, reactor, clock, hs):...\n", "config = {}\n", "config['alias_creation_rules'] = [{'user_id': '*', 'alias': '#unofficial_*',\n 'action': 'allow'}]\n", "config['room_list_publication_rules'] = []\n", "rd_config = RoomDirectoryConfig()\n", "rd_config.read_config(config)\n", "self.hs.config.is_alias_creation_allowed = rd_config.is_alias_creation_allowed\n", "return hs\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_28(self, VAR_5, VAR_30, VAR_31, VAR_6):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = FUNC_1('/groups/%s/room/%s', VAR_30, VAR_6)\n", "return self.client.delete_json(VAR_5=destination, VAR_2=path, VAR_3={\n 'requester_user_id': requester_user_id}, VAR_15=True)\n" ]
[ "def remove_room_from_group(self, destination, group_id, requester_user_id,...\n", "\"\"\"docstring\"\"\"\n", "path = _create_v1_path('/groups/%s/room/%s', group_id, room_id)\n", "return self.client.delete_json(destination=destination, path=path, args={\n 'requester_user_id': requester_user_id}, ignore_backoff=True)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_5(VAR_0, VAR_1):...\n", "return FUNC_0(VAR_0=request, VAR_1=pk, VAR_2='is_pinned', VAR_3=True, VAR_4\n =Comment.PINNED, VAR_5=_('The topic has been pinned'))\n" ]
[ "def pin(request, pk):...\n", "return _moderate(request=request, pk=pk, field_name='is_pinned', to_value=\n True, action=Comment.PINNED, message=_('The topic has been pinned'))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@VAR_2.route('/admin/logfile')...\n", "VAR_89 = {(0): logger.get_logfile(config.config_logfile), (1): logger.\n get_accesslogfile(config.config_access_logfile)}\n", "return render_title_template('logviewer.html', title=_(u'Logfile viewer'),\n accesslog_enable=config.config_access_log, log_enable=bool(config.\n config_logfile != logger.LOG_TO_STDOUT), VAR_89=logfiles, page='logfile')\n" ]
[ "@admi.route('/admin/logfile')...\n", "logfiles = {(0): logger.get_logfile(config.config_logfile), (1): logger.\n get_accesslogfile(config.config_access_logfile)}\n", "return render_title_template('logviewer.html', title=_(u'Logfile viewer'),\n accesslog_enable=config.config_access_log, log_enable=bool(config.\n config_logfile != logger.LOG_TO_STDOUT), logfiles=logfiles, page='logfile')\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "async def FUNC_5(self):...\n", "VAR_33 = AsyncHTTPClient()\n", "VAR_34 = HTTPRequest('https://doi.org/{}'.format(self.spec), user_agent=\n 'BinderHub')\n", "VAR_35 = await VAR_33.fetch(VAR_34)\n", "VAR_37 = self.url_regex.match(VAR_35.effective_url)\n", "VAR_38 = VAR_37.groups()[3]\n", "VAR_39 = VAR_37.groups()[5]\n", "if not VAR_39:\n", "VAR_39 = '1'\n", "self.record_id = '{}.v{}'.format(VAR_38, VAR_39)\n", "return self.record_id\n" ]
[ "async def get_resolved_ref(self):...\n", "client = AsyncHTTPClient()\n", "req = HTTPRequest('https://doi.org/{}'.format(self.spec), user_agent=\n 'BinderHub')\n", "r = await client.fetch(req)\n", "match = self.url_regex.match(r.effective_url)\n", "article_id = match.groups()[3]\n", "article_version = match.groups()[5]\n", "if not article_version:\n", "article_version = '1'\n", "self.record_id = '{}.v{}'.format(article_id, article_version)\n", "return self.record_id\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "@FUNC_0...\n", "if self.config.worker_app:\n", "return GroupsServerWorkerHandler(self)\n", "return GroupsServerHandler(self)\n" ]
[ "@cache_in_self...\n", "if self.config.worker_app:\n", "return GroupsServerWorkerHandler(self)\n", "return GroupsServerHandler(self)\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Return'" ]
[ "@ratelimit(field='email', rate='5/5m')...\n", "if VAR_7.user.is_authenticated:\n", "return redirect(VAR_7.GET.get('next', reverse('spirit:user:update')))\n", "VAR_17 = ResendActivationForm(data=post_data(request))\n", "if is_post(VAR_7):\n", "if not VAR_7.is_limited() and VAR_17.is_valid():\n", "return render(VAR_7=request, VAR_12=\n 'spirit/user/auth/activation_resend.html', context={'form': form})\n", "VAR_18 = VAR_17.get_user()\n", "messages.info(VAR_7, _('string'))\n", "send_activation_email(VAR_7, VAR_18)\n", "return redirect(reverse(settings.LOGIN_URL))\n" ]
[ "@ratelimit(field='email', rate='5/5m')...\n", "if request.user.is_authenticated:\n", "return redirect(request.GET.get('next', reverse('spirit:user:update')))\n", "form = ResendActivationForm(data=post_data(request))\n", "if is_post(request):\n", "if not request.is_limited() and form.is_valid():\n", "return render(request=request, template_name=\n 'spirit/user/auth/activation_resend.html', context={'form': form})\n", "user = form.get_user()\n", "messages.info(request, _(\n \"If you don't receive an email, please make sure you've entered the address you registered with, and check your spam folder.\"\n ))\n", "send_activation_email(request, user)\n", "return redirect(reverse(settings.LOGIN_URL))\n" ]
[ 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Assign'", "Condition", "Condition", "Return'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "import ast\n", "import base64\n", "import io\n", "import logging\n", "import random\n", "from html import escape\n", "from os import remove\n", "import magic\n", "from flask import Flask, abort, request\n", "from PIL import Image\n", "VAR_0 = Flask(__name__)\n", "VAR_0.logger.setLevel(logging.DEBUG)\n", "VAR_0.config['MAX_CONTENT_LENGTH'] = 2 * 10 ** 5\n", "def FUNC_0():...\n", "VAR_3 = open('config', 'r')\n", "VAR_4 = ast.literal_eval(VAR_3.read())\n", "VAR_3.close()\n", "VAR_5 = VAR_4['max_cache_size']\n", "VAR_6 = VAR_4['host']\n", "VAR_7 = VAR_4['port']\n", "return VAR_5, VAR_6, VAR_7\n" ]
[ "import ast\n", "import base64\n", "import io\n", "import logging\n", "import random\n", "from html import escape\n", "from os import remove\n", "import magic\n", "from flask import Flask, abort, request\n", "from PIL import Image\n", "app = Flask(__name__)\n", "app.logger.setLevel(logging.DEBUG)\n", "app.config['MAX_CONTENT_LENGTH'] = 2 * 10 ** 5\n", "def get_config():...\n", "f = open('config', 'r')\n", "conf = ast.literal_eval(f.read())\n", "f.close()\n", "cache_size = conf['max_cache_size']\n", "host = conf['host']\n", "port = conf['port']\n", "return cache_size, host, port\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "Assign'", "Expr'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_6():...\n", "yield '<!-- CLICK_WEB START HEADER -->'\n", "yield '<div class=\"command-line\">Executing: {}</div>'.format('/'.join(VAR_4))\n", "yield '<!-- CLICK_WEB END HEADER -->'\n" ]
[ "def generate():...\n", "yield '<!-- CLICK_WEB START HEADER -->'\n", "yield '<div class=\"command-line\">Executing: {}</div>'.format('/'.join(commands)\n )\n", "yield '<!-- CLICK_WEB END HEADER -->'\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'" ]
[ "def __init__(self, VAR_5, VAR_4, *VAR_6, **VAR_7):...\n", "super().__init__(VAR_5, VAR_4, *VAR_6, **kwargs)\n", "self.fields['target'].widget.attrs['tabindex'] = 101\n", "self.fields['target'].widget.profile = VAR_4.profile\n", "self.fields['target'].initial = Unit(VAR_5=translation, id_hash=0)\n" ]
[ "def __init__(self, translation, user, *args, **kwargs):...\n", "super().__init__(translation, user, *args, **kwargs)\n", "self.fields['target'].widget.attrs['tabindex'] = 101\n", "self.fields['target'].widget.profile = user.profile\n", "self.fields['target'].initial = Unit(translation=translation, id_hash=0)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_12(self, VAR_26: str, *, VAR_29: Optional[Type[Any]]=None, VAR_13:...\n", "return self.api_route(VAR_26=path, VAR_29=response_model, VAR_13=\n status_code, VAR_30=tags, VAR_31=dependencies, VAR_32=summary, VAR_33=\n description, VAR_34=response_description, VAR_35=responses, VAR_36=\n deprecated, VAR_37=['GET'], VAR_38=operation_id, VAR_16=\n response_model_include, VAR_17=response_model_exclude, VAR_18=\n response_model_by_alias, VAR_19=response_model_exclude_unset, VAR_20=\n response_model_exclude_defaults, VAR_21=response_model_exclude_none,\n VAR_39=include_in_schema, VAR_14=response_class, VAR_28=name, VAR_40=\n callbacks)\n" ]
[ "def get(self, path: str, *, response_model: Optional[Type[Any]]=None,...\n", "return self.api_route(path=path, response_model=response_model, status_code\n =status_code, tags=tags, dependencies=dependencies, summary=summary,\n description=description, response_description=response_description,\n responses=responses, deprecated=deprecated, methods=['GET'],\n operation_id=operation_id, response_model_include=\n response_model_include, response_model_exclude=response_model_exclude,\n response_model_by_alias=response_model_by_alias,\n response_model_exclude_unset=response_model_exclude_unset,\n response_model_exclude_defaults=response_model_exclude_defaults,\n response_model_exclude_none=response_model_exclude_none,\n include_in_schema=include_in_schema, response_class=response_class,\n name=name, callbacks=callbacks)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@FUNC_0...\n", "return TransportLayerClient(self)\n" ]
[ "@cache_in_self...\n", "return TransportLayerClient(self)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_19(VAR_23, VAR_16):...\n", "VAR_40 = False\n", "if VAR_23:\n", "VAR_23 = clean_html(VAR_23)\n", "if len(VAR_16.comments):\n", "if VAR_16.comments[0].text != VAR_23:\n", "if VAR_23:\n", "VAR_16.comments[0].text = VAR_23\n", "return VAR_40\n", "VAR_16.comments.append(db.Comments(text=comments, VAR_16=book.id))\n", "VAR_40 = True\n", "VAR_40 = True\n" ]
[ "def edit_book_comments(comments, book):...\n", "modif_date = False\n", "if comments:\n", "comments = clean_html(comments)\n", "if len(book.comments):\n", "if book.comments[0].text != comments:\n", "if comments:\n", "book.comments[0].text = comments\n", "return modif_date\n", "book.comments.append(db.Comments(text=comments, book=book.id))\n", "modif_date = True\n", "modif_date = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Return'", "Expr'", "Assign'", "Assign'" ]
[ "def FUNC_62(self, VAR_73, VAR_62, VAR_14):...\n", "\"\"\"docstring\"\"\"\n", "VAR_73 = FUNC_3(VAR_73)\n", "VAR_14 = VAR_19(VAR_14, VAR_15=True)\n", "if VAR_62:\n", "VAR_62 = VAR_19(VAR_62, VAR_15=True)\n", "VAR_64 = '<img src=\"%s\" alt=\"%s\"' % (VAR_73, VAR_14)\n", "VAR_64 = '<img src=\"%s\" alt=\"%s\" title=\"%s\"' % (VAR_73, VAR_14, VAR_62)\n", "if self.options.get('use_xhtml'):\n", "return '%s />' % VAR_64\n", "return '%s>' % VAR_64\n" ]
[ "def image(self, src, title, text):...\n", "\"\"\"docstring\"\"\"\n", "src = escape_link(src)\n", "text = escape(text, quote=True)\n", "if title:\n", "title = escape(title, quote=True)\n", "html = '<img src=\"%s\" alt=\"%s\"' % (src, text)\n", "html = '<img src=\"%s\" alt=\"%s\" title=\"%s\"' % (src, text, title)\n", "if self.options.get('use_xhtml'):\n", "return '%s />' % html\n", "return '%s>' % html\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "import inspect\n", "import json\n", "import mimetypes\n", "import os\n", "import threading\n", "from typing import Any, Dict, Optional, Type, Union\n", "import tornado.web\n", "from streamlit.scriptrunner import get_script_run_ctx\n", "import streamlit.server.routes\n", "from streamlit import type_util\n", "from streamlit.elements.form import current_form_id\n", "from streamlit import util\n", "from streamlit.errors import StreamlitAPIException\n", "from streamlit.logger import get_logger\n", "from streamlit.proto.Components_pb2 import SpecialArg, ArrowTable as ArrowTableProto\n", "from streamlit.proto.Element_pb2 import Element\n", "from streamlit.state import NoValue, register_widget\n", "from streamlit.type_util import to_bytes\n", "VAR_0 = get_logger(__name__)\n", "\"\"\"Class for exceptions generated during custom component marshalling.\"\"\"\n", "\"\"\"A Custom Component declaration.\"\"\"\n", "def __init__(self, VAR_1: str, VAR_2: Optional[str]=None, VAR_3: Optional[...\n", "if VAR_2 is None and VAR_3 is None or VAR_2 is not None and VAR_3 is not None:\n", "self.name = VAR_1\n", "self.path = VAR_2\n", "self.url = VAR_3\n", "def __repr__(self) ->str:...\n", "return util.repr_(self)\n" ]
[ "import inspect\n", "import json\n", "import mimetypes\n", "import os\n", "import threading\n", "from typing import Any, Dict, Optional, Type, Union\n", "import tornado.web\n", "from streamlit.scriptrunner import get_script_run_ctx\n", "import streamlit.server.routes\n", "from streamlit import type_util\n", "from streamlit.elements.form import current_form_id\n", "from streamlit import util\n", "from streamlit.errors import StreamlitAPIException\n", "from streamlit.logger import get_logger\n", "from streamlit.proto.Components_pb2 import SpecialArg, ArrowTable as ArrowTableProto\n", "from streamlit.proto.Element_pb2 import Element\n", "from streamlit.state import NoValue, register_widget\n", "from streamlit.type_util import to_bytes\n", "LOGGER = get_logger(__name__)\n", "\"\"\"Class for exceptions generated during custom component marshalling.\"\"\"\n", "\"\"\"A Custom Component declaration.\"\"\"\n", "def __init__(self, name: str, path: Optional[str]=None, url: Optional[str]=None...\n", "if path is None and url is None or path is not None and url is not None:\n", "self.name = name\n", "self.path = path\n", "self.url = url\n", "def __repr__(self) ->str:...\n", "return util.repr_(self)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "Import'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Expr'", "Expr'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Return'" ]
[ "def FUNC_80():...\n", "VAR_84 = self.as_dict()\n", "for VAR_43, VAR_26 in iteritems(VAR_84):\n", "if VAR_26 == None:\n", "return VAR_84\n", "VAR_84[VAR_43] = ''\n" ]
[ "def get_values():...\n", "values = self.as_dict()\n", "for key, value in iteritems(values):\n", "if value == None:\n", "return values\n", "values[key] = ''\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Condition", "Return'", "Assign'" ]
[ "@def_function.function...\n", "return VAR_50 + 2 * VAR_56\n" ]
[ "@def_function.function...\n", "return y + 2 * c\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@Document.whitelist...\n", "self.db_set('disabled', cint(VAR_16))\n" ]
[ "@Document.whitelist...\n", "self.db_set('disabled', cint(disable))\n" ]
[ 0, 0 ]
[ "Condition", "Expr'" ]
[ "def FUNC_11(VAR_4):...\n", "\"\"\"docstring\"\"\"\n", "VAR_34 = set(meta_graph_lib.ops_used_by_graph_def(VAR_4.graph_def))\n", "VAR_35 = VAR_1 & VAR_34\n", "if VAR_35:\n", "print('MetaGraph with tag set %s contains the following denylisted ops:' %\n VAR_4.meta_info_def.tags, VAR_35)\n", "print('MetaGraph with tag set %s does not contain denylisted ops.' % VAR_4.\n meta_info_def.tags)\n" ]
[ "def scan_meta_graph_def(meta_graph_def):...\n", "\"\"\"docstring\"\"\"\n", "all_ops_set = set(meta_graph_lib.ops_used_by_graph_def(meta_graph_def.\n graph_def))\n", "denylisted_ops = _OP_DENYLIST & all_ops_set\n", "if denylisted_ops:\n", "print('MetaGraph with tag set %s contains the following denylisted ops:' %\n meta_graph_def.meta_info_def.tags, denylisted_ops)\n", "print('MetaGraph with tag set %s does not contain denylisted ops.' %\n meta_graph_def.meta_info_def.tags)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Expr'", "Expr'" ]
[ "@VAR_0.route('/')...\n", "return flask.Response('Welcome to XMPP HTTP Upload. State your business.',\n mimetype='text/plain')\n" ]
[ "@app.route('/')...\n", "return flask.Response('Welcome to XMPP HTTP Upload. State your business.',\n mimetype='text/plain')\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_23(self):...\n", "if not self.enable_mathjax:\n", "return u''\n", "VAR_88 = self.tornado_settings.get('static_url_prefix', url_path_join(self.\n base_url, 'static'))\n", "return url_path_join(VAR_88, 'components', 'MathJax', 'MathJax.js')\n" ]
[ "def _mathjax_url_default(self):...\n", "if not self.enable_mathjax:\n", "return u''\n", "static_url_prefix = self.tornado_settings.get('static_url_prefix',\n url_path_join(self.base_url, 'static'))\n", "return url_path_join(static_url_prefix, 'components', 'MathJax', 'MathJax.js')\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Return'" ]
[ "def FUNC_1(self, VAR_10):...\n", "VAR_10 = self.form.refresh(request.form)\n", "VAR_21 = self.appbuilder.sm.get_user_by_id(g.user.id)\n", "VAR_10.populate_obj(VAR_21)\n", "self.appbuilder.sm.update_user(VAR_21)\n", "flash(as_unicode(self.message), 'info')\n" ]
[ "def form_post(self, form):...\n", "form = self.form.refresh(request.form)\n", "item = self.appbuilder.sm.get_user_by_id(g.user.id)\n", "form.populate_obj(item)\n", "self.appbuilder.sm.update_user(item)\n", "flash(as_unicode(self.message), 'info')\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_11(self, VAR_17, VAR_18={}):...\n", "if 'markdown' in self.md.markupShorthands:\n", "VAR_30 = MarkdownCodeSpans(VAR_17)\n", "VAR_30 = Functor(VAR_17)\n", "VAR_29 = dict(self.macros, **moreMacros)\n", "VAR_30 = VAR_30.map(curry(replaceMacros, VAR_29=macros))\n", "VAR_30 = VAR_30.map(fixTypography)\n", "if 'css' in self.md.markupShorthands:\n", "VAR_30 = VAR_30.map(replaceAwkwardCSSShorthands)\n", "return VAR_30.extract()\n" ]
[ "def fixText(self, text, moreMacros={}):...\n", "if 'markdown' in self.md.markupShorthands:\n", "textFunctor = MarkdownCodeSpans(text)\n", "textFunctor = Functor(text)\n", "macros = dict(self.macros, **moreMacros)\n", "textFunctor = textFunctor.map(curry(replaceMacros, macros=macros))\n", "textFunctor = textFunctor.map(fixTypography)\n", "if 'css' in self.md.markupShorthands:\n", "textFunctor = textFunctor.map(replaceAwkwardCSSShorthands)\n", "return textFunctor.extract()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_42():...\n", "VAR_129 = make_response(render_template('index.jinja2', **render_kwargs))\n", "if VAR_107:\n", "VAR_129 = util.flask.add_non_caching_response_headers(VAR_129)\n", "return VAR_129\n" ]
[ "def make_default_ui():...\n", "r = make_response(render_template('index.jinja2', **render_kwargs))\n", "if wizard:\n", "r = util.flask.add_non_caching_response_headers(r)\n", "return r\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_151(VAR_199):...\n", "if [VAR_14 for VAR_14 in VAR_199 if 32 > ord(VAR_14) or ord(VAR_14) > 127]:\n", "return Header(VAR_199.encode('utf-8'), 'utf-8')\n", "return VAR_199\n" ]
[ "def encode_header(key):...\n", "if [c for c in key if 32 > ord(c) or ord(c) > 127]:\n", "return Header(key.encode('utf-8'), 'utf-8')\n", "return key\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Return'", "Return'" ]
[ "def FUNC_0(VAR_12):...\n", "VAR_21 = VAR_12.pattern\n", "if VAR_21.startswith('^'):\n", "VAR_21 = VAR_21[1:]\n", "return VAR_21\n" ]
[ "def _pure_pattern(regex):...\n", "pattern = regex.pattern\n", "if pattern.startswith('^'):\n", "pattern = pattern[1:]\n", "return pattern\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_2(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_13 = 'otherserver'\n", "VAR_14 = '@otheruser:' + VAR_13\n", "VAR_8 = self.register_user('kermit', 'test')\n", "VAR_9 = self.login('kermit', 'test')\n", "VAR_6 = self.helper.create_room_as(room_creator=user_id, VAR_9=tok)\n", "VAR_15 = self.get_success(self.store.get_room_version(VAR_6))\n", "VAR_16 = self._build_and_send_join_event(VAR_13, VAR_14, VAR_6)\n", "VAR_17 = self.successResultOf(self.store._get_state_group_for_event(VAR_16.\n event_id))\n", "VAR_18 = event_from_pdu_json({'type': EventTypes.Message, 'content': {},\n 'room_id': VAR_6, 'sender': '@yetanotheruser:' + VAR_13, 'depth': \n VAR_16['depth'] + 1, 'prev_events': [VAR_16.event_id], 'auth_events': [\n ], 'origin_server_ts': self.clock.time_msec()}, VAR_15)\n", "VAR_11 = run_in_background(self.handler.on_receive_pdu, VAR_13, VAR_18)\n", "self.get_success(VAR_11)\n", "VAR_19 = self.get_success(self.store.get_event(VAR_18.event_id,\n allow_rejected=True))\n", "self.assertIsNotNone(VAR_19.rejected_reason)\n", "VAR_20 = self.successResultOf(self.store._get_state_group_for_event(VAR_18.\n event_id))\n", "self.assertEqual(VAR_17, VAR_20)\n" ]
[ "def test_rejected_message_event_state(self):...\n", "\"\"\"docstring\"\"\"\n", "OTHER_SERVER = 'otherserver'\n", "OTHER_USER = '@otheruser:' + OTHER_SERVER\n", "user_id = self.register_user('kermit', 'test')\n", "tok = self.login('kermit', 'test')\n", "room_id = self.helper.create_room_as(room_creator=user_id, tok=tok)\n", "room_version = self.get_success(self.store.get_room_version(room_id))\n", "join_event = self._build_and_send_join_event(OTHER_SERVER, OTHER_USER, room_id)\n", "sg = self.successResultOf(self.store._get_state_group_for_event(join_event.\n event_id))\n", "ev = event_from_pdu_json({'type': EventTypes.Message, 'content': {},\n 'room_id': room_id, 'sender': '@yetanotheruser:' + OTHER_SERVER,\n 'depth': join_event['depth'] + 1, 'prev_events': [join_event.event_id],\n 'auth_events': [], 'origin_server_ts': self.clock.time_msec()},\n room_version)\n", "d = run_in_background(self.handler.on_receive_pdu, OTHER_SERVER, ev)\n", "self.get_success(d)\n", "e = self.get_success(self.store.get_event(ev.event_id, allow_rejected=True))\n", "self.assertIsNotNone(e.rejected_reason)\n", "sg2 = self.successResultOf(self.store._get_state_group_for_event(ev.event_id))\n", "self.assertEqual(sg, sg2)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def __deepcopy__(self, VAR_20):...\n", "VAR_34 = copy.copy(self)\n", "VAR_34.widget = copy.deepcopy(self.widget, VAR_20)\n", "VAR_34.attrs = self.widget.attrs\n", "VAR_20[VAR_35(self)] = VAR_34\n", "return VAR_34\n" ]
[ "def __deepcopy__(self, memo):...\n", "obj = copy.copy(self)\n", "obj.widget = copy.deepcopy(self.widget, memo)\n", "obj.attrs = self.widget.attrs\n", "memo[id(self)] = obj\n", "return obj\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def __init__(self, VAR_21=None, VAR_68=()):...\n", "VAR_68 = CHECKS.get_choices()\n", "super().__init__(VAR_21=attrs, VAR_68=choices)\n" ]
[ "def __init__(self, attrs=None, choices=()):...\n", "choices = CHECKS.get_choices()\n", "super().__init__(attrs=attrs, choices=choices)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_9(self):...\n", "\"\"\"docstring\"\"\"\n" ]
[ "def get_build_slug(self):...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Docstring" ]
[ "def FUNC_8(self, VAR_26: str, *, VAR_29: Optional[Type[Any]]=None, VAR_13:...\n", "def FUNC_20(VAR_66: DecoratedCallable) ->DecoratedCallable:...\n", "self.add_api_route(VAR_26, VAR_66, VAR_29=response_model, VAR_13=\n status_code, VAR_30=tags, VAR_31=dependencies, VAR_32=summary, VAR_33=\n description, VAR_34=response_description, VAR_35=responses, VAR_36=\n deprecated, VAR_37=methods, VAR_38=operation_id, VAR_16=\n response_model_include, VAR_17=response_model_exclude, VAR_18=\n response_model_by_alias, VAR_19=response_model_exclude_unset, VAR_20=\n response_model_exclude_defaults, VAR_21=response_model_exclude_none,\n VAR_39=include_in_schema, VAR_14=response_class, VAR_28=name, VAR_40=\n callbacks)\n", "return VAR_66\n" ]
[ "def api_route(self, path: str, *, response_model: Optional[Type[Any]]=None,...\n", "def decorator(func: DecoratedCallable) ->DecoratedCallable:...\n", "self.add_api_route(path, func, response_model=response_model, status_code=\n status_code, tags=tags, dependencies=dependencies, summary=summary,\n description=description, response_description=response_description,\n responses=responses, deprecated=deprecated, methods=methods,\n operation_id=operation_id, response_model_include=\n response_model_include, response_model_exclude=response_model_exclude,\n response_model_by_alias=response_model_by_alias,\n response_model_exclude_unset=response_model_exclude_unset,\n response_model_exclude_defaults=response_model_exclude_defaults,\n response_model_exclude_none=response_model_exclude_none,\n include_in_schema=include_in_schema, response_class=response_class,\n name=name, callbacks=callbacks)\n", "return func\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "FunctionDef'", "Expr'", "Return'" ]
[ "def FUNC_76(VAR_42, VAR_38):...\n", "\"\"\"docstring\"\"\"\n", "VAR_40 = get_user_by_username_or_email(VAR_42)\n", "if not CourseEnrollment.is_enrolled(VAR_40, VAR_38):\n", "return VAR_40\n" ]
[ "def get_student(username_or_email, course_key):...\n", "\"\"\"docstring\"\"\"\n", "student = get_user_by_username_or_email(username_or_email)\n", "if not CourseEnrollment.is_enrolled(student, course_key):\n", "return student\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Return'" ]
[ "def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):...\n", "FUNC_83(self, VAR_72(self, *VAR_0, **kwargs))\n", "for VAR_6 in VAR_73:\n", "FUNC_83(self, VAR_6(self, VAR_25, *VAR_0, **kwargs))\n", "return self._return_value\n" ]
[ "def runner(self, method, *args, **kwargs):...\n", "add_to_return_value(self, fn(self, *args, **kwargs))\n", "for f in hooks:\n", "add_to_return_value(self, f(self, method, *args, **kwargs))\n", "return self._return_value\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "For", "Expr'", "Return'" ]
[ "@ensure_csrf_cookie...\n", "\"\"\"docstring\"\"\"\n", "VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)\n", "VAR_68 = ['company_name', 'company_contact_name', 'company_contact_email',\n 'total_codes', 'total_used_codes', 'total_amount', 'created',\n 'customer_reference_number', 'recipient_name', 'recipient_email',\n 'created_by', 'internal_reference', 'invoice_number', 'codes', 'course_id']\n", "VAR_69 = instructor_analytics.basic.sale_record_features(VAR_10, VAR_68)\n", "if not VAR_26:\n", "for VAR_197 in VAR_69:\n", "VAR_86, VAR_73 = instructor_analytics.csvs.format_dictlist(VAR_69, VAR_68)\n", "VAR_197['created_by'] = VAR_197['created_by'].username\n", "VAR_63 = {'course_id': VAR_10.to_deprecated_string(), 'sale': VAR_69,\n 'queried_features': VAR_68}\n", "return instructor_analytics.csvs.create_csv_response(\n 'e-commerce_sale_invoice_records.csv', VAR_86, VAR_73)\n", "return JsonResponse(VAR_63)\n" ]
[ "@ensure_csrf_cookie...\n", "\"\"\"docstring\"\"\"\n", "course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)\n", "query_features = ['company_name', 'company_contact_name',\n 'company_contact_email', 'total_codes', 'total_used_codes',\n 'total_amount', 'created', 'customer_reference_number',\n 'recipient_name', 'recipient_email', 'created_by', 'internal_reference',\n 'invoice_number', 'codes', 'course_id']\n", "sale_data = instructor_analytics.basic.sale_record_features(course_id,\n query_features)\n", "if not csv:\n", "for item in sale_data:\n", "header, datarows = instructor_analytics.csvs.format_dictlist(sale_data,\n query_features)\n", "item['created_by'] = item['created_by'].username\n", "response_payload = {'course_id': course_id.to_deprecated_string(), 'sale':\n sale_data, 'queried_features': query_features}\n", "return instructor_analytics.csvs.create_csv_response(\n 'e-commerce_sale_invoice_records.csv', header, datarows)\n", "return JsonResponse(response_payload)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "For", "Assign'", "Assign'", "Assign'", "Return'", "Return'" ]
[ "def FUNC_19(VAR_27):...\n", "\"\"\"docstring\"\"\"\n", "return smart_urlquote(VAR_27)\n", "return None\n" ]
[ "def smart_urlquote_wrapper(matched_url):...\n", "\"\"\"docstring\"\"\"\n", "return smart_urlquote(matched_url)\n", "return None\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'", "Return'" ]
[ "@VAR_1.route('/<bfile>/<report_name>/')...\n", "\"\"\"docstring\"\"\"\n", "if VAR_19 in VAR_3:\n", "return render_template('_layout.html', active_page=report_name)\n", "return abort(404)\n" ]
[ "@app.route('/<bfile>/<report_name>/')...\n", "\"\"\"docstring\"\"\"\n", "if report_name in REPORTS:\n", "return render_template('_layout.html', active_page=report_name)\n", "return abort(404)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Return'", "Return'" ]
[ "def FUNC_61(self, VAR_25):...\n", "VAR_72 = getattr(self, VAR_25, None)\n", "if not VAR_72:\n", "if not getattr(VAR_72, 'whitelisted', False):\n" ]
[ "def is_whitelisted(self, method):...\n", "fn = getattr(self, method, None)\n", "if not fn:\n", "if not getattr(fn, 'whitelisted', False):\n" ]
[ 0, 2, 0, 2 ]
[ "FunctionDef'", "Assign'", "Condition", "Condition" ]
[ "@wraps(VAR_10)...\n", "VAR_1 = self.__class__\n", "if VAR_2:\n", "VAR_42 = [arg[0] for arg in FUNC_3(VAR_1)]\n", "VAR_39 = vars(FUNC_2(VAR_1))\n", "VAR_3.update(dict(zip(VAR_42, VAR_2)))\n", "VAR_3 = dict(list(VAR_39.items()) + list(VAR_3.items()))\n", "return VAR_10(self, **kwargs)\n" ]
[ "@wraps(fn)...\n", "cls = self.__class__\n", "if args:\n", "cls_arg_names = [arg[0] for arg in get_init_arguments_and_types(cls)]\n", "env_variables = vars(parse_env_variables(cls))\n", "kwargs.update(dict(zip(cls_arg_names, args)))\n", "kwargs = dict(list(env_variables.items()) + list(kwargs.items()))\n", "return fn(self, **kwargs)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "@CLASS_4('settings')...\n", "\"\"\"docstring\"\"\"\n", "if VAR_3.path() == '/set':\n", "return FUNC_14(VAR_3)\n", "VAR_20 = jinja.render('settings.html', title='settings', configdata=\n configdata, confget=config.instance.get_str)\n", "return 'text/html', VAR_20\n" ]
[ "@add_handler('settings')...\n", "\"\"\"docstring\"\"\"\n", "if url.path() == '/set':\n", "return _qute_settings_set(url)\n", "src = jinja.render('settings.html', title='settings', configdata=configdata,\n confget=config.instance.get_str)\n", "return 'text/html', src\n" ]
[ 0, 0, 0, 0, 3, 0 ]
[ "Condition", "Docstring", "Condition", "Return'", "Assign'", "Return'" ]
[ "def FUNC_48(self, VAR_5):...\n", "VAR_3 = self.login()\n", "self.assertEqual(VAR_3.status_code, 302)\n", "self.assertURLEqual(VAR_3.url, VAR_5)\n" ]
[ "def assertLoginRedirectURLEqual(self, url):...\n", "response = self.login()\n", "self.assertEqual(response.status_code, 302)\n", "self.assertURLEqual(response.url, url)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def __call__(self, VAR_31):...\n", "VAR_3, VAR_4, VAR_73 = self._get_url_and_credentials(VAR_31.url)\n", "VAR_31.url = VAR_3\n", "if VAR_4 is not None and VAR_73 is not None:\n", "VAR_31 = HTTPBasicAuth(VAR_4, VAR_73)(VAR_31)\n", "VAR_31.register_hook('response', self.handle_401)\n", "return VAR_31\n" ]
[ "def __call__(self, req):...\n", "url, username, password = self._get_url_and_credentials(req.url)\n", "req.url = url\n", "if username is not None and password is not None:\n", "req = HTTPBasicAuth(username, password)(req)\n", "req.register_hook('response', self.handle_401)\n", "return req\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_2(self, VAR_11):...\n", "\"\"\"docstring\"\"\"\n", "return self._verify_objects(CLASS_0(VAR_2, VAR_3, VAR_10, VAR_5) for VAR_2,\n VAR_3, VAR_10, VAR_5 in VAR_11)\n" ]
[ "def verify_json_objects_for_server(self, server_and_json):...\n", "\"\"\"docstring\"\"\"\n", "return self._verify_objects(VerifyJsonRequest(server_name, json_object,\n validity_time, request_name) for server_name, json_object,\n validity_time, request_name in server_and_json)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "if VAR_2.method != 'POST':\n", "VAR_154 = FUNC_42(VAR_2, VAR_5)\n", "if len(VAR_154['share']) < 1:\n", "for obs in VAR_154.values():\n", "VAR_158 = VAR_2.POST.get('mapAnnotation')\n", "if len(obs) > 0:\n", "VAR_158 = json.loads(VAR_158)\n", "VAR_5.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)\n", "VAR_159 = VAR_2.POST.getlist('annId')\n", "VAR_90 = VAR_2.POST.get('ns', omero.constants.metadata.NSCLIENTMAPANNOTATION)\n", "if len(VAR_159) == 0 and len(VAR_158) > 0:\n", "VAR_287 = VAR_2.POST.get('duplicate', 'false')\n", "for VAR_34 in VAR_159:\n", "VAR_287.lower() == 'true'\n", "VAR_180 = VAR_5.getObject('MapAnnotation', VAR_34)\n", "if len(VAR_158) == 0:\n", "if VAR_90 == omero.constants.metadata.NSCLIENTMAPANNOTATION:\n", "if VAR_180 is None:\n", "VAR_159 = None\n", "return {'annId': VAR_159}\n", "VAR_287 = True\n", "if VAR_287:\n", "if len(VAR_158) > 0:\n", "for VAR_415, VAR_143 in VAR_154.items():\n", "VAR_180 = omero.gateway.MapAnnotationWrapper(VAR_5)\n", "VAR_180.setValue(VAR_158)\n", "VAR_84 = VAR_5.deleteObjects('/Annotation', [VAR_34])\n", "for VAR_38 in VAR_143:\n", "VAR_180.setValue(VAR_158)\n", "VAR_180.save()\n", "VAR_5._waitOnCmd(VAR_84)\n", "VAR_84.close()\n", "VAR_180 = omero.gateway.MapAnnotationWrapper(VAR_5)\n", "VAR_180.setNs(VAR_90)\n", "VAR_180.setValue(VAR_158)\n", "VAR_180.save()\n", "VAR_180.setNs(VAR_90)\n", "VAR_159.append(VAR_180.getId())\n", "VAR_180.save()\n", "for VAR_415, VAR_143 in VAR_154.items():\n", "VAR_159.append(VAR_180.getId())\n", "for VAR_38 in VAR_143:\n", "VAR_38.linkAnnotation(VAR_180)\n", "VAR_38.linkAnnotation(VAR_180)\n" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "if request.method != 'POST':\n", "oids = getObjects(request, conn)\n", "if len(oids['share']) < 1:\n", "for obs in oids.values():\n", "data = request.POST.get('mapAnnotation')\n", "if len(obs) > 0:\n", "data = json.loads(data)\n", "conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)\n", "annIds = request.POST.getlist('annId')\n", "ns = request.POST.get('ns', omero.constants.metadata.NSCLIENTMAPANNOTATION)\n", "if len(annIds) == 0 and len(data) > 0:\n", "duplicate = request.POST.get('duplicate', 'false')\n", "for annId in annIds:\n", "duplicate.lower() == 'true'\n", "ann = conn.getObject('MapAnnotation', annId)\n", "if len(data) == 0:\n", "if ns == omero.constants.metadata.NSCLIENTMAPANNOTATION:\n", "if ann is None:\n", "annIds = None\n", "return {'annId': annIds}\n", "duplicate = True\n", "if duplicate:\n", "if len(data) > 0:\n", "for k, objs in oids.items():\n", "ann = omero.gateway.MapAnnotationWrapper(conn)\n", "ann.setValue(data)\n", "handle = conn.deleteObjects('/Annotation', [annId])\n", "for obj in objs:\n", "ann.setValue(data)\n", "ann.save()\n", "conn._waitOnCmd(handle)\n", "handle.close()\n", "ann = omero.gateway.MapAnnotationWrapper(conn)\n", "ann.setNs(ns)\n", "ann.setValue(data)\n", "ann.save()\n", "ann.setNs(ns)\n", "annIds.append(ann.getId())\n", "ann.save()\n", "for k, objs in oids.items():\n", "annIds.append(ann.getId())\n", "for obj in objs:\n", "obj.linkAnnotation(ann)\n", "obj.linkAnnotation(ann)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Assign'", "Condition", "For", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "For", "Expr'", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Return'", "Assign'", "Condition", "Condition", "For", "Assign'", "Expr'", "Assign'", "For", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "For", "Expr'", "For", "Expr'", "Expr'" ]
[ "def FUNC_30(self):...\n", "VAR_5 = self._makeContext()\n", "self.assertIs(VAR_5.evaluate('nocall: list'), safe_builtins['list'])\n" ]
[ "def test_list_in_path_expr(self):...\n", "ec = self._makeContext()\n", "self.assertIs(ec.evaluate('nocall: list'), safe_builtins['list'])\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_94(self, *VAR_119, **VAR_11):...\n", "\"\"\"docstring\"\"\"\n", "VAR_153 = VAR_119[0]\n", "VAR_133 = VAR_11.get('fields', VAR_153.fields)\n", "VAR_292 = VAR_11.get('validate', True)\n", "VAR_56 = VAR_263.request\n", "VAR_101 = self.db\n", "if not (isinstance(VAR_153, VAR_0) or VAR_153 in VAR_101.tables):\n", "VAR_172 = {}\n", "for VAR_199 in ('orderby', 'groupby', 'left', 'distinct', 'limitby', 'cache'):\n", "if VAR_199 in VAR_11:\n", "VAR_293 = TABLE()\n", "VAR_172[VAR_199] = VAR_11[VAR_199]\n", "VAR_294 = []\n", "VAR_180 = []\n", "VAR_295 = []\n", "VAR_296 = VAR_11.get('showall', False)\n", "if VAR_296:\n", "VAR_294 = VAR_133\n", "VAR_297 = VAR_11.get('chkall', False)\n", "if VAR_297:\n", "for VAR_10 in VAR_133:\n", "VAR_298 = VAR_11.get('queries', [])\n", "VAR_56.vars['chk%s' % VAR_10] = 'on'\n", "VAR_299 = VAR_11.get('zero', '')\n", "if not VAR_298:\n", "VAR_298 = ['equals', 'not equal', 'greater than', 'less than',\n 'starts with', 'ends with', 'contains']\n", "VAR_298.insert(0, VAR_299)\n", "VAR_300 = VAR_11.get('query_labels', {})\n", "VAR_173 = VAR_11.get('query', VAR_153.id > 0)\n", "VAR_301 = VAR_11.get('field_labels', {})\n", "for VAR_177 in VAR_133:\n", "VAR_177 = VAR_153[VAR_177]\n", "VAR_7 = FORM(VAR_293, INPUT(_type='submit'))\n", "if not VAR_177.readable:\n", "if VAR_294:\n", "VAR_385 = VAR_177.name\n", "return VAR_7, VAR_295\n", "VAR_295 = VAR_101(VAR_173).select(*VAR_294, **attributes)\n", "VAR_295 = None\n", "VAR_386 = VAR_56.vars.get('chk' + VAR_385, None)\n", "for VAR_391 in VAR_180:\n", "VAR_387 = VAR_56.vars.get('txt' + VAR_385, None)\n", "VAR_295 = VAR_295.find(VAR_391)\n", "VAR_388 = VAR_56.vars.get('op' + VAR_385, None)\n", "VAR_266 = TR(TD(INPUT(_type='checkbox', _name='chk' + fieldname, _disabled=\n field.type == 'id', VAR_179=field.type == 'id' or chkval == 'on')), TD(\n VAR_301.get(VAR_385, VAR_177.label)), TD(SELECT([OPTION(VAR_300.get(\n VAR_178, VAR_178), _value=op) for VAR_178 in VAR_298], _name='op' +\n fieldname, VAR_179=opval)), TD(INPUT(_type='text', _name='txt' +\n fieldname, _value=txtval, VAR_16='txt' + fieldname, _class=str(field.\n type))))\n", "VAR_293.append(VAR_266)\n", "if VAR_56.post_vars and (VAR_386 or VAR_177.type == 'id'):\n", "if VAR_387 and VAR_388 != '':\n", "if VAR_177.type[0:10] == 'reference ':\n", "VAR_294.append(VAR_177)\n", "VAR_180.append(self.get_query(VAR_177, VAR_388, VAR_387, VAR_180=True))\n", "if VAR_292:\n", "VAR_179, VAR_394 = VAR_177.validate(VAR_387)\n", "VAR_173 &= self.get_query(VAR_177, VAR_388, VAR_387)\n", "if not VAR_394:\n", "VAR_173 &= self.get_query(VAR_177, VAR_388, VAR_179)\n", "VAR_266[3].append(DIV(VAR_394, _class='error'))\n" ]
[ "def search(self, *tables, **args):...\n", "\"\"\"docstring\"\"\"\n", "table = tables[0]\n", "fields = args.get('fields', table.fields)\n", "validate = args.get('validate', True)\n", "request = current.request\n", "db = self.db\n", "if not (isinstance(table, Table) or table in db.tables):\n", "attributes = {}\n", "for key in ('orderby', 'groupby', 'left', 'distinct', 'limitby', 'cache'):\n", "if key in args:\n", "tbl = TABLE()\n", "attributes[key] = args[key]\n", "selected = []\n", "refsearch = []\n", "results = []\n", "showall = args.get('showall', False)\n", "if showall:\n", "selected = fields\n", "chkall = args.get('chkall', False)\n", "if chkall:\n", "for f in fields:\n", "ops = args.get('queries', [])\n", "request.vars['chk%s' % f] = 'on'\n", "zero = args.get('zero', '')\n", "if not ops:\n", "ops = ['equals', 'not equal', 'greater than', 'less than', 'starts with',\n 'ends with', 'contains']\n", "ops.insert(0, zero)\n", "query_labels = args.get('query_labels', {})\n", "query = args.get('query', table.id > 0)\n", "field_labels = args.get('field_labels', {})\n", "for field in fields:\n", "field = table[field]\n", "form = FORM(tbl, INPUT(_type='submit'))\n", "if not field.readable:\n", "if selected:\n", "fieldname = field.name\n", "return form, results\n", "results = db(query).select(*selected, **attributes)\n", "results = None\n", "chkval = request.vars.get('chk' + fieldname, None)\n", "for r in refsearch:\n", "txtval = request.vars.get('txt' + fieldname, None)\n", "results = results.find(r)\n", "opval = request.vars.get('op' + fieldname, None)\n", "row = TR(TD(INPUT(_type='checkbox', _name='chk' + fieldname, _disabled=\n field.type == 'id', value=field.type == 'id' or chkval == 'on')), TD(\n field_labels.get(fieldname, field.label)), TD(SELECT([OPTION(\n query_labels.get(op, op), _value=op) for op in ops], _name='op' +\n fieldname, value=opval)), TD(INPUT(_type='text', _name='txt' +\n fieldname, _value=txtval, _id='txt' + fieldname, _class=str(field.type))))\n", "tbl.append(row)\n", "if request.post_vars and (chkval or field.type == 'id'):\n", "if txtval and opval != '':\n", "if field.type[0:10] == 'reference ':\n", "selected.append(field)\n", "refsearch.append(self.get_query(field, opval, txtval, refsearch=True))\n", "if validate:\n", "value, error = field.validate(txtval)\n", "query &= self.get_query(field, opval, txtval)\n", "if not error:\n", "query &= self.get_query(field, opval, value)\n", "row[3].append(DIV(error, _class='error'))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "For", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "For", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Condition", "Condition", "Condition", "Expr'", "Expr'", "Condition", "Assign'", "AugAssign'", "Condition", "AugAssign'", "Expr'" ]
[ "def FUNC_17(self):...\n", "return FUNC_17(self.items())\n" ]
[ "def urlencode(self):...\n", "return urlencode(self.items())\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_15(self):...\n", "VAR_5 = self.get_counts('json', period='month')\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertJSONEqual(VAR_5.content.decode(), [])\n" ]
[ "def test_counts_view_month(self):...\n", "response = self.get_counts('json', period='month')\n", "self.assertEqual(response.status_code, 200)\n", "self.assertJSONEqual(response.content.decode(), [])\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "@VAR_0.route('/api/jobs/incomplete_ids', methods=['GET'])...\n", "\"\"\"docstring\"\"\"\n", "VAR_116 = FUNC_58('/internal/jobs/incomplete-ids', 'get')\n", "return jsonify({'success': False, 'message': str(err)}), 400\n", "return jsonify(VAR_116)\n" ]
[ "@gui.route('/api/jobs/incomplete_ids', methods=['GET'])...\n", "\"\"\"docstring\"\"\"\n", "incomplete_ids_list = query_internal_api('/internal/jobs/incomplete-ids', 'get'\n )\n", "return jsonify({'success': False, 'message': str(err)}), 400\n", "return jsonify(incomplete_ids_list)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Return'", "Return'" ]
[ "def FUNC_65(VAR_6):...\n", "return reverse(VAR_120, VAR_116=(iid,))\n" ]
[ "def urlprefix(iid):...\n", "return reverse(prefix, args=(iid,))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self, VAR_58, VAR_6, VAR_59=False):...\n", "xmlrpc_client.Transport.__init__(self, VAR_59)\n", "VAR_88 = urllib_parse.urlparse(VAR_58)\n", "self._scheme = VAR_88.scheme\n", "self._session = VAR_6\n" ]
[ "def __init__(self, index_url, session, use_datetime=False):...\n", "xmlrpc_client.Transport.__init__(self, use_datetime)\n", "index_parts = urllib_parse.urlparse(index_url)\n", "self._scheme = index_parts.scheme\n", "self._session = session\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'" ]
[ "async def FUNC_33(self, VAR_5: str, VAR_1: EventBase, VAR_32: EventContext,...\n", "\"\"\"docstring\"\"\"\n", "VAR_28 = await self.store.get_room_version_id(VAR_1.room_id)\n", "VAR_90 = KNOWN_ROOM_VERSIONS[VAR_28]\n", "VAR_32 = await self._update_auth_events_and_context_for_auth(VAR_5, VAR_1,\n VAR_32, VAR_3)\n", "VAR_0.exception('string', VAR_1.event_id)\n", "event_auth.check(VAR_90, VAR_1, VAR_3=auth_events)\n", "VAR_0.warning('Failed auth resolution for %r because %s', VAR_1, VAR_170)\n", "return VAR_32\n", "VAR_32.rejected = RejectedReason.AUTH_ERROR\n" ]
[ "async def do_auth(self, origin: str, event: EventBase, context:...\n", "\"\"\"docstring\"\"\"\n", "room_version = await self.store.get_room_version_id(event.room_id)\n", "room_version_obj = KNOWN_ROOM_VERSIONS[room_version]\n", "context = await self._update_auth_events_and_context_for_auth(origin, event,\n context, auth_events)\n", "logger.exception(\n 'Failed to double check auth events for %s with remote. Ignoring failure and continuing processing of event.'\n , event.event_id)\n", "event_auth.check(room_version_obj, event, auth_events=auth_events)\n", "logger.warning('Failed auth resolution for %r because %s', event, e)\n", "return context\n", "context.rejected = RejectedReason.AUTH_ERROR\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Return'", "Assign'" ]
[ "def FUNC_16(self):...\n", "VAR_32 = {'a': 5, 'b': np.array(range(4))}\n", "VAR_33 = np.array([1])\n", "VAR_34 = np.array([[1], [3]])\n", "VAR_35 = os.path.join(test.get_temp_dir(), 'pickle0.pkl')\n", "VAR_36 = os.path.join(test.get_temp_dir(), 'pickle1.pkl')\n", "VAR_37 = os.path.join(test.get_temp_dir(), 'pickle2.pkl')\n", "pickle.dump(VAR_32, f)\n", "pickle.dump(VAR_33, f)\n", "pickle.dump(VAR_34, f)\n", "VAR_20 = 'x=' + VAR_35 + '[b];y=' + VAR_36 + '[c];'\n", "VAR_20 += 'z=' + VAR_37\n", "VAR_30 = saved_model_cli.load_inputs_from_input_arg_string(VAR_20, '', '')\n", "self.assertTrue(np.all(VAR_30['x'] == VAR_32['b']))\n", "self.assertTrue(np.all(VAR_30['y'] == VAR_33))\n", "self.assertTrue(np.all(VAR_30['z'] == VAR_34))\n" ]
[ "def testInputParserPickle(self):...\n", "pkl0 = {'a': 5, 'b': np.array(range(4))}\n", "pkl1 = np.array([1])\n", "pkl2 = np.array([[1], [3]])\n", "input_path0 = os.path.join(test.get_temp_dir(), 'pickle0.pkl')\n", "input_path1 = os.path.join(test.get_temp_dir(), 'pickle1.pkl')\n", "input_path2 = os.path.join(test.get_temp_dir(), 'pickle2.pkl')\n", "pickle.dump(pkl0, f)\n", "pickle.dump(pkl1, f)\n", "pickle.dump(pkl2, f)\n", "input_str = 'x=' + input_path0 + '[b];y=' + input_path1 + '[c];'\n", "input_str += 'z=' + input_path2\n", "feed_dict = saved_model_cli.load_inputs_from_input_arg_string(input_str, '', ''\n )\n", "self.assertTrue(np.all(feed_dict['x'] == pkl0['b']))\n", "self.assertTrue(np.all(feed_dict['y'] == pkl1))\n", "self.assertTrue(np.all(feed_dict['z'] == pkl2))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "AugAssign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_20(self, VAR_21):...\n", "\"\"\"docstring\"\"\"\n", "frappe.db.sql('delete from `tabSingles` where doctype=%s', self.doctype)\n", "for field, VAR_26 in iteritems(VAR_21):\n", "if field != 'doctype':\n", "if self.doctype in frappe.db.value_cache:\n", "frappe.db.sql(\n \"\"\"insert into `tabSingles` (doctype, field, value)\n\t\t\t\t\tvalues (%s, %s, %s)\"\"\"\n , (self.doctype, field, VAR_26))\n" ]
[ "def update_single(self, d):...\n", "\"\"\"docstring\"\"\"\n", "frappe.db.sql('delete from `tabSingles` where doctype=%s', self.doctype)\n", "for field, value in iteritems(d):\n", "if field != 'doctype':\n", "if self.doctype in frappe.db.value_cache:\n", "frappe.db.sql(\n \"\"\"insert into `tabSingles` (doctype, field, value)\n\t\t\t\t\tvalues (%s, %s, %s)\"\"\"\n , (self.doctype, field, value))\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "For", "Condition", "Condition", "Expr'" ]
[ "def FUNC_15(self):...\n", "VAR_22, VAR_23 = self.make_request('POST', '/createRoom', '{}')\n", "self.assertEquals(200, VAR_23.code, VAR_23.result)\n", "self.assertTrue('room_id' in VAR_23.json_body)\n" ]
[ "def test_post_room_no_keys(self):...\n", "request, channel = self.make_request('POST', '/createRoom', '{}')\n", "self.assertEquals(200, channel.code, channel.result)\n", "self.assertTrue('room_id' in channel.json_body)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]