Dataset schema: each record holds four parallel, index-aligned sequences; per-record sequence lengths range from 1 to 444.

lines: source lines with identifiers obfuscated to placeholders (VAR_n, FUNC_n)
raw_lines: the same source lines with their original identifiers
label: per-line integer labels (0 for most lines; non-zero values flag specific lines)
type: per-line AST statement types (FunctionDef', Assign', Expr', ...)
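The groups of four arrays below are raw records in this schema: one obfuscated-lines array, its raw-lines counterpart, then the label and type arrays for the same record. As a minimal sketch of how such records could be consumed (the file name records.jsonl and the one-JSON-object-per-line layout are assumptions for illustration, not something this dump specifies):

import json

# Minimal sketch, assuming each record is one JSON object holding the four
# parallel sequences described above. "records.jsonl" is a hypothetical path.
def iter_labeled_lines(path="records.jsonl"):
    with open(path) as fh:
        for row in fh:
            rec = json.loads(row)
            # Entry i of every sequence describes the same source line.
            yield from zip(rec["lines"], rec["raw_lines"], rec["label"], rec["type"])

# Example: print only the flagged (non-zero label) lines, such as the
# f-string SQL INSERT in the first record below, which carries label 4.
if __name__ == "__main__":
    for obf, raw, label, node_type in iter_labeled_lines():
        if label != 0:
            print(label, node_type, raw.rstrip())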
[ "def FUNC_9(VAR_7, VAR_8, VAR_9):...\n", "VAR_11 = f\"\"\"\n INSERT INTO {VAR_2} (user_id, product_id, title, comment, rating)\n VALUES({VAR_8}, {VAR_7}, '{VAR_9['title']}',\n '{VAR_9['comment']}', {VAR_9['rating']})\n \"\"\"\n", "VAR_12 = create_connection()\n", "VAR_12.close()\n", "VAR_13 = VAR_12.cursor()\n", "VAR_13.execute(VAR_11)\n", "VAR_12.commit()\n" ]
[ "def add_product_review(productId, userId, review):...\n", "sql_query = f\"\"\"\n INSERT INTO {REVIEWS_TABLE} (user_id, product_id, title, comment, rating)\n VALUES({userId}, {productId}, '{review['title']}',\n '{review['comment']}', {review['rating']})\n \"\"\"\n", "connection = create_connection()\n", "connection.close()\n", "cursor = connection.cursor()\n", "cursor.execute(sql_query)\n", "connection.commit()\n" ]
[ 0, 4, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_1(self, VAR_12, VAR_13):...\n", "\"\"\"docstring\"\"\"\n", "VAR_22 = self.oldest_quant(VAR_12).in_date\n", "if not VAR_22:\n", "VAR_22 = VAR_13\n", "VAR_23 = 'string' % (VAR_12.id, VAR_22)\n", "self._cr.execute(VAR_23)\n", "VAR_24 = self._cr.fetchall()\n", "if VAR_24:\n", "VAR_30 = self.env['account.invoice.line']\n", "return False\n", "for VAR_31 in VAR_24:\n", "return VAR_30.browse(VAR_31[0])\n" ]
[ "def closest_invoice_line(self, prod, date_invoice):...\n", "\"\"\"docstring\"\"\"\n", "in_date = self.oldest_quant(prod).in_date\n", "if not in_date:\n", "in_date = date_invoice\n", "query = (\n \"\"\"\n SELECT ail.id, ai.date_invoice\n FROM account_invoice_line ail\n INNER JOIN account_invoice ai\n ON ail.invoice_id = ai.id\n INNER JOIN product_product pp\n on ail.product_id = pp.id\n INNER JOIN product_template pt\n on pp.product_tmpl_id = pt.id\n WHERE pt.id = %d AND\n ai.discount_processed = true\n ORDER BY abs(ai.date_invoice - date '%s')\n LIMIT 1;\n \"\"\"\n % (prod.id, in_date))\n", "self._cr.execute(query)\n", "invoice_lines = self._cr.fetchall()\n", "if invoice_lines:\n", "invoice_lines_obj = self.env['account.invoice.line']\n", "return False\n", "for invoice_line in invoice_lines:\n", "return invoice_lines_obj.browse(invoice_line[0])\n" ]
[ 0, 0, 0, 0, 0, 4, 4, 4, 4, 0, 0, 4, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Return'", "For", "Return'" ]
[ "def FUNC_4(**VAR_2):...\n", "VAR_20 = VAR_2.get('transaction_code', None)\n", "VAR_4 = get_object_or_404(Transaction, code=transaction_code)\n", "return VAR_4\n" ]
[ "def get_transaction_from_kwargs(**kwargs):...\n", "transaction_code = kwargs.get('transaction_code', None)\n", "transaction = get_object_or_404(Transaction, code=transaction_code)\n", "return transaction\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_3(self):...\n", "\"\"\"docstring\"\"\"\n", "self.require_user_role('user', self.params.cc)\n", "self.version = utils.get_latest_version(self.params.cc)\n", "self.facility = model.Facility.get_by_key_name(self.params.facility_name,\n self.version)\n", "if not self.facility:\n", "self.facility_type = model.FacilityType.get_by_key_name(self.facility.type,\n self.version)\n", "self.attributes = dict((a.key().name(), a) for a in model.Attribute.all().\n ancestor(self.version))\n", "self.readonly_attribute_names = ['healthc_id']\n" ]
[ "def init(self):...\n", "\"\"\"docstring\"\"\"\n", "self.require_user_role('user', self.params.cc)\n", "self.version = utils.get_latest_version(self.params.cc)\n", "self.facility = model.Facility.get_by_key_name(self.params.facility_name,\n self.version)\n", "if not self.facility:\n", "self.facility_type = model.FacilityType.get_by_key_name(self.facility.type,\n self.version)\n", "self.attributes = dict((a.key().name(), a) for a in model.Attribute.all().\n ancestor(self.version))\n", "self.readonly_attribute_names = ['healthc_id']\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_0(self, VAR_1):...\n", "VAR_8 = VAR_9(self.container, self)\n", "self.frames[VAR_9] = VAR_8\n", "VAR_8.grid(row=0, column=0, sticky='nsew')\n", "self.show_frame(VAR_8)\n" ]
[ "def create_frame(self, F):...\n", "new_frame = SearchPage(self.container, self)\n", "self.frames[SearchPage] = new_frame\n", "new_frame.grid(row=0, column=0, sticky='nsew')\n", "self.show_frame(new_frame)\n" ]
[ 0, 4, 4, 4, 4 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "import binascii\n", "import copy\n", "import datetime\n", "import hashlib\n", "import psycopg2\n", "import pytz\n", "import time\n", "import uuid\n", "from bzs import const\n", "def FUNC_0():...\n", "\"\"\"docstring\"\"\"\n", "return float(time.time())\n" ]
[ "import binascii\n", "import copy\n", "import datetime\n", "import hashlib\n", "import psycopg2\n", "import pytz\n", "import time\n", "import uuid\n", "from bzs import const\n", "def get_current_time():...\n", "\"\"\"docstring\"\"\"\n", "return float(time.time())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_18(self, VAR_14, VAR_28):...\n", "VAR_12 = \"ssh %s 'hyperion --config %s/%s.yaml slave --kill'\" % (VAR_28,\n VAR_1, VAR_14)\n", "self.logger.debug('Run cmd:\\n%s' % VAR_12)\n", "FUNC_7(self.session, VAR_12)\n" ]
[ "def stop_remote_component(self, comp_name, host):...\n", "cmd = \"ssh %s 'hyperion --config %s/%s.yaml slave --kill'\" % (host,\n TMP_SLAVE_DIR, comp_name)\n", "self.logger.debug('Run cmd:\\n%s' % cmd)\n", "send_main_session_command(self.session, cmd)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_21(self, VAR_5):...\n", "for p in rt.runtime().system.partitions:\n", "p._max_jobs = VAR_5\n" ]
[ "def set_max_jobs(self, value):...\n", "for p in rt.runtime().system.partitions:\n", "p._max_jobs = value\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "For", "Assign'" ]
[ "def FUNC_17(self):...\n", "if not self._IsServerAlive():\n", "return\n", "VAR_5 = {}\n", "FUNC_1(VAR_5)\n", "SendEventNotificationAsync('BufferVisit', VAR_5)\n" ]
[ "def OnBufferVisit(self):...\n", "if not self._IsServerAlive():\n", "return\n", "extra_data = {}\n", "_AddUltiSnipsDataIfNeeded(extra_data)\n", "SendEventNotificationAsync('BufferVisit', extra_data)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_0(VAR_1, VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = set()\n", "for translated_resource in VAR_1:\n", "VAR_7.add(translated_resource.resource.project)\n", "bulk_update(VAR_1, update_fields=['total_strings', 'approved_strings',\n 'fuzzy_strings', 'strings_with_errors', 'strings_with_warnings',\n 'unreviewed_strings'])\n", "translated_resource.calculate_stats(save=False)\n", "VAR_2.aggregate_stats()\n", "for VAR_5 in VAR_7:\n", "VAR_5.aggregate_stats()\n", "ProjectLocale.objects.get(VAR_2=locale, VAR_5=project).aggregate_stats()\n" ]
[ "def update_stats(translated_resources, locale):...\n", "\"\"\"docstring\"\"\"\n", "projects = set()\n", "for translated_resource in translated_resources:\n", "projects.add(translated_resource.resource.project)\n", "bulk_update(translated_resources, update_fields=['total_strings',\n 'approved_strings', 'fuzzy_strings', 'strings_with_errors',\n 'strings_with_warnings', 'unreviewed_strings'])\n", "translated_resource.calculate_stats(save=False)\n", "locale.aggregate_stats()\n", "for project in projects:\n", "project.aggregate_stats()\n", "ProjectLocale.objects.get(locale=locale, project=project).aggregate_stats()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Expr'", "Expr'", "Expr'", "Expr'", "For", "Expr'", "Expr'" ]
[ "def FUNC_12(self):...\n", "return self.current_user()['last_name']\n" ]
[ "def current_last_name(self):...\n", "return self.current_user()['last_name']\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@mock.patch('requests.post', FUNC_0)...\n", "VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_6 = VAR_5.data['id']\n", "VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 0)\n", "VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_8 = {'image': 'autotest/example', 'dockerfile':\n \"\"\"FROM busybox\nCMD /bin/true\"\"\"}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_4 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_8 = {'cmd': 6}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 204)\n", "VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 6)\n", "VAR_4 = '/api/apps/{app_id}'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "VAR_4 = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 6)\n", "VAR_4 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_8 = {'cmd': 3}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 204)\n", "VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 3)\n", "self.assertEqual(max(VAR_7['num'] for VAR_7 in VAR_5.data['results']), 3)\n", "VAR_4 = '/api/apps/{app_id}'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "VAR_4 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_8 = {'cmd': 0}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 204)\n", "VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 0)\n", "VAR_4 = '/api/apps/{app_id}'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n" ]
[ "@mock.patch('requests.post', mock_import_repository_task)...\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 0)\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example', 'dockerfile':\n \"\"\"FROM busybox\nCMD /bin/true\"\"\"}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'cmd': 6}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 204)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 6)\n", "url = '/api/apps/{app_id}'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "url = '/api/apps/{app_id}/containers/cmd'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 6)\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'cmd': 3}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 204)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 3)\n", "self.assertEqual(max(c['num'] for c in response.data['results']), 3)\n", "url = '/api/apps/{app_id}'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'cmd': 0}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 204)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 0)\n", "url = '/api/apps/{app_id}'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_3(self, VAR_1):...\n", "self.__checkCompilation(VAR_1, False)\n" ]
[ "def compilation_ko(self, script):...\n", "self.__checkCompilation(script, False)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "\"\"\"Worker script.\"\"\"\n", "import os\n", "import time\n", "import uuid\n", "import random\n", "import socket\n", "import subprocess\n", "from urlparse import urlsplit, urlunsplit\n", "from tempfile import NamedTemporaryFile\n", "from contextlib import contextmanager\n", "from requests.exceptions import Timeout\n", "from pdm.framework.RESTClient import RESTClient, RESTException\n", "from pdm.cred.CredClient import CredClient\n", "from pdm.endpoint.EndpointClient import EndpointClient\n", "from pdm.utils.daemon import Daemon\n", "from pdm.utils.config import getConfig\n", "from .WorkqueueDB import COMMANDMAP, PROTOCOLMAP, JobType\n", "@contextmanager...\n", "\"\"\"docstring\"\"\"\n", "VAR_1, VAR_2 = CredClient().get_cred(VAR_0)\n", "proxyfile.write(VAR_2)\n", "proxyfile.write(VAR_1)\n", "proxyfile.flush()\n", "os.fsync(proxyfile.fileno())\n", "yield proxyfile\n", "\"\"\"Worker Daemon.\"\"\"\n", "def __init__(self, VAR_3=False, VAR_4=False):...\n", "\"\"\"docstring\"\"\"\n", "RESTClient.__init__(self, 'workqueue')\n", "VAR_8 = getConfig('worker')\n", "self._uid = uuid.uuid4()\n", "Daemon.__init__(self, pidfile='/tmp/worker-%s.pid' % self._uid, logfile=\n '/tmp/worker-%s.log' % self._uid, target=self.run, VAR_3=debug)\n", "self._one_shot = VAR_4\n", "self._types = [JobType[type_.upper()] for type_ in VAR_8.pop('types', (\n 'LIST', 'COPY', 'REMOVE'))]\n", "self._interpoll_sleep_time = VAR_8.pop('poll_time', 2)\n", "self._script_path = VAR_8.pop('script_path', None)\n", "if self._script_path:\n", "self._script_path = os.path.abspath(self._script_path)\n", "VAR_11 = os.path.abspath(os.path.dirname(__file__))\n", "self._logger.info('Script search path is: %s', self._script_path)\n", "self._script_path = os.path.join(VAR_11, 'scripts')\n", "self._current_process = None\n", "if VAR_8:\n", "VAR_12 = ', '.join(VAR_8.keys())\n", "def FUNC_1(self, *VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "Daemon.terminate(self, *VAR_5)\n", "if self._current_process is not None:\n", "self._current_process.terminate()\n", "def FUNC_2(self, VAR_6, VAR_7):...\n", "\"\"\"docstring\"\"\"\n", "self._logger.error('Error with job %d: %s', VAR_6, VAR_7)\n", "self.put('worker/%s' % VAR_6, data={'log': message, 'returncode': 1, 'host':\n socket.gethostbyaddr(socket.getfqdn())})\n", "self._logger.exception('Error trying to PUT back abort message')\n", "def FUNC_3(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_9 = EndpointClient()\n", "VAR_10 = True\n", "while VAR_10:\n", "if self._one_shot:\n", "VAR_10 = False\n", "VAR_18 = self.post('worker', data={'types': self._types})\n", "self._logger.warning('Timed out contacting the WorkqueueService.')\n", "VAR_13, VAR_0 = VAR_18\n", "if err.code == 404:\n", "VAR_14 = VAR_9.get_site(VAR_13['src_siteid'])\n", "self._logger.debug('No work to pick up.')\n", "self._logger.exception('Error trying to get job from WorkqueueService.')\n", "VAR_15 = [urlsplit(site) for site in VAR_14['endpoints'].itervalues()]\n", "time.sleep(self._interpoll_sleep_time)\n", "VAR_16 = [urlunsplit(site._replace(path=job['src_filepath'])) for site in\n VAR_15 if site.scheme == PROTOCOLMAP[VAR_13['protocol']]]\n", "if not VAR_16:\n", "self._abort(VAR_13['id'], \n \"Protocol '%s' not supported at src site with id %d\" % (VAR_13[\n 'protocol'], VAR_13['src_siteid']))\n", "VAR_17 = '%s %s' % (COMMANDMAP[VAR_13['type']][VAR_13['protocol']], random.\n choice(VAR_16))\n", "if VAR_13['type'] == JobType.COPY:\n", "if VAR_13['dst_siteid'] is None:\n", "self._current_process = subprocess.Popen('(set -x && %s)' % VAR_17, 
shell=\n True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=dict(os.\n environ, PATH=self._script_path, X509_USER_PROXY=proxyfile.name))\n", "self._abort(VAR_13['id'], 'No dst site id set for copy operation')\n", "if VAR_13['dst_filepath'] is None:\n", "VAR_22, VAR_5 = self._current_process.communicate()\n", "self._abort(VAR_13['id'], 'No dst site filepath set for copy operation')\n", "VAR_19 = VAR_9.get_site(VAR_13['dst_siteid'])\n", "self.set_token(VAR_0)\n", "VAR_20 = [urlsplit(site) for site in VAR_19['endpoints'].itervalues()]\n", "self.put('worker/%s' % VAR_13['id'], data={'log': log, 'returncode': self.\n _current_process.returncode, 'host': socket.gethostbyaddr(socket.\n getfqdn())})\n", "self._logger.exception('Error trying to PUT back output from subcommand.')\n", "self.set_token(None)\n", "VAR_21 = [urlunsplit(site._replace(path=job['dst_filepath'])) for site in\n VAR_20 if site.scheme == PROTOCOLMAP[VAR_13['protocol']]]\n", "if not VAR_21:\n", "self._abort(VAR_13['id'], \n \"Protocol '%s' not supported at dst site with id %d\" % (VAR_13[\n 'protocol'], VAR_13['dst_siteid']))\n", "VAR_17 += ' %s' % random.choice(VAR_21)\n" ]
[ "\"\"\"Worker script.\"\"\"\n", "import os\n", "import time\n", "import uuid\n", "import random\n", "import socket\n", "import subprocess\n", "from urlparse import urlsplit, urlunsplit\n", "from tempfile import NamedTemporaryFile\n", "from contextlib import contextmanager\n", "from requests.exceptions import Timeout\n", "from pdm.framework.RESTClient import RESTClient, RESTException\n", "from pdm.cred.CredClient import CredClient\n", "from pdm.endpoint.EndpointClient import EndpointClient\n", "from pdm.utils.daemon import Daemon\n", "from pdm.utils.config import getConfig\n", "from .WorkqueueDB import COMMANDMAP, PROTOCOLMAP, JobType\n", "@contextmanager...\n", "\"\"\"docstring\"\"\"\n", "cert, key = CredClient().get_cred(token)\n", "proxyfile.write(key)\n", "proxyfile.write(cert)\n", "proxyfile.flush()\n", "os.fsync(proxyfile.fileno())\n", "yield proxyfile\n", "\"\"\"Worker Daemon.\"\"\"\n", "def __init__(self, debug=False, one_shot=False):...\n", "\"\"\"docstring\"\"\"\n", "RESTClient.__init__(self, 'workqueue')\n", "conf = getConfig('worker')\n", "self._uid = uuid.uuid4()\n", "Daemon.__init__(self, pidfile='/tmp/worker-%s.pid' % self._uid, logfile=\n '/tmp/worker-%s.log' % self._uid, target=self.run, debug=debug)\n", "self._one_shot = one_shot\n", "self._types = [JobType[type_.upper()] for type_ in conf.pop('types', (\n 'LIST', 'COPY', 'REMOVE'))]\n", "self._interpoll_sleep_time = conf.pop('poll_time', 2)\n", "self._script_path = conf.pop('script_path', None)\n", "if self._script_path:\n", "self._script_path = os.path.abspath(self._script_path)\n", "code_path = os.path.abspath(os.path.dirname(__file__))\n", "self._logger.info('Script search path is: %s', self._script_path)\n", "self._script_path = os.path.join(code_path, 'scripts')\n", "self._current_process = None\n", "if conf:\n", "keys = ', '.join(conf.keys())\n", "def terminate(self, *_):...\n", "\"\"\"docstring\"\"\"\n", "Daemon.terminate(self, *_)\n", "if self._current_process is not None:\n", "self._current_process.terminate()\n", "def _abort(self, job_id, message):...\n", "\"\"\"docstring\"\"\"\n", "self._logger.error('Error with job %d: %s', job_id, message)\n", "self.put('worker/%s' % job_id, data={'log': message, 'returncode': 1,\n 'host': socket.gethostbyaddr(socket.getfqdn())})\n", "self._logger.exception('Error trying to PUT back abort message')\n", "def run(self):...\n", "\"\"\"docstring\"\"\"\n", "endpoint_client = EndpointClient()\n", "run = True\n", "while run:\n", "if self._one_shot:\n", "run = False\n", "response = self.post('worker', data={'types': self._types})\n", "self._logger.warning('Timed out contacting the WorkqueueService.')\n", "job, token = response\n", "if err.code == 404:\n", "src_site = endpoint_client.get_site(job['src_siteid'])\n", "self._logger.debug('No work to pick up.')\n", "self._logger.exception('Error trying to get job from WorkqueueService.')\n", "src_endpoints = [urlsplit(site) for site in src_site['endpoints'].itervalues()]\n", "time.sleep(self._interpoll_sleep_time)\n", "src = [urlunsplit(site._replace(path=job['src_filepath'])) for site in\n src_endpoints if site.scheme == PROTOCOLMAP[job['protocol']]]\n", "if not src:\n", "self._abort(job['id'], \"Protocol '%s' not supported at src site with id %d\" %\n (job['protocol'], job['src_siteid']))\n", "command = '%s %s' % (COMMANDMAP[job['type']][job['protocol']], random.\n choice(src))\n", "if job['type'] == JobType.COPY:\n", "if job['dst_siteid'] is None:\n", "self._current_process = subprocess.Popen('(set -x && %s)' % command, shell=\n 
True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=dict(os.\n environ, PATH=self._script_path, X509_USER_PROXY=proxyfile.name))\n", "self._abort(job['id'], 'No dst site id set for copy operation')\n", "if job['dst_filepath'] is None:\n", "log, _ = self._current_process.communicate()\n", "self._abort(job['id'], 'No dst site filepath set for copy operation')\n", "dst_site = endpoint_client.get_site(job['dst_siteid'])\n", "self.set_token(token)\n", "dst_endpoints = [urlsplit(site) for site in dst_site['endpoints'].itervalues()]\n", "self.put('worker/%s' % job['id'], data={'log': log, 'returncode': self.\n _current_process.returncode, 'host': socket.gethostbyaddr(socket.\n getfqdn())})\n", "self._logger.exception('Error trying to PUT back output from subcommand.')\n", "self.set_token(None)\n", "dst = [urlunsplit(site._replace(path=job['dst_filepath'])) for site in\n dst_endpoints if site.scheme == PROTOCOLMAP[job['protocol']]]\n", "if not dst:\n", "self._abort(job['id'], \"Protocol '%s' not supported at dst site with id %d\" %\n (job['protocol'], job['dst_siteid']))\n", "command += ' %s' % random.choice(dst)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 0, 2, 2, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Condition", "Docstring", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Docstring", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "FunctionDef'", "Docstring", "Expr'", "Condition", "Expr'", "FunctionDef'", "Docstring", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Condition", "Expr'", "AugAssign'" ]
[ "def FUNC_13(VAR_1, VAR_3):...\n", "VAR_18 = \"SELECT has_tokens FROM {} WHERE name = '{}'\".format(CFG(\n 'poll_table_name'), VAR_3)\n", "return FUNC_3(VAR_1, VAR_18) == 1\n" ]
[ "def checkTokenNeeded(cursor, poll_name):...\n", "req = \"SELECT has_tokens FROM {} WHERE name = '{}'\".format(CFG(\n 'poll_table_name'), poll_name)\n", "return queryOne(cursor, req) == 1\n" ]
[ 0, 4, 4 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_1(VAR_5, VAR_6, VAR_3, VAR_2, VAR_4):...\n", "VAR_22 = VAR_3[VAR_5]\n", "VAR_7 = SymbolicInput(VAR_4[0], 'IsZero', VAR_22, VAR_6)\n", "VAR_3[VAR_4[0]] = VAR_7\n", "VAR_4[0] -= 1\n", "return (VAR_7.getId(), FUNC_3(VAR_6, VAR_2)), -1\n" ]
[ "def symbAdrJump(condition, address, symbols, items, symId):...\n", "sym = symbols[condition]\n", "x = SymbolicInput(symId[0], 'IsZero', sym, address)\n", "symbols[symId[0]] = x\n", "symId[0] -= 1\n", "return (x.getId(), jumpToLoc(address, items)), -1\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "AugAssign'", "Return'" ]
[ "def FUNC_6(self, VAR_66, VAR_23):...\n", "if VAR_66:\n", "VAR_103 = cache.get(VAR_100(self.cache_prefix + '_' + VAR_66))\n", "VAR_101.errors.add(errors.EXPIRED)\n", "VAR_15 = Account._byID(VAR_103, data=True)\n", "return None\n", "if VAR_23 and VAR_15.name.lower() != VAR_23.lower():\n", "VAR_101.errors.add(errors.BAD_USERNAME)\n", "if VAR_15:\n", "return VAR_15\n" ]
[ "def run(self, key, name):...\n", "if key:\n", "uid = cache.get(str(self.cache_prefix + '_' + key))\n", "c.errors.add(errors.EXPIRED)\n", "a = Account._byID(uid, data=True)\n", "return None\n", "if name and a.name.lower() != name.lower():\n", "c.errors.add(errors.BAD_USERNAME)\n", "if a:\n", "return a\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Expr'", "Assign'", "Return'", "Condition", "Expr'", "Condition", "Return'" ]
[ "\"\"\"string\"\"\"\n", "import logging as log\n", "import numpy as np\n", "from numpy.random import randn\n", "from openqml import Device, DeviceError\n", "from openqml import Variable\n", "import projectq as pq\n", "import projectq.setups.ibm\n", "from projectq.ops import HGate, XGate, YGate, ZGate, SGate, TGate, SqrtXGate, SwapGate, SqrtSwapGate, Rx, Ry, Rz, R\n", "from .ops import CNOT, CZ, Toffoli, AllZGate, Rot, Hermitian\n", "from ._version import __version__\n", "VAR_0 = {'PauliX': XGate, 'PauliY': YGate, 'PauliZ': ZGate, 'CNOT': CNOT,\n 'CZ': CZ, 'SWAP': SwapGate, 'RX': Rx, 'RY': Ry, 'RZ': Rz, 'Rot': Rot}\n", "\"\"\"string\"\"\"\n", "VAR_1 = 'ProjectQ OpenQML plugin'\n", "VAR_2 = 'projectq'\n", "VAR_3 = '0.1.0'\n", "VAR_4 = __version__\n", "VAR_5 = 'Christian Gogolin'\n", "VAR_6 = {'backend': list(['Simulator', 'ClassicalSimulator', 'IBMBackend'])}\n", "def __init__(self, VAR_7, **VAR_8):...\n", "VAR_8.setdefault('shots', 0)\n", "super().__init__(self.short_name, VAR_8['shots'])\n", "for k, v in {'log': 'verbose'}.items():\n", "if k in VAR_8:\n", "if 'num_runs' in VAR_8:\n", "VAR_8.setdefault(v, VAR_8[k])\n", "if isinstance(VAR_8['num_runs'], int) and VAR_8['num_runs'] > 0:\n", "self.wires = VAR_7\n", "self.n_eval = VAR_8['num_runs']\n", "self.n_eval = 0\n", "self.backend = VAR_8['backend']\n", "self.kwargs = VAR_8\n", "self.eng = None\n", "self.reg = None\n", "def FUNC_0(self):...\n", "self.reg = self.eng.allocate_qureg(self.wires)\n", "def __repr__(self):...\n", "return super().__repr__() + 'Backend: ' + self.backend + '\\n'\n" ]
[ "\"\"\"\nProjectQ plugin\n========================\n\n**Module name:** :mod:`openqml.plugins.projectq`\n\n.. currentmodule:: openqml.plugins.projectq\n\nThis plugin provides the interface between OpenQML and ProjecQ.\nIt enables OpenQML to optimize quantum circuits simulable with ProjectQ.\n\nProjecQ supports several different backends. Of those, the following are useful in the current context:\n\n- projectq.backends.Simulator([gate_fusion, ...])\tSimulator is a compiler engine which simulates a quantum computer using C++-based kernels.\n- projectq.backends.ClassicalSimulator()\t A simple introspective simulator that only permits classical operations.\n- projectq.backends.IBMBackend([use_hardware, ...])\tThe IBM Backend class, which stores the circuit, transforms it to JSON QASM, and sends the circuit through the IBM API.\n\nSee PluginAPI._capabilities['backend'] for a list of backend options.\n\nFunctions\n---------\n\n.. autosummary::\n init_plugin\n\nClasses\n-------\n\n.. autosummary::\n Gate\n Observable\n PluginAPI\n\n----\n\"\"\"\n", "import logging as log\n", "import numpy as np\n", "from numpy.random import randn\n", "from openqml import Device, DeviceError\n", "from openqml import Variable\n", "import projectq as pq\n", "import projectq.setups.ibm\n", "from projectq.ops import HGate, XGate, YGate, ZGate, SGate, TGate, SqrtXGate, SwapGate, SqrtSwapGate, Rx, Ry, Rz, R\n", "from .ops import CNOT, CZ, Toffoli, AllZGate, Rot, Hermitian\n", "from ._version import __version__\n", "operator_map = {'PauliX': XGate, 'PauliY': YGate, 'PauliZ': ZGate, 'CNOT':\n CNOT, 'CZ': CZ, 'SWAP': SwapGate, 'RX': Rx, 'RY': Ry, 'RZ': Rz, 'Rot': Rot}\n", "\"\"\"ProjectQ device for OpenQML.\n\n Args:\n wires (int): The number of qubits of the device.\n\n Keyword Args for Simulator backend:\n gate_fusion (bool): If True, gates are cached and only executed once a certain gate-size has been reached (only has an effect for the c++ simulator).\n rnd_seed (int): Random seed (uses random.randint(0, 4294967295) by default).\n\n Keyword Args for IBMBackend backend:\n use_hardware (bool): If True, the code is run on the IBM quantum chip (instead of using the IBM simulator)\n num_runs (int): Number of runs to collect statistics. (default is 1024)\n verbose (bool): If True, statistics are printed, in addition to the measurement result being registered (at the end of the circuit).\n user (string): IBM Quantum Experience user name\n password (string): IBM Quantum Experience password\n device (string): Device to use (‘ibmqx4’, or ‘ibmqx5’) if use_hardware is set to True. 
Default is ibmqx4.\n retrieve_execution (int): Job ID to retrieve instead of re-running the circuit (e.g., if previous run timed out).\n \"\"\"\n", "name = 'ProjectQ OpenQML plugin'\n", "short_name = 'projectq'\n", "api_version = '0.1.0'\n", "plugin_version = __version__\n", "author = 'Christian Gogolin'\n", "_capabilities = {'backend': list(['Simulator', 'ClassicalSimulator',\n 'IBMBackend'])}\n", "def __init__(self, wires, **kwargs):...\n", "kwargs.setdefault('shots', 0)\n", "super().__init__(self.short_name, kwargs['shots'])\n", "for k, v in {'log': 'verbose'}.items():\n", "if k in kwargs:\n", "if 'num_runs' in kwargs:\n", "kwargs.setdefault(v, kwargs[k])\n", "if isinstance(kwargs['num_runs'], int) and kwargs['num_runs'] > 0:\n", "self.wires = wires\n", "self.n_eval = kwargs['num_runs']\n", "self.n_eval = 0\n", "self.backend = kwargs['backend']\n", "self.kwargs = kwargs\n", "self.eng = None\n", "self.reg = None\n", "def reset(self):...\n", "self.reg = self.eng.allocate_qureg(self.wires)\n", "def __repr__(self):...\n", "return super().__repr__() + 'Backend: ' + self.backend + '\\n'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Expr'", "For", "Condition", "Condition", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "FunctionDef'", "Return'" ]
[ "def FUNC_1(self, VAR_5):...\n", "for p in self.ALLOWED_PATHS:\n", "if VAR_5.startswith(p):\n", "return False\n", "return True\n" ]
[ "def _path_allowed(self, path):...\n", "for p in self.ALLOWED_PATHS:\n", "if path.startswith(p):\n", "return False\n", "return True\n" ]
[ 0, 5, 5, 5, 5 ]
[ "FunctionDef'", "For", "Condition", "Return'", "Return'" ]
[ "def FUNC_1(self):...\n", "\"\"\"docstring\"\"\"\n", "return 'Running check for local component %s with pid %s' % (self.comp_name,\n self.pid)\n" ]
[ "def info(self):...\n", "\"\"\"docstring\"\"\"\n", "return 'Running check for local component %s with pid %s' % (self.comp_name,\n self.pid)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_0(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "return '%s_%s' % (cfg['OAUTHCLIENT_SESSION_KEY_PREFIX'], VAR_0)\n" ]
[ "def token_session_key(remote_app):...\n", "\"\"\"docstring\"\"\"\n", "return '%s_%s' % (cfg['OAUTHCLIENT_SESSION_KEY_PREFIX'], remote_app)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_3(self):...\n", "return shellutil.run('/sbin/service waagent stop', VAR_8=False)\n" ]
[ "def stop_agent_service(self):...\n", "return shellutil.run('/sbin/service waagent stop', chk_err=False)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "async def FUNC_8(VAR_6):...\n", "VAR_10 = {'Authorization': f'Bearer {VAR_1}'}\n", "VAR_8 = await VAR_6.get('/results', VAR_10=headers)\n", "VAR_9 = await VAR_8.json()\n", "assert VAR_8.status == 200\n" ]
[ "async def test_positive_get_all_results(test_cli):...\n", "headers = {'Authorization': f'Bearer {access_token}'}\n", "resp = await test_cli.get('/results', headers=headers)\n", "resp_json = await resp.json()\n", "assert resp.status == 200\n" ]
[ 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Assign'", "Assign'", "Assign'", "Assert'" ]
[ "@property...\n", "return self._adapter\n" ]
[ "@property...\n", "return self._adapter\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_11(VAR_12):...\n", "return {'id': VAR_12.id, 'size': VAR_12.members.count(), 'collaborators':\n VAR_12.collaborator_names(VAR_7)}\n" ]
[ "def group_entry(group):...\n", "return {'id': group.id, 'size': group.members.count(), 'collaborators':\n group.collaborator_names(profile)}\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@classmethod...\n", "super(CLASS_3, VAR_0).setUpTestData()\n", "VAR_0.permission = 'testplans.change_testplan'\n", "VAR_0.update_url = reverse('ajax-update')\n" ]
[ "@classmethod...\n", "super(TestUpdateObject, cls).setUpTestData()\n", "cls.permission = 'testplans.change_testplan'\n", "cls.update_url = reverse('ajax-update')\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Assign'", "Assign'" ]
[ "@script('up')...\n", "VAR_8 = argparse.ArgumentParser('Website')\n", "VAR_8.add_argument('purpose', help='which website to bring up')\n", "VAR_8.add_argument('--dependency', type=str, help=\n 'where @periodic_job is defined', nargs='+', dest='dependencies')\n", "VAR_9 = VAR_8.parse_args(VAR_4)\n", "for dependency in VAR_9.dependencies:\n", "__import__(dependency)\n", "FUNC_3(VAR_9.purpose)\n" ]
[ "@script('up')...\n", "argument_parser = argparse.ArgumentParser('Website')\n", "argument_parser.add_argument('purpose', help='which website to bring up')\n", "argument_parser.add_argument('--dependency', type=str, help=\n 'where @periodic_job is defined', nargs='+', dest='dependencies')\n", "args = argument_parser.parse_args(argv)\n", "for dependency in args.dependencies:\n", "__import__(dependency)\n", "start_website(args.purpose)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "For", "Expr'", "Expr'" ]
[ "def FUNC_30(self):...\n", "if VAR_54.flags.in_import or self.is_new(\n", "return\n", "VAR_42 = [VAR_6.fieldname for VAR_6 in self.meta.get('fields', {\n 'set_only_once': ('=', 1)})]\n", "if VAR_42:\n", "VAR_57 = VAR_54.db.get_value(self.doctype, self.name, VAR_42, as_dict=True)\n", "for VAR_16 in VAR_42:\n", "VAR_25 = self.meta.get_field(VAR_16)\n", "if VAR_25.fieldtype == 'Date' or VAR_25.fieldtype == 'Datetime':\n", "VAR_9 = str(VAR_57.get(VAR_16))\n", "VAR_9 = VAR_57.get(VAR_16)\n", "if self.get(VAR_16) != VAR_9:\n", "VAR_54.throw(_('Value cannot be changed for {0}').format(self.meta.\n get_label(VAR_16)), VAR_54.CannotChangeConstantError)\n" ]
[ "def _validate_constants(self):...\n", "if frappe.flags.in_import or self.is_new(\n", "return\n", "constants = [d.fieldname for d in self.meta.get('fields', {'set_only_once':\n ('=', 1)})]\n", "if constants:\n", "values = frappe.db.get_value(self.doctype, self.name, constants, as_dict=True)\n", "for fieldname in constants:\n", "df = self.meta.get_field(fieldname)\n", "if df.fieldtype == 'Date' or df.fieldtype == 'Datetime':\n", "value = str(values.get(fieldname))\n", "value = values.get(fieldname)\n", "if self.get(fieldname) != value:\n", "frappe.throw(_('Value cannot be changed for {0}').format(self.meta.\n get_label(fieldname)), frappe.CannotChangeConstantError)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Condition", "Assign'", "For", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Expr'" ]
[ "def FUNC_15(self, VAR_52):...\n", "\"\"\"docstring\"\"\"\n", "return self.field_mapping[VAR_52][self.TABLE_NAME]\n" ]
[ "def _get_table_name(self, field):...\n", "\"\"\"docstring\"\"\"\n", "return self.field_mapping[field][self.TABLE_NAME]\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_3(self):...\n", "VAR_2 = self.connects()\n", "VAR_4 = 'DELETE FROM crimes;'\n", "VAR_2.close()\n", "cursor.execute(VAR_4)\n", "VAR_2.commit()\n" ]
[ "def clear_all(self):...\n", "connection = self.connects()\n", "query = 'DELETE FROM crimes;'\n", "connection.close()\n", "cursor.execute(query)\n", "connection.commit()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_12(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "VAR_52 = {}\n", "VAR_52['bugs'] = VAR_0.GET.get('bug_id', '').split(',')\n", "return None, 'string' % VAR_94(e)\n", "VAR_52['bug_system_id'] = int(VAR_0.GET.get('bug_system_id', 1))\n", "VAR_52['runs'] = map(int, VAR_0.GET.get('case_runs', '').split(','))\n", "if VAR_0.GET.get('a') not in ('add', 'remove'):\n", "return None, 'Actions only allow \"add\" and \"remove\".'\n", "VAR_52['action'] = VAR_0.GET.get('a')\n", "VAR_52['bz_external_track'] = True if VAR_0.GET.get('bz_external_track', False\n ) else False\n", "return VAR_52, ''\n" ]
[ "def clean_bug_form(request):...\n", "\"\"\"docstring\"\"\"\n", "data = {}\n", "data['bugs'] = request.GET.get('bug_id', '').split(',')\n", "return None, 'Please specify only integers for bugs, caseruns(using comma to seperate IDs), and bug_system. (DEBUG INFO: %s)' % str(\n e)\n", "data['bug_system_id'] = int(request.GET.get('bug_system_id', 1))\n", "data['runs'] = map(int, request.GET.get('case_runs', '').split(','))\n", "if request.GET.get('a') not in ('add', 'remove'):\n", "return None, 'Actions only allow \"add\" and \"remove\".'\n", "data['action'] = request.GET.get('a')\n", "data['bz_external_track'] = True if request.GET.get('bz_external_track', False\n ) else False\n", "return data, ''\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Return'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Return'" ]
[ "@mock.patch('requests.post', FUNC_0)...\n", "\"\"\"docstring\"\"\"\n", "VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_6 = VAR_5.data['id']\n", "VAR_14 = {'username': 'autotest', 'app': VAR_6}\n", "VAR_4 = '/api/hooks/builds'.format(**locals())\n", "VAR_7 = {'receive_user': 'autotest', 'receive_repo': VAR_6, 'image':\n '{app_id}:v2'.format(**locals())}\n", "self.assertIsNone(self.client.logout())\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 403)\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json', HTTP_X_DEIS_BUILDER_AUTH=settings.BUILDER_KEY)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertIn('release', VAR_5.data)\n", "self.assertIn('version', VAR_5.data['release'])\n", "self.assertIn('domains', VAR_5.data)\n" ]
[ "@mock.patch('requests.post', mock_import_repository_task)...\n", "\"\"\"docstring\"\"\"\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "build = {'username': 'autotest', 'app': app_id}\n", "url = '/api/hooks/builds'.format(**locals())\n", "body = {'receive_user': 'autotest', 'receive_repo': app_id, 'image':\n '{app_id}:v2'.format(**locals())}\n", "self.assertIsNone(self.client.logout())\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 403)\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json', HTTP_X_DEIS_BUILDER_AUTH=settings.BUILDER_KEY)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertIn('release', response.data)\n", "self.assertIn('version', response.data['release'])\n", "self.assertIn('domains', response.data)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "import numpy as np\n", "print('Importing Tensorflow libraries')\n", "from tensorflow.contrib.lite.python import interpreter as interpreter_wrapper\n", "import tensorflow as tf\n", "VAR_0 = '/home/pi/ballance/Ballance/Tensorflow/ballancenet_conv_3_quant.tflite'\n", "VAR_1 = (\n '/home/pi/ballance/Ballance/Tensorflow/ballancenet_boardcorner_conv_2_quant.tflite'\n )\n", "def FUNC_0(VAR_2, VAR_3):...\n", "print('Quantizing model')\n", "VAR_5 = tf.contrib.lite.TocoConverter.from_saved_model(VAR_2)\n", "VAR_5.post_training_quantize = True\n", "VAR_6 = VAR_5.convert()\n", "open(VAR_3 + '.tflite', 'wb').write(VAR_6)\n", "def __init__(self):...\n", "print('Creating TensorflowProcessor object')\n", "print('Loading ball detection tflite model')\n", "self.ball_detector_interpreter = interpreter_wrapper.Interpreter(VAR_2=\n TensorflowProcessor.ball_detector_model_path)\n", "self.ball_detector_interpreter.allocate_tensors()\n", "self.ball_detector_input_details = (self.ball_detector_interpreter.\n get_input_details())\n", "self.ball_detector_output_details = (self.ball_detector_interpreter.\n get_output_details())\n", "print('Loading corner detection tflite model')\n", "self.corner_detector_interpreter = interpreter_wrapper.Interpreter(VAR_2=\n TensorflowProcessor.corner_detector_model_path)\n", "self.corner_detector_interpreter.allocate_tensors()\n", "self.corner_detector_input_details = (self.corner_detector_interpreter.\n get_input_details())\n", "self.corner_detector_output_details = (self.corner_detector_interpreter.\n get_output_details())\n", "print('TensorflowProcessor object created')\n", "def FUNC_1(self, VAR_4):...\n", "VAR_4 = np.float32(VAR_4)\n", "VAR_4 /= 255.0\n", "VAR_4 = np.expand_dims(VAR_4, axis=0)\n", "VAR_4 = np.expand_dims(VAR_4, axis=3)\n", "self.ball_detector_interpreter.set_tensor(self.ball_detector_input_details[\n 0]['index'], VAR_4)\n", "self.ball_detector_interpreter.invoke()\n", "return np.squeeze(self.ball_detector_interpreter.get_tensor(self.\n ball_detector_output_details[0]['index']))\n" ]
[ "import numpy as np\n", "print('Importing Tensorflow libraries')\n", "from tensorflow.contrib.lite.python import interpreter as interpreter_wrapper\n", "import tensorflow as tf\n", "ball_detector_model_path = (\n '/home/pi/ballance/Ballance/Tensorflow/ballancenet_conv_3_quant.tflite')\n", "corner_detector_model_path = (\n '/home/pi/ballance/Ballance/Tensorflow/ballancenet_boardcorner_conv_2_quant.tflite'\n )\n", "def QuantizeModel(model_path, output_file_name):...\n", "print('Quantizing model')\n", "converter = tf.contrib.lite.TocoConverter.from_saved_model(model_path)\n", "converter.post_training_quantize = True\n", "quant_model = converter.convert()\n", "open(output_file_name + '.tflite', 'wb').write(quant_model)\n", "def __init__(self):...\n", "print('Creating TensorflowProcessor object')\n", "print('Loading ball detection tflite model')\n", "self.ball_detector_interpreter = interpreter_wrapper.Interpreter(model_path\n =TensorflowProcessor.ball_detector_model_path)\n", "self.ball_detector_interpreter.allocate_tensors()\n", "self.ball_detector_input_details = (self.ball_detector_interpreter.\n get_input_details())\n", "self.ball_detector_output_details = (self.ball_detector_interpreter.\n get_output_details())\n", "print('Loading corner detection tflite model')\n", "self.corner_detector_interpreter = interpreter_wrapper.Interpreter(model_path\n =TensorflowProcessor.corner_detector_model_path)\n", "self.corner_detector_interpreter.allocate_tensors()\n", "self.corner_detector_input_details = (self.corner_detector_interpreter.\n get_input_details())\n", "self.corner_detector_output_details = (self.corner_detector_interpreter.\n get_output_details())\n", "print('TensorflowProcessor object created')\n", "def getBallPosition(self, image):...\n", "image = np.float32(image)\n", "image /= 255.0\n", "image = np.expand_dims(image, axis=0)\n", "image = np.expand_dims(image, axis=3)\n", "self.ball_detector_interpreter.set_tensor(self.ball_detector_input_details[\n 0]['index'], image)\n", "self.ball_detector_interpreter.invoke()\n", "return np.squeeze(self.ball_detector_interpreter.get_tensor(self.\n ball_detector_output_details[0]['index']))\n" ]
[ 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Expr'", "ImportFrom'", "Import'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "AugAssign'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def __call__(self, VAR_20):...\n", "VAR_32 = set()\n", "for VAR_53 in VAR_20:\n", "if VAR_53.depth <= self.maxdepth:\n", "return VAR_32\n", "VAR_32.add(VAR_53)\n" ]
[ "def __call__(self, urls):...\n", "newurls = set()\n", "for u in urls:\n", "if u.depth <= self.maxdepth:\n", "return newurls\n", "newurls.add(u)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Condition", "Return'", "Expr'" ]
[ "def FUNC_12(self):...\n", "self.run_test_case(self.scenario.delete_server_group(), max_retries=5)\n" ]
[ "def test_x_delete_server_group(self):...\n", "self.run_test_case(self.scenario.delete_server_group(), max_retries=5)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_0(self, VAR_0, VAR_1):...\n", "VAR_2 = None\n", "VAR_3 = []\n", "VAR_4 = []\n", "VAR_5 = []\n", "if len(VAR_1) == 0:\n", "VAR_2 = 'global'\n", "if len(VAR_1) == 1:\n", "if not VAR_2:\n", "VAR_3 = self.getOSNames(VAR_1)\n", "VAR_3 = []\n", "VAR_4 = self.getApplianceNames()\n", "VAR_4 = []\n", "VAR_5 = self.getHostnames()\n", "VAR_5 = []\n", "if VAR_1[0] in VAR_3:\n", "if not VAR_2:\n", "VAR_2 = 'os'\n", "if VAR_1[0] in VAR_4:\n", "VAR_6 = None\n", "VAR_2 = 'appliance'\n", "if VAR_1[0] in VAR_5:\n", "if VAR_2 == 'global':\n", "VAR_2 = 'host'\n", "VAR_6 = 'string'\n", "if VAR_2 == 'os':\n", "if not VAR_6:\n", "return\n", "if VAR_2 == 'appliance':\n", "return\n", "VAR_7 = None\n", "VAR_6 = 'string' % VAR_1[0]\n", "if VAR_2 == 'host':\n", "if VAR_2 == 'global':\n", "VAR_6 = 'string' % VAR_1[0]\n", "VAR_7 = 'global'\n", "if VAR_2 in ['appliance', 'host']:\n", "self.beginOutput()\n", "VAR_7 = VAR_1[0]\n", "self.db.execute(VAR_6)\n", "VAR_8 = 0\n", "for row in self.db.fetchall():\n", "VAR_9, VAR_10, VAR_11, VAR_12, VAR_13, VAR_14 = row\n", "self.endOutput(header=['scope', 'enclosure', 'adapter', 'slot', 'raidlevel',\n 'arrayid', 'options'], trimOwner=False)\n", "if VAR_8 > 0:\n", "VAR_7 = None\n", "if VAR_9 == -1:\n", "VAR_9 = None\n", "if VAR_10 == -1:\n", "VAR_10 = None\n", "if VAR_11 == -1:\n", "VAR_11 = '*'\n", "if VAR_12 == '-1':\n", "VAR_12 = 'hotspare'\n", "if VAR_13 == -1:\n", "VAR_13 = 'global'\n", "if VAR_13 == -2:\n", "VAR_14 = VAR_14.strip('\"')\n", "VAR_13 = '*'\n", "self.addOutput(VAR_7, [VAR_10, VAR_9, VAR_11, VAR_12, VAR_13, VAR_14])\n", "VAR_8 += 1\n" ]
[ "def run(self, params, args):...\n", "scope = None\n", "oses = []\n", "appliances = []\n", "hosts = []\n", "if len(args) == 0:\n", "scope = 'global'\n", "if len(args) == 1:\n", "if not scope:\n", "oses = self.getOSNames(args)\n", "oses = []\n", "appliances = self.getApplianceNames()\n", "appliances = []\n", "hosts = self.getHostnames()\n", "hosts = []\n", "if args[0] in oses:\n", "if not scope:\n", "scope = 'os'\n", "if args[0] in appliances:\n", "query = None\n", "scope = 'appliance'\n", "if args[0] in hosts:\n", "if scope == 'global':\n", "scope = 'host'\n", "query = \"\"\"select adapter, enclosure, slot, raidlevel,\n\t\t\t\tarrayid, options from storage_controller \n\t\t\t\twhere scope = 'global'\n\t\t\t\torder by enclosure, adapter, slot\"\"\"\n", "if scope == 'os':\n", "if not query:\n", "return\n", "if scope == 'appliance':\n", "return\n", "name = None\n", "query = (\n \"\"\"select adapter, enclosure, slot,\n\t\t\t\traidlevel, arrayid, options\n\t\t\t\tfrom storage_controller where\n\t\t\t\tscope = \"appliance\" and tableid = (select\n\t\t\t\tid from appliances\n\t\t\t\twhere name = '%s')\n\t\t\t\torder by enclosure, adapter, slot\"\"\"\n % args[0])\n", "if scope == 'host':\n", "if scope == 'global':\n", "query = (\n \"\"\"select adapter, enclosure, slot,\n\t\t\t\traidlevel, arrayid, options\n\t\t\t\tfrom storage_controller where\n\t\t\t\tscope = \"host\" and tableid = (select\n\t\t\t\tid from nodes where name = '%s')\n\t\t\t\torder by enclosure, adapter, slot\"\"\"\n % args[0])\n", "name = 'global'\n", "if scope in ['appliance', 'host']:\n", "self.beginOutput()\n", "name = args[0]\n", "self.db.execute(query)\n", "i = 0\n", "for row in self.db.fetchall():\n", "adapter, enclosure, slot, raidlevel, arrayid, options = row\n", "self.endOutput(header=['scope', 'enclosure', 'adapter', 'slot', 'raidlevel',\n 'arrayid', 'options'], trimOwner=False)\n", "if i > 0:\n", "name = None\n", "if adapter == -1:\n", "adapter = None\n", "if enclosure == -1:\n", "enclosure = None\n", "if slot == -1:\n", "slot = '*'\n", "if raidlevel == '-1':\n", "raidlevel = 'hotspare'\n", "if arrayid == -1:\n", "arrayid = 'global'\n", "if arrayid == -2:\n", "options = options.strip('\"')\n", "arrayid = '*'\n", "self.addOutput(name, [enclosure, adapter, slot, raidlevel, arrayid, options])\n", "i += 1\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Return'", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Assign'", "For", "Assign'", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "AugAssign'" ]
[ "def FUNC_11(VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "VAR_1.value = beamr.interpreters.TableEnv(VAR_1.value)\n", "return VAR_1\n" ]
[ "def t_TABENV(t):...\n", "\"\"\"docstring\"\"\"\n", "t.value = beamr.interpreters.TableEnv(t.value)\n", "return t\n" ]
[ 0, 0, 2, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def __init__(self, VAR_31):...\n", "\"\"\"docstring\"\"\"\n", "logging.info('TaskDetails(%s)', VAR_31)\n", "if not isinstance(VAR_31, dict):\n", "self.bot_id = VAR_31['bot_id']\n", "self.command = VAR_31['command'] or []\n", "self.inputs_ref = VAR_31['inputs_ref']\n", "self.extra_args = VAR_31['extra_args']\n", "self.env = {k.encode('utf-8'): v.encode('utf-8') for k, v in VAR_31['env'].\n iteritems()}\n", "self.grace_period = VAR_31['grace_period']\n", "self.hard_timeout = VAR_31['hard_timeout']\n", "self.io_timeout = VAR_31['io_timeout']\n", "self.task_id = VAR_31['task_id']\n" ]
[ "def __init__(self, data):...\n", "\"\"\"docstring\"\"\"\n", "logging.info('TaskDetails(%s)', data)\n", "if not isinstance(data, dict):\n", "self.bot_id = data['bot_id']\n", "self.command = data['command'] or []\n", "self.inputs_ref = data['inputs_ref']\n", "self.extra_args = data['extra_args']\n", "self.env = {k.encode('utf-8'): v.encode('utf-8') for k, v in data['env'].\n iteritems()}\n", "self.grace_period = data['grace_period']\n", "self.hard_timeout = data['hard_timeout']\n", "self.io_timeout = data['io_timeout']\n", "self.task_id = data['task_id']\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_6(self, VAR_21, VAR_22, VAR_23=None):...\n", "if VAR_23 is None:\n", "VAR_23 = len(VAR_22)\n", "if self._connection is None:\n", "self._connection = self._engine.connect()\n", "VAR_22.to_sql(VAR_21, self._connection, if_exists='append', index=False,\n chunksize=batch_size)\n" ]
[ "def insert(self, table, dataframe, batch_size=None):...\n", "if batch_size is None:\n", "batch_size = len(dataframe)\n", "if self._connection is None:\n", "self._connection = self._engine.connect()\n", "dataframe.to_sql(table, self._connection, if_exists='append', index=False,\n chunksize=batch_size)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Assign'", "Expr'" ]
[ "@auth.route('/unconfirmed')...\n", "if current_user.is_anonymous or current_user.confirmed:\n", "return redirect(url_for('main.index'))\n", "return render_template('auth/unconfirmed.html')\n" ]
[ "@auth.route('/unconfirmed')...\n", "if current_user.is_anonymous or current_user.confirmed:\n", "return redirect(url_for('main.index'))\n", "return render_template('auth/unconfirmed.html')\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_5(self):...\n", "self.runall(self.checks)\n", "VAR_15 = self.runner.stats\n", "self.assertEqual(7, VAR_15.num_cases())\n", "self.assertEqual(4, len(VAR_15.failures()))\n", "self.assertEqual(2, self._num_failures_stage('setup'))\n", "self.assertEqual(1, self._num_failures_stage('sanity'))\n", "self.assertEqual(1, self._num_failures_stage('performance'))\n" ]
[ "def test_runall(self):...\n", "self.runall(self.checks)\n", "stats = self.runner.stats\n", "self.assertEqual(7, stats.num_cases())\n", "self.assertEqual(4, len(stats.failures()))\n", "self.assertEqual(2, self._num_failures_stage('setup'))\n", "self.assertEqual(1, self._num_failures_stage('sanity'))\n", "self.assertEqual(1, self._num_failures_stage('performance'))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_8(VAR_12):...\n", "" ]
[ "def get_infos(row_datas):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "import mysql.connector\n", "from serverSetup import DBUSER, DBPASS\n", "def __init__(self, VAR_0, VAR_1, VAR_2, VAR_3, VAR_4=[]):...\n", "self.host = VAR_0\n", "self.user = VAR_1\n", "self.passwd = VAR_2\n", "self.database = VAR_3\n", "self.filterList = VAR_4\n", "def FUNC_0(self):...\n", "self.mydb = mysql.connector.connect(VAR_0=self.host, VAR_1=self.user, VAR_2\n =self.passwd, VAR_3=self.database)\n", "self.cursor = self.mydb.cursor()\n", "def FUNC_1(self):...\n", "self.cursor.close()\n", "self.mydb.close()\n", "def FUNC_2(self):...\n", "self.connect()\n", "VAR_7 = 'SELECT * FROM badwords'\n", "self.cursor.execute(VAR_7)\n", "VAR_8 = self.cursor.fetchall()\n", "VAR_9 = []\n", "for row in VAR_8:\n", "VAR_9.append(row[0])\n", "self.close()\n", "return VAR_9\n" ]
[ "import mysql.connector\n", "from serverSetup import DBUSER, DBPASS\n", "def __init__(self, host, user, passwd, database, filterList=[]):...\n", "self.host = host\n", "self.user = user\n", "self.passwd = passwd\n", "self.database = database\n", "self.filterList = filterList\n", "def connect(self):...\n", "self.mydb = mysql.connector.connect(host=self.host, user=self.user, passwd=\n self.passwd, database=self.database)\n", "self.cursor = self.mydb.cursor()\n", "def close(self):...\n", "self.cursor.close()\n", "self.mydb.close()\n", "def fetch(self):...\n", "self.connect()\n", "sqlFormula = 'SELECT * FROM badwords'\n", "self.cursor.execute(sqlFormula)\n", "myresults = self.cursor.fetchall()\n", "badWordArray = []\n", "for row in myresults:\n", "badWordArray.append(row[0])\n", "self.close()\n", "return badWordArray\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_31(self):...\n", "" ]
[ "def touch_or_create(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_16(self, VAR_9):...\n", "if not self.logged_in():\n", "print('shrub: unauthenticated; use \"login [username] to log in first')\n", "VAR_20 = self.send_cmd('edit_issue{} {}'.format(self.insecure_mode, VAR_9))\n", "return\n", "print(VAR_20)\n" ]
[ "def do_edit_issue(self, line):...\n", "if not self.logged_in():\n", "print('shrub: unauthenticated; use \"login [username] to log in first')\n", "response = self.send_cmd('edit_issue{} {}'.format(self.insecure_mode, line))\n", "return\n", "print(response)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Assign'", "Return'", "Expr'" ]
[ "def FUNC_6(self, VAR_49):...\n", "" ]
[ "def run(self, sr_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_0(self):...\n", "\"\"\"docstring\"\"\"\n", "\"\"\"string\"\"\"\n", "VAR_2 = request.form.get('userId')\n", "if VAR_2 is None:\n", "VAR_28 = db_interac.get_user_profiles()\n", "VAR_3 = db_interac.get_user_profile(VAR_2)\n", "if not VAR_28:\n", "VAR_4 = {}\n", "return {}, 500\n", "VAR_29 = []\n", "if VAR_3[0] == False:\n", "for user in VAR_28:\n", "VAR_4['error'] = 'error adding profile'\n", "VAR_4['username'] = VAR_3[1]\n", "VAR_29.append({'id': user[0], 'username': user[1], 'firstName': user[2],\n 'lastName': user[3], 'bio': user[4]})\n", "return VAR_29, 200\n", "return VAR_4, 200\n", "VAR_4['firstName'] = VAR_3[2]\n", "VAR_4['lastName'] = VAR_3[3]\n", "VAR_4['bio'] = VAR_3[4]\n", "VAR_4['messages'] = VAR_3[5]\n" ]
[ "def get(self):...\n", "\"\"\"docstring\"\"\"\n", "\"\"\"\n # Check the request comes from appropriate location.\n if not utils.validate_ip(request.remote_addr)\n return {}, 403\n \"\"\"\n", "user_id = request.form.get('userId')\n", "if user_id is None:\n", "users = db_interac.get_user_profiles()\n", "user_profile = db_interac.get_user_profile(user_id)\n", "if not users:\n", "return_obj = {}\n", "return {}, 500\n", "response_obj = []\n", "if user_profile[0] == False:\n", "for user in users:\n", "return_obj['error'] = 'error adding profile'\n", "return_obj['username'] = user_profile[1]\n", "response_obj.append({'id': user[0], 'username': user[1], 'firstName': user[\n 2], 'lastName': user[3], 'bio': user[4]})\n", "return response_obj, 200\n", "return return_obj, 200\n", "return_obj['firstName'] = user_profile[2]\n", "return_obj['lastName'] = user_profile[3]\n", "return_obj['bio'] = user_profile[4]\n", "return_obj['messages'] = user_profile[5]\n" ]
[ 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Condition", "For", "Assign'", "Assign'", "Expr'", "Return'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_5(self):...\n", "\"\"\"docstring\"\"\"\n", "if not self.run_local:\n", "if not (self.configuration.san_password or self.configuration.san_private_key):\n", "if not self.configuration.san_ip:\n" ]
[ "def check_for_setup_error(self):...\n", "\"\"\"docstring\"\"\"\n", "if not self.run_local:\n", "if not (self.configuration.san_password or self.configuration.san_private_key):\n", "if not self.configuration.san_ip:\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Condition", "Condition" ]
[ "def FUNC_10(self, VAR_10, VAR_12):...\n", "\"\"\"docstring\"\"\"\n", "self._create_server(VAR_12)\n", "VAR_5 = {}\n", "VAR_5['volumeName'] = VAR_10['name']\n", "VAR_5['serverName'] = VAR_12['host']\n", "self._cliq_run_xml('assignVolumeToServer', VAR_5)\n", "VAR_32 = self._get_iscsi_properties(VAR_10)\n", "return {'driver_volume_type': 'iscsi', 'data': VAR_32}\n" ]
[ "def initialize_connection(self, volume, connector):...\n", "\"\"\"docstring\"\"\"\n", "self._create_server(connector)\n", "cliq_args = {}\n", "cliq_args['volumeName'] = volume['name']\n", "cliq_args['serverName'] = connector['host']\n", "self._cliq_run_xml('assignVolumeToServer', cliq_args)\n", "iscsi_properties = self._get_iscsi_properties(volume)\n", "return {'driver_volume_type': 'iscsi', 'data': iscsi_properties}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "@property...\n", "return import_from_settings('LOGIN_REDIRECT_URL_FAILURE', '/')\n" ]
[ "@property...\n", "return import_from_settings('LOGIN_REDIRECT_URL_FAILURE', '/')\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_14(self, VAR_14):...\n", "" ]
[ "def is_boolean(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_32(VAR_19):...\n", "return dict((expr, vimsupport.VimExpressionToPythonType(expr)) for expr in\n VAR_19)\n" ]
[ "def BuildExtraConfData(extra_conf_vim_data):...\n", "return dict((expr, vimsupport.VimExpressionToPythonType(expr)) for expr in\n extra_conf_vim_data)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_1(self):...\n", "\"\"\"docstring\"\"\"\n", "return 'Running check for local component %s with pid %s' % (self.comp_name,\n self.pid)\n" ]
[ "def info(self):...\n", "\"\"\"docstring\"\"\"\n", "return 'Running check for local component %s with pid %s' % (self.comp_name,\n self.pid)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def __init__(self, VAR_5, VAR_12):...\n", "self.message = VAR_5\n", "if isinstance(VAR_12, str) or VAR_12 is None:\n", "VAR_12 = [VAR_12]\n", "self.deps = [d for d in VAR_12 if d is not None]\n", "self.fatal = None in VAR_12\n" ]
[ "def __init__(self, message, deps):...\n", "self.message = message\n", "if isinstance(deps, str) or deps is None:\n", "deps = [deps]\n", "self.deps = [d for d in deps if d is not None]\n", "self.fatal = None in deps\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_2(VAR_3, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_5 = VAR_5.copy()\n", "VAR_18 = FUNC_4(VAR_3)\n", "if VAR_18:\n", "VAR_5['data']['registration_form_desc']['fields'] = FUNC_3(VAR_5['data'][\n 'registration_form_desc'])\n", "VAR_5['enable_enterprise_sidebar'] = False\n", "VAR_5.update(VAR_18)\n", "return VAR_5\n", "VAR_5['enable_enterprise_sidebar'] = True\n", "VAR_5['data']['hide_auth_warnings'] = True\n" ]
[ "def update_context_for_enterprise(request, context):...\n", "\"\"\"docstring\"\"\"\n", "context = context.copy()\n", "sidebar_context = enterprise_sidebar_context(request)\n", "if sidebar_context:\n", "context['data']['registration_form_desc']['fields'] = enterprise_fields_only(\n context['data']['registration_form_desc'])\n", "context['enable_enterprise_sidebar'] = False\n", "context.update(sidebar_context)\n", "return context\n", "context['enable_enterprise_sidebar'] = True\n", "context['data']['hide_auth_warnings'] = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Return'", "Assign'", "Assign'" ]
[ "def FUNC_3(self):...\n", "VAR_15 = []\n", "VAR_14 = datetime.now(timezone(timedelta(hours=+9), 'JST'))\n", "VAR_14 = VAR_14.replace(hour=0, minute=0, second=0, microsecond=0)\n", "for day in range(30)[::-1]:\n", "VAR_25 = VAR_14 - timedelta(days=day)\n", "return VAR_15\n", "VAR_26 = VAR_25.strftime('%Y-%m-%d')\n", "VAR_15.append(VAR_26)\n" ]
[ "def thirty_day_labels(self):...\n", "labels = []\n", "today = datetime.now(timezone(timedelta(hours=+9), 'JST'))\n", "today = today.replace(hour=0, minute=0, second=0, microsecond=0)\n", "for day in range(30)[::-1]:\n", "date = today - timedelta(days=day)\n", "return labels\n", "label = date.strftime('%Y-%m-%d')\n", "labels.append(label)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Assign'", "Expr'" ]
[ "def FUNC_10(*VAR_11):...\n", "VAR_12 = ' '.join(str(elem) for elem in VAR_11)\n", "return VAR_12\n" ]
[ "def fastboot_call(*args):...\n", "arg_str = ' '.join(str(elem) for elem in args)\n", "return arg_str\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_6(VAR_14, VAR_19, VAR_20=None, VAR_15=Article, VAR_3=None, VAR_4=...\n", "if VAR_14.method == 'GET':\n", "VAR_27 = {'title': VAR_19}\n", "return HttpResponseNotAllowed(['GET'])\n", "if VAR_3 is not None:\n", "VAR_8 = get_object_or_404(VAR_5, **{group_slug_field: group_slug})\n", "VAR_46 = VAR_47 = True\n", "VAR_27.update({'content_type': get_ct(VAR_8), 'object_id': VAR_8.id})\n", "if not VAR_46:\n", "VAR_46 = FUNC_3(VAR_14.user, VAR_8, VAR_12, VAR_13)\n", "return HttpResponseForbidden()\n", "VAR_49 = False\n", "VAR_47 = FUNC_4(VAR_14.user, VAR_8, VAR_12)\n", "VAR_50 = None\n", "VAR_28 = VAR_2.get(**article_args)\n", "if VAR_20 is not None:\n", "VAR_28 = ChangeSet.objects.filter(VAR_56=title).order_by('-revision')[0\n ].article\n", "VAR_28 = VAR_15(**article_args)\n", "if VAR_41 is not None:\n", "VAR_53 = get_object_or_404(VAR_28.changeset_set, VAR_20=revision)\n", "VAR_48 = {'article': VAR_28, 'revision': VAR_20, 'redirected_from': VAR_50,\n 'allow_write': VAR_47}\n", "VAR_50 = VAR_19\n", "VAR_49 = VAR_41.is_observing(VAR_28, VAR_14.user)\n", "VAR_28.content = VAR_53.get_content()\n", "if VAR_41 is not None:\n", "VAR_48.update({'is_observing': VAR_49, 'can_observe': True})\n", "if VAR_3 is not None:\n", "VAR_48['group'] = VAR_8\n", "if VAR_18 is not None:\n", "VAR_48.update(VAR_18)\n", "return render_to_response('/'.join([VAR_17, VAR_16]), VAR_48,\n context_instance=RequestContext(request))\n" ]
[ "def view_article(request, title, revision=None, ArticleClass=Article,...\n", "if request.method == 'GET':\n", "article_args = {'title': title}\n", "return HttpResponseNotAllowed(['GET'])\n", "if group_slug is not None:\n", "group = get_object_or_404(group_qs, **{group_slug_field: group_slug})\n", "allow_read = allow_write = True\n", "article_args.update({'content_type': get_ct(group), 'object_id': group.id})\n", "if not allow_read:\n", "allow_read = has_read_perm(request.user, group, is_member, is_private)\n", "return HttpResponseForbidden()\n", "is_observing = False\n", "allow_write = has_write_perm(request.user, group, is_member)\n", "redirected_from = None\n", "article = article_qs.get(**article_args)\n", "if revision is not None:\n", "article = ChangeSet.objects.filter(old_title=title).order_by('-revision')[0\n ].article\n", "article = ArticleClass(**article_args)\n", "if notification is not None:\n", "changeset = get_object_or_404(article.changeset_set, revision=revision)\n", "template_params = {'article': article, 'revision': revision,\n 'redirected_from': redirected_from, 'allow_write': allow_write}\n", "redirected_from = title\n", "is_observing = notification.is_observing(article, request.user)\n", "article.content = changeset.get_content()\n", "if notification is not None:\n", "template_params.update({'is_observing': is_observing, 'can_observe': True})\n", "if group_slug is not None:\n", "template_params['group'] = group\n", "if extra_context is not None:\n", "template_params.update(extra_context)\n", "return render_to_response('/'.join([template_dir, template_name]),\n template_params, context_instance=RequestContext(request))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Assign'", "Condition", "Expr'", "Return'" ]
[ "@auth.require(acl.is_bot)...\n", "VAR_4, VAR_14, VAR_9, VAR_25, VAR_24, VAR_26 = self._process()\n", "VAR_37 = VAR_4.get('event')\n", "if VAR_37 not in ('bot_error', 'bot_rebooting', 'bot_shutdown'):\n", "self.abort_with_error(400, error='Unsupported event type')\n", "VAR_8 = VAR_4.get('message')\n", "bot_management.bot_event(VAR_31=event, VAR_14=bot_id, external_ip=self.\n request.remote_addr, VAR_24=dimensions, VAR_25=state, VAR_9=version,\n VAR_16=bool(quarantined_msg), VAR_17=None, VAR_32=None, VAR_8=message)\n", "if VAR_37 == 'bot_error':\n", "VAR_50 = \"\"\"Bot: https://%s/restricted/bot/%s\nBot error:\n%s\"\"\" % (app_identity\n .get_default_version_hostname(), VAR_14, VAR_8)\n", "self.send_response({})\n", "ereporter2.log_request(self.request, VAR_5='bot', VAR_8=line)\n" ]
[ "@auth.require(acl.is_bot)...\n", "request, bot_id, version, state, dimensions, quarantined_msg = self._process()\n", "event = request.get('event')\n", "if event not in ('bot_error', 'bot_rebooting', 'bot_shutdown'):\n", "self.abort_with_error(400, error='Unsupported event type')\n", "message = request.get('message')\n", "bot_management.bot_event(event_type=event, bot_id=bot_id, external_ip=self.\n request.remote_addr, dimensions=dimensions, state=state, version=\n version, quarantined=bool(quarantined_msg), task_id=None, task_name=\n None, message=message)\n", "if event == 'bot_error':\n", "line = \"\"\"Bot: https://%s/restricted/bot/%s\nBot error:\n%s\"\"\" % (app_identity\n .get_default_version_hostname(), bot_id, message)\n", "self.send_response({})\n", "ereporter2.log_request(self.request, source='bot', message=line)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Condition", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_11(self, VAR_15, VAR_16, VAR_17):...\n", "self.checked += 1\n", "self.curline = 0\n", "for VAR_25 in VAR_15:\n", "if pe.tok:\n", "VAR_25 = VAR_25.decode(locale.getpreferredencoding(False), errors='ignore')\n", "VAR_27 = VAR_25.find(VAR_14) + pe.tok.lexpos\n", "sys.stdout.write('%s: %d:0 %s\\n' % (VAR_17, self.curline, VAR_27, pe.txt))\n", "self.curline += 1\n", "VAR_3 = pe.tok.value\n", "self.spdx_errors += 1\n", "if self.curline > VAR_16:\n", "sys.stdout.write('%s: %d:%d %s: %s\\n' % (VAR_17, self.curline, VAR_27, pe.\n txt, VAR_3))\n", "self.lines_checked += 1\n", "if VAR_25.find('SPDX-License-Identifier:') < 0:\n", "VAR_14 = VAR_25.split(':')[1].strip()\n", "if VAR_25.strip().endswith('*/'):\n", "VAR_14 = VAR_14.rstrip('*/').strip()\n", "if VAR_25.startswith('LIST \"'):\n", "VAR_14 = VAR_14.rstrip('\"').strip()\n", "self.parse(VAR_14)\n", "self.spdx_valid += 1\n" ]
[ "def parse_lines(self, fd, maxlines, fname):...\n", "self.checked += 1\n", "self.curline = 0\n", "for line in fd:\n", "if pe.tok:\n", "line = line.decode(locale.getpreferredencoding(False), errors='ignore')\n", "col = line.find(expr) + pe.tok.lexpos\n", "sys.stdout.write('%s: %d:0 %s\\n' % (fname, self.curline, col, pe.txt))\n", "self.curline += 1\n", "tok = pe.tok.value\n", "self.spdx_errors += 1\n", "if self.curline > maxlines:\n", "sys.stdout.write('%s: %d:%d %s: %s\\n' % (fname, self.curline, col, pe.txt, tok)\n )\n", "self.lines_checked += 1\n", "if line.find('SPDX-License-Identifier:') < 0:\n", "expr = line.split(':')[1].strip()\n", "if line.strip().endswith('*/'):\n", "expr = expr.rstrip('*/').strip()\n", "if line.startswith('LIST \"'):\n", "expr = expr.rstrip('\"').strip()\n", "self.parse(expr)\n", "self.spdx_valid += 1\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "AugAssign'", "Assign'", "For", "Condition", "Assign'", "Assign'", "Expr'", "AugAssign'", "Assign'", "AugAssign'", "Condition", "Expr'", "AugAssign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "AugAssign'" ]
[ "def FUNC_26(*VAR_29, **VAR_30):...\n", "" ]
[ "def raiseOSError(*_a, **_k):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_4(VAR_2):...\n", "VAR_7 = files.get_static_data('./static/files.html')\n", "VAR_2 = FUNC_1(VAR_2)\n", "VAR_2 = '/'\n", "if not VAR_2:\n", "VAR_2 = '/'\n", "VAR_15 = VAR_2.split('/')\n", "VAR_16 = list()\n", "while '' in VAR_15:\n", "VAR_15.remove('')\n", "VAR_15 = [''] + VAR_15\n", "VAR_17 = ''\n", "for VAR_31 in range(0, len(VAR_15)):\n", "VAR_15[VAR_31] += '/'\n", "VAR_18 = list()\n", "VAR_17 += VAR_15[VAR_31]\n", "for f_handle in db.Filesystem.listdir(VAR_2):\n", "VAR_16.append(dict(folder_name=files_hierarchy[i], href_path=\n '/files/list/%s' % encode_str_to_hexed_b64(files_hierarchy_cwd),\n disabled=i == len(files_hierarchy) - 1))\n", "VAR_5 = f_handle['file-name']\n", "VAR_19 = FUNC_0(VAR_17)\n", "VAR_27 = VAR_2 + VAR_5\n", "VAR_12 = users.get_user_by_cookie(self.get_cookie('user_active_login',\n default=''))\n", "VAR_28 = dict()\n", "VAR_7 = preproc.preprocess_webpage(VAR_7, VAR_12, VAR_18=files_attrib_list,\n VAR_16=files_hierarchy_list, VAR_19=cwd_uuid)\n", "VAR_28['file-name'] = VAR_5\n", "VAR_6.set_result(VAR_7)\n", "VAR_28['file-name-url'] = urllib.parse.quote(VAR_5)\n", "VAR_28['file-name-escaped'] = cgi.escape(VAR_5)\n", "VAR_28['size'] = f_handle['file-size']\n", "VAR_28['size-str'] = files.format_file_size(VAR_28['size'])\n", "VAR_28['owner'] = f_handle['owner']\n", "VAR_28['date-uploaded'] = time.strftime(const.get_const('time-format'),\n time.localtime(f_handle['upload-time']))\n", "if f_handle['is-dir']:\n", "VAR_28['mime-type'] = 'directory/folder'\n", "VAR_28['mime-type'] = files.guess_mime_type(VAR_5)\n", "if VAR_28['mime-type'] == 'directory/folder':\n", "VAR_28['target-link'] = '/files/list/%s' % FUNC_0(VAR_27 + '/')\n", "VAR_28['target-link'] = '/files/download/%s/%s' % (FUNC_0(VAR_27), VAR_28[\n 'file-name-url'])\n", "VAR_28['uuid'] = FUNC_0(VAR_27)\n", "VAR_18.append(VAR_28)\n" ]
[ "def get_final_html_async(target_path):...\n", "file_temp = files.get_static_data('./static/files.html')\n", "target_path = decode_hexed_b64_to_str(target_path)\n", "target_path = '/'\n", "if not target_path:\n", "target_path = '/'\n", "files_hierarchy = target_path.split('/')\n", "files_hierarchy_list = list()\n", "while '' in files_hierarchy:\n", "files_hierarchy.remove('')\n", "files_hierarchy = [''] + files_hierarchy\n", "files_hierarchy_cwd = ''\n", "for i in range(0, len(files_hierarchy)):\n", "files_hierarchy[i] += '/'\n", "files_attrib_list = list()\n", "files_hierarchy_cwd += files_hierarchy[i]\n", "for f_handle in db.Filesystem.listdir(target_path):\n", "files_hierarchy_list.append(dict(folder_name=files_hierarchy[i], href_path=\n '/files/list/%s' % encode_str_to_hexed_b64(files_hierarchy_cwd),\n disabled=i == len(files_hierarchy) - 1))\n", "file_name = f_handle['file-name']\n", "cwd_uuid = encode_str_to_hexed_b64(files_hierarchy_cwd)\n", "actual_path = target_path + file_name\n", "working_user = users.get_user_by_cookie(self.get_cookie('user_active_login',\n default=''))\n", "attrib = dict()\n", "file_temp = preproc.preprocess_webpage(file_temp, working_user,\n files_attrib_list=files_attrib_list, files_hierarchy_list=\n files_hierarchy_list, cwd_uuid=cwd_uuid)\n", "attrib['file-name'] = file_name\n", "future.set_result(file_temp)\n", "attrib['file-name-url'] = urllib.parse.quote(file_name)\n", "attrib['file-name-escaped'] = cgi.escape(file_name)\n", "attrib['size'] = f_handle['file-size']\n", "attrib['size-str'] = files.format_file_size(attrib['size'])\n", "attrib['owner'] = f_handle['owner']\n", "attrib['date-uploaded'] = time.strftime(const.get_const('time-format'),\n time.localtime(f_handle['upload-time']))\n", "if f_handle['is-dir']:\n", "attrib['mime-type'] = 'directory/folder'\n", "attrib['mime-type'] = files.guess_mime_type(file_name)\n", "if attrib['mime-type'] == 'directory/folder':\n", "attrib['target-link'] = '/files/list/%s' % encode_str_to_hexed_b64(\n actual_path + '/')\n", "attrib['target-link'] = '/files/download/%s/%s' % (encode_str_to_hexed_b64(\n actual_path), attrib['file-name-url'])\n", "attrib['uuid'] = encode_str_to_hexed_b64(actual_path)\n", "files_attrib_list.append(attrib)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "For", "AugAssign'", "Assign'", "AugAssign'", "For", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_1(VAR_3, VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "VAR_8 = []\n", "VAR_9 = ChangedEntityLocale.objects.values_list('entity', 'locale').distinct()\n", "for changed_entity in VAR_3:\n", "VAR_18 = changed_entity.pk, VAR_2.pk\n", "ChangedEntityLocale.objects.bulk_create(VAR_8)\n", "if VAR_18 not in VAR_9:\n", "VAR_8.append(ChangedEntityLocale(entity=changed_entity, VAR_2=locale))\n" ]
[ "def mark_changed_translation(changed_entities, locale):...\n", "\"\"\"docstring\"\"\"\n", "changed_entities_array = []\n", "existing = ChangedEntityLocale.objects.values_list('entity', 'locale'\n ).distinct()\n", "for changed_entity in changed_entities:\n", "key = changed_entity.pk, locale.pk\n", "ChangedEntityLocale.objects.bulk_create(changed_entities_array)\n", "if key not in existing:\n", "changed_entities_array.append(ChangedEntityLocale(entity=changed_entity,\n locale=locale))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "Assign'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_21(self, VAR_17, VAR_16):...\n", "\"\"\"docstring\"\"\"\n", "self._check_volume(VAR_16)\n", "VAR_0.warn(_('Volume %s is not found!, it may have been deleted'), VAR_16[\n 'name'])\n", "VAR_0.error(_('Failed to ensure export of volume %s'), VAR_16['name'])\n" ]
[ "def ensure_export(self, context, volume):...\n", "\"\"\"docstring\"\"\"\n", "self._check_volume(volume)\n", "LOG.warn(_('Volume %s is not found!, it may have been deleted'), volume['name']\n )\n", "LOG.error(_('Failed to ensure export of volume %s'), volume['name'])\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'", "Expr'" ]
[ "\"\"\" Views for a student's account information. \"\"\"\n", "import json\n", "import logging\n", "import urlparse\n", "from datetime import datetime\n", "import pytz\n", "from django.conf import settings\n", "from django.contrib import messages\n", "from django.contrib.auth import get_user_model\n", "from django.contrib.auth.decorators import login_required\n", "from django.core.urlresolvers import resolve, reverse\n", "from django.http import HttpRequest, HttpResponse, HttpResponseBadRequest, HttpResponseForbidden\n", "from django.shortcuts import redirect\n", "from django.utils.translation import ugettext as _\n", "from django.views.decorators.csrf import ensure_csrf_cookie\n", "from django.views.decorators.http import require_http_methods\n", "from django_countries import countries\n", "import third_party_auth\n", "from commerce.models import CommerceConfiguration\n", "from edxmako.shortcuts import render_to_response, render_to_string\n", "from lms.djangoapps.commerce.utils import EcommerceService\n", "from openedx.core.djangoapps.commerce.utils import ecommerce_api_client\n", "from openedx.core.djangoapps.external_auth.login_and_register import login as external_auth_login\n", "from openedx.core.djangoapps.external_auth.login_and_register import register as external_auth_register\n", "from openedx.core.djangoapps.lang_pref.api import all_languages, released_languages\n", "from openedx.core.djangoapps.programs.models import ProgramsApiConfig\n", "from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers\n", "from openedx.core.djangoapps.theming.helpers import is_request_in_themed_site\n", "from openedx.core.djangoapps.user_api.accounts.api import request_password_change\n", "from openedx.core.djangoapps.user_api.errors import UserNotFound\n", "from openedx.core.lib.edx_api_utils import get_edx_api_data\n", "from openedx.core.lib.time_zone_utils import TIME_ZONE_CHOICES\n", "from openedx.features.enterprise_support.api import enterprise_customer_for_request\n", "from student.helpers import destroy_oauth_tokens, get_next_url_for_login_page\n", "from student.models import UserProfile\n", "from student.views import register_user as old_register_view\n", "from student.views import signin_user as old_login_view\n", "from third_party_auth import pipeline\n", "from third_party_auth.decorators import xframe_allow_whitelisted\n", "from util.bad_request_rate_limiter import BadRequestRateLimiter\n", "from util.date_utils import strftime_localized\n", "VAR_0 = logging.getLogger('audit')\n", "VAR_1 = logging.getLogger(__name__)\n", "VAR_2 = get_user_model()\n", "@require_http_methods(['GET'])...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = get_next_url_for_login_page(VAR_3)\n", "if VAR_3.user.is_authenticated():\n", "return redirect(VAR_7)\n", "VAR_13 = FUNC_6(VAR_3)\n", "VAR_14 = None\n", "if '?' 
in VAR_7:\n", "if is_request_in_themed_site() and not configuration_helpers.get_value(\n", "VAR_43 = urlparse.parse_qs(urlparse.urlparse(VAR_7).query)\n", "if VAR_4 == 'login':\n", "VAR_15 = FUNC_8(VAR_3, VAR_4)\n", "VAR_44 = VAR_43['tpa_hint'][0]\n", "return old_login_view(VAR_3)\n", "if VAR_4 == 'register':\n", "if VAR_15 is not None:\n", "VAR_45 = third_party_auth.provider.Registry.get(VAR_44=provider_id)\n", "return old_register_view(VAR_3)\n", "return VAR_15\n", "VAR_16 = [{'message': message.message, 'tags': message.tags} for message in\n messages.get_messages(VAR_3) if 'account-activation' in message.tags]\n", "if VAR_45:\n", "VAR_5 = {'data': {'login_redirect_url': VAR_7, 'initial_mode': VAR_4,\n 'third_party_auth': FUNC_5(VAR_3, VAR_7, VAR_14),\n 'third_party_auth_hint': VAR_14 or '', 'platform_name':\n configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME\n ), 'support_link': configuration_helpers.get_value('SUPPORT_SITE_LINK',\n settings.SUPPORT_SITE_LINK), 'password_reset_support_link': \n configuration_helpers.get_value('PASSWORD_RESET_SUPPORT_LINK', settings\n .PASSWORD_RESET_SUPPORT_LINK) or settings.SUPPORT_SITE_LINK,\n 'account_activation_messages': VAR_16, 'login_form_desc': json.loads(\n VAR_13['login']), 'registration_form_desc': json.loads(VAR_13[\n 'registration']), 'password_reset_form_desc': json.loads(VAR_13[\n 'password_reset']), 'account_creation_allowed': configuration_helpers.\n get_value('ALLOW_PUBLIC_ACCOUNT_CREATION', settings.FEATURES.get(\n 'ALLOW_PUBLIC_ACCOUNT_CREATION', True))}, 'login_redirect_url': VAR_7,\n 'responsive': True, 'allow_iframing': True, 'disable_courseware_js': \n True, 'combined_login_and_register': True, 'disable_footer': not\n configuration_helpers.get_value(\n 'ENABLE_COMBINED_LOGIN_REGISTRATION_FOOTER', settings.FEATURES[\n 'ENABLE_COMBINED_LOGIN_REGISTRATION_FOOTER'])}\n", "if VAR_45.skip_hinted_login_dialog:\n", "VAR_5 = FUNC_2(VAR_3, VAR_5)\n", "return redirect(pipeline.get_login_url(VAR_44, pipeline.AUTH_ENTRY_LOGIN,\n redirect_url=redirect_to))\n", "VAR_14 = VAR_44\n", "return render_to_response('student_account/login_and_register.html', VAR_5)\n", "VAR_4 = 'hinted_login'\n" ]
[ "\"\"\" Views for a student's account information. \"\"\"\n", "import json\n", "import logging\n", "import urlparse\n", "from datetime import datetime\n", "import pytz\n", "from django.conf import settings\n", "from django.contrib import messages\n", "from django.contrib.auth import get_user_model\n", "from django.contrib.auth.decorators import login_required\n", "from django.core.urlresolvers import resolve, reverse\n", "from django.http import HttpRequest, HttpResponse, HttpResponseBadRequest, HttpResponseForbidden\n", "from django.shortcuts import redirect\n", "from django.utils.translation import ugettext as _\n", "from django.views.decorators.csrf import ensure_csrf_cookie\n", "from django.views.decorators.http import require_http_methods\n", "from django_countries import countries\n", "import third_party_auth\n", "from commerce.models import CommerceConfiguration\n", "from edxmako.shortcuts import render_to_response, render_to_string\n", "from lms.djangoapps.commerce.utils import EcommerceService\n", "from openedx.core.djangoapps.commerce.utils import ecommerce_api_client\n", "from openedx.core.djangoapps.external_auth.login_and_register import login as external_auth_login\n", "from openedx.core.djangoapps.external_auth.login_and_register import register as external_auth_register\n", "from openedx.core.djangoapps.lang_pref.api import all_languages, released_languages\n", "from openedx.core.djangoapps.programs.models import ProgramsApiConfig\n", "from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers\n", "from openedx.core.djangoapps.theming.helpers import is_request_in_themed_site\n", "from openedx.core.djangoapps.user_api.accounts.api import request_password_change\n", "from openedx.core.djangoapps.user_api.errors import UserNotFound\n", "from openedx.core.lib.edx_api_utils import get_edx_api_data\n", "from openedx.core.lib.time_zone_utils import TIME_ZONE_CHOICES\n", "from openedx.features.enterprise_support.api import enterprise_customer_for_request\n", "from student.helpers import destroy_oauth_tokens, get_next_url_for_login_page\n", "from student.models import UserProfile\n", "from student.views import register_user as old_register_view\n", "from student.views import signin_user as old_login_view\n", "from third_party_auth import pipeline\n", "from third_party_auth.decorators import xframe_allow_whitelisted\n", "from util.bad_request_rate_limiter import BadRequestRateLimiter\n", "from util.date_utils import strftime_localized\n", "AUDIT_LOG = logging.getLogger('audit')\n", "log = logging.getLogger(__name__)\n", "User = get_user_model()\n", "@require_http_methods(['GET'])...\n", "\"\"\"docstring\"\"\"\n", "redirect_to = get_next_url_for_login_page(request)\n", "if request.user.is_authenticated():\n", "return redirect(redirect_to)\n", "form_descriptions = _get_form_descriptions(request)\n", "third_party_auth_hint = None\n", "if '?' 
in redirect_to:\n", "if is_request_in_themed_site() and not configuration_helpers.get_value(\n", "next_args = urlparse.parse_qs(urlparse.urlparse(redirect_to).query)\n", "if initial_mode == 'login':\n", "ext_auth_response = _external_auth_intercept(request, initial_mode)\n", "provider_id = next_args['tpa_hint'][0]\n", "return old_login_view(request)\n", "if initial_mode == 'register':\n", "if ext_auth_response is not None:\n", "tpa_hint_provider = third_party_auth.provider.Registry.get(provider_id=\n provider_id)\n", "return old_register_view(request)\n", "return ext_auth_response\n", "account_activation_messages = [{'message': message.message, 'tags': message\n .tags} for message in messages.get_messages(request) if \n 'account-activation' in message.tags]\n", "if tpa_hint_provider:\n", "context = {'data': {'login_redirect_url': redirect_to, 'initial_mode':\n initial_mode, 'third_party_auth': _third_party_auth_context(request,\n redirect_to, third_party_auth_hint), 'third_party_auth_hint': \n third_party_auth_hint or '', 'platform_name': configuration_helpers.\n get_value('PLATFORM_NAME', settings.PLATFORM_NAME), 'support_link':\n configuration_helpers.get_value('SUPPORT_SITE_LINK', settings.\n SUPPORT_SITE_LINK), 'password_reset_support_link': \n configuration_helpers.get_value('PASSWORD_RESET_SUPPORT_LINK', settings\n .PASSWORD_RESET_SUPPORT_LINK) or settings.SUPPORT_SITE_LINK,\n 'account_activation_messages': account_activation_messages,\n 'login_form_desc': json.loads(form_descriptions['login']),\n 'registration_form_desc': json.loads(form_descriptions['registration']),\n 'password_reset_form_desc': json.loads(form_descriptions[\n 'password_reset']), 'account_creation_allowed': configuration_helpers.\n get_value('ALLOW_PUBLIC_ACCOUNT_CREATION', settings.FEATURES.get(\n 'ALLOW_PUBLIC_ACCOUNT_CREATION', True))}, 'login_redirect_url':\n redirect_to, 'responsive': True, 'allow_iframing': True,\n 'disable_courseware_js': True, 'combined_login_and_register': True,\n 'disable_footer': not configuration_helpers.get_value(\n 'ENABLE_COMBINED_LOGIN_REGISTRATION_FOOTER', settings.FEATURES[\n 'ENABLE_COMBINED_LOGIN_REGISTRATION_FOOTER'])}\n", "if tpa_hint_provider.skip_hinted_login_dialog:\n", "context = update_context_for_enterprise(request, context)\n", "return redirect(pipeline.get_login_url(provider_id, pipeline.\n AUTH_ENTRY_LOGIN, redirect_url=redirect_to))\n", "third_party_auth_hint = provider_id\n", "return render_to_response('student_account/login_and_register.html', context)\n", "initial_mode = 'hinted_login'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Condition", "Docstring", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Condition", "Condition", "Assign'", "Return'", "Return'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Return'", "Assign'" ]
[ "from django.core.exceptions import ObjectDoesNotExist\n", "from rest_framework import exceptions\n", "from app import models\n", "def FUNC_0(VAR_0, VAR_1, VAR_2=('view_project',)):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = models.Project.objects.get(pk=project_pk, deleting=False)\n", "return VAR_6\n", "for perm in VAR_2:\n", "if not VAR_0.user.has_perm(perm, VAR_6):\n" ]
[ "from django.core.exceptions import ObjectDoesNotExist\n", "from rest_framework import exceptions\n", "from app import models\n", "def get_and_check_project(request, project_pk, perms=('view_project',)):...\n", "\"\"\"docstring\"\"\"\n", "project = models.Project.objects.get(pk=project_pk, deleting=False)\n", "return project\n", "for perm in perms:\n", "if not request.user.has_perm(perm, project):\n" ]
[ 1, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Docstring", "Assign'", "Return'", "For", "Condition" ]
[ "@VAR_0.route('/search')...\n", "VAR_2 = 10\n", "VAR_3 = 0\n", "VAR_4 = request.args.get('term')\n", "VAR_5 = request.args.get('page')\n", "VAR_6 = VAR_4\n", "if not VAR_5:\n", "VAR_5 = 1\n", "VAR_3 = VAR_2 * (int(VAR_5) - 1)\n", "VAR_7 = re.compile('.*[a-zA-Z].*')\n", "if VAR_7.match(VAR_6):\n", "VAR_6 = transliterate(VAR_6, sanscript.ITRANS, sanscript.DEVANAGARI)\n", "VAR_6 = VAR_6.replace('*', '%')\n", "VAR_8 = VAR_6.split()\n", "con.row_factory = sql.Row\n", "con.close()\n", "VAR_15 = con.cursor()\n", "if len(VAR_8) == 1:\n", "VAR_15.execute('string' % (VAR_6, VAR_6, VAR_2, VAR_3))\n", "VAR_20 = 'string' % ','.join('?' for i in VAR_8)\n", "VAR_14 = VAR_15.fetchall()\n", "VAR_14 = VAR_15.execute(VAR_20, VAR_8)\n", "return render_template('search.html', VAR_14=rows, VAR_4=user_term, VAR_6=\n term, VAR_5=page)\n" ]
[ "@app.route('/search')...\n", "limit = 10\n", "offset = 0\n", "user_term = request.args.get('term')\n", "page = request.args.get('page')\n", "term = user_term\n", "if not page:\n", "page = 1\n", "offset = limit * (int(page) - 1)\n", "transliterate_regex = re.compile('.*[a-zA-Z].*')\n", "if transliterate_regex.match(term):\n", "term = transliterate(term, sanscript.ITRANS, sanscript.DEVANAGARI)\n", "term = term.replace('*', '%')\n", "term_words = term.split()\n", "con.row_factory = sql.Row\n", "con.close()\n", "cur = con.cursor()\n", "if len(term_words) == 1:\n", "cur.execute(\n \"select * from pada inner join mula on pada.sloka_line = mula.sloka_line where pada like '%s' or artha like '%s' order by id limit %d offset %d;\"\n % (term, term, limit, offset))\n", "query = (\n 'select * from pada inner join mula on pada.sloka_line = mula.sloka_line where pada in (%s) order by pada limit 100;'\n % ','.join('?' for i in term_words))\n", "rows = cur.fetchall()\n", "rows = cur.execute(query, term_words)\n", "return render_template('search.html', rows=rows, user_term=user_term, term=\n term, page=page)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_4(self):...\n", "VAR_11 = PasswordHasher()\n", "VAR_0 = VAR_11.hash(self.password)\n", "VAR_11.verify(VAR_0, self.password)\n", "VAR_0 = VAR_11.hash(self.password)\n", "return VAR_0\n" ]
[ "def hash_password(self):...\n", "ph = PasswordHasher()\n", "hashed_password = ph.hash(self.password)\n", "ph.verify(hashed_password, self.password)\n", "hashed_password = ph.hash(self.password)\n", "return hashed_password\n" ]
[ 0, 6, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_6(self):...\n", "if not VAR_101.user_is_admin:\n", "abort(404, 'page not found')\n" ]
[ "def run(self):...\n", "if not c.user_is_admin:\n", "abort(404, 'page not found')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'" ]
[ "def FUNC_3():...\n", "return 'munins node on ' + socket.getfqdn() + ' version: ' + VAR_2 + VAR_4\n" ]
[ "def version():...\n", "return 'munins node on ' + socket.getfqdn(\n ) + ' version: ' + VERSION + LINEBREAK\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_1(self, VAR_4):...\n", "VAR_4 = u\"'{0}'\".format(VAR_4)\n", "return VAR_4\n" ]
[ "def insert_format(self, value):...\n", "value = u\"'{0}'\".format(value)\n", "return value\n" ]
[ 0, 4, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_6(self, VAR_3, VAR_1):...\n", "VAR_5 = 'delete from comment_like where commentid=%d and userid=%d;' % (VAR_3,\n VAR_1)\n", "VAR_6 = sql.deleteDB(self.conn, VAR_5)\n", "return VAR_6\n" ]
[ "def dislikeComments(self, commentid, userid):...\n", "sqlText = 'delete from comment_like where commentid=%d and userid=%d;' % (\n commentid, userid)\n", "result = sql.deleteDB(self.conn, sqlText)\n", "return result\n" ]
[ 0, 4, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_12():...\n", "VAR_18 = requests.get(FUNC_2('healthy'))\n", "VAR_18.raise_for_status()\n", "return VAR_18.json()\n" ]
[ "def _ServerIsHealthy():...\n", "response = requests.get(_BuildUri('healthy'))\n", "response.raise_for_status()\n", "return response.json()\n" ]
[ 0, 7, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_0(self):...\n", "if not GenericRequest.test_success(self):\n", "return False\n", "return self.get_user_test_id() is not None\n" ]
[ "def test_success(self):...\n", "if not GenericRequest.test_success(self):\n", "return False\n", "return self.get_user_test_id() is not None\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_2(VAR_0):...\n", "VAR_5 = FUNC_0()\n", "VAR_7 = VAR_5.cursor()\n", "VAR_7.execute(VAR_0)\n", "VAR_8 = VAR_7.fetchone()[0]\n", "VAR_5.close()\n", "return VAR_8\n" ]
[ "def fetchone(query):...\n", "conn = connect()\n", "cur = conn.cursor()\n", "cur.execute(query)\n", "result = cur.fetchone()[0]\n", "conn.close()\n", "return result\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_27(self):...\n", "\"\"\"docstring\"\"\"\n", "if not self.exists and FUNC_0(self.file):\n" ]
[ "def check_broken_symlink(self):...\n", "\"\"\"docstring\"\"\"\n", "if not self.exists and lstat(self.file):\n" ]
[ 0, 0, 7 ]
[ "FunctionDef'", "Docstring", "Condition" ]
[ "def FUNC_4(self, VAR_6, VAR_18):...\n", "VAR_18 = int(VAR_18)\n", "VAR_23 = GaneshaConf.instance(VAR_6)\n", "if not VAR_23.has_export(VAR_18):\n", "return VAR_23.get_export(VAR_18).to_dict()\n" ]
[ "def get(self, cluster_id, export_id):...\n", "export_id = int(export_id)\n", "ganesha_conf = GaneshaConf.instance(cluster_id)\n", "if not ganesha_conf.has_export(export_id):\n", "return ganesha_conf.get_export(export_id).to_dict()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Return'" ]
[ "def FUNC_0(self):...\n", "self.events.append('on_pre_enter: %s' % self.path)\n" ]
[ "def on_pre_enter(self):...\n", "self.events.append('on_pre_enter: %s' % self.path)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_0(self, VAR_1, VAR_2=None, VAR_3=False):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = 'string'.format(default_store=store, repo_dir=REPO_DIR, shard_str=\n '/shard_' + self.shard if self.shard else '', exp_text=name,\n a11y_custom_file=\n 'node_modules/edx-custom-a11y-rules/lib/custom_a11y_rules.js', VAR_3=\n verify_xss)\n", "return VAR_6\n" ]
[ "def _expected_command(self, name, store=None, verify_xss=False):...\n", "\"\"\"docstring\"\"\"\n", "expected_statement = (\n \"DEFAULT_STORE={default_store} SCREENSHOT_DIR='{repo_dir}/test_root/log{shard_str}' BOK_CHOY_HAR_DIR='{repo_dir}/test_root/log{shard_str}/hars' BOKCHOY_A11Y_CUSTOM_RULES_FILE='{repo_dir}/{a11y_custom_file}' SELENIUM_DRIVER_LOG_DIR='{repo_dir}/test_root/log{shard_str}' VERIFY_XSS='{verify_xss}' nosetests {repo_dir}/common/test/acceptance/{exp_text} --with-xunit --xunit-file={repo_dir}/reports/bok_choy{shard_str}/xunit.xml --verbosity=2 \"\n .format(default_store=store, repo_dir=REPO_DIR, shard_str='/shard_' +\n self.shard if self.shard else '', exp_text=name, a11y_custom_file=\n 'node_modules/edx-custom-a11y-rules/lib/custom_a11y_rules.js',\n verify_xss=verify_xss))\n", "return expected_statement\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_2(self):...\n", "VAR_1 = self.objects.dg.pk\n", "self.assertEqual(str(self.objects.dg.group_type), 'Composition',\n 'Type of DataGroup needs to be \"composition\" for this test.')\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/')\n", "self.assertEqual(VAR_2.context['extract_fields'], ['data_document_id',\n 'data_document_filename', 'prod_name', 'doc_date', 'rev_num',\n 'raw_category', 'raw_cas', 'raw_chem_name', 'report_funcuse',\n 'raw_min_comp', 'raw_max_comp', 'unit_type', 'ingredient_rank',\n 'raw_central_comp'], 'Fieldnames passed are incorrect!')\n", "self.objects.gt.title = 'Functional use'\n", "self.objects.gt.code = 'FU'\n", "self.objects.gt.save()\n", "self.assertEqual(str(self.objects.dg.group_type), 'Functional use',\n 'Type of DataGroup needs to be \"FU\" for this test.')\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/')\n", "self.assertEqual(VAR_2.context['extract_fields'], ['data_document_id',\n 'data_document_filename', 'prod_name', 'doc_date', 'rev_num',\n 'raw_category', 'raw_cas', 'raw_chem_name', 'report_funcuse'],\n 'Fieldnames passed are incorrect!')\n" ]
[ "def test_detail_template_fieldnames(self):...\n", "pk = self.objects.dg.pk\n", "self.assertEqual(str(self.objects.dg.group_type), 'Composition',\n 'Type of DataGroup needs to be \"composition\" for this test.')\n", "response = self.client.get(f'/datagroup/{pk}/')\n", "self.assertEqual(response.context['extract_fields'], ['data_document_id',\n 'data_document_filename', 'prod_name', 'doc_date', 'rev_num',\n 'raw_category', 'raw_cas', 'raw_chem_name', 'report_funcuse',\n 'raw_min_comp', 'raw_max_comp', 'unit_type', 'ingredient_rank',\n 'raw_central_comp'], 'Fieldnames passed are incorrect!')\n", "self.objects.gt.title = 'Functional use'\n", "self.objects.gt.code = 'FU'\n", "self.objects.gt.save()\n", "self.assertEqual(str(self.objects.dg.group_type), 'Functional use',\n 'Type of DataGroup needs to be \"FU\" for this test.')\n", "response = self.client.get(f'/datagroup/{pk}/')\n", "self.assertEqual(response.context['extract_fields'], ['data_document_id',\n 'data_document_filename', 'prod_name', 'doc_date', 'rev_num',\n 'raw_category', 'raw_cas', 'raw_chem_name', 'report_funcuse'],\n 'Fieldnames passed are incorrect!')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_39(VAR_101):...\n", "VAR_101.norun = True\n", "return VAR_101\n" ]
[ "def decorate(ruleinfo):...\n", "ruleinfo.norun = True\n", "return ruleinfo\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_7(self, VAR_1, VAR_5):...\n", "if VAR_1 == '':\n", "return self\n", "if VAR_1 == 'login':\n", "return self\n", "if not self.is_logged_in(VAR_5):\n", "return UnAuthorizedResource()\n", "return NoResource()\n" ]
[ "def getChild(self, path, request):...\n", "if path == '':\n", "return self\n", "if path == 'login':\n", "return self\n", "if not self.is_logged_in(request):\n", "return UnAuthorizedResource()\n", "return NoResource()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "@property...\n", "return self.dag.priority(self)\n" ]
[ "@property...\n", "return self.dag.priority(self)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_39(VAR_101):...\n", "VAR_101.priority = VAR_77\n", "return VAR_101\n" ]
[ "def decorate(ruleinfo):...\n", "ruleinfo.priority = priority\n", "return ruleinfo\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.config.getboolean(self.section, 'debug')\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.config.getboolean(self.section, 'debug')\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_29(VAR_34):...\n", "self.assertEqual(logging.WARNING, VAR_34)\n" ]
[ "def check(x):...\n", "self.assertEqual(logging.WARNING, x)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_29(self):...\n", "VAR_0 = [SleepCheck(1), SleepCheck(1), SleepCheck(1),\n KeyboardInterruptCheck(phase='setup')]\n", "self._run_checks(VAR_0, 4)\n" ]
[ "def test_kbd_interrupt_in_setup_with_concurrency(self):...\n", "checks = [SleepCheck(1), SleepCheck(1), SleepCheck(1),\n KeyboardInterruptCheck(phase='setup')]\n", "self._run_checks(checks, 4)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_12(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_50 = frappe.db.get_table_columns(self.doctype)\n", "VAR_51 = []\n", "for fld in self.fields:\n", "for VAR_34 in optional_fields:\n", "for fld in VAR_51:\n", "if VAR_34 in fld and not VAR_34 in VAR_50:\n", "VAR_51 = []\n", "VAR_51.append(fld)\n", "for VAR_67 in self.filters:\n", "if isinstance(VAR_67, string_types):\n", "for VAR_67 in VAR_51:\n", "VAR_67 = [VAR_67]\n", "for element in VAR_67:\n", "if isinstance(self.filters, dict):\n", "if element in optional_fields and element not in VAR_50:\n", "self.filters.remove(VAR_67)\n", "VAR_51.append(VAR_67)\n" ]
[ "def set_optional_columns(self):...\n", "\"\"\"docstring\"\"\"\n", "columns = frappe.db.get_table_columns(self.doctype)\n", "to_remove = []\n", "for fld in self.fields:\n", "for f in optional_fields:\n", "for fld in to_remove:\n", "if f in fld and not f in columns:\n", "to_remove = []\n", "to_remove.append(fld)\n", "for each in self.filters:\n", "if isinstance(each, string_types):\n", "for each in to_remove:\n", "each = [each]\n", "for element in each:\n", "if isinstance(self.filters, dict):\n", "if element in optional_fields and element not in columns:\n", "self.filters.remove(each)\n", "to_remove.append(each)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "For", "For", "Condition", "Assign'", "Expr'", "For", "Condition", "For", "Assign'", "For", "Condition", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_8(VAR_27):...\n", "VAR_36 = datetime.utcfromtimestamp(VAR_27.created_utc)\n", "FUNC_2(VAR_14=str(message.author), VAR_18='message', VAR_15='new_address',\n VAR_25=message_time.strftime('%Y-%m-%d %H:%M:%S'), VAR_26=str(message.\n body)[:255])\n", "VAR_27.reply('not activated yet.')\n" ]
[ "def handle_new_address(message):...\n", "message_time = datetime.utcfromtimestamp(message.created_utc)\n", "add_history_record(username=str(message.author), comment_or_message=\n 'message', action='new_address', reddit_time=message_time.strftime(\n '%Y-%m-%d %H:%M:%S'), comment_text=str(message.body)[:255])\n", "message.reply('not activated yet.')\n" ]
[ 0, 0, 4, 4 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_21(VAR_16, VAR_22):...\n", "VAR_53 = (\"select date from matches where scene='{}' order by date limit 1;\"\n .format(VAR_22))\n", "VAR_54 = VAR_16.exec(VAR_53)\n", "VAR_23 = VAR_54[0][0]\n", "return VAR_23\n" ]
[ "def get_first_month(db, scene):...\n", "sql = (\"select date from matches where scene='{}' order by date limit 1;\".\n format(scene))\n", "res = db.exec(sql)\n", "date = res[0][0]\n", "return date\n" ]
[ 0, 4, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_10(self, VAR_7, VAR_9=None):...\n", "if VAR_9 == None:\n", "VAR_9 = {}\n", "if isinstance(VAR_9, (dict, CLASS_0)):\n", "if not self.__dict__.get(VAR_7):\n", "if getattr(self, '_metaclass', None) or self.__class__.__name__ in ('Meta',\n", "self.__dict__[VAR_7] = []\n", "VAR_9 = self._init_child(VAR_9, VAR_7)\n", "return VAR_9\n", "self.__dict__[VAR_7].append(VAR_9)\n", "VAR_9.parent_doc = self\n", "return VAR_9\n" ]
[ "def append(self, key, value=None):...\n", "if value == None:\n", "value = {}\n", "if isinstance(value, (dict, BaseDocument)):\n", "if not self.__dict__.get(key):\n", "if getattr(self, '_metaclass', None) or self.__class__.__name__ in ('Meta',\n", "self.__dict__[key] = []\n", "value = self._init_child(value, key)\n", "return value\n", "self.__dict__[key].append(value)\n", "value.parent_doc = self\n", "return value\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Return'", "Expr'", "Assign'", "Return'" ]
[ "@property...\n", "VAR_16 = await self.get_props_by_key('content')\n", "if VAR_16:\n", "return VAR_16.decode('utf-8')\n" ]
[ "@property...\n", "rv = await self.get_props_by_key('content')\n", "if rv:\n", "return rv.decode('utf-8')\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Return'" ]
[ "from utility import *\n", "import json\n", "def FUNC_0(VAR_0, VAR_1):...\n", "VAR_4 = 'curl ' + start_url + VAR_0 + '?'\n", "VAR_5 = VAR_1.keys()\n", "VAR_6 = VAR_1.values()\n", "VAR_7 = [(VAR_5[i] + '=' + VAR_6[i]) for i in range(len(VAR_5))]\n", "VAR_8 = '&'.join(VAR_7)\n", "VAR_4 += VAR_8\n", "return VAR_4\n" ]
[ "from utility import *\n", "import json\n", "def createGetScript(endpoint, params):...\n", "script = 'curl ' + start_url + endpoint + '?'\n", "keys = params.keys()\n", "values = params.values()\n", "pair = [(keys[i] + '=' + values[i]) for i in range(len(keys))]\n", "evil_param = '&'.join(pair)\n", "script += evil_param\n", "return script\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "AugAssign'", "Return'" ]
[ "def FUNC_1(self):...\n", "self.terminal.write(self.ps[self.pn])\n" ]
[ "def print_prompt(self):...\n", "self.terminal.write(self.ps[self.pn])\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "@VAR_1.route('/tmp', methods=['POST'])...\n", "VAR_21 = base64.b64decode(request.form['content']).decode()\n", "f.write(VAR_21)\n", "VAR_18['tempFile'] = f.name\n", "return ''\n" ]
[ "@app.route('/tmp', methods=['POST'])...\n", "content = base64.b64decode(request.form['content']).decode()\n", "f.write(content)\n", "session['tempFile'] = f.name\n", "return ''\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_1(self, VAR_2, VAR_3, VAR_1, VAR_5):...\n", "create_check_cve_task(VAR_3)\n", "super().save_model(VAR_2, VAR_3, VAR_1, VAR_5)\n" ]
[ "def save_model(self, request, obj, form, change):...\n", "create_check_cve_task(obj)\n", "super().save_model(request, obj, form, change)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'" ]
[ "def __init__(self, VAR_1, VAR_2=None):...\n", "super(CLASS_0, self).__init__(VAR_1, VAR_2)\n", "self.TEST_APP = VAR_1['TEST_APP']\n", "self.__path = 'applications/%s/tasks' % self.TEST_APP\n", "self.TEST_STACK = VAR_1['TEST_STACK']\n", "self.TEST_REGION = VAR_1['TEST_GCE_REGION']\n", "self.TEST_ZONE = VAR_1['TEST_GCE_ZONE']\n", "self.__cluster_name = '%s-%s' % (self.TEST_APP, self.TEST_STACK)\n", "self.__server_group_name = '%s-v000' % self.__cluster_name\n", "self.__cloned_server_group_name = '%s-v001' % self.__cluster_name\n", "self.__lb_name = '%s-%s-fe' % (self.TEST_APP, self.TEST_STACK)\n" ]
[ "def __init__(self, bindings, agent=None):...\n", "super(GoogleServerGroupTestScenario, self).__init__(bindings, agent)\n", "self.TEST_APP = bindings['TEST_APP']\n", "self.__path = 'applications/%s/tasks' % self.TEST_APP\n", "self.TEST_STACK = bindings['TEST_STACK']\n", "self.TEST_REGION = bindings['TEST_GCE_REGION']\n", "self.TEST_ZONE = bindings['TEST_GCE_ZONE']\n", "self.__cluster_name = '%s-%s' % (self.TEST_APP, self.TEST_STACK)\n", "self.__server_group_name = '%s-v000' % self.__cluster_name\n", "self.__cloned_server_group_name = '%s-v001' % self.__cluster_name\n", "self.__lb_name = '%s-%s-fe' % (self.TEST_APP, self.TEST_STACK)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "@classmethod...\n", "return VAR_6 if VAR_6.startswith('v') else 'v' + VAR_6\n" ]
[ "@classmethod...\n", "return version if version.startswith('v') else 'v' + version\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "from flask import Blueprint, request, jsonify, redirect, url_for, render_template, flash, Response\n", "from flask_login import login_required, current_user\n", "from app.Data.operations import create_action, get_dataset_with_id\n", "from app.Data.helpers import table_name_to_object\n", "from app.Data.Transform.operations import restore_original, change_attribute_type, delete_rows, fill_null_with, fill_null_with_average, fill_null_with_median, rename_attribute, delete_attribute, one_hot_encode, normalize_attribute, discretize_width, discretize_eq_freq, find_replace, regex_find_replace, substring_find_replace\n", "VAR_0 = Blueprint('transform_bp', __name__, url_prefix='/data/transform')\n", "@VAR_0.route('/rename_column', methods=['POST'])...\n", "VAR_1 = get_dataset_with_id(request.args.get('dataset_id'))\n", "VAR_2 = request.form['column']\n", "VAR_3 = request.form['new_name']\n", "rename_attribute(VAR_1.working_copy, VAR_2, VAR_3)\n", "flash('An unexpected error occured while renaming the column', 'danger')\n", "flash('Column renamed successfully.', 'success')\n", "create_action('Renamed column {0} to {1}'.format(VAR_2, VAR_3), VAR_1.id,\n current_user.id)\n", "return redirect(request.referrer)\n" ]
[ "from flask import Blueprint, request, jsonify, redirect, url_for, render_template, flash, Response\n", "from flask_login import login_required, current_user\n", "from app.Data.operations import create_action, get_dataset_with_id\n", "from app.Data.helpers import table_name_to_object\n", "from app.Data.Transform.operations import restore_original, change_attribute_type, delete_rows, fill_null_with, fill_null_with_average, fill_null_with_median, rename_attribute, delete_attribute, one_hot_encode, normalize_attribute, discretize_width, discretize_eq_freq, find_replace, regex_find_replace, substring_find_replace\n", "_transform = Blueprint('transform_bp', __name__, url_prefix='/data/transform')\n", "@_transform.route('/rename_column', methods=['POST'])...\n", "dataset = get_dataset_with_id(request.args.get('dataset_id'))\n", "col = request.form['column']\n", "new_name = request.form['new_name']\n", "rename_attribute(dataset.working_copy, col, new_name)\n", "flash('An unexpected error occured while renaming the column', 'danger')\n", "flash('Column renamed successfully.', 'success')\n", "create_action('Renamed column {0} to {1}'.format(col, new_name), dataset.id,\n current_user.id)\n", "return redirect(request.referrer)\n" ]
[ 4, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "from GeneralEngine.Unit import *\n", "import numpy as np\n", "from GeneralEngine.BinaryHeap import BinaryHeap, Node\n", "def __init__(self):...\n", "self.initialised = False\n", "self.board = None\n", "def FUNC_0(self, VAR_0):...\n", "VAR_8 = open(VAR_0, 'r')\n", "self.mapType = VAR_8.readline().split(' ')[1]\n", "self.mapHeight = int(VAR_8.readline().split(' ')[1])\n", "self.mapWidth = int(VAR_8.readline().split(' ')[1])\n", "assert VAR_8.readline().rstrip('\\n') == 'map', 'Unknown map format'\n", "self.originalMap = [['def' for VAR_22 in range(self.mapHeight)] for VAR_23 in\n range(self.mapWidth)]\n", "for VAR_23 in range(self.mapHeight):\n", "VAR_16 = VAR_8.readline().rstrip('\\n')\n", "self.initialised = True\n", "for VAR_22 in range(self.mapWidth):\n", "self.resetToOriginal()\n", "self.originalMap[VAR_22][self.mapHeight - VAR_23 - 1] = VAR_16[VAR_22]\n", "def FUNC_1(self):...\n", "self.attachNeighbours()\n", "self.identifyTurning()\n", "self.genConnections()\n", "def FUNC_2(self):...\n", "assert self.initialised, 'Initialise the map from a file first!'\n", "for VAR_23 in range(self.mapHeight):\n", "for VAR_22 in range(self.mapWidth):\n", "def FUNC_3(self):...\n", "self.stateMap[VAR_22][VAR_23].attachNeighbours([self.stateMap[(VAR_22 - 1) %\n self.mapWidth][(VAR_23 + 1) % self.mapHeight], self.stateMap[VAR_22 %\n self.mapWidth][(VAR_23 + 1) % self.mapHeight], self.stateMap[(VAR_22 + \n 1) % self.mapWidth][(VAR_23 + 1) % self.mapHeight], self.stateMap[(\n VAR_22 + 1) % self.mapWidth][VAR_23 % self.mapHeight], self.stateMap[(\n VAR_22 + 1) % self.mapWidth][(VAR_23 - 1) % self.mapHeight], self.\n stateMap[VAR_22 % self.mapWidth][(VAR_23 - 1) % self.mapHeight], self.\n stateMap[(VAR_22 - 1) % self.mapWidth][(VAR_23 - 1) % self.mapHeight],\n self.stateMap[(VAR_22 - 1) % self.mapWidth][VAR_23 % self.mapHeight]])\n", "assert self.initialised, 'Initialise the map from a file first!'\n", "self.turningNodes = []\n", "for VAR_23 in range(self.mapHeight):\n", "for VAR_22 in range(self.mapWidth):\n", "def FUNC_4(self):...\n", "if self.stateMap[VAR_22][VAR_23].isTraversable():\n", "assert self.initialised, 'Initialise the map from a file first!'\n", "self.stateMap[VAR_22][VAR_23].identifyTurning()\n", "for VAR_23 in range(self.mapHeight):\n", "if self.stateMap[VAR_22][VAR_23].isTurning():\n", "for VAR_22 in range(self.mapWidth):\n", "def FUNC_5(self, VAR_1, VAR_2):...\n", "self.turningNodes.append([VAR_22, VAR_23])\n", "if self.stateMap[VAR_22][VAR_23].isTraversable():\n", "assert self.turningNodes != [], 'Empty turning nodes!'\n", "self.stateMap[VAR_22][VAR_23].setTurningNeighbours(self.nearestNodes(VAR_22,\n VAR_23))\n", "VAR_9 = [0, 0, 0, 0]\n", "VAR_10 = [[(VAR_1 - 1) % self.mapWidth, VAR_2 % self.mapHeight], [VAR_1 %\n self.mapWidth, (VAR_2 + 1) % self.mapHeight], [(VAR_1 + 1) % self.\n mapWidth, VAR_2 % self.mapHeight], [VAR_1 % self.mapWidth, (VAR_2 - 1) %\n self.mapHeight]]\n", "while 0 in VAR_9:\n", "for VAR_17 in range(len(VAR_10)):\n", "return VAR_9\n", "if VAR_9[VAR_17] == 0:\n", "if self.stateMap[VAR_10[VAR_17][0]][VAR_10[VAR_17][1]].isTraversable():\n", "if VAR_9[VAR_17] == 0:\n", "if VAR_10[VAR_17][0] == VAR_1 and VAR_10[VAR_17][1] == VAR_2:\n", "VAR_9[VAR_17] = -1\n", "if VAR_17 % 2 == 0:\n", "VAR_9[VAR_17] = [VAR_10[VAR_17], 'y' if VAR_17 % 2 else 'x']\n", "if self.stateMap[VAR_10[VAR_17][0]][VAR_10[VAR_17][1]].isTurning():\n", "VAR_10[VAR_17][0] = (VAR_10[VAR_17][0] + VAR_17 - 1) % self.mapWidth\n", "VAR_10[VAR_17][1] = (VAR_10[VAR_17][1] + 2 - VAR_17) % self.mapHeight\n", "if 
VAR_17 == 0:\n", "VAR_9[VAR_17] = [VAR_10[VAR_17], 'x' if VAR_10[VAR_17][0] > VAR_1 else None]\n", "if VAR_17 == 1:\n", "VAR_9[VAR_17] = [VAR_10[VAR_17], 'y' if VAR_10[VAR_17][1] < VAR_2 else None]\n", "if VAR_17 == 2:\n", "VAR_9[VAR_17] = [VAR_10[VAR_17], 'x' if VAR_10[VAR_17][0] < VAR_1 else None]\n", "if VAR_17 == 3:\n", "VAR_9[VAR_17] = [VAR_10[VAR_17], 'y' if VAR_10[VAR_17][1] > VAR_2 else None]\n" ]
[ "from GeneralEngine.Unit import *\n", "import numpy as np\n", "from GeneralEngine.BinaryHeap import BinaryHeap, Node\n", "def __init__(self):...\n", "self.initialised = False\n", "self.board = None\n", "def initialiseMapFromFile(self, filename):...\n", "file = open(filename, 'r')\n", "self.mapType = file.readline().split(' ')[1]\n", "self.mapHeight = int(file.readline().split(' ')[1])\n", "self.mapWidth = int(file.readline().split(' ')[1])\n", "assert file.readline().rstrip('\\n') == 'map', 'Unknown map format'\n", "self.originalMap = [['def' for col in range(self.mapHeight)] for row in\n range(self.mapWidth)]\n", "for row in range(self.mapHeight):\n", "tRow = file.readline().rstrip('\\n')\n", "self.initialised = True\n", "for col in range(self.mapWidth):\n", "self.resetToOriginal()\n", "self.originalMap[col][self.mapHeight - row - 1] = tRow[col]\n", "def resetToOriginal(self):...\n", "self.attachNeighbours()\n", "self.identifyTurning()\n", "self.genConnections()\n", "def attachNeighbours(self):...\n", "assert self.initialised, 'Initialise the map from a file first!'\n", "for row in range(self.mapHeight):\n", "for col in range(self.mapWidth):\n", "def identifyTurning(self):...\n", "self.stateMap[col][row].attachNeighbours([self.stateMap[(col - 1) % self.\n mapWidth][(row + 1) % self.mapHeight], self.stateMap[col % self.\n mapWidth][(row + 1) % self.mapHeight], self.stateMap[(col + 1) % self.\n mapWidth][(row + 1) % self.mapHeight], self.stateMap[(col + 1) % self.\n mapWidth][row % self.mapHeight], self.stateMap[(col + 1) % self.\n mapWidth][(row - 1) % self.mapHeight], self.stateMap[col % self.\n mapWidth][(row - 1) % self.mapHeight], self.stateMap[(col - 1) % self.\n mapWidth][(row - 1) % self.mapHeight], self.stateMap[(col - 1) % self.\n mapWidth][row % self.mapHeight]])\n", "assert self.initialised, 'Initialise the map from a file first!'\n", "self.turningNodes = []\n", "for row in range(self.mapHeight):\n", "for col in range(self.mapWidth):\n", "def genConnections(self):...\n", "if self.stateMap[col][row].isTraversable():\n", "assert self.initialised, 'Initialise the map from a file first!'\n", "self.stateMap[col][row].identifyTurning()\n", "for row in range(self.mapHeight):\n", "if self.stateMap[col][row].isTurning():\n", "for col in range(self.mapWidth):\n", "def nearestNodes(self, x, y):...\n", "self.turningNodes.append([col, row])\n", "if self.stateMap[col][row].isTraversable():\n", "assert self.turningNodes != [], 'Empty turning nodes!'\n", "self.stateMap[col][row].setTurningNeighbours(self.nearestNodes(col, row))\n", "found = [0, 0, 0, 0]\n", "current = [[(x - 1) % self.mapWidth, y % self.mapHeight], [x % self.\n mapWidth, (y + 1) % self.mapHeight], [(x + 1) % self.mapWidth, y % self\n .mapHeight], [x % self.mapWidth, (y - 1) % self.mapHeight]]\n", "while 0 in found:\n", "for z in range(len(current)):\n", "return found\n", "if found[z] == 0:\n", "if self.stateMap[current[z][0]][current[z][1]].isTraversable():\n", "if found[z] == 0:\n", "if current[z][0] == x and current[z][1] == y:\n", "found[z] = -1\n", "if z % 2 == 0:\n", "found[z] = [current[z], 'y' if z % 2 else 'x']\n", "if self.stateMap[current[z][0]][current[z][1]].isTurning():\n", "current[z][0] = (current[z][0] + z - 1) % self.mapWidth\n", "current[z][1] = (current[z][1] + 2 - z) % self.mapHeight\n", "if z == 0:\n", "found[z] = [current[z], 'x' if current[z][0] > x else None]\n", "if z == 1:\n", "found[z] = [current[z], 'y' if current[z][1] < y else None]\n", "if z == 2:\n", "found[z] = [current[z], 'x' if 
current[z][0] < x else None]\n", "if z == 3:\n", "found[z] = [current[z], 'y' if current[z][1] > y else None]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assert'", "Assign'", "For", "Assign'", "Assign'", "For", "Expr'", "Assign'", "FunctionDef'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assert'", "For", "For", "FunctionDef'", "Expr'", "Assert'", "Assign'", "For", "For", "FunctionDef'", "Condition", "Assert'", "Expr'", "For", "Condition", "For", "FunctionDef'", "Expr'", "Condition", "Assert'", "Expr'", "Assign'", "Assign'", "Condition", "For", "Return'", "Condition", "Condition", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'" ]