lines (sequence, lengths 1–444) | raw_lines (sequence, lengths 1–444) | label (sequence, lengths 1–444) | type (sequence, lengths 1–444) |
---|---|---|---|
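Each row pairs identifier-normalized code (`lines`) with the original source (`raw_lines`), plus a per-line numeric label and a coarse AST node type, all lists of equal length. A minimal sketch of walking one row, assuming rows are plain JSON-style records with these four fields (field names are taken from the header above; the sample values are abbreviated from the first row, and the record layout beyond the header is an assumption, not an official loader):

```python
# Hypothetical record mirroring the schema in the header: four parallel
# lists of equal length (1-444 entries per row in this dataset).
record = {
    "lines": ["VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n"],
    "raw_lines": ["url = '/api/apps'\n", "response = self.client.post(url)\n"],
    "label": [0, 0],
    "type": ["Assign'", "Assign'"],
}

# Each index pairs an identifier-normalized line with its original source
# line, a per-line label, and an AST node type.
for norm, raw, label, node in zip(
    record["lines"], record["raw_lines"], record["label"], record["type"]
):
    print(f"{node:<8} label={label}  {norm.rstrip()}  <-  {raw.rstrip()}")
```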
[
"@mock.patch('requests.post', FUNC_0)...\n",
"\"\"\"docstring\"\"\"\n",
"self.client.login(username='autotest2', password='password')\n",
"VAR_4 = '/api/apps'\n",
"VAR_5 = self.client.post(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_6 = VAR_5.data['id']\n",
"self.client.login(username='autotest', password='password')\n",
"VAR_4 = '/api/apps/{app_id}/config'.format(**locals())\n",
"VAR_7 = {'values': json.dumps({'NEW_URL1': 'http://localhost:8080/'})}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"self.assertIn('NEW_URL1', VAR_5.data['values'])\n",
"VAR_4 = '/api/apps/{app_id}/releases'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(VAR_5.data['count'], 2)\n"
] | [
"@mock.patch('requests.post', mock_import_repository_task)...\n",
"\"\"\"docstring\"\"\"\n",
"self.client.login(username='autotest2', password='password')\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"self.client.login(username='autotest', password='password')\n",
"url = '/api/apps/{app_id}/config'.format(**locals())\n",
"body = {'values': json.dumps({'NEW_URL1': 'http://localhost:8080/'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"self.assertIn('NEW_URL1', response.data['values'])\n",
"url = '/api/apps/{app_id}/releases'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(response.data['count'], 2)\n"
] | [
0,
0,
5,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_5(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_4 = '/api/apps'\n",
"VAR_5 = self.client.post(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_6 = VAR_5.data['id']\n",
"VAR_14 = {'username': 'autotest', 'app': VAR_6}\n",
"VAR_4 = '/api/hooks/builds'.format(**locals())\n",
"VAR_15 = {'web': 'node server.js', 'worker': 'node worker.js'}\n",
"VAR_16 = 'ecdff91c57a0b9ab82e89634df87e293d259a3aa'\n",
"VAR_7 = {'receive_user': 'autotest', 'receive_repo': VAR_6, 'image':\n '{app_id}:v2'.format(**locals()), 'sha': VAR_16, 'procfile': VAR_15}\n",
"self.assertIsNone(self.client.logout())\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 403)\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json', HTTP_X_DEIS_BUILDER_AUTH=settings.BUILDER_KEY)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertIn('release', VAR_5.data)\n",
"self.assertIn('version', VAR_5.data['release'])\n",
"self.assertIn('domains', VAR_5.data)\n",
"self.assertTrue(self.client.login(VAR_8='autotest', VAR_9='password'))\n",
"VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertIn('results', VAR_5.data)\n",
"VAR_14 = VAR_5.data['results'][0]\n",
"self.assertEqual(VAR_14['sha'], VAR_16)\n",
"self.assertEqual(VAR_14['procfile'], VAR_15)\n",
"VAR_4 = '/api/apps/{app_id}/containers/web'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 1)\n",
"VAR_17 = VAR_5.data['results'][0]\n",
"self.assertEqual(VAR_17['type'], 'web')\n",
"self.assertEqual(VAR_17['num'], 1)\n"
] | [
"def test_build_hook_procfile(self):...\n",
"\"\"\"docstring\"\"\"\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"build = {'username': 'autotest', 'app': app_id}\n",
"url = '/api/hooks/builds'.format(**locals())\n",
"PROCFILE = {'web': 'node server.js', 'worker': 'node worker.js'}\n",
"SHA = 'ecdff91c57a0b9ab82e89634df87e293d259a3aa'\n",
"body = {'receive_user': 'autotest', 'receive_repo': app_id, 'image':\n '{app_id}:v2'.format(**locals()), 'sha': SHA, 'procfile': PROCFILE}\n",
"self.assertIsNone(self.client.logout())\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 403)\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json', HTTP_X_DEIS_BUILDER_AUTH=settings.BUILDER_KEY)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertIn('release', response.data)\n",
"self.assertIn('version', response.data['release'])\n",
"self.assertIn('domains', response.data)\n",
"self.assertTrue(self.client.login(username='autotest', password='password'))\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertIn('results', response.data)\n",
"build = response.data['results'][0]\n",
"self.assertEqual(build['sha'], SHA)\n",
"self.assertEqual(build['procfile'], PROCFILE)\n",
"url = '/api/apps/{app_id}/containers/web'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"container = response.data['results'][0]\n",
"self.assertEqual(container['type'], 'web')\n",
"self.assertEqual(container['num'], 1)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_28(self):...\n",
"VAR_0 = [KeyboardInterruptCheck(), SleepCheck(10), SleepCheck(10),\n SleepCheck(10)]\n",
"self._run_checks(VAR_0, 2)\n"
] | [
"def test_kbd_interrupt_in_wait_with_limited_concurrency(self):...\n",
"checks = [KeyboardInterruptCheck(), SleepCheck(10), SleepCheck(10),\n SleepCheck(10)]\n",
"self._run_checks(checks, 2)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_0(self, **VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = ['UPDATE', connection.ops.quote_name(self._meta.db_table), 'SET']\n",
"for field_name in VAR_0:\n",
"setattr(self, field_name, VAR_0[field_name])\n",
"VAR_1.pop(-1)\n",
"VAR_2 = self._meta.get_field(field_name)\n",
"VAR_1.extend(['WHERE', 'id', '=', str(self.id)])\n",
"VAR_3 = VAR_2.get_db_prep_save(VAR_0[field_name])\n",
"VAR_1 = ' '.join(VAR_1)\n",
"if isinstance(VAR_3, basestring):\n",
"connection.cursor().execute(VAR_1)\n",
"VAR_3 = \"'%s'\" % VAR_3.encode('utf-8').replace('\\\\', '\\\\\\\\')\n",
"if isinstance(VAR_3, models.Model):\n",
"transaction.commit_unless_managed()\n",
"VAR_1.extend((connection.ops.quote_name(VAR_2.column), '=', VAR_3, ','))\n",
"VAR_3 = str(VAR_3.id)\n",
"if VAR_3 is None:\n",
"VAR_3 = 'NULL'\n",
"VAR_3 = str(VAR_3)\n"
] | [
"def update_fields(self, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"sql = ['UPDATE', connection.ops.quote_name(self._meta.db_table), 'SET']\n",
"for field_name in kwargs:\n",
"setattr(self, field_name, kwargs[field_name])\n",
"sql.pop(-1)\n",
"field = self._meta.get_field(field_name)\n",
"sql.extend(['WHERE', 'id', '=', str(self.id)])\n",
"value = field.get_db_prep_save(kwargs[field_name])\n",
"sql = ' '.join(sql)\n",
"if isinstance(value, basestring):\n",
"connection.cursor().execute(sql)\n",
"value = \"'%s'\" % value.encode('utf-8').replace('\\\\', '\\\\\\\\')\n",
"if isinstance(value, models.Model):\n",
"transaction.commit_unless_managed()\n",
"sql.extend((connection.ops.quote_name(field.column), '=', value, ','))\n",
"value = str(value.id)\n",
"if value is None:\n",
"value = 'NULL'\n",
"value = str(value)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
4,
4,
4,
4,
0,
4,
4,
4,
4,
4
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_6(self, VAR_45):...\n",
""
] | [
"def run(self, uh):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_4(self, VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = VAR_1.session.get('oidc_nonce')\n",
"if VAR_6:\n",
"if 'code' in VAR_1.GET and 'state' in VAR_1.GET:\n",
"VAR_3 = {'request': VAR_1, 'nonce': VAR_6}\n",
"return self.login_failure()\n",
"if 'oidc_state' not in VAR_1.session:\n",
"return self.login_failure()\n",
"if VAR_1.GET['state'] != VAR_1.session['oidc_state']:\n",
"VAR_14 = 'Session `oidc_state` does not match the OIDC callback state'\n",
"self.user = auth.authenticate(**kwargs)\n",
"if self.user and self.user.is_active:\n",
"return self.login_success()\n"
] | [
"def get(self, request):...\n",
"\"\"\"docstring\"\"\"\n",
"nonce = request.session.get('oidc_nonce')\n",
"if nonce:\n",
"if 'code' in request.GET and 'state' in request.GET:\n",
"kwargs = {'request': request, 'nonce': nonce}\n",
"return self.login_failure()\n",
"if 'oidc_state' not in request.session:\n",
"return self.login_failure()\n",
"if request.GET['state'] != request.session['oidc_state']:\n",
"msg = 'Session `oidc_state` does not match the OIDC callback state'\n",
"self.user = auth.authenticate(**kwargs)\n",
"if self.user and self.user.is_active:\n",
"return self.login_success()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Return'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'"
] |
[
"def __init__(self, VAR_1, VAR_2, VAR_3):...\n",
"self.filepath = VAR_2\n",
"self.options = VAR_1\n",
"super(CLASS_1, self).__init__(VAR_3, VAR_0)\n"
] | [
"def __init__(self, options, filepath, env_ctx):...\n",
"self.filepath = filepath\n",
"self.options = options\n",
"super(GometaLinter, self).__init__(env_ctx, _go_get)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"from __future__ import absolute_import\n",
"from __future__ import division\n",
"from __future__ import print_function\n",
"import logging\n",
"import ray.ray_constants as ray_constants\n",
"\"\"\"string\"\"\"\n",
"def __init__(self, VAR_0=None, VAR_1=None, VAR_2=None, VAR_3=None, VAR_4=...\n",
"self.object_id_seed = VAR_11\n",
"self.redis_address = VAR_0\n",
"self.num_cpus = VAR_1\n",
"self.num_gpus = VAR_2\n",
"self.resources = VAR_3\n",
"self.object_store_memory = VAR_4\n",
"self.redis_max_memory = VAR_5\n",
"self.redis_port = VAR_6\n",
"self.redis_shard_ports = VAR_7\n",
"self.object_manager_port = VAR_8\n",
"self.node_manager_port = VAR_9\n",
"self.node_ip_address = VAR_10\n",
"self.num_workers = VAR_12\n",
"self.local_mode = VAR_13\n",
"self.driver_mode = VAR_14\n",
"self.redirect_worker_output = VAR_15\n",
"self.redirect_output = VAR_16\n",
"self.num_redis_shards = VAR_17\n",
"self.redis_max_clients = VAR_18\n",
"self.redis_password = VAR_19\n",
"self.plasma_directory = VAR_20\n",
"self.worker_path = VAR_21\n",
"self.huge_pages = VAR_22\n",
"self.include_webui = VAR_23\n",
"self.plasma_store_socket_name = VAR_26\n",
"self.raylet_socket_name = VAR_27\n",
"self.temp_dir = VAR_28\n",
"self.include_log_monitor = VAR_29\n",
"self.autoscaling_config = VAR_30\n",
"self.include_java = VAR_31\n",
"self.java_worker_options = VAR_32\n",
"self._internal_config = VAR_33\n",
"self._check_usage()\n",
"def FUNC_0(self, **VAR_34):...\n",
"\"\"\"docstring\"\"\"\n",
"for arg in VAR_34:\n",
"if hasattr(self, arg):\n",
"self._check_usage()\n",
"setattr(self, arg, VAR_34[arg])\n",
"def FUNC_1(self, **VAR_34):...\n",
"\"\"\"docstring\"\"\"\n",
"for arg in VAR_34:\n",
"if hasattr(self, arg):\n",
"self._check_usage()\n",
"if getattr(self, arg) is None:\n",
"def FUNC_2(self):...\n",
"setattr(self, arg, VAR_34[arg])\n",
"if self.resources is not None:\n",
"assert 'CPU' not in self.resources, \"'CPU' should not be included in the resource dictionary. Use num_cpus instead.\"\n",
"if self.num_workers is not None:\n",
"assert 'GPU' not in self.resources, \"'GPU' should not be included in the resource dictionary. Use num_gpus instead.\"\n",
"if self.include_java is None and self.java_worker_options is not None:\n"
] | [
"from __future__ import absolute_import\n",
"from __future__ import division\n",
"from __future__ import print_function\n",
"import logging\n",
"import ray.ray_constants as ray_constants\n",
"\"\"\"A class used to store the parameters used by Ray.\n\n Attributes:\n redis_address (str): The address of the Redis server to connect to. If\n this address is not provided, then this command will start Redis, a\n global scheduler, a local scheduler, a plasma store, a plasma\n manager, and some workers. It will also kill these processes when\n Python exits.\n redis_port (int): The port that the primary Redis shard should listen\n to. If None, then a random port will be chosen.\n redis_shard_ports: A list of the ports to use for the non-primary Redis\n shards.\n num_cpus (int): Number of CPUs to configure the raylet with.\n num_gpus (int): Number of GPUs to configure the raylet with.\n resources: A dictionary mapping the name of a resource to the quantity\n of that resource available.\n object_store_memory: The amount of memory (in bytes) to start the\n object store with.\n redis_max_memory: The max amount of memory (in bytes) to allow redis\n to use, or None for no limit. Once the limit is exceeded, redis\n will start LRU eviction of entries. This only applies to the\n sharded redis tables (task and object tables).\n object_manager_port int: The port to use for the object manager.\n node_manager_port: The port to use for the node manager.\n node_ip_address (str): The IP address of the node that we are on.\n object_id_seed (int): Used to seed the deterministic generation of\n object IDs. The same value can be used across multiple runs of the\n same job in order to generate the object IDs in a consistent\n manner. However, the same ID should not be used for different jobs.\n local_mode (bool): True if the code should be executed serially\n without Ray. This is useful for debugging.\n redirect_worker_output: True if the stdout and stderr of worker\n processes should be redirected to files.\n redirect_output (bool): True if stdout and stderr for non-worker\n processes should be redirected to files and false otherwise.\n num_redis_shards: The number of Redis shards to start in addition to\n the primary Redis shard.\n redis_max_clients: If provided, attempt to configure Redis with this\n maxclients number.\n redis_password (str): Prevents external clients without the password\n from connecting to Redis if provided.\n plasma_directory: A directory where the Plasma memory mapped files will\n be created.\n worker_path (str): The path of the source code that will be run by the\n worker.\n huge_pages: Boolean flag indicating whether to start the Object\n Store with hugetlbfs support. Requires plasma_directory.\n include_webui: Boolean flag indicating whether to start the web\n UI, which is a Jupyter notebook.\n logging_level: Logging level, default will be logging.INFO.\n logging_format: Logging format, default contains a timestamp,\n filename, line number, and message. 
See ray_constants.py.\n plasma_store_socket_name (str): If provided, it will specify the socket\n name used by the plasma store.\n raylet_socket_name (str): If provided, it will specify the socket path\n used by the raylet process.\n temp_dir (str): If provided, it will specify the root temporary\n directory for the Ray process.\n include_log_monitor (bool): If True, then start a log monitor to\n monitor the log files for all processes on this node and push their\n contents to Redis.\n autoscaling_config: path to autoscaling config file.\n include_java (bool): If True, the raylet backend can also support\n Java worker.\n java_worker_options (str): The command options for Java worker.\n _internal_config (str): JSON configuration for overriding\n RayConfig defaults. For testing purposes ONLY.\n \"\"\"\n",
"def __init__(self, redis_address=None, num_cpus=None, num_gpus=None,...\n",
"self.object_id_seed = object_id_seed\n",
"self.redis_address = redis_address\n",
"self.num_cpus = num_cpus\n",
"self.num_gpus = num_gpus\n",
"self.resources = resources\n",
"self.object_store_memory = object_store_memory\n",
"self.redis_max_memory = redis_max_memory\n",
"self.redis_port = redis_port\n",
"self.redis_shard_ports = redis_shard_ports\n",
"self.object_manager_port = object_manager_port\n",
"self.node_manager_port = node_manager_port\n",
"self.node_ip_address = node_ip_address\n",
"self.num_workers = num_workers\n",
"self.local_mode = local_mode\n",
"self.driver_mode = driver_mode\n",
"self.redirect_worker_output = redirect_worker_output\n",
"self.redirect_output = redirect_output\n",
"self.num_redis_shards = num_redis_shards\n",
"self.redis_max_clients = redis_max_clients\n",
"self.redis_password = redis_password\n",
"self.plasma_directory = plasma_directory\n",
"self.worker_path = worker_path\n",
"self.huge_pages = huge_pages\n",
"self.include_webui = include_webui\n",
"self.plasma_store_socket_name = plasma_store_socket_name\n",
"self.raylet_socket_name = raylet_socket_name\n",
"self.temp_dir = temp_dir\n",
"self.include_log_monitor = include_log_monitor\n",
"self.autoscaling_config = autoscaling_config\n",
"self.include_java = include_java\n",
"self.java_worker_options = java_worker_options\n",
"self._internal_config = _internal_config\n",
"self._check_usage()\n",
"def update(self, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"for arg in kwargs:\n",
"if hasattr(self, arg):\n",
"self._check_usage()\n",
"setattr(self, arg, kwargs[arg])\n",
"def update_if_absent(self, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"for arg in kwargs:\n",
"if hasattr(self, arg):\n",
"self._check_usage()\n",
"if getattr(self, arg) is None:\n",
"def _check_usage(self):...\n",
"setattr(self, arg, kwargs[arg])\n",
"if self.resources is not None:\n",
"assert 'CPU' not in self.resources, \"'CPU' should not be included in the resource dictionary. Use num_cpus instead.\"\n",
"if self.num_workers is not None:\n",
"assert 'GPU' not in self.resources, \"'GPU' should not be included in the resource dictionary. Use num_gpus instead.\"\n",
"if self.include_java is None and self.java_worker_options is not None:\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Docstring",
"For",
"Condition",
"Expr'",
"Expr'",
"FunctionDef'",
"Docstring",
"For",
"Condition",
"Expr'",
"Condition",
"FunctionDef'",
"Expr'",
"Condition",
"Assert'",
"Condition",
"Assert'",
"Condition"
] |
[
"@click.command(help='List available monitor locations')...\n",
"VAR_32 = newrelic.get_locations(VAR_0.obj['ACCOUNT'])\n",
"if VAR_11:\n",
"print(json.dumps(VAR_32))\n",
"VAR_24 = [['#', 'City', 'Continent', 'Code', 'Availability', 'Accessibility']]\n",
"return\n",
"for number, VAR_3 in enumerate(VAR_32.values()):\n",
"VAR_33 = click.style(u'✔', fg='green')\n",
"VAR_25 = SingleTable(VAR_24)\n",
"if not VAR_3['available']:\n",
"VAR_25.title = click.style('Locations', fg='black')\n",
"click.style(u'✖', fg='red')\n",
"VAR_34 = 'Private' if VAR_3['private'] else 'Public'\n",
"for VAR_35 in [0, 4, 5]:\n",
"VAR_24.append([number, VAR_3['label'], VAR_3['continent'], VAR_3['name'],\n VAR_33, VAR_34])\n",
"VAR_25.justify_columns[VAR_35] = 'right'\n",
"print(VAR_25.table)\n"
] | [
"@click.command(help='List available monitor locations')...\n",
"locations = newrelic.get_locations(ctx.obj['ACCOUNT'])\n",
"if raw:\n",
"print(json.dumps(locations))\n",
"data = [['#', 'City', 'Continent', 'Code', 'Availability', 'Accessibility']]\n",
"return\n",
"for number, location in enumerate(locations.values()):\n",
"available = click.style(u'✔', fg='green')\n",
"table = SingleTable(data)\n",
"if not location['available']:\n",
"table.title = click.style('Locations', fg='black')\n",
"click.style(u'✖', fg='red')\n",
"private = 'Private' if location['private'] else 'Public'\n",
"for i in [0, 4, 5]:\n",
"data.append([number, location['label'], location['continent'], location[\n 'name'], available, private])\n",
"table.justify_columns[i] = 'right'\n",
"print(table.table)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"For",
"Expr'",
"Assign'",
"Expr'"
] |
[
"@staticmethod...\n",
"if not VAR_3:\n",
"VAR_2 = quote(VAR_2)\n",
"VAR_2 = map(lambda i: hex(ord(i)).replace('0x', '%'), VAR_2)\n",
"return VAR_2\n",
"VAR_2 = ''.join(VAR_2)\n"
] | [
"@staticmethod...\n",
"if not force:\n",
"s = quote(s)\n",
"s = map(lambda i: hex(ord(i)).replace('0x', '%'), s)\n",
"return s\n",
"s = ''.join(s)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Assign'"
] |
[
"def FUNC_27(VAR_50):...\n",
"if re.compile('^(select|delete|update|drop|create)\\\\s').match(VAR_50):\n",
"FUNC_26()\n",
"if re.compile('\\\\s*[a-zA-z]*\\\\s*( from | group by | order by | where | join )'\n",
"FUNC_26()\n"
] | [
"def _is_query(field):...\n",
"if re.compile('^(select|delete|update|drop|create)\\\\s').match(field):\n",
"_raise_exception()\n",
"if re.compile('\\\\s*[a-zA-z]*\\\\s*( from | group by | order by | where | join )'\n",
"_raise_exception()\n"
] | [
0,
0,
0,
4,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_3(self, VAR_3):...\n",
"VAR_20 = VAR_3.value.upper()\n",
"if VAR_3.type == 'ID':\n",
"if not VAR_20 in self.spdx.licenses:\n",
"if VAR_3.type == 'EXC':\n",
"self.lastid = VAR_20\n",
"if VAR_20 not in self.spdx.exceptions:\n",
"if VAR_3.type != 'WITH':\n",
"if self.lastid not in self.spdx.exceptions[VAR_20]:\n",
"self.lastid = None\n",
"self.lastid = None\n"
] | [
"def validate(self, tok):...\n",
"id = tok.value.upper()\n",
"if tok.type == 'ID':\n",
"if not id in self.spdx.licenses:\n",
"if tok.type == 'EXC':\n",
"self.lastid = id\n",
"if id not in self.spdx.exceptions:\n",
"if tok.type != 'WITH':\n",
"if self.lastid not in self.spdx.exceptions[id]:\n",
"self.lastid = None\n",
"self.lastid = None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_13(self, VAR_16):...\n",
"\"\"\"docstring\"\"\"\n",
"self._check_volume(VAR_16)\n",
"VAR_0.warn(_('Volume %s was not found while trying to delete it'), VAR_16[\n 'name'])\n",
"self._eql_execute('volume', 'select', VAR_16['name'], 'offline')\n",
"VAR_0.error(_('Failed to delete volume %s'), VAR_16['name'])\n",
"self._eql_execute('volume', 'delete', VAR_16['name'])\n"
] | [
"def delete_volume(self, volume):...\n",
"\"\"\"docstring\"\"\"\n",
"self._check_volume(volume)\n",
"LOG.warn(_('Volume %s was not found while trying to delete it'), volume['name']\n )\n",
"self._eql_execute('volume', 'select', volume['name'], 'offline')\n",
"LOG.error(_('Failed to delete volume %s'), volume['name'])\n",
"self._eql_execute('volume', 'delete', volume['name'])\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@api.multi...\n",
"\"\"\"docstring\"\"\"\n",
"return super(CLASS_0, self + self.mapped('church_id')\n ).update_number_sponsorships()\n"
] | [
"@api.multi...\n",
"\"\"\"docstring\"\"\"\n",
"return super(ResPartner, self + self.mapped('church_id')\n ).update_number_sponsorships()\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_37(self, VAR_16, VAR_21=None):...\n",
"\"\"\"docstring\"\"\"\n",
"from frappe.model.meta import get_field_precision\n",
"if VAR_21 and not isinstance(VAR_21, string_types):\n",
"VAR_21 = VAR_21.parentfield\n",
"VAR_45 = VAR_21 or 'main'\n",
"if not hasattr(self, '_precision'):\n",
"self._precision = VAR_54._dict()\n",
"if VAR_45 not in self._precision:\n",
"self._precision[VAR_45] = VAR_54._dict()\n",
"if VAR_16 not in self._precision[VAR_45]:\n",
"self._precision[VAR_45][VAR_16] = None\n",
"return self._precision[VAR_45][VAR_16]\n",
"VAR_1 = self.meta.get_field(VAR_21).options if VAR_21 else self.doctype\n",
"VAR_25 = VAR_54.get_meta(VAR_1).get_field(VAR_16)\n",
"if VAR_25.fieldtype in ('Currency', 'Float', 'Percent'):\n",
"self._precision[VAR_45][VAR_16] = get_field_precision(VAR_25, self)\n"
] | [
"def precision(self, fieldname, parentfield=None):...\n",
"\"\"\"docstring\"\"\"\n",
"from frappe.model.meta import get_field_precision\n",
"if parentfield and not isinstance(parentfield, string_types):\n",
"parentfield = parentfield.parentfield\n",
"cache_key = parentfield or 'main'\n",
"if not hasattr(self, '_precision'):\n",
"self._precision = frappe._dict()\n",
"if cache_key not in self._precision:\n",
"self._precision[cache_key] = frappe._dict()\n",
"if fieldname not in self._precision[cache_key]:\n",
"self._precision[cache_key][fieldname] = None\n",
"return self._precision[cache_key][fieldname]\n",
"doctype = self.meta.get_field(parentfield\n ).options if parentfield else self.doctype\n",
"df = frappe.get_meta(doctype).get_field(fieldname)\n",
"if df.fieldtype in ('Currency', 'Float', 'Percent'):\n",
"self._precision[cache_key][fieldname] = get_field_precision(df, self)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"ImportFrom'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'"
] |
[
"from django.core.urlresolvers import reverse\n",
"from rest_framework.settings import api_settings\n",
"def FUNC_0(VAR_0, VAR_1=None, **VAR_2):...\n",
"if not VAR_1:\n",
"VAR_1 = {}\n",
"VAR_1.setdefault('version', api_settings.DEFAULT_VERSION)\n",
"return reverse('api:' + VAR_0, VAR_1=kwargs, **extra)\n"
] | [
"from django.core.urlresolvers import reverse\n",
"from rest_framework.settings import api_settings\n",
"def api_reverse(name, kwargs=None, **extra):...\n",
"if not kwargs:\n",
"kwargs = {}\n",
"kwargs.setdefault('version', api_settings.DEFAULT_VERSION)\n",
"return reverse('api:' + name, kwargs=kwargs, **extra)\n"
] | [
4,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Condition",
"Assign'",
"Expr'",
"Return'"
] |
[
"def __init__(self, VAR_64):...\n",
"self.func = VAR_64\n",
"self.shellcmd = None\n",
"self.norun = False\n",
"self.input = None\n",
"self.output = None\n",
"self.params = None\n",
"self.message = None\n",
"self.benchmark = None\n",
"self.threads = None\n",
"self.resources = None\n",
"self.priority = None\n",
"self.version = None\n",
"self.log = None\n",
"self.docstring = None\n"
] | [
"def __init__(self, func):...\n",
"self.func = func\n",
"self.shellcmd = None\n",
"self.norun = False\n",
"self.input = None\n",
"self.output = None\n",
"self.params = None\n",
"self.message = None\n",
"self.benchmark = None\n",
"self.threads = None\n",
"self.resources = None\n",
"self.priority = None\n",
"self.version = None\n",
"self.log = None\n",
"self.docstring = None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Docstring"
] |
[
"def FUNC_17(self, VAR_8):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = \"UPDATE {0} SET file_id=NULL WHERE path='{1}'\".format(VAR_2, VAR_8)\n",
"self._run_command(VAR_7)\n"
] | [
"def invalidateCached(self, pth):...\n",
"\"\"\"docstring\"\"\"\n",
"command = \"UPDATE {0} SET file_id=NULL WHERE path='{1}'\".format(TABLE_NAME, pth\n )\n",
"self._run_command(command)\n"
] | [
0,
0,
4,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_0, VAR_1, VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"self.proxy = VAR_0\n",
"self.attribute = VAR_1\n",
"self.relation_operation = VAR_2\n"
] | [
"def __init__(self, proxy, attribute, relation_operation):...\n",
"\"\"\"docstring\"\"\"\n",
"self.proxy = proxy\n",
"self.attribute = attribute\n",
"self.relation_operation = relation_operation\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_6(self, VAR_6):...\n",
"VAR_10 = str(VAR_6.date_time) if VAR_6.date_time else None\n",
"VAR_25 = f'{VAR_6.camera_brand} {VAR_6.camera_model}'\n",
"VAR_26 = f'{VAR_6.lens_brand} {VAR_6.lens_model}'\n",
"VAR_25 = self._dedupe_string(VAR_25) if VAR_25 != ' ' else None\n",
"VAR_26 = self._dedupe_string(VAR_26) if VAR_26 != ' ' else None\n",
"VAR_25, VAR_26 = self._check_camera_tags([VAR_25, VAR_26])\n",
"VAR_7, VAR_8 = self._convert_coordinates(VAR_6)\n",
"VAR_21 = VAR_22 = VAR_7 = VAR_8 = None\n",
"VAR_21, VAR_22 = self._get_address(VAR_7, VAR_8)\n",
"log.warning(e)\n",
"return VAR_10, VAR_25, VAR_26, VAR_21, VAR_22, VAR_7, VAR_8\n",
"VAR_21 = VAR_22 = None\n"
] | [
"def _convert_data(self, raw_data):...\n",
"date_time = str(raw_data.date_time) if raw_data.date_time else None\n",
"camera = f'{raw_data.camera_brand} {raw_data.camera_model}'\n",
"lens = f'{raw_data.lens_brand} {raw_data.lens_model}'\n",
"camera = self._dedupe_string(camera) if camera != ' ' else None\n",
"lens = self._dedupe_string(lens) if lens != ' ' else None\n",
"camera, lens = self._check_camera_tags([camera, lens])\n",
"latitude, longitude = self._convert_coordinates(raw_data)\n",
"address = country = latitude = longitude = None\n",
"address, country = self._get_address(latitude, longitude)\n",
"log.warning(e)\n",
"return date_time, camera, lens, address, country, latitude, longitude\n",
"address = country = None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Assign'"
] |
[
"def FUNC_4(self, VAR_1):...\n",
"VAR_11 = VAR_1.split(':')[1].split(',')\n",
"VAR_15 = 4\n",
"VAR_16 = VAR_11[VAR_15:VAR_15 + self.dim_x]\n",
"VAR_17 = VAR_11[VAR_15 + self.dim_x:VAR_15 + self.dim_x + self.dim_y]\n",
"VAR_18 = VAR_11[VAR_15 + self.dim_x + self.dim_y:VAR_15 + self.dim_x * 2 +\n self.dim_y]\n",
"VAR_19 = VAR_11[VAR_15 + self.dim_x * 2 + self.dim_y:VAR_15 + self.dim_x * \n 2 + self.dim_y * 2]\n",
"return VAR_16, VAR_17, VAR_18, VAR_19\n"
] | [
"def calc_board_count(self, board_str):...\n",
"board_split = board_str.split(':')[1].split(',')\n",
"board_count_start = 4\n",
"top_row = board_split[board_count_start:board_count_start + self.dim_x]\n",
"right_col = board_split[board_count_start + self.dim_x:board_count_start +\n self.dim_x + self.dim_y]\n",
"bottom_row = board_split[board_count_start + self.dim_x + self.dim_y:\n board_count_start + self.dim_x * 2 + self.dim_y]\n",
"left_col = board_split[board_count_start + self.dim_x * 2 + self.dim_y:\n board_count_start + self.dim_x * 2 + self.dim_y * 2]\n",
"return top_row, right_col, bottom_row, left_col\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_3(self, VAR_7):...\n",
"\"\"\"docstring\"\"\"\n",
"if getattr(g, 'oidc_id_token_dirty', False):\n",
"VAR_24 = self.cookie_serializer.dumps(g.oidc_id_token)\n",
"return VAR_7\n",
"VAR_7.set_cookie(self.app.config['OIDC_ID_TOKEN_COOKIE_NAME'], VAR_24,\n secure=self.app.config['OIDC_ID_TOKEN_COOKIE_SECURE'], httponly=True,\n max_age=self.app.config['OIDC_ID_TOKEN_COOKIE_TTL'])\n"
] | [
"def after_request(self, response):...\n",
"\"\"\"docstring\"\"\"\n",
"if getattr(g, 'oidc_id_token_dirty', False):\n",
"signed_id_token = self.cookie_serializer.dumps(g.oidc_id_token)\n",
"return response\n",
"response.set_cookie(self.app.config['OIDC_ID_TOKEN_COOKIE_NAME'],\n signed_id_token, secure=self.app.config['OIDC_ID_TOKEN_COOKIE_SECURE'],\n httponly=True, max_age=self.app.config['OIDC_ID_TOKEN_COOKIE_TTL'])\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_7(self, *VAR_7, **VAR_8):...\n",
"\"\"\"docstring\"\"\"\n",
"for VAR_9 in VAR_7:\n",
"self._set_inoutput_item(VAR_9, VAR_7=True)\n",
"for VAR_10, VAR_9 in VAR_8.items():\n",
"self._set_inoutput_item(VAR_9, VAR_7=True, VAR_10=name)\n",
"for VAR_9 in self.output:\n",
"if self.dynamic_output and VAR_9 not in self.dynamic_output:\n",
"VAR_3 = VAR_9.get_wildcard_names()\n",
"if self.wildcard_names:\n",
"if self.wildcard_names != VAR_3:\n",
"self.wildcard_names = VAR_3\n"
] | [
"def set_output(self, *output, **kwoutput):...\n",
"\"\"\"docstring\"\"\"\n",
"for item in output:\n",
"self._set_inoutput_item(item, output=True)\n",
"for name, item in kwoutput.items():\n",
"self._set_inoutput_item(item, output=True, name=name)\n",
"for item in self.output:\n",
"if self.dynamic_output and item not in self.dynamic_output:\n",
"wildcards = item.get_wildcard_names()\n",
"if self.wildcard_names:\n",
"if self.wildcard_names != wildcards:\n",
"self.wildcard_names = wildcards\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Expr'",
"For",
"Expr'",
"For",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'"
] |
[
"def FUNC_40(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return list(self.list_properties.keys())\n"
] | [
"def get_columns_list(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return list(self.list_properties.keys())\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_9(self):...\n",
"fork(['svn', 'status', self.directory])\n"
] | [
"def status(self):...\n",
"fork(['svn', 'status', self.directory])\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"@VAR_0.route('/varga')...\n",
"VAR_13 = request.args.get('varga')\n",
"VAR_14 = []\n",
"con.close()\n",
"con.row_factory = sql.Row\n",
"VAR_15 = con.cursor()\n",
"VAR_15.execute(\"select * from mula where varga = '%s';\" % VAR_13)\n",
"VAR_16 = VAR_15.fetchall()\n",
"return render_template('varga.html', VAR_16=mula, VAR_13=varga)\n"
] | [
"@app.route('/varga')...\n",
"varga = request.args.get('varga')\n",
"rows = []\n",
"con.close()\n",
"con.row_factory = sql.Row\n",
"cur = con.cursor()\n",
"cur.execute(\"select * from mula where varga = '%s';\" % varga)\n",
"mula = cur.fetchall()\n",
"return render_template('varga.html', mula=mula, varga=varga)\n"
] | [
0,
0,
0,
0,
0,
0,
4,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Return'"
] |
[
"@BaseHandler.check_is_group_user('Announcement Manager')...\n",
"if VAR_6:\n",
"VAR_15 = Announce.by_id(VAR_6, self.sql_session).scalar()\n",
"self._['user_groups'] = GroupList.get_user_groups(self.current_user.key,\n self.sql_session)\n",
"if not VAR_15:\n",
"self.page_render(self._)\n",
"self._['ann_id'] = VAR_6\n",
"self._['title'] = VAR_15.title\n",
"self._['content'] = VAR_15.content\n",
"self._['is_private'] = VAR_15.is_private\n",
"VAR_16 = AttachmentList.by_ann_id(VAR_6, self.sql_session).all()\n",
"self._['tags'] = AnnTag.get_ann_tags(VAR_6, self.sql_session)\n",
"self._['atts'] = [att.to_dict() for att in VAR_16]\n",
"if self.is_group_user(VAR_15.author_group_name):\n",
"self._['group'] = VAR_15.author_group_name\n"
] | [
"@BaseHandler.check_is_group_user('Announcement Manager')...\n",
"if ann_id:\n",
"ann = Announce.by_id(ann_id, self.sql_session).scalar()\n",
"self._['user_groups'] = GroupList.get_user_groups(self.current_user.key,\n self.sql_session)\n",
"if not ann:\n",
"self.page_render(self._)\n",
"self._['ann_id'] = ann_id\n",
"self._['title'] = ann.title\n",
"self._['content'] = ann.content\n",
"self._['is_private'] = ann.is_private\n",
"atts = AttachmentList.by_ann_id(ann_id, self.sql_session).all()\n",
"self._['tags'] = AnnTag.get_ann_tags(ann_id, self.sql_session)\n",
"self._['atts'] = [att.to_dict() for att in atts]\n",
"if self.is_group_user(ann.author_group_name):\n",
"self._['group'] = ann.author_group_name\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_7(self, VAR_10):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self._rules:\n",
"if not VAR_10 in self._rules:\n",
"return self._rules[VAR_10]\n"
] | [
"def get_rule(self, name):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self._rules:\n",
"if not name in self._rules:\n",
"return self._rules[name]\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Condition",
"Return'"
] |
[
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"super(CLASS_0, VAR_0).initArgumentParser(VAR_2, VAR_3=defaults)\n",
"VAR_3 = VAR_3 or {}\n",
"VAR_2.add_argument('--test_component_detail', default='fe', help=\n 'Refinement for component name to create.')\n"
] | [
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"super(AwsSmokeTestScenario, cls).initArgumentParser(parser, defaults=defaults)\n",
"defaults = defaults or {}\n",
"parser.add_argument('--test_component_detail', default='fe', help=\n 'Refinement for component name to create.')\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def __exit__(self, VAR_7, VAR_8, VAR_9):...\n",
""
] | [
"def __exit__(self, type, value, tb):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_14(self):...\n",
"self.path = self.path[:-1]\n"
] | [
"def go_to_parent_directory(self):...\n",
"self.path = self.path[:-1]\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_39(VAR_101):...\n",
"VAR_101.threads = VAR_75\n",
"return VAR_101\n"
] | [
"def decorate(ruleinfo):...\n",
"ruleinfo.threads = threads\n",
"return ruleinfo\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"import re\n",
"import salt.modules.cmdmod\n",
"import logging\n",
"VAR_0 = logging.getLogger(__name__)\n",
"__salt__ = {'cmd.run': salt.modules.cmdmod._run_quiet, 'config.get': salt.\n modules.config.get}\n",
"def FUNC_0():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_0.debug('Fetching custom grains and pillar details')\n",
"VAR_2 = {}\n",
"salt.modules.config.__opts__ = __opts__\n",
"VAR_3 = __salt__['config.get']('custom_grains_pillar:grains', [])\n",
"for grain in VAR_3:\n",
"for VAR_7 in grain:\n",
"VAR_4 = __salt__['config.get']('custom_grains_pillar:pillar', [])\n",
"if FUNC_1(grain[VAR_7]):\n",
"for pillar in VAR_4:\n",
"VAR_6 = __salt__['cmd.run']('salt-call grains.get {0}'.format(grain[VAR_7])\n ).split('\\n')[1].strip()\n",
"for VAR_7 in pillar:\n",
"VAR_0.debug('Done with fetching custom grains and pillar details')\n",
"VAR_2[VAR_7] = VAR_6\n",
"if FUNC_1(pillar[VAR_7]):\n",
"return VAR_2\n",
"VAR_6 = __salt__['cmd.run']('salt-call pillar.get {0}'.format(pillar[VAR_7])\n ).split('\\n')[1].strip()\n",
"VAR_2[VAR_7] = VAR_6\n"
] | [
"\"\"\"\nHubbleStack Custom Grains and Pillar\n\nAllows for fetching custom grain and pillar data from a local salt-minion via\nsalt-call\n\n:maintainer: HubbleStack\n:platform: All\n:requires: SaltStack\n\"\"\"\n",
"import re\n",
"import salt.modules.cmdmod\n",
"import logging\n",
"log = logging.getLogger(__name__)\n",
"__salt__ = {'cmd.run': salt.modules.cmdmod._run_quiet, 'config.get': salt.\n modules.config.get}\n",
"def populate_custom_grains_and_pillar():...\n",
"\"\"\"docstring\"\"\"\n",
"log.debug('Fetching custom grains and pillar details')\n",
"grains = {}\n",
"salt.modules.config.__opts__ = __opts__\n",
"custom_grains = __salt__['config.get']('custom_grains_pillar:grains', [])\n",
"for grain in custom_grains:\n",
"for key in grain:\n",
"custom_pillar = __salt__['config.get']('custom_grains_pillar:pillar', [])\n",
"if _valid_command(grain[key]):\n",
"for pillar in custom_pillar:\n",
"value = __salt__['cmd.run']('salt-call grains.get {0}'.format(grain[key])\n ).split('\\n')[1].strip()\n",
"for key in pillar:\n",
"log.debug('Done with fetching custom grains and pillar details')\n",
"grains[key] = value\n",
"if _valid_command(pillar[key]):\n",
"return grains\n",
"value = __salt__['cmd.run']('salt-call pillar.get {0}'.format(pillar[key])\n ).split('\\n')[1].strip()\n",
"grains[key] = value\n"
] | [
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
2,
0,
0,
0,
2,
0,
2,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"For",
"For",
"Assign'",
"Condition",
"For",
"Assign'",
"For",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'"
] |
[
"\"\"\"string\"\"\"\n",
"import mimetypes\n",
"import os\n",
"import posixpath\n",
"import re\n",
"import stat\n",
"from django.http import FileResponse, Http404, HttpResponse, HttpResponseNotModified, HttpResponseRedirect\n",
"from django.template import Context, Engine, TemplateDoesNotExist, loader\n",
"from django.utils.http import http_date, parse_http_date\n",
"from django.utils.translation import gettext as _, gettext_lazy\n",
"def FUNC_0(VAR_0, VAR_1, VAR_2=None, VAR_3=False):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = posixpath.normpath(VAR_1)\n",
"VAR_1 = VAR_1.lstrip('/')\n",
"VAR_10 = ''\n",
"for VAR_18 in VAR_1.split('/'):\n",
"if not VAR_18:\n",
"if VAR_10 and VAR_1 != VAR_10:\n",
"VAR_17, VAR_18 = os.path.splitdrive(VAR_18)\n",
"return HttpResponseRedirect(VAR_10)\n",
"VAR_6 = os.path.join(VAR_2, VAR_10)\n",
"VAR_19, VAR_18 = os.path.split(VAR_18)\n",
"if os.path.isdir(VAR_6):\n",
"if VAR_18 in (os.curdir, os.pardir):\n",
"if VAR_3:\n",
"if not os.path.exists(VAR_6):\n",
"VAR_10 = os.path.join(VAR_10, VAR_18).replace('\\\\', '/')\n",
"return FUNC_1(VAR_10, VAR_6)\n",
"VAR_11 = os.stat(VAR_6)\n",
"if not FUNC_2(VAR_0.META.get('HTTP_IF_MODIFIED_SINCE'), VAR_11.st_mtime,\n",
"return HttpResponseNotModified()\n",
"VAR_12, VAR_13 = mimetypes.guess_type(VAR_6)\n",
"VAR_12 = VAR_12 or 'application/octet-stream'\n",
"VAR_14 = FileResponse(open(VAR_6, 'rb'), VAR_12=content_type)\n",
"VAR_14['Last-Modified'] = http_date(VAR_11.st_mtime)\n",
"if stat.S_ISREG(VAR_11.st_mode):\n",
"VAR_14['Content-Length'] = VAR_11.st_size\n",
"if VAR_13:\n",
"VAR_14['Content-Encoding'] = VAR_13\n",
"return VAR_14\n"
] | [
"\"\"\"\nViews and functions for serving static files. These are only to be used\nduring development, and SHOULD NOT be used in a production setting.\n\"\"\"\n",
"import mimetypes\n",
"import os\n",
"import posixpath\n",
"import re\n",
"import stat\n",
"from django.http import FileResponse, Http404, HttpResponse, HttpResponseNotModified, HttpResponseRedirect\n",
"from django.template import Context, Engine, TemplateDoesNotExist, loader\n",
"from django.utils.http import http_date, parse_http_date\n",
"from django.utils.translation import gettext as _, gettext_lazy\n",
"def serve(request, path, document_root=None, show_indexes=False):...\n",
"\"\"\"docstring\"\"\"\n",
"path = posixpath.normpath(path)\n",
"path = path.lstrip('/')\n",
"newpath = ''\n",
"for part in path.split('/'):\n",
"if not part:\n",
"if newpath and path != newpath:\n",
"drive, part = os.path.splitdrive(part)\n",
"return HttpResponseRedirect(newpath)\n",
"fullpath = os.path.join(document_root, newpath)\n",
"head, part = os.path.split(part)\n",
"if os.path.isdir(fullpath):\n",
"if part in (os.curdir, os.pardir):\n",
"if show_indexes:\n",
"if not os.path.exists(fullpath):\n",
"newpath = os.path.join(newpath, part).replace('\\\\', '/')\n",
"return directory_index(newpath, fullpath)\n",
"statobj = os.stat(fullpath)\n",
"if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),\n",
"return HttpResponseNotModified()\n",
"content_type, encoding = mimetypes.guess_type(fullpath)\n",
"content_type = content_type or 'application/octet-stream'\n",
"response = FileResponse(open(fullpath, 'rb'), content_type=content_type)\n",
"response['Last-Modified'] = http_date(statobj.st_mtime)\n",
"if stat.S_ISREG(statobj.st_mode):\n",
"response['Content-Length'] = statobj.st_size\n",
"if encoding:\n",
"response['Content-Encoding'] = encoding\n",
"return response\n"
] | [
0,
0,
0,
0,
0,
0,
6,
0,
0,
0,
0,
0,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
0,
6,
0,
0,
6,
6,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_0(VAR_2, VAR_3, VAR_4=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_4 = dict(VAR_4 or {})\n",
"VAR_4['Content-Length'] = len(VAR_2)\n",
"def __init__(self):...\n",
"self.content = VAR_2\n",
"def FUNC_6(self, VAR_7):...\n",
"VAR_8 = self.content\n",
"while VAR_8:\n",
"yield VAR_8[:VAR_7]\n",
"def FUNC_7(self):...\n",
"VAR_8 = VAR_8[VAR_7:]\n",
"return self.content\n"
] | [
"def make_fake_response(content, url, headers=None):...\n",
"\"\"\"docstring\"\"\"\n",
"headers = dict(headers or {})\n",
"headers['Content-Length'] = len(content)\n",
"def __init__(self):...\n",
"self.content = content\n",
"def iter_content(self, chunk_size):...\n",
"c = self.content\n",
"while c:\n",
"yield c[:chunk_size]\n",
"def read(self):...\n",
"c = c[chunk_size:]\n",
"return self.content\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"FunctionDef'",
"Assign'",
"Condition",
"Expr'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_22(self):...\n",
"VAR_23 = f'{self.get_dg_folder()}/{str(self.fs_id)}.zip'\n",
"VAR_24 = f'{self.get_dg_folder()}/{self.get_name_as_slug()}.zip'\n",
"if os.path.isfile(VAR_23):\n",
"VAR_30 = VAR_23\n",
"if os.path.isfile(VAR_24):\n",
"return VAR_30\n",
"VAR_30 = VAR_24\n",
"VAR_30 = 'no_path_found'\n"
] | [
"def get_zip_url(self):...\n",
"uuid_path = f'{self.get_dg_folder()}/{str(self.fs_id)}.zip'\n",
"zip_file_path = f'{self.get_dg_folder()}/{self.get_name_as_slug()}.zip'\n",
"if os.path.isfile(uuid_path):\n",
"zip_url = uuid_path\n",
"if os.path.isfile(zip_file_path):\n",
"return zip_url\n",
"zip_url = zip_file_path\n",
"zip_url = 'no_path_found'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'"
] |
[
"@user_passes_test(user_is_superuser)...\n",
"VAR_10 = get_object_or_404(TestRailConfiguration, pk=testrail_id)\n",
"VAR_10.delete()\n",
"VAR_4 = context_project_dashboard(VAR_0)\n",
"VAR_4['last_tab'] = 'test_rails'\n",
"return render(VAR_0, 'projects/project_dashboard.html', VAR_4)\n"
] | [
"@user_passes_test(user_is_superuser)...\n",
"testrail = get_object_or_404(TestRailConfiguration, pk=testrail_id)\n",
"testrail.delete()\n",
"context = context_project_dashboard(request)\n",
"context['last_tab'] = 'test_rails'\n",
"return render(request, 'projects/project_dashboard.html', context)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_21(VAR_10):...\n",
"VAR_9.append(1)\n",
"return False\n"
] | [
"def setup_bot(_bot):...\n",
"setup_bots.append(1)\n",
"return False\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"def FUNC_2(VAR_1, VAR_2):...\n",
"for VAR_13 in VAR_2.strip('/').split('/'):\n",
"VAR_1 = VAR_1[VAR_13]\n",
"FUNC_1(VAR_1)\n"
] | [
"def scan_git_subtree(tree, path):...\n",
"for p in path.strip('/').split('/'):\n",
"tree = tree[p]\n",
"scan_git_tree(tree)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Assign'",
"Expr'"
] |
[
"def FUNC_12(VAR_0):...\n",
"generator.run_program()\n"
] | [
"def generate_inf_file(args):...\n",
"generator.run_program()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"@api.public...\n",
"self.response.write(self.generate_xsrf_token({'some': 'data'}))\n"
] | [
"@api.public...\n",
"self.response.write(self.generate_xsrf_token({'some': 'data'}))\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"def FUNC_21(self, VAR_2):...\n",
"VAR_1 = frappe.get_meta(self.doctype)\n",
"if self.order_by:\n",
"VAR_2.order_by = self.order_by\n",
"VAR_2.order_by = ''\n",
"VAR_66 = len(self.fields) == 1 and (self.fields[0].lower().startswith(\n 'count(') or self.fields[0].lower().startswith('min(') or self.fields[0\n ].lower().startswith('max(')) and not self.group_by\n",
"if not VAR_66:\n",
"VAR_40 = VAR_41 = None\n",
"if VAR_1.sort_field and ',' in VAR_1.sort_field:\n",
"VAR_2.order_by = ', '.join(['`tab{0}`.`{1}` {2}'.format(self.doctype,\n VAR_34.split()[0].strip(), VAR_34.split()[1].strip()) for VAR_34 in\n VAR_1.sort_field.split(',')])\n",
"VAR_40 = VAR_1.sort_field or 'modified'\n",
"if VAR_1.is_submittable:\n",
"VAR_41 = VAR_1.sort_field and VAR_1.sort_order or 'desc'\n",
"VAR_2.order_by = '`tab{0}`.docstatus asc, {1}'.format(self.doctype, VAR_2.\n order_by)\n",
"VAR_2.order_by = '`tab{0}`.`{1}` {2}'.format(self.doctype, VAR_40 or\n 'modified', VAR_41 or 'desc')\n"
] | [
"def set_order_by(self, args):...\n",
"meta = frappe.get_meta(self.doctype)\n",
"if self.order_by:\n",
"args.order_by = self.order_by\n",
"args.order_by = ''\n",
"group_function_without_group_by = len(self.fields) == 1 and (self.fields[0]\n .lower().startswith('count(') or self.fields[0].lower().startswith(\n 'min(') or self.fields[0].lower().startswith('max(')) and not self.group_by\n",
"if not group_function_without_group_by:\n",
"sort_field = sort_order = None\n",
"if meta.sort_field and ',' in meta.sort_field:\n",
"args.order_by = ', '.join(['`tab{0}`.`{1}` {2}'.format(self.doctype, f.\n split()[0].strip(), f.split()[1].strip()) for f in meta.sort_field.\n split(',')])\n",
"sort_field = meta.sort_field or 'modified'\n",
"if meta.is_submittable:\n",
"sort_order = meta.sort_field and meta.sort_order or 'desc'\n",
"args.order_by = '`tab{0}`.docstatus asc, {1}'.format(self.doctype, args.\n order_by)\n",
"args.order_by = '`tab{0}`.`{1}` {2}'.format(self.doctype, sort_field or\n 'modified', sort_order or 'desc')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_4(VAR_2):...\n",
"VAR_8 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\users\\\\' + VAR_3 + '.db')\n",
"VAR_9 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + '\\\\cf.db')\n",
"VAR_10 = VAR_8.cursor()\n",
"VAR_11 = VAR_9.cursor()\n",
"VAR_11.execute('SELECT * FROM ' + VAR_2)\n",
"VAR_18 = list()\n",
"VAR_12 = VAR_11.fetchone()\n",
"while VAR_12 != None:\n",
"VAR_10.execute(\"SELECT * FROM result WHERE problem = '\" + str(VAR_12[0]) +\n \"' AND diff = '\" + str(VAR_12[1]) + \"' AND NOT verdict = 'OK'\")\n",
"VAR_8.close()\n",
"VAR_30 = VAR_10.fetchone()\n",
"VAR_9.close()\n",
"if VAR_30 != None and VAR_30 in VAR_4:\n",
"return VAR_18\n",
"VAR_18.append(VAR_30)\n",
"VAR_12 = VAR_11.fetchone()\n"
] | [
"def find_intersection(tag):...\n",
"conn = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\users\\\\' + username + '.db')\n",
"conn2 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + '\\\\cf.db')\n",
"cursor = conn.cursor()\n",
"cursor2 = conn2.cursor()\n",
"cursor2.execute('SELECT * FROM ' + tag)\n",
"a = list()\n",
"problem_and_diff = cursor2.fetchone()\n",
"while problem_and_diff != None:\n",
"cursor.execute(\"SELECT * FROM result WHERE problem = '\" + str(\n problem_and_diff[0]) + \"' AND diff = '\" + str(problem_and_diff[1]) +\n \"' AND NOT verdict = 'OK'\")\n",
"conn.close()\n",
"problem_and_diff_and_ok = cursor.fetchone()\n",
"conn2.close()\n",
"if problem_and_diff_and_ok != None and problem_and_diff_and_ok in tasks:\n",
"return a\n",
"a.append(problem_and_diff_and_ok)\n",
"problem_and_diff = cursor2.fetchone()\n"
] | [
0,
0,
0,
0,
0,
0,
4,
0,
4,
4,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Assign'"
] |
[
"@property...\n",
"return FUNC_0(self.file).st_mtime\n"
] | [
"@property...\n",
"return lstat(self.file).st_mtime\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_0(VAR_1, VAR_2=False, VAR_3=False):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_4 = namedtuple('spacket', ['rit_username', 'name', 'did_sign',\n 'total_signatures', 'required_signatures'])\n",
"VAR_5 = []\n",
"VAR_6 = get_number_required()\n",
"if VAR_2 and VAR_3:\n",
"VAR_6 -= 1\n",
"VAR_7 = FUNC_1(VAR_1, VAR_2, VAR_3)\n",
"VAR_8 = get_misc_signatures()\n",
"for pkt in FUNC_2():\n",
"VAR_0.error(e)\n",
"return VAR_5\n",
"VAR_9 = VAR_7.get(pkt.username)\n",
"VAR_10 = VAR_8.get(pkt.username)\n",
"if VAR_9 is None:\n",
"VAR_9 = False\n",
"if VAR_10 is None:\n",
"VAR_10 = 0\n",
"if VAR_10 > REQUIRED_MISC_SIGNATURES:\n",
"VAR_10 = REQUIRED_MISC_SIGNATURES\n",
"VAR_5.append(VAR_4(pkt.username, pkt.name, VAR_9, pkt.received + VAR_10, VAR_6)\n )\n"
] | [
"def current_packets(member, intro=False, onfloor=False):...\n",
"\"\"\"docstring\"\"\"\n",
"SPacket = namedtuple('spacket', ['rit_username', 'name', 'did_sign',\n 'total_signatures', 'required_signatures'])\n",
"packets = []\n",
"required = get_number_required()\n",
"if intro and onfloor:\n",
"required -= 1\n",
"signed_packets = get_signed_packets(member, intro, onfloor)\n",
"misc_signatures = get_misc_signatures()\n",
"for pkt in query_packets_with_signed():\n",
"LOGGER.error(e)\n",
"return packets\n",
"signed = signed_packets.get(pkt.username)\n",
"misc = misc_signatures.get(pkt.username)\n",
"if signed is None:\n",
"signed = False\n",
"if misc is None:\n",
"misc = 0\n",
"if misc > REQUIRED_MISC_SIGNATURES:\n",
"misc = REQUIRED_MISC_SIGNATURES\n",
"packets.append(SPacket(pkt.username, pkt.name, signed, pkt.received + misc,\n required))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"AugAssign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
""
] | [
"@utils.synchronized('3par', external=True)...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"@VAR_2.route('/base')...\n",
"return render_template('libraries/templates/html/base.html')\n"
] | [
"@endpoints.route('/base')...\n",
"return render_template('libraries/templates/html/base.html')\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_17 = [VAR_28.mtime for VAR_28 in self.input if VAR_28.exists]\n",
"if VAR_17:\n",
"return max(VAR_17)\n",
"return None\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"existing = [f.mtime for f in self.input if f.exists]\n",
"if existing:\n",
"return max(existing)\n",
"return None\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def __init__(self, VAR_23, *VAR_5, **VAR_6):...\n",
"assert isinstance(VAR_23, CLASS_6)\n",
"VAR_39 = CLASS_7(VAR_23.key_name, *VAR_23.source_expressions, **\n key_transform.extra)\n",
"super(CLASS_8, self).__init__(VAR_39, *VAR_5, **kwargs)\n"
] | [
"def __init__(self, key_transform, *args, **kwargs):...\n",
"assert isinstance(key_transform, JsonKeyTransform)\n",
"key_text_transform = JsonKeyTextTransform(key_transform.key_name, *\n key_transform.source_expressions, **key_transform.extra)\n",
"super(JsonKeyTransformTextLookupMixin, self).__init__(key_text_transform, *\n args, **kwargs)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assert'",
"Assign'",
"Expr'"
] |
[
"def FUNC_1(VAR_0):...\n",
"VAR_1 = psycopg2.connect('dbname=forum')\n",
"VAR_2 = VAR_1.cursor()\n",
"VAR_2.execute(\"INSERT INTO posts (content) VALUES ('%s')\" % VAR_0)\n",
"VAR_1.commit()\n",
"VAR_1.close()\n"
] | [
"def AddPost(content):...\n",
"conn = psycopg2.connect('dbname=forum')\n",
"cur = conn.cursor()\n",
"cur.execute(\"INSERT INTO posts (content) VALUES ('%s')\" % content)\n",
"conn.commit()\n",
"conn.close()\n"
] | [
0,
0,
0,
4,
4,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_6(self):...\n",
"if not CLASS_1.test_success(self):\n",
"return False\n",
"return True\n"
] | [
"def test_success(self):...\n",
"if not GenericRequest.test_success(self):\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_37(VAR_47, VAR_56):...\n",
"return os.path.relpath(VAR_47, VAR_56).startswith(os.pardir)\n"
] | [
"def is_outside(path, putative_parent):...\n",
"return os.path.relpath(path, putative_parent).startswith(os.pardir)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_8(VAR_13, VAR_16):...\n",
"VAR_35 = subprocess.Popen([VAR_13], stdin=subprocess.PIPE, VAR_36=\n subprocess.PIPE, VAR_37=subprocess.PIPE)\n",
"VAR_36, VAR_37 = VAR_35.communicate(json.dumps(VAR_16))\n",
"VAR_35.wait()\n",
"if VAR_35.returncode != 0:\n",
"return VAR_36\n"
] | [
"def render_executable(path, config):...\n",
"p = subprocess.Popen([path], stdin=subprocess.PIPE, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n",
"stdout, stderr = p.communicate(json.dumps(config))\n",
"p.wait()\n",
"if p.returncode != 0:\n",
"return stdout\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Return'"
] |
[
"def FUNC_26(self):...\n",
"if self._IsServerAlive():\n",
"VAR_23 = BaseRequest.PostDataToHandler(BuildRequestData(), 'debug_info')\n",
"VAR_23 = 'Server crashed, no debug info from server'\n",
"VAR_23 += \"\"\"\nServer running at: {0}\"\"\".format(BaseRequest.server_location)\n",
"VAR_23 += \"\"\"\nServer process ID: {0}\"\"\".format(self._server_popen.pid)\n",
"if self._server_stderr or self._server_stdout:\n",
"VAR_23 += \"\"\"\nServer logfiles:\n {0}\n {1}\"\"\".format(self._server_stdout,\n self._server_stderr)\n",
"return VAR_23\n"
] | [
"def DebugInfo(self):...\n",
"if self._IsServerAlive():\n",
"debug_info = BaseRequest.PostDataToHandler(BuildRequestData(), 'debug_info')\n",
"debug_info = 'Server crashed, no debug info from server'\n",
"debug_info += \"\"\"\nServer running at: {0}\"\"\".format(BaseRequest.server_location)\n",
"debug_info += \"\"\"\nServer process ID: {0}\"\"\".format(self._server_popen.pid)\n",
"if self._server_stderr or self._server_stdout:\n",
"debug_info += \"\"\"\nServer logfiles:\n {0}\n {1}\"\"\".format(self.\n _server_stdout, self._server_stderr)\n",
"return debug_info\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"AugAssign'",
"AugAssign'",
"Condition",
"AugAssign'",
"Return'"
] |
[
"import requests\n",
"import sqlite3\n",
"import os\n",
"from bs4 import BeautifulSoup\n",
"VAR_0 = {'math', 'strings', 'trees', 'graphs', 'dp', 'greedy', 'geometry',\n 'combinatorics'}\n",
"def FUNC_0():...\n",
"VAR_2 = 'http://codeforces.com/problemset/'\n",
"VAR_3 = requests.get(VAR_2)\n",
"VAR_4 = 0\n",
"VAR_5 = BeautifulSoup(VAR_3.text, 'lxml')\n",
"VAR_6 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + '\\\\cf.db')\n",
"VAR_7 = VAR_6.cursor()\n",
"VAR_7.execute('create table problems (problem INTEGER, diff CHAR)')\n",
"for i in VAR_0:\n",
"VAR_7.execute('create table ' + i + ' (problems INTEGER, diff CHAR)')\n",
"for link in VAR_5.find_all(attrs={'class': 'page-index'}):\n",
"VAR_13 = link.find('a')\n",
"VAR_8 = 0\n",
"VAR_14 = VAR_13.get('href').split('/')\n",
"VAR_9 = 0\n",
"VAR_4 = max(VAR_4, int(VAR_14[3]))\n",
"VAR_10 = False\n",
"for i in range(1, VAR_4 + 1):\n",
"VAR_3 = requests.get('http://codeforces.com/problemset/' + '/page/' + str(i))\n",
"VAR_6.commit()\n",
"VAR_5 = BeautifulSoup(VAR_3.text, 'lxml')\n",
"VAR_6.close()\n",
"VAR_15 = ''\n",
"VAR_11 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\settings.db')\n",
"for link in VAR_5.find_all('a'):\n",
"VAR_7 = VAR_11.cursor()\n",
"VAR_13 = link.get('href')\n",
"VAR_7.execute('string')\n",
"if VAR_13 != None and VAR_13.find('/problemset') != -1:\n",
"VAR_7.execute('create table last_update_problemset (problem STRING)')\n",
"VAR_13 = VAR_13.split('/')\n",
"VAR_7.execute('insert into last_update_problemset values (?)', (VAR_16,))\n",
"if len(VAR_13) == 5 and VAR_15 != VAR_13[3] + VAR_13[4]:\n",
"VAR_11.commit()\n",
"VAR_8 = VAR_13[3]\n",
"if len(VAR_13) == 4 and VAR_13[3] in VAR_0:\n",
"VAR_11.close()\n",
"VAR_9 = VAR_13[4]\n",
"VAR_7.execute('insert into ' + VAR_13[3] + ' values (?, ?)', (VAR_8, VAR_9))\n",
"def FUNC_1():...\n",
"VAR_15 = VAR_13[3] + VAR_13[4]\n",
"VAR_12 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\theory.db')\n",
"if not VAR_10:\n",
"VAR_7 = VAR_12.cursor()\n",
"VAR_10 = True\n",
"VAR_7.execute('insert into problems values (?, ?)', (VAR_8, VAR_9))\n",
"for i in VAR_0:\n",
"VAR_16 = VAR_15\n",
"VAR_7.execute('create table ' + str(i) + ' (link STRING)')\n",
"VAR_12.commit()\n",
"VAR_12.close()\n",
"VAR_1 = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'cf.db')\n",
"if not os.path.exists(VAR_1):\n",
"FUNC_0()\n",
"VAR_1 = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'theory.db')\n",
"if not os.path.exists(VAR_1):\n",
"FUNC_1()\n"
] | [
"import requests\n",
"import sqlite3\n",
"import os\n",
"from bs4 import BeautifulSoup\n",
"available_tags = {'math', 'strings', 'trees', 'graphs', 'dp', 'greedy',\n 'geometry', 'combinatorics'}\n",
"def create_cf_base():...\n",
"url = 'http://codeforces.com/problemset/'\n",
"r = requests.get(url)\n",
"max_page = 0\n",
"soup = BeautifulSoup(r.text, 'lxml')\n",
"base = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + '\\\\cf.db')\n",
"conn = base.cursor()\n",
"conn.execute('create table problems (problem INTEGER, diff CHAR)')\n",
"for i in available_tags:\n",
"conn.execute('create table ' + i + ' (problems INTEGER, diff CHAR)')\n",
"for link in soup.find_all(attrs={'class': 'page-index'}):\n",
"s = link.find('a')\n",
"a = 0\n",
"s2 = s.get('href').split('/')\n",
"b = 0\n",
"max_page = max(max_page, int(s2[3]))\n",
"f = False\n",
"for i in range(1, max_page + 1):\n",
"r = requests.get('http://codeforces.com/problemset/' + '/page/' + str(i))\n",
"base.commit()\n",
"soup = BeautifulSoup(r.text, 'lxml')\n",
"base.close()\n",
"old = ''\n",
"settings = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\settings.db')\n",
"for link in soup.find_all('a'):\n",
"conn = settings.cursor()\n",
"s = link.get('href')\n",
"conn.execute(\n 'create table users (chat_id INTEGER, username STRING, last_update STRING, last_problem STRING, state INTEGER)'\n )\n",
"if s != None and s.find('/problemset') != -1:\n",
"conn.execute('create table last_update_problemset (problem STRING)')\n",
"s = s.split('/')\n",
"conn.execute('insert into last_update_problemset values (?)', (last_update,))\n",
"if len(s) == 5 and old != s[3] + s[4]:\n",
"settings.commit()\n",
"a = s[3]\n",
"if len(s) == 4 and s[3] in available_tags:\n",
"settings.close()\n",
"b = s[4]\n",
"conn.execute('insert into ' + s[3] + ' values (?, ?)', (a, b))\n",
"def create_theory_table():...\n",
"old = s[3] + s[4]\n",
"theory = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\theory.db')\n",
"if not f:\n",
"conn = theory.cursor()\n",
"f = True\n",
"conn.execute('insert into problems values (?, ?)', (a, b))\n",
"for i in available_tags:\n",
"last_update = old\n",
"conn.execute('create table ' + str(i) + ' (link STRING)')\n",
"theory.commit()\n",
"theory.close()\n",
"path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'cf.db')\n",
"if not os.path.exists(path):\n",
"create_cf_base()\n",
"path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'theory.db')\n",
"if not os.path.exists(path):\n",
"create_theory_table()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Expr'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"For",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_2(self, VAR_5=None, VAR_6=None, VAR_7='', VAR_8=''):...\n",
"if VAR_6:\n",
"VAR_5 = VAR_6.apply_all(VAR_5)\n",
"if VAR_7 != '':\n",
"if hasattr(self.obj, VAR_7):\n",
"return VAR_5\n",
"if hasattr(getattr(self.obj, VAR_7), '_col_name'):\n",
"VAR_5 = VAR_5.order_by(VAR_7 + ' ' + VAR_8)\n",
"VAR_7 = getattr(getattr(self.obj, VAR_7), '_col_name')\n"
] | [
"def _get_base_query(self, query=None, filters=None, order_column='',...\n",
"if filters:\n",
"query = filters.apply_all(query)\n",
"if order_column != '':\n",
"if hasattr(self.obj, order_column):\n",
"return query\n",
"if hasattr(getattr(self.obj, order_column), '_col_name'):\n",
"query = query.order_by(order_column + ' ' + order_direction)\n",
"order_column = getattr(getattr(self.obj, order_column), '_col_name')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
4,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'"
] |
[
"@VAR_0.route('/')...\n",
"return render_template('homepage.html')\n"
] | [
"@app.route('/')...\n",
"return render_template('homepage.html')\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_7(self):...\n",
"VAR_34 = self.conf.get('shallow', False)\n",
"if self.ref_is_commit and VAR_34:\n",
"os.mkdir(self.directory)\n",
"VAR_58 = []\n",
"fork(['git', 'init'])\n",
"if VAR_34:\n",
"fork(['git', 'remote', 'add', 'origin', self.url.geturl()])\n",
"VAR_58 += ['--depth', '1']\n",
"if not self.ref_is_commit and self.ref != 'origin/HEAD':\n",
"fork(['git', 'fetch', '--depth', '1', 'origin', self.noremote_ref()])\n",
"VAR_58 += ['-b', self.noremote_ref()]\n",
"fork(['git', 'clone', '-n'] + VAR_58 + ['--', self.url.geturl(), self.\n directory])\n",
"fork(['git', 'checkout', self.ref])\n",
"fork(['git', 'checkout', self.ref, '--'])\n"
] | [
"def checkout(self):...\n",
"shallow = self.conf.get('shallow', False)\n",
"if self.ref_is_commit and shallow:\n",
"os.mkdir(self.directory)\n",
"extra_opts = []\n",
"fork(['git', 'init'])\n",
"if shallow:\n",
"fork(['git', 'remote', 'add', 'origin', self.url.geturl()])\n",
"extra_opts += ['--depth', '1']\n",
"if not self.ref_is_commit and self.ref != 'origin/HEAD':\n",
"fork(['git', 'fetch', '--depth', '1', 'origin', self.noremote_ref()])\n",
"extra_opts += ['-b', self.noremote_ref()]\n",
"fork(['git', 'clone', '-n'] + extra_opts + ['--', self.url.geturl(), self.\n directory])\n",
"fork(['git', 'checkout', self.ref])\n",
"fork(['git', 'checkout', self.ref, '--'])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"AugAssign'",
"Condition",
"Expr'",
"AugAssign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_5(self, VAR_5, VAR_6, VAR_7, VAR_8):...\n",
"return MailService(VAR_5, VAR_6, VAR_7, self._mail_address, VAR_8)\n"
] | [
"def _create_mail_service(self, mail_sender, mail_store, search_engine,...\n",
"return MailService(mail_sender, mail_store, search_engine, self.\n _mail_address, attachment_store)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_7(VAR_0, VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_15 = pd.read_sql_table(VAR_0, db.engine, columns=[attr])\n",
"print('FILL AVERAGE FAILED')\n",
"VAR_20 = VAR_15[VAR_5].mean()\n",
"db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = {2} WHERE \"{1}\" IS NULL'.format\n (VAR_0, VAR_5, VAR_20))\n"
] | [
"def fill_null_with_average(table_name, attr):...\n",
"\"\"\"docstring\"\"\"\n",
"dataframe = pd.read_sql_table(table_name, db.engine, columns=[attr])\n",
"print('FILL AVERAGE FAILED')\n",
"average = dataframe[attr].mean()\n",
"db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = {2} WHERE \"{1}\" IS NULL'.format\n (table_name, attr, average))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_2, VAR_3):...\n",
"self._parsed_content_packages = None\n",
"self._parsed_status_test_usernames = None\n",
"self.section = VAR_1\n",
"VAR_0['debug'] = str(VAR_3)\n",
"VAR_4 = os.path.dirname(VAR_2)\n",
"VAR_0['pysaml2_config'] = os.path.join(VAR_4, VAR_0['pysaml2_config'])\n",
"self.config = ConfigParser.ConfigParser(VAR_0)\n",
"if not self.config.read([VAR_2]):\n"
] | [
"def __init__(self, filename, debug):...\n",
"self._parsed_content_packages = None\n",
"self._parsed_status_test_usernames = None\n",
"self.section = _CONFIG_SECTION\n",
"_CONFIG_DEFAULTS['debug'] = str(debug)\n",
"cfgdir = os.path.dirname(filename)\n",
"_CONFIG_DEFAULTS['pysaml2_config'] = os.path.join(cfgdir, _CONFIG_DEFAULTS[\n 'pysaml2_config'])\n",
"self.config = ConfigParser.ConfigParser(_CONFIG_DEFAULTS)\n",
"if not self.config.read([filename]):\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition"
] |
[
"def FUNC_20(self, VAR_7):...\n",
"if '%(key)s' in VAR_7:\n",
"VAR_7 = VAR_7.replace('%(key)s', 'name')\n",
"return frappe.db.sql(VAR_7, as_dict=not self.as_list)\n"
] | [
"def run_custom_query(self, query):...\n",
"if '%(key)s' in query:\n",
"query = query.replace('%(key)s', 'name')\n",
"return frappe.db.sql(query, as_dict=not self.as_list)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'"
] |
[
"@property...\n",
"VAR_28 = list(self.output)\n",
"if self.benchmark:\n",
"VAR_28.append(self.benchmark)\n",
"return VAR_28\n"
] | [
"@property...\n",
"products = list(self.output)\n",
"if self.benchmark:\n",
"products.append(self.benchmark)\n",
"return products\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_24(self):...\n",
"if not self.DiagnosticsForCurrentFileReady():\n",
"return\n",
"self._diag_interface.UpdateWithNewDiagnostics(self.\n GetDiagnosticsFromStoredRequest())\n"
] | [
"def UpdateDiagnosticInterface(self):...\n",
"if not self.DiagnosticsForCurrentFileReady():\n",
"return\n",
"self._diag_interface.UpdateWithNewDiagnostics(self.\n GetDiagnosticsFromStoredRequest())\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Expr'"
] |
[
"def FUNC_1(self, **VAR_3):...\n",
"VAR_13 = super().get_context_data(**kwargs)\n",
"VAR_13['is_mobile'] = FUNC_0(self.request.META['HTTP_USER_AGENT'])\n",
"return VAR_13\n"
] | [
"def get_context_data(self, **kwargs):...\n",
"context = super().get_context_data(**kwargs)\n",
"context['is_mobile'] = is_mobile(self.request.META['HTTP_USER_AGENT'])\n",
"return context\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_0(self):...\n",
"super().get_common_objects()\n",
"self.get_summary_submissions()\n",
"self.models = []\n",
"for url, name in self.exercise.get_models():\n",
"self.note('models')\n",
"VAR_33 = request_for_response(url)\n",
"self.models.append({'name': name})\n",
"self.models.append({'name': name, 'content': VAR_33.text, 'html': \n 'text/html' in VAR_33.headers.get('Content-Type')})\n"
] | [
"def get_common_objects(self):...\n",
"super().get_common_objects()\n",
"self.get_summary_submissions()\n",
"self.models = []\n",
"for url, name in self.exercise.get_models():\n",
"self.note('models')\n",
"response = request_for_response(url)\n",
"self.models.append({'name': name})\n",
"self.models.append({'name': name, 'content': response.text, 'html': \n 'text/html' in response.headers.get('Content-Type')})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"For",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_10(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_8 = 'exec-{}.py'\n",
"if six.PY2:\n",
"VAR_8 = VAR_8.format('py2')\n",
"VAR_8 = VAR_8.format('py3')\n",
"VAR_2 = {'SEVERITY': {'MEDIUM': 2}, 'CONFIDENCE': {'HIGH': 2}}\n",
"VAR_2 = {'SEVERITY': {'MEDIUM': 1}, 'CONFIDENCE': {'HIGH': 1}}\n",
"self.check_example(VAR_8, VAR_2)\n"
] | [
"def test_exec(self):...\n",
"\"\"\"docstring\"\"\"\n",
"filename = 'exec-{}.py'\n",
"if six.PY2:\n",
"filename = filename.format('py2')\n",
"filename = filename.format('py3')\n",
"expect = {'SEVERITY': {'MEDIUM': 2}, 'CONFIDENCE': {'HIGH': 2}}\n",
"expect = {'SEVERITY': {'MEDIUM': 1}, 'CONFIDENCE': {'HIGH': 1}}\n",
"self.check_example(filename, expect)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_28(self, *VAR_76, **VAR_52):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.resources = VAR_76, VAR_52\n",
"return VAR_101\n"
] | [
"def resources(self, *args, **resources):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.resources = args, resources\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"async def FUNC_8(self):...\n",
"def FUNC_14():...\n",
"self.logger.info('recursing', VAR_52='5b8498e4-868d-413c-a67e-004516b8452c',\n VAR_56=len(self.pending), have=len(self.have) - len(self.running),\n running=len(self.running))\n",
"VAR_30 = 1\n",
"self.have = set()\n",
"self.logger.info('cancel', VAR_52='d58154c8-ec27-40f2-ab9e-e25c1b21cd88',\n VAR_56=len(self.pending), have=len(self.have) - len(self.running),\n running=len(self.running))\n",
"VAR_47 = await asyncio.gather(*self.running, return_exceptions=True)\n",
"self.pending = set([CLASS_8(self.url, VAR_33=0)])\n",
"for r in VAR_47:\n",
"while self.pending:\n",
"if isinstance(r, Exception):\n",
"self.running = set()\n",
"VAR_53 = self.pending.pop()\n",
"FUNC_14()\n",
"self.have.add(VAR_53)\n",
"VAR_54 = asyncio.ensure_future(self.fetch(VAR_53, VAR_30))\n",
"self.running.add(VAR_54)\n",
"VAR_30 += 1\n",
"FUNC_14()\n",
"if len(self.running) >= self.concurrency or not self.pending:\n",
"VAR_47, VAR_56 = await asyncio.wait(self.running, return_when=asyncio.\n FIRST_COMPLETED)\n",
"self.running.difference_update(VAR_47)\n",
"for r in VAR_47:\n",
"r.result()\n"
] | [
"async def run(self):...\n",
"def log():...\n",
"self.logger.info('recursing', uuid='5b8498e4-868d-413c-a67e-004516b8452c',\n pending=len(self.pending), have=len(self.have) - len(self.running),\n running=len(self.running))\n",
"seqnum = 1\n",
"self.have = set()\n",
"self.logger.info('cancel', uuid='d58154c8-ec27-40f2-ab9e-e25c1b21cd88',\n pending=len(self.pending), have=len(self.have) - len(self.running),\n running=len(self.running))\n",
"done = await asyncio.gather(*self.running, return_exceptions=True)\n",
"self.pending = set([SetEntry(self.url, depth=0)])\n",
"for r in done:\n",
"while self.pending:\n",
"if isinstance(r, Exception):\n",
"self.running = set()\n",
"u = self.pending.pop()\n",
"log()\n",
"self.have.add(u)\n",
"t = asyncio.ensure_future(self.fetch(u, seqnum))\n",
"self.running.add(t)\n",
"seqnum += 1\n",
"log()\n",
"if len(self.running) >= self.concurrency or not self.pending:\n",
"done, pending = await asyncio.wait(self.running, return_when=asyncio.\n FIRST_COMPLETED)\n",
"self.running.difference_update(done)\n",
"for r in done:\n",
"r.result()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"AugAssign'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"For",
"Expr'"
] |
[
"def FUNC_4(self, VAR_0, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_4 = VAR_0.args.get('attrs', [''])[0]\n",
"if VAR_4:\n",
"VAR_7 = {}\n",
"return VAR_3\n",
"for VAR_8 in (VAR_4.decode('utf-8').split(',') + self.__builtin_attributes__):\n",
"if VAR_8 in VAR_3:\n",
"return VAR_7\n",
"VAR_7[VAR_8] = VAR_3[VAR_8]\n"
] | [
"def filter_attributes(self, request, data):...\n",
"\"\"\"docstring\"\"\"\n",
"attrs = request.args.get('attrs', [''])[0]\n",
"if attrs:\n",
"filtered_data = {}\n",
"return data\n",
"for a in (attrs.decode('utf-8').split(',') + self.__builtin_attributes__):\n",
"if a in data:\n",
"return filtered_data\n",
"filtered_data[a] = data[a]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Return'",
"For",
"Condition",
"Return'",
"Assign'"
] |
[
"def __str__(self):...\n",
"VAR_23 = 'OpenOCD at localhost port ' + str(self.port)\n",
"return VAR_23\n"
] | [
"def __str__(self):...\n",
"string = 'OpenOCD at localhost port ' + str(self.port)\n",
"return string\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"@staticmethod...\n",
"for i in xrange(256):\n",
"yield chr(i)\n"
] | [
"@staticmethod...\n",
"for i in xrange(256):\n",
"yield chr(i)\n"
] | [
0,
0,
0
] | [
"Condition",
"For",
"Expr'"
] |
[
"@BaseHandler.check_is_group_user('Announcement Manager')...\n",
"if not VAR_6:\n",
"if not Announce.by_id(VAR_6, self.sql_session).scalar():\n",
"VAR_8 = AttachmentList.by_ann_id(VAR_6, self.sql_session)\n",
"VAR_9 = VAR_8.all()\n",
"for old_att in VAR_9:\n",
"shutil.rmtree('file/%s' % old_att.key)\n",
"VAR_8.delete()\n",
"Announce.by_id(VAR_6, self.sql_session).delete()\n",
"self.write({'success': True})\n"
] | [
"@BaseHandler.check_is_group_user('Announcement Manager')...\n",
"if not ann_id:\n",
"if not Announce.by_id(ann_id, self.sql_session).scalar():\n",
"q = AttachmentList.by_ann_id(ann_id, self.sql_session)\n",
"old_atts = q.all()\n",
"for old_att in old_atts:\n",
"shutil.rmtree('file/%s' % old_att.key)\n",
"q.delete()\n",
"Announce.by_id(ann_id, self.sql_session).delete()\n",
"self.write({'success': True})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"For",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@tornado.gen.coroutine...\n",
"VAR_17 = 'INSERT INTO districts(name) VALUES (%s)'\n",
"VAR_18 = VAR_8,\n",
"yield VAR_1.execute(query=sql, VAR_18=params)\n"
] | [
"@tornado.gen.coroutine...\n",
"sql = 'INSERT INTO districts(name) VALUES (%s)'\n",
"params = name,\n",
"yield tx.execute(query=sql, params=params)\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_13(VAR_18, VAR_19):...\n",
"VAR_6 = {}\n",
"for id_, VAR_21 in enumerate(VAR_18.description):\n",
"VAR_6[VAR_21[0]] = VAR_19[id_]\n",
"return VAR_6\n"
] | [
"def dict_factory(cursor, row):...\n",
"dictionary = {}\n",
"for id_, column in enumerate(cursor.description):\n",
"dictionary[column[0]] = row[id_]\n",
"return dictionary\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Return'"
] |
[
"def FUNC_17(self):...\n",
"VAR_7 = [('x', 'y'), (1, 2)]\n",
"VAR_8 = [('key', 'filename', 'file data')]\n",
"VAR_9, VAR_10 = url_helper.EncodeMultipartFormData()\n",
"self.assertTrue(VAR_9.startswith('multipart/form-data; boundary='))\n",
"self.assertEqual('', VAR_10)\n",
"VAR_9, VAR_10 = url_helper.EncodeMultipartFormData(VAR_7=fields)\n",
"self.assertTrue(VAR_9.startswith('multipart/form-data; boundary='))\n",
"self.assertTrue('name=\"x\"\\r\\n\\r\\ny' in VAR_10, VAR_10)\n",
"self.assertTrue('name=\"1\"\\r\\n\\r\\n2' in VAR_10, VAR_10)\n",
"VAR_9, VAR_10 = url_helper.EncodeMultipartFormData(VAR_8=files)\n",
"self.assertTrue(VAR_9.startswith('multipart/form-data; boundary='))\n",
"self.assertTrue('name=\"key\"; filename=\"filename\"' in VAR_10, VAR_10)\n",
"self.assertTrue('file data' in VAR_10, VAR_10)\n",
"VAR_9, VAR_10 = url_helper.EncodeMultipartFormData(VAR_7=fields, VAR_8=files)\n",
"self.assertTrue(VAR_9.startswith('multipart/form-data; boundary='))\n",
"self.assertTrue('name=\"x\"\\r\\n\\r\\ny' in VAR_10, VAR_10)\n",
"self.assertTrue('name=\"1\"\\r\\n\\r\\n2' in VAR_10, VAR_10)\n"
] | [
"def testEncodeMultipartFormData(self):...\n",
"fields = [('x', 'y'), (1, 2)]\n",
"files = [('key', 'filename', 'file data')]\n",
"content_type, body = url_helper.EncodeMultipartFormData()\n",
"self.assertTrue(content_type.startswith('multipart/form-data; boundary='))\n",
"self.assertEqual('', body)\n",
"content_type, body = url_helper.EncodeMultipartFormData(fields=fields)\n",
"self.assertTrue(content_type.startswith('multipart/form-data; boundary='))\n",
"self.assertTrue('name=\"x\"\\r\\n\\r\\ny' in body, body)\n",
"self.assertTrue('name=\"1\"\\r\\n\\r\\n2' in body, body)\n",
"content_type, body = url_helper.EncodeMultipartFormData(files=files)\n",
"self.assertTrue(content_type.startswith('multipart/form-data; boundary='))\n",
"self.assertTrue('name=\"key\"; filename=\"filename\"' in body, body)\n",
"self.assertTrue('file data' in body, body)\n",
"content_type, body = url_helper.EncodeMultipartFormData(fields=fields,\n files=files)\n",
"self.assertTrue(content_type.startswith('multipart/form-data; boundary='))\n",
"self.assertTrue('name=\"x\"\\r\\n\\r\\ny' in body, body)\n",
"self.assertTrue('name=\"1\"\\r\\n\\r\\n2' in body, body)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@VAR_0.route('/table_detection')...\n",
"return render_template('table_detection.html', wait=WAIT_AFTER_CRAWLING)\n"
] | [
"@app.route('/table_detection')...\n",
"return render_template('table_detection.html', wait=WAIT_AFTER_CRAWLING)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_0(VAR_0):...\n",
"VAR_11 = ['Android', 'iPhone', 'SymbianOS', 'Windows Phone', 'iPad', 'iPod']\n",
"for d in VAR_11:\n",
"if d in VAR_0:\n",
"return False\n",
"return True\n"
] | [
"def is_mobile(useragent):...\n",
"devices = ['Android', 'iPhone', 'SymbianOS', 'Windows Phone', 'iPad', 'iPod']\n",
"for d in devices:\n",
"if d in useragent:\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14, VAR_15 = new_log_monitor_log_file()\n",
"VAR_16 = ray.services.start_log_monitor(self.redis_address, self.\n _node_ip_address, VAR_14=stdout_file, VAR_15=stderr_file,\n redis_password=self._ray_params.redis_password)\n",
"assert VAR_13.PROCESS_TYPE_LOG_MONITOR not in self.all_processes\n",
"self.all_processes[VAR_13.PROCESS_TYPE_LOG_MONITOR] = [VAR_16]\n"
] | [
"def start_log_monitor(self):...\n",
"\"\"\"docstring\"\"\"\n",
"stdout_file, stderr_file = new_log_monitor_log_file()\n",
"process_info = ray.services.start_log_monitor(self.redis_address, self.\n _node_ip_address, stdout_file=stdout_file, stderr_file=stderr_file,\n redis_password=self._ray_params.redis_password)\n",
"assert ray_constants.PROCESS_TYPE_LOG_MONITOR not in self.all_processes\n",
"self.all_processes[ray_constants.PROCESS_TYPE_LOG_MONITOR] = [process_info]\n"
] | [
0,
0,
0,
6,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assert'",
"Assign'"
] |
[
"def FUNC_16(self, VAR_9=True):...\n",
"\"\"\"docstring\"\"\"\n",
"self._kill_process_type(VAR_13.PROCESS_TYPE_REDIS_SERVER, VAR_9=check_alive)\n"
] | [
"def kill_redis(self, check_alive=True):...\n",
"\"\"\"docstring\"\"\"\n",
"self._kill_process_type(ray_constants.PROCESS_TYPE_REDIS_SERVER,\n check_alive=check_alive)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"def FUNC_7(VAR_3, VAR_9):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_9 == 'login':\n",
"return external_auth_login(VAR_3)\n",
"if VAR_9 == 'register':\n",
"return external_auth_register(VAR_3)\n"
] | [
"def _external_auth_intercept(request, mode):...\n",
"\"\"\"docstring\"\"\"\n",
"if mode == 'login':\n",
"return external_auth_login(request)\n",
"if mode == 'register':\n",
"return external_auth_register(request)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Condition",
"Return'"
] |
[
"@app.route('/api/indi_profiles/<id>', methods=['PUT'])...\n",
"VAR_16 = None\n",
"profile.update(VAR_3)\n",
"VAR_16 = profile.to_map()\n",
"return VAR_16\n"
] | [
"@app.route('/api/indi_profiles/<id>', methods=['PUT'])...\n",
"updated_profile = None\n",
"profile.update(json)\n",
"updated_profile = profile.to_map()\n",
"return updated_profile\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_0(self):...\n",
"if isinstance(self.model_field, SmartListFilter):\n",
"return self.model_field.title\n",
"return super(CLASS_7, self).get_title()\n"
] | [
"def get_title(self):...\n",
"if isinstance(self.model_field, SmartListFilter):\n",
"return self.model_field.title\n",
"return super(SmartFilter, self).get_title()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"@staticmethod...\n",
"VAR_31 = openid.get_current_user(VAR_3)\n",
"return CLASS_4(VAR_31.sub, VAR_31.email, VAR_31.picture) if VAR_31 else None\n"
] | [
"@staticmethod...\n",
"user = openid.get_current_user(request)\n",
"return CurrentUser(user.sub, user.email, user.picture) if user else None\n"
] | [
0,
0,
0
] | [
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_8(self):...\n",
"self._tab_after('h')\n",
"eq_(self.terminal.method_calls, [('write', ('el',), {})])\n",
"self.terminal.reset_mock()\n",
"self.oms_ssh.handle_TAB()\n",
"eq_(self.terminal.method_calls, [('write', ('',), {}), ('nextLine', (), {}),\n ('write', ('help hello\\n',), {}), ('write', (self.oms_ssh.ps[0] +\n 'hel',), {})])\n"
] | [
"def test_complete_prefix(self):...\n",
"self._tab_after('h')\n",
"eq_(self.terminal.method_calls, [('write', ('el',), {})])\n",
"self.terminal.reset_mock()\n",
"self.oms_ssh.handle_TAB()\n",
"eq_(self.terminal.method_calls, [('write', ('',), {}), ('nextLine', (), {}),\n ('write', ('help hello\\n',), {}), ('write', (self.oms_ssh.ps[0] +\n 'hel',), {})])\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def __hash__(self):...\n",
"return hash(self.name)\n"
] | [
"def __hash__(self):...\n",
"return hash(self.name)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"VAR_4 = '/api/apps'\n",
"VAR_5 = self.client.post(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_6 = VAR_5.data['id']\n",
"VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 0)\n",
"VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_8 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_4 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_8 = {'web': 4, 'worker': 2}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 204)\n",
"VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 6)\n",
"VAR_4 = '/api/apps/{app_id}'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"VAR_4 = '/api/apps/{app_id}/containers/web'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 4)\n",
"VAR_9 = VAR_5.data['results'][0]['num']\n",
"VAR_4 = '/api/apps/{app_id}/containers/web/{num}'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(VAR_5.data['num'], VAR_9)\n",
"VAR_4 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_8 = {'web': 2, 'worker': 1}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 204)\n",
"VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 3)\n",
"self.assertEqual(max(VAR_7['num'] for VAR_7 in VAR_5.data['results']), 2)\n",
"VAR_4 = '/api/apps/{app_id}'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"VAR_4 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_8 = {'web': 0, 'worker': 0}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 204)\n",
"VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 0)\n",
"VAR_4 = '/api/apps/{app_id}'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n"
] | [
"def test_container_api_heroku(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 0)\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 4, 'worker': 2}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 6)\n",
"url = '/api/apps/{app_id}'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"url = '/api/apps/{app_id}/containers/web'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 4)\n",
"num = response.data['results'][0]['num']\n",
"url = '/api/apps/{app_id}/containers/web/{num}'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(response.data['num'], num)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 2, 'worker': 1}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 3)\n",
"self.assertEqual(max(c['num'] for c in response.data['results']), 2)\n",
"url = '/api/apps/{app_id}'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 0, 'worker': 0}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 0)\n",
"url = '/api/apps/{app_id}'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n"
] | [
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_23(self, VAR_28):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_58 = VAR_28,\n",
"VAR_19 = ''\n",
"if self.execute('SELECT path FROM history WHERE nzo_id=?', VAR_58):\n",
"return VAR_19\n",
"VAR_19 = self.c.fetchone().get('path')\n"
] | [
"def get_path(self, nzo_id):...\n",
"\"\"\"docstring\"\"\"\n",
"t = nzo_id,\n",
"path = ''\n",
"if self.execute('SELECT path FROM history WHERE nzo_id=?', t):\n",
"return path\n",
"path = self.c.fetchone().get('path')\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'"
] |
[
"@VAR_0.route('/delete_predicate', methods=['POST'])...\n",
"VAR_1 = get_dataset_with_id(request.args.get('dataset_id'))\n",
"VAR_7 = table_name_to_object(VAR_1.working_copy)\n",
"VAR_8 = ''\n",
"VAR_9 = []\n",
"VAR_10 = []\n",
"VAR_11 = []\n",
"VAR_12 = []\n",
"for VAR_26 in request.form:\n",
"if VAR_26.startswith('column'):\n",
"VAR_9.sort()\n",
"VAR_9.append(VAR_26)\n",
"if VAR_26.startswith('condition'):\n",
"VAR_10.sort()\n",
"VAR_10.append(VAR_26)\n",
"if VAR_26.startswith('logical'):\n",
"VAR_12.sort()\n",
"VAR_12.append(VAR_26)\n",
"if VAR_26.startswith('operator'):\n",
"VAR_11.sort()\n",
"VAR_11.append(VAR_26)\n",
"for VAR_26 in range(len(VAR_9)):\n",
"if VAR_26 != len(VAR_9) - 1:\n",
"delete_rows(VAR_7.name, VAR_8)\n",
"flash('condition \"{0}\" not valid'.format(VAR_8), 'danger')\n",
"flash('successfully deleted rows using condition \"{0}\"'.format(VAR_8),\n 'success')\n",
"VAR_8 += '\"' + request.form[VAR_9[VAR_26 + 1]] + '\"'\n",
"VAR_8 += '\"' + request.form[VAR_9[0]] + '\"'\n",
"create_action('rows deleted with condition \"{0}\"'.format(VAR_8), VAR_1.id,\n current_user.id)\n",
"return redirect(request.referrer)\n",
"if request.form[VAR_11[VAR_26 + 1]] == 'CONTAINS':\n",
"if request.form[VAR_11[0]] == 'CONTAINS':\n",
"VAR_8 += ' ~ '\n",
"if request.form[VAR_11[VAR_26 + 1]] == 'NOT CONTIANS':\n",
"VAR_8 += ' ~ '\n",
"if request.form[VAR_11[0]] == 'NOT CONTIANS':\n",
"VAR_8 += \"'\" + request.form[VAR_10[VAR_26 + 1]] + \"'\"\n",
"VAR_8 += ' !~ '\n",
"VAR_8 += request.form[VAR_11[VAR_26 + 1]]\n",
"VAR_8 += \"'\" + request.form[VAR_10[0]] + \"'\"\n",
"VAR_8 += ' !~ '\n",
"VAR_8 += request.form[VAR_11[0]]\n",
"VAR_8 += ' ' + request.form[VAR_12[VAR_26]] + ' '\n"
] | [
"@_transform.route('/delete_predicate', methods=['POST'])...\n",
"dataset = get_dataset_with_id(request.args.get('dataset_id'))\n",
"table = table_name_to_object(dataset.working_copy)\n",
"condition = ''\n",
"columns = []\n",
"conditions = []\n",
"operators = []\n",
"logics = []\n",
"for i in request.form:\n",
"if i.startswith('column'):\n",
"columns.sort()\n",
"columns.append(i)\n",
"if i.startswith('condition'):\n",
"conditions.sort()\n",
"conditions.append(i)\n",
"if i.startswith('logical'):\n",
"logics.sort()\n",
"logics.append(i)\n",
"if i.startswith('operator'):\n",
"operators.sort()\n",
"operators.append(i)\n",
"for i in range(len(columns)):\n",
"if i != len(columns) - 1:\n",
"delete_rows(table.name, condition)\n",
"flash('condition \"{0}\" not valid'.format(condition), 'danger')\n",
"flash('successfully deleted rows using condition \"{0}\"'.format(condition),\n 'success')\n",
"condition += '\"' + request.form[columns[i + 1]] + '\"'\n",
"condition += '\"' + request.form[columns[0]] + '\"'\n",
"create_action('rows deleted with condition \"{0}\"'.format(condition),\n dataset.id, current_user.id)\n",
"return redirect(request.referrer)\n",
"if request.form[operators[i + 1]] == 'CONTAINS':\n",
"if request.form[operators[0]] == 'CONTAINS':\n",
"condition += ' ~ '\n",
"if request.form[operators[i + 1]] == 'NOT CONTIANS':\n",
"condition += ' ~ '\n",
"if request.form[operators[0]] == 'NOT CONTIANS':\n",
"condition += \"'\" + request.form[conditions[i + 1]] + \"'\"\n",
"condition += ' !~ '\n",
"condition += request.form[operators[i + 1]]\n",
"condition += \"'\" + request.form[conditions[0]] + \"'\"\n",
"condition += ' !~ '\n",
"condition += request.form[operators[0]]\n",
"condition += ' ' + request.form[logics[i]] + ' '\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
4,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"For",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"AugAssign'",
"AugAssign'",
"Expr'",
"Return'",
"For",
"For",
"AugAssign'",
"For",
"AugAssign'",
"For",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'"
] |
[
"@jwt_required...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = f'insert into admins values (%s);'\n",
"VAR_3 = request.get_json()\n",
"VAR_4 = VAR_3['email'],\n",
"database_utilities.execute_query(VAR_2, VAR_4)\n"
] | [
"@jwt_required...\n",
"\"\"\"docstring\"\"\"\n",
"query = f'insert into admins values (%s);'\n",
"json_data = request.get_json()\n",
"parameters = json_data['email'],\n",
"database_utilities.execute_query(query, parameters)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@tornado.gen.coroutine...\n",
"VAR_17 = (\n 'SELECT month(date) as month, day(date) as day FROM reports WHERE year(date) = %s'\n )\n",
"VAR_18 = VAR_2,\n",
"VAR_19 = yield VAR_1.execute(query=sql, VAR_18=params)\n",
"return VAR_19.fetchall()\n"
] | [
"@tornado.gen.coroutine...\n",
"sql = (\n 'SELECT month(date) as month, day(date) as day FROM reports WHERE year(date) = %s'\n )\n",
"params = year,\n",
"cursor = yield tx.execute(query=sql, params=params)\n",
"return cursor.fetchall()\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_0(self, VAR_2, VAR_3, VAR_4):...\n",
"if not VAR_4:\n",
"return\n",
"if validate_hash(VAR_4):\n",
"if validate_url(VAR_4):\n",
"VAR_21 = VirusTotalAPI().hash_fetch(VAR_4)\n",
"VAR_2['errors'].append('Error retrieving file hash: %s' % e)\n",
"VAR_18 = Files.create(VAR_3, VAR_4, VAR_21)\n",
"VAR_2['data'].append({'type': 'url', 'data': VAR_4})\n",
"VAR_2['errors'].append(\"'%s' was neither a valid hash or url\" % VAR_4)\n",
"return\n",
"VAR_2['data'].append({'type': 'file', 'data': VAR_18})\n",
"return\n",
"return\n"
] | [
"def _handle_string(self, submit, tmppath, line):...\n",
"if not line:\n",
"return\n",
"if validate_hash(line):\n",
"if validate_url(line):\n",
"filedata = VirusTotalAPI().hash_fetch(line)\n",
"submit['errors'].append('Error retrieving file hash: %s' % e)\n",
"filepath = Files.create(tmppath, line, filedata)\n",
"submit['data'].append({'type': 'url', 'data': line})\n",
"submit['errors'].append(\"'%s' was neither a valid hash or url\" % line)\n",
"return\n",
"submit['data'].append({'type': 'file', 'data': filepath})\n",
"return\n",
"return\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Return'",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_16():...\n",
"VAR_0 = get_db()\n"
] | [
"def init():...\n",
"db = get_db()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"VAR_18 = self.common.create_volume(VAR_6)\n",
"self.common.client_logout()\n",
"return {'metadata': VAR_18}\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"metadata = self.common.create_volume(volume)\n",
"self.common.client_logout()\n",
"return {'metadata': metadata}\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_21(VAR_10, VAR_11, VAR_12):...\n",
"self.log.info('Termination signal %s recieved', repr((VAR_10, VAR_11, VAR_12)))\n",
"self.term()\n"
] | [
"def term_handler(interface, method, data):...\n",
"self.log.info('Termination signal %s recieved', repr((interface, method, data))\n )\n",
"self.term()\n"
] | [
0,
7,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"import copy\n",
"from awx.main.scheduler.dag_simple import SimpleDAG\n",
"def __init__(self, VAR_0=None):...\n",
"super(CLASS_0, self).__init__()\n",
"if VAR_0:\n",
"self._init_graph(VAR_0)\n",
"def FUNC_0(self, VAR_0):...\n",
"VAR_1 = VAR_0.workflow_job_nodes\n",
"VAR_2 = VAR_1.prefetch_related('success_nodes', 'failure_nodes', 'always_nodes'\n ).all()\n",
"for workflow_node in VAR_2:\n",
"self.add_node(workflow_node)\n",
"for node_type in ['success_nodes', 'failure_nodes', 'always_nodes']:\n",
"for workflow_node in VAR_2:\n",
"def FUNC_1(self):...\n",
"VAR_15 = getattr(workflow_node, node_type).all()\n",
"VAR_3 = self.get_root_nodes()\n",
"for related_node in VAR_15:\n",
"VAR_4 = VAR_3\n",
"self.add_edge(workflow_node, related_node, node_type)\n",
"VAR_5 = []\n",
"for index, n in enumerate(VAR_4):\n",
"VAR_10 = n['node_object']\n",
"return [n['node_object'] for n in VAR_5]\n",
"VAR_11 = VAR_10.job\n",
"if not VAR_11 and VAR_10.do_not_run is False:\n",
"VAR_5.append(n)\n",
"if VAR_11 and VAR_11.status not in ['failed', 'successful']:\n",
"if VAR_11 and VAR_11.status == 'failed':\n",
"VAR_13 = self.get_dependencies(VAR_10, 'failure_nodes')\n",
"if VAR_11 and VAR_11.status == 'successful':\n",
"VAR_14 = self.get_dependencies(VAR_10, 'always_nodes')\n",
"VAR_12 = self.get_dependencies(VAR_10, 'success_nodes')\n",
"VAR_16 = VAR_13 + VAR_14\n",
"VAR_14 = self.get_dependencies(VAR_10, 'always_nodes')\n",
"VAR_4.extend(VAR_16)\n",
"VAR_16 = VAR_12 + VAR_14\n",
"VAR_4.extend(VAR_16)\n"
] | [
"import copy\n",
"from awx.main.scheduler.dag_simple import SimpleDAG\n",
"def __init__(self, workflow_job=None):...\n",
"super(WorkflowDAG, self).__init__()\n",
"if workflow_job:\n",
"self._init_graph(workflow_job)\n",
"def _init_graph(self, workflow_job):...\n",
"node_qs = workflow_job.workflow_job_nodes\n",
"workflow_nodes = node_qs.prefetch_related('success_nodes', 'failure_nodes',\n 'always_nodes').all()\n",
"for workflow_node in workflow_nodes:\n",
"self.add_node(workflow_node)\n",
"for node_type in ['success_nodes', 'failure_nodes', 'always_nodes']:\n",
"for workflow_node in workflow_nodes:\n",
"def bfs_nodes_to_run(self):...\n",
"related_nodes = getattr(workflow_node, node_type).all()\n",
"root_nodes = self.get_root_nodes()\n",
"for related_node in related_nodes:\n",
"nodes = root_nodes\n",
"self.add_edge(workflow_node, related_node, node_type)\n",
"nodes_found = []\n",
"for index, n in enumerate(nodes):\n",
"obj = n['node_object']\n",
"return [n['node_object'] for n in nodes_found]\n",
"job = obj.job\n",
"if not job and obj.do_not_run is False:\n",
"nodes_found.append(n)\n",
"if job and job.status not in ['failed', 'successful']:\n",
"if job and job.status == 'failed':\n",
"children_failed = self.get_dependencies(obj, 'failure_nodes')\n",
"if job and job.status == 'successful':\n",
"children_always = self.get_dependencies(obj, 'always_nodes')\n",
"children_success = self.get_dependencies(obj, 'success_nodes')\n",
"children_all = children_failed + children_always\n",
"children_always = self.get_dependencies(obj, 'always_nodes')\n",
"nodes.extend(children_all)\n",
"children_all = children_success + children_always\n",
"nodes.extend(children_all)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"ImportFrom'",
"FunctionDef'",
"Expr'",
"Condition",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Expr'",
"For",
"For",
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_25(self, VAR_14):...\n",
""
] | [
"def is_fk(self, col_name):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_7(self):...\n",
"self.assert404(self.client.get(url_for('oauthclient.authorized', remote_app\n ='invalid')))\n",
"self.assert404(self.client.get(url_for('oauthclient.disconnect', remote_app\n ='invalid')))\n"
] | [
"def test_no_remote_app(self):...\n",
"self.assert404(self.client.get(url_for('oauthclient.authorized', remote_app\n ='invalid')))\n",
"self.assert404(self.client.get(url_for('oauthclient.disconnect', remote_app\n ='invalid')))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_8(self):...\n",
""
] | [
"def describe(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_5(self, VAR_11='', VAR_6=None):...\n",
"VAR_5 = self.session.query(self.obj)\n",
"VAR_5 = self._get_base_query(VAR_5=query, VAR_6=filters)\n",
"VAR_25 = VAR_5.all()\n",
"VAR_26 = GroupByDateMonth(VAR_11, 'Group by Month')\n",
"return VAR_26.apply(VAR_25)\n"
] | [
"def query_month_group(self, group_by='', filters=None):...\n",
"query = self.session.query(self.obj)\n",
"query = self._get_base_query(query=query, filters=filters)\n",
"query_result = query.all()\n",
"group = GroupByDateMonth(group_by, 'Group by Month')\n",
"return group.apply(query_result)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |