lines (sequence, lengths 1–444) | raw_lines (sequence, lengths 1–444) | label (sequence, lengths 1–444) | type (sequence, lengths 1–444) |
---|---|---|---|
[
"def FUNC_15(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"self.driver._eql_execute('pool', 'select', self.configuration.eqlx_pool, 'show'\n ).AndReturn(['TotalCapacity: 111GB', 'FreeSpace: 11GB'])\n",
"self.mox.ReplayAll()\n",
"VAR_10 = self.driver.get_volume_stats(refresh=True)\n",
"self.assertEqual(VAR_10['total_capacity_gb'], float('111.0'))\n",
"self.assertEqual(VAR_10['free_capacity_gb'], float('11.0'))\n",
"self.assertEqual(VAR_10['vendor_name'], 'Dell')\n"
] | [
"def test_get_volume_stats(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"self.driver._eql_execute('pool', 'select', self.configuration.eqlx_pool, 'show'\n ).AndReturn(['TotalCapacity: 111GB', 'FreeSpace: 11GB'])\n",
"self.mox.ReplayAll()\n",
"stats = self.driver.get_volume_stats(refresh=True)\n",
"self.assertEqual(stats['total_capacity_gb'], float('111.0'))\n",
"self.assertEqual(stats['free_capacity_gb'], float('11.0'))\n",
"self.assertEqual(stats['vendor_name'], 'Dell')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@classmethod...\n",
"if VAR_12.count:\n",
"from beamr.interpreters import Config\n",
"VAR_29 = VAR_31.getRaw('verbatim')\n",
"VAR_30 = VAR_31.getRaw('vbtmCmds', 'packageNames')\n",
"if VAR_29 not in VAR_30:\n",
"VAR_29 = VAR_30[0]\n",
"VAR_31.effectiveConfig['packages'].append(VAR_29)\n",
"VAR_31.effectiveConfig['verbatim'] = VAR_29\n",
"VAR_12.preambleDefs = VAR_31.getRaw('vbtmCmds', 'once', VAR_29) + '\\n'\n",
"for f in VAR_12.todo:\n",
"if f.head:\n",
"VAR_12.preambleDefs += VAR_31.getRaw('vbtmCmds', 'foreach', VAR_29) % (f.\n insertCmd, f.head, f.body)\n",
"VAR_12.preambleDefs += VAR_31.getRaw('vbtmCmds', 'foreachNoLang', VAR_29) % (f\n .insertCmd, f.body)\n"
] | [
"@classmethod...\n",
"if cls.count:\n",
"from beamr.interpreters import Config\n",
"package = Config.getRaw('verbatim')\n",
"packageList = Config.getRaw('vbtmCmds', 'packageNames')\n",
"if package not in packageList:\n",
"package = packageList[0]\n",
"Config.effectiveConfig['packages'].append(package)\n",
"Config.effectiveConfig['verbatim'] = package\n",
"cls.preambleDefs = Config.getRaw('vbtmCmds', 'once', package) + '\\n'\n",
"for f in cls.todo:\n",
"if f.head:\n",
"cls.preambleDefs += Config.getRaw('vbtmCmds', 'foreach', package) % (f.\n insertCmd, f.head, f.body)\n",
"cls.preambleDefs += Config.getRaw('vbtmCmds', 'foreachNoLang', package) % (f\n .insertCmd, f.body)\n"
] | [
0,
0,
0,
0,
2,
0,
0,
0,
0,
2,
0,
0,
2,
2
] | [
"Condition",
"Condition",
"ImportFrom'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Condition",
"AugAssign'",
"AugAssign'"
] |
[
"def FUNC_14(self):...\n",
"return self._get_ancestors().values()\n"
] | [
"def get_ancestors(self):...\n",
"return self._get_ancestors().values()\n"
] | [
0,
1
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"VAR_15 = self.runner.stats\n",
"for VAR_33 in self.runner.stats.tasks():\n",
"VAR_45 = VAR_33.check.poll()\n",
"VAR_45 = True\n",
"self.assertTrue(VAR_45)\n"
] | [
"def assert_all_dead(self):...\n",
"stats = self.runner.stats\n",
"for t in self.runner.stats.tasks():\n",
"finished = t.check.poll()\n",
"finished = True\n",
"self.assertTrue(finished)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_23(VAR_3):...\n",
"if VAR_3[0] == '%':\n",
"VAR_3 = '\\\\' + VAR_3\n",
"return VAR_3\n"
] | [
"def tpl_oneline(string):...\n",
"if string[0] == '%':\n",
"string = '\\\\' + string\n",
"return string\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_24(self):...\n",
"if not self.DiagnosticsForCurrentFileReady():\n",
"return\n",
"self._diag_interface.UpdateWithNewDiagnostics(self.\n GetDiagnosticsFromStoredRequest())\n"
] | [
"def UpdateDiagnosticInterface(self):...\n",
"if not self.DiagnosticsForCurrentFileReady():\n",
"return\n",
"self._diag_interface.UpdateWithNewDiagnostics(self.\n GetDiagnosticsFromStoredRequest())\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Expr'"
] |
[
"def FUNC_4(VAR_5):...\n",
"return app.config['SCOPED_AUTH_KEYS'].get(VAR_5)\n"
] | [
"def account_for_key_alias(key_alias):...\n",
"return app.config['SCOPED_AUTH_KEYS'].get(key_alias)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"from __future__ import absolute_import\n",
"import logging\n",
"from twisted.internet import reactor\n",
"from twisted.internet.defer import Deferred, inlineCallbacks\n",
"from twisted.internet.protocol import Protocol\n",
"from twisted.internet.task import deferLater\n",
"from twisted.web.client import Agent, HTTPConnectionPool\n",
"from twisted.web.http_headers import Headers\n",
"import Tribler.Core.Utilities.json_util as json\n",
"from Tribler.Core.simpledefs import NTFY_CHANNEL, NTFY_CREDIT_MINING, NTFY_DISCOVERED, NTFY_ERROR, NTFY_FINISHED, NTFY_INSERT, NTFY_MARKET_ON_ASK, NTFY_MARKET_ON_ASK_TIMEOUT, NTFY_MARKET_ON_BID, NTFY_MARKET_ON_BID_TIMEOUT, NTFY_MARKET_ON_PAYMENT_RECEIVED, NTFY_MARKET_ON_PAYMENT_SENT, NTFY_MARKET_ON_TRANSACTION_COMPLETE, NTFY_NEW_VERSION, NTFY_REMOVE, NTFY_STARTED, NTFY_TORRENT, NTFY_TUNNEL, NTFY_UPDATE, NTFY_UPGRADER, NTFY_UPGRADER_TICK, NTFY_WATCH_FOLDER_CORRUPT_TORRENT, SIGNAL_LOW_SPACE, SIGNAL_RESOURCE_CHECK\n",
"from Tribler.Core.version import version_id\n",
"from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest\n",
"from Tribler.Test.tools import trial_timeout\n",
"from Tribler.pyipv8.ipv8.messaging.anonymization.tunnel import Circuit\n",
"\"\"\"\n This class is responsible for reading the data received over the event socket.\n \"\"\"\n",
"def __init__(self, VAR_0, VAR_1, VAR_2):...\n",
"self.json_buffer = []\n",
"self._logger = logging.getLogger(self.__class__.__name__)\n",
"self.messages_to_wait_for = VAR_0 + 1\n",
"self.finished = VAR_1\n",
"self.response = VAR_2\n",
"def FUNC_0(self, VAR_3):...\n",
"self._logger.info('Received data: %s' % VAR_3)\n",
"self.json_buffer.append(json.loads(VAR_3))\n",
"self.messages_to_wait_for -= 1\n",
"if self.messages_to_wait_for == 0:\n",
"self.response.loseConnection()\n",
"def FUNC_1(self, VAR_4='done'):...\n",
"self.finished.callback(self.json_buffer[1:])\n",
"@inlineCallbacks...\n",
"yield super(CLASS_1, self).setUp()\n",
"self.events_deferred = Deferred()\n",
"self.connection_pool = HTTPConnectionPool(reactor, False)\n",
"self.socket_open_deferred = self.tribler_started_deferred.addCallback(self.\n open_events_socket)\n",
"self.messages_to_wait_for = 0\n",
"@inlineCallbacks...\n",
"yield self.close_connections()\n",
"yield deferLater(reactor, 0.3, lambda : None)\n",
"yield super(CLASS_1, self).tearDown()\n",
"def FUNC_4(self, VAR_2):...\n",
"VAR_2.deliverBody(CLASS_0(self.messages_to_wait_for, self.events_deferred,\n VAR_2))\n",
"def FUNC_5(self, VAR_5):...\n",
"VAR_6 = Agent(reactor, pool=self.connection_pool)\n",
"return VAR_6.request('GET', 'http://localhost:%s/events' % self.session.\n config.get_http_api_port(), Headers({'User-Agent': ['Tribler ' +\n version_id]}), None).addCallback(self.on_event_socket_opened)\n"
] | [
"from __future__ import absolute_import\n",
"import logging\n",
"from twisted.internet import reactor\n",
"from twisted.internet.defer import Deferred, inlineCallbacks\n",
"from twisted.internet.protocol import Protocol\n",
"from twisted.internet.task import deferLater\n",
"from twisted.web.client import Agent, HTTPConnectionPool\n",
"from twisted.web.http_headers import Headers\n",
"import Tribler.Core.Utilities.json_util as json\n",
"from Tribler.Core.simpledefs import NTFY_CHANNEL, NTFY_CREDIT_MINING, NTFY_DISCOVERED, NTFY_ERROR, NTFY_FINISHED, NTFY_INSERT, NTFY_MARKET_ON_ASK, NTFY_MARKET_ON_ASK_TIMEOUT, NTFY_MARKET_ON_BID, NTFY_MARKET_ON_BID_TIMEOUT, NTFY_MARKET_ON_PAYMENT_RECEIVED, NTFY_MARKET_ON_PAYMENT_SENT, NTFY_MARKET_ON_TRANSACTION_COMPLETE, NTFY_NEW_VERSION, NTFY_REMOVE, NTFY_STARTED, NTFY_TORRENT, NTFY_TUNNEL, NTFY_UPDATE, NTFY_UPGRADER, NTFY_UPGRADER_TICK, NTFY_WATCH_FOLDER_CORRUPT_TORRENT, SIGNAL_LOW_SPACE, SIGNAL_RESOURCE_CHECK\n",
"from Tribler.Core.version import version_id\n",
"from Tribler.Test.Core.Modules.RestApi.base_api_test import AbstractApiTest\n",
"from Tribler.Test.tools import trial_timeout\n",
"from Tribler.pyipv8.ipv8.messaging.anonymization.tunnel import Circuit\n",
"\"\"\"\n This class is responsible for reading the data received over the event socket.\n \"\"\"\n",
"def __init__(self, messages_to_wait_for, finished, response):...\n",
"self.json_buffer = []\n",
"self._logger = logging.getLogger(self.__class__.__name__)\n",
"self.messages_to_wait_for = messages_to_wait_for + 1\n",
"self.finished = finished\n",
"self.response = response\n",
"def dataReceived(self, data):...\n",
"self._logger.info('Received data: %s' % data)\n",
"self.json_buffer.append(json.loads(data))\n",
"self.messages_to_wait_for -= 1\n",
"if self.messages_to_wait_for == 0:\n",
"self.response.loseConnection()\n",
"def connectionLost(self, reason='done'):...\n",
"self.finished.callback(self.json_buffer[1:])\n",
"@inlineCallbacks...\n",
"yield super(TestEventsEndpoint, self).setUp()\n",
"self.events_deferred = Deferred()\n",
"self.connection_pool = HTTPConnectionPool(reactor, False)\n",
"self.socket_open_deferred = self.tribler_started_deferred.addCallback(self.\n open_events_socket)\n",
"self.messages_to_wait_for = 0\n",
"@inlineCallbacks...\n",
"yield self.close_connections()\n",
"yield deferLater(reactor, 0.3, lambda : None)\n",
"yield super(TestEventsEndpoint, self).tearDown()\n",
"def on_event_socket_opened(self, response):...\n",
"response.deliverBody(EventDataProtocol(self.messages_to_wait_for, self.\n events_deferred, response))\n",
"def open_events_socket(self, _):...\n",
"agent = Agent(reactor, pool=self.connection_pool)\n",
"return agent.request('GET', 'http://localhost:%s/events' % self.session.\n config.get_http_api_port(), Headers({'User-Agent': ['Tribler ' +\n version_id]}), None).addCallback(self.on_event_socket_opened)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Expr'",
"Expr'",
"AugAssign'",
"For",
"Expr'",
"FunctionDef'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_13(self, VAR_7):...\n",
"VAR_12 = VAR_7.model_class._meta.table_name\n",
"VAR_7 = 'string'.format(VAR_12, ','.join(VAR_7.model_class._meta.\n sorted_fields_names), ','.join([VAR_39(obj.id) for obj in VAR_7.objs]),\n VAR_7.model_class._meta.sorted_fields_names[0], VAR_7.model_class._meta\n .sorted_fields_names[1], VAR_7.objs[0].id, VAR_7.objs[1].id)\n",
"return VAR_7\n"
] | [
"def generate_add(self, query):...\n",
"table_name = query.model_class._meta.table_name\n",
"query = (\n \"INSERT INTO {0} ({1}) SELECT {2} WHERE NOT EXISTS (SELECT {3} FROM {0} WHERE {3}='{5}' AND {4}='{6}');\"\n .format(table_name, ','.join(query.model_class._meta.\n sorted_fields_names), ','.join([str(obj.id) for obj in query.objs]),\n query.model_class._meta.sorted_fields_names[0], query.model_class._meta\n .sorted_fields_names[1], query.objs[0].id, query.objs[1].id))\n",
"return query\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"User.upsert_mapped_projects(VAR_3, VAR_13)\n"
] | [
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"User.upsert_mapped_projects(user_id, project_id)\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'"
] |
[
"def FUNC_15(self, VAR_16, VAR_18):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_25 = self._eql_execute('volume', 'select', VAR_18['volume_name'],\n 'snapshot', 'select', VAR_18['name'], 'clone', VAR_16['name'])\n",
"VAR_0.error(_('Failed to create volume from snapshot %s'), VAR_18['name'])\n",
"return self._get_volume_data(VAR_25)\n"
] | [
"def create_volume_from_snapshot(self, volume, snapshot):...\n",
"\"\"\"docstring\"\"\"\n",
"out = self._eql_execute('volume', 'select', snapshot['volume_name'],\n 'snapshot', 'select', snapshot['name'], 'clone', volume['name'])\n",
"LOG.error(_('Failed to create volume from snapshot %s'), snapshot['name'])\n",
"return self._get_volume_data(out)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_2(self):...\n",
"self.cnxn = pyodbc.connect(self.connection_string)\n",
"self.cursor = self.cnxn.cursor()\n",
"for i in range(3):\n",
"self.cnxn.rollback()\n",
"self.cursor.execute('drop table t%d' % i)\n",
"self.cnxn.commit()\n"
] | [
"def setUp(self):...\n",
"self.cnxn = pyodbc.connect(self.connection_string)\n",
"self.cursor = self.cnxn.cursor()\n",
"for i in range(3):\n",
"self.cnxn.rollback()\n",
"self.cursor.execute('drop table t%d' % i)\n",
"self.cnxn.commit()\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_5(self, VAR_3, **VAR_6):...\n",
"logging.warn('url_read_json(%s, %s)', VAR_3[:500], str(VAR_6)[:500])\n",
"if not self._requests:\n",
"return None\n",
"VAR_6.pop('stream', None)\n",
"for i, n in enumerate(self._requests):\n",
"if n[0] == VAR_3:\n",
"self.fail('Unknown request %s' % VAR_3)\n",
"VAR_9 = self._requests.pop(i)\n",
"if len(VAR_9) != 3:\n",
"self.fail('Expected json request, got normal data; %s' % VAR_3)\n",
"VAR_10, VAR_11, VAR_12 = VAR_9\n",
"if callable(VAR_11):\n",
"VAR_11(VAR_6)\n",
"self.assertEqual(VAR_11, VAR_6)\n",
"if VAR_12 is not None:\n",
"return VAR_12\n",
"return None\n"
] | [
"def _url_read_json(self, url, **kwargs):...\n",
"logging.warn('url_read_json(%s, %s)', url[:500], str(kwargs)[:500])\n",
"if not self._requests:\n",
"return None\n",
"kwargs.pop('stream', None)\n",
"for i, n in enumerate(self._requests):\n",
"if n[0] == url:\n",
"self.fail('Unknown request %s' % url)\n",
"data = self._requests.pop(i)\n",
"if len(data) != 3:\n",
"self.fail('Expected json request, got normal data; %s' % url)\n",
"_, expected_kwargs, result = data\n",
"if callable(expected_kwargs):\n",
"expected_kwargs(kwargs)\n",
"self.assertEqual(expected_kwargs, kwargs)\n",
"if result is not None:\n",
"return result\n",
"return None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Condition",
"Return'",
"Expr'",
"For",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_25(self, VAR_31=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = self.build_graph.clone_new()\n",
"for VAR_21 in self.address_mapper.scan_addresses(VAR_31):\n",
"VAR_5.inject_address_closure(VAR_21)\n",
"return VAR_5\n"
] | [
"def scan(self, root=None):...\n",
"\"\"\"docstring\"\"\"\n",
"build_graph = self.build_graph.clone_new()\n",
"for address in self.address_mapper.scan_addresses(root):\n",
"build_graph.inject_address_closure(address)\n",
"return build_graph\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Expr'",
"Return'"
] |
[
"def FUNC_37(self, VAR_69):...\n",
"if not_iterable(VAR_69):\n",
"return flag(os.path.join(self.workdir, VAR_69), 'subworkflow', self)\n",
"return [self.target(VAR_0) for VAR_0 in VAR_69]\n"
] | [
"def target(self, paths):...\n",
"if not_iterable(paths):\n",
"return flag(os.path.join(self.workdir, paths), 'subworkflow', self)\n",
"return [self.target(path) for path in paths]\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = self.config.get(self.section, 'logfile')\n",
"if not VAR_5:\n",
"VAR_5 = None\n",
"return VAR_5\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"res = self.config.get(self.section, 'logfile')\n",
"if not res:\n",
"res = None\n",
"return res\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_13(self):...\n",
"VAR_28 = f.readlines()\n",
"VAR_28 = [VAR_46.strip() for VAR_46 in VAR_28]\n",
"VAR_28 = [VAR_46 for VAR_46 in VAR_28 if VAR_46 and not VAR_46.startswith('#')]\n",
"VAR_28 = '\\n'.join(VAR_28).replace('\\\\\\n', '').split('\\n')\n",
"for VAR_46 in VAR_28:\n",
"VAR_46 = shlex.split(VAR_46, True)\n",
"FUNC_3('%s : Could not parse this line (%s) : %s' % (self.path, e, VAR_46))\n",
"yield VAR_46\n"
] | [
"def read_file(self):...\n",
"lines = f.readlines()\n",
"lines = [line.strip() for line in lines]\n",
"lines = [line for line in lines if line and not line.startswith('#')]\n",
"lines = '\\n'.join(lines).replace('\\\\\\n', '').split('\\n')\n",
"for line in lines:\n",
"line = shlex.split(line, True)\n",
"print_warning('%s : Could not parse this line (%s) : %s' % (self.path, e, line)\n )\n",
"yield line\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_10(self):...\n",
"VAR_13 = self.cnxn.getinfo(pyodbc.SQL_DEFAULT_TXN_ISOLATION)\n",
"self.assertTrue(isinstance(VAR_13, int))\n"
] | [
"def test_getinfo_int(self):...\n",
"value = self.cnxn.getinfo(pyodbc.SQL_DEFAULT_TXN_ISOLATION)\n",
"self.assertTrue(isinstance(value, int))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"import collections\n",
"import mimetypes\n",
"import os\n",
"import re\n",
"import shutil\n",
"import urllib.parse\n",
"from fooster import web\n",
"def FUNC_0(VAR_0):...\n",
"VAR_6 = VAR_0.split('/')\n",
"VAR_7 = collections.deque()\n",
"for entry in VAR_6:\n",
"if not entry:\n",
"if VAR_6[0] == '':\n",
"if entry == '.':\n",
"VAR_7.appendleft('')\n",
"if VAR_6[-1] == '':\n",
"if entry == '..':\n",
"VAR_7.append('')\n",
"return '/'.join(VAR_7)\n",
"if len(VAR_7) > 0:\n",
"VAR_7.append(entry)\n",
"VAR_7.pop()\n"
] | [
"import collections\n",
"import mimetypes\n",
"import os\n",
"import re\n",
"import shutil\n",
"import urllib.parse\n",
"from fooster import web\n",
"def normpath(path):...\n",
"old_path = path.split('/')\n",
"new_path = collections.deque()\n",
"for entry in old_path:\n",
"if not entry:\n",
"if old_path[0] == '':\n",
"if entry == '.':\n",
"new_path.appendleft('')\n",
"if old_path[-1] == '':\n",
"if entry == '..':\n",
"new_path.append('')\n",
"return '/'.join(new_path)\n",
"if len(new_path) > 0:\n",
"new_path.append(entry)\n",
"new_path.pop()\n"
] | [
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Condition",
"Condition",
"Condition",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Return'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_2():...\n",
"super(CLASS_5, self).__init__(slideParser.parse(VAR_10, slideLexer), self.\n begin % (VAR_41, VAR_36), self.end % VAR_41)\n"
] | [
"def innerFunc():...\n",
"super(Box, self).__init__(slideParser.parse(txt, slideLexer), self.begin %\n (kind, head), self.end % kind)\n"
] | [
0,
2
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_28(self, VAR_21):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_65 = [x[VAR_1] for x in self.get_path(VAR_21)]\n",
"return VAR_65\n"
] | [
"def listdir(self, path):...\n",
"\"\"\"docstring\"\"\"\n",
"names = [x[A_NAME] for x in self.get_path(path)]\n",
"return names\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_2(self, VAR_5=None, VAR_6=None, VAR_7='', VAR_8=''):...\n",
"if VAR_6:\n",
"VAR_5 = VAR_6.apply_all(VAR_5)\n",
"if VAR_7 != '':\n",
"if hasattr(self.obj, VAR_7):\n",
"return VAR_5\n",
"if hasattr(getattr(self.obj, VAR_7), '_col_name'):\n",
"VAR_5 = VAR_5.order_by(VAR_7 + ' ' + VAR_8)\n",
"VAR_7 = getattr(getattr(self.obj, VAR_7), '_col_name')\n"
] | [
"def _get_base_query(self, query=None, filters=None, order_column='',...\n",
"if filters:\n",
"query = filters.apply_all(query)\n",
"if order_column != '':\n",
"if hasattr(self.obj, order_column):\n",
"return query\n",
"if hasattr(getattr(self.obj, order_column), '_col_name'):\n",
"query = query.order_by(order_column + ' ' + order_direction)\n",
"order_column = getattr(getattr(self.obj, order_column), '_col_name')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
4,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_27(VAR_6):...\n",
"if callable(VAR_6):\n",
"return VAR_6()\n"
] | [
"def default_encode(value):...\n",
"if callable(value):\n",
"return value()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'"
] |
[
"def FUNC_9(self, *VAR_15, **VAR_75):...\n",
""
] | [
"def newfn(self, *a, **env):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_6(self, VAR_40):...\n",
"if VAR_40 and len(VAR_40) > 500:\n",
"VAR_101.errors.add(errors.DESC_TOO_LONG)\n",
"return unkeep_space(VAR_40 or '')\n"
] | [
"def run(self, description):...\n",
"if description and len(description) > 500:\n",
"c.errors.add(errors.DESC_TOO_LONG)\n",
"return unkeep_space(description or '')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_1(self, VAR_0):...\n",
"VAR_2 = re.search('\\\\d{2}.\\\\d{2}.\\\\d{4}', VAR_0)\n",
"if VAR_2:\n",
"VAR_1 = VAR_2.group()\n",
"return VAR_1\n"
] | [
"def search_date(self, string):...\n",
"search_date_result = re.search('\\\\d{2}.\\\\d{2}.\\\\d{4}', string)\n",
"if search_date_result:\n",
"date = search_date_result.group()\n",
"return date\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_30(self, VAR_16):...\n",
""
] | [
"def delete_all(self, items):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_3(self, VAR_13, VAR_11):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_27 = self.output_regex\n",
"if isinstance(VAR_27, str):\n",
"VAR_27 = VAR_27 % {'file_name': VAR_11}\n",
"for VAR_14 in re.finditer(VAR_27, ''.join(VAR_13)):\n",
"yield self.match_to_result(VAR_14, VAR_11)\n"
] | [
"def _process_issues(self, output, filename):...\n",
"\"\"\"docstring\"\"\"\n",
"regex = self.output_regex\n",
"if isinstance(regex, str):\n",
"regex = regex % {'file_name': filename}\n",
"for match in re.finditer(regex, ''.join(output)):\n",
"yield self.match_to_result(match, filename)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"For",
"Expr'"
] |
[
"def FUNC_18(self):...\n",
"self.compilation_ok(\"\"\"\nif anyof(true, false) {\n discard;\n}\n\"\"\")\n",
"self.representation_is('string')\n"
] | [
"def test_truefalse_testlist(self):...\n",
"self.compilation_ok(\"\"\"\nif anyof(true, false) {\n discard;\n}\n\"\"\")\n",
"self.representation_is(\n \"\"\"\nif (type: control)\n anyof (type: test)\n true (type: test)\n false (type: test)\n discard (type: action)\n\"\"\"\n )\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0():...\n",
"return [webapp2.Route('/', handler=RootHandler), webapp2.Route('/catalog',\n handler=CatalogHandler), webapp2.Route('/catalog/<machine_id>', handler\n =CatalogHandler), webapp2.Route('/leases', handler=LeaseRequestHandler),\n webapp2.Route('/leases/<lease_id>', handler=LeaseRequestHandler)]\n"
] | [
"def get_routes():...\n",
"return [webapp2.Route('/', handler=RootHandler), webapp2.Route('/catalog',\n handler=CatalogHandler), webapp2.Route('/catalog/<machine_id>', handler\n =CatalogHandler), webapp2.Route('/leases', handler=LeaseRequestHandler),\n webapp2.Route('/leases/<lease_id>', handler=LeaseRequestHandler)]\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_12 = {}\n",
"VAR_8 = []\n",
"VAR_26 = Contest.get_from_id(VAR_0, session)\n",
"for participation in VAR_26.participations:\n",
"VAR_31 = participation.user\n",
"for VAR_30 in VAR_26.tasks:\n",
"VAR_12[VAR_31.username] = {'password': VAR_31.password}\n",
"VAR_8.append((VAR_30.id, VAR_30.name, VAR_30.statements.keys()))\n",
"return VAR_12, VAR_8\n"
] | [
"def harvest_contest_data(contest_id):...\n",
"\"\"\"docstring\"\"\"\n",
"users = {}\n",
"tasks = []\n",
"contest = Contest.get_from_id(contest_id, session)\n",
"for participation in contest.participations:\n",
"user = participation.user\n",
"for task in contest.tasks:\n",
"users[user.username] = {'password': user.password}\n",
"tasks.append((task.id, task.name, task.statements.keys()))\n",
"return users, tasks\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"For",
"Assign'",
"Expr'",
"Return'"
] |
[
"import re, requests, argparse, logging, os, coloredlogs, datetime, getpass, tempfile, itertools, json, concurrent.futures, random\n",
"from utils import *\n",
"from UploadForm import UploadForm\n",
"from threading import Lock\n",
"VAR_0 = '0.5.0'\n",
"logging.basicConfig(datefmt='[%m/%d/%Y-%H:%M:%S]')\n",
"VAR_1 = logging.getLogger('fuxploider')\n",
"coloredlogs.install(VAR_1=logger, fmt=\n '%(asctime)s %(levelname)s - %(message)s', level=logging.INFO)\n",
"logging.getLogger('requests').setLevel(logging.ERROR)\n",
"VAR_2 = 'payloads'\n",
"VAR_31 = json.loads(fd.read())\n",
"VAR_3 = [x['templateName'] for x in VAR_31]\n",
"VAR_4 = 'string'\n",
"VAR_4 += \"\"\"\n\tDefault templates are the following (name - description) : \"\"\"\n",
"for VAR_46 in VAR_31:\n",
"VAR_4 += \"\\n\\t * '\" + VAR_46['templateName'] + \"' - \" + VAR_46['description']\n",
"VAR_5 = argparse.ArgumentParser(epilog=templatesSection, description=\n __doc__, formatter_class=argparse.RawTextHelpFormatter)\n",
"VAR_5.add_argument('-d', '--data', metavar='postData', dest='data', help=\n 'Additionnal data to be transmitted via POST method. Example : -d \"key1=value1&key2=value2\"'\n , type=valid_postData)\n",
"VAR_5.add_argument('--proxy', metavar='proxyUrl', dest='proxy', help=\n 'Proxy information. Example : --proxy \"user:[email protected]:8080\"',\n type=valid_proxyString)\n",
"VAR_5.add_argument('--proxy-creds', metavar='credentials', nargs='?', const\n =True, dest='proxyCreds', help=\n \"Prompt for proxy credentials at runtime. Format : 'user:pass'\", type=\n valid_proxyCreds)\n",
"VAR_5.add_argument('-f', '--filesize', metavar='integer', nargs=1, default=\n ['10'], dest='size', help=\n 'File size to use for files to be created and uploaded (in kB).')\n",
"VAR_5.add_argument('--cookies', metavar='omnomnom', nargs=1, dest='cookies',\n help=\n 'Cookies to use with HTTP requests. Example : PHPSESSID=aef45aef45afeaef45aef45&JSESSID=AQSEJHQSQSG'\n , type=valid_postData)\n",
"VAR_5.add_argument('--uploads-path', default=[None], metavar='path', nargs=\n 1, dest='uploadsPath', help=\n \"Path on the remote server where uploads are put. Example : '/tmp/uploads/'\"\n )\n",
"VAR_5.add_argument('-t', '--template', metavar='templateName', nargs=1,\n dest='template', help=\n 'Malicious payload to use for code execution detection. Default is to use every known templates. For a complete list of templates, see the TEMPLATE section.'\n )\n",
"VAR_5.add_argument('-r', '--regex-override', metavar='regex', nargs=1, dest\n ='regexOverride', help=\n 'Specify a regular expression to detect code execution. Overrides the default code execution detection regex defined in the template in use.'\n , type=valid_regex)\n",
"VAR_6 = VAR_5.add_argument_group('Required named arguments')\n",
"VAR_6.add_argument('-u', '--url', metavar='target', dest='url', required=\n True, help=\n 'Web page URL containing the file upload form to be tested. Example : http://test.com/index.html?action=upload'\n , type=valid_url)\n",
"VAR_6.add_argument('--not-regex', metavar='regex', help=\n 'Regex matching an upload failure', type=valid_regex, dest='notRegex')\n",
"VAR_6.add_argument('--true-regex', metavar='regex', help=\n 'Regex matching an upload success', type=valid_regex, dest='trueRegex')\n",
"VAR_7 = VAR_5.add_mutually_exclusive_group()\n",
"VAR_7.add_argument('-l', '--legit-extensions', metavar='listOfExtensions',\n dest='legitExtensions', nargs=1, help=\n \"Legit extensions expected, for a normal use of the form, comma separated. Example : 'jpg,png,bmp'\"\n )\n",
"VAR_7.add_argument('-n', metavar='n', nargs=1, default=['100'], dest='n',\n help='Number of common extensions to use. Example : -n 100', type=\n valid_nArg)\n",
"VAR_8 = VAR_5.add_mutually_exclusive_group()\n",
"VAR_8.add_argument('-v', action='store_true', required=False, dest=\n 'verbose', help='Verbose mode')\n",
"VAR_8.add_argument('-vv', action='store_true', required=False, dest=\n 'veryVerbose', help='Very verbose mode')\n",
"VAR_8.add_argument('-vvv', action='store_true', required=False, dest=\n 'veryVeryVerbose', help='Much verbose, very log, wow.')\n",
"VAR_5.add_argument('-s', '--skip-recon', action='store_true', required=\n False, dest='skipRecon', help=\n 'Skip recon phase, where fuxploider tries to determine what extensions are expected and filtered by the server. Needs -l switch.'\n )\n",
"VAR_5.add_argument('-y', action='store_true', required=False, dest=\n 'detectAllEntryPoints', help=\n 'Force detection of every entry points. Will not stop at first code exec found.'\n )\n",
"VAR_5.add_argument('-T', '--threads', metavar='Threads', nargs=1, dest=\n 'nbThreads', help='Number of parallel tasks (threads).', type=int,\n default=[4])\n",
"VAR_9 = VAR_5.add_mutually_exclusive_group()\n",
"VAR_9.add_argument('-U', '--user-agent', metavar='useragent', nargs=1, dest\n ='userAgent', help='User-agent to use while requesting the target.',\n type=str, default=[requests.utils.default_user_agent()])\n",
"VAR_9.add_argument('--random-user-agent', action='store_true', required=\n False, dest='randomUserAgent', help=\n 'Use a random user-agent while requesting the target.')\n",
"VAR_10 = VAR_5.add_argument_group('Manual Form Detection arguments')\n",
"VAR_10.add_argument('-m', '--manual-form-detection', action='store_true',\n dest='manualFormDetection', help=\n 'Disable automatic form detection. Useful when automatic detection fails due to: (1) Form loaded using Javascript (2) Multiple file upload forms in URL.'\n )\n",
"VAR_10.add_argument('--input-name', metavar='image', dest='inputName', help\n ='Name of input for file. Example: <input type=\"file\" name=\"image\">')\n",
"VAR_10.add_argument('--form-action', default='', metavar='upload.php', dest\n ='formAction', help=\n 'Path of form action. Example: <form method=\"POST\" action=\"upload.php\">')\n",
"VAR_11 = VAR_5.parse_args()\n",
"VAR_11.uploadsPath = VAR_11.uploadsPath[0]\n",
"VAR_11.nbThreads = VAR_11.nbThreads[0]\n",
"VAR_11.userAgent = VAR_11.userAgent[0]\n",
"if VAR_11.randomUserAgent:\n",
"VAR_45 = 0\n",
"if VAR_11.template:\n",
"for l in fd:\n",
"VAR_11.template = VAR_11.template[0]\n",
"if VAR_11.regexOverride:\n",
"VAR_45 += 1\n",
"fd.seek(0)\n",
"if VAR_11.template not in VAR_3:\n",
"for VAR_46 in VAR_31:\n",
"VAR_11.verbosity = 0\n",
"VAR_45 = random.randint(0, VAR_45)\n",
"logging.warning('Unknown template : %s', VAR_11.template)\n",
"VAR_31 = [[x for x in VAR_31 if x['templateName'] == VAR_11.template][0]]\n",
"VAR_46['codeExecRegex'] = VAR_11.regexOverride[0]\n",
"if VAR_11.verbose:\n",
"for i in range(0, VAR_45):\n",
"VAR_22 = input('Use default templates instead ? [Y/n]')\n",
"VAR_11.verbosity = 1\n",
"if VAR_11.veryVerbose:\n",
"VAR_11.userAgent = fd.readline()[:-1]\n",
"if not VAR_22.lower().startswith('y'):\n",
"VAR_11.verbosity = 2\n",
"if VAR_11.veryVeryVerbose:\n",
"exit()\n",
"VAR_11.verbosity = 3\n",
"VAR_1.verbosity = VAR_11.verbosity\n",
"if VAR_11.verbosity > 0:\n",
"coloredlogs.install(VAR_1=logger, fmt=\n '%(asctime)s %(levelname)s - %(message)s', level=logging.DEBUG)\n",
"if VAR_11.proxyCreds and VAR_11.proxy == None:\n",
"VAR_5.error('--proxy-creds must be used with --proxy.')\n",
"if VAR_11.skipRecon and VAR_11.legitExtensions == None:\n",
"VAR_5.error(\n '-s switch needs -l switch. Cannot skip recon phase without any known entry point.'\n )\n",
"VAR_11.n = int(VAR_11.n[0])\n",
"VAR_11.size = int(VAR_11.size[0])\n",
"VAR_11.size = 1024 * VAR_11.size\n",
"if not VAR_11.notRegex and not VAR_11.trueRegex:\n",
"VAR_5.error(\n 'At least one detection method must be provided, either with --not-regex or with --true-regex.'\n )\n",
"if VAR_11.legitExtensions:\n",
"VAR_11.legitExtensions = VAR_11.legitExtensions[0].split(',')\n",
"if VAR_11.cookies:\n",
"VAR_11.cookies = postDataFromStringToJSON(VAR_11.cookies[0])\n",
"if VAR_11.manualFormDetection and VAR_11.inputName is None:\n",
"VAR_5.error('--manual-form-detection requires --input-name')\n",
"print('string' + VAR_0 + 'string')\n",
"if VAR_11.proxyCreds == True:\n",
"VAR_11.proxyCreds = {}\n",
"VAR_12 = datetime.datetime.now()\n",
"VAR_11.proxyCreds['username'] = input('Proxy username : ')\n",
"print('[*] starting at ' + str(VAR_12.hour) + ':' + str(VAR_12.minute) +\n ':' + str(VAR_12.second))\n",
"VAR_11.proxyCreds['password'] = getpass.getpass('Proxy password : ')\n",
"VAR_13 = 'mimeTypes.basic'\n",
"VAR_14 = loadExtensions('file', VAR_13)\n",
"VAR_15 = []\n",
"if VAR_11.legitExtensions:\n",
"VAR_11.legitExtensions = [x.lower() for x in VAR_11.legitExtensions]\n",
"VAR_11.legitExtensions = VAR_15\n",
"VAR_32 = [VAR_20[0] for VAR_20 in VAR_14]\n",
"VAR_16 = postDataFromStringToJSON(VAR_11.data)\n",
"for VAR_21 in VAR_11.legitExtensions:\n",
"VAR_17 = requests.Session()\n",
"if VAR_21 in VAR_32:\n",
"if VAR_11.cookies:\n",
"VAR_15.append(VAR_21)\n",
"logging.warning(\n \"Extension %s can't be found as a valid/known extension with associated mime type.\"\n , VAR_21)\n",
"for VAR_47 in VAR_11.cookies.keys():\n",
"VAR_17.headers = {'User-Agent': VAR_11.userAgent}\n",
"VAR_17.cookies[VAR_47] = VAR_11.cookies[VAR_47]\n",
"VAR_17.trust_env = False\n",
"if VAR_11.proxy:\n",
"if VAR_11.proxy['username'] and VAR_11.proxy['password'] and VAR_11.proxyCreds:\n",
"if VAR_11.manualFormDetection:\n",
"logging.warning('string')\n",
"if VAR_11.proxyCreds:\n",
"if VAR_11.formAction == '':\n",
"VAR_37 = UploadForm(VAR_11.notRegex, VAR_11.trueRegex, VAR_17, VAR_11.size,\n VAR_16, VAR_11.uploadsPath)\n",
"VAR_48 = VAR_11.proxyCreds['username']\n",
"VAR_48 = VAR_11.proxy['username']\n",
"VAR_1.warning('string')\n",
"VAR_37 = UploadForm(VAR_11.notRegex, VAR_11.trueRegex, VAR_17, VAR_11.size,\n VAR_16, VAR_11.uploadsPath, VAR_11.url, VAR_11.formAction, VAR_11.inputName\n )\n",
"VAR_37.setup(VAR_11.url)\n",
"VAR_49 = VAR_11.proxyCreds['password']\n",
"VAR_49 = VAR_11.proxy['password']\n",
"VAR_37.threads = VAR_11.nbThreads\n",
"VAR_33 = VAR_11.proxy['protocol']\n",
"VAR_18 = VAR_37.uploadUrl\n",
"VAR_34 = VAR_11.proxy['hostname']\n",
"VAR_19 = {'name': VAR_37.inputName}\n",
"VAR_35 = VAR_11.proxy['port']\n",
"VAR_20 = datetime.datetime.now()\n",
"VAR_36 = ''\n",
"if not VAR_11.skipRecon:\n",
"if VAR_33 != None:\n",
"if len(VAR_11.legitExtensions) > 0:\n",
"VAR_1.info(\n '### Skipping detection of valid extensions, using provided extensions instead (%s)'\n , VAR_11.legitExtensions)\n",
"VAR_36 += VAR_33 + '://'\n",
"VAR_36 += 'http://'\n",
"VAR_51 = VAR_37.detectValidExtensions(VAR_14, VAR_11.n, VAR_11.legitExtensions)\n",
"VAR_51 = VAR_37.detectValidExtensions(VAR_14, VAR_11.n)\n",
"VAR_37.validExtensions = VAR_11.legitExtensions\n",
"if VAR_48 != None and VAR_49 != None:\n",
"VAR_1.info('### Tried %s extensions, %s are valid.', VAR_51, len(VAR_37.\n validExtensions))\n",
"if VAR_37.validExtensions == []:\n",
"VAR_36 += VAR_48 + ':' + VAR_49 + '@'\n",
"VAR_36 += VAR_34\n",
"VAR_1.error('No valid extension found.')\n",
"VAR_21 = datetime.datetime.now()\n",
"if VAR_35 != None:\n",
"exit()\n",
"print('Extensions detection : ' + str(VAR_21 - VAR_20))\n",
"VAR_36 += ':' + VAR_35\n",
"if VAR_33 == 'https':\n",
"VAR_22 = input('Start uploading payloads ? [Y/n] : ')\n",
"VAR_50 = {'https': VAR_36}\n",
"VAR_50 = {'http': VAR_36, 'https': VAR_36}\n",
"VAR_37.shouldLog = True\n",
"VAR_17.proxies.update(VAR_50)\n",
"if VAR_22.lower().startswith('y') or VAR_22 == '':\n",
"exit('Exiting.')\n",
"VAR_23 = []\n",
"VAR_37.stopThreads = True\n",
"VAR_38 = json.loads(rawTechniques.read())\n",
"VAR_1.info(\n '### Starting code execution detection (messing with file extensions and mime types...)'\n )\n",
"VAR_24 = datetime.datetime.now()\n",
"VAR_25 = 0\n",
"VAR_26 = []\n",
"VAR_27 = {}\n",
"for VAR_40 in VAR_31:\n",
"VAR_39 = open(VAR_2 + '/' + VAR_40['filename'], 'rb')\n",
"VAR_28 = False\n",
"VAR_27[VAR_40['templateName']] = VAR_39.read()\n",
"VAR_29 = 0\n",
"VAR_39.close()\n",
"VAR_44 = []\n",
"VAR_41 = VAR_40['nastyExt']\n",
"for VAR_20 in VAR_26:\n",
"VAR_28 = True\n",
"VAR_30 = datetime.datetime.now()\n",
"VAR_42 = getMime(VAR_14, VAR_41)\n",
"VAR_52 = VAR_20['suffix']\n",
"for future in concurrent.futures.as_completed(VAR_44):\n",
"executor.shutdown(wait=False)\n",
"print()\n",
"VAR_43 = VAR_40['extVariants']\n",
"VAR_53 = VAR_20['mime']\n",
"VAR_57 = future.result()\n",
"executor._threads.clear()\n",
"logging.info('%s entry point(s) found using %s HTTP requests.', VAR_25,\n VAR_37.httpRequests)\n",
"for VAR_46 in VAR_38:\n",
"VAR_54 = VAR_27[VAR_20['templateName']]\n",
"VAR_29 += 1\n",
"concurrent.futures.thread._threads_queues.clear()\n",
"print('Found the following entry points : ')\n",
"for nastyVariant in ([VAR_41] + VAR_43):\n",
"VAR_55 = [VAR_46['codeExecRegex'] for VAR_46 in VAR_31 if VAR_46[\n 'templateName'] == VAR_20['templateName']][0]\n",
"if not VAR_28:\n",
"VAR_1.setLevel(logging.CRITICAL)\n",
"print(VAR_23)\n",
"for legitExt in VAR_37.validExtensions:\n",
"VAR_56 = executor.submit(VAR_37.submitTestCase, VAR_52, VAR_53, VAR_54, VAR_55)\n",
"if VAR_57['codeExec']:\n",
"VAR_1.verbosity = -1\n",
"VAR_58 = getMime(VAR_14, legitExt)\n",
"VAR_56.a = VAR_20\n",
"VAR_59 = future.a\n",
"VAR_53 = VAR_58 if VAR_46['mime'] == 'legit' else VAR_42\n",
"VAR_44.append(VAR_56)\n",
"logging.info(\"\\x1b[1m\\x1b[42mCode execution obtained ('%s','%s','%s')\\x1b[m\",\n VAR_59['suffix'], VAR_59['mime'], VAR_59['templateName'])\n",
"VAR_52 = VAR_46['suffix'].replace('$legitExt$', legitExt).replace('$nastyExt$',\n nastyVariant)\n",
"VAR_25 += 1\n",
"VAR_26.append({'suffix': VAR_52, 'mime': VAR_53, 'templateName': VAR_40[\n 'templateName']})\n",
"VAR_23.append(VAR_59)\n",
"if not VAR_11.detectAllEntryPoints:\n"
] | [
"import re, requests, argparse, logging, os, coloredlogs, datetime, getpass, tempfile, itertools, json, concurrent.futures, random\n",
"from utils import *\n",
"from UploadForm import UploadForm\n",
"from threading import Lock\n",
"version = '0.5.0'\n",
"logging.basicConfig(datefmt='[%m/%d/%Y-%H:%M:%S]')\n",
"logger = logging.getLogger('fuxploider')\n",
"coloredlogs.install(logger=logger, fmt=\n '%(asctime)s %(levelname)s - %(message)s', level=logging.INFO)\n",
"logging.getLogger('requests').setLevel(logging.ERROR)\n",
"templatesFolder = 'payloads'\n",
"templates = json.loads(fd.read())\n",
"templatesNames = [x['templateName'] for x in templates]\n",
"templatesSection = \"\"\"[TEMPLATES]\nTemplates are malicious payloads meant to be uploaded on the scanned remote server. Code execution detection is done based on the expected output of the payload.\"\"\"\n",
"templatesSection += \"\"\"\n\tDefault templates are the following (name - description) : \"\"\"\n",
"for t in templates:\n",
"templatesSection += \"\\n\\t * '\" + t['templateName'] + \"' - \" + t['description']\n",
"parser = argparse.ArgumentParser(epilog=templatesSection, description=\n __doc__, formatter_class=argparse.RawTextHelpFormatter)\n",
"parser.add_argument('-d', '--data', metavar='postData', dest='data', help=\n 'Additionnal data to be transmitted via POST method. Example : -d \"key1=value1&key2=value2\"'\n , type=valid_postData)\n",
"parser.add_argument('--proxy', metavar='proxyUrl', dest='proxy', help=\n 'Proxy information. Example : --proxy \"user:[email protected]:8080\"',\n type=valid_proxyString)\n",
"parser.add_argument('--proxy-creds', metavar='credentials', nargs='?',\n const=True, dest='proxyCreds', help=\n \"Prompt for proxy credentials at runtime. Format : 'user:pass'\", type=\n valid_proxyCreds)\n",
"parser.add_argument('-f', '--filesize', metavar='integer', nargs=1, default\n =['10'], dest='size', help=\n 'File size to use for files to be created and uploaded (in kB).')\n",
"parser.add_argument('--cookies', metavar='omnomnom', nargs=1, dest=\n 'cookies', help=\n 'Cookies to use with HTTP requests. Example : PHPSESSID=aef45aef45afeaef45aef45&JSESSID=AQSEJHQSQSG'\n , type=valid_postData)\n",
"parser.add_argument('--uploads-path', default=[None], metavar='path', nargs\n =1, dest='uploadsPath', help=\n \"Path on the remote server where uploads are put. Example : '/tmp/uploads/'\"\n )\n",
"parser.add_argument('-t', '--template', metavar='templateName', nargs=1,\n dest='template', help=\n 'Malicious payload to use for code execution detection. Default is to use every known templates. For a complete list of templates, see the TEMPLATE section.'\n )\n",
"parser.add_argument('-r', '--regex-override', metavar='regex', nargs=1,\n dest='regexOverride', help=\n 'Specify a regular expression to detect code execution. Overrides the default code execution detection regex defined in the template in use.'\n , type=valid_regex)\n",
"requiredNamedArgs = parser.add_argument_group('Required named arguments')\n",
"requiredNamedArgs.add_argument('-u', '--url', metavar='target', dest='url',\n required=True, help=\n 'Web page URL containing the file upload form to be tested. Example : http://test.com/index.html?action=upload'\n , type=valid_url)\n",
"requiredNamedArgs.add_argument('--not-regex', metavar='regex', help=\n 'Regex matching an upload failure', type=valid_regex, dest='notRegex')\n",
"requiredNamedArgs.add_argument('--true-regex', metavar='regex', help=\n 'Regex matching an upload success', type=valid_regex, dest='trueRegex')\n",
"exclusiveArgs = parser.add_mutually_exclusive_group()\n",
"exclusiveArgs.add_argument('-l', '--legit-extensions', metavar=\n 'listOfExtensions', dest='legitExtensions', nargs=1, help=\n \"Legit extensions expected, for a normal use of the form, comma separated. Example : 'jpg,png,bmp'\"\n )\n",
"exclusiveArgs.add_argument('-n', metavar='n', nargs=1, default=['100'],\n dest='n', help='Number of common extensions to use. Example : -n 100',\n type=valid_nArg)\n",
"exclusiveVerbosityArgs = parser.add_mutually_exclusive_group()\n",
"exclusiveVerbosityArgs.add_argument('-v', action='store_true', required=\n False, dest='verbose', help='Verbose mode')\n",
"exclusiveVerbosityArgs.add_argument('-vv', action='store_true', required=\n False, dest='veryVerbose', help='Very verbose mode')\n",
"exclusiveVerbosityArgs.add_argument('-vvv', action='store_true', required=\n False, dest='veryVeryVerbose', help='Much verbose, very log, wow.')\n",
"parser.add_argument('-s', '--skip-recon', action='store_true', required=\n False, dest='skipRecon', help=\n 'Skip recon phase, where fuxploider tries to determine what extensions are expected and filtered by the server. Needs -l switch.'\n )\n",
"parser.add_argument('-y', action='store_true', required=False, dest=\n 'detectAllEntryPoints', help=\n 'Force detection of every entry points. Will not stop at first code exec found.'\n )\n",
"parser.add_argument('-T', '--threads', metavar='Threads', nargs=1, dest=\n 'nbThreads', help='Number of parallel tasks (threads).', type=int,\n default=[4])\n",
"exclusiveUserAgentsArgs = parser.add_mutually_exclusive_group()\n",
"exclusiveUserAgentsArgs.add_argument('-U', '--user-agent', metavar=\n 'useragent', nargs=1, dest='userAgent', help=\n 'User-agent to use while requesting the target.', type=str, default=[\n requests.utils.default_user_agent()])\n",
"exclusiveUserAgentsArgs.add_argument('--random-user-agent', action=\n 'store_true', required=False, dest='randomUserAgent', help=\n 'Use a random user-agent while requesting the target.')\n",
"manualFormArgs = parser.add_argument_group('Manual Form Detection arguments')\n",
"manualFormArgs.add_argument('-m', '--manual-form-detection', action=\n 'store_true', dest='manualFormDetection', help=\n 'Disable automatic form detection. Useful when automatic detection fails due to: (1) Form loaded using Javascript (2) Multiple file upload forms in URL.'\n )\n",
"manualFormArgs.add_argument('--input-name', metavar='image', dest=\n 'inputName', help=\n 'Name of input for file. Example: <input type=\"file\" name=\"image\">')\n",
"manualFormArgs.add_argument('--form-action', default='', metavar=\n 'upload.php', dest='formAction', help=\n 'Path of form action. Example: <form method=\"POST\" action=\"upload.php\">')\n",
"args = parser.parse_args()\n",
"args.uploadsPath = args.uploadsPath[0]\n",
"args.nbThreads = args.nbThreads[0]\n",
"args.userAgent = args.userAgent[0]\n",
"if args.randomUserAgent:\n",
"nb = 0\n",
"if args.template:\n",
"for l in fd:\n",
"args.template = args.template[0]\n",
"if args.regexOverride:\n",
"nb += 1\n",
"fd.seek(0)\n",
"if args.template not in templatesNames:\n",
"for t in templates:\n",
"args.verbosity = 0\n",
"nb = random.randint(0, nb)\n",
"logging.warning('Unknown template : %s', args.template)\n",
"templates = [[x for x in templates if x['templateName'] == args.template][0]]\n",
"t['codeExecRegex'] = args.regexOverride[0]\n",
"if args.verbose:\n",
"for i in range(0, nb):\n",
"cont = input('Use default templates instead ? [Y/n]')\n",
"args.verbosity = 1\n",
"if args.veryVerbose:\n",
"args.userAgent = fd.readline()[:-1]\n",
"if not cont.lower().startswith('y'):\n",
"args.verbosity = 2\n",
"if args.veryVeryVerbose:\n",
"exit()\n",
"args.verbosity = 3\n",
"logger.verbosity = args.verbosity\n",
"if args.verbosity > 0:\n",
"coloredlogs.install(logger=logger, fmt=\n '%(asctime)s %(levelname)s - %(message)s', level=logging.DEBUG)\n",
"if args.proxyCreds and args.proxy == None:\n",
"parser.error('--proxy-creds must be used with --proxy.')\n",
"if args.skipRecon and args.legitExtensions == None:\n",
"parser.error(\n '-s switch needs -l switch. Cannot skip recon phase without any known entry point.'\n )\n",
"args.n = int(args.n[0])\n",
"args.size = int(args.size[0])\n",
"args.size = 1024 * args.size\n",
"if not args.notRegex and not args.trueRegex:\n",
"parser.error(\n 'At least one detection method must be provided, either with --not-regex or with --true-regex.'\n )\n",
"if args.legitExtensions:\n",
"args.legitExtensions = args.legitExtensions[0].split(',')\n",
"if args.cookies:\n",
"args.cookies = postDataFromStringToJSON(args.cookies[0])\n",
"if args.manualFormDetection and args.inputName is None:\n",
"parser.error('--manual-form-detection requires --input-name')\n",
"print(\n \"\"\"\u001b[1;32m\n \n ___ _ _ _ \n| _|_ _ _ _ ___| |___|_|_| |___ ___ \n| _| | |_'_| . | | . | | . | -_| _|\n|_| |___|_,_| _|_|___|_|___|___|_| \n |_| \n\n\u001b[1m\u001b[42m{version \"\"\"\n + version +\n \"\"\"}\u001b[m\n\n\u001b[m[!] legal disclaimer : Usage of fuxploider for attacking targets without prior mutual consent is illegal. It is the end user's responsibility to obey all applicable local, state and federal laws. Developers assume no liability and are not responsible for any misuse or damage caused by this program\n\t\"\"\"\n )\n",
"if args.proxyCreds == True:\n",
"args.proxyCreds = {}\n",
"now = datetime.datetime.now()\n",
"args.proxyCreds['username'] = input('Proxy username : ')\n",
"print('[*] starting at ' + str(now.hour) + ':' + str(now.minute) + ':' +\n str(now.second))\n",
"args.proxyCreds['password'] = getpass.getpass('Proxy password : ')\n",
"mimeFile = 'mimeTypes.basic'\n",
"extensions = loadExtensions('file', mimeFile)\n",
"tmpLegitExt = []\n",
"if args.legitExtensions:\n",
"args.legitExtensions = [x.lower() for x in args.legitExtensions]\n",
"args.legitExtensions = tmpLegitExt\n",
"foundExt = [a[0] for a in extensions]\n",
"postData = postDataFromStringToJSON(args.data)\n",
"for b in args.legitExtensions:\n",
"s = requests.Session()\n",
"if b in foundExt:\n",
"if args.cookies:\n",
"tmpLegitExt.append(b)\n",
"logging.warning(\n \"Extension %s can't be found as a valid/known extension with associated mime type.\"\n , b)\n",
"for key in args.cookies.keys():\n",
"s.headers = {'User-Agent': args.userAgent}\n",
"s.cookies[key] = args.cookies[key]\n",
"s.trust_env = False\n",
"if args.proxy:\n",
"if args.proxy['username'] and args.proxy['password'] and args.proxyCreds:\n",
"if args.manualFormDetection:\n",
"logging.warning(\n 'Proxy username and password provided by the --proxy-creds switch replaces credentials provided using the --proxy switch'\n )\n",
"if args.proxyCreds:\n",
"if args.formAction == '':\n",
"up = UploadForm(args.notRegex, args.trueRegex, s, args.size, postData, args\n .uploadsPath)\n",
"proxyUser = args.proxyCreds['username']\n",
"proxyUser = args.proxy['username']\n",
"logger.warning(\n 'Using Manual Form Detection and no action specified with --form-action. Defaulting to empty string - meaning form action will be set to --url parameter.'\n )\n",
"up = UploadForm(args.notRegex, args.trueRegex, s, args.size, postData, args\n .uploadsPath, args.url, args.formAction, args.inputName)\n",
"up.setup(args.url)\n",
"proxyPass = args.proxyCreds['password']\n",
"proxyPass = args.proxy['password']\n",
"up.threads = args.nbThreads\n",
"proxyProtocol = args.proxy['protocol']\n",
"uploadURL = up.uploadUrl\n",
"proxyHostname = args.proxy['hostname']\n",
"fileInput = {'name': up.inputName}\n",
"proxyPort = args.proxy['port']\n",
"a = datetime.datetime.now()\n",
"proxy = ''\n",
"if not args.skipRecon:\n",
"if proxyProtocol != None:\n",
"if len(args.legitExtensions) > 0:\n",
"logger.info(\n '### Skipping detection of valid extensions, using provided extensions instead (%s)'\n , args.legitExtensions)\n",
"proxy += proxyProtocol + '://'\n",
"proxy += 'http://'\n",
"n = up.detectValidExtensions(extensions, args.n, args.legitExtensions)\n",
"n = up.detectValidExtensions(extensions, args.n)\n",
"up.validExtensions = args.legitExtensions\n",
"if proxyUser != None and proxyPass != None:\n",
"logger.info('### Tried %s extensions, %s are valid.', n, len(up.\n validExtensions))\n",
"if up.validExtensions == []:\n",
"proxy += proxyUser + ':' + proxyPass + '@'\n",
"proxy += proxyHostname\n",
"logger.error('No valid extension found.')\n",
"b = datetime.datetime.now()\n",
"if proxyPort != None:\n",
"exit()\n",
"print('Extensions detection : ' + str(b - a))\n",
"proxy += ':' + proxyPort\n",
"if proxyProtocol == 'https':\n",
"cont = input('Start uploading payloads ? [Y/n] : ')\n",
"proxies = {'https': proxy}\n",
"proxies = {'http': proxy, 'https': proxy}\n",
"up.shouldLog = True\n",
"s.proxies.update(proxies)\n",
"if cont.lower().startswith('y') or cont == '':\n",
"exit('Exiting.')\n",
"entryPoints = []\n",
"up.stopThreads = True\n",
"techniques = json.loads(rawTechniques.read())\n",
"logger.info(\n '### Starting code execution detection (messing with file extensions and mime types...)'\n )\n",
"c = datetime.datetime.now()\n",
"nbOfEntryPointsFound = 0\n",
"attempts = []\n",
"templatesData = {}\n",
"for template in templates:\n",
"templatefd = open(templatesFolder + '/' + template['filename'], 'rb')\n",
"stopThreads = False\n",
"templatesData[template['templateName']] = templatefd.read()\n",
"attemptsTested = 0\n",
"templatefd.close()\n",
"futures = []\n",
"nastyExt = template['nastyExt']\n",
"for a in attempts:\n",
"stopThreads = True\n",
"d = datetime.datetime.now()\n",
"nastyMime = getMime(extensions, nastyExt)\n",
"suffix = a['suffix']\n",
"for future in concurrent.futures.as_completed(futures):\n",
"executor.shutdown(wait=False)\n",
"print()\n",
"nastyExtVariants = template['extVariants']\n",
"mime = a['mime']\n",
"res = future.result()\n",
"executor._threads.clear()\n",
"logging.info('%s entry point(s) found using %s HTTP requests.',\n nbOfEntryPointsFound, up.httpRequests)\n",
"for t in techniques:\n",
"payload = templatesData[a['templateName']]\n",
"attemptsTested += 1\n",
"concurrent.futures.thread._threads_queues.clear()\n",
"print('Found the following entry points : ')\n",
"for nastyVariant in ([nastyExt] + nastyExtVariants):\n",
"codeExecRegex = [t['codeExecRegex'] for t in templates if t['templateName'] ==\n a['templateName']][0]\n",
"if not stopThreads:\n",
"logger.setLevel(logging.CRITICAL)\n",
"print(entryPoints)\n",
"for legitExt in up.validExtensions:\n",
"f = executor.submit(up.submitTestCase, suffix, mime, payload, codeExecRegex)\n",
"if res['codeExec']:\n",
"logger.verbosity = -1\n",
"legitMime = getMime(extensions, legitExt)\n",
"f.a = a\n",
"foundEntryPoint = future.a\n",
"mime = legitMime if t['mime'] == 'legit' else nastyMime\n",
"futures.append(f)\n",
"logging.info(\"\\x1b[1m\\x1b[42mCode execution obtained ('%s','%s','%s')\\x1b[m\",\n foundEntryPoint['suffix'], foundEntryPoint['mime'], foundEntryPoint[\n 'templateName'])\n",
"suffix = t['suffix'].replace('$legitExt$', legitExt).replace('$nastyExt$',\n nastyVariant)\n",
"nbOfEntryPointsFound += 1\n",
"attempts.append({'suffix': suffix, 'mime': mime, 'templateName': template[\n 'templateName']})\n",
"entryPoints.append(foundEntryPoint)\n",
"if not args.detectAllEntryPoints:\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
7,
0,
0,
0,
7,
0,
0,
0,
0,
7,
0,
0,
0,
0,
7,
0,
0,
0,
0,
7,
0,
0,
0,
0,
7,
7,
0,
0,
7,
0,
0,
7,
0,
0,
7,
0,
7,
0,
0
] | [
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"AugAssign'",
"For",
"AugAssign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"For",
"Assign'",
"Condition",
"AugAssign'",
"Expr'",
"Condition",
"For",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Expr'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Expr'",
"Condition",
"For",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Expr'",
"AugAssign'",
"AugAssign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"AugAssign'",
"AugAssign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"AugAssign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"For",
"Assign'",
"AugAssign'",
"Expr'",
"Expr'",
"For",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"For",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"AugAssign'",
"Expr'",
"Expr'",
"Condition"
] |
[
"def FUNC_6(self, VAR_7, VAR_5):...\n",
"VAR_8 = VAR_7.connect()\n",
"VAR_3 = VAR_8.execution_options(no_parameters=True).execute(VAR_5)\n",
"return VAR_3\n"
] | [
"def _execute_with_engine(self, engine, query):...\n",
"connection = engine.connect()\n",
"result = connection.execution_options(no_parameters=True).execute(query)\n",
"return result\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"return 'check user_test %s' % self.user_test_id\n"
] | [
"def describe(self):...\n",
"return 'check user_test %s' % self.user_test_id\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"from osv import osv\n",
"from tools.translate import _\n",
"VAR_0 = 'pos.close.statement'\n",
"VAR_1 = 'Close Statements'\n",
"def FUNC_0(self, VAR_2, VAR_3, VAR_4, VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = self.pool.get('res.users').browse(VAR_2, VAR_3, VAR_3).company_id.id\n",
"VAR_7 = []\n",
"VAR_8 = self.pool.get('ir.model.data')\n",
"VAR_9 = self.pool.get('account.bank.statement')\n",
"VAR_10 = self.pool.get('account.journal')\n",
"VAR_2.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % VAR_3)\n",
"VAR_11 = map(lambda x1: x1[0], VAR_2.fetchall())\n",
"VAR_2.execute('string' % ','.join(map(lambda x: \"'\" + str(x) + \"'\", VAR_11)))\n",
"VAR_12 = map(lambda x1: x1[0], VAR_2.fetchall())\n",
"for journal in VAR_10.browse(VAR_2, VAR_3, VAR_12):\n",
"VAR_4 = VAR_9.search(VAR_2, VAR_3, [('state', '!=', 'confirm'), ('user_id',\n '=', VAR_3), ('journal_id', '=', journal.id)])\n",
"VAR_13 = self.pool.get('ir.model.data')\n",
"if not VAR_4:\n",
"VAR_14 = VAR_13._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_tree')\n",
"VAR_7.append(VAR_4[0])\n",
"VAR_15 = VAR_13._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_form2')\n",
"if not journal.check_dtls:\n",
"if VAR_14:\n",
"VAR_9.button_confirm_cash(VAR_2, VAR_3, VAR_4, VAR_5)\n",
"VAR_14 = VAR_13.browse(VAR_2, VAR_3, VAR_14, VAR_5=context).res_id\n",
"if VAR_15:\n",
"VAR_15 = VAR_13.browse(VAR_2, VAR_3, VAR_15, VAR_5=context).res_id\n",
"return {'domain': \"[('id','in',\" + str(VAR_7) + ')]', 'name':\n 'Close Statements', 'view_type': 'form', 'view_mode': 'tree,form',\n 'res_model': 'account.bank.statement', 'views': [(VAR_14, 'tree'), (\n VAR_15, 'form')], 'type': 'ir.actions.act_window'}\n"
] | [
"from osv import osv\n",
"from tools.translate import _\n",
"_name = 'pos.close.statement'\n",
"_description = 'Close Statements'\n",
"def close_statement(self, cr, uid, ids, context):...\n",
"\"\"\"docstring\"\"\"\n",
"company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id\n",
"list_statement = []\n",
"mod_obj = self.pool.get('ir.model.data')\n",
"statement_obj = self.pool.get('account.bank.statement')\n",
"journal_obj = self.pool.get('account.journal')\n",
"cr.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % uid)\n",
"j_ids = map(lambda x1: x1[0], cr.fetchall())\n",
"cr.execute(\n \"\"\" select id from account_journal\n where auto_cash='True' and type='cash'\n and id in (%s)\"\"\"\n % ','.join(map(lambda x: \"'\" + str(x) + \"'\", j_ids)))\n",
"journal_ids = map(lambda x1: x1[0], cr.fetchall())\n",
"for journal in journal_obj.browse(cr, uid, journal_ids):\n",
"ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id',\n '=', uid), ('journal_id', '=', journal.id)])\n",
"data_obj = self.pool.get('ir.model.data')\n",
"if not ids:\n",
"id2 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_tree')\n",
"list_statement.append(ids[0])\n",
"id3 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_form2')\n",
"if not journal.check_dtls:\n",
"if id2:\n",
"statement_obj.button_confirm_cash(cr, uid, ids, context)\n",
"id2 = data_obj.browse(cr, uid, id2, context=context).res_id\n",
"if id3:\n",
"id3 = data_obj.browse(cr, uid, id3, context=context).res_id\n",
"return {'domain': \"[('id','in',\" + str(list_statement) + ')]', 'name':\n 'Close Statements', 'view_type': 'form', 'view_mode': 'tree,form',\n 'res_model': 'account.bank.statement', 'views': [(id2, 'tree'), (id3,\n 'form')], 'type': 'ir.actions.act_window'}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_6(VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"return {'password_reset': get_password_reset_form().to_json(), 'login':\n get_login_session_form().to_json(), 'registration':\n RegistrationFormFactory().get_registration_form(VAR_3).to_json()}\n"
] | [
"def _get_form_descriptions(request):...\n",
"\"\"\"docstring\"\"\"\n",
"return {'password_reset': get_password_reset_form().to_json(), 'login':\n get_login_session_form().to_json(), 'registration':\n RegistrationFormFactory().get_registration_form(request).to_json()}\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_0(VAR_0, VAR_1, VAR_2=None, VAR_3=False):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = posixpath.normpath(VAR_1)\n",
"VAR_1 = VAR_1.lstrip('/')\n",
"VAR_10 = ''\n",
"for VAR_18 in VAR_1.split('/'):\n",
"if not VAR_18:\n",
"if VAR_10 and VAR_1 != VAR_10:\n",
"VAR_17, VAR_18 = os.path.splitdrive(VAR_18)\n",
"return HttpResponseRedirect(VAR_10)\n",
"VAR_6 = os.path.join(VAR_2, VAR_10)\n",
"VAR_19, VAR_18 = os.path.split(VAR_18)\n",
"if os.path.isdir(VAR_6):\n",
"if VAR_18 in (os.curdir, os.pardir):\n",
"if VAR_3:\n",
"if not os.path.exists(VAR_6):\n",
"VAR_10 = os.path.join(VAR_10, VAR_18).replace('\\\\', '/')\n",
"return FUNC_1(VAR_10, VAR_6)\n",
"VAR_11 = os.stat(VAR_6)\n",
"if not FUNC_2(VAR_0.META.get('HTTP_IF_MODIFIED_SINCE'), VAR_11.st_mtime,\n",
"return HttpResponseNotModified()\n",
"VAR_12, VAR_13 = mimetypes.guess_type(VAR_6)\n",
"VAR_12 = VAR_12 or 'application/octet-stream'\n",
"VAR_14 = FileResponse(open(VAR_6, 'rb'), VAR_12=content_type)\n",
"VAR_14['Last-Modified'] = http_date(VAR_11.st_mtime)\n",
"if stat.S_ISREG(VAR_11.st_mode):\n",
"VAR_14['Content-Length'] = VAR_11.st_size\n",
"if VAR_13:\n",
"VAR_14['Content-Encoding'] = VAR_13\n",
"return VAR_14\n"
] | [
"def serve(request, path, document_root=None, show_indexes=False):...\n",
"\"\"\"docstring\"\"\"\n",
"path = posixpath.normpath(path)\n",
"path = path.lstrip('/')\n",
"newpath = ''\n",
"for part in path.split('/'):\n",
"if not part:\n",
"if newpath and path != newpath:\n",
"drive, part = os.path.splitdrive(part)\n",
"return HttpResponseRedirect(newpath)\n",
"fullpath = os.path.join(document_root, newpath)\n",
"head, part = os.path.split(part)\n",
"if os.path.isdir(fullpath):\n",
"if part in (os.curdir, os.pardir):\n",
"if show_indexes:\n",
"if not os.path.exists(fullpath):\n",
"newpath = os.path.join(newpath, part).replace('\\\\', '/')\n",
"return directory_index(newpath, fullpath)\n",
"statobj = os.stat(fullpath)\n",
"if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),\n",
"return HttpResponseNotModified()\n",
"content_type, encoding = mimetypes.guess_type(fullpath)\n",
"content_type = content_type or 'application/octet-stream'\n",
"response = FileResponse(open(fullpath, 'rb'), content_type=content_type)\n",
"response['Last-Modified'] = http_date(statobj.st_mtime)\n",
"if stat.S_ISREG(statobj.st_mode):\n",
"response['Content-Length'] = statobj.st_size\n",
"if encoding:\n",
"response['Content-Encoding'] = encoding\n",
"return response\n"
] | [
0,
0,
6,
6,
6,
6,
6,
6,
6,
6,
6,
6,
0,
6,
0,
0,
6,
6,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_4 = '/api/keys'\n",
"VAR_5 = {'id': 'autotest', 'public': 'string'}\n",
"VAR_6 = self.client.post(VAR_4, json.dumps(VAR_5), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_6.status_code, 201)\n",
"VAR_8 = Key.objects.get(uuid=response.data['uuid'])\n",
"self.assertEqual(str(VAR_8),\n 'ssh-rsa AAAAB3NzaC.../HJDw9QckTS0vN [email protected]')\n"
] | [
"def test_rsa_key_str(self):...\n",
"\"\"\"docstring\"\"\"\n",
"url = '/api/keys'\n",
"body = {'id': 'autotest', 'public':\n 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDzqPAwHN70xsB0LXG//KzOgcPikyhdN/KRc4x3j/RA0pmFj63Ywv0PJ2b1LcMSqfR8F11WBlrW8c9xFua0ZAKzI+gEk5uqvOR78bs/SITOtKPomW4e/1d2xEkJqOmYH30u94+NZZYwEBqYaRb34fhtrnJS70XeGF0RhXE5Qea5eh7DBbeLxPfSYd8rfHgzMSb/wmx3h2vmHdQGho20pfJktNu7DxeVkTHn9REMUphf85su7slTgTlWKq++3fASE8PdmFGzb6PkOR4c+LS5WWXd2oM6HyBQBxxiwXbA2lSgQxOdgDiM2FzT0GVSFMUklkUHMdsaG6/HJDw9QckTS0vN [email protected]'\n }\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"key = Key.objects.get(uuid=response.data['uuid'])\n",
"self.assertEqual(str(key),\n 'ssh-rsa AAAAB3NzaC.../HJDw9QckTS0vN [email protected]')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_15(self, VAR_6, VAR_9):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_20 = None\n",
"VAR_10 = self.common._safe_hostname(VAR_9['host'])\n",
"VAR_25 = self.common.get_cpg(VAR_6, allowSnap=True)\n",
"VAR_12 = self.common.get_domain(VAR_25)\n",
"VAR_20 = self.common._get_3par_host(VAR_10)\n",
"VAR_13 = self.common.get_persona_type(VAR_6)\n",
"return VAR_20\n",
"if not VAR_20['FCPaths']:\n",
"VAR_10 = self._create_3par_fibrechan_host(VAR_10, VAR_9['wwpns'], VAR_12,\n VAR_13)\n",
"self._modify_3par_fibrechan_host(VAR_10, VAR_9['wwpns'])\n",
"VAR_20 = self.common._get_3par_host(VAR_10)\n",
"VAR_20 = self.common._get_3par_host(VAR_10)\n"
] | [
"def _create_host(self, volume, connector):...\n",
"\"\"\"docstring\"\"\"\n",
"host = None\n",
"hostname = self.common._safe_hostname(connector['host'])\n",
"cpg = self.common.get_cpg(volume, allowSnap=True)\n",
"domain = self.common.get_domain(cpg)\n",
"host = self.common._get_3par_host(hostname)\n",
"persona_id = self.common.get_persona_type(volume)\n",
"return host\n",
"if not host['FCPaths']:\n",
"hostname = self._create_3par_fibrechan_host(hostname, connector['wwpns'],\n domain, persona_id)\n",
"self._modify_3par_fibrechan_host(hostname, connector['wwpns'])\n",
"host = self.common._get_3par_host(hostname)\n",
"host = self.common._get_3par_host(hostname)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"def FUNC_4(VAR_14, VAR_2, VAR_15, VAR_3, VAR_4):...\n",
"VAR_1 = helper.get_header(VAR_4, 'Subject', '(none)')\n",
"if not FUNC_0(VAR_1):\n",
"return\n",
"VAR_21 = VAR_4.get('From', 'anonymous')\n",
"VAR_28 = student.resolve_alias(VAR_2, VAR_21)\n",
"sendmail.send_template(VAR_14, VAR_21, 'Mail fehlerhaft: %s' % VAR_1,\n 'mail_sheet_not_found.html')\n",
"VAR_29 = FUNC_1(VAR_2, VAR_3, VAR_4)\n",
"VAR_30 = VAR_35.time()\n",
"VAR_31 = datetime.datetime.fromtimestamp(VAR_30)\n",
"VAR_32 = VAR_31.strftime('%Y-%m-%d_%H-%M-%S_%f')\n",
"VAR_8 = os.path.join(VAR_14('attachment_path'), helper.escape_filename(str(\n VAR_28.id)), helper.escape_filename(str(VAR_29.id)), helper.\n escape_filename(VAR_32))\n",
"if os.path.exists(VAR_8):\n",
"VAR_42 = VAR_8\n",
"VAR_33 = FUNC_2(VAR_2, VAR_29.id, VAR_28.id, int(VAR_30), VAR_8)\n",
"for i in itertools.count(2):\n",
"VAR_34 = b''\n",
"VAR_8 = '%s___%s' % (VAR_42, i)\n",
"os.makedirs(VAR_8)\n",
"if not os.path.exists(VAR_8):\n",
"for subpart in VAR_4.walk():\n",
"VAR_43 = subpart.get_filename()\n",
"if VAR_34:\n",
"VAR_44 = subpart.get_payload(decode=True)\n",
"VAR_45 = os.path.join(VAR_8, 'mail')\n",
"commands.move(VAR_14, VAR_15, VAR_3, 'Abgaben')\n",
"if not VAR_44:\n",
"VAR_46 = len(VAR_34)\n",
"sendmail.send_template(VAR_14, VAR_21, 'Mail erhalten: %s' % VAR_1,\n 'mail_received.html')\n",
"if VAR_43:\n",
"VAR_47 = 'sha256-%s' % hashlib.sha256(VAR_34).hexdigest()\n",
"VAR_48 = helper.escape_filename(VAR_43)\n",
"if VAR_34:\n",
"payload_file.write(VAR_34)\n",
"VAR_45 = os.path.join(VAR_8, VAR_48)\n",
"VAR_34 += b'\\n\\n--- Part ---\\n'\n",
"VAR_34 += VAR_44\n",
"FUNC_3(VAR_2, VAR_33.id, VAR_47, 'mail', VAR_46)\n",
"VAR_46 = len(VAR_44)\n",
"VAR_47 = 'sha256-%s' % hashlib.sha256(VAR_44).hexdigest()\n",
"payload_file.write(VAR_44)\n",
"FUNC_3(VAR_2, VAR_33.id, VAR_47, VAR_48, VAR_46)\n"
] | [
"def handle_mail(config, db, imapmail, uid, message):...\n",
"subject = helper.get_header(message, 'Subject', '(none)')\n",
"if not _match_subject(subject):\n",
"return\n",
"alias = message.get('From', 'anonymous')\n",
"stu = student.resolve_alias(db, alias)\n",
"sendmail.send_template(config, alias, 'Mail fehlerhaft: %s' % subject,\n 'mail_sheet_not_found.html')\n",
"sheet = sheet_by_mail(db, uid, message)\n",
"now_ts = time.time()\n",
"now_dt = datetime.datetime.fromtimestamp(now_ts)\n",
"now_str = now_dt.strftime('%Y-%m-%d_%H-%M-%S_%f')\n",
"files_path = os.path.join(config('attachment_path'), helper.escape_filename\n (str(stu.id)), helper.escape_filename(str(sheet.id)), helper.\n escape_filename(now_str))\n",
"if os.path.exists(files_path):\n",
"orig_files_path = files_path\n",
"subm = create(db, sheet.id, stu.id, int(now_ts), files_path)\n",
"for i in itertools.count(2):\n",
"mailtext = b''\n",
"files_path = '%s___%s' % (orig_files_path, i)\n",
"os.makedirs(files_path)\n",
"if not os.path.exists(files_path):\n",
"for subpart in message.walk():\n",
"fn = subpart.get_filename()\n",
"if mailtext:\n",
"payload = subpart.get_payload(decode=True)\n",
"payload_path = os.path.join(files_path, 'mail')\n",
"commands.move(config, imapmail, uid, 'Abgaben')\n",
"if not payload:\n",
"payload_size = len(mailtext)\n",
"sendmail.send_template(config, alias, 'Mail erhalten: %s' % subject,\n 'mail_received.html')\n",
"if fn:\n",
"hash_str = 'sha256-%s' % hashlib.sha256(mailtext).hexdigest()\n",
"payload_name = helper.escape_filename(fn)\n",
"if mailtext:\n",
"payload_file.write(mailtext)\n",
"payload_path = os.path.join(files_path, payload_name)\n",
"mailtext += b'\\n\\n--- Part ---\\n'\n",
"mailtext += payload\n",
"add_file(db, subm.id, hash_str, 'mail', payload_size)\n",
"payload_size = len(payload)\n",
"hash_str = 'sha256-%s' % hashlib.sha256(payload).hexdigest()\n",
"payload_file.write(payload)\n",
"add_file(db, subm.id, hash_str, payload_name, payload_size)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"For",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"AugAssign'",
"AugAssign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(VAR_6):...\n",
"for settings_key in VAR_12:\n",
"assert VAR_5.registry.settings.get(settings_key)\n"
] | [
"def assert_settings_keys(keys):...\n",
"for settings_key in key:\n",
"assert config.registry.settings.get(settings_key)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"For",
"Assert'"
] |
[
"\"\"\"\nCreated on Sun Jan 14 21:55:57 2018\n\n@author: adam\n\"\"\"\n",
"import os\n",
"import warnings\n",
"import sqlite3\n",
"import datetime\n",
"from collections.abc import Iterable\n",
"from ast import literal_eval\n",
"import numpy as np\n",
"import pandas as pd\n",
"from .core import DATA_DIRE\n",
"\"\"\" `There was an accident with a contraceptive and a time machine.`\n \"\"\"\n",
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14 = os.path.join(DATA_DIRE, VAR_0 + '.db')\n",
"return VAR_14\n"
] | [
"\"\"\"\nCreated on Sun Jan 14 21:55:57 2018\n\n@author: adam\n\"\"\"\n",
"import os\n",
"import warnings\n",
"import sqlite3\n",
"import datetime\n",
"from collections.abc import Iterable\n",
"from ast import literal_eval\n",
"import numpy as np\n",
"import pandas as pd\n",
"from .core import DATA_DIRE\n",
"\"\"\" `There was an accident with a contraceptive and a time machine.`\n \"\"\"\n",
"def db_path(name):...\n",
"\"\"\"docstring\"\"\"\n",
"fil = os.path.join(DATA_DIRE, name + '.db')\n",
"return fil\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"Expr'",
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def __init__(self, *VAR_5, **VAR_6):...\n",
"VAR_29 = VAR_6.get('default', None)\n",
"if VAR_29 is not None:\n",
"VAR_6['default'] = pickle.dumps(VAR_29)\n",
"super().__init__(*VAR_5, **kwargs)\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"default = kwargs.get('default', None)\n",
"if default is not None:\n",
"kwargs['default'] = pickle.dumps(default)\n",
"super().__init__(*args, **kwargs)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_16(self, VAR_35=True):...\n",
"\"\"\"docstring\"\"\"\n",
"self.match_filters = []\n",
"self.match_conditions = []\n",
"VAR_54 = False\n",
"if not self.user:\n",
"self.user = frappe.session.user\n",
"if not self.tables:\n",
"self.extract_tables()\n",
"VAR_1 = frappe.get_meta(self.doctype)\n",
"VAR_55 = frappe.permissions.get_role_permissions(VAR_1, VAR_19=self.user)\n",
"self.shared = frappe.share.get_shared(self.doctype, self.user)\n",
"if not VAR_1.istable and not VAR_55.get('read'\n",
"VAR_54 = True\n",
"if VAR_55.get('apply_user_permissions', {}).get('read'):\n",
"if not self.shared:\n",
"VAR_36 = frappe.permissions.get_user_permissions(self.user)\n",
"if VAR_55.get('if_owner', {}).get('read'):\n",
"frappe.throw(_('No permission to read {0}').format(self.doctype), frappe.\n PermissionError)\n",
"self.conditions.append(self.get_share_condition())\n",
"self.add_user_permissions(VAR_36, VAR_37=role_permissions.get(\n 'user_permission_doctypes').get('read'))\n",
"self.match_conditions.append(\"`tab{0}`.owner = '{1}'\".format(self.doctype,\n frappe.db.escape(self.user, percent=False)))\n",
"if VAR_35:\n",
"VAR_33 = ''\n",
"return self.match_filters\n",
"if self.match_conditions:\n",
"VAR_33 = '((' + ') or ('.join(self.match_conditions) + '))'\n",
"VAR_63 = self.get_permission_query_conditions()\n",
"if VAR_63:\n",
"VAR_33 += ' and ' + VAR_63 if VAR_33 else VAR_63\n",
"if not VAR_54 and self.shared and VAR_33:\n",
"VAR_33 = '({conditions}) or ({shared_condition})'.format(VAR_33=conditions,\n shared_condition=self.get_share_condition())\n",
"return VAR_33\n"
] | [
"def build_match_conditions(self, as_condition=True):...\n",
"\"\"\"docstring\"\"\"\n",
"self.match_filters = []\n",
"self.match_conditions = []\n",
"only_if_shared = False\n",
"if not self.user:\n",
"self.user = frappe.session.user\n",
"if not self.tables:\n",
"self.extract_tables()\n",
"meta = frappe.get_meta(self.doctype)\n",
"role_permissions = frappe.permissions.get_role_permissions(meta, user=self.user\n )\n",
"self.shared = frappe.share.get_shared(self.doctype, self.user)\n",
"if not meta.istable and not role_permissions.get('read'\n",
"only_if_shared = True\n",
"if role_permissions.get('apply_user_permissions', {}).get('read'):\n",
"if not self.shared:\n",
"user_permissions = frappe.permissions.get_user_permissions(self.user)\n",
"if role_permissions.get('if_owner', {}).get('read'):\n",
"frappe.throw(_('No permission to read {0}').format(self.doctype), frappe.\n PermissionError)\n",
"self.conditions.append(self.get_share_condition())\n",
"self.add_user_permissions(user_permissions, user_permission_doctypes=\n role_permissions.get('user_permission_doctypes').get('read'))\n",
"self.match_conditions.append(\"`tab{0}`.owner = '{1}'\".format(self.doctype,\n frappe.db.escape(self.user, percent=False)))\n",
"if as_condition:\n",
"conditions = ''\n",
"return self.match_filters\n",
"if self.match_conditions:\n",
"conditions = '((' + ') or ('.join(self.match_conditions) + '))'\n",
"doctype_conditions = self.get_permission_query_conditions()\n",
"if doctype_conditions:\n",
"conditions += (' and ' + doctype_conditions if conditions else\n doctype_conditions)\n",
"if not only_if_shared and self.shared and conditions:\n",
"conditions = '({conditions}) or ({shared_condition})'.format(conditions=\n conditions, shared_condition=self.get_share_condition())\n",
"return conditions\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"AugAssign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_12(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return replication.decode_link_ticket(self.request.get('t').encode('ascii'))\n",
"self.abort(400)\n",
"return\n"
] | [
"def decode_link_ticket(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return replication.decode_link_ticket(self.request.get('t').encode('ascii'))\n",
"self.abort(400)\n",
"return\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'",
"Expr'",
"Return'"
] |
[
"def FUNC_4():...\n",
"VAR_22 = self.request.body\n",
"VAR_23 = json.loads(VAR_22.decode('utf-8', 'ignore'))\n",
"VAR_24 = VAR_23['action']\n",
"VAR_25 = VAR_23['source']\n",
"if type(VAR_25) == list:\n",
"for VAR_31 in range(0, len(VAR_25)):\n",
"VAR_25 = FUNC_1(VAR_25)\n",
"VAR_25[VAR_31] = FUNC_1(VAR_25[VAR_31])\n",
"if VAR_24 in ['copy', 'move']:\n",
"if VAR_24 in ['rename', 'new-folder']:\n",
"VAR_30 = FUNC_1(VAR_23['target'])\n",
"VAR_30 = '/'\n",
"if VAR_24 == 'copy':\n",
"VAR_30 = VAR_23['target']\n",
"VAR_30 = VAR_25\n",
"for source in VAR_25:\n",
"if VAR_24 == 'move':\n",
"db.Filesystem.copy(source, VAR_30, new_owner='user-cp')\n",
"VAR_6.set_result('')\n",
"for source in VAR_25:\n",
"if VAR_24 == 'delete':\n",
"db.Filesystem.move(source, VAR_30)\n",
"for source in VAR_25:\n",
"if VAR_24 == 'rename':\n",
"db.Filesystem.remove(source)\n",
"db.Filesystem.rename(VAR_25, VAR_30)\n",
"if VAR_24 == 'new-folder':\n",
"db.Filesystem.mkdir(VAR_25, VAR_30, 'user-nf')\n"
] | [
"def get_final_html_async():...\n",
"operation_content_raw = self.request.body\n",
"operation_content = json.loads(operation_content_raw.decode('utf-8', 'ignore'))\n",
"action = operation_content['action']\n",
"sources = operation_content['source']\n",
"if type(sources) == list:\n",
"for i in range(0, len(sources)):\n",
"sources = decode_hexed_b64_to_str(sources)\n",
"sources[i] = decode_hexed_b64_to_str(sources[i])\n",
"if action in ['copy', 'move']:\n",
"if action in ['rename', 'new-folder']:\n",
"target = decode_hexed_b64_to_str(operation_content['target'])\n",
"target = '/'\n",
"if action == 'copy':\n",
"target = operation_content['target']\n",
"target = sources\n",
"for source in sources:\n",
"if action == 'move':\n",
"db.Filesystem.copy(source, target, new_owner='user-cp')\n",
"future.set_result('')\n",
"for source in sources:\n",
"if action == 'delete':\n",
"db.Filesystem.move(source, target)\n",
"for source in sources:\n",
"if action == 'rename':\n",
"db.Filesystem.remove(source)\n",
"db.Filesystem.rename(sources, target)\n",
"if action == 'new-folder':\n",
"db.Filesystem.mkdir(sources, target, 'user-nf')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"Condition",
"Expr'",
"Expr'",
"For",
"Condition",
"Expr'",
"For",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._state\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._state\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_18(VAR_30, VAR_9, VAR_31, VAR_6, VAR_11, **VAR_12):...\n",
"if VAR_31 is None:\n",
"VAR_31 = VAR_23[VAR_9].urlstring\n",
"VAR_52 = join(VAR_27, VAR_9)\n",
"VAR_53 = os.path.realpath(VAR_52)\n",
"if not VAR_53.startswith(VAR_26):\n",
"VAR_54 = CLASS_1.create(VAR_9, VAR_31, VAR_52, VAR_6, VAR_11, **kwargs)\n",
"VAR_55 = VAR_23.setdefault(VAR_9, VAR_54)\n",
"if VAR_55 is VAR_54:\n",
"VAR_55.parents.add(VAR_30)\n",
"if VAR_54.exclude_from_cmake != VAR_55.exclude_from_cmake:\n",
"if VAR_8:\n",
"VAR_68 = [join(VAR_30.directory, dependency_file) for VAR_30 in VAR_55.parents]\n",
"if not VAR_54.same_checkout(VAR_55) and VAR_31 is not None:\n",
"VAR_55.update()\n",
"VAR_28.append(VAR_55)\n",
"VAR_69 = join(VAR_30.directory, dependency_file)\n",
"VAR_70 = [join(VAR_30.directory, dependency_file) for VAR_30 in VAR_55.parents]\n",
"for VAR_29, VAR_51 in VAR_6.items():\n",
"VAR_30.children.add(VAR_55)\n",
"VAR_30 = join(VAR_30.directory, dependency_file)\n",
"VAR_55.options.setdefault(VAR_29, VAR_51)\n",
"if VAR_55.options[VAR_29] != VAR_51:\n"
] | [
"def add_module(parent, name, uri, options, conf, **kwargs):...\n",
"if uri is None:\n",
"uri = modules[name].urlstring\n",
"target_dir = join(subproject_dir, name)\n",
"target_dir_rp = os.path.realpath(target_dir)\n",
"if not target_dir_rp.startswith(source_dir_rp):\n",
"newmodule = Subproject.create(name, uri, target_dir, options, conf, **kwargs)\n",
"mod = modules.setdefault(name, newmodule)\n",
"if mod is newmodule:\n",
"mod.parents.add(parent)\n",
"if newmodule.exclude_from_cmake != mod.exclude_from_cmake:\n",
"if update:\n",
"children_conf = [join(parent.directory, dependency_file) for parent in mod.\n parents]\n",
"if not newmodule.same_checkout(mod) and uri is not None:\n",
"mod.update()\n",
"stack.append(mod)\n",
"parent_conf = join(parent.directory, dependency_file)\n",
"children = [join(parent.directory, dependency_file) for parent in mod.parents]\n",
"for key, value in options.items():\n",
"parent.children.add(mod)\n",
"parent = join(parent.directory, dependency_file)\n",
"mod.options.setdefault(key, value)\n",
"if mod.options[key] != value:\n"
] | [
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Assign'",
"Expr'",
"Condition"
] |
[
"from django.test import RequestFactory\n",
"from django.shortcuts import reverse\n",
"from test_plus.test import TestCase\n",
"from django.test import Client\n",
"from realpal.apps.users.constants import *\n",
"from realpal.apps.users.views import UserRedirectView, UserUpdateView\n",
"def FUNC_0(self):...\n",
"self.user = self.make_user()\n",
"self.factory = RequestFactory()\n",
"VAR_0 = Client()\n",
"def FUNC_1(self):...\n",
"VAR_1 = UserRedirectView()\n",
"VAR_2 = self.factory.get('/fake-url')\n",
"VAR_2.user = self.user\n",
"VAR_1.request = VAR_2\n",
"self.assertEqual(VAR_1.get_redirect_url(), '/users/testuser/')\n",
"def FUNC_0(self):...\n",
"super(CLASS_2, self).setUp()\n",
"self.view = UserUpdateView()\n",
"VAR_2 = self.factory.get('/fake-url')\n",
"VAR_2.user = self.user\n",
"self.view.request = VAR_2\n",
"def FUNC_2(self):...\n",
"self.assertEqual(self.view.get_success_url(), '/users/~update/#success')\n",
"def FUNC_3(self):...\n",
"self.assertEqual(self.view.get_object(), self.user)\n",
"def FUNC_4(self):...\n",
"VAR_3 = reverse('users:update')\n",
"VAR_4 = {'purchase_step_form': {'purchase_step': PS_DAP},\n 'marital_status_form': {'status': SC_SI}, 'first_home_form': {\n 'firsthome': True}, 'house_type_form': {'house_type': HT_SF,\n 'house_age': HA_15, 'house_cond': HC_SL}, 'city_form': {\n 'preferred_city': ''}, 'max_budget_form': {'budget': 1200.59},\n 'current_rent_form': {'current_rent': 321.49}, 'how_soon_form': {\n 'how_soon': HS_3}, 'personal_profile_form': {'first_name':\n 'TestFirstName', 'last_name': 'TestLastName', 'zipcode': '10118',\n 'phone_number': '+263771819478', 'email': '[email protected]'}}\n",
"self.client.login(username='testuser', password='password')\n",
"self.assertTemplateUsed('users/update.html')\n",
"for VAR_6 in VAR_4:\n",
"VAR_5 = VAR_4[VAR_6]\n",
"VAR_4 = {'purchase_step': 8}\n",
"VAR_4[VAR_6][VAR_6] = 'Update'\n",
"self.client.post(VAR_3, VAR_4)\n",
"VAR_7 = self.client.post(VAR_3, VAR_5)\n",
"self.assertEqual(self.view.get_object().purchase_step, PS_DAP)\n",
"self.assertEqual(VAR_7.status_code, 302)\n",
"VAR_4 = {'status': 8}\n",
"self.assertTemplateUsed('users/update.html')\n",
"self.client.post(VAR_3, VAR_4)\n",
"self.assertEqual(self.view.get_object().status, None)\n",
"VAR_4 = {'house_type': 8, 'house_age': 8, 'house_cond': 8}\n",
"self.client.post(VAR_3, VAR_4)\n",
"self.assertEqual(self.view.get_object().house_type, None)\n",
"self.assertEqual(self.view.get_object().house_age, None)\n",
"self.assertEqual(self.view.get_object().house_cond, None)\n",
"VAR_4 = {'budget': 'TEXT'}\n",
"self.client.post(VAR_3, VAR_4)\n",
"self.assertEqual(self.view.get_object().budget, None)\n",
"VAR_4 = {'current_rent': 'TEXT'}\n",
"self.client.post(VAR_3, VAR_4)\n",
"self.assertEqual(self.view.get_object().current_rent, None)\n",
"VAR_4 = {'how_soon': 8}\n",
"self.client.post(VAR_3, VAR_4)\n",
"self.assertEqual(self.view.get_object().how_soon, None)\n",
"VAR_4 = {'first_name': 'TestFirstName', 'last_name': 'TestLastName',\n 'zipcode': '10118', 'phone_number': '+26334465657456774567', 'email':\n '[email protected]'}\n",
"self.client.post(VAR_3, VAR_4)\n",
"self.assertEqual(self.view.get_object().first_name, '')\n",
"self.assertEqual(self.view.get_object().zipcode, None)\n",
"self.assertEqual(self.view.get_object().email, 'testuser')\n"
] | [
"from django.test import RequestFactory\n",
"from django.shortcuts import reverse\n",
"from test_plus.test import TestCase\n",
"from django.test import Client\n",
"from realpal.apps.users.constants import *\n",
"from realpal.apps.users.views import UserRedirectView, UserUpdateView\n",
"def setUp(self):...\n",
"self.user = self.make_user()\n",
"self.factory = RequestFactory()\n",
"client = Client()\n",
"def test_get_redirect_url(self):...\n",
"view = UserRedirectView()\n",
"request = self.factory.get('/fake-url')\n",
"request.user = self.user\n",
"view.request = request\n",
"self.assertEqual(view.get_redirect_url(), '/users/testuser/')\n",
"def setUp(self):...\n",
"super(TestUserUpdateView, self).setUp()\n",
"self.view = UserUpdateView()\n",
"request = self.factory.get('/fake-url')\n",
"request.user = self.user\n",
"self.view.request = request\n",
"def test_get_success_url(self):...\n",
"self.assertEqual(self.view.get_success_url(), '/users/~update/#success')\n",
"def test_get_object(self):...\n",
"self.assertEqual(self.view.get_object(), self.user)\n",
"def test_updating_user_info(self):...\n",
"update_url = reverse('users:update')\n",
"data = {'purchase_step_form': {'purchase_step': PS_DAP},\n 'marital_status_form': {'status': SC_SI}, 'first_home_form': {\n 'firsthome': True}, 'house_type_form': {'house_type': HT_SF,\n 'house_age': HA_15, 'house_cond': HC_SL}, 'city_form': {\n 'preferred_city': ''}, 'max_budget_form': {'budget': 1200.59},\n 'current_rent_form': {'current_rent': 321.49}, 'how_soon_form': {\n 'how_soon': HS_3}, 'personal_profile_form': {'first_name':\n 'TestFirstName', 'last_name': 'TestLastName', 'zipcode': '10118',\n 'phone_number': '+263771819478', 'email': '[email protected]'}}\n",
"self.client.login(username='testuser', password='password')\n",
"self.assertTemplateUsed('users/update.html')\n",
"for form in data:\n",
"data_to_pass = data[form]\n",
"data = {'purchase_step': 8}\n",
"data[form][form] = 'Update'\n",
"self.client.post(update_url, data)\n",
"response = self.client.post(update_url, data_to_pass)\n",
"self.assertEqual(self.view.get_object().purchase_step, PS_DAP)\n",
"self.assertEqual(response.status_code, 302)\n",
"data = {'status': 8}\n",
"self.assertTemplateUsed('users/update.html')\n",
"self.client.post(update_url, data)\n",
"self.assertEqual(self.view.get_object().status, None)\n",
"data = {'house_type': 8, 'house_age': 8, 'house_cond': 8}\n",
"self.client.post(update_url, data)\n",
"self.assertEqual(self.view.get_object().house_type, None)\n",
"self.assertEqual(self.view.get_object().house_age, None)\n",
"self.assertEqual(self.view.get_object().house_cond, None)\n",
"data = {'budget': 'TEXT'}\n",
"self.client.post(update_url, data)\n",
"self.assertEqual(self.view.get_object().budget, None)\n",
"data = {'current_rent': 'TEXT'}\n",
"self.client.post(update_url, data)\n",
"self.assertEqual(self.view.get_object().current_rent, None)\n",
"data = {'how_soon': 8}\n",
"self.client.post(update_url, data)\n",
"self.assertEqual(self.view.get_object().how_soon, None)\n",
"data = {'first_name': 'TestFirstName', 'last_name': 'TestLastName',\n 'zipcode': '10118', 'phone_number': '+26334465657456774567', 'email':\n '[email protected]'}\n",
"self.client.post(update_url, data)\n",
"self.assertEqual(self.view.get_object().first_name, '')\n",
"self.assertEqual(self.view.get_object().zipcode, None)\n",
"self.assertEqual(self.view.get_object().email, 'testuser')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
6,
0,
0,
0,
0,
0,
6,
6,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"import os\n",
"import unittest\n",
"from coalib.bearlib.abstractions.Lint import Lint\n",
"from coalib.misc.ContextManagers import prepare_file\n",
"from coalib.misc.Shell import escape_path_argument\n",
"from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY\n",
"from coalib.results.SourceRange import SourceRange\n",
"from coalib.settings.Section import Section\n",
"def FUNC_0(self):...\n",
"VAR_0 = Section('some_name')\n",
"self.uut = Lint(VAR_0, None)\n",
"def FUNC_1(self):...\n",
"VAR_1 = list(self.uut.process_output(['1.0|0: Info message\\n',\n '2.2|1: Normal message\\n', \"\"\"3.4|2: Major message\n\"\"\"], 'a/file.py', [\n 'original_file_lines_placeholder']))\n",
"self.assertEqual(len(VAR_1), 3)\n",
"self.assertEqual(VAR_1[0].origin, 'Lint')\n",
"self.assertEqual(VAR_1[0].affected_code[0], SourceRange.from_values(\n 'a/file.py', 1, 0))\n",
"self.assertEqual(VAR_1[0].severity, RESULT_SEVERITY.INFO)\n",
"self.assertEqual(VAR_1[0].message, 'Info message')\n",
"self.assertEqual(VAR_1[1].affected_code[0], SourceRange.from_values(\n 'a/file.py', 2, 2))\n",
"self.assertEqual(VAR_1[1].severity, RESULT_SEVERITY.NORMAL)\n",
"self.assertEqual(VAR_1[1].message, 'Normal message')\n",
"self.assertEqual(VAR_1[2].affected_code[0], SourceRange.from_values(\n 'a/file.py', 3, 4))\n",
"self.assertEqual(VAR_1[2].severity, RESULT_SEVERITY.MAJOR)\n",
"self.assertEqual(VAR_1[2].message, 'Major message')\n",
"def FUNC_2(self):...\n",
"self.uut.output_regex = 'string'\n",
"self.uut.severity_map = {'I': RESULT_SEVERITY.INFO}\n",
"VAR_1 = list(self.uut.process_output(['info_msg|1.0|2.3|I: Info message\\n'],\n 'a/file.py', ['original_file_lines_placeholder']))\n",
"self.assertEqual(len(VAR_1), 1)\n",
"self.assertEqual(VAR_1[0].affected_code[0].start.line, 1)\n",
"self.assertEqual(VAR_1[0].affected_code[0].start.column, 0)\n",
"self.assertEqual(VAR_1[0].affected_code[0].end.line, 2)\n",
"self.assertEqual(VAR_1[0].affected_code[0].end.column, 3)\n",
"self.assertEqual(VAR_1[0].severity, RESULT_SEVERITY.INFO)\n",
"self.assertEqual(VAR_1[0].origin, 'Lint (info_msg)')\n",
"def FUNC_3(self):...\n",
"VAR_1 = list(self.uut.process_output([\n \"Random line that shouldn't be captured\\n\", '*************\\n'],\n 'a/file.py', ['original_file_lines_placeholder']))\n",
"self.assertEqual(len(VAR_1), 0)\n",
"def FUNC_4(self):...\n",
"self.uut.executable = 'more'\n",
"self.uut.use_stdin = True\n",
"self.uut.use_stderr = False\n",
"self.uut.process_output = lambda output, filename, file: output\n",
"VAR_1 = self.uut.lint(file=lines)\n",
"self.assertTrue(('abcd\\n', 'efgh\\n') == VAR_1 or ('abcd\\n', 'efgh\\n', '\\n') ==\n VAR_1)\n",
"def FUNC_5(self):...\n",
"self.uut.executable = 'echo'\n",
"self.uut.arguments = 'hello'\n",
"self.uut.use_stdin = False\n",
"self.uut.use_stderr = True\n",
"self.uut.process_output = lambda output, filename, file: output\n",
"VAR_1 = self.uut.lint('unused_filename')\n",
"self.assertEqual((), VAR_1)\n",
"self.uut.use_stderr = False\n",
"VAR_1 = self.uut.lint('unused_filename')\n",
"self.assertEqual(('hello\\n',), VAR_1)\n",
"def FUNC_9(VAR_2):...\n",
"assert VAR_2 == 'hello'\n",
"VAR_3 = self.uut.warn\n",
"self.uut.warn = FUNC_9\n",
"self.uut._print_errors(['hello', '\\n'])\n",
"self.uut.warn = VAR_3\n",
"def FUNC_6(self):...\n",
"self.uut.gives_corrected = True\n",
"VAR_1 = tuple(self.uut.process_output(['a', 'b'], 'filename', ['a', 'b']))\n",
"self.assertEqual((), VAR_1)\n",
"VAR_1 = tuple(self.uut.process_output(['a', 'b'], 'filename', ['a']))\n",
"self.assertEqual(len(VAR_1), 1)\n",
"def FUNC_7(self):...\n",
"VAR_4 = Lint.executable\n",
"VAR_5 = 'invalid_binary_which_doesnt_exist'\n",
"Lint.executable = VAR_5\n",
"self.assertEqual(Lint.check_prerequisites(), \"'{}' is not installed.\".\n format(VAR_5))\n",
"Lint.executable = 'echo'\n",
"self.assertTrue(Lint.check_prerequisites())\n",
"self.assertTrue(Lint.check_prerequisites())\n",
"Lint.executable = VAR_4\n",
"def FUNC_8(self):...\n",
"self.uut.executable = 'echo'\n",
"self.uut.arguments = '-c {config_file}'\n",
"self.assertEqual(self.uut._create_command(config_file='configfile').strip(),\n 'echo -c ' + escape_path_argument('configfile'))\n",
"def FUNC_8(self):...\n",
"self.uut.executable = 'echo'\n",
"self.uut.config_file = lambda : ['config line1']\n",
"VAR_6 = self.uut.generate_config_file()\n",
"self.assertTrue(os.path.isfile(VAR_6))\n",
"os.remove(VAR_6)\n",
"self.uut.lint('filename')\n"
] | [
"import os\n",
"import unittest\n",
"from coalib.bearlib.abstractions.Lint import Lint\n",
"from coalib.misc.ContextManagers import prepare_file\n",
"from coalib.misc.Shell import escape_path_argument\n",
"from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY\n",
"from coalib.results.SourceRange import SourceRange\n",
"from coalib.settings.Section import Section\n",
"def setUp(self):...\n",
"section = Section('some_name')\n",
"self.uut = Lint(section, None)\n",
"def test_invalid_output(self):...\n",
"out = list(self.uut.process_output(['1.0|0: Info message\\n',\n '2.2|1: Normal message\\n', \"\"\"3.4|2: Major message\n\"\"\"], 'a/file.py', [\n 'original_file_lines_placeholder']))\n",
"self.assertEqual(len(out), 3)\n",
"self.assertEqual(out[0].origin, 'Lint')\n",
"self.assertEqual(out[0].affected_code[0], SourceRange.from_values(\n 'a/file.py', 1, 0))\n",
"self.assertEqual(out[0].severity, RESULT_SEVERITY.INFO)\n",
"self.assertEqual(out[0].message, 'Info message')\n",
"self.assertEqual(out[1].affected_code[0], SourceRange.from_values(\n 'a/file.py', 2, 2))\n",
"self.assertEqual(out[1].severity, RESULT_SEVERITY.NORMAL)\n",
"self.assertEqual(out[1].message, 'Normal message')\n",
"self.assertEqual(out[2].affected_code[0], SourceRange.from_values(\n 'a/file.py', 3, 4))\n",
"self.assertEqual(out[2].severity, RESULT_SEVERITY.MAJOR)\n",
"self.assertEqual(out[2].message, 'Major message')\n",
"def test_custom_regex(self):...\n",
"self.uut.output_regex = (\n '(?P<origin>\\\\w+)\\\\|(?P<line>\\\\d+)\\\\.(?P<column>\\\\d+)\\\\|(?P<end_line>\\\\d+)\\\\.(?P<end_column>\\\\d+)\\\\|(?P<severity>\\\\w+): (?P<message>.*)'\n )\n",
"self.uut.severity_map = {'I': RESULT_SEVERITY.INFO}\n",
"out = list(self.uut.process_output(['info_msg|1.0|2.3|I: Info message\\n'],\n 'a/file.py', ['original_file_lines_placeholder']))\n",
"self.assertEqual(len(out), 1)\n",
"self.assertEqual(out[0].affected_code[0].start.line, 1)\n",
"self.assertEqual(out[0].affected_code[0].start.column, 0)\n",
"self.assertEqual(out[0].affected_code[0].end.line, 2)\n",
"self.assertEqual(out[0].affected_code[0].end.column, 3)\n",
"self.assertEqual(out[0].severity, RESULT_SEVERITY.INFO)\n",
"self.assertEqual(out[0].origin, 'Lint (info_msg)')\n",
"def test_valid_output(self):...\n",
"out = list(self.uut.process_output([\n \"Random line that shouldn't be captured\\n\", '*************\\n'],\n 'a/file.py', ['original_file_lines_placeholder']))\n",
"self.assertEqual(len(out), 0)\n",
"def test_stdin_input(self):...\n",
"self.uut.executable = 'more'\n",
"self.uut.use_stdin = True\n",
"self.uut.use_stderr = False\n",
"self.uut.process_output = lambda output, filename, file: output\n",
"out = self.uut.lint(file=lines)\n",
"self.assertTrue(('abcd\\n', 'efgh\\n') == out or ('abcd\\n', 'efgh\\n', '\\n') ==\n out)\n",
"def test_stderr_output(self):...\n",
"self.uut.executable = 'echo'\n",
"self.uut.arguments = 'hello'\n",
"self.uut.use_stdin = False\n",
"self.uut.use_stderr = True\n",
"self.uut.process_output = lambda output, filename, file: output\n",
"out = self.uut.lint('unused_filename')\n",
"self.assertEqual((), out)\n",
"self.uut.use_stderr = False\n",
"out = self.uut.lint('unused_filename')\n",
"self.assertEqual(('hello\\n',), out)\n",
"def assert_warn(line):...\n",
"assert line == 'hello'\n",
"old_warn = self.uut.warn\n",
"self.uut.warn = assert_warn\n",
"self.uut._print_errors(['hello', '\\n'])\n",
"self.uut.warn = old_warn\n",
"def test_gives_corrected(self):...\n",
"self.uut.gives_corrected = True\n",
"out = tuple(self.uut.process_output(['a', 'b'], 'filename', ['a', 'b']))\n",
"self.assertEqual((), out)\n",
"out = tuple(self.uut.process_output(['a', 'b'], 'filename', ['a']))\n",
"self.assertEqual(len(out), 1)\n",
"def test_missing_binary(self):...\n",
"old_binary = Lint.executable\n",
"invalid_binary = 'invalid_binary_which_doesnt_exist'\n",
"Lint.executable = invalid_binary\n",
"self.assertEqual(Lint.check_prerequisites(), \"'{}' is not installed.\".\n format(invalid_binary))\n",
"Lint.executable = 'echo'\n",
"self.assertTrue(Lint.check_prerequisites())\n",
"self.assertTrue(Lint.check_prerequisites())\n",
"Lint.executable = old_binary\n",
"def test_config_file_generator(self):...\n",
"self.uut.executable = 'echo'\n",
"self.uut.arguments = '-c {config_file}'\n",
"self.assertEqual(self.uut._create_command(config_file='configfile').strip(),\n 'echo -c ' + escape_path_argument('configfile'))\n",
"def test_config_file_generator(self):...\n",
"self.uut.executable = 'echo'\n",
"self.uut.config_file = lambda : ['config line1']\n",
"config_filename = self.uut.generate_config_file()\n",
"self.assertTrue(os.path.isfile(config_filename))\n",
"os.remove(config_filename)\n",
"self.uut.lint('filename')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assert'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(self, VAR_5):...\n",
"VAR_9 = datetime.strptime(VAR_5.date_begin_located, '%Y-%m-%d %H:%M:%S')\n",
"if VAR_9 < datetime.now():\n",
"return False\n",
"return True\n"
] | [
"def can_unsubscribe(self, event):...\n",
"event_start = datetime.strptime(event.date_begin_located, '%Y-%m-%d %H:%M:%S')\n",
"if event_start < datetime.now():\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_2(self, VAR_18: int):...\n",
"\"\"\"docstring\"\"\"\n",
"return CLASS_0.query.get(VAR_18)\n"
] | [
"def get_by_id(self, user_id: int):...\n",
"\"\"\"docstring\"\"\"\n",
"return User.query.get(user_id)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def __init__(self, VAR_3, VAR_4, VAR_5=None, VAR_6=None):...\n",
"self.rule = VAR_3\n",
"self.dag = VAR_4\n",
"self.targetfile = VAR_5\n",
"self.wildcards_dict = self.rule.get_wildcards(VAR_5)\n",
"self.wildcards = Wildcards(fromdict=self.wildcards_dict)\n",
"self._format_wildcards = self.wildcards if VAR_6 is None else Wildcards(\n fromdict=format_wildcards)\n",
"(self.input, self.output, self.params, self.log, self.benchmark, self.\n ruleio, self.dependencies) = VAR_3.expand_wildcards(self.wildcards_dict)\n",
"self.resources_dict = {name: min(self.rule.workflow.global_resources.get(\n name, res), res) for name, res in VAR_3.resources.items()}\n",
"self.threads = self.resources_dict['_cores']\n",
"self.resources = Resources(fromdict=self.resources_dict)\n",
"self._inputsize = None\n",
"self.dynamic_output, self.dynamic_input = set(), set()\n",
"self.temp_output, self.protected_output = set(), set()\n",
"self.touch_output = set()\n",
"self.subworkflow_input = dict()\n",
"for VAR_28 in self.output:\n",
"VAR_27 = self.ruleio[VAR_28]\n",
"for VAR_28 in self.input:\n",
"if VAR_27 in self.rule.dynamic_output:\n",
"VAR_27 = self.ruleio[VAR_28]\n",
"self._hash = self.rule.__hash__()\n",
"self.dynamic_output.add(VAR_28)\n",
"if VAR_27 in self.rule.temp_output:\n",
"if VAR_27 in self.rule.dynamic_input:\n",
"if True or not self.dynamic_output:\n",
"self.temp_output.add(VAR_28)\n",
"if VAR_27 in self.rule.protected_output:\n",
"self.dynamic_input.add(VAR_28)\n",
"if VAR_27 in self.rule.subworkflow_input:\n",
"for o in self.output:\n",
"self.protected_output.add(VAR_28)\n",
"if VAR_27 in self.rule.touch_output:\n",
"self.subworkflow_input[VAR_28] = self.rule.subworkflow_input[VAR_27]\n",
"self._hash ^= o.__hash__()\n",
"self.touch_output.add(VAR_28)\n"
] | [
"def __init__(self, rule, dag, targetfile=None, format_wildcards=None):...\n",
"self.rule = rule\n",
"self.dag = dag\n",
"self.targetfile = targetfile\n",
"self.wildcards_dict = self.rule.get_wildcards(targetfile)\n",
"self.wildcards = Wildcards(fromdict=self.wildcards_dict)\n",
"self._format_wildcards = (self.wildcards if format_wildcards is None else\n Wildcards(fromdict=format_wildcards))\n",
"(self.input, self.output, self.params, self.log, self.benchmark, self.\n ruleio, self.dependencies) = rule.expand_wildcards(self.wildcards_dict)\n",
"self.resources_dict = {name: min(self.rule.workflow.global_resources.get(\n name, res), res) for name, res in rule.resources.items()}\n",
"self.threads = self.resources_dict['_cores']\n",
"self.resources = Resources(fromdict=self.resources_dict)\n",
"self._inputsize = None\n",
"self.dynamic_output, self.dynamic_input = set(), set()\n",
"self.temp_output, self.protected_output = set(), set()\n",
"self.touch_output = set()\n",
"self.subworkflow_input = dict()\n",
"for f in self.output:\n",
"f_ = self.ruleio[f]\n",
"for f in self.input:\n",
"if f_ in self.rule.dynamic_output:\n",
"f_ = self.ruleio[f]\n",
"self._hash = self.rule.__hash__()\n",
"self.dynamic_output.add(f)\n",
"if f_ in self.rule.temp_output:\n",
"if f_ in self.rule.dynamic_input:\n",
"if True or not self.dynamic_output:\n",
"self.temp_output.add(f)\n",
"if f_ in self.rule.protected_output:\n",
"self.dynamic_input.add(f)\n",
"if f_ in self.rule.subworkflow_input:\n",
"for o in self.output:\n",
"self.protected_output.add(f)\n",
"if f_ in self.rule.touch_output:\n",
"self.subworkflow_input[f] = self.rule.subworkflow_input[f_]\n",
"self._hash ^= o.__hash__()\n",
"self.touch_output.add(f)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"For",
"Expr'",
"Condition",
"Assign'",
"AugAssign'",
"Expr'"
] |
[
"def FUNC_3(VAR_3):...\n",
"print(CLASS_0.WARNING + '!', VAR_3, CLASS_0.END)\n"
] | [
"def print_warning(str):...\n",
"print(c.WARNING + '!', str, c.END)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"@VAR_0.route('/<page_name>/history/record')...\n",
"VAR_12 = request.args.get('id')\n",
"VAR_3 = VAR_1.query('string' % VAR_12)\n",
"VAR_13 = VAR_3.namedresult()[0]\n",
"return render_template('page_record.html', VAR_2=page_name, VAR_13=page_record)\n"
] | [
"@app.route('/<page_name>/history/record')...\n",
"content_id = request.args.get('id')\n",
"query = db.query(\n \"select page_content.content, page_content.timestamp from page, page_content where page.id = page_content.page_id and page_content.id = '%s'\"\n % content_id)\n",
"page_record = query.namedresult()[0]\n",
"return render_template('page_record.html', page_name=page_name, page_record\n =page_record)\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_4():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_20 = pd.read_sql('select date, league, team1, team2 from spi_historical',\n VAR_0)\n",
"VAR_21 = pd.read_sql('select Date, Div, HomeTeam, AwayTeam from fd_historical',\n VAR_0)\n",
"VAR_22 = ['key0', 'key1']\n",
"VAR_20.columns = VAR_22 + ['left_team1', 'left_team2']\n",
"VAR_21.columns = VAR_22 + ['right_team1', 'right_team2']\n",
"VAR_23 = pd.merge(VAR_20, VAR_21, how='outer').dropna().drop(columns=\n key_columns).reset_index(drop=True)\n",
"VAR_24 = VAR_23.apply(lambda row: SequenceMatcher(None, row.left_team1, row\n .right_team1).ratio() * SequenceMatcher(None, row.left_team2, row.\n right_team2).ratio(), axis=1)\n",
"VAR_25 = pd.concat([VAR_23, VAR_24], axis=1)\n",
"VAR_26 = VAR_25.groupby(['left_team1', 'left_team2'])[0].idxmax().values\n",
"VAR_27 = VAR_23.take(VAR_26)\n",
"VAR_28 = VAR_27.loc[:, (['left_team1', 'right_team1'])].rename(columns={\n 'left_team1': 'left_team', 'right_team1': 'right_team'})\n",
"VAR_29 = VAR_27.loc[:, (['left_team2', 'right_team2'])].rename(columns={\n 'left_team2': 'left_team', 'right_team2': 'right_team'})\n",
"VAR_30 = VAR_28.append(VAR_29)\n",
"VAR_30 = VAR_30.groupby(VAR_30.columns.tolist()).size().reset_index()\n",
"VAR_26 = VAR_30.groupby('left_team')[0].idxmax().values\n",
"VAR_31 = VAR_30.take(VAR_26).drop(columns=0).reset_index(drop=True)\n",
"VAR_31.to_sql('names_mapping', VAR_0, index=False, if_exists='replace')\n"
] | [
"def create_names_mapping_table():...\n",
"\"\"\"docstring\"\"\"\n",
"left_data = pd.read_sql('select date, league, team1, team2 from spi_historical'\n , DB_CONNECTION)\n",
"right_data = pd.read_sql(\n 'select Date, Div, HomeTeam, AwayTeam from fd_historical', DB_CONNECTION)\n",
"key_columns = ['key0', 'key1']\n",
"left_data.columns = key_columns + ['left_team1', 'left_team2']\n",
"right_data.columns = key_columns + ['right_team1', 'right_team2']\n",
"names_combinations = pd.merge(left_data, right_data, how='outer').dropna(\n ).drop(columns=key_columns).reset_index(drop=True)\n",
"similarity = names_combinations.apply(lambda row: SequenceMatcher(None, row\n .left_team1, row.right_team1).ratio() * SequenceMatcher(None, row.\n left_team2, row.right_team2).ratio(), axis=1)\n",
"names_combinations_similarity = pd.concat([names_combinations, similarity],\n axis=1)\n",
"indices = names_combinations_similarity.groupby(['left_team1', 'left_team2'])[0\n ].idxmax().values\n",
"names_matching = names_combinations.take(indices)\n",
"matching1 = names_matching.loc[:, (['left_team1', 'right_team1'])].rename(\n columns={'left_team1': 'left_team', 'right_team1': 'right_team'})\n",
"matching2 = names_matching.loc[:, (['left_team2', 'right_team2'])].rename(\n columns={'left_team2': 'left_team', 'right_team2': 'right_team'})\n",
"matching = matching1.append(matching2)\n",
"matching = matching.groupby(matching.columns.tolist()).size().reset_index()\n",
"indices = matching.groupby('left_team')[0].idxmax().values\n",
"names_mapping = matching.take(indices).drop(columns=0).reset_index(drop=True)\n",
"names_mapping.to_sql('names_mapping', DB_CONNECTION, index=False, if_exists\n ='replace')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_9(self, VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_30 = self.cursor.execute('delete from items where itemname = \"%s\"' % VAR_6)\n",
"self.connection.commit()\n"
] | [
"def remove_item(self, item):...\n",
"\"\"\"docstring\"\"\"\n",
"r = self.cursor.execute('delete from items where itemname = \"%s\"' % item)\n",
"self.connection.commit()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def FUNC_43(self, VAR_21=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_32 = list()\n",
"VAR_21 = VAR_21 or self.get_columns_list()\n",
"for VAR_14 in VAR_21:\n",
"if not self.is_relation(VAR_14):\n",
"return VAR_32\n",
"if hasattr(self.obj, VAR_14):\n",
"if not hasattr(getattr(self.obj, VAR_14), '__call__') or hasattr(getattr(\n",
"VAR_32.append(VAR_14)\n",
"VAR_32.append(VAR_14)\n"
] | [
"def get_order_columns_list(self, list_columns=None):...\n",
"\"\"\"docstring\"\"\"\n",
"ret_lst = list()\n",
"list_columns = list_columns or self.get_columns_list()\n",
"for col_name in list_columns:\n",
"if not self.is_relation(col_name):\n",
"return ret_lst\n",
"if hasattr(self.obj, col_name):\n",
"if not hasattr(getattr(self.obj, col_name), '__call__') or hasattr(getattr(\n",
"ret_lst.append(col_name)\n",
"ret_lst.append(col_name)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"For",
"Condition",
"Return'",
"Condition",
"Condition",
"Expr'",
"Expr'"
] |
[
"\"\"\"string\"\"\"\n",
"import argparse\n",
"import importlib\n",
"import os\n",
"import sys\n",
"from parlai.core.agents import get_agent_module, get_task_module\n",
"from parlai.tasks.tasks import ids_to_tasks\n",
"def FUNC_0(VAR_0):...\n",
"VAR_3 = VAR_0.lower()\n",
"if VAR_3 in ('yes', 'true', 't', '1', 'y'):\n",
"return True\n",
"if VAR_3 in ('no', 'false', 'f', 'n', '0'):\n",
"return False\n",
"def FUNC_1(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"if ':' not in VAR_0:\n",
"VAR_4 = VAR_0.split(':')\n",
"VAR_5 = importlib.import_module(VAR_4[0])\n",
"return getattr(VAR_5, VAR_4[1])\n"
] | [
"\"\"\"Provides an argument parser and a set of default command line options for\nusing the ParlAI package.\n\"\"\"\n",
"import argparse\n",
"import importlib\n",
"import os\n",
"import sys\n",
"from parlai.core.agents import get_agent_module, get_task_module\n",
"from parlai.tasks.tasks import ids_to_tasks\n",
"def str2bool(value):...\n",
"v = value.lower()\n",
"if v in ('yes', 'true', 't', '1', 'y'):\n",
"return True\n",
"if v in ('no', 'false', 'f', 'n', '0'):\n",
"return False\n",
"def str2class(value):...\n",
"\"\"\"docstring\"\"\"\n",
"if ':' not in value:\n",
"name = value.split(':')\n",
"module = importlib.import_module(name[0])\n",
"return getattr(module, name[1])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_11(self):...\n",
"shellutil.run('/etc/rc.d/dhclient start {0}'.format(self.get_if_name()),\n VAR_10=False)\n"
] | [
"def start_dhcp_service(self):...\n",
"shellutil.run('/etc/rc.d/dhclient start {0}'.format(self.get_if_name()),\n chk_err=False)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_6(self, VAR_23):...\n",
"VAR_82 = CLASS_46.run(self, VAR_23)\n",
"if not VAR_82 or not hasattr(VAR_82, 'email') or not VAR_82.email:\n",
"return self.error(errors.NO_EMAIL_FOR_USER)\n",
"return VAR_82\n"
] | [
"def run(self, name):...\n",
"user = VExistingUname.run(self, name)\n",
"if not user or not hasattr(user, 'email') or not user.email:\n",
"return self.error(errors.NO_EMAIL_FOR_USER)\n",
"return user\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_10(VAR_32, VAR_34):...\n",
"logging.info('Got signal %s', VAR_32)\n"
] | [
"def handler(sig, _):...\n",
"logging.info('Got signal %s', sig)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_9(VAR_9):...\n",
"VAR_14 = __import__(VAR_9.replace('\\r', ''))\n",
"VAR_15 = getattr(VAR_14, VAR_9.replace('\\r', ''))\n",
"VAR_16 = VAR_15()\n",
"return FUNC_7(VAR_16, 'config') + VAR_4 + '.' + VAR_4\n"
] | [
"def configPlugin(name):...\n",
"module = __import__(name.replace('\\r', ''))\n",
"class_ = getattr(module, name.replace('\\r', ''))\n",
"instance = class_()\n",
"return callMethod(instance, 'config') + LINEBREAK + '.' + LINEBREAK\n"
] | [
0,
0,
0,
0,
1
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_9():...\n",
"VAR_9 = FUNC_0()\n",
"VAR_10 = VAR_9.cursor()\n",
"VAR_10.execute(' SELECT name, karma FROM people ORDER BY karma ASC LIMIT 5 ')\n",
"VAR_2.error('Execution failed with error: {}'.format(e))\n",
"VAR_18 = VAR_10.fetchall()\n",
"VAR_2.debug('fetched bottom karma values')\n",
"VAR_9.close()\n",
"return VAR_18\n"
] | [
"def karma_bottom():...\n",
"db = db_connect()\n",
"cursor = db.cursor()\n",
"cursor.execute(' SELECT name, karma FROM people ORDER BY karma ASC LIMIT 5 ')\n",
"logger.error('Execution failed with error: {}'.format(e))\n",
"leaders = cursor.fetchall()\n",
"logger.debug('fetched bottom karma values')\n",
"db.close()\n",
"return leaders\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_0(self, VAR_2, VAR_3, VAR_4):...\n",
"if not VAR_4:\n",
"return\n",
"if validate_hash(VAR_4):\n",
"if validate_url(VAR_4):\n",
"VAR_21 = VirusTotalAPI().hash_fetch(VAR_4)\n",
"VAR_2['errors'].append('Error retrieving file hash: %s' % e)\n",
"VAR_18 = Files.create(VAR_3, VAR_4, VAR_21)\n",
"VAR_2['data'].append({'type': 'url', 'data': VAR_4})\n",
"VAR_2['errors'].append(\"'%s' was neither a valid hash or url\" % VAR_4)\n",
"return\n",
"VAR_2['data'].append({'type': 'file', 'data': VAR_18})\n",
"return\n",
"return\n"
] | [
"def _handle_string(self, submit, tmppath, line):...\n",
"if not line:\n",
"return\n",
"if validate_hash(line):\n",
"if validate_url(line):\n",
"filedata = VirusTotalAPI().hash_fetch(line)\n",
"submit['errors'].append('Error retrieving file hash: %s' % e)\n",
"filepath = Files.create(tmppath, line, filedata)\n",
"submit['data'].append({'type': 'url', 'data': line})\n",
"submit['errors'].append(\"'%s' was neither a valid hash or url\" % line)\n",
"return\n",
"submit['data'].append({'type': 'file', 'data': filepath})\n",
"return\n",
"return\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Return'",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_27(self, VAR_15):...\n",
""
] | [
"def add(self, item):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_42 = User.get_all_users_not_pagainated()\n",
"VAR_43 = 1\n",
"VAR_44 = len(VAR_42)\n",
"for VAR_22 in VAR_42:\n",
"CLASS_1.check_and_update_mapper_level(VAR_22.id)\n",
"return VAR_43\n",
"if VAR_43 % 50 == 0:\n",
"print(f'{VAR_43} users updated of {VAR_44}')\n",
"VAR_43 += 1\n"
] | [
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"users = User.get_all_users_not_pagainated()\n",
"users_updated = 1\n",
"total_users = len(users)\n",
"for user in users:\n",
"UserService.check_and_update_mapper_level(user.id)\n",
"return users_updated\n",
"if users_updated % 50 == 0:\n",
"print(f'{users_updated} users updated of {total_users}')\n",
"users_updated += 1\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Return'",
"Condition",
"Expr'",
"AugAssign'"
] |
[
"def FUNC_40(*VAR_19, **VAR_20):...\n",
"VAR_62 = Thread(target=function, VAR_19=args, VAR_46=ka)\n",
"VAR_62.daemon = True\n",
"VAR_62.start()\n"
] | [
"def decorator(*args, **ka):...\n",
"t = Thread(target=function, args=args, kwargs=ka)\n",
"t.daemon = True\n",
"t.start()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_3(*VAR_17, **VAR_18):...\n",
"return {'id': uuidutils.generate_uuid()}\n"
] | [
"def _return_id_key(*args, **kwargs):...\n",
"return {'id': uuidutils.generate_uuid()}\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_7(VAR_21):...\n",
"VAR_31 = len(VAR_21) > VAR_10\n",
"logging.info('validate_answer: %s returns %r', str(VAR_21), not VAR_31)\n",
"return not VAR_31\n"
] | [
"def validate_answer(ans):...\n",
"too_long = len(ans) > max_ans_len\n",
"logging.info('validate_answer: %s returns %r', str(ans), not too_long)\n",
"return not too_long\n"
] | [
0,
4,
4,
4
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_2(VAR_1):...\n",
"VAR_2 = True\n",
"VAR_0 = FUNC_0()\n",
"print('insert error: ', error)\n",
"return VAR_2\n",
"VAR_4 = VAR_0.cursor()\n",
"VAR_2 = False\n",
"VAR_4.execute(VAR_1)\n",
"VAR_5 = VAR_4.fetchall()\n",
"VAR_6 = []\n",
"VAR_7 = []\n",
"VAR_8 = '|'\n",
"VAR_9 = '+'\n",
"for cd in VAR_4.description:\n",
"VAR_6.append(max(cd[2], len(cd[0])))\n",
"for w in VAR_6:\n",
"VAR_7.append(cd[0])\n",
"VAR_8 += ' %-' + '%ss |' % (w,)\n",
"print(VAR_9)\n",
"VAR_9 += '-' * w + '--+'\n",
"print(VAR_8 % tuple(VAR_7))\n",
"print(VAR_9)\n",
"for row in VAR_5:\n",
"print(VAR_8 % row)\n",
"print(VAR_9)\n",
"VAR_0.commit()\n",
"FUNC_1(VAR_0)\n"
] | [
"def run_insert(insert_stmt):...\n",
"is_success = True\n",
"conn = create_connection()\n",
"print('insert error: ', error)\n",
"return is_success\n",
"cur = conn.cursor()\n",
"is_success = False\n",
"cur.execute(insert_stmt)\n",
"results = cur.fetchall()\n",
"widths = []\n",
"columns = []\n",
"tavnit = '|'\n",
"separator = '+'\n",
"for cd in cur.description:\n",
"widths.append(max(cd[2], len(cd[0])))\n",
"for w in widths:\n",
"columns.append(cd[0])\n",
"tavnit += ' %-' + '%ss |' % (w,)\n",
"print(separator)\n",
"separator += '-' * w + '--+'\n",
"print(tavnit % tuple(columns))\n",
"print(separator)\n",
"for row in results:\n",
"print(tavnit % row)\n",
"print(separator)\n",
"conn.commit()\n",
"destroy_connection(conn)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"For",
"Expr'",
"AugAssign'",
"Expr'",
"AugAssign'",
"Expr'",
"Expr'",
"For",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_25(self):...\n",
"VAR_13 = -1\n",
"self.cursor.execute('create table t1(n int)')\n",
"self.cursor.execute('insert into t1 values (?)', VAR_13)\n",
"VAR_20 = self.cursor.execute('select n from t1').fetchone()[0]\n",
"self.assertEqual(VAR_20, VAR_13)\n"
] | [
"def test_negative_int(self):...\n",
"value = -1\n",
"self.cursor.execute('create table t1(n int)')\n",
"self.cursor.execute('insert into t1 values (?)', value)\n",
"result = self.cursor.execute('select n from t1').fetchone()[0]\n",
"self.assertEqual(result, value)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"@VAR_13.whitelist()...\n",
"FUNC_2(VAR_1, VAR_2, VAR_3, VAR_0=searchfield, VAR_5=page_length, VAR_4=filters\n )\n",
"VAR_13.response['results'] = FUNC_4(VAR_13.response['values'])\n"
] | [
"@frappe.whitelist()...\n",
"search_widget(doctype, txt, query, searchfield=searchfield, page_length=\n page_length, filters=filters)\n",
"frappe.response['results'] = build_for_autosuggest(frappe.response['values'])\n"
] | [
0,
0,
0
] | [
"Condition",
"Expr'",
"Assign'"
] |
[
"import json\n",
"import logging\n",
"VAR_0 = logging.getLogger(__name__)\n",
"def FUNC_0(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_1.content_type in ('application/json', 'text/json'):\n",
"return VAR_1\n",
"VAR_4 = json.loads(VAR_1.body)\n",
"if isinstance(VAR_4, (list, tuple)):\n",
"VAR_0.warn('string')\n"
] | [
"import json\n",
"import logging\n",
"log = logging.getLogger(__name__)\n",
"def filter_json_xsrf(response):...\n",
"\"\"\"docstring\"\"\"\n",
"if response.content_type in ('application/json', 'text/json'):\n",
"return response\n",
"content = json.loads(response.body)\n",
"if isinstance(content, (list, tuple)):\n",
"log.warn(\n 'returning a json array is a potential security whole, please ensure you really want to do this. See http://wiki.pylonshq.com/display/pylonsfaq/Warnings for more info'\n )\n"
] | [
5,
0,
5,
0,
0,
0,
0,
5,
5,
0
] | [
"Import'",
"Import'",
"Assign'",
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_22(self, *VAR_69, **VAR_70):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.input = VAR_69, VAR_70\n",
"return VAR_101\n"
] | [
"def input(self, *paths, **kwpaths):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.input = paths, kwpaths\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_3():...\n",
"VAR_3 = FUNC_1()\n",
"FUNC_2(VAR_3)\n"
] | [
"def main():...\n",
"dms = getDMs()\n",
"replyToUnansweredDMs(dms)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_40(self):...\n",
"\"\"\"docstring\"\"\"\n",
"tournament.reportMatch(p1=2, p2='A')\n"
] | [
"def test_p2_contains_letter(self):...\n",
"\"\"\"docstring\"\"\"\n",
"tournament.reportMatch(p1=2, p2='A')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"def FUNC_0(self, VAR_3, VAR_4, VAR_7):...\n",
"if VAR_4.is_course_staff:\n",
"return True\n",
"if not VAR_7.visible_to_students:\n",
"self.error_msg(_('The resource is not currently visible.'))\n",
"if VAR_7.view_content_to != VAR_7.VIEW_ACCESS.PUBLIC and not VAR_3.user.is_authenticated:\n",
"return False\n",
"self.error_msg(_('This course is not open for public.'))\n",
"return True\n",
"return False\n"
] | [
"def is_object_visible(self, request, view, course_instance):...\n",
"if view.is_course_staff:\n",
"return True\n",
"if not course_instance.visible_to_students:\n",
"self.error_msg(_('The resource is not currently visible.'))\n",
"if course_instance.view_content_to != course_instance.VIEW_ACCESS.PUBLIC and not request.user.is_authenticated:\n",
"return False\n",
"self.error_msg(_('This course is not open for public.'))\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_0(VAR_0, VAR_1='_', VAR_2=-2, VAR_3=-1):...\n",
"\"\"\"docstring\"\"\"\n",
"import pandas as pd\n",
"VAR_15 = [int(fn.split('.')[0].split(VAR_1)[VAR_2]) for fn in VAR_0]\n",
"VAR_16 = [int(fn.split('.')[0].split(VAR_1)[VAR_3]) for fn in VAR_0]\n",
"VAR_17 = pd.DataFrame({'fn': VAR_0, 'month': VAR_15, 'year': VAR_16})\n",
"VAR_18 = VAR_17.sort_values(['year', 'month'])\n",
"return VAR_18.fn.tolist()\n"
] | [
"def sort_files(files, split_on='_', elem_month=-2, elem_year=-1):...\n",
"\"\"\"docstring\"\"\"\n",
"import pandas as pd\n",
"months = [int(fn.split('.')[0].split(split_on)[elem_month]) for fn in files]\n",
"years = [int(fn.split('.')[0].split(split_on)[elem_year]) for fn in files]\n",
"df = pd.DataFrame({'fn': files, 'month': months, 'year': years})\n",
"df_sorted = df.sort_values(['year', 'month'])\n",
"return df_sorted.fn.tolist()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"from itemPrices.models import ItemSale\n",
"from django.db import connection\n",
"from django.db.models import Count\n",
"from rest_framework.views import APIView\n",
"from rest_framework.response import Response\n",
"from rest_framework import status\n",
"VAR_0 = {'status': 404, 'content': {'message': 'Not found'}}\n",
"\"\"\"\n \"\"\"\n",
"def FUNC_0(self, VAR_1):...\n",
"VAR_2 = VAR_1.query_params.get('item')\n",
"VAR_3 = VAR_1.query_params.get('city')\n",
"if not VAR_2 and not VAR_3:\n",
"return Response(VAR_0)\n",
"VAR_4 = 'string'\n",
"if VAR_2 and VAR_3:\n",
"VAR_4 = \"{} WHERE city = '{}' and title = '{}'\".format(VAR_4, VAR_3, VAR_2)\n",
"if VAR_2:\n",
"c.execute(VAR_4)\n",
"VAR_4 = \"{} WHERE title = '{}'\".format(VAR_4, VAR_2)\n",
"if VAR_3:\n",
"VAR_5, VAR_6 = c.fetchone()\n",
"VAR_4 = \"{} WHERE city = '{}'\".format(VAR_4, VAR_3)\n",
"if VAR_6 == 0:\n",
"return Response(VAR_0)\n",
"return Response({'status': 200, 'content': {'item': VAR_2 or\n 'Not specified', 'item_count': VAR_6, 'price_suggestion': VAR_5, 'city':\n VAR_3 or 'Not specified'}})\n"
] | [
"from itemPrices.models import ItemSale\n",
"from django.db import connection\n",
"from django.db.models import Count\n",
"from rest_framework.views import APIView\n",
"from rest_framework.response import Response\n",
"from rest_framework import status\n",
"NOT_FOUND_JSON_RESPONSE = {'status': 404, 'content': {'message': 'Not found'}}\n",
"\"\"\"\n \"\"\"\n",
"def get(self, request):...\n",
"item = request.query_params.get('item')\n",
"city = request.query_params.get('city')\n",
"if not item and not city:\n",
"return Response(NOT_FOUND_JSON_RESPONSE)\n",
"sql = \"\"\"SELECT\n mode() WITHIN GROUP (ORDER BY list_price DESC) AS model_value,\n count(*)\n FROM\n \"itemPrices_itemsale\"\n \"\"\"\n",
"if item and city:\n",
"sql = \"{} WHERE city = '{}' and title = '{}'\".format(sql, city, item)\n",
"if item:\n",
"c.execute(sql)\n",
"sql = \"{} WHERE title = '{}'\".format(sql, item)\n",
"if city:\n",
"price_mode, count = c.fetchone()\n",
"sql = \"{} WHERE city = '{}'\".format(sql, city)\n",
"if count == 0:\n",
"return Response(NOT_FOUND_JSON_RESPONSE)\n",
"return Response({'status': 200, 'content': {'item': item or 'Not specified',\n 'item_count': count, 'price_suggestion': price_mode, 'city': city or\n 'Not specified'}})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
4,
4,
4,
4,
4,
4,
4,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def __init__(self, VAR_2=None, VAR_3=None, VAR_4=True):...\n",
"self.vars = {}\n",
"self.groups = []\n",
"self._uuid = None\n",
"self.name = VAR_2\n",
"self.address = VAR_2\n",
"if VAR_3:\n",
"self.set_variable('ansible_port', int(VAR_3))\n",
"if VAR_4:\n",
"self._uuid = get_unique_id()\n",
"self.implicit = False\n"
] | [
"def __init__(self, name=None, port=None, gen_uuid=True):...\n",
"self.vars = {}\n",
"self.groups = []\n",
"self._uuid = None\n",
"self.name = name\n",
"self.address = name\n",
"if port:\n",
"self.set_variable('ansible_port', int(port))\n",
"if gen_uuid:\n",
"self._uuid = get_unique_id()\n",
"self.implicit = False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_3(self, VAR_5):...\n",
"VAR_14 = 'string'.format(VAR_5)\n",
"self.cur.execute(VAR_14)\n",
"return self.cur.fetchall()\n"
] | [
"def get_events(self, server_id):...\n",
"sql = (\n \"\"\"SELECT events.event_id as e, title, description, start_time, time_zone, (\n SELECT GROUP_CONCAT(DISTINCT username)\n FROM user_event, events\n WHERE user_event.event_id = e\n AND events.server_id = {0}\n AND user_event.attending = 1)\n AS accepted, (\n SELECT GROUP_CONCAT(DISTINCT username)\n FROM user_event, events\n WHERE user_event.event_id = e\n AND events.server_id = {0}\n AND user_event.attending = 0)\n AS declined\n FROM events\n WHERE events.server_id = {0}\n GROUP BY event_id, title, description, start_time, time_zone;\n \"\"\"\n .format(server_id))\n",
"self.cur.execute(sql)\n",
"return self.cur.fetchall()\n"
] | [
0,
4,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_0(VAR_0, VAR_1, **VAR_2):...\n",
"VAR_6 = '{}/{}?'.format(VAR_0, VAR_1)\n",
"VAR_7 = sorted(VAR_2.keys())\n",
"for key in VAR_7:\n",
"VAR_6 += '{}={}&'.format(key, VAR_2[key])\n",
"return hashlib.sha224(VAR_6.encode('utf8')).hexdigest()\n"
] | [
"def query_hash(project_id, query_name, **query_params):...\n",
"id_string = '{}/{}?'.format(project_id, query_name)\n",
"keylist = sorted(query_params.keys())\n",
"for key in keylist:\n",
"id_string += '{}={}&'.format(key, query_params[key])\n",
"return hashlib.sha224(id_string.encode('utf8')).hexdigest()\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"AugAssign'",
"Return'"
] |
[
"def FUNC_1(VAR_5):...\n",
"if not VAR_6:\n",
"return\n",
"VAR_20 = UltiSnips_Manager._snips('', 1)\n",
"return\n",
"VAR_5['ultisnips_snippets'] = [{'trigger': x.trigger, 'description': x.\n description} for x in VAR_20]\n"
] | [
"def _AddUltiSnipsDataIfNeeded(extra_data):...\n",
"if not USE_ULTISNIPS_DATA:\n",
"return\n",
"rawsnips = UltiSnips_Manager._snips('', 1)\n",
"return\n",
"extra_data['ultisnips_snippets'] = [{'trigger': x.trigger, 'description': x\n .description} for x in rawsnips]\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Return'",
"Assign'"
] |
[
"def FUNC_6(self, VAR_7):...\n",
"VAR_8 = self.content\n",
"while VAR_8:\n",
"yield VAR_8[:VAR_7]\n",
"VAR_8 = VAR_8[VAR_7:]\n"
] | [
"def iter_content(self, chunk_size):...\n",
"c = self.content\n",
"while c:\n",
"yield c[:chunk_size]\n",
"c = c[chunk_size:]\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Expr'",
"Assign'"
] |
[
"import asyncio\n",
"import tormysql\n",
"VAR_0 = None\n",
"VAR_1 = None\n",
"def FUNC_0(VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = VAR_2\n",
"def FUNC_1(VAR_3, VAR_4, VAR_5, VAR_6, VAR_7):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_0 = tormysql.ConnectionPool(max_connections=20, idle_seconds=7200,\n wait_connection_timeout=3, host=host_addr, user=user_id, passwd=\n password, VAR_6=db, charset='utf8')\n",
"return VAR_7.run_until_complete(FUNC_2())\n"
] | [
"import asyncio\n",
"import tormysql\n",
"_pool = None\n",
"_handler = None\n",
"def set_log_handler(handler):...\n",
"\"\"\"docstring\"\"\"\n",
"_handler = handler\n",
"def connect_db_server(host_addr, user_id, password, db, loop):...\n",
"\"\"\"docstring\"\"\"\n",
"_pool = tormysql.ConnectionPool(max_connections=20, idle_seconds=7200,\n wait_connection_timeout=3, host=host_addr, user=user_id, passwd=\n password, db=db, charset='utf8')\n",
"return loop.run_until_complete(is_connect_db())\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"@api.model...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_55 = False\n",
"for arg in VAR_34:\n",
"if arg[0] == 'name' and arg[1] == '%':\n",
"if VAR_55:\n",
"VAR_55 = arg[2]\n",
"VAR_38 = u\"similarity(res_partner.name, '%s') DESC\" % VAR_55\n",
"return super(CLASS_0, self).search(VAR_34, VAR_37, VAR_36, VAR_38, VAR_39)\n"
] | [
"@api.model...\n",
"\"\"\"docstring\"\"\"\n",
"fuzzy_search = False\n",
"for arg in args:\n",
"if arg[0] == 'name' and arg[1] == '%':\n",
"if fuzzy_search:\n",
"fuzzy_search = arg[2]\n",
"order = u\"similarity(res_partner.name, '%s') DESC\" % fuzzy_search\n",
"return super(ResPartner, self).search(args, offset, limit, order, count)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
4,
0
] | [
"Condition",
"Docstring",
"Assign'",
"For",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"import sqlalchemy\n",
"VAR_0 = \"SELECT * FROM foo WHERE id = '%s'\" % identifier\n",
"VAR_0 = \"INSERT INTO foo VALUES ('a', 'b', '%s')\" % value\n",
"VAR_0 = \"DELETE FROM foo WHERE id = '%s'\" % identifier\n",
"VAR_0 = \"UPDATE foo SET value = 'b' WHERE id = '%s'\" % identifier\n",
"VAR_0 = (\n \"\"\"WITH cte AS (SELECT x FROM foo)\nSELECT x FROM cte WHERE x = '%s'\"\"\" %\n identifier)\n",
"cur.execute(\"SELECT * FROM foo WHERE id = '%s'\" % identifier)\n",
"cur.execute(\"INSERT INTO foo VALUES ('a', 'b', '%s')\" % value)\n",
"cur.execute(\"DELETE FROM foo WHERE id = '%s'\" % identifier)\n",
"cur.execute(\"UPDATE foo SET value = 'b' WHERE id = '%s'\" % identifier)\n",
"cur.execute(\"SELECT * FROM foo WHERE id = '%s'\", identifier)\n",
"cur.execute(\"INSERT INTO foo VALUES ('a', 'b', '%s')\", value)\n",
"cur.execute(\"DELETE FROM foo WHERE id = '%s'\", identifier)\n",
"cur.execute(\"UPDATE foo SET value = 'b' WHERE id = '%s'\", identifier)\n",
"VAR_0 = 'SELECT ' + val + ' FROM ' + val + ' WHERE id = ' + val\n",
"cur.execute('SELECT ' + val + ' FROM ' + val + ' WHERE id = ' + val)\n",
"def FUNC_0():...\n",
"def FUNC_1():...\n",
"return FUNC_1\n"
] | [
"import sqlalchemy\n",
"query = \"SELECT * FROM foo WHERE id = '%s'\" % identifier\n",
"query = \"INSERT INTO foo VALUES ('a', 'b', '%s')\" % value\n",
"query = \"DELETE FROM foo WHERE id = '%s'\" % identifier\n",
"query = \"UPDATE foo SET value = 'b' WHERE id = '%s'\" % identifier\n",
"query = (\n \"\"\"WITH cte AS (SELECT x FROM foo)\nSELECT x FROM cte WHERE x = '%s'\"\"\" %\n identifier)\n",
"cur.execute(\"SELECT * FROM foo WHERE id = '%s'\" % identifier)\n",
"cur.execute(\"INSERT INTO foo VALUES ('a', 'b', '%s')\" % value)\n",
"cur.execute(\"DELETE FROM foo WHERE id = '%s'\" % identifier)\n",
"cur.execute(\"UPDATE foo SET value = 'b' WHERE id = '%s'\" % identifier)\n",
"cur.execute(\"SELECT * FROM foo WHERE id = '%s'\", identifier)\n",
"cur.execute(\"INSERT INTO foo VALUES ('a', 'b', '%s')\", value)\n",
"cur.execute(\"DELETE FROM foo WHERE id = '%s'\", identifier)\n",
"cur.execute(\"UPDATE foo SET value = 'b' WHERE id = '%s'\", identifier)\n",
"query = 'SELECT ' + val + ' FROM ' + val + ' WHERE id = ' + val\n",
"cur.execute('SELECT ' + val + ' FROM ' + val + ' WHERE id = ' + val)\n",
"def a():...\n",
"def b():...\n",
"return b\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
0
] | [
"Import'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"FunctionDef'",
"Return'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_17 = [VAR_28.mtime for VAR_28 in self.input if VAR_28.exists]\n",
"if VAR_17:\n",
"return max(VAR_17)\n",
"return None\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"existing = [f.mtime for f in self.input if f.exists]\n",
"if existing:\n",
"return max(existing)\n",
"return None\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_27(VAR_13):...\n",
"self.assertEqual('/request', VAR_13.path)\n",
"VAR_12.append('not_applicable')\n",
"return None\n"
] | [
"def not_applicable(request):...\n",
"self.assertEqual('/request', request.path)\n",
"calls.append('not_applicable')\n",
"return None\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_12(self, VAR_6):...\n",
"VAR_22 = 'string' + \"({}, {}, '{}', '{}', '{}', {})\".format(json.dumps(\n VAR_4), json.dumps(VAR_5), VAR_6, datetime.now(), VAR_7, json.dumps(VAR_8))\n",
"self.cur.execute(VAR_22)\n",
"self.conn.commit()\n"
] | [
"def check_query(self, qhash):...\n",
"sql = (\n 'INSERT INTO log_query (query_text, query_search, query_hash, query_time, client_ip, client_browser) VALUES'\n + \"({}, {}, '{}', '{}', '{}', {})\".format(json.dumps(text), json.dumps\n (search), qhash, datetime.now(), ip, json.dumps(browser)))\n",
"self.cur.execute(sql)\n",
"self.conn.commit()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0():...\n",
"VAR_2.flushdb()\n",
"FUNC_1()\n",
"FUNC_2('[email protected]', 'pwd')\n"
] | [
"def db_init():...\n",
"db.flushdb()\n",
"auth_init()\n",
"auth_add_user('[email protected]', 'pwd')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_31(self, VAR_48, VAR_49):...\n",
"VAR_69, VAR_73 = self.post('/mail/%s/tags' % VAR_48, VAR_49)\n",
"return VAR_69\n"
] | [
"def post_tags(self, mail_ident, tags_json):...\n",
"res, req = self.post('/mail/%s/tags' % mail_ident, tags_json)\n",
"return res\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_7(self, VAR_17, VAR_16):...\n",
"if VAR_16.endswith('/'):\n",
"VAR_17 = os.path.join(VAR_17, self.index_file)\n",
"if VAR_16.endswith('/' + self.index_file):\n",
"return self.get_static_file(VAR_17, VAR_16)\n",
"if os.path.isfile(VAR_17):\n",
"return self.get_static_file(VAR_17, VAR_16)\n",
"if os.path.isfile(os.path.join(VAR_17, self.index_file)):\n",
"return self.redirect(VAR_16, VAR_16[:-len(self.index_file)])\n",
"return self.redirect(VAR_16, VAR_16 + '/')\n"
] | [
"def find_file_at_path_with_indexes(self, path, url):...\n",
"if url.endswith('/'):\n",
"path = os.path.join(path, self.index_file)\n",
"if url.endswith('/' + self.index_file):\n",
"return self.get_static_file(path, url)\n",
"if os.path.isfile(path):\n",
"return self.get_static_file(path, url)\n",
"if os.path.isfile(os.path.join(path, self.index_file)):\n",
"return self.redirect(url, url[:-len(self.index_file)])\n",
"return self.redirect(url, url + '/')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_9(self):...\n",
"self.run_test_case(self.scenario.create_app())\n"
] | [
"def test_a_create_app(self):...\n",
"self.run_test_case(self.scenario.create_app())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def __init__(self, VAR_9, *, VAR_10=0, VAR_11=None, VAR_12=2):...\n",
"self.wires = VAR_9\n",
"self.cutoff = VAR_11\n",
"self.hbar = VAR_12\n",
"self.eng = None\n",
"self.state = None\n",
"super().__init__(self.short_name, VAR_10)\n"
] | [
"def __init__(self, wires, *, shots=0, cutoff=None, hbar=2):...\n",
"self.wires = wires\n",
"self.cutoff = cutoff\n",
"self.hbar = hbar\n",
"self.eng = None\n",
"self.state = None\n",
"super().__init__(self.short_name, shots)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_9(self):...\n",
"remove_perm_from_user(self.tester, self.permission)\n",
"self.client.login(username=self.tester.username, password='password')\n",
"VAR_1 = self.client.post(self.update_url, {'content_type':\n 'testruns.testcaserun', 'object_pk': self.case_run_1.pk, 'field':\n 'case_run_status', 'value': str(TestCaseRunStatus.objects.get(name=\n 'PAUSED').pk), 'value_type': 'int'})\n",
"self.assertJSONEqual(str(VAR_1.content, encoding=settings.DEFAULT_CHARSET),\n {'rc': 1, 'response': 'Permission Dinied.'})\n"
] | [
"def test_refuse_if_missing_permission(self):...\n",
"remove_perm_from_user(self.tester, self.permission)\n",
"self.client.login(username=self.tester.username, password='password')\n",
"response = self.client.post(self.update_url, {'content_type':\n 'testruns.testcaserun', 'object_pk': self.case_run_1.pk, 'field':\n 'case_run_status', 'value': str(TestCaseRunStatus.objects.get(name=\n 'PAUSED').pk), 'value_type': 'int'})\n",
"self.assertJSONEqual(str(response.content, encoding=settings.\n DEFAULT_CHARSET), {'rc': 1, 'response': 'Permission Dinied.'})\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_6(self, VAR_40):...\n",
"if VAR_40 and len(VAR_40) > 500:\n",
"VAR_101.errors.add(errors.DESC_TOO_LONG)\n",
"return unkeep_space(VAR_40 or '')\n"
] | [
"def run(self, description):...\n",
"if description and len(description) > 500:\n",
"c.errors.add(errors.DESC_TOO_LONG)\n",
"return unkeep_space(description or '')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_13(VAR_5, VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_34 = {}\n",
"VAR_34['public'] = True\n",
"VAR_34['files'] = {}\n",
"VAR_34['description'] = \"In response to @{0}'s comment : {1}\".format(VAR_5[\n 'reviewer'], VAR_5['review_url'])\n",
"for VAR_47, diffs in VAR_5['diff'].items():\n",
"if len(diffs) != 0:\n",
"VAR_8 = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN']}\n",
"VAR_34['files'][VAR_47.split('/')[-1] + '.diff'] = {'content': diffs}\n",
"VAR_9 = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n",
"VAR_10 = 'https://api.github.com/gists'\n",
"VAR_35 = requests.post(VAR_10, json=REQUEST_JSON, VAR_8=headers, VAR_9=auth\n ).json()\n",
"VAR_5['gist_response'] = VAR_35\n",
"VAR_5['gist_url'] = VAR_35['html_url']\n"
] | [
"def create_gist(data, config):...\n",
"\"\"\"docstring\"\"\"\n",
"REQUEST_JSON = {}\n",
"REQUEST_JSON['public'] = True\n",
"REQUEST_JSON['files'] = {}\n",
"REQUEST_JSON['description'] = \"In response to @{0}'s comment : {1}\".format(data\n ['reviewer'], data['review_url'])\n",
"for file, diffs in data['diff'].items():\n",
"if len(diffs) != 0:\n",
"headers = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN']}\n",
"REQUEST_JSON['files'][file.split('/')[-1] + '.diff'] = {'content': diffs}\n",
"auth = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n",
"url = 'https://api.github.com/gists'\n",
"res = requests.post(url, json=REQUEST_JSON, headers=headers, auth=auth).json()\n",
"data['gist_response'] = res\n",
"data['gist_url'] = res['html_url']\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_0(self, VAR_3):...\n",
"VAR_5 = '%s int' % str(VAR_3)\n",
"if self.unique:\n",
"VAR_5 += ' UNIQUE'\n",
"return VAR_5\n"
] | [
"def create_field(self, name):...\n",
"field_string = '%s int' % str(name)\n",
"if self.unique:\n",
"field_string += ' UNIQUE'\n",
"return field_string\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"AugAssign'",
"Return'"
] |