lines (sequence, lengths 1–444) | raw_lines (sequence, lengths 1–444) | label (sequence, lengths 1–444) | type (sequence, lengths 1–444)
---|---|---|---|
[
"\"\"\"string\"\"\"\n",
"import random\n",
"from eventlet import greenthread\n",
"from oslo.config import cfg\n",
"from cinder import exception\n",
"from cinder.openstack.common import excutils\n",
"from cinder.openstack.common import log as logging\n",
"from cinder import utils\n",
"from cinder.volume import driver\n",
"VAR_0 = logging.getLogger(__name__)\n",
"VAR_1 = [cfg.BoolOpt('san_thin_provision', default=True, help=\n 'Use thin provisioning for SAN volumes?'), cfg.StrOpt('san_ip', default\n ='', help='IP address of SAN controller'), cfg.StrOpt('san_login',\n default='admin', help='Username for SAN controller'), cfg.StrOpt(\n 'san_password', default='', help='Password for SAN controller', secret=\n True), cfg.StrOpt('san_private_key', default='', help=\n 'Filename of private key to use for SSH authentication'), cfg.StrOpt(\n 'san_clustername', default='', help=\n 'Cluster name to use for creating volumes'), cfg.IntOpt('san_ssh_port',\n default=22, help='SSH port to use with SAN'), cfg.BoolOpt(\n 'san_is_local', default=False, help=\n 'Execute commands locally instead of over SSH; use if the volume service is running on the SAN device'\n ), cfg.IntOpt('ssh_conn_timeout', default=30, help=\n 'SSH connection timeout in seconds'), cfg.IntOpt('ssh_min_pool_conn',\n default=1, help='Minimum ssh connections in the pool'), cfg.IntOpt(\n 'ssh_max_pool_conn', default=5, help='Maximum ssh connections in the pool')\n ]\n",
"VAR_2 = cfg.CONF\n",
"VAR_2.register_opts(VAR_1)\n",
"\"\"\"string\"\"\"\n",
"def __init__(self, *VAR_3, **VAR_4):...\n",
"VAR_11 = VAR_4.pop('execute', self.san_execute)\n",
"super(CLASS_0, self).__init__(*VAR_3, VAR_11=execute, **kwargs)\n",
"self.configuration.append_config_values(VAR_1)\n",
"self.run_local = self.configuration.san_is_local\n",
"self.sshpool = None\n",
"def FUNC_0(self, *VAR_5, **VAR_4):...\n",
"if self.run_local:\n",
"return utils.execute(*VAR_5, **kwargs)\n",
"VAR_7 = VAR_4.pop('check_exit_code', None)\n",
"VAR_6 = ' '.join(VAR_5)\n",
"return self._run_ssh(VAR_6, VAR_7)\n"
] | [
"\"\"\"\nDefault Driver for san-stored volumes.\n\nThe unique thing about a SAN is that we don't expect that we can run the volume\ncontroller on the SAN hardware. We expect to access it over SSH or some API.\n\"\"\"\n",
"import random\n",
"from eventlet import greenthread\n",
"from oslo.config import cfg\n",
"from cinder import exception\n",
"from cinder.openstack.common import excutils\n",
"from cinder.openstack.common import log as logging\n",
"from cinder import utils\n",
"from cinder.volume import driver\n",
"LOG = logging.getLogger(__name__)\n",
"san_opts = [cfg.BoolOpt('san_thin_provision', default=True, help=\n 'Use thin provisioning for SAN volumes?'), cfg.StrOpt('san_ip', default\n ='', help='IP address of SAN controller'), cfg.StrOpt('san_login',\n default='admin', help='Username for SAN controller'), cfg.StrOpt(\n 'san_password', default='', help='Password for SAN controller', secret=\n True), cfg.StrOpt('san_private_key', default='', help=\n 'Filename of private key to use for SSH authentication'), cfg.StrOpt(\n 'san_clustername', default='', help=\n 'Cluster name to use for creating volumes'), cfg.IntOpt('san_ssh_port',\n default=22, help='SSH port to use with SAN'), cfg.BoolOpt(\n 'san_is_local', default=False, help=\n 'Execute commands locally instead of over SSH; use if the volume service is running on the SAN device'\n ), cfg.IntOpt('ssh_conn_timeout', default=30, help=\n 'SSH connection timeout in seconds'), cfg.IntOpt('ssh_min_pool_conn',\n default=1, help='Minimum ssh connections in the pool'), cfg.IntOpt(\n 'ssh_max_pool_conn', default=5, help='Maximum ssh connections in the pool')\n ]\n",
"CONF = cfg.CONF\n",
"CONF.register_opts(san_opts)\n",
"\"\"\"Base class for SAN-style storage volumes\n\n A SAN-style storage value is 'different' because the volume controller\n probably won't run on it, so we need to access is over SSH or another\n remote protocol.\n \"\"\"\n",
"def __init__(self, *args, **kwargs):...\n",
"execute = kwargs.pop('execute', self.san_execute)\n",
"super(SanDriver, self).__init__(*args, execute=execute, **kwargs)\n",
"self.configuration.append_config_values(san_opts)\n",
"self.run_local = self.configuration.san_is_local\n",
"self.sshpool = None\n",
"def san_execute(self, *cmd, **kwargs):...\n",
"if self.run_local:\n",
"return utils.execute(*cmd, **kwargs)\n",
"check_exit_code = kwargs.pop('check_exit_code', None)\n",
"command = ' '.join(cmd)\n",
"return self._run_ssh(command, check_exit_code)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_6(self, VAR_45):...\n",
""
] | [
"def run(self, uh):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_30(self, VAR_31, VAR_32):...\n",
"if not VAR_31:\n",
"return []\n",
"VAR_59 = self._find_scalac_plugins(list(VAR_31.keys()), VAR_32)\n",
"VAR_58 = []\n",
"for VAR_78, cp_entries in VAR_59.items():\n",
"VAR_58.append('-S-Xplugin:{}'.format(':'.join(cp_entries)))\n",
"return VAR_58\n",
"for VAR_68 in VAR_31[VAR_78]:\n",
"VAR_58.append('-S-P:{}:{}'.format(VAR_78, VAR_68))\n"
] | [
"def _scalac_plugin_args(self, scalac_plugin_map, classpath):...\n",
"if not scalac_plugin_map:\n",
"return []\n",
"plugin_jar_map = self._find_scalac_plugins(list(scalac_plugin_map.keys()),\n classpath)\n",
"ret = []\n",
"for name, cp_entries in plugin_jar_map.items():\n",
"ret.append('-S-Xplugin:{}'.format(':'.join(cp_entries)))\n",
"return ret\n",
"for arg in scalac_plugin_map[name]:\n",
"ret.append('-S-P:{}:{}'.format(name, arg))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Return'",
"For",
"Expr'"
] |
[
"def FUNC_6(self):...\n",
"return 'inet6_ntoa({0})'.format(self.name)\n"
] | [
"def select(self):...\n",
"return 'inet6_ntoa({0})'.format(self.name)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_3(self):...\n",
""
] | [
"def tearDown(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"@staticmethod...\n",
"VAR_12 = VAR_10.get('instance')\n",
"VAR_13 = VAR_12.tracker.previous('raw_cas')\n",
"VAR_14 = VAR_12.tracker.previous('raw_chem_name')\n",
"if VAR_12.tracker.has_changed('raw_cas') or VAR_12.tracker.has_changed(\n",
"VAR_12.dsstox = None\n"
] | [
"@staticmethod...\n",
"instance = kwargs.get('instance')\n",
"previous_raw_cas = instance.tracker.previous('raw_cas')\n",
"previous_raw_chem_name = instance.tracker.previous('raw_chem_name')\n",
"if instance.tracker.has_changed('raw_cas') or instance.tracker.has_changed(\n",
"instance.dsstox = None\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_19(self):...\n",
"return Component.objects.filter(product__id=self.product_id)\n"
] | [
"def components(self):...\n",
"return Component.objects.filter(product__id=self.product_id)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_4(self, VAR_4, *VAR_5, **VAR_6):...\n",
"if not self.exercise.is_submittable:\n",
"return self.http_method_not_allowed(VAR_4, *VAR_5, **kwargs)\n",
"VAR_20 = None\n",
"VAR_19 = ExercisePage(self.exercise)\n",
"VAR_21, VAR_14, VAR_17, VAR_18 = self.submission_check(True, VAR_4)\n",
"if VAR_14:\n",
"VAR_20 = Submission.objects.create_from_post(self.exercise, VAR_18, VAR_4)\n",
"self.get_summary_submissions()\n",
"if VAR_20:\n",
"return self.response(VAR_19=page, VAR_18=students, submission=new_submission)\n",
"VAR_19 = self.exercise.grade(VAR_4, VAR_20, url_name=self.post_url_name)\n",
"messages.error(VAR_4, _(\n 'The submission could not be saved for some reason. The submission was not registered.'\n ))\n",
"if self.exercise.status in (LearningObject.STATUS.ENROLLMENT,\n",
"if not VAR_4.is_ajax() and '__r' in VAR_4.GET:\n",
"self.instance.enroll_student(self.request.user)\n",
"if not VAR_4.is_ajax() and '__r' not in VAR_4.GET:\n",
"return self.redirect(VAR_4.GET['__r'], backup=self.exercise)\n",
"return self.redirect(VAR_20.get_absolute_url() + ('?wait=1' if VAR_19.\n is_wait else ''))\n"
] | [
"def post(self, request, *args, **kwargs):...\n",
"if not self.exercise.is_submittable:\n",
"return self.http_method_not_allowed(request, *args, **kwargs)\n",
"new_submission = None\n",
"page = ExercisePage(self.exercise)\n",
"submission_status, submission_allowed, issues, students = (self.\n submission_check(True, request))\n",
"if submission_allowed:\n",
"new_submission = Submission.objects.create_from_post(self.exercise,\n students, request)\n",
"self.get_summary_submissions()\n",
"if new_submission:\n",
"return self.response(page=page, students=students, submission=new_submission)\n",
"page = self.exercise.grade(request, new_submission, url_name=self.post_url_name\n )\n",
"messages.error(request, _(\n 'The submission could not be saved for some reason. The submission was not registered.'\n ))\n",
"if self.exercise.status in (LearningObject.STATUS.ENROLLMENT,\n",
"if not request.is_ajax() and '__r' in request.GET:\n",
"self.instance.enroll_student(self.request.user)\n",
"if not request.is_ajax() and '__r' not in request.GET:\n",
"return self.redirect(request.GET['__r'], backup=self.exercise)\n",
"return self.redirect(new_submission.get_absolute_url() + ('?wait=1' if page\n .is_wait else ''))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_20(VAR_36):...\n",
"VAR_63 = {}\n",
"for VAR_62, info in VAR_36.items():\n",
"if VAR_62.core:\n",
"return VAR_63\n",
"VAR_63[VAR_62] = info\n"
] | [
"def _filter_core_keywors(keywords):...\n",
"matches = {}\n",
"for kw, info in keywords.items():\n",
"if kw.core:\n",
"return matches\n",
"matches[kw] = info\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Condition",
"Return'",
"Assign'"
] |
[
"VAR_0 = {'status': ['stableinterface'], 'supported_by': 'community',\n 'version': '1.0'}\n",
"VAR_1 = 'string'\n",
"VAR_2 = 'string'\n",
"import psycopg2\n",
"VAR_6 = False\n",
"VAR_6 = True\n",
"import psycopg2.extras\n",
"from ansible.module_utils.six import iteritems\n",
"def FUNC_0(VAR_3, VAR_4):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = 'string'\n",
"VAR_3.execute(VAR_7 % VAR_4)\n",
"return VAR_3.fetchone()[0]\n"
] | [
"ANSIBLE_METADATA = {'status': ['stableinterface'], 'supported_by':\n 'community', 'version': '1.0'}\n",
"DOCUMENTATION = \"\"\"\n---\nmodule: postgresql_setting\nshort_description: manage config settings for PostgreSQL instance.\ndescription:\n - Change server configuration parameters across the entire database cluster\n - New values will be effective after the next server configuration reload,\n or after the next server restart in the case of parameters that can only\n be changed at server start\n - Only superusers can change configuration settings\nauthor: \"Kostiantyn Nemchenko (@kostiantyn-nemchenko)\"\nversion_added: \"2.3\"\nrequirements:\n - psycopg2\noptions:\n login_user:\n description:\n - The username used to authenticate with\n required: false\n default: null\n login_password:\n description:\n - The password used to authenticate with\n required: false\n default: null\n login_host:\n description:\n - Host running the database\n required: false\n default: localhost\n login_unix_socket:\n description:\n - Path to a Unix domain socket for local connections\n required: false\n default: null\n port:\n description:\n - Database port to connect to.\n required: false\n default: 5432\n option:\n description:\n - The parameter from PostgreSQL configuration file\n required: true\n default: null\n value:\n description:\n - The value of the parameter to change\n required: false\n default: null\n state:\n description:\n - The parameter state\n required: false\n default: present\n choices: [ \"present\", \"absent\" ]\n\"\"\"\n",
"EXAMPLES = \"\"\"\n# Set work_mem parameter to 8MB\n- postgresql_setting:\n option: work_mem\n value: 8MB\n state: present\n\n# Allow only local TCP/IP \"loopback\" connections to be made\n- postgresql_setting:\n option: listen_addresses\n state: absent\n\n# Enable autovacuum\n- postgresql_setting:\n option: autovacuum\n value: on\n\"\"\"\n",
"import psycopg2\n",
"postgresqldb_found = False\n",
"postgresqldb_found = True\n",
"import psycopg2.extras\n",
"from ansible.module_utils.six import iteritems\n",
"def option_ispreset(cursor, option):...\n",
"\"\"\"docstring\"\"\"\n",
"query = \"\"\"\n SELECT EXISTS\n (SELECT 1\n FROM pg_settings\n WHERE context = 'internal'\n AND name = '%s')\n \"\"\"\n",
"cursor.execute(query % option)\n",
"return cursor.fetchone()[0]\n"
] | [
4,
4,
4,
4,
4,
4,
4,
4,
0,
0,
4,
4,
4
] | [
"Assign'",
"Assign'",
"Assign'",
"Import'",
"Assign'",
"Assign'",
"Import'",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Return'"
] |
[
"@property...\n",
"if not self._is_function:\n",
"return self._file\n"
] | [
"@property...\n",
"if not self._is_function:\n",
"return self._file\n"
] | [
0,
0,
0
] | [
"Condition",
"Condition",
"Return'"
] |
[
"def FUNC_0():...\n",
"VAR_13 = VAR_0.path.dirname(VAR_0.path.abspath(__file__))\n",
"return VAR_0.path.join(VAR_13, 'server/ycmd.py')\n"
] | [
"def _PathToServerScript():...\n",
"dir_of_current_script = os.path.dirname(os.path.abspath(__file__))\n",
"return os.path.join(dir_of_current_script, 'server/ycmd.py')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_8(VAR_6):...\n",
"return 'subdir' not in VAR_6\n"
] | [
"def file_filter(path):...\n",
"return 'subdir' not in path\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@eqlx.with_timeout...\n",
"time.sleep(1)\n"
] | [
"@eqlx.with_timeout...\n",
"time.sleep(1)\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"def FUNC_2(self):...\n",
"self.render('login.html')\n"
] | [
"def get(self):...\n",
"self.render('login.html')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_3(VAR_5, VAR_6):...\n",
"VAR_38 = os.getcwd()\n",
"VAR_39 = VAR_5.replace('/', '_')\n",
"VAR_40 = VAR_38 + '/pickle/' + str(VAR_39) + '.p'\n",
"pickle.dump(VAR_6, p)\n"
] | [
"def dump_pickle_data(base_fname, data):...\n",
"cwd = os.getcwd()\n",
"bracket_name = base_fname.replace('/', '_')\n",
"fname = cwd + '/pickle/' + str(bracket_name) + '.p'\n",
"pickle.dump(data, p)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_4(VAR_2, VAR_3):...\n",
"VAR_16 = {'Title': 'lower(b.Title) {0}', 'ISBN': 'b.ISBN {0}', 'Volume':\n 'b.Volume {0}', 'Series': 'lower(s.name) {0}', 'Published':\n 'b.Published {0}', 'Category': 'lower(b.Category) {0}', 'Status':\n 'lower(b.Status) {0}', 'CoverType': 'lower(b.CoverType) {0}', 'Notes':\n 'lower(b.Notes) {0}', 'id': 'b.id {0}', 'Author':\n 'lower(a.LastName) {0}, lower(a.FirstName) {0}'}\n",
"VAR_17 = 'asc'\n",
"if VAR_3.lower() == 'desc':\n",
"VAR_17 = 'desc'\n",
"return VAR_16[VAR_2].format(VAR_17)\n"
] | [
"def get_sort_clause(sort_col, sort_dir):...\n",
"column_sort_list = {'Title': 'lower(b.Title) {0}', 'ISBN': 'b.ISBN {0}',\n 'Volume': 'b.Volume {0}', 'Series': 'lower(s.name) {0}', 'Published':\n 'b.Published {0}', 'Category': 'lower(b.Category) {0}', 'Status':\n 'lower(b.Status) {0}', 'CoverType': 'lower(b.CoverType) {0}', 'Notes':\n 'lower(b.Notes) {0}', 'id': 'b.id {0}', 'Author':\n 'lower(a.LastName) {0}, lower(a.FirstName) {0}'}\n",
"sd = 'asc'\n",
"if sort_dir.lower() == 'desc':\n",
"sd = 'desc'\n",
"return column_sort_list[sort_col].format(sd)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_8(self, VAR_14):...\n",
""
] | [
"def is_file(self, col_name):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_45(self):...\n",
"return self.__class__.__call__(VAR_37=self, VAR_39=True)\n"
] | [
"def plainstrings(self):...\n",
"return self.__class__.__call__(toclone=self, plainstr=True)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_1(VAR_1, VAR_6):...\n",
""
] | [
"def directory_index(path, fullpath):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"@VAR_0.route('/about')...\n",
"return render_template('about.html')\n"
] | [
"@app.route('/about')...\n",
"return render_template('about.html')\n"
] | [
0,
4
] | [
"Condition",
"Return'"
] |
[
"@auth.autologin...\n",
"VAR_13 = int(self.request.get('limit', 100))\n",
"VAR_14 = datastore_query.Cursor(urlsafe=self.request.get('cursor'))\n",
"VAR_15, VAR_14, VAR_16 = models.Error.query().order(-models.Error.created_ts\n ).fetch_page(VAR_13, start_cursor=cursor)\n",
"VAR_11 = {'cursor': VAR_14.urlsafe() if VAR_14 and VAR_16 else None,\n 'errors': VAR_15, 'limit': VAR_13, 'now': utils.utcnow()}\n",
"self.response.out.write(template.render('ereporter2/errors.html', VAR_11))\n"
] | [
"@auth.autologin...\n",
"limit = int(self.request.get('limit', 100))\n",
"cursor = datastore_query.Cursor(urlsafe=self.request.get('cursor'))\n",
"errors_found, cursor, more = models.Error.query().order(-models.Error.\n created_ts).fetch_page(limit, start_cursor=cursor)\n",
"params = {'cursor': cursor.urlsafe() if cursor and more else None, 'errors':\n errors_found, 'limit': limit, 'now': utils.utcnow()}\n",
"self.response.out.write(template.render('ereporter2/errors.html', params))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_14(self, VAR_9, VAR_10=None):...\n",
"if isinstance(VAR_9, str) or callable(VAR_9):\n",
"self.log.append(IOFile(VAR_9, VAR_16=self) if isinstance(VAR_9, str) else VAR_9\n )\n",
"VAR_48 = len(self.log)\n",
"if VAR_10:\n",
"for VAR_51 in VAR_9:\n",
"self.log.add_name(VAR_10)\n",
"self._set_log_item(VAR_51)\n",
"if VAR_10:\n",
"self.log.set_name(VAR_10, VAR_48, end=len(self.log))\n"
] | [
"def _set_log_item(self, item, name=None):...\n",
"if isinstance(item, str) or callable(item):\n",
"self.log.append(IOFile(item, rule=self) if isinstance(item, str) else item)\n",
"start = len(self.log)\n",
"if name:\n",
"for i in item:\n",
"self.log.add_name(name)\n",
"self._set_log_item(i)\n",
"if name:\n",
"self.log.set_name(name, start, end=len(self.log))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"For",
"Expr'",
"Expr'",
"Condition",
"Expr'"
] |
[
"__author__ = 'Johannes Köster'\n",
"__copyright__ = 'Copyright 2015, Johannes Köster'\n",
"__email__ = '[email protected]'\n",
"__license__ = 'MIT'\n",
"import os\n",
"import re\n",
"import stat\n",
"import time\n",
"import json\n",
"from itertools import product, chain\n",
"from collections import Iterable, namedtuple\n",
"from snakemake.exceptions import MissingOutputException, WorkflowError, WildcardError\n",
"from snakemake.logging import logger\n",
"def FUNC_0(VAR_0):...\n",
"return os.stat(VAR_0, follow_symlinks=os.stat not in os.\n supports_follow_symlinks)\n"
] | [
"__author__ = 'Johannes Köster'\n",
"__copyright__ = 'Copyright 2015, Johannes Köster'\n",
"__email__ = '[email protected]'\n",
"__license__ = 'MIT'\n",
"import os\n",
"import re\n",
"import stat\n",
"import time\n",
"import json\n",
"from itertools import product, chain\n",
"from collections import Iterable, namedtuple\n",
"from snakemake.exceptions import MissingOutputException, WorkflowError, WildcardError\n",
"from snakemake.logging import logger\n",
"def lstat(f):...\n",
"return os.stat(f, follow_symlinks=os.stat not in os.supports_follow_symlinks)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
7,
0,
0,
0
] | [
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Return'"
] |
[
"@integration_synonym_api...\n",
"FUNC_2(VAR_2)\n",
"FUNC_3(VAR_2, 'KIRK', VAR_7='1')\n",
"FUNC_5(VAR_4, VAR_5, VAR_11='CIRCLE', VAR_10=[{'name': '----CIRCLE'}])\n"
] | [
"@integration_synonym_api...\n",
"clean_database(solr)\n",
"seed_database_with(solr, 'KIRK', id='1')\n",
"verify_results(client, jwt, query='CIRCLE', expected=[{'name': '----CIRCLE'}])\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_31(self):...\n",
""
] | [
"def touch_or_create(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"@login_required...\n",
"\"\"\"docstring\"\"\"\n",
"return render_to_response('student_account/account_settings.html', FUNC_11(\n VAR_3))\n"
] | [
"@login_required...\n",
"\"\"\"docstring\"\"\"\n",
"return render_to_response('student_account/account_settings.html',\n account_settings_context(request))\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_43(self):...\n",
"self.compilation_ko(\"\"\"\nelsif header :is \"From\" \"toto\" {\n\n}\n\"\"\")\n"
] | [
"def test_misplaced_elsif2(self):...\n",
"self.compilation_ko(\"\"\"\nelsif header :is \"From\" \"toto\" {\n\n}\n\"\"\")\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def __init__(self, *VAR_18):...\n",
"self._homes = VAR_18\n"
] | [
"def __init__(self, *homes):...\n",
"self._homes = homes\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_18(self, VAR_34, VAR_36, VAR_37):...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_34:\n",
"return True\n",
"return os.lseek(VAR_34, VAR_36, VAR_37)\n"
] | [
"def lseek(self, fd, offset, whence):...\n",
"\"\"\"docstring\"\"\"\n",
"if not fd:\n",
"return True\n",
"return os.lseek(fd, offset, whence)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_1(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = FUNC_0()\n",
"VAR_6.cursor().execute(VAR_1)\n",
"VAR_6.commit()\n",
"VAR_6.close()\n"
] | [
"def _commit(query):...\n",
"\"\"\"docstring\"\"\"\n",
"c = connect()\n",
"c.cursor().execute(query)\n",
"c.commit()\n",
"c.close()\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@Json...\n",
"if VAR_2._chk_error(errors.NO_TITLE):\n",
"VAR_2._chk_error(errors.TITLE_TOO_LONG)\n",
"VAR_2._chk_errors((errors.NO_LOCATION, errors.NO_DESCRIPTION, errors.\n INVALID_DATE, errors.NO_DATE))\n",
"VAR_2._focus('title')\n",
"if VAR_2.error:\n",
"return\n",
"VAR_0.title = VAR_4\n",
"VAR_0.description = VAR_8\n",
"VAR_0.location = VAR_9\n",
"VAR_0.latitude = VAR_10\n",
"VAR_0.longitude = VAR_11\n",
"VAR_0.timestamp = VAR_12 / 1000\n",
"VAR_0.tzoffset = VAR_13\n",
"g.rendercache.invalidate_key_group(Meetup.group_cache_key())\n",
"VAR_0._commit()\n",
"VAR_18 = Link._byID(VAR_0.assoc_link)\n",
"VAR_18._load()\n",
"VAR_19 = VAR_18.url\n",
"VAR_18.title = FUNC_1(VAR_0)\n",
"VAR_18.article = FUNC_0(VAR_0)\n",
"VAR_18._commit()\n",
"VAR_18.update_url_cache(VAR_19)\n",
"VAR_2._redirect(url_for(action='show', id=meetup._id36))\n"
] | [
"@Json...\n",
"if res._chk_error(errors.NO_TITLE):\n",
"res._chk_error(errors.TITLE_TOO_LONG)\n",
"res._chk_errors((errors.NO_LOCATION, errors.NO_DESCRIPTION, errors.\n INVALID_DATE, errors.NO_DATE))\n",
"res._focus('title')\n",
"if res.error:\n",
"return\n",
"meetup.title = title\n",
"meetup.description = description\n",
"meetup.location = location\n",
"meetup.latitude = latitude\n",
"meetup.longitude = longitude\n",
"meetup.timestamp = timestamp / 1000\n",
"meetup.tzoffset = tzoffset\n",
"g.rendercache.invalidate_key_group(Meetup.group_cache_key())\n",
"meetup._commit()\n",
"article = Link._byID(meetup.assoc_link)\n",
"article._load()\n",
"article_old_url = article.url\n",
"article.title = meetup_article_title(meetup)\n",
"article.article = meetup_article_text(meetup)\n",
"article._commit()\n",
"article.update_url_cache(article_old_url)\n",
"res._redirect(url_for(action='show', id=meetup._id36))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"\"\"\"docstring\"\"\"\n",
"self.common.client_login()\n",
"VAR_18 = self.common.create_volume_from_snapshot(VAR_6, VAR_8)\n",
"self.common.client_logout()\n",
"return {'metadata': VAR_18}\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"\"\"\"docstring\"\"\"\n",
"self.common.client_login()\n",
"metadata = self.common.create_volume_from_snapshot(volume, snapshot)\n",
"self.common.client_logout()\n",
"return {'metadata': metadata}\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"@app.route('/json/tweets/search/<query>', methods=['GET'])...\n",
"VAR_13 = '487593326-yu9WIClcUgs9vBWJGGgW4QC9pKedHMdm3NhhNoxe'\n",
"VAR_14 = 'fMcsDcqTtbeM73qB7Cxo7dGKhZT9byGh7i5lKjOVscQzP'\n",
"VAR_15 = 'yd6lDwm3Ra9j7djyXHmrg'\n",
"VAR_16 = 'BlBMf6kP98LwWepOVSypVwDi2x2782P2KQnJQomY'\n",
"VAR_17 = OAuth1(VAR_15, resource_owner_key=access_token,\n resource_owner_secret=access_token_secret, client_secret=consumer_secret)\n",
"VAR_18 = 'https://api.twitter.com/1.1/'\n",
"VAR_19 = 'search/tweets.json'\n",
"VAR_20 = 'account/verify_credentials.json'\n",
"VAR_21 = {'q': VAR_5, 'count': '5', 'lang': 'en', 'result_type': 'mixed'}\n",
"VAR_22 = requests.get(VAR_18 + VAR_20, auth=oauth)\n",
"if VAR_22.status_code == 200:\n",
"VAR_22 = requests.get(VAR_18 + VAR_19, params=payload, auth=oauth)\n",
"return jsonify(VAR_1=str(response.content))\n",
"VAR_10 = Response(VAR_22=response.content, status=200, mimetype=\n 'application/json')\n",
"return VAR_10\n"
] | [
"@app.route('/json/tweets/search/<query>', methods=['GET'])...\n",
"access_token = '487593326-yu9WIClcUgs9vBWJGGgW4QC9pKedHMdm3NhhNoxe'\n",
"access_token_secret = 'fMcsDcqTtbeM73qB7Cxo7dGKhZT9byGh7i5lKjOVscQzP'\n",
"consumer_key = 'yd6lDwm3Ra9j7djyXHmrg'\n",
"consumer_secret = 'BlBMf6kP98LwWepOVSypVwDi2x2782P2KQnJQomY'\n",
"oauth = OAuth1(consumer_key, resource_owner_key=access_token,\n resource_owner_secret=access_token_secret, client_secret=consumer_secret)\n",
"base_url = 'https://api.twitter.com/1.1/'\n",
"search_url = 'search/tweets.json'\n",
"verify_url = 'account/verify_credentials.json'\n",
"payload = {'q': query, 'count': '5', 'lang': 'en', 'result_type': 'mixed'}\n",
"response = requests.get(base_url + verify_url, auth=oauth)\n",
"if response.status_code == 200:\n",
"response = requests.get(base_url + search_url, params=payload, auth=oauth)\n",
"return jsonify(error=str(response.content))\n",
"resp = Response(response=response.content, status=200, mimetype=\n 'application/json')\n",
"return resp\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Return'"
] |
[
"def FUNC_11(self, VAR_7=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_18 = set()\n",
"if self.benchmark and (VAR_7 is None or self.benchmark in VAR_7):\n",
"if not self.benchmark.exists:\n",
"for VAR_28, VAR_27 in zip(self.output, self.rule.output):\n",
"VAR_18.add(self.benchmark)\n",
"if VAR_7 is None or VAR_28 in VAR_7:\n",
"return VAR_18\n",
"if VAR_28 in self.dynamic_output:\n",
"if not self.expand_dynamic(VAR_27, VAR_13=self.wildcards, VAR_14=_IOFile.\n",
"if not VAR_28.exists:\n",
"VAR_18.add('{} (dynamic)'.format(VAR_27))\n",
"VAR_18.add(VAR_28)\n"
] | [
"def missing_output(self, requested=None):...\n",
"\"\"\"docstring\"\"\"\n",
"files = set()\n",
"if self.benchmark and (requested is None or self.benchmark in requested):\n",
"if not self.benchmark.exists:\n",
"for f, f_ in zip(self.output, self.rule.output):\n",
"files.add(self.benchmark)\n",
"if requested is None or f in requested:\n",
"return files\n",
"if f in self.dynamic_output:\n",
"if not self.expand_dynamic(f_, restriction=self.wildcards, omit_value=\n",
"if not f.exists:\n",
"files.add('{} (dynamic)'.format(f_))\n",
"files.add(f)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Condition",
"For",
"Expr'",
"Condition",
"Return'",
"Condition",
"Condition",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(VAR_0=None):...\n",
"VAR_9 = frappe._dict(template='templates/includes/blog/blog.html', get_list\n =get_blog_list, hide_filters=True, children=get_children(), title=_('Blog')\n )\n",
"VAR_10 = (frappe.local.form_dict.blog_category or frappe.local.form_dict.\n category)\n",
"if VAR_10:\n",
"VAR_15 = FUNC_3(VAR_10)\n",
"if frappe.local.form_dict.blogger:\n",
"VAR_9.sub_title = _('Posts filed under {0}').format(VAR_15)\n",
"VAR_16 = frappe.db.get_value('Blogger', {'name': frappe.local.form_dict.\n blogger}, 'full_name')\n",
"if frappe.local.form_dict.txt:\n",
"VAR_9.title = VAR_15\n",
"VAR_9.sub_title = _('Posts by {0}').format(VAR_16)\n",
"VAR_9.sub_title = _('Filtered by \"{0}\"').format(frappe.local.form_dict.txt)\n",
"if VAR_9.sub_title:\n",
"VAR_9.title = VAR_16\n",
"VAR_9.parents = [{'name': _('Home'), 'route': '/'}, {'name': 'Blog',\n 'route': '/blog'}]\n",
"VAR_9.parents = [{'name': _('Home'), 'route': '/'}]\n",
"VAR_9.update(frappe.get_doc('Blog Settings', 'Blog Settings').as_dict(\n no_default_fields=True))\n",
"return VAR_9\n"
] | [
"def get_list_context(context=None):...\n",
"list_context = frappe._dict(template='templates/includes/blog/blog.html',\n get_list=get_blog_list, hide_filters=True, children=get_children(),\n title=_('Blog'))\n",
"category = (frappe.local.form_dict.blog_category or frappe.local.form_dict.\n category)\n",
"if category:\n",
"category_title = get_blog_category(category)\n",
"if frappe.local.form_dict.blogger:\n",
"list_context.sub_title = _('Posts filed under {0}').format(category_title)\n",
"blogger = frappe.db.get_value('Blogger', {'name': frappe.local.form_dict.\n blogger}, 'full_name')\n",
"if frappe.local.form_dict.txt:\n",
"list_context.title = category_title\n",
"list_context.sub_title = _('Posts by {0}').format(blogger)\n",
"list_context.sub_title = _('Filtered by \"{0}\"').format(frappe.local.\n form_dict.txt)\n",
"if list_context.sub_title:\n",
"list_context.title = blogger\n",
"list_context.parents = [{'name': _('Home'), 'route': '/'}, {'name': 'Blog',\n 'route': '/blog'}]\n",
"list_context.parents = [{'name': _('Home'), 'route': '/'}]\n",
"list_context.update(frappe.get_doc('Blog Settings', 'Blog Settings').\n as_dict(no_default_fields=True))\n",
"return list_context\n"
] | [
0,
0,
3,
0,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"For",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_0(self, VAR_2, VAR_3, VAR_4, VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = self.pool.get('res.users').browse(VAR_2, VAR_3, VAR_3).company_id.id\n",
"VAR_7 = []\n",
"VAR_8 = self.pool.get('ir.model.data')\n",
"VAR_9 = self.pool.get('account.bank.statement')\n",
"VAR_10 = self.pool.get('account.journal')\n",
"VAR_2.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % VAR_3)\n",
"VAR_11 = map(lambda x1: x1[0], VAR_2.fetchall())\n",
"VAR_2.execute('string' % ','.join(map(lambda x: \"'\" + str(x) + \"'\", VAR_11)))\n",
"VAR_12 = map(lambda x1: x1[0], VAR_2.fetchall())\n",
"for journal in VAR_10.browse(VAR_2, VAR_3, VAR_12):\n",
"VAR_4 = VAR_9.search(VAR_2, VAR_3, [('state', '!=', 'confirm'), ('user_id',\n '=', VAR_3), ('journal_id', '=', journal.id)])\n",
"VAR_13 = self.pool.get('ir.model.data')\n",
"if not VAR_4:\n",
"VAR_14 = VAR_13._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_tree')\n",
"VAR_7.append(VAR_4[0])\n",
"VAR_15 = VAR_13._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_form2')\n",
"if not journal.check_dtls:\n",
"if VAR_14:\n",
"VAR_9.button_confirm_cash(VAR_2, VAR_3, VAR_4, VAR_5)\n",
"VAR_14 = VAR_13.browse(VAR_2, VAR_3, VAR_14, VAR_5=context).res_id\n",
"if VAR_15:\n",
"VAR_15 = VAR_13.browse(VAR_2, VAR_3, VAR_15, VAR_5=context).res_id\n",
"return {'domain': \"[('id','in',\" + str(VAR_7) + ')]', 'name':\n 'Close Statements', 'view_type': 'form', 'view_mode': 'tree,form',\n 'res_model': 'account.bank.statement', 'views': [(VAR_14, 'tree'), (\n VAR_15, 'form')], 'type': 'ir.actions.act_window'}\n"
] | [
"def close_statement(self, cr, uid, ids, context):...\n",
"\"\"\"docstring\"\"\"\n",
"company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id\n",
"list_statement = []\n",
"mod_obj = self.pool.get('ir.model.data')\n",
"statement_obj = self.pool.get('account.bank.statement')\n",
"journal_obj = self.pool.get('account.journal')\n",
"cr.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % uid)\n",
"j_ids = map(lambda x1: x1[0], cr.fetchall())\n",
"cr.execute(\n \"\"\" select id from account_journal\n where auto_cash='True' and type='cash'\n and id in (%s)\"\"\"\n % ','.join(map(lambda x: \"'\" + str(x) + \"'\", j_ids)))\n",
"journal_ids = map(lambda x1: x1[0], cr.fetchall())\n",
"for journal in journal_obj.browse(cr, uid, journal_ids):\n",
"ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id',\n '=', uid), ('journal_id', '=', journal.id)])\n",
"data_obj = self.pool.get('ir.model.data')\n",
"if not ids:\n",
"id2 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_tree')\n",
"list_statement.append(ids[0])\n",
"id3 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_form2')\n",
"if not journal.check_dtls:\n",
"if id2:\n",
"statement_obj.button_confirm_cash(cr, uid, ids, context)\n",
"id2 = data_obj.browse(cr, uid, id2, context=context).res_id\n",
"if id3:\n",
"id3 = data_obj.browse(cr, uid, id3, context=context).res_id\n",
"return {'domain': \"[('id','in',\" + str(list_statement) + ')]', 'name':\n 'Close Statements', 'view_type': 'form', 'view_mode': 'tree,form',\n 'res_model': 'account.bank.statement', 'views': [(id2, 'tree'), (id3,\n 'form')], 'type': 'ir.actions.act_window'}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_19(self, VAR_7):...\n",
"VAR_24 = ['GTransferred', 'Errors', 'Checks', 'Transferred', 'Elapsed time',\n 'Transferring']\n",
"VAR_25 = self._job_status[VAR_7]\n",
"VAR_26 = '\\n'.join('{:>12}: {}'.format(header, VAR_25[header]) for header in\n VAR_24)\n",
"self._job_text[VAR_7] = VAR_26\n"
] | [
"def __process_text(self, job_id):...\n",
"headers = ['GTransferred', 'Errors', 'Checks', 'Transferred',\n 'Elapsed time', 'Transferring']\n",
"status = self._job_status[job_id]\n",
"text = '\\n'.join('{:>12}: {}'.format(header, status[header]) for header in\n headers)\n",
"self._job_text[job_id] = text\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_3(self):...\n",
"VAR_5 = models.Session.create()\n",
"VAR_3 = models.Session.query().count()\n",
"self.assertTrue(VAR_3 == 1)\n",
"VAR_2 = EngineAuthRequest.blank('/auth/google')\n",
"VAR_2.cookies['_eauth'] = VAR_5.serialize()\n",
"VAR_2._load_session()\n",
"self.assertTrue(VAR_2.session.session_id == VAR_5.session_id)\n",
"VAR_6 = models.Session.query().count()\n",
"self.assertTrue(VAR_6 == 1)\n"
] | [
"def test_laod_session_session_id_no_user_id(self):...\n",
"s = models.Session.create()\n",
"s_count = models.Session.query().count()\n",
"self.assertTrue(s_count == 1)\n",
"req = EngineAuthRequest.blank('/auth/google')\n",
"req.cookies['_eauth'] = s.serialize()\n",
"req._load_session()\n",
"self.assertTrue(req.session.session_id == s.session_id)\n",
"s_count2 = models.Session.query().count()\n",
"self.assertTrue(s_count2 == 1)\n"
] | [
0,
0,
0,
0,
0,
0,
6,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"\"\"\"Serve files directly from the ContentsManager.\"\"\"\n",
"import mimetypes\n",
"import json\n",
"from base64 import decodebytes\n",
"from tornado import web\n",
"from notebook.base.handlers import IPythonHandler\n",
"from notebook.utils import maybe_future\n",
"\"\"\"string\"\"\"\n",
"@property...\n",
"return super(CLASS_0, self).content_security_policy + '; sandbox allow-scripts'\n"
] | [
"\"\"\"Serve files directly from the ContentsManager.\"\"\"\n",
"import mimetypes\n",
"import json\n",
"from base64 import decodebytes\n",
"from tornado import web\n",
"from notebook.base.handlers import IPythonHandler\n",
"from notebook.utils import maybe_future\n",
"\"\"\"serve files via ContentsManager\n\n Normally used when ContentsManager is not a FileContentsManager.\n\n FileContentsManager subclasses use AuthenticatedFilesHandler by default,\n a subclass of StaticFileHandler.\n \"\"\"\n",
"@property...\n",
"return super(FilesHandler, self\n ).content_security_policy + '; sandbox allow-scripts'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Expr'",
"Condition",
"Return'"
] |
[
"def __init__(self, VAR_5, VAR_6, VAR_7, VAR_8, VAR_9=None, VAR_10=None,...\n",
"threading.Thread.__init__(self)\n",
"self.username = VAR_5\n",
"self.password = VAR_6\n",
"self.metrics = VAR_7\n",
"self.tasks = VAR_8\n",
"self.log = VAR_9\n",
"self.base_url = VAR_10\n",
"self.submissions_path = VAR_11\n",
"self.name = 'Actor thread for user %s' % self.username\n",
"self.browser = Browser()\n",
"self.die = False\n"
] | [
"def __init__(self, username, password, metrics, tasks, log=None, base_url=...\n",
"threading.Thread.__init__(self)\n",
"self.username = username\n",
"self.password = password\n",
"self.metrics = metrics\n",
"self.tasks = tasks\n",
"self.log = log\n",
"self.base_url = base_url\n",
"self.submissions_path = submissions_path\n",
"self.name = 'Actor thread for user %s' % self.username\n",
"self.browser = Browser()\n",
"self.die = False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_29(self, VAR_19):...\n",
"self._time += VAR_19\n"
] | [
"def sleep(self, duration):...\n",
"self._time += duration\n"
] | [
0,
0
] | [
"FunctionDef'",
"AugAssign'"
] |
[
"def FUNC_3(self, VAR_2, VAR_3):...\n",
"VAR_4 = []\n",
"for item in injection.objects.filter(result__campaign_id=campaign).values_list(\n",
"if item is not None:\n",
"return sorted(VAR_4, key=fix_sort_list)\n",
"VAR_4.append((item, item))\n"
] | [
"def injection_choices(self, campaign, attribute):...\n",
"choices = []\n",
"for item in injection.objects.filter(result__campaign_id=campaign).values_list(\n",
"if item is not None:\n",
"return sorted(choices, key=fix_sort_list)\n",
"choices.append((item, item))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Condition",
"Return'",
"Expr'"
] |
[
"def FUNC_4(VAR_2, VAR_1):...\n",
"VAR_1.execute('DELETE FROM video where id={id};'.format(VAR_2=id))\n"
] | [
"def delete_video(id, db):...\n",
"db.execute('DELETE FROM video where id={id};'.format(id=id))\n"
] | [
0,
4
] | [
"FunctionDef'",
"Expr'"
] |
[
"@functools.wraps(VAR_9)...\n",
"VAR_19 = {'$schema': 'http://json-schema.org/draft-07/schema#',\n 'definitions': {'tag': {'type': 'object', 'required': ['namespace',\n 'predicate', 'value'], 'properties': {'namespace': {'type': 'string'},\n 'predicate': {'type': 'string'}, 'value': {'type': ['number', 'string']\n }}}, 'issue': {'type': 'object', 'required': ['title'], 'properties': {\n 'title': {'type': 'string'}, 'description': {'type': 'string'}, 'tags':\n {'type': 'array', 'default': [], 'minItems': 0, 'items': {'$ref':\n '#/definitions/tag'}}}}}}\n",
"if VAR_3:\n",
"VAR_19['definitions']['issue']['required'].append('id')\n",
"VAR_19 = {**VAR_19, **{'type': 'object', 'properties': {'data': {'type':\n 'array', 'minItems': 1, 'items': {'$ref': '#/definitions/issue'}}}}}\n",
"VAR_19['definitions']['issue']['properties']['id'] = {'type': ['integer',\n 'string']}\n",
"VAR_20 = request.get_json()\n",
"validate(instance=request_payload, schema=request_schema)\n",
"return jsonify({'data': [], 'errors': [\n 'failed to validate payload against json schema']}), 400\n",
"return VAR_9(*VAR_17, **kwargs)\n"
] | [
"@functools.wraps(func)...\n",
"request_schema = {'$schema': 'http://json-schema.org/draft-07/schema#',\n 'definitions': {'tag': {'type': 'object', 'required': ['namespace',\n 'predicate', 'value'], 'properties': {'namespace': {'type': 'string'},\n 'predicate': {'type': 'string'}, 'value': {'type': ['number', 'string']\n }}}, 'issue': {'type': 'object', 'required': ['title'], 'properties': {\n 'title': {'type': 'string'}, 'description': {'type': 'string'}, 'tags':\n {'type': 'array', 'default': [], 'minItems': 0, 'items': {'$ref':\n '#/definitions/tag'}}}}}}\n",
"if require_id:\n",
"request_schema['definitions']['issue']['required'].append('id')\n",
"request_schema = {**request_schema, **{'type': 'object', 'properties': {\n 'data': {'type': 'array', 'minItems': 1, 'items': {'$ref':\n '#/definitions/issue'}}}}}\n",
"request_schema['definitions']['issue']['properties']['id'] = {'type': [\n 'integer', 'string']}\n",
"request_payload = request.get_json()\n",
"validate(instance=request_payload, schema=request_schema)\n",
"return jsonify({'data': [], 'errors': [\n 'failed to validate payload against json schema']}), 400\n",
"return func(*args, **kwargs)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
4,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_15(VAR_5):...\n",
"VAR_36 = False\n",
"VAR_10 = 'https://api.github.com/repos/{}/forks'\n",
"VAR_10 = VAR_10.format(VAR_5['target_repo_fullname'])\n",
"VAR_8 = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN']}\n",
"VAR_9 = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n",
"VAR_11 = requests.post(VAR_10, VAR_8=headers, VAR_9=auth)\n",
"if VAR_11.status_code == 202:\n",
"VAR_5['fork_fullname'] = VAR_11.json()['full_name']\n",
"VAR_5['error'] = 'Unable to fork'\n",
"VAR_36 = True\n",
"return VAR_36\n"
] | [
"def fork_for_pr(data):...\n",
"FORKED = False\n",
"url = 'https://api.github.com/repos/{}/forks'\n",
"url = url.format(data['target_repo_fullname'])\n",
"headers = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN']}\n",
"auth = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n",
"r = requests.post(url, headers=headers, auth=auth)\n",
"if r.status_code == 202:\n",
"data['fork_fullname'] = r.json()['full_name']\n",
"data['error'] = 'Unable to fork'\n",
"FORKED = True\n",
"return FORKED\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_6(VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"return {'login': FUNC_7('/user_api/v1/account/login_session/', VAR_3.\n session), 'registration': FUNC_7('/user_api/v1/account/registration/',\n VAR_3.session), 'password_reset': FUNC_7(\n '/user_api/v1/account/password_reset/', VAR_3.session)}\n"
] | [
"def _get_form_descriptions(request):...\n",
"\"\"\"docstring\"\"\"\n",
"return {'login': _local_server_get('/user_api/v1/account/login_session/',\n request.session), 'registration': _local_server_get(\n '/user_api/v1/account/registration/', request.session),\n 'password_reset': _local_server_get(\n '/user_api/v1/account/password_reset/', request.session)}\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def __init__(self, VAR_7):...\n",
"self._user_options = VAR_7\n",
"self._user_notified_about_crash = False\n",
"self._diag_interface = DiagnosticInterface(VAR_7)\n",
"self._omnicomp = OmniCompleter(VAR_7)\n",
"self._latest_completion_request = None\n",
"self._latest_file_parse_request = None\n",
"self._server_stdout = None\n",
"self._server_stderr = None\n",
"self._server_popen = None\n",
"self._filetypes_with_keywords_loaded = set()\n",
"self._temp_options_filename = None\n",
"self._ycmd_keepalive = YcmdKeepalive()\n",
"self._SetupServer()\n",
"self._ycmd_keepalive.Start()\n"
] | [
"def __init__(self, user_options):...\n",
"self._user_options = user_options\n",
"self._user_notified_about_crash = False\n",
"self._diag_interface = DiagnosticInterface(user_options)\n",
"self._omnicomp = OmniCompleter(user_options)\n",
"self._latest_completion_request = None\n",
"self._latest_file_parse_request = None\n",
"self._server_stdout = None\n",
"self._server_stderr = None\n",
"self._server_popen = None\n",
"self._filetypes_with_keywords_loaded = set()\n",
"self._temp_options_filename = None\n",
"self._ycmd_keepalive = YcmdKeepalive()\n",
"self._SetupServer()\n",
"self._ycmd_keepalive.Start()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
7,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"@VAR_0.route('/signup/<remote_app>/', methods=['GET', 'POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_1 not in signup_handlers:\n",
"return abort(404)\n",
"VAR_3 = signup_handlers[VAR_1]['view']()\n",
"return abort(404) if VAR_3 is None else VAR_3\n"
] | [
"@blueprint.route('/signup/<remote_app>/', methods=['GET', 'POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"if remote_app not in signup_handlers:\n",
"return abort(404)\n",
"res = signup_handlers[remote_app]['view']()\n",
"return abort(404) if res is None else res\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Return'"
] |
[
"def FUNC_13(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"VAR_9 = '10.1.2.3'\n",
"for feature in ('confirmation', 'paging', 'events', 'formatoutput'):\n",
"self.driver._eql_execute('cli-settings', feature, 'off')\n",
"self.driver._eql_execute('grpparams', 'show').AndReturn([\n 'Group-Ipaddress: %s' % VAR_9])\n",
"self.mox.ReplayAll()\n",
"self.driver.do_setup(self._context)\n",
"self.assertEqual(VAR_9, self.driver._group_ip)\n"
] | [
"def test_do_setup(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"fake_group_ip = '10.1.2.3'\n",
"for feature in ('confirmation', 'paging', 'events', 'formatoutput'):\n",
"self.driver._eql_execute('cli-settings', feature, 'off')\n",
"self.driver._eql_execute('grpparams', 'show').AndReturn([\n 'Group-Ipaddress: %s' % fake_group_ip])\n",
"self.mox.ReplayAll()\n",
"self.driver.do_setup(self._context)\n",
"self.assertEqual(fake_group_ip, self.driver._group_ip)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(self, VAR_4):...\n",
"if VAR_4 in self.ime_property_cache:\n",
"return self.ime_property_cache[VAR_4]\n",
"return None\n"
] | [
"def get(self, key):...\n",
"if key in self.ime_property_cache:\n",
"return self.ime_property_cache[key]\n",
"return None\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def __init__(self, **VAR_4):...\n",
"self.users_map = {u.id: u for u in User.objects.all()}\n",
"VAR_18 = [('advisors', Advisor), ('breakdowns', Breakdown), (\n 'confirmations', CustomerResponse), ('notifications', Notification)]\n",
"self.table_maps = {}\n",
"for VAR_11, VAR_6 in VAR_18:\n",
"VAR_42 = collections.defaultdict(list)\n",
"super().__init__(**kwargs)\n",
"VAR_43 = VAR_6.objects.all()\n",
"if VAR_11 == 'notifications':\n",
"VAR_43 = VAR_43.filter(type='c').order_by('created')\n",
"for instance in VAR_43:\n",
"VAR_42[instance.win_id].append(instance)\n",
"self.table_maps[VAR_11] = VAR_42\n"
] | [
"def __init__(self, **kwargs):...\n",
"self.users_map = {u.id: u for u in User.objects.all()}\n",
"prefetch_tables = [('advisors', Advisor), ('breakdowns', Breakdown), (\n 'confirmations', CustomerResponse), ('notifications', Notification)]\n",
"self.table_maps = {}\n",
"for table, model in prefetch_tables:\n",
"prefetch_map = collections.defaultdict(list)\n",
"super().__init__(**kwargs)\n",
"instances = model.objects.all()\n",
"if table == 'notifications':\n",
"instances = instances.filter(type='c').order_by('created')\n",
"for instance in instances:\n",
"prefetch_map[instance.win_id].append(instance)\n",
"self.table_maps[table] = prefetch_map\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"For",
"Expr'",
"Assign'"
] |
[
"def FUNC_23(VAR_11):...\n",
"return '%s> ' % self.configuration.eqlx_group_name\n"
] | [
"def _fake_recv(ignore_arg):...\n",
"return '%s> ' % self.configuration.eqlx_group_name\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_19(VAR_8, **VAR_11):...\n",
"\"\"\"docstring\"\"\"\n",
"return VAR_8.format(**{wildcard: '{{{},{}}}'.format(wildcard, '|'.join(\n values)) for wildcard, values in wildcards.items()})\n"
] | [
"def limit(pattern, **wildcards):...\n",
"\"\"\"docstring\"\"\"\n",
"return pattern.format(**{wildcard: '{{{},{}}}'.format(wildcard, '|'.join(\n values)) for wildcard, values in wildcards.items()})\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_2(self):...\n",
"self._mox.ReplayAll()\n",
"self.assertEqual(url_helper.UrlOpen('url', max_tries=-1), None)\n",
"self._mox.VerifyAll()\n"
] | [
"def testUrlOpenInvalidTryCount(self):...\n",
"self._mox.ReplayAll()\n",
"self.assertEqual(url_helper.UrlOpen('url', max_tries=-1), None)\n",
"self._mox.VerifyAll()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_5(self):...\n",
"super().halt_dut('halt 0 1', ['- TARGET: core #0 has entered debug mode',\n '- TARGET: core #1 has entered debug mode'])\n"
] | [
"def halt_dut(self):...\n",
"super().halt_dut('halt 0 1', ['- TARGET: core #0 has entered debug mode',\n '- TARGET: core #1 has entered debug mode'])\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_0(self):...\n",
"self.factory = RequestFactory()\n",
"self.client.login(username='Karyn', password='specialP@55word')\n"
] | [
"def setUp(self):...\n",
"self.factory = RequestFactory()\n",
"self.client.login(username='Karyn', password='specialP@55word')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_7(VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10 = email.EmailHolder(subject='Your {} password has been reset'.format\n (app.config['GLOBAL_SITE_NAME']), recipient=user, text=flask.\n render_template('email/reset.txt', user=user), html=flask.\n render_template('email/reset.html', user=user))\n",
"email.send_email(VAR_10)\n"
] | [
"def send_password_reset_email(user):...\n",
"\"\"\"docstring\"\"\"\n",
"email_msg = email.EmailHolder(subject='Your {} password has been reset'.\n format(app.config['GLOBAL_SITE_NAME']), recipient=user, text=flask.\n render_template('email/reset.txt', user=user), html=flask.\n render_template('email/reset.html', user=user))\n",
"email.send_email(email_msg)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_4):...\n",
"self.serial = VAR_4\n"
] | [
"def __init__(self, serial):...\n",
"self.serial = serial\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_27(self, VAR_75):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.threads = VAR_75\n",
"return VAR_101\n"
] | [
"def threads(self, threads):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.threads = threads\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_8(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = {'SEVERITY': {'MEDIUM': 3}, 'CONFIDENCE': {'HIGH': 3}}\n",
"self.check_example('eval.py', VAR_2)\n"
] | [
"def test_eval(self):...\n",
"\"\"\"docstring\"\"\"\n",
"expect = {'SEVERITY': {'MEDIUM': 3}, 'CONFIDENCE': {'HIGH': 3}}\n",
"self.check_example('eval.py', expect)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def FUNC_1(self):...\n",
"VAR_0 = Library(id='org/repo', metadata_etag='a', contributors_etag='b',\n tags_etag='c')\n",
"VAR_0.put()\n",
"self.respond_to_github('https://api.github.com/repos/org/repo', {'status': 304}\n )\n",
"self.respond_to_github('https://api.github.com/repos/org/repo/contributors',\n {'status': 304})\n",
"self.respond_to_github('https://api.github.com/repos/org/repo/git/refs/tags',\n {'status': 304})\n",
"self.app.get('/task/update/org/repo')\n",
"VAR_1 = self.tasks.get_filtered_tasks()\n",
"self.assertEqual(len(VAR_1), 0)\n"
] | [
"def test_update_respects_304(self):...\n",
"library = Library(id='org/repo', metadata_etag='a', contributors_etag='b',\n tags_etag='c')\n",
"library.put()\n",
"self.respond_to_github('https://api.github.com/repos/org/repo', {'status': 304}\n )\n",
"self.respond_to_github('https://api.github.com/repos/org/repo/contributors',\n {'status': 304})\n",
"self.respond_to_github('https://api.github.com/repos/org/repo/git/refs/tags',\n {'status': 304})\n",
"self.app.get('/task/update/org/repo')\n",
"tasks = self.tasks.get_filtered_tasks()\n",
"self.assertEqual(len(tasks), 0)\n"
] | [
0,
0,
0,
0,
0,
0,
5,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_14(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.assertEqual(tournament.check_version((3, 4)), 0)\n"
] | [
"def test_newer_python3_version(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.assertEqual(tournament.check_version((3, 4)), 0)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"@classmethod...\n",
"VAR_85 = {}\n",
"VAR_86 = g.RATELIMIT * 60\n",
"if VAR_86 <= 0:\n",
"return\n",
"VAR_87 = datetime.now(g.tz) + timedelta(VAR_86=seconds)\n",
"if VAR_61 and VAR_101.user_is_loggedin:\n",
"VAR_85['user' + VAR_100(VAR_101.user._id36)] = VAR_87\n",
"if VAR_62:\n",
"VAR_85['ip' + VAR_100(VAR_102.ip)] = VAR_87\n",
"cache.set_multi(VAR_85, VAR_63, VAR_99=seconds)\n"
] | [
"@classmethod...\n",
"to_set = {}\n",
"seconds = g.RATELIMIT * 60\n",
"if seconds <= 0:\n",
"return\n",
"expire_time = datetime.now(g.tz) + timedelta(seconds=seconds)\n",
"if rate_user and c.user_is_loggedin:\n",
"to_set['user' + str(c.user._id36)] = expire_time\n",
"if rate_ip:\n",
"to_set['ip' + str(request.ip)] = expire_time\n",
"cache.set_multi(to_set, prefix, time=seconds)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'"
] |
[
"@handled_slot(bool)...\n",
"print('Starting BiFi scan...')\n",
"self.matisse.birefringent_filter_scan()\n"
] | [
"@handled_slot(bool)...\n",
"print('Starting BiFi scan...')\n",
"self.matisse.birefringent_filter_scan()\n"
] | [
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(VAR_2, VAR_3):...\n",
"VAR_4 = []\n",
"for item in result.objects.filter(campaign_id=campaign).values_list(VAR_3,\n",
"if item is not None:\n",
"return sorted(VAR_4, key=fix_sort_list)\n",
"VAR_4.append((item, item))\n"
] | [
"def result_choices(campaign, attribute):...\n",
"choices = []\n",
"for item in result.objects.filter(campaign_id=campaign).values_list(attribute,\n",
"if item is not None:\n",
"return sorted(choices, key=fix_sort_list)\n",
"choices.append((item, item))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Condition",
"Return'",
"Expr'"
] |
[
"def FUNC_6(self):...\n",
"if not VAR_101.user_is_admin:\n",
"abort(404, 'page not found')\n"
] | [
"def run(self):...\n",
"if not c.user_is_admin:\n",
"abort(404, 'page not found')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'"
] |
[
"def FUNC_16(self, VAR_66):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_99 = snakemake.io.load_configfile(VAR_66)\n",
"update_config(VAR_85, VAR_99)\n",
"update_config(VAR_85, self.overwrite_config)\n"
] | [
"def configfile(self, jsonpath):...\n",
"\"\"\"docstring\"\"\"\n",
"c = snakemake.io.load_configfile(jsonpath)\n",
"update_config(config, c)\n",
"update_config(config, self.overwrite_config)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(self, VAR_7, VAR_8):...\n",
"if True:\n",
"self.postmsg(VAR_7[1], VAR_8, VAR_7[0])\n",
"self.counters['comments'] += 1\n",
"self.w.sleep(self.comment_successtimeout)\n",
"self.w.sleep(self.comment_successtimeout)\n",
"self.schedule(self.add_comment, (VAR_7, VAR_8))\n",
"self.targets.remove(VAR_7)\n",
"self.w.sleep(self.comment_successtimeout)\n",
"self.log.error('Too many wrong answers to CAPTCHA')\n",
"self.schedule(self.add_comment, (VAR_7, VAR_8))\n",
"self.log.warn('%s: %s', e, e.answer)\n",
"self.schedule(self.add_comment, (VAR_7, VAR_8))\n",
"self.schedule(self.add_comment, (VAR_7, VAR_8))\n",
"self.schedule_first(self.switch_user)\n",
"self.log.info('Removing %s from targets', VAR_7)\n",
"self.targets.remove(VAR_7)\n",
"self.w.sleep(self.errortimeout)\n",
"self.schedule(self.add_comment, (VAR_7, VAR_8))\n",
"self.w.sleep(self.errortimeout)\n",
"self.targets.remove(VAR_7)\n",
"self.w.sleep(self.errortimeout)\n",
"self.log.exception(e)\n",
"self.w.sleep(self.errortimeout)\n"
] | [
"def add_comment(self, t, msg):...\n",
"if True:\n",
"self.postmsg(t[1], msg, t[0])\n",
"self.counters['comments'] += 1\n",
"self.w.sleep(self.comment_successtimeout)\n",
"self.w.sleep(self.comment_successtimeout)\n",
"self.schedule(self.add_comment, (t, msg))\n",
"self.targets.remove(t)\n",
"self.w.sleep(self.comment_successtimeout)\n",
"self.log.error('Too many wrong answers to CAPTCHA')\n",
"self.schedule(self.add_comment, (t, msg))\n",
"self.log.warn('%s: %s', e, e.answer)\n",
"self.schedule(self.add_comment, (t, msg))\n",
"self.schedule(self.add_comment, (t, msg))\n",
"self.schedule_first(self.switch_user)\n",
"self.log.info('Removing %s from targets', t)\n",
"self.targets.remove(t)\n",
"self.w.sleep(self.errortimeout)\n",
"self.schedule(self.add_comment, (t, msg))\n",
"self.w.sleep(self.errortimeout)\n",
"self.targets.remove(t)\n",
"self.w.sleep(self.errortimeout)\n",
"self.log.exception(e)\n",
"self.w.sleep(self.errortimeout)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
7,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"AugAssign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_10(self, VAR_2, VAR_3, VAR_4, VAR_7={}):...\n",
"VAR_34 = self.setActive(VAR_2, VAR_3, VAR_4, VAR_21=True, VAR_7=context)\n",
"return VAR_34\n"
] | [
"def reset_project(self, cr, uid, ids, context={}):...\n",
"res = self.setActive(cr, uid, ids, value=True, context=context)\n",
"return res\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"@app.route('/api/indi_service/stop', methods=['POST'])...\n",
"controller.indi_service.stop()\n",
"return {'indi_service': 'stopping'}\n"
] | [
"@app.route('/api/indi_service/stop', methods=['POST'])...\n",
"controller.indi_service.stop()\n",
"return {'indi_service': 'stopping'}\n"
] | [
0,
0,
0
] | [
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_1(self, VAR_0):...\n",
"return VAR_0.imageupload_set.count()\n"
] | [
"def get_images_count(self, obj):...\n",
"return obj.imageupload_set.count()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_2(self):...\n",
"return \"\"\"Task: %s (ID %d)\nFile: %s\n\"\"\" % (self.task[1], self.task[0], self\n .source_path) + GenericRequest.specific_info(self)\n"
] | [
"def specific_info(self):...\n",
"return \"\"\"Task: %s (ID %d)\nFile: %s\n\"\"\" % (self.task[1], self.task[0], self\n .source_path) + GenericRequest.specific_info(self)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"__author__ = 'Johannes Köster'\n",
"__copyright__ = 'Copyright 2015, Johannes Köster'\n",
"__email__ = '[email protected]'\n",
"__license__ = 'MIT'\n",
"import os\n",
"import sys\n",
"import base64\n",
"import json\n",
"from collections import defaultdict\n",
"from itertools import chain\n",
"from functools import partial\n",
"from operator import attrgetter\n",
"from snakemake.io import IOFile, Wildcards, Resources, _IOFile\n",
"from snakemake.utils import format, listfiles\n",
"from snakemake.exceptions import RuleException, ProtectedOutputException\n",
"from snakemake.exceptions import UnexpectedOutputException\n",
"from snakemake.logging import logger\n",
"def FUNC_0(VAR_0, VAR_1):...\n",
"return chain(*map(attrgetter(VAR_1), VAR_0))\n"
] | [
"__author__ = 'Johannes Köster'\n",
"__copyright__ = 'Copyright 2015, Johannes Köster'\n",
"__email__ = '[email protected]'\n",
"__license__ = 'MIT'\n",
"import os\n",
"import sys\n",
"import base64\n",
"import json\n",
"from collections import defaultdict\n",
"from itertools import chain\n",
"from functools import partial\n",
"from operator import attrgetter\n",
"from snakemake.io import IOFile, Wildcards, Resources, _IOFile\n",
"from snakemake.utils import format, listfiles\n",
"from snakemake.exceptions import RuleException, ProtectedOutputException\n",
"from snakemake.exceptions import UnexpectedOutputException\n",
"from snakemake.logging import logger\n",
"def jobfiles(jobs, type):...\n",
"return chain(*map(attrgetter(type), jobs))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
7,
0,
0,
0,
0,
0,
0
] | [
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_13(self):...\n",
"VAR_14 = self.new_mocked_client(client.RESTClient, url_prefix='api/v1/ports')\n",
"VAR_14.url_delete('1')\n",
"FUNC_1('delete', VAR_14, 'https://1.2.3.4/api/v1/ports/1')\n"
] | [
"def test_client_url_delete(self):...\n",
"api = self.new_mocked_client(client.RESTClient, url_prefix='api/v1/ports')\n",
"api.url_delete('1')\n",
"assert_call('delete', api, 'https://1.2.3.4/api/v1/ports/1')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(VAR_16, VAR_17):...\n",
"\"\"\"docstring\"\"\"\n",
"return keyworder.get_single_keywords(VAR_16, VAR_17) or {}\n"
] | [
"def extract_single_keywords(skw_db, fulltext):...\n",
"\"\"\"docstring\"\"\"\n",
"return keyworder.get_single_keywords(skw_db, fulltext) or {}\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_41(self, VAR_27, VAR_28):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_49 = []\n",
"for VAR_25 in VAR_28:\n",
"if VAR_25.permlevel not in VAR_27 and VAR_25.fieldtype not in display_fieldtypes:\n",
"if VAR_49:\n",
"VAR_49.append(VAR_25)\n",
"if self.is_new():\n",
"VAR_70 = VAR_54.new_doc(self.doctype)\n",
"if self.get('parent_doc'):\n",
"for VAR_25 in VAR_49:\n",
"self.parent_doc.get_latest()\n",
"VAR_70 = self.get_latest()\n",
"self.set(VAR_25.fieldname, VAR_70.get(VAR_25.fieldname))\n",
"VAR_70 = [VAR_6 for VAR_6 in self.parent_doc.get(self.parentfield) if VAR_6\n .name == self.name][0]\n"
] | [
"def reset_values_if_no_permlevel_access(self, has_access_to,...\n",
"\"\"\"docstring\"\"\"\n",
"to_reset = []\n",
"for df in high_permlevel_fields:\n",
"if df.permlevel not in has_access_to and df.fieldtype not in display_fieldtypes:\n",
"if to_reset:\n",
"to_reset.append(df)\n",
"if self.is_new():\n",
"ref_doc = frappe.new_doc(self.doctype)\n",
"if self.get('parent_doc'):\n",
"for df in to_reset:\n",
"self.parent_doc.get_latest()\n",
"ref_doc = self.get_latest()\n",
"self.set(df.fieldname, ref_doc.get(df.fieldname))\n",
"ref_doc = [d for d in self.parent_doc.get(self.parentfield) if d.name ==\n self.name][0]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Condition",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Condition",
"For",
"Expr'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def FUNC_3(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._prepare()\n",
"self._execute()\n"
] | [
"def execute(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._prepare()\n",
"self._execute()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'"
] |
[
"def FUNC_12(self):...\n",
"VAR_4 = {url_helper.swarm_constants.COUNT_KEY: 1}\n",
"self._mox.ReplayAll()\n",
"self.assertEqual(url_helper.UrlOpen('url', VAR_4=data), None)\n",
"self._mox.VerifyAll()\n"
] | [
"def testCountKeyInData(self):...\n",
"data = {url_helper.swarm_constants.COUNT_KEY: 1}\n",
"self._mox.ReplayAll()\n",
"self.assertEqual(url_helper.UrlOpen('url', data=data), None)\n",
"self._mox.VerifyAll()\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_21(self):...\n",
"self.log.info('Initializing intraprocess signal socket %s', self.th_sa)\n",
"self.th_sock = self.p.ctx.socket(zmq.PUB)\n",
"self.th_sock.bind(self.th_sa)\n"
] | [
"def init_th_sock(self):...\n",
"self.log.info('Initializing intraprocess signal socket %s', self.th_sa)\n",
"self.th_sock = self.p.ctx.socket(zmq.PUB)\n",
"self.th_sock.bind(self.th_sa)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_26(self):...\n",
"if self._IsServerAlive():\n",
"VAR_23 = BaseRequest.PostDataToHandler(BuildRequestData(), 'debug_info')\n",
"VAR_23 = 'Server crashed, no debug info from server'\n",
"VAR_23 += \"\"\"\nServer running at: {0}\"\"\".format(BaseRequest.server_location)\n",
"VAR_23 += \"\"\"\nServer process ID: {0}\"\"\".format(self._server_popen.pid)\n",
"if self._server_stderr or self._server_stdout:\n",
"VAR_23 += \"\"\"\nServer logfiles:\n {0}\n {1}\"\"\".format(self._server_stdout,\n self._server_stderr)\n",
"return VAR_23\n"
] | [
"def DebugInfo(self):...\n",
"if self._IsServerAlive():\n",
"debug_info = BaseRequest.PostDataToHandler(BuildRequestData(), 'debug_info')\n",
"debug_info = 'Server crashed, no debug info from server'\n",
"debug_info += \"\"\"\nServer running at: {0}\"\"\".format(BaseRequest.server_location)\n",
"debug_info += \"\"\"\nServer process ID: {0}\"\"\".format(self._server_popen.pid)\n",
"if self._server_stderr or self._server_stdout:\n",
"debug_info += \"\"\"\nServer logfiles:\n {0}\n {1}\"\"\".format(self.\n _server_stdout, self._server_stderr)\n",
"return debug_info\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"AugAssign'",
"AugAssign'",
"Condition",
"AugAssign'",
"Return'"
] |
[
"from __future__ import absolute_import\n",
"from functools import partial\n",
"import cherrypy\n",
"import cephfs\n",
"from . import ApiController, RESTController, UiApiController, BaseController, Endpoint, Task\n",
"from .. import logger\n",
"from ..security import Scope\n",
"from ..services.cephfs import CephFS\n",
"from ..services.cephx import CephX\n",
"from ..services.exception import serialize_dashboard_exception\n",
"from ..services.ganesha import Ganesha, GaneshaConf, NFSException\n",
"from ..services.rgw_client import RgwClient\n",
"def FUNC_0(VAR_0, VAR_1, VAR_2):...\n",
"def FUNC_1(VAR_3):...\n",
"return Task('nfs/{}'.format(VAR_0), VAR_1, VAR_2, partial(\n serialize_dashboard_exception, include_http_status=True))(VAR_3)\n"
] | [
"from __future__ import absolute_import\n",
"from functools import partial\n",
"import cherrypy\n",
"import cephfs\n",
"from . import ApiController, RESTController, UiApiController, BaseController, Endpoint, Task\n",
"from .. import logger\n",
"from ..security import Scope\n",
"from ..services.cephfs import CephFS\n",
"from ..services.cephx import CephX\n",
"from ..services.exception import serialize_dashboard_exception\n",
"from ..services.ganesha import Ganesha, GaneshaConf, NFSException\n",
"from ..services.rgw_client import RgwClient\n",
"def NfsTask(name, metadata, wait_for):...\n",
"def composed_decorator(func):...\n",
"return Task('nfs/{}'.format(name), metadata, wait_for, partial(\n serialize_dashboard_exception, include_http_status=True))(func)\n"
] | [
0,
0,
0,
0,
6,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, VAR_25=None):...\n",
"super(CLASS_2.MockHTTPProvider, self).__init__()\n",
"if isinstance(VAR_25, list):\n",
"self._session_responses = VAR_25\n",
"if VAR_25:\n",
"self._session_responses = [VAR_25]\n",
"self._session_responses = None\n"
] | [
"def __init__(self, session_response=None):...\n",
"super(NsxClientTestCase.MockHTTPProvider, self).__init__()\n",
"if isinstance(session_response, list):\n",
"self._session_responses = session_response\n",
"if session_response:\n",
"self._session_responses = [session_response]\n",
"self._session_responses = None\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_6(self, VAR_33):...\n",
"if self.options and VAR_33 not in self.options:\n",
"VAR_101.errors.add(errors.INVALID_OPTION)\n",
"return VAR_33\n",
"return self.default\n"
] | [
"def run(self, val):...\n",
"if self.options and val not in self.options:\n",
"c.errors.add(errors.INVALID_OPTION)\n",
"return val\n",
"return self.default\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_4():...\n",
"\"\"\"docstring\"\"\"\n",
"print('What are the most popular article authors of all time?\\n')\n",
"VAR_8 = '%s - %d views\\n'\n",
"VAR_7 = ''.join(VAR_8 % (name, views) for name, views in FUNC_1())\n",
"print(VAR_7)\n"
] | [
"def print_top_authors():...\n",
"\"\"\"docstring\"\"\"\n",
"print('What are the most popular article authors of all time?\\n')\n",
"top_authors = '%s - %d views\\n'\n",
"results = ''.join(top_authors % (name, views) for name, views in\n get_top_authors())\n",
"print(results)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_3(self):...\n",
"return self.name\n"
] | [
"def get_name(self):...\n",
"return self.name\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_0():...\n",
"VAR_0 = input().split(' ')\n",
"VAR_1, VAR_2 = int(VAR_0[0]), int(VAR_0[1])\n",
"VAR_3 = {i: set() for i in range(VAR_1)}\n",
"VAR_4 = {}\n",
"VAR_5 = {}\n",
"for i in range(VAR_2):\n",
"VAR_10 = input().split(' ')\n",
"return VAR_1, VAR_3, VAR_4, VAR_5\n",
"VAR_11, VAR_12, VAR_13 = int(VAR_10[0]), int(VAR_10[1]), int(VAR_10[2])\n",
"if VAR_11 != VAR_12:\n",
"VAR_3[VAR_11].add(VAR_12)\n",
"VAR_3[VAR_12].add(VAR_11)\n",
"VAR_14 = frozenset((VAR_11, VAR_12))\n",
"if VAR_14 in VAR_4 and VAR_13 >= VAR_4[VAR_14]:\n",
"VAR_5[VAR_14] += 1 if VAR_13 == VAR_4[VAR_14] else 0\n",
"VAR_4[VAR_14] = VAR_13\n",
"VAR_5[VAR_14] = 1\n"
] | [
"def inp():...\n",
"first_line = input().split(' ')\n",
"num_points, num_trails = int(first_line[0]), int(first_line[1])\n",
"adj_lst = {i: set() for i in range(num_points)}\n",
"trail_len = {}\n",
"trail_len_duplicate_count = {}\n",
"for i in range(num_trails):\n",
"trail = input().split(' ')\n",
"return num_points, adj_lst, trail_len, trail_len_duplicate_count\n",
"node1, node2, length = int(trail[0]), int(trail[1]), int(trail[2])\n",
"if node1 != node2:\n",
"adj_lst[node1].add(node2)\n",
"adj_lst[node2].add(node1)\n",
"key = frozenset((node1, node2))\n",
"if key in trail_len and length >= trail_len[key]:\n",
"trail_len_duplicate_count[key] += 1 if length == trail_len[key] else 0\n",
"trail_len[key] = length\n",
"trail_len_duplicate_count[key] = 1\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"AugAssign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_3(VAR_6, VAR_3, VAR_4, VAR_5=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_5 is None:\n",
"VAR_5 = ray.DriverID.nil()\n",
"VAR_35 = ray.gcs_utils.construct_error_message(VAR_5, VAR_3, VAR_4, time.time()\n )\n",
"VAR_6.execute_command('RAY.TABLE_APPEND', ray.gcs_utils.TablePrefix.\n ERROR_INFO, ray.gcs_utils.TablePubsub.ERROR_INFO, VAR_5.binary(), VAR_35)\n"
] | [
"def push_error_to_driver_through_redis(redis_client, error_type, message,...\n",
"\"\"\"docstring\"\"\"\n",
"if driver_id is None:\n",
"driver_id = ray.DriverID.nil()\n",
"error_data = ray.gcs_utils.construct_error_message(driver_id, error_type,\n message, time.time())\n",
"redis_client.execute_command('RAY.TABLE_APPEND', ray.gcs_utils.TablePrefix.\n ERROR_INFO, ray.gcs_utils.TablePubsub.ERROR_INFO, driver_id.binary(),\n error_data)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = discord.Embed(VAR_2='Virtual Console Injects for 3DS', VAR_3=\n discord.Color.blue())\n",
"VAR_7.set_author(name='Asdolo', url=\n 'https://gbatemp.net/members/asdolo.389539/')\n",
"VAR_7.set_thumbnail(url='https://i.imgur.com/rHa76XM.png')\n",
"VAR_7.url = (\n 'https://gbatemp.net/search/40920047/?q=injector&t=post&o=date&g=1&c[title_only]=1&c[user][0]=389539'\n )\n",
"VAR_7.description = 'The recommended way to play old classics on your 3DS'\n",
"await self.bot.say('', VAR_7=embed)\n"
] | [
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"embed = discord.Embed(title='Virtual Console Injects for 3DS', color=\n discord.Color.blue())\n",
"embed.set_author(name='Asdolo', url=\n 'https://gbatemp.net/members/asdolo.389539/')\n",
"embed.set_thumbnail(url='https://i.imgur.com/rHa76XM.png')\n",
"embed.url = (\n 'https://gbatemp.net/search/40920047/?q=injector&t=post&o=date&g=1&c[title_only]=1&c[user][0]=389539'\n )\n",
"embed.description = 'The recommended way to play old classics on your 3DS'\n",
"await self.bot.say('', embed=embed)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_21(self, VAR_14, VAR_28):...\n",
"VAR_12 = \"ssh %s 'hyperion --config %s/%s.yaml slave'\" % (VAR_28, VAR_1, VAR_14\n )\n",
"self.logger.debug('Run cmd:\\n%s' % VAR_12)\n",
"FUNC_7(self.session, VAR_12)\n"
] | [
"def start_remote_component(self, comp_name, host):...\n",
"cmd = \"ssh %s 'hyperion --config %s/%s.yaml slave'\" % (host, TMP_SLAVE_DIR,\n comp_name)\n",
"self.logger.debug('Run cmd:\\n%s' % cmd)\n",
"send_main_session_command(self.session, cmd)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"@handled_slot(bool)...\n",
"VAR_17, VAR_18 = QInputDialog.getDouble(self.window, title=\n 'Set Approx. Wavelength', label='Wavelength (nm): ', value=self.matisse\n .query('MOTBI:WL?', numeric_result=True))\n",
"if VAR_18:\n",
"print(f'Setting BiFi approximate wavelength to {VAR_17} nm...')\n",
"self.matisse.set_bifi_wavelength(VAR_17)\n"
] | [
"@handled_slot(bool)...\n",
"target_wavelength, success = QInputDialog.getDouble(self.window, title=\n 'Set Approx. Wavelength', label='Wavelength (nm): ', value=self.matisse\n .query('MOTBI:WL?', numeric_result=True))\n",
"if success:\n",
"print(f'Setting BiFi approximate wavelength to {target_wavelength} nm...')\n",
"self.matisse.set_bifi_wavelength(target_wavelength)\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_4():...\n",
"VAR_3.execute(\"SELECT * FROM games WHERE Running = 'Yes'\")\n",
"VAR_16 = VAR_3.fetchall()\n",
"return VAR_16\n"
] | [
"def getRunning():...\n",
"db.execute(\"SELECT * FROM games WHERE Running = 'Yes'\")\n",
"running = db.fetchall()\n",
"return running\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(VAR_9):...\n",
"VAR_12 = VAR_9['subject'].split(': #')[1]\n",
"VAR_14 = re.findall('^[\\\\W]*([Oo\\\\d]){1}(?=[\\\\W]*)', VAR_9['content'].\n replace('#', '').replace('link', ''))[0]\n",
"VAR_14 = int(0 if VAR_14 == ('O' or 'o') else VAR_14)\n",
"VAR_15 = re.findall('\\\\w{8}-\\\\w{4}-\\\\w{4}-\\\\w{4}-\\\\w{12}', VAR_9['content'])[0]\n",
"VAR_4.info(f'got feedback `{VAR_14}` for job #`{VAR_12}`')\n",
"VAR_27 = pd.read_sql(f'SELECT * FROM df_dilfo WHERE job_number={VAR_12}', conn\n ).iloc[0].closed\n",
"if VAR_27:\n",
"VAR_4.info(\n f'job was already matched successfully and logged as `closed`... skipping.'\n )\n",
"if VAR_14 == 1:\n",
"return\n",
"VAR_4.info(f'got feeback that DCN key {VAR_15} was correct')\n",
"VAR_26 = pd.read_sql('SELECT * FROM df_matched', conn)\n",
"VAR_28 = 'UPDATE df_dilfo SET closed = 1 WHERE job_number = {}'\n",
"VAR_29 = {'job_number': VAR_12, 'dcn_key': VAR_15, 'ground_truth': 1 if \n VAR_14 == 1 else 0, 'multi_phase': 1 if VAR_14 == 2 else 0, 'verifier':\n VAR_9['sender'], 'source': 'feedback', 'log_date': str(datetime.\n datetime.now().date()), 'validate': 0}\n",
"conn.cursor().execute(VAR_28.format(VAR_12))\n",
"VAR_26 = VAR_26.append(VAR_29, ignore_index=True)\n",
"VAR_4.info(f'updated df_dilfo to show `closed` status for job #{VAR_12}')\n",
"VAR_26 = VAR_26.drop_duplicates(subset=['job_number', 'dcn_key'], keep='last')\n",
"VAR_26.to_sql('df_matched', conn, if_exists='replace', index=False)\n",
"VAR_4.info(\n f\"DCN key `{VAR_15}` was a {'successful match' if VAR_14 == 1 else 'mis-match'} for job #{VAR_12}\"\n )\n"
] | [
"def process_as_reply(email_obj):...\n",
"job_number = email_obj['subject'].split(': #')[1]\n",
"feedback = re.findall('^[\\\\W]*([Oo\\\\d]){1}(?=[\\\\W]*)', email_obj['content']\n .replace('#', '').replace('link', ''))[0]\n",
"feedback = int(0 if feedback == ('O' or 'o') else feedback)\n",
"dcn_key = re.findall('\\\\w{8}-\\\\w{4}-\\\\w{4}-\\\\w{4}-\\\\w{12}', email_obj[\n 'content'])[0]\n",
"logger.info(f'got feedback `{feedback}` for job #`{job_number}`')\n",
"was_prev_closed = pd.read_sql(\n f'SELECT * FROM df_dilfo WHERE job_number={job_number}', conn).iloc[0\n ].closed\n",
"if was_prev_closed:\n",
"logger.info(\n f'job was already matched successfully and logged as `closed`... skipping.'\n )\n",
"if feedback == 1:\n",
"return\n",
"logger.info(f'got feeback that DCN key {dcn_key} was correct')\n",
"df = pd.read_sql('SELECT * FROM df_matched', conn)\n",
"update_status_query = 'UPDATE df_dilfo SET closed = 1 WHERE job_number = {}'\n",
"match_dict_input = {'job_number': job_number, 'dcn_key': dcn_key,\n 'ground_truth': 1 if feedback == 1 else 0, 'multi_phase': 1 if feedback ==\n 2 else 0, 'verifier': email_obj['sender'], 'source': 'feedback',\n 'log_date': str(datetime.datetime.now().date()), 'validate': 0}\n",
"conn.cursor().execute(update_status_query.format(job_number))\n",
"df = df.append(match_dict_input, ignore_index=True)\n",
"logger.info(f'updated df_dilfo to show `closed` status for job #{job_number}')\n",
"df = df.drop_duplicates(subset=['job_number', 'dcn_key'], keep='last')\n",
"df.to_sql('df_matched', conn, if_exists='replace', index=False)\n",
"logger.info(\n f\"DCN key `{dcn_key}` was a {'successful match' if feedback == 1 else 'mis-match'} for job #{job_number}\"\n )\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
4,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_6(self):...\n",
"return self.title\n"
] | [
"def get_feed(self):...\n",
"return self.title\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_0(self, VAR_7, VAR_8):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_8:\n",
"VAR_7 = VAR_7.query(VAR_8)\n",
"return VAR_7\n"
] | [
"def query(self, search, query):...\n",
"\"\"\"docstring\"\"\"\n",
"if query:\n",
"search = search.query(query)\n",
"return search\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_3(VAR_0, VAR_1, VAR_2):...\n",
"VAR_9 = {'class': SI, 'results': {start_url: [{'endpoint': VAR_0, 'params':\n VAR_1, 'method': VAR_2}]}}\n",
"if VAR_2 == 'POST':\n",
"VAR_4 = FUNC_1(VAR_0, VAR_1)\n",
"return VAR_9, VAR_4\n"
] | [
"def genSI(endpoint, params, method):...\n",
"scope = {'class': SI, 'results': {start_url: [{'endpoint': endpoint,\n 'params': params, 'method': method}]}}\n",
"if method == 'POST':\n",
"script = createPostScript(endpoint, params)\n",
"return scope, script\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"\"\"\"docstring\"\"\"\n",
"self.common.client_login()\n",
"VAR_24 = self.common.create_cloned_volume(VAR_7, VAR_8)\n",
"self.common.client_logout()\n",
"return {'metadata': VAR_24}\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"\"\"\"docstring\"\"\"\n",
"self.common.client_login()\n",
"new_vol = self.common.create_cloned_volume(volume, src_vref)\n",
"self.common.client_logout()\n",
"return {'metadata': new_vol}\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def __init__(self, *VAR_0, VAR_1=None, VAR_2=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if len(VAR_0) == 2:\n",
"VAR_10, VAR_41 = VAR_0\n",
"if len(VAR_0) == 1:\n",
"self.name = VAR_10\n",
"VAR_17 = VAR_0[0]\n",
"self.workflow = VAR_41\n",
"self.name = VAR_17.name\n",
"self.docstring = None\n",
"self.workflow = VAR_17.workflow\n",
"self.message = None\n",
"self.docstring = VAR_17.docstring\n",
"self._input = InputFiles()\n",
"self.message = VAR_17.message\n",
"self._output = OutputFiles()\n",
"self._input = InputFiles(VAR_17._input)\n",
"self._params = Params()\n",
"self._output = OutputFiles(VAR_17._output)\n",
"self.dependencies = dict()\n",
"self._params = Params(VAR_17._params)\n",
"self.dynamic_output = set()\n",
"self.dependencies = dict(VAR_17.dependencies)\n",
"self.dynamic_input = set()\n",
"self.dynamic_output = set(VAR_17.dynamic_output)\n",
"self.temp_output = set()\n",
"self.dynamic_input = set(VAR_17.dynamic_input)\n",
"self.protected_output = set()\n",
"self.temp_output = set(VAR_17.temp_output)\n",
"self.touch_output = set()\n",
"self.protected_output = set(VAR_17.protected_output)\n",
"self.subworkflow_input = dict()\n",
"self.touch_output = set(VAR_17.touch_output)\n",
"self.resources = dict(_cores=1, _nodes=1)\n",
"self.subworkflow_input = dict(VAR_17.subworkflow_input)\n",
"self.priority = 0\n",
"self.resources = VAR_17.resources\n",
"self.version = None\n",
"self.priority = VAR_17.priority\n",
"self._log = Log()\n",
"self.version = VAR_17.version\n",
"self._benchmark = None\n",
"self._log = VAR_17._log\n",
"self.wildcard_names = set()\n",
"self._benchmark = VAR_17._benchmark\n",
"self.lineno = VAR_1\n",
"self.wildcard_names = set(VAR_17.wildcard_names)\n",
"self.snakefile = VAR_2\n",
"self.lineno = VAR_17.lineno\n",
"self.run_func = None\n",
"self.snakefile = VAR_17.snakefile\n",
"self.shellcmd = None\n",
"self.run_func = VAR_17.run_func\n",
"self.norun = False\n",
"self.shellcmd = VAR_17.shellcmd\n",
"self.norun = VAR_17.norun\n"
] | [
"def __init__(self, *args, lineno=None, snakefile=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if len(args) == 2:\n",
"name, workflow = args\n",
"if len(args) == 1:\n",
"self.name = name\n",
"other = args[0]\n",
"self.workflow = workflow\n",
"self.name = other.name\n",
"self.docstring = None\n",
"self.workflow = other.workflow\n",
"self.message = None\n",
"self.docstring = other.docstring\n",
"self._input = InputFiles()\n",
"self.message = other.message\n",
"self._output = OutputFiles()\n",
"self._input = InputFiles(other._input)\n",
"self._params = Params()\n",
"self._output = OutputFiles(other._output)\n",
"self.dependencies = dict()\n",
"self._params = Params(other._params)\n",
"self.dynamic_output = set()\n",
"self.dependencies = dict(other.dependencies)\n",
"self.dynamic_input = set()\n",
"self.dynamic_output = set(other.dynamic_output)\n",
"self.temp_output = set()\n",
"self.dynamic_input = set(other.dynamic_input)\n",
"self.protected_output = set()\n",
"self.temp_output = set(other.temp_output)\n",
"self.touch_output = set()\n",
"self.protected_output = set(other.protected_output)\n",
"self.subworkflow_input = dict()\n",
"self.touch_output = set(other.touch_output)\n",
"self.resources = dict(_cores=1, _nodes=1)\n",
"self.subworkflow_input = dict(other.subworkflow_input)\n",
"self.priority = 0\n",
"self.resources = other.resources\n",
"self.version = None\n",
"self.priority = other.priority\n",
"self._log = Log()\n",
"self.version = other.version\n",
"self._benchmark = None\n",
"self._log = other._log\n",
"self.wildcard_names = set()\n",
"self._benchmark = other._benchmark\n",
"self.lineno = lineno\n",
"self.wildcard_names = set(other.wildcard_names)\n",
"self.snakefile = snakefile\n",
"self.lineno = other.lineno\n",
"self.run_func = None\n",
"self.snakefile = other.snakefile\n",
"self.shellcmd = None\n",
"self.run_func = other.run_func\n",
"self.norun = False\n",
"self.shellcmd = other.shellcmd\n",
"self.norun = other.norun\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Docstring",
"Assign'",
"Assign'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_21(self):...\n",
"@api.public...\n",
"self.response.write(ipaddr.ip_to_string(api.get_peer_ip()))\n",
"VAR_7 = self.make_test_app('/request', CLASS_7)\n",
"VAR_15 = VAR_7.get('/request', extra_environ={'REMOTE_ADDR': '192.1.2.3'})\n",
"self.assertEqual('192.1.2.3', VAR_15.body)\n"
] | [
"def test_get_peer_ip(self):...\n",
"@api.public...\n",
"self.response.write(ipaddr.ip_to_string(api.get_peer_ip()))\n",
"app = self.make_test_app('/request', Handler)\n",
"response = app.get('/request', extra_environ={'REMOTE_ADDR': '192.1.2.3'})\n",
"self.assertEqual('192.1.2.3', response.body)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_1(self, VAR_6):...\n",
"return FUNC_1(VAR_6, self.session, self.logger)\n"
] | [
"def check_component(self, comp):...\n",
"return check_component(comp, self.session, self.logger)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |