lines (sequence, lengths 1–444) | raw_lines (sequence, lengths 1–444) | label (sequence, lengths 1–444) | type (sequence, lengths 1–444) |
---|---|---|---|
[
"def FUNC_1(VAR_14, VAR_2, VAR_3='text', VAR_4=bconfig....\n",
"\"\"\"docstring\"\"\"\n",
"VAR_0.info('Analyzing keywords for local file %s.' % VAR_14)\n",
"VAR_15 = extractor.text_lines_from_local_file(VAR_14)\n",
"return FUNC_2(VAR_15, VAR_2, VAR_3=output_mode, VAR_4=output_limit, VAR_5=\n spires, VAR_6=match_mode, VAR_7=no_cache, VAR_8=with_author_keywords,\n VAR_9=rebuild_cache, VAR_10=only_core_tags, VAR_11=extract_acronyms)\n"
] | [
"def get_keywords_from_local_file(local_file, taxonomy_name, output_mode=...\n",
"\"\"\"docstring\"\"\"\n",
"log.info('Analyzing keywords for local file %s.' % local_file)\n",
"text_lines = extractor.text_lines_from_local_file(local_file)\n",
"return get_keywords_from_text(text_lines, taxonomy_name, output_mode=\n output_mode, output_limit=output_limit, spires=spires, match_mode=\n match_mode, no_cache=no_cache, with_author_keywords=\n with_author_keywords, rebuild_cache=rebuild_cache, only_core_tags=\n only_core_tags, extract_acronyms=extract_acronyms)\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"VAR_8 = self.get_datadocument_count()\n",
"VAR_9 = 0 if VAR_8 == 0 else self.get_qa_complete_extractedtext_count(\n ) / VAR_8 * 100\n",
"return VAR_9\n"
] | [
"def get_pct_checked_numeric(self):...\n",
"count = self.get_datadocument_count()\n",
"pct = 0 if count == 0 else self.get_qa_complete_extractedtext_count(\n ) / count * 100\n",
"return pct\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = self.to_str()\n",
"if not VAR_7.startswith('/'):\n",
"VAR_7 = '/' + VAR_7\n",
"return VAR_7\n"
] | [
"def to_abs_str(self):...\n",
"\"\"\"docstring\"\"\"\n",
"path = self.to_str()\n",
"if not path.startswith('/'):\n",
"path = '/' + path\n",
"return path\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_0():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = {'TEST_STACK': str(CLASS_0.DEFAULT_TEST_ID), 'TEST_APP': \n 'smoketest' + CLASS_0.DEFAULT_TEST_ID}\n",
"return st.ScenarioTestRunner.main(CLASS_0, default_binding_overrides=\n defaults, test_case_list=[AwsSmokeTest])\n"
] | [
"def main():...\n",
"\"\"\"docstring\"\"\"\n",
"defaults = {'TEST_STACK': str(AwsSmokeTestScenario.DEFAULT_TEST_ID),\n 'TEST_APP': 'smoketest' + AwsSmokeTestScenario.DEFAULT_TEST_ID}\n",
"return st.ScenarioTestRunner.main(AwsSmokeTestScenario,\n default_binding_overrides=defaults, test_case_list=[AwsSmokeTest])\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_40(*VAR_19, **VAR_20):...\n",
"VAR_62 = Thread(target=function, VAR_19=args, VAR_46=ka)\n",
"VAR_62.daemon = True\n",
"VAR_62.start()\n"
] | [
"def decorator(*args, **ka):...\n",
"t = Thread(target=function, args=args, kwargs=ka)\n",
"t.daemon = True\n",
"t.start()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_0):...\n",
"resource.Resource.__init__(self)\n",
"self.session = VAR_0\n",
"self.events_requests = []\n",
"self.infohashes_sent = set()\n",
"self.channel_cids_sent = set()\n",
"self.session.add_observer(self.on_upgrader_started, NTFY_UPGRADER, [\n NTFY_STARTED])\n",
"self.session.add_observer(self.on_upgrader_finished, NTFY_UPGRADER, [\n NTFY_FINISHED])\n",
"self.session.add_observer(self.on_upgrader_tick, NTFY_UPGRADER_TICK, [\n NTFY_STARTED])\n",
"self.session.add_observer(self.on_watch_folder_corrupt_torrent,\n NTFY_WATCH_FOLDER_CORRUPT_TORRENT, [NTFY_INSERT])\n",
"self.session.add_observer(self.on_new_version_available, NTFY_NEW_VERSION,\n [NTFY_INSERT])\n",
"self.session.add_observer(self.on_tribler_started, NTFY_TRIBLER, [NTFY_STARTED]\n )\n",
"self.session.add_observer(self.on_channel_discovered, NTFY_CHANNEL, [\n NTFY_DISCOVERED])\n",
"self.session.add_observer(self.on_torrent_discovered, NTFY_TORRENT, [\n NTFY_DISCOVERED])\n",
"self.session.add_observer(self.on_torrent_finished, NTFY_TORRENT, [\n NTFY_FINISHED])\n",
"self.session.add_observer(self.on_torrent_error, NTFY_TORRENT, [NTFY_ERROR])\n",
"self.session.add_observer(self.on_torrent_info_updated, NTFY_TORRENT, [\n NTFY_UPDATE])\n",
"self.session.add_observer(self.on_market_ask, NTFY_MARKET_ON_ASK, [NTFY_UPDATE]\n )\n",
"self.session.add_observer(self.on_market_bid, NTFY_MARKET_ON_BID, [NTFY_UPDATE]\n )\n",
"self.session.add_observer(self.on_market_ask_timeout,\n NTFY_MARKET_ON_ASK_TIMEOUT, [NTFY_UPDATE])\n",
"self.session.add_observer(self.on_market_bid_timeout,\n NTFY_MARKET_ON_BID_TIMEOUT, [NTFY_UPDATE])\n",
"self.session.add_observer(self.on_market_transaction_complete,\n NTFY_MARKET_ON_TRANSACTION_COMPLETE, [NTFY_UPDATE])\n",
"self.session.add_observer(self.on_market_payment_received,\n NTFY_MARKET_ON_PAYMENT_RECEIVED, [NTFY_UPDATE])\n",
"self.session.add_observer(self.on_market_payment_sent,\n NTFY_MARKET_ON_PAYMENT_SENT, [NTFY_UPDATE])\n",
"self.session.add_observer(self.on_resource_event, SIGNAL_RESOURCE_CHECK, [\n SIGNAL_LOW_SPACE])\n",
"self.session.add_observer(self.on_credit_minig_error, NTFY_CREDIT_MINING, [\n NTFY_ERROR])\n",
"self.session.add_observer(self.on_shutdown, NTFY_TRIBLER, [STATE_SHUTDOWN])\n",
"self.session.add_observer(self.on_circuit_removed, NTFY_TUNNEL, [NTFY_REMOVE])\n"
] | [
"def __init__(self, session):...\n",
"resource.Resource.__init__(self)\n",
"self.session = session\n",
"self.events_requests = []\n",
"self.infohashes_sent = set()\n",
"self.channel_cids_sent = set()\n",
"self.session.add_observer(self.on_upgrader_started, NTFY_UPGRADER, [\n NTFY_STARTED])\n",
"self.session.add_observer(self.on_upgrader_finished, NTFY_UPGRADER, [\n NTFY_FINISHED])\n",
"self.session.add_observer(self.on_upgrader_tick, NTFY_UPGRADER_TICK, [\n NTFY_STARTED])\n",
"self.session.add_observer(self.on_watch_folder_corrupt_torrent,\n NTFY_WATCH_FOLDER_CORRUPT_TORRENT, [NTFY_INSERT])\n",
"self.session.add_observer(self.on_new_version_available, NTFY_NEW_VERSION,\n [NTFY_INSERT])\n",
"self.session.add_observer(self.on_tribler_started, NTFY_TRIBLER, [NTFY_STARTED]\n )\n",
"self.session.add_observer(self.on_channel_discovered, NTFY_CHANNEL, [\n NTFY_DISCOVERED])\n",
"self.session.add_observer(self.on_torrent_discovered, NTFY_TORRENT, [\n NTFY_DISCOVERED])\n",
"self.session.add_observer(self.on_torrent_finished, NTFY_TORRENT, [\n NTFY_FINISHED])\n",
"self.session.add_observer(self.on_torrent_error, NTFY_TORRENT, [NTFY_ERROR])\n",
"self.session.add_observer(self.on_torrent_info_updated, NTFY_TORRENT, [\n NTFY_UPDATE])\n",
"self.session.add_observer(self.on_market_ask, NTFY_MARKET_ON_ASK, [NTFY_UPDATE]\n )\n",
"self.session.add_observer(self.on_market_bid, NTFY_MARKET_ON_BID, [NTFY_UPDATE]\n )\n",
"self.session.add_observer(self.on_market_ask_timeout,\n NTFY_MARKET_ON_ASK_TIMEOUT, [NTFY_UPDATE])\n",
"self.session.add_observer(self.on_market_bid_timeout,\n NTFY_MARKET_ON_BID_TIMEOUT, [NTFY_UPDATE])\n",
"self.session.add_observer(self.on_market_transaction_complete,\n NTFY_MARKET_ON_TRANSACTION_COMPLETE, [NTFY_UPDATE])\n",
"self.session.add_observer(self.on_market_payment_received,\n NTFY_MARKET_ON_PAYMENT_RECEIVED, [NTFY_UPDATE])\n",
"self.session.add_observer(self.on_market_payment_sent,\n NTFY_MARKET_ON_PAYMENT_SENT, [NTFY_UPDATE])\n",
"self.session.add_observer(self.on_resource_event, SIGNAL_RESOURCE_CHECK, [\n SIGNAL_LOW_SPACE])\n",
"self.session.add_observer(self.on_credit_minig_error, NTFY_CREDIT_MINING, [\n NTFY_ERROR])\n",
"self.session.add_observer(self.on_shutdown, NTFY_TRIBLER, [STATE_SHUTDOWN])\n",
"self.session.add_observer(self.on_circuit_removed, NTFY_TUNNEL, [NTFY_REMOVE])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(self, **VAR_6):...\n",
"return self.sendstring(json.dumps(VAR_6))\n"
] | [
"def response_func(self, **kw):...\n",
"return self.sendstring(json.dumps(kw))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_5():...\n",
"VAR_34 = FUNC_3(VAR_18, VAR_19)\n",
"return CLASS_16, VAR_34\n"
] | [
"def two():...\n",
"FunctionalUseFormSet = make_formset(parent, child)\n",
"return ExtractedTextForm, FunctionalUseFormSet\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_12(self, VAR_11):...\n",
"self.get(VAR_11.parentfield).remove(VAR_11)\n"
] | [
"def remove(self, doc):...\n",
"self.get(doc.parentfield).remove(doc)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def __init__(self, *VAR_3, **VAR_4):...\n",
"super(CLASS_1, self).__init__(*VAR_3, **kwargs)\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"super(SanISCSIDriver, self).__init__(*args, **kwargs)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def __str__(self):...\n",
"return Config.get('stretch', self.flag)(super(CLASS_7, self).__str__())\n"
] | [
"def __str__(self):...\n",
"return Config.get('stretch', self.flag)(super(Stretch, self).__str__())\n"
] | [
0,
2
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_19(VAR_8, **VAR_11):...\n",
"\"\"\"docstring\"\"\"\n",
"return VAR_8.format(**{wildcard: '{{{},{}}}'.format(wildcard, '|'.join(\n values)) for wildcard, values in wildcards.items()})\n"
] | [
"def limit(pattern, **wildcards):...\n",
"\"\"\"docstring\"\"\"\n",
"return pattern.format(**{wildcard: '{{{},{}}}'.format(wildcard, '|'.join(\n values)) for wildcard, values in wildcards.items()})\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def __init__(self, VAR_1, VAR_3=None):...\n",
"FUNC_0(self)\n",
"self.list_columns = dict()\n",
"self.list_properties = dict()\n",
"self.session = VAR_3\n",
"for VAR_36 in sa.orm.class_mapper(VAR_1).iterate_properties:\n",
"if type(VAR_36) != SynonymProperty:\n",
"for VAR_14 in VAR_1.__mapper__.columns.keys():\n",
"self.list_properties[VAR_36.key] = VAR_36\n",
"if VAR_14 in self.list_properties:\n",
"super(CLASS_0, self).__init__(VAR_1)\n",
"self.list_columns[VAR_14] = VAR_1.__mapper__.columns[VAR_14]\n"
] | [
"def __init__(self, obj, session=None):...\n",
"_include_filters(self)\n",
"self.list_columns = dict()\n",
"self.list_properties = dict()\n",
"self.session = session\n",
"for prop in sa.orm.class_mapper(obj).iterate_properties:\n",
"if type(prop) != SynonymProperty:\n",
"for col_name in obj.__mapper__.columns.keys():\n",
"self.list_properties[prop.key] = prop\n",
"if col_name in self.list_properties:\n",
"super(SQLAInterface, self).__init__(obj)\n",
"self.list_columns[col_name] = obj.__mapper__.columns[col_name]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"For",
"Assign'",
"Condition",
"Expr'",
"Assign'"
] |
[
"def FUNC_7(VAR_14, VAR_15, VAR_17, VAR_18=None):...\n",
"for VAR_20 in VAR_4:\n",
"if VAR_20 in VAR_5:\n",
"VAR_45 = VAR_5[VAR_20]\n",
"VAR_45 = list()\n",
"if VAR_20 in VAR_7:\n",
"VAR_5[VAR_20] = VAR_45\n",
"VAR_53 = VAR_7[VAR_20]\n",
"VAR_53 = set()\n",
"VAR_31 = FUNC_5(VAR_14, VAR_15)\n",
"VAR_7[VAR_20] = VAR_53\n",
"VAR_31.cookiefname = (VAR_14 if VAR_14 else 'noproxy') + '_' + VAR_20\n",
"VAR_47 = UniWipe(VAR_53, VAR_45, FUNC_1, FUNC_0, VAR_17, VAR_31, VAR_20,\n Mailinator, VAR_18(VAR_20) if VAR_18 else None)\n",
"VAR_47.stoponclose = VAR_9.stop_on_closed\n",
"VAR_47.die_on_neterror = VAR_9.die_on_neterror\n",
"VAR_47.caprate_minp = VAR_9.caprate_minp\n",
"VAR_47.caprate_limit = VAR_9.caprate_limit\n",
"VAR_47.conlimit = VAR_9.conlimit\n",
"VAR_47.comment_successtimeout = 0.2\n",
"if VAR_9.upload_avatar:\n",
"VAR_47.hooks['post_login'].append(FUNC_6)\n",
"yield VAR_47\n"
] | [
"def create_spawn(proxy, proxytype, pc, uq=None):...\n",
"for domain in domains:\n",
"if domain in targets:\n",
"tlist = targets[domain]\n",
"tlist = list()\n",
"if domain in forums:\n",
"targets[domain] = tlist\n",
"fset = forums[domain]\n",
"fset = set()\n",
"net = make_net(proxy, proxytype)\n",
"forums[domain] = fset\n",
"net.cookiefname = (proxy if proxy else 'noproxy') + '_' + domain\n",
"w = UniWipe(fset, tlist, sbjfun, message, pc, net, domain, Mailinator, uq(\n domain) if uq else None)\n",
"w.stoponclose = c.stop_on_closed\n",
"w.die_on_neterror = c.die_on_neterror\n",
"w.caprate_minp = c.caprate_minp\n",
"w.caprate_limit = c.caprate_limit\n",
"w.conlimit = c.conlimit\n",
"w.comment_successtimeout = 0.2\n",
"if c.upload_avatar:\n",
"w.hooks['post_login'].append(upload_avatar)\n",
"yield w\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def __getitem__(self, VAR_7):...\n",
"for VAR_19 in reversed(self.contexts):\n",
"VAR_15 = VAR_19.get(VAR_7, VAR_0)\n",
"if VAR_15 is not VAR_0:\n",
"return VAR_15\n"
] | [
"def __getitem__(self, path):...\n",
"for context in reversed(self.contexts):\n",
"value = context.get(path, MISSING)\n",
"if value is not MISSING:\n",
"return value\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_9(self, VAR_16, VAR_12=None):...\n",
"return self._rest_call(VAR_16, VAR_24='DELETE', VAR_12=headers)\n"
] | [
"def url_delete(self, url, headers=None):...\n",
"return self._rest_call(url, method='DELETE', headers=headers)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_11(VAR_12):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_28 = f.read()\n",
"return VAR_28\n"
] | [
"def getLocalFile(filepath):...\n",
"\"\"\"docstring\"\"\"\n",
"data = f.read()\n",
"return data\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"@staticmethod...\n",
"VAR_6, VAR_7 = None, 0\n",
"def FUNC_9(VAR_18, VAR_19):...\n",
"VAR_20 = None\n",
"if line.startswith(VAR_18):\n",
"VAR_20 = line.rstrip()[len(VAR_18):]\n",
"return VAR_20 or VAR_19\n"
] | [
"@staticmethod...\n",
"etag, mtime = None, 0\n",
"def extract(comment, current_value):...\n",
"value = None\n",
"if line.startswith(comment):\n",
"value = line.rstrip()[len(comment):]\n",
"return value or current_value\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"VAR_23 = 'katotest-server-group'\n",
"VAR_21 = self.agent.type_to_payload('upsertGoogleServerGroupTagsDescription',\n {'credentials': self.bindings['GCE_CREDENTIALS'], 'zone': self.bindings\n ['TEST_GCE_ZONE'], 'serverGroupName': 'katotest-server-group', 'tags':\n ['test-tag-1', 'test-tag-2']})\n",
"VAR_20 = gcp.GceContractBuilder(self.gce_observer)\n",
"VAR_20.new_clause_builder('Server Group Tags Added').inspect_resource(\n 'managed-instance-groups', VAR_23).contains_pred_list([jc.\n PathContainsPredicate('name', VAR_23), jc.PathContainsPredicate(\n 'tags/items', ['test-tag-1', 'test-tag-2'])])\n",
"return st.OperationContract(self.new_post_operation(title=\n 'upsert_server_group_tags', data=payload, VAR_29='ops'), contract=\n builder.build())\n"
] | [
"def upsert_google_server_group_tags(self):...\n",
"server_group_name = 'katotest-server-group'\n",
"payload = self.agent.type_to_payload('upsertGoogleServerGroupTagsDescription',\n {'credentials': self.bindings['GCE_CREDENTIALS'], 'zone': self.bindings\n ['TEST_GCE_ZONE'], 'serverGroupName': 'katotest-server-group', 'tags':\n ['test-tag-1', 'test-tag-2']})\n",
"builder = gcp.GceContractBuilder(self.gce_observer)\n",
"builder.new_clause_builder('Server Group Tags Added').inspect_resource(\n 'managed-instance-groups', server_group_name).contains_pred_list([jc.\n PathContainsPredicate('name', server_group_name), jc.\n PathContainsPredicate('tags/items', ['test-tag-1', 'test-tag-2'])])\n",
"return st.OperationContract(self.new_post_operation(title=\n 'upsert_server_group_tags', data=payload, path='ops'), contract=builder\n .build())\n"
] | [
0,
0,
0,
0,
1,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_10(self, VAR_13):...\n",
"return VAR_13.name in self._localrules or VAR_13.norun\n"
] | [
"def is_local(self, rule):...\n",
"return rule.name in self._localrules or rule.norun\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, VAR_6, VAR_7=None, VAR_8=None, VAR_9=None):...\n",
"super(CLASS_1, self).__init__(VAR_6, VAR_7=url_prefix, VAR_8=RESTClient.\n merge_headers(JSONRESTClient._DEFAULT_HEADERS, default_headers), VAR_9=None\n )\n"
] | [
"def __init__(self, connection, url_prefix=None, default_headers=None,...\n",
"super(JSONRESTClient, self).__init__(connection, url_prefix=url_prefix,\n default_headers=RESTClient.merge_headers(JSONRESTClient.\n _DEFAULT_HEADERS, default_headers), client_obj=None)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_0(VAR_1):...\n",
"async def FUNC_15(*VAR_8, **VAR_9):...\n",
"VAR_12 = await get_session(VAR_8[0])\n",
"if 'uname' in VAR_12 and 'ignore_timeout' not in VAR_12:\n",
"VAR_23 = time.time()\n",
"VAR_12['visit_time'] = time.time()\n",
"VAR_24 = VAR_12['visit_time']\n",
"VAR_11 = await VAR_1(*VAR_8, **kwargs)\n",
"if VAR_23 - VAR_24 > FUNC_0.timeout:\n",
"VAR_19 = web.Response(content_type='text/html', VAR_11=HTML_base.format(\n text=text))\n",
"VAR_12['visit_time'] = VAR_23\n",
"return VAR_19\n"
] | [
"def handle_html(func):...\n",
"async def ret(*args, **kwargs):...\n",
"session = await get_session(args[0])\n",
"if 'uname' in session and 'ignore_timeout' not in session:\n",
"t = time.time()\n",
"session['visit_time'] = time.time()\n",
"prev = session['visit_time']\n",
"text = await func(*args, **kwargs)\n",
"if t - prev > handle_html.timeout:\n",
"out = web.Response(content_type='text/html', text=HTML_base.format(text=text))\n",
"session['visit_time'] = t\n",
"return out\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"AsyncFunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"VAR_10 = self.fetch()\n",
"return ' '.join(VAR_10)\n"
] | [
"def printAll(self):...\n",
"baddies = self.fetch()\n",
"return ' '.join(baddies)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_29(self):...\n",
"VAR_49 = re.split(self.dynamic_fill, self.file)[0]\n",
"VAR_50 = os.path.dirname(VAR_49)\n",
"if len(VAR_50) > 0 and not os.path.exists(VAR_50):\n",
"os.makedirs(VAR_50)\n",
"if e.errno != 17:\n"
] | [
"def prepare(self):...\n",
"path_until_wildcard = re.split(self.dynamic_fill, self.file)[0]\n",
"dir = os.path.dirname(path_until_wildcard)\n",
"if len(dir) > 0 and not os.path.exists(dir):\n",
"os.makedirs(dir)\n",
"if e.errno != 17:\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition"
] |
[
"import logging\n",
"from bulk_update.helper import bulk_update\n",
"from django.contrib.auth.decorators import login_required\n",
"from django.db import transaction\n",
"from django.http import HttpResponseBadRequest, HttpResponseForbidden, JsonResponse\n",
"from django.shortcuts import get_object_or_404\n",
"from django.views.decorators.http import require_POST\n",
"from pontoon.base.models import ChangedEntityLocale, Entity, Locale, Project, ProjectLocale, TranslationMemoryEntry, Translation\n",
"from pontoon.base.utils import require_AJAX, readonly_exists\n",
"from pontoon.batch import forms\n",
"from pontoon.batch.actions import ACTIONS_FN_MAP\n",
"VAR_0 = logging.getLogger(__name__)\n",
"def FUNC_0(VAR_1, VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = set()\n",
"for translated_resource in VAR_1:\n",
"VAR_7.add(translated_resource.resource.project)\n",
"bulk_update(VAR_1, update_fields=['total_strings', 'approved_strings',\n 'fuzzy_strings', 'strings_with_errors', 'strings_with_warnings',\n 'unreviewed_strings'])\n",
"translated_resource.calculate_stats(save=False)\n",
"VAR_2.aggregate_stats()\n",
"for VAR_5 in VAR_7:\n",
"VAR_5.aggregate_stats()\n",
"def FUNC_1(VAR_3, VAR_2):...\n",
"ProjectLocale.objects.get(VAR_2=locale, VAR_5=project).aggregate_stats()\n",
"\"\"\"docstring\"\"\"\n",
"VAR_8 = []\n",
"VAR_9 = ChangedEntityLocale.objects.values_list('entity', 'locale').distinct()\n",
"for changed_entity in VAR_3:\n",
"VAR_18 = changed_entity.pk, VAR_2.pk\n",
"ChangedEntityLocale.objects.bulk_create(VAR_8)\n",
"if VAR_18 not in VAR_9:\n",
"def FUNC_2(VAR_4, VAR_5, VAR_2):...\n",
"VAR_8.append(ChangedEntityLocale(entity=changed_entity, VAR_2=locale))\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10 = [TranslationMemoryEntry(source=t.entity.string, target=t.string,\n VAR_2=locale, entity=t.entity, translation=t, VAR_5=project) for t in\n Translation.objects.filter(pk__in=changed_translation_pks).\n prefetch_related('entity__resource')]\n",
"TranslationMemoryEntry.objects.bulk_create(VAR_10)\n",
"@login_required(redirect_field_name='', login_url='/403')...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = forms.BatchActionsForm(VAR_6.POST)\n",
"if not VAR_11.is_valid():\n",
"return HttpResponseBadRequest(VAR_11.errors.as_json())\n",
"VAR_2 = get_object_or_404(Locale, code=form.cleaned_data['locale'])\n",
"VAR_12 = Entity.objects.filter(pk__in=form.cleaned_data['entities'])\n",
"if not VAR_12.exists():\n",
"return JsonResponse({'count': 0})\n",
"VAR_13 = VAR_12.values_list('resource__project__pk', flat=True)\n",
"VAR_7 = Project.objects.filter(pk__in=projects_pk.distinct())\n",
"for VAR_5 in VAR_7:\n",
"if not VAR_6.user.can_translate(VAR_5=project, VAR_2=locale\n",
"VAR_14 = Translation.objects.filter(active=True, VAR_2=locale, entity__in=\n entities)\n",
"return HttpResponseForbidden(\n \"Forbidden: You don't have permission for batch editing\")\n",
"VAR_15 = ACTIONS_FN_MAP[VAR_11.cleaned_data['action']]\n",
"VAR_16 = VAR_15(VAR_11, VAR_6.user, VAR_14, VAR_2)\n",
"if VAR_16.get('error'):\n",
"return JsonResponse(VAR_16)\n",
"VAR_17 = len(VAR_16.get('invalid_translation_pks', []))\n",
"if VAR_16['count'] == 0:\n",
"return JsonResponse({'count': 0, 'invalid_translation_count': VAR_17})\n",
"FUNC_0(VAR_16['translated_resources'], VAR_2)\n",
"FUNC_1(VAR_16['changed_entities'], VAR_2)\n",
"if VAR_16['latest_translation_pk']:\n",
"Translation.objects.get(pk=action_status['latest_translation_pk']\n ).update_latest_translation()\n",
"FUNC_2(VAR_16['changed_translation_pks'], VAR_5, VAR_2)\n",
"return JsonResponse({'count': VAR_16['count'], 'invalid_translation_count':\n VAR_17})\n"
] | [
"import logging\n",
"from bulk_update.helper import bulk_update\n",
"from django.contrib.auth.decorators import login_required\n",
"from django.db import transaction\n",
"from django.http import HttpResponseBadRequest, HttpResponseForbidden, JsonResponse\n",
"from django.shortcuts import get_object_or_404\n",
"from django.views.decorators.http import require_POST\n",
"from pontoon.base.models import ChangedEntityLocale, Entity, Locale, Project, ProjectLocale, TranslationMemoryEntry, Translation\n",
"from pontoon.base.utils import require_AJAX, readonly_exists\n",
"from pontoon.batch import forms\n",
"from pontoon.batch.actions import ACTIONS_FN_MAP\n",
"log = logging.getLogger(__name__)\n",
"def update_stats(translated_resources, locale):...\n",
"\"\"\"docstring\"\"\"\n",
"projects = set()\n",
"for translated_resource in translated_resources:\n",
"projects.add(translated_resource.resource.project)\n",
"bulk_update(translated_resources, update_fields=['total_strings',\n 'approved_strings', 'fuzzy_strings', 'strings_with_errors',\n 'strings_with_warnings', 'unreviewed_strings'])\n",
"translated_resource.calculate_stats(save=False)\n",
"locale.aggregate_stats()\n",
"for project in projects:\n",
"project.aggregate_stats()\n",
"def mark_changed_translation(changed_entities, locale):...\n",
"ProjectLocale.objects.get(locale=locale, project=project).aggregate_stats()\n",
"\"\"\"docstring\"\"\"\n",
"changed_entities_array = []\n",
"existing = ChangedEntityLocale.objects.values_list('entity', 'locale'\n ).distinct()\n",
"for changed_entity in changed_entities:\n",
"key = changed_entity.pk, locale.pk\n",
"ChangedEntityLocale.objects.bulk_create(changed_entities_array)\n",
"if key not in existing:\n",
"def update_translation_memory(changed_translation_pks, project, locale):...\n",
"changed_entities_array.append(ChangedEntityLocale(entity=changed_entity,\n locale=locale))\n",
"\"\"\"docstring\"\"\"\n",
"memory_entries = [TranslationMemoryEntry(source=t.entity.string, target=t.\n string, locale=locale, entity=t.entity, translation=t, project=project) for\n t in Translation.objects.filter(pk__in=changed_translation_pks).\n prefetch_related('entity__resource')]\n",
"TranslationMemoryEntry.objects.bulk_create(memory_entries)\n",
"@login_required(redirect_field_name='', login_url='/403')...\n",
"\"\"\"docstring\"\"\"\n",
"form = forms.BatchActionsForm(request.POST)\n",
"if not form.is_valid():\n",
"return HttpResponseBadRequest(form.errors.as_json())\n",
"locale = get_object_or_404(Locale, code=form.cleaned_data['locale'])\n",
"entities = Entity.objects.filter(pk__in=form.cleaned_data['entities'])\n",
"if not entities.exists():\n",
"return JsonResponse({'count': 0})\n",
"projects_pk = entities.values_list('resource__project__pk', flat=True)\n",
"projects = Project.objects.filter(pk__in=projects_pk.distinct())\n",
"for project in projects:\n",
"if not request.user.can_translate(project=project, locale=locale\n",
"active_translations = Translation.objects.filter(active=True, locale=locale,\n entity__in=entities)\n",
"return HttpResponseForbidden(\n \"Forbidden: You don't have permission for batch editing\")\n",
"action_function = ACTIONS_FN_MAP[form.cleaned_data['action']]\n",
"action_status = action_function(form, request.user, active_translations, locale\n )\n",
"if action_status.get('error'):\n",
"return JsonResponse(action_status)\n",
"invalid_translation_count = len(action_status.get('invalid_translation_pks',\n []))\n",
"if action_status['count'] == 0:\n",
"return JsonResponse({'count': 0, 'invalid_translation_count':\n invalid_translation_count})\n",
"update_stats(action_status['translated_resources'], locale)\n",
"mark_changed_translation(action_status['changed_entities'], locale)\n",
"if action_status['latest_translation_pk']:\n",
"Translation.objects.get(pk=action_status['latest_translation_pk']\n ).update_latest_translation()\n",
"update_translation_memory(action_status['changed_translation_pks'], project,\n locale)\n",
"return JsonResponse({'count': action_status['count'],\n 'invalid_translation_count': invalid_translation_count})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"For",
"Expr'",
"FunctionDef'",
"Expr'",
"Docstring",
"Assign'",
"Assign'",
"For",
"Assign'",
"Expr'",
"Condition",
"FunctionDef'",
"Expr'",
"Docstring",
"Assign'",
"Expr'",
"Condition",
"Docstring",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_10(VAR_16):...\n",
"return isinstance(VAR_16, str) or not isinstance(VAR_16, Iterable)\n"
] | [
"def not_iterable(value):...\n",
"return isinstance(value, str) or not isinstance(value, Iterable)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self.format_wildcards(self.rule.shellcmd\n ) if self.rule.shellcmd else None\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self.format_wildcards(self.rule.shellcmd\n ) if self.rule.shellcmd else None\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"\"\"\"Functions for interfacing with the SQL databases.\n\"\"\"\n",
"import sqlite3 as sql\n",
"import traceback\n",
"def FUNC_0(VAR_0, VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_18 = sql.connect('./resources/users.db')\n",
"VAR_19 = False\n",
"VAR_20 = \"\"\"SELECT * FROM users\n WHERE username = ? AND password = ?;\n \"\"\"\n",
"VAR_21 = VAR_18.cursor()\n",
"VAR_22 = VAR_0, VAR_1\n",
"VAR_21.execute(VAR_20, VAR_22)\n",
"VAR_23 = VAR_21.fetchone()\n",
"if VAR_23 != None:\n",
"VAR_19 = True\n",
"VAR_19 = False\n",
"VAR_5 = VAR_23[0]\n",
"VAR_5 = None\n",
"VAR_2 = VAR_23[2]\n",
"VAR_2 = None\n",
"VAR_3 = VAR_23[3]\n",
"VAR_3 = None\n",
"VAR_18.close()\n",
"return VAR_19, VAR_5, VAR_2, VAR_3\n"
] | [
"\"\"\"Functions for interfacing with the SQL databases.\n\"\"\"\n",
"import sqlite3 as sql\n",
"import traceback\n",
"def authenticate(username, password):...\n",
"\"\"\"docstring\"\"\"\n",
"con = sql.connect('./resources/users.db')\n",
"authenticated = False\n",
"cmd = \"\"\"SELECT * FROM users\n WHERE username = ? AND password = ?;\n \"\"\"\n",
"cur = con.cursor()\n",
"info = username, password\n",
"cur.execute(cmd, info)\n",
"user_info = cur.fetchone()\n",
"if user_info != None:\n",
"authenticated = True\n",
"authenticated = False\n",
"user_id = user_info[0]\n",
"user_id = None\n",
"firstname = user_info[2]\n",
"firstname = None\n",
"lastname = user_info[3]\n",
"lastname = None\n",
"con.close()\n",
"return authenticated, user_id, firstname, lastname\n"
] | [
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_11(self):...\n",
"VAR_9 = []\n",
"VAR_10 = {}\n",
"for kid in self.child_groups:\n",
"VAR_13 = kid.get_hosts()\n",
"for VAR_14 in self.hosts:\n",
"for VAR_15 in VAR_13:\n",
"if VAR_14 not in VAR_10:\n",
"return VAR_9\n",
"if VAR_15 not in VAR_10:\n",
"VAR_10[VAR_14] = 1\n",
"VAR_10[VAR_15] = 1\n",
"if self.name == 'all' and VAR_14.implicit:\n",
"if self.name == 'all' and VAR_15.implicit:\n",
"VAR_9.append(VAR_14)\n",
"VAR_9.append(VAR_15)\n"
] | [
"def _get_hosts(self):...\n",
"hosts = []\n",
"seen = {}\n",
"for kid in self.child_groups:\n",
"kid_hosts = kid.get_hosts()\n",
"for mine in self.hosts:\n",
"for kk in kid_hosts:\n",
"if mine not in seen:\n",
"return hosts\n",
"if kk not in seen:\n",
"seen[mine] = 1\n",
"seen[kk] = 1\n",
"if self.name == 'all' and mine.implicit:\n",
"if self.name == 'all' and kk.implicit:\n",
"hosts.append(mine)\n",
"hosts.append(kk)\n"
] | [
0,
0,
0,
1,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Assign'",
"For",
"For",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_10(self, VAR_10, VAR_5):...\n",
"VAR_11 = 'string'.format(self.columns[VAR_10].select(), VAR_5, self.\n build_filter_string(), self.columns[VAR_10].name)\n",
"print(VAR_11)\n",
"return self.query_boilerplate(VAR_11)\n"
] | [
"def topn_sum(self, column, sum_by):...\n",
"q = (\n \"\"\"\n SELECT {0}, sum({1}) AS c FROM test_goflow_records {2} GROUP BY {3} ORDER BY c DESC\n \"\"\"\n .format(self.columns[column].select(), sum_by, self.build_filter_string\n (), self.columns[column].name))\n",
"print(q)\n",
"return self.query_boilerplate(q)\n"
] | [
0,
0,
4,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_0(VAR_0=None, VAR_1=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if not go.ANAGONDA_PRESENT:\n",
"return\n",
"if get_settings(VAR_0, 'anaconda_go_fast_linters_only', False):\n",
"FUNC_1(VAR_0, VAR_1)\n",
"FUNC_3(VAR_0, VAR_1)\n"
] | [
"def run_linter(view=None, hook=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if not go.ANAGONDA_PRESENT:\n",
"return\n",
"if get_settings(view, 'anaconda_go_fast_linters_only', False):\n",
"fast_linters(view, hook)\n",
"all_linters(view, hook)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Condition",
"Expr'",
"Expr'"
] |
[
"@api.model_cr...\n",
"tools.drop_view_if_exists(self._cr, self._table)\n",
"self._cr.execute(\n \"\"\"\n create or replace view %s as (\n %s\n %s\n )\"\"\"\n % (self._table, self._select(), self._from()))\n"
] | [
"@api.model_cr...\n",
"tools.drop_view_if_exists(self._cr, self._table)\n",
"self._cr.execute(\n \"\"\"\n create or replace view %s as (\n %s\n %s\n )\"\"\"\n % (self._table, self._select(), self._from()))\n"
] | [
0,
0,
4
] | [
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(self, VAR_1, VAR_3):...\n",
"VAR_17 = VAR_3['source']['serial_id']\n",
"VAR_18, VAR_19, VAR_4 = self.get_previous_yields(VAR_17)\n",
"VAR_6 = 'OK'\n",
"self.add_day_data_row(VAR_1, VAR_3, VAR_4)\n",
"if self.is_timestamps_from_same_day(VAR_18, VAR_1):\n",
"self.update_inverter(VAR_17, VAR_1, VAR_6, VAR_19 + VAR_3['energy'], VAR_4 +\n VAR_3['energy'])\n",
"self.update_inverter(VAR_17, VAR_1, VAR_6, VAR_3['energy'], VAR_4 + VAR_3[\n 'energy'])\n",
"self.db.commit()\n",
"self.add_month_data_row(VAR_17, VAR_1, VAR_19, VAR_4)\n"
] | [
"def add_inverter_data(self, ts, data):...\n",
"inv_serial = data['source']['serial_id']\n",
"prev_ts, prev_etoday, prev_etotal = self.get_previous_yields(inv_serial)\n",
"status = 'OK'\n",
"self.add_day_data_row(ts, data, prev_etotal)\n",
"if self.is_timestamps_from_same_day(prev_ts, ts):\n",
"self.update_inverter(inv_serial, ts, status, prev_etoday + data['energy'], \n prev_etotal + data['energy'])\n",
"self.update_inverter(inv_serial, ts, status, data['energy'], prev_etotal +\n data['energy'])\n",
"self.db.commit()\n",
"self.add_month_data_row(inv_serial, ts, prev_etoday, prev_etotal)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@permission_required('tournament.add_game')...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = get_modifiable_tournament_or_404(VAR_3, VAR_2.user)\n",
"VAR_8 = FUNC_0(VAR_7, VAR_1)\n",
"VAR_21 = VAR_8.game_set.all()\n",
"VAR_11 = []\n",
"for VAR_39 in VAR_21:\n",
"VAR_28 = {'game_name': VAR_39.name, 'the_set': VAR_39.the_set}\n",
"VAR_17 = VAR_8.roundplayer_set.count()\n",
"for VAR_38 in VAR_39.gameplayer_set.all():\n",
"VAR_22 = (VAR_17 + 6) // 7\n",
"VAR_28[VAR_38.power.name] = VAR_38.roundplayer()\n",
"VAR_11.append(VAR_28)\n",
"if VAR_22 < 1:\n",
"VAR_22 = 1\n",
"VAR_23 = formset_factory(GamePlayersForm, extra=expected_games - games.\n count(), VAR_24=BaseGamePlayersFormset)\n",
"VAR_24 = VAR_23(VAR_2.POST or None, VAR_5=r, initial=data)\n",
"if VAR_24.is_valid():\n",
"for f in VAR_24:\n",
"return render(VAR_2, 'rounds/create_games.html', {'tournament': VAR_7,\n 'round': VAR_8, 'formset': VAR_24})\n",
"send_board_call(VAR_8)\n",
"VAR_39, VAR_35 = Game.objects.get_or_create(name=f.cleaned_data['game_name'\n ], VAR_5=r, the_set=f.cleaned_data['the_set'])\n",
"VAR_39.full_clean()\n",
"f.add_error(None, e)\n",
"if VAR_35:\n",
"return HttpResponseRedirect(reverse('game_index', args=(tournament_id,\n round_num)))\n",
"VAR_39.delete()\n",
"VAR_39.save()\n",
"for power, field in f.cleaned_data.items():\n",
"return render(VAR_2, 'rounds/create_games.html', {'tournament': VAR_7,\n 'round': VAR_8, 'formset': VAR_24})\n",
"VAR_40 = GreatPower.objects.get(name=power)\n",
"VAR_34 = GamePlayer.objects.get(VAR_37=g, power=p)\n",
"VAR_34 = GamePlayer(player=field.player, VAR_37=g, power=p)\n",
"VAR_34.player = field.player\n",
"VAR_34.full_clean()\n",
"f.add_error(None, e)\n",
"VAR_34.save()\n",
"VAR_34.delete()\n",
"return render(VAR_2, 'rounds/create_games.html', {'tournament': VAR_7,\n 'round': VAR_8, 'formset': VAR_24})\n"
] | [
"@permission_required('tournament.add_game')...\n",
"\"\"\"docstring\"\"\"\n",
"t = get_modifiable_tournament_or_404(tournament_id, request.user)\n",
"r = get_round_or_404(t, round_num)\n",
"games = r.game_set.all()\n",
"data = []\n",
"for g in games:\n",
"current = {'game_name': g.name, 'the_set': g.the_set}\n",
"round_players = r.roundplayer_set.count()\n",
"for gp in g.gameplayer_set.all():\n",
"expected_games = (round_players + 6) // 7\n",
"current[gp.power.name] = gp.roundplayer()\n",
"data.append(current)\n",
"if expected_games < 1:\n",
"expected_games = 1\n",
"GamePlayersFormset = formset_factory(GamePlayersForm, extra=expected_games -\n games.count(), formset=BaseGamePlayersFormset)\n",
"formset = GamePlayersFormset(request.POST or None, the_round=r, initial=data)\n",
"if formset.is_valid():\n",
"for f in formset:\n",
"return render(request, 'rounds/create_games.html', {'tournament': t,\n 'round': r, 'formset': formset})\n",
"send_board_call(r)\n",
"g, created = Game.objects.get_or_create(name=f.cleaned_data['game_name'],\n the_round=r, the_set=f.cleaned_data['the_set'])\n",
"g.full_clean()\n",
"f.add_error(None, e)\n",
"if created:\n",
"return HttpResponseRedirect(reverse('game_index', args=(tournament_id,\n round_num)))\n",
"g.delete()\n",
"g.save()\n",
"for power, field in f.cleaned_data.items():\n",
"return render(request, 'rounds/create_games.html', {'tournament': t,\n 'round': r, 'formset': formset})\n",
"p = GreatPower.objects.get(name=power)\n",
"i = GamePlayer.objects.get(game=g, power=p)\n",
"i = GamePlayer(player=field.player, game=g, power=p)\n",
"i.player = field.player\n",
"i.full_clean()\n",
"f.add_error(None, e)\n",
"i.save()\n",
"i.delete()\n",
"return render(request, 'rounds/create_games.html', {'tournament': t,\n 'round': r, 'formset': formset})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"For",
"Return'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Expr'",
"For",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_13(self, VAR_25='default.yaml'):...\n",
"self.config = load(data_file, Loader)\n"
] | [
"def load_config(self, filename='default.yaml'):...\n",
"self.config = load(data_file, Loader)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"@property...\n",
"return self.dag.priority(self)\n"
] | [
"@property...\n",
"return self.dag.priority(self)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_11(VAR_12):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_28 = f.read()\n",
"return VAR_28\n"
] | [
"def getLocalFile(filepath):...\n",
"\"\"\"docstring\"\"\"\n",
"data = f.read()\n",
"return data\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_10(self, VAR_8):...\n",
"\"\"\"docstring\"\"\"\n",
"self.post_event('bot_rebooting', VAR_8)\n",
"self.cancel_all_timers()\n",
"if self._shutdown_hook:\n",
"self._shutdown_hook(self)\n",
"logging.exception('shutdown hook failed: %s', e)\n",
"os_utilities.restart(VAR_8, timeout=15 * 60)\n",
"self.post_error('This host partition is bad; please fix the host')\n",
"self.post_error('Bot is stuck restarting for: %s' % VAR_8)\n",
"while True:\n",
"time.sleep(1)\n"
] | [
"def restart(self, message):...\n",
"\"\"\"docstring\"\"\"\n",
"self.post_event('bot_rebooting', message)\n",
"self.cancel_all_timers()\n",
"if self._shutdown_hook:\n",
"self._shutdown_hook(self)\n",
"logging.exception('shutdown hook failed: %s', e)\n",
"os_utilities.restart(message, timeout=15 * 60)\n",
"self.post_error('This host partition is bad; please fix the host')\n",
"self.post_error('Bot is stuck restarting for: %s' % message)\n",
"while True:\n",
"time.sleep(1)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_18(self):...\n",
"if self.contains('yunohost app setting'):\n",
"FUNC_3('string')\n",
"if self.contains('yunohost app checkurl'):\n",
"FUNC_3(\n \"'yunohost app checkurl' is deprecated. Please use 'ynh_webpath_register' instead.\"\n )\n",
"if self.contains('yunohost app checkport'):\n",
"FUNC_3(\n \"'yunohost app checkport' is deprecated. Please use 'ynh_find_port' instead.\"\n )\n",
"if self.contains('yunohost app initdb'):\n",
"FUNC_3(\n \"'yunohost app initdb' is deprecated. Please use 'ynh_mysql_setup_db' instead.\"\n )\n",
"if self.contains('exit'):\n",
"FUNC_3(\"'exit' command shouldn't be used. Please use 'ynh_die' instead.\")\n",
"if self.contains('rm -rf'):\n",
"FUNC_4(\n \"[YEP-2.12] You should avoid using 'rm -rf', please use 'ynh_secure_remove' instead\"\n )\n",
"if self.contains('sed -i'):\n",
"FUNC_3(\n \"[YEP-2.12] You should avoid using 'sed -i', please use 'ynh_replace_string' instead\"\n )\n",
"if self.contains('sudo'):\n",
"FUNC_3('string')\n",
"if self.contains('dd if=/dev/urandom') or self.contains('openssl rand'):\n",
"FUNC_3(\n \"Instead of 'dd if=/dev/urandom' or 'openssl rand', you might want to use ynh_string_random\"\n )\n",
"if self.contains('systemctl restart nginx') or self.contains(\n",
"FUNC_4('string')\n",
"if self.name == 'install' and not self.contains('ynh_print_info'\n",
"FUNC_3('string')\n"
] | [
"def check_deprecated_practices(self):...\n",
"if self.contains('yunohost app setting'):\n",
"print_warning(\n \"'yunohost app setting' shouldn't be used directly. Please use 'ynh_app_setting_(set,get,delete)' instead.\"\n )\n",
"if self.contains('yunohost app checkurl'):\n",
"print_warning(\n \"'yunohost app checkurl' is deprecated. Please use 'ynh_webpath_register' instead.\"\n )\n",
"if self.contains('yunohost app checkport'):\n",
"print_warning(\n \"'yunohost app checkport' is deprecated. Please use 'ynh_find_port' instead.\"\n )\n",
"if self.contains('yunohost app initdb'):\n",
"print_warning(\n \"'yunohost app initdb' is deprecated. Please use 'ynh_mysql_setup_db' instead.\"\n )\n",
"if self.contains('exit'):\n",
"print_warning(\"'exit' command shouldn't be used. Please use 'ynh_die' instead.\"\n )\n",
"if self.contains('rm -rf'):\n",
"print_error(\n \"[YEP-2.12] You should avoid using 'rm -rf', please use 'ynh_secure_remove' instead\"\n )\n",
"if self.contains('sed -i'):\n",
"print_warning(\n \"[YEP-2.12] You should avoid using 'sed -i', please use 'ynh_replace_string' instead\"\n )\n",
"if self.contains('sudo'):\n",
"print_warning(\n \"[YEP-2.12] You should not need to use 'sudo', the script is being run as root. (If you need to run a command using a specific user, use 'ynh_exec_as')\"\n )\n",
"if self.contains('dd if=/dev/urandom') or self.contains('openssl rand'):\n",
"print_warning(\n \"Instead of 'dd if=/dev/urandom' or 'openssl rand', you might want to use ynh_string_random\"\n )\n",
"if self.contains('systemctl restart nginx') or self.contains(\n",
"print_error(\n \"Restarting nginx is quite dangerous (especially for web installs) and should be avoided at all cost. Use 'reload' instead.\"\n )\n",
"if self.name == 'install' and not self.contains('ynh_print_info'\n",
"print_warning(\n \"Please add a few messages for the user, to explain what is going on (in friendly, not-too-technical terms) during the installation. You can use 'ynh_print_info' or 'ynh_script_progression' for this.\"\n )\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_3(VAR_2, VAR_3):...\n",
"VAR_2.browser = webdriver.PhantomJS()\n",
"VAR_2.browser.set_window_size(1280, 1024)\n",
"VAR_2.browser.implicitly_wait(DEFAULT_IMPLICIT_WAIT_TIMEOUT_IN_S)\n",
"VAR_2.browser.set_page_load_timeout(60)\n",
"VAR_2.browser.get(HOMEPAGE_URL)\n"
] | [
"def before_feature(context, feature):...\n",
"context.browser = webdriver.PhantomJS()\n",
"context.browser.set_window_size(1280, 1024)\n",
"context.browser.implicitly_wait(DEFAULT_IMPLICIT_WAIT_TIMEOUT_IN_S)\n",
"context.browser.set_page_load_timeout(60)\n",
"context.browser.get(HOMEPAGE_URL)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_4(self, VAR_2):...\n",
"if self == VAR_2:\n",
"if VAR_2 not in self.child_groups:\n",
"self.child_groups.append(VAR_2)\n",
"VAR_2.depth = max([self.depth + 1, VAR_2.depth])\n",
"VAR_2._check_children_depth()\n",
"if self.name not in [VAR_12.name for VAR_12 in VAR_2.parent_groups]:\n",
"VAR_2.parent_groups.append(self)\n",
"self.clear_hosts_cache()\n",
"for h in VAR_2.get_hosts():\n",
"h.populate_ancestors()\n"
] | [
"def add_child_group(self, group):...\n",
"if self == group:\n",
"if group not in self.child_groups:\n",
"self.child_groups.append(group)\n",
"group.depth = max([self.depth + 1, group.depth])\n",
"group._check_children_depth()\n",
"if self.name not in [g.name for g in group.parent_groups]:\n",
"group.parent_groups.append(self)\n",
"self.clear_hosts_cache()\n",
"for h in group.get_hosts():\n",
"h.populate_ancestors()\n"
] | [
0,
0,
0,
0,
1,
1,
0,
0,
0,
0,
1
] | [
"FunctionDef'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"For",
"Expr'",
"Expr'",
"For",
"Expr'"
] |
[
"@eqlx.with_timeout...\n",
"time.sleep(1)\n"
] | [
"@eqlx.with_timeout...\n",
"time.sleep(1)\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"def FUNC_1(self, VAR_1=False):...\n",
"\"\"\"docstring\"\"\"\n",
"self.testbed.setup_env(user_email='[email protected]', user_id=ViewTestsBase.\n _USER_ID, user_is_admin='1' if is_admin else '0', overwrite=True)\n"
] | [
"def login(self, is_admin=False):...\n",
"\"\"\"docstring\"\"\"\n",
"self.testbed.setup_env(user_email='[email protected]', user_id=ViewTestsBase.\n _USER_ID, user_is_admin='1' if is_admin else '0', overwrite=True)\n"
] | [
0,
0,
5
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"def __call__(self, VAR_0):...\n",
"VAR_31 = ResourceTreeTraverser.__call__(self, VAR_0)\n",
"VAR_32 = VAR_31['context']\n",
"VAR_33 = VAR_31['view_name']\n",
"if IResource.providedBy(VAR_32) and '.' in VAR_33:\n",
"VAR_43, VAR_44 = VAR_33.split('.')\n",
"return VAR_31\n",
"VAR_51 = VAR_32[VAR_43]\n",
"if IResource.providedBy(VAR_51):\n",
"VAR_31['context'] = VAR_51\n",
"VAR_31['view_name'] = VAR_44\n"
] | [
"def __call__(self, request):...\n",
"system = ResourceTreeTraverser.__call__(self, request)\n",
"context = system['context']\n",
"view_name = system['view_name']\n",
"if IResource.providedBy(context) and '.' in view_name:\n",
"rc_name, repr_name = view_name.split('.')\n",
"return system\n",
"child_rc = context[rc_name]\n",
"if IResource.providedBy(child_rc):\n",
"system['context'] = child_rc\n",
"system['view_name'] = repr_name\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Assign'"
] |
[
"@staticmethod...\n",
"VAR_20 = bot.get_file(VAR_1.document.file_id).file_path\n",
"VAR_21 = f'https://api.telegram.org/file/bot{config.TELEGRAM_TOKEN}/{VAR_20}'\n",
"if machine == 'prod':\n",
"VAR_45 = requests.get(VAR_21)\n",
"VAR_46 = {'https': config.PROXY_CONFIG}\n",
"return BytesIO(VAR_45.content)\n",
"VAR_45 = requests.get(VAR_21, VAR_46=proxies)\n"
] | [
"@staticmethod...\n",
"file_path = bot.get_file(message.document.file_id).file_path\n",
"link = f'https://api.telegram.org/file/bot{config.TELEGRAM_TOKEN}/{file_path}'\n",
"if machine == 'prod':\n",
"r = requests.get(link)\n",
"proxies = {'https': config.PROXY_CONFIG}\n",
"return BytesIO(r.content)\n",
"r = requests.get(link, proxies=proxies)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Assign'"
] |
[
"def FUNC_4(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._check_flags()\n"
] | [
"def check_for_setup_error(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._check_flags()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"def FUNC_7(self, VAR_1=QModelIndex()):...\n",
"return len(self.columns)\n"
] | [
"def columnCount(self, parent=QModelIndex()):...\n",
"return len(self.columns)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_3(VAR_0, VAR_1, VAR_2):...\n",
"VAR_1['VMWARE_USER'] = VAR_0.get_input('username', default='')\n",
"VAR_1['VMWARE_PASSWORD'] = VAR_0.get_input('password', default='')\n",
"VAR_1['VMWARE_HOST'] = VAR_0.get_input('host', default='')\n",
"VAR_1['VMWARE_VALIDATE_CERTS'] = str(settings.VMWARE_VALIDATE_CERTS)\n"
] | [
"def vmware(cred, env, private_data_dir):...\n",
"env['VMWARE_USER'] = cred.get_input('username', default='')\n",
"env['VMWARE_PASSWORD'] = cred.get_input('password', default='')\n",
"env['VMWARE_HOST'] = cred.get_input('host', default='')\n",
"env['VMWARE_VALIDATE_CERTS'] = str(settings.VMWARE_VALIDATE_CERTS)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def __init__(self, VAR_12, VAR_13=1000, VAR_14=(), VAR_6=None, VAR_15=False):...\n",
"super(CLASS_1, self).__init__()\n",
"self._conn = VAR_12\n",
"self._seq = VAR_13\n",
"self._timeout = VAR_6\n",
"self._owned = VAR_15\n",
"self._handlers = []\n",
"for VAR_18 in VAR_14:\n",
"if callable(VAR_18):\n",
"self._received = []\n",
"self._add_handler(VAR_18)\n",
"self._add_handler(*VAR_18)\n",
"self._listenerthread = new_hidden_thread(target=self._listen, VAR_27=\n 'test.session')\n",
"self._listenerthread.start()\n"
] | [
"def __init__(self, conn, seq=1000, handlers=(), timeout=None, owned=False):...\n",
"super(DebugSession, self).__init__()\n",
"self._conn = conn\n",
"self._seq = seq\n",
"self._timeout = timeout\n",
"self._owned = owned\n",
"self._handlers = []\n",
"for handler in handlers:\n",
"if callable(handler):\n",
"self._received = []\n",
"self._add_handler(handler)\n",
"self._add_handler(*handler)\n",
"self._listenerthread = new_hidden_thread(target=self._listen, name=\n 'test.session')\n",
"self._listenerthread.start()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_34(self):...\n",
"\"\"\"docstring\"\"\"\n",
"FUNC_5(VAR_1='Mark German', VAR_2='Germany')\n",
"self.assertEqual(tournament.list_players(), 0)\n"
] | [
"def test_list_players(self):...\n",
"\"\"\"docstring\"\"\"\n",
"dummy_player(player_name='Mark German', country='Germany')\n",
"self.assertEqual(tournament.list_players(), 0)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(VAR_0, VAR_1):...\n",
"VAR_5 = VAR_0.name.replace(' ', '_')\n",
"VAR_6 = '{0}/{0}_{1}'.format(VAR_5, VAR_1)\n",
"return VAR_6\n"
] | [
"def update_filename(instance, filename):...\n",
"name_fill_space = instance.name.replace(' ', '_')\n",
"name = '{0}/{0}_{1}'.format(name_fill_space, filename)\n",
"return name\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"return 'submit source %s for task %s (ID %d) %s' % (self.source_path, self.\n task[1], self.task[0], self.url)\n"
] | [
"def describe(self):...\n",
"return 'submit source %s for task %s (ID %d) %s' % (self.source_path, self.\n task[1], self.task[0], self.url)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_8(VAR_15):...\n",
"return len(VAR_15) == 3 and VAR_15[1\n ] in schemas.CONDITION_OPERATORS and isinstance(VAR_15[0], (six.\n string_types, tuple, list))\n"
] | [
"def is_condition(cond_or_list):...\n",
"return len(cond_or_list) == 3 and cond_or_list[1\n ] in schemas.CONDITION_OPERATORS and isinstance(cond_or_list[0], (six.\n string_types, tuple, list))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_1(self, VAR_2, VAR_1='='):...\n",
"for regex, VAR_10 in self.filter_map.items():\n",
"if re.search(regex, VAR_2):\n",
"VAR_34 = re.search(regex, VAR_2)\n",
"VAR_35 = VAR_34.group(1)\n",
"self.columns[VAR_10].filter(VAR_35, VAR_1)\n"
] | [
"def add_filter(self, value, op='='):...\n",
"for regex, column in self.filter_map.items():\n",
"if re.search(regex, value):\n",
"m = re.search(regex, value)\n",
"v = m.group(1)\n",
"self.columns[column].filter(v, op)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_0(self):...\n",
"self.assertTrue(self.client.login(username='autotest', password='password'))\n"
] | [
"def setUp(self):...\n",
"self.assertTrue(self.client.login(username='autotest', password='password'))\n"
] | [
0,
5
] | [
"FunctionDef'",
"Expr'"
] |
[
"import stack.commands\n",
"from stack.exception import CommandError\n",
"\"\"\"string\"\"\"\n",
"def FUNC_0(self, VAR_0, VAR_1):...\n",
"VAR_2, VAR_3 = self.fillParams([('attr', None, True), ('doc', None, True)])\n",
"VAR_4 = self.db.execute(\n \"\"\"\n\t\t\tselect attr from attributes where attr='%s'\n\t\t\t\"\"\" % VAR_2)\n",
"if not VAR_4:\n",
"self.db.execute(\"\"\"\n\t\t\tdelete from attributes_doc where attr='%s'\n\t\t\t\"\"\" %\n VAR_2)\n",
"if VAR_3:\n",
"self.db.execute(\n \"\"\"\n\t\t\t\tinsert into attributes_doc\n\t\t\t\t(attr, doc)\n\t\t\t\tvalues ('%s', '%s')\n\t\t\t\t\"\"\"\n % (VAR_2, VAR_3))\n"
] | [
"import stack.commands\n",
"from stack.exception import CommandError\n",
"\"\"\"\n\tChanges a string containing documention for an attribute\n\n\t<param type='string' name='attr' optional='0'>\n\tName of the attribute\n\t</param>\n\n\t<param type='string' name='doc' optional='0'>\n\tDocumentation of the attribute\n\t</param>\n\t\n\t<example cmd='set attr doc attr=\"ssh.use_dns\" doc=\"hosts with ssh.use_dns == True will enable DNS lookups in sshd config.\"'>\n\tSets the documentation string for 'ssh.use_dns'\n\t</example>\n\n\t<related>list attr doc</related>\n\t<related>set attr</related>\n\t\"\"\"\n",
"def run(self, params, args):...\n",
"attr, doc = self.fillParams([('attr', None, True), ('doc', None, True)])\n",
"rows = self.db.execute(\n \"\"\"\n\t\t\tselect attr from attributes where attr='%s'\n\t\t\t\"\"\" % attr)\n",
"if not rows:\n",
"self.db.execute(\"\"\"\n\t\t\tdelete from attributes_doc where attr='%s'\n\t\t\t\"\"\" % attr\n )\n",
"if doc:\n",
"self.db.execute(\n \"\"\"\n\t\t\t\tinsert into attributes_doc\n\t\t\t\t(attr, doc)\n\t\t\t\tvalues ('%s', '%s')\n\t\t\t\t\"\"\"\n % (attr, doc))\n"
] | [
0,
0,
0,
0,
0,
2,
0,
2,
0,
2
] | [
"Import'",
"ImportFrom'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_7(VAR_11):...\n",
"return True\n"
] | [
"def directory_filter(_path):...\n",
"return True\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_3(self, VAR_5):...\n",
"VAR_14 = 'string'.format(VAR_5)\n",
"self.cur.execute(VAR_14)\n",
"return self.cur.fetchall()\n"
] | [
"def get_events(self, server_id):...\n",
"sql = (\n \"\"\"SELECT events.event_id as e, title, description, start_time, time_zone, (\n SELECT GROUP_CONCAT(DISTINCT username)\n FROM user_event, events\n WHERE user_event.event_id = e\n AND events.server_id = {0}\n AND user_event.attending = 1)\n AS accepted, (\n SELECT GROUP_CONCAT(DISTINCT username)\n FROM user_event, events\n WHERE user_event.event_id = e\n AND events.server_id = {0}\n AND user_event.attending = 0)\n AS declined\n FROM events\n WHERE events.server_id = {0}\n GROUP BY event_id, title, description, start_time, time_zone;\n \"\"\"\n .format(server_id))\n",
"self.cur.execute(sql)\n",
"return self.cur.fetchall()\n"
] | [
0,
4,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_8(self, VAR_4, VAR_5):...\n",
"if VAR_4 == 'ansible_group_priority':\n",
"self.set_priority(int(VAR_5))\n",
"self.vars[VAR_4] = VAR_5\n"
] | [
"def set_variable(self, key, value):...\n",
"if key == 'ansible_group_priority':\n",
"self.set_priority(int(value))\n",
"self.vars[key] = value\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'"
] |
[
"def FUNC_0():...\n",
"VAR_11 = 'string'\n"
] | [
"def update_sold_statusDONOTUSE():...\n",
"sql = \"\"\"\n DONT DO THIS UNLESS ABSOLUTELT SURE ABOUT QTY BETTER TO DO VIA IMPORT???????\n update set it.workflow_state = 'Sold'\n\n select it.item_code, bin.actual_qty\n from `tabItem` it\n right join `tabBin` bin\n on bin.item_code = it.item_code\n\n right join `zEbayListings` el\n on el.sku = it.item_code\n where el.qty =0 and bin.actual_qty =0\n \"\"\"\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
""
] | [
"@utils.synchronized('3par', external=True)...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"@VAR_0.route('/delete_video/<filename>')...\n",
"if 'username' in VAR_19:\n",
"print(VAR_19['username'], file=sys.stdout)\n",
"return 'test'\n",
"VAR_14 = CLASS_0.query.filter_by(VAR_6=session['username']).first()\n",
"VAR_15 = CLASS_1.query.filter_by(VAR_5=data.UserID, VAR_11=filename).first()\n",
"if VAR_15 != None:\n",
"os.remove('static/videos/{}'.format(VAR_4))\n",
"return \"Don't delete other people's videos!\"\n",
"VAR_1.session.delete(VAR_15)\n",
"VAR_1.session.commit()\n",
"return redirect(url_for('upload'))\n"
] | [
"@app.route('/delete_video/<filename>')...\n",
"if 'username' in session:\n",
"print(session['username'], file=sys.stdout)\n",
"return 'test'\n",
"data = users.query.filter_by(Username=session['username']).first()\n",
"video = Video.query.filter_by(UserID=data.UserID, Name=filename).first()\n",
"if video != None:\n",
"os.remove('static/videos/{}'.format(filename))\n",
"return \"Don't delete other people's videos!\"\n",
"db.session.delete(video)\n",
"db.session.commit()\n",
"return redirect(url_for('upload'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Expr'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Return'",
"Expr'",
"Expr'",
"Return'"
] |
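
Label 2 here flags `os.remove('static/videos/{}'.format(filename))`: the URL-supplied `filename` reaches the filesystem unchecked, so a `../`-style value can delete files outside the videos directory. A sketch of a containment check — the directory name is taken from the record, the helper name and everything else is illustrative:

import os

VIDEO_DIR = os.path.abspath('static/videos')

def safe_video_path(filename: str) -> str:
    candidate = os.path.abspath(os.path.join(VIDEO_DIR, filename))
    # A path that escapes VIDEO_DIR after normalisation is rejected.
    if os.path.commonpath([candidate, VIDEO_DIR]) != VIDEO_DIR:
        raise ValueError('refusing to touch a path outside %s' % VIDEO_DIR)
    return candidate
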
[
"def FUNC_0(self, VAR_4):...\n",
"VAR_11 = super(CLASS_2, self).run(VAR_4)\n",
"self.logger.print_response_code(VAR_11)\n",
"self.logger.print_headers(VAR_11.getheaders())\n",
"VAR_6 = VAR_11.read()\n",
"if self.pipe_command:\n",
"VAR_6 = self.pipe(self.pip_command, VAR_6)\n",
"if VAR_6:\n",
"self.logger.print_data(VAR_6)\n"
] | [
"def run(self, headers):...\n",
"response = super(HttpGet, self).run(headers)\n",
"self.logger.print_response_code(response)\n",
"self.logger.print_headers(response.getheaders())\n",
"data = response.read()\n",
"if self.pipe_command:\n",
"data = self.pipe(self.pip_command, data)\n",
"if data:\n",
"self.logger.print_data(data)\n"
] | [
0,
0,
0,
0,
0,
0,
1,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'"
] |
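
Label 1 in this record marks `self.pip_command`: the guard tests `self.pipe_command` but the call reads the misspelled attribute, so any configured pipe raises AttributeError. A self-contained sketch with the spelling unified; the real HttpGet superclass, logger, and pipe mechanics are not part of this record:

import subprocess

class HttpGetSketch:
    """Minimal stand-in showing the corrected attribute name."""

    def __init__(self, pipe_command=None):
        self.pipe_command = pipe_command  # single spelling throughout

    def pipe(self, command, data):
        # Illustrative pipe: feed the body through a shell command.
        proc = subprocess.run(command, input=data, shell=True, capture_output=True)
        return proc.stdout

    def postprocess(self, data: bytes) -> bytes:
        if self.pipe_command:
            # The record reads self.pip_command here, which can only
            # raise AttributeError; the guard and the call must agree.
            data = self.pipe(self.pipe_command, data)
        return data
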
[
"@app.route('/auth/register')...\n",
"return FUNC_7()\n"
] | [
"@app.route('/auth/register')...\n",
"return render_register()\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"@login_required()...\n",
"VAR_5 = get_object_or_404(DataGroup, VAR_2=pk)\n",
"VAR_5.doc_types = DocumentType.objects.filter(group_type=dg.group_type)\n",
"VAR_6 = VAR_5.datadocument_set.get_queryset()\n",
"VAR_7 = ProductDocument.objects.filter(document__in=docs)\n",
"VAR_8 = VAR_0.GET.get('page')\n",
"VAR_9 = Paginator(VAR_6, 50)\n",
"VAR_10 = settings.MEDIA_URL + str(VAR_5.fs_id)\n",
"VAR_11 = ExtractedText.objects.filter(data_document_id__in=docs).first()\n",
"VAR_12 = {'datagroup': VAR_5, 'documents': VAR_9.page(1 if VAR_8 is None else\n VAR_8), 'all_documents': VAR_6, 'extract_fields': VAR_5.\n get_extracted_template_fieldnames(), 'ext_err': {}, 'clean_comp_err': {\n }, 'extract_form': include_extract_form(VAR_5), 'clean_comp_data_form':\n include_clean_comp_data_form(VAR_5), 'bulk': len(VAR_6) - len(VAR_7),\n 'msg': ''}\n",
"if VAR_0.method == 'POST' and 'upload' in VAR_0.POST:\n",
"VAR_33 = [VAR_43 for VAR_62 in VAR_6 for VAR_43 in VAR_0.FILES.getlist(\n 'multifiles') if VAR_43.name == VAR_62.filename]\n",
"if VAR_0.method == 'POST' and 'extract_button' in VAR_0.POST:\n",
"if not VAR_33:\n",
"VAR_35 = ExtractionScriptForm(VAR_0.POST, VAR_0.FILES, dg_type=dg.type)\n",
"if VAR_0.method == 'POST' and 'bulk' in VAR_0.POST:\n",
"VAR_12['msg'] = 'There are no matching records in the selected directory.'\n",
"VAR_34 = zipfile.ZipFile(VAR_5.zip_file, 'a', zipfile.ZIP_DEFLATED)\n",
"if VAR_35.is_valid():\n",
"VAR_36 = set(VAR_6.values_list('pk', flat=True))\n",
"if VAR_0.method == 'POST' and 'clean_comp_data_button' in VAR_0.POST:\n",
"return render(VAR_0, VAR_1, VAR_12)\n",
"while VAR_33:\n",
"VAR_46 = VAR_0.FILES.get('extract_file')\n",
"VAR_37 = set(VAR_7.values_list('document_id', flat=True))\n",
"VAR_40 = CleanCompDataForm(VAR_0.POST, VAR_0.FILES)\n",
"return render(VAR_0, VAR_1, VAR_12)\n",
"VAR_43 = VAR_33.pop(0)\n",
"VAR_34.close()\n",
"VAR_47 = int(VAR_0.POST['script_selection'])\n",
"VAR_38 = DataDocument.objects.filter(pk__in=list(a - b))\n",
"if VAR_40.is_valid():\n",
"VAR_24 = DataDocument.objects.get(filename=f.name, data_group=dg.pk)\n",
"VAR_18 = include_extract_form(VAR_5)\n",
"VAR_25 = Script.objects.get(VAR_2=script_pk)\n",
"VAR_39 = Product.objects.all().aggregate(Max('id'))['id__max'] + 1\n",
"VAR_47 = int(VAR_0.POST['script_selection'])\n",
"VAR_12['clean_comp_data_form'].collapsed = False\n",
"if VAR_24.matched:\n",
"VAR_12['all_documents'] = VAR_5.datadocument_set.get_queryset()\n",
"VAR_48 = [x.decode('ascii', 'ignore') for x in VAR_46.readlines()]\n",
"for VAR_24 in VAR_38:\n",
"VAR_25 = Script.objects.get(VAR_2=script_pk)\n",
"VAR_24.matched = True\n",
"VAR_12['extract_form'] = VAR_18\n",
"VAR_49 = csv.DictReader(VAR_48)\n",
"VAR_12['bulk'] = 0\n",
"VAR_11 = ExtractedText.objects.get(data_document_id=doc.id)\n",
"VAR_75 = None\n",
"if not VAR_75:\n",
"VAR_46 = VAR_0.FILES.get('clean_comp_data_file')\n",
"VAR_24.save()\n",
"VAR_12['msg'] = 'Matching records uploaded successfully.'\n",
"VAR_50 = list(set(VAR_5.get_extracted_template_fieldnames()) - set(VAR_49.\n fieldnames))\n",
"if VAR_11:\n",
"if VAR_24.title:\n",
"VAR_54 = Product.objects.create(title=new_prod_title, upc=f'stub_{stub}',\n data_source_id=doc.data_group.data_source_id)\n",
"VAR_48 = [x.decode('ascii', 'ignore') for x in VAR_46.readlines()]\n",
"VAR_44 = FileSystemStorage(VAR_10 + '/pdf')\n",
"if VAR_50:\n",
"if VAR_11.prod_name:\n",
"VAR_75 = '%s stub' % VAR_24.title\n",
"VAR_75 = 'unknown'\n",
"ProductDocument.objects.create(VAR_54=product, document=doc)\n",
"VAR_49 = csv.DictReader(VAR_48)\n",
"VAR_45 = VAR_24.get_abstract_filename()\n",
"VAR_12['msg'\n ] = f'The following columns need to be added or renamed in the csv: {VAR_50}'\n",
"VAR_51 = []\n",
"VAR_75 = VAR_11.prod_name\n",
"VAR_75 = None\n",
"VAR_39 += 1\n",
"VAR_50 = list(set(VAR_5.get_clean_comp_data_fieldnames()) - set(VAR_49.\n fieldnames))\n",
"VAR_44.save(VAR_45, VAR_43)\n",
"return render(VAR_0, VAR_1, VAR_12)\n",
"VAR_52, VAR_53 = get_extracted_models(VAR_5.type)\n",
"if VAR_50:\n",
"VAR_34.write(VAR_10 + '/pdf/' + VAR_45, VAR_45)\n",
"for VAR_73, VAR_64 in enumerate(csv.DictReader(VAR_48)):\n",
"VAR_12['clean_comp_data_form'].collapsed = False\n",
"VAR_51 = []\n",
"VAR_62 = VAR_6.get(VAR_2=int(row['data_document_id']))\n",
"if VAR_12['ext_err']:\n",
"VAR_12['msg'\n ] = f'The following columns need to be added or renamed in the csv: {VAR_50}'\n",
"for VAR_73, VAR_64 in enumerate(csv.DictReader(VAR_48)):\n",
"VAR_62.raw_category = VAR_64.pop('raw_category')\n",
"[e[1].delete() for e in VAR_51]\n",
"if not VAR_12['ext_err']:\n",
"return render(VAR_0, VAR_1, VAR_12)\n",
"if VAR_12['clean_comp_err']:\n",
"VAR_76 = ExtractedChemical.objects.get(rawchem_ptr=int(row['id']))\n",
"VAR_76 = None\n",
"VAR_77 = Ingredient.objects.get(rawchem_ptr=extracted_chemical.rawchem_ptr)\n",
"VAR_77 = Ingredient(rawchem_ptr=extracted_chemical.rawchem_ptr)\n",
"VAR_77.lower_wf_analysis = VAR_64['lower_wf_analysis']\n",
"VAR_63 = VAR_0.POST.get('weight_fraction_type', None)\n",
"return render(VAR_0, VAR_1, VAR_12)\n",
"for VAR_24, VAR_56, VAR_74 in VAR_51:\n",
"VAR_12['clean_comp_data_form'].collapsed = False\n",
"if not VAR_12['clean_comp_err']:\n",
"VAR_12['clean_comp_err'][VAR_73 + 1] = {'id': [\n 'No ExtractedChemical matches rawchem_ptr_id ' + VAR_64['id']]}\n",
"VAR_77.central_wf_analysis = VAR_64['central_wf_analysis']\n",
"if VAR_63:\n",
"VAR_24.extracted = True\n",
"VAR_44 = FileSystemStorage(VAR_10)\n",
"return render(VAR_0, VAR_1, VAR_12)\n",
"for VAR_77 in VAR_51:\n",
"print('No ExtractedChemical matches rawchem_ptr_id %s' % VAR_64['id'])\n",
"VAR_77.upper_wf_analysis = VAR_64['upper_wf_analysis']\n",
"VAR_68 = 'weight_fraction_type'\n",
"VAR_11, VAR_27 = VAR_52.objects.get_or_create(data_document=d,\n extraction_script=script)\n",
"VAR_24.save()\n",
"VAR_44.save(str(VAR_5) + '_extracted.csv', VAR_46)\n",
"VAR_77.save()\n",
"VAR_12['msg'\n ] = f'{len(VAR_51)} clean composition data records uploaded successfully.'\n",
"VAR_77.script = VAR_25\n",
"VAR_64[VAR_68] = WeightFractionType.objects.get(VAR_2=int(wft))\n",
"if not VAR_27 and VAR_11.one_to_one_check(VAR_64):\n",
"VAR_56.save()\n",
"VAR_12['msg'] = f'{len(VAR_51)} extracted records uploaded successfully.'\n",
"VAR_12['clean_comp_data_form'] = include_clean_comp_data_form(VAR_5)\n",
"VAR_77.full_clean()\n",
"VAR_12['clean_comp_err'][VAR_73 + 1] = e.message_dict\n",
"VAR_51.append(VAR_77)\n",
"VAR_69 = int(VAR_64['unit_type'])\n",
"VAR_71 = 'cat_code' if hasattr(VAR_11, 'cat_code') else 'prod_name'\n",
"if VAR_27:\n",
"VAR_74.save()\n",
"VAR_12['extract_form'] = include_extract_form(VAR_5)\n",
"VAR_64['unit_type'] = UnitType.objects.get(VAR_2=unit_type_id)\n",
"VAR_72 = ['must be 1:1 with \"data_document_id\".']\n",
"update_fields(VAR_64, VAR_11)\n",
"VAR_64['extracted_text'] = VAR_11\n",
"VAR_70 = VAR_64['ingredient_rank']\n",
"VAR_12['ext_err'][VAR_73 + 1] = {VAR_71: VAR_72}\n",
"if VAR_53 == ExtractedListPresence:\n",
"VAR_64['ingredient_rank'] = None if VAR_70 == '' else VAR_70\n",
"VAR_64['extracted_cpcat'] = VAR_11.extractedtext_ptr\n",
"VAR_64 = clean_dict(VAR_64, VAR_53)\n",
"VAR_11.full_clean()\n",
"VAR_12['ext_err'][VAR_73 + 1] = e.message_dict\n",
"VAR_11.save()\n",
"VAR_74 = VAR_53(**row)\n",
"VAR_74.full_clean()\n",
"VAR_51.append((VAR_62, VAR_11, VAR_74))\n"
] | [
"@login_required()...\n",
"dg = get_object_or_404(DataGroup, pk=pk)\n",
"dg.doc_types = DocumentType.objects.filter(group_type=dg.group_type)\n",
"docs = dg.datadocument_set.get_queryset()\n",
"prod_link = ProductDocument.objects.filter(document__in=docs)\n",
"page = request.GET.get('page')\n",
"paginator = Paginator(docs, 50)\n",
"store = settings.MEDIA_URL + str(dg.fs_id)\n",
"ext = ExtractedText.objects.filter(data_document_id__in=docs).first()\n",
"context = {'datagroup': dg, 'documents': paginator.page(1 if page is None else\n page), 'all_documents': docs, 'extract_fields': dg.\n get_extracted_template_fieldnames(), 'ext_err': {}, 'clean_comp_err': {\n }, 'extract_form': include_extract_form(dg), 'clean_comp_data_form':\n include_clean_comp_data_form(dg), 'bulk': len(docs) - len(prod_link),\n 'msg': ''}\n",
"if request.method == 'POST' and 'upload' in request.POST:\n",
"matched_files = [f for d in docs for f in request.FILES.getlist(\n 'multifiles') if f.name == d.filename]\n",
"if request.method == 'POST' and 'extract_button' in request.POST:\n",
"if not matched_files:\n",
"extract_form = ExtractionScriptForm(request.POST, request.FILES, dg_type=dg\n .type)\n",
"if request.method == 'POST' and 'bulk' in request.POST:\n",
"context['msg'] = 'There are no matching records in the selected directory.'\n",
"zf = zipfile.ZipFile(dg.zip_file, 'a', zipfile.ZIP_DEFLATED)\n",
"if extract_form.is_valid():\n",
"a = set(docs.values_list('pk', flat=True))\n",
"if request.method == 'POST' and 'clean_comp_data_button' in request.POST:\n",
"return render(request, template_name, context)\n",
"while matched_files:\n",
"csv_file = request.FILES.get('extract_file')\n",
"b = set(prod_link.values_list('document_id', flat=True))\n",
"clean_comp_data_form = CleanCompDataForm(request.POST, request.FILES)\n",
"return render(request, template_name, context)\n",
"f = matched_files.pop(0)\n",
"zf.close()\n",
"script_pk = int(request.POST['script_selection'])\n",
"docs_needing_products = DataDocument.objects.filter(pk__in=list(a - b))\n",
"if clean_comp_data_form.is_valid():\n",
"doc = DataDocument.objects.get(filename=f.name, data_group=dg.pk)\n",
"form = include_extract_form(dg)\n",
"script = Script.objects.get(pk=script_pk)\n",
"stub = Product.objects.all().aggregate(Max('id'))['id__max'] + 1\n",
"script_pk = int(request.POST['script_selection'])\n",
"context['clean_comp_data_form'].collapsed = False\n",
"if doc.matched:\n",
"context['all_documents'] = dg.datadocument_set.get_queryset()\n",
"info = [x.decode('ascii', 'ignore') for x in csv_file.readlines()]\n",
"for doc in docs_needing_products:\n",
"script = Script.objects.get(pk=script_pk)\n",
"doc.matched = True\n",
"context['extract_form'] = form\n",
"table = csv.DictReader(info)\n",
"context['bulk'] = 0\n",
"ext = ExtractedText.objects.get(data_document_id=doc.id)\n",
"new_prod_title = None\n",
"if not new_prod_title:\n",
"csv_file = request.FILES.get('clean_comp_data_file')\n",
"doc.save()\n",
"context['msg'] = 'Matching records uploaded successfully.'\n",
"missing = list(set(dg.get_extracted_template_fieldnames()) - set(table.\n fieldnames))\n",
"if ext:\n",
"if doc.title:\n",
"product = Product.objects.create(title=new_prod_title, upc=f'stub_{stub}',\n data_source_id=doc.data_group.data_source_id)\n",
"info = [x.decode('ascii', 'ignore') for x in csv_file.readlines()]\n",
"fs = FileSystemStorage(store + '/pdf')\n",
"if missing:\n",
"if ext.prod_name:\n",
"new_prod_title = '%s stub' % doc.title\n",
"new_prod_title = 'unknown'\n",
"ProductDocument.objects.create(product=product, document=doc)\n",
"table = csv.DictReader(info)\n",
"afn = doc.get_abstract_filename()\n",
"context['msg'\n ] = f'The following columns need to be added or renamed in the csv: {missing}'\n",
"good_records = []\n",
"new_prod_title = ext.prod_name\n",
"new_prod_title = None\n",
"stub += 1\n",
"missing = list(set(dg.get_clean_comp_data_fieldnames()) - set(table.fieldnames)\n )\n",
"fs.save(afn, f)\n",
"return render(request, template_name, context)\n",
"ext_parent, ext_child = get_extracted_models(dg.type)\n",
"if missing:\n",
"zf.write(store + '/pdf/' + afn, afn)\n",
"for i, row in enumerate(csv.DictReader(info)):\n",
"context['clean_comp_data_form'].collapsed = False\n",
"good_records = []\n",
"d = docs.get(pk=int(row['data_document_id']))\n",
"if context['ext_err']:\n",
"context['msg'\n ] = f'The following columns need to be added or renamed in the csv: {missing}'\n",
"for i, row in enumerate(csv.DictReader(info)):\n",
"d.raw_category = row.pop('raw_category')\n",
"[e[1].delete() for e in good_records]\n",
"if not context['ext_err']:\n",
"return render(request, template_name, context)\n",
"if context['clean_comp_err']:\n",
"extracted_chemical = ExtractedChemical.objects.get(rawchem_ptr=int(row['id']))\n",
"extracted_chemical = None\n",
"ingredient = Ingredient.objects.get(rawchem_ptr=extracted_chemical.rawchem_ptr)\n",
"ingredient = Ingredient(rawchem_ptr=extracted_chemical.rawchem_ptr)\n",
"ingredient.lower_wf_analysis = row['lower_wf_analysis']\n",
"wft = request.POST.get('weight_fraction_type', None)\n",
"return render(request, template_name, context)\n",
"for doc, text, record in good_records:\n",
"context['clean_comp_data_form'].collapsed = False\n",
"if not context['clean_comp_err']:\n",
"context['clean_comp_err'][i + 1] = {'id': [\n 'No ExtractedChemical matches rawchem_ptr_id ' + row['id']]}\n",
"ingredient.central_wf_analysis = row['central_wf_analysis']\n",
"if wft:\n",
"doc.extracted = True\n",
"fs = FileSystemStorage(store)\n",
"return render(request, template_name, context)\n",
"for ingredient in good_records:\n",
"print('No ExtractedChemical matches rawchem_ptr_id %s' % row['id'])\n",
"ingredient.upper_wf_analysis = row['upper_wf_analysis']\n",
"w = 'weight_fraction_type'\n",
"ext, created = ext_parent.objects.get_or_create(data_document=d,\n extraction_script=script)\n",
"doc.save()\n",
"fs.save(str(dg) + '_extracted.csv', csv_file)\n",
"ingredient.save()\n",
"context['msg'] = (\n f'{len(good_records)} clean composition data records uploaded successfully.'\n )\n",
"ingredient.script = script\n",
"row[w] = WeightFractionType.objects.get(pk=int(wft))\n",
"if not created and ext.one_to_one_check(row):\n",
"text.save()\n",
"context['msg'\n ] = f'{len(good_records)} extracted records uploaded successfully.'\n",
"context['clean_comp_data_form'] = include_clean_comp_data_form(dg)\n",
"ingredient.full_clean()\n",
"context['clean_comp_err'][i + 1] = e.message_dict\n",
"good_records.append(ingredient)\n",
"unit_type_id = int(row['unit_type'])\n",
"col = 'cat_code' if hasattr(ext, 'cat_code') else 'prod_name'\n",
"if created:\n",
"record.save()\n",
"context['extract_form'] = include_extract_form(dg)\n",
"row['unit_type'] = UnitType.objects.get(pk=unit_type_id)\n",
"err_msg = ['must be 1:1 with \"data_document_id\".']\n",
"update_fields(row, ext)\n",
"row['extracted_text'] = ext\n",
"rank = row['ingredient_rank']\n",
"context['ext_err'][i + 1] = {col: err_msg}\n",
"if ext_child == ExtractedListPresence:\n",
"row['ingredient_rank'] = None if rank == '' else rank\n",
"row['extracted_cpcat'] = ext.extractedtext_ptr\n",
"row = clean_dict(row, ext_child)\n",
"ext.full_clean()\n",
"context['ext_err'][i + 1] = e.message_dict\n",
"ext.save()\n",
"record = ext_child(**row)\n",
"record.full_clean()\n",
"good_records.append((d, ext, record))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"AugAssign'",
"Assign'",
"Expr'",
"Return'",
"Assign'",
"Condition",
"Expr'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"For",
"Assign'",
"Expr'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"For",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"For",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_12(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_15 = None\n",
"self._timers_dying = True\n",
"for t in self._timers:\n",
"t.cancel()\n",
"VAR_15, self._timers = self._timers, []\n",
"for t in VAR_15:\n",
"t.join(timeout=5)\n",
"if t.isAlive():\n",
"logging.error('Timer thread did not terminate fast enough: %s', t)\n"
] | [
"def cancel_all_timers(self):...\n",
"\"\"\"docstring\"\"\"\n",
"timers = None\n",
"self._timers_dying = True\n",
"for t in self._timers:\n",
"t.cancel()\n",
"timers, self._timers = self._timers, []\n",
"for t in timers:\n",
"t.join(timeout=5)\n",
"if t.isAlive():\n",
"logging.error('Timer thread did not terminate fast enough: %s', t)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"For",
"Expr'",
"Assign'",
"For",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_4(VAR_0):...\n",
"VAR_0.clear_filters()\n",
"for arg in request.args:\n",
"VAR_8 = re.findall('_flt_(\\\\d)_(.*)', arg)\n",
"if VAR_8:\n",
"VAR_0.add_filter_index(VAR_8[0][1], int(VAR_8[0][0]), request.args.get(arg))\n"
] | [
"def get_filter_args(filters):...\n",
"filters.clear_filters()\n",
"for arg in request.args:\n",
"re_match = re.findall('_flt_(\\\\d)_(.*)', arg)\n",
"if re_match:\n",
"filters.add_filter_index(re_match[0][1], int(re_match[0][0]), request.args.\n get(arg))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"For",
"Assign'",
"Condition",
"Expr'"
] |
[
"async def FUNC_3(self, VAR_0):...\n",
"VAR_9 = {'username': self.username, 'email': self.email, 'password': VAR_0}\n",
"VAR_10 = await VAR_2.users.insert_one(VAR_9)\n"
] | [
"async def do_insert(self, hashed_password):...\n",
"document = {'username': self.username, 'email': self.email, 'password':\n hashed_password}\n",
"result = await async_db.users.insert_one(document)\n"
] | [
0,
0,
0
] | [
"AsyncFunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_21(self):...\n",
"self.run_test_case(self.scenario.list_available_images())\n"
] | [
"def test_available_images(self):...\n",
"self.run_test_case(self.scenario.list_available_images())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_9(self, *VAR_15, **VAR_75):...\n",
""
] | [
"def newfn(self, *a, **env):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_3(self):...\n",
"VAR_6 = {'message': '', 'title': 'Config: Chrome Infra Monitoring Proxy'}\n",
"VAR_7 = common.MonAcqData.get_by_id(common.CONFIG_DATA_KEY)\n",
"if VAR_7:\n",
"self.setParams(VAR_6, VAR_7)\n",
"self.render_response('set_credentials.html', **params)\n"
] | [
"def get(self):...\n",
"params = {'message': '', 'title': 'Config: Chrome Infra Monitoring Proxy'}\n",
"data = common.MonAcqData.get_by_id(common.CONFIG_DATA_KEY)\n",
"if data:\n",
"self.setParams(params, data)\n",
"self.render_response('set_credentials.html', **params)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_6(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_22 = self.request.headers.get('X-XSRF-TOKEN')\n",
"if not VAR_22:\n",
"VAR_22 = self.get_argument('xsrf-token', default=None)\n",
"if not VAR_22:\n",
"if self.xsrf_token != VAR_22:\n"
] | [
"def check_xsrf_cookie(self):...\n",
"\"\"\"docstring\"\"\"\n",
"token = self.request.headers.get('X-XSRF-TOKEN')\n",
"if not token:\n",
"token = self.get_argument('xsrf-token', default=None)\n",
"if not token:\n",
"if self.xsrf_token != token:\n"
] | [
0,
0,
0,
0,
0,
0,
5
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition"
] |
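
Label 5 on the closing `if self.xsrf_token != token:` plausibly flags the non-constant-time comparison of a secret token — an inference from the label alone, since the handler body after the comparison is elided in this record. The standard fix is a constant-time compare:

import hmac

def tokens_match(expected: str, supplied: str) -> bool:
    # hmac.compare_digest runs in time independent of where the
    # two strings first differ, unlike != on str.
    return hmac.compare_digest(expected, supplied)
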
[
"def FUNC_5(self):...\n",
""
] | [
"def Start(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_1(self):...\n",
"VAR_8 = Event.objects.order_by('-publish_timestamp')\n",
"VAR_9 = self.request.GET.get('tag')\n",
"if VAR_9 is not None:\n",
"VAR_8 = VAR_8.filter(tags__id=tag)\n",
"VAR_10 = self.request.GET.get('org')\n",
"if VAR_10 is not None:\n",
"VAR_8 = VAR_8.filter(orgc=org)\n",
"VAR_11 = self.request.GET.get('level')\n",
"if VAR_11 is not None:\n",
"VAR_8 = VAR_8.filter(threat_level_id=level)\n",
"VAR_12 = self.request.GET.get('keyword')\n",
"if VAR_12 is not None:\n",
"VAR_8 = VAR_8.filter(Q(info__icontains=keyword)).order_by('-publish_timestamp')\n",
"return VAR_8\n"
] | [
"def get_queryset(self):...\n",
"query = Event.objects.order_by('-publish_timestamp')\n",
"tag = self.request.GET.get('tag')\n",
"if tag is not None:\n",
"query = query.filter(tags__id=tag)\n",
"org = self.request.GET.get('org')\n",
"if org is not None:\n",
"query = query.filter(orgc=org)\n",
"level = self.request.GET.get('level')\n",
"if level is not None:\n",
"query = query.filter(threat_level_id=level)\n",
"keyword = self.request.GET.get('keyword')\n",
"if keyword is not None:\n",
"query = query.filter(Q(info__icontains=keyword)).order_by('-publish_timestamp')\n",
"return query\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_32(*VAR_26):...\n",
""
] | [
"def mocked_check(*_args):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_10(self, *VAR_11, **VAR_12):...\n",
"for VAR_9 in VAR_11:\n",
"self._set_params_item(VAR_9)\n",
"for VAR_10, VAR_9 in VAR_12.items():\n",
"self._set_params_item(VAR_9, VAR_10=name)\n"
] | [
"def set_params(self, *params, **kwparams):...\n",
"for item in params:\n",
"self._set_params_item(item)\n",
"for name, item in kwparams.items():\n",
"self._set_params_item(item, name=name)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Expr'",
"For",
"Expr'"
] |
[
"def FUNC_7(VAR_6, VAR_1):...\n",
"VAR_1.execute(\n 'UPDATE video SET position = position - 1 WHERE position > {removed_position}'\n .format(VAR_6=removed_position))\n"
] | [
"def update_video_positions(removed_position, db):...\n",
"db.execute(\n 'UPDATE video SET position = position - 1 WHERE position > {removed_position}'\n .format(removed_position=removed_position))\n"
] | [
0,
4
] | [
"FunctionDef'",
"Expr'"
] |
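
Label 4 again marks SQL assembled with `str.format`; even though `removed_position` should be an integer here, nothing in the function enforces that. A sketch with the value bound as a parameter — the `?` placeholder assumes an sqlite3-style DB-API connection, which is consistent with the bare `db.execute` call in the record but not confirmed by it:

def update_video_positions(removed_position, db):
    # The position arrives as a bound value, never as SQL text.
    db.execute('UPDATE video SET position = position - 1 WHERE position > ?',
               (removed_position,))
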
[
"def FUNC_0(self):...\n",
"self._mox = mox.Mox()\n",
"self.mock(logging, 'error', lambda *_: None)\n",
"self.mock(logging, 'exception', lambda *_: None)\n",
"self.mock(logging, 'info', lambda *_: None)\n",
"self.mock(logging, 'warning', lambda *_: None)\n",
"self._mox.StubOutWithMock(time, 'sleep')\n",
"self._mox.StubOutWithMock(urllib2, 'urlopen')\n"
] | [
"def setUp(self):...\n",
"self._mox = mox.Mox()\n",
"self.mock(logging, 'error', lambda *_: None)\n",
"self.mock(logging, 'exception', lambda *_: None)\n",
"self.mock(logging, 'info', lambda *_: None)\n",
"self.mock(logging, 'warning', lambda *_: None)\n",
"self._mox.StubOutWithMock(time, 'sleep')\n",
"self._mox.StubOutWithMock(urllib2, 'urlopen')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@api.public...\n",
"self.response.write(ipaddr.ip_to_string(api.get_peer_ip()))\n"
] | [
"@api.public...\n",
"self.response.write(ipaddr.ip_to_string(api.get_peer_ip()))\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"def FUNC_1(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return 'Running check for remote component %s with pid %s on host %s' % (self\n .comp_name, self.pid, self.hostname)\n"
] | [
"def info(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return 'Running check for remote component %s with pid %s on host %s' % (self\n .comp_name, self.pid, self.hostname)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_6(self):...\n",
"self.parse_args()\n",
"self.sanitize_fields()\n",
"self.extract_tables()\n",
"self.set_optional_columns()\n",
"self.build_conditions()\n",
"VAR_2 = frappe._dict()\n",
"if self.with_childnames:\n",
"for t in self.tables:\n",
"VAR_2.tables = self.tables[0]\n",
"if t != '`tab' + self.doctype + '`':\n",
"for child in self.tables[1:]:\n",
"self.fields.append(t + \".name as '%s:name'\" % t[4:-1])\n",
"VAR_2.tables += ' {join} {child} on ({child}.parent = {main}.name)'.format(\n VAR_21=self.join, child=child, main=self.tables[0])\n",
"if self.grouped_or_conditions:\n",
"self.conditions.append('({0})'.format(' or '.join(self.grouped_or_conditions)))\n",
"VAR_2.conditions = ' and '.join(self.conditions)\n",
"if self.or_conditions:\n",
"VAR_2.conditions += (' or ' if VAR_2.conditions else '') + ' or '.join(self\n .or_conditions)\n",
"self.set_field_tables()\n",
"VAR_2.fields = ', '.join(self.fields)\n",
"self.set_order_by(VAR_2)\n",
"self.validate_order_by_and_group_by(VAR_2.order_by)\n",
"VAR_2.order_by = VAR_2.order_by and ' order by ' + VAR_2.order_by or ''\n",
"self.validate_order_by_and_group_by(self.group_by)\n",
"VAR_2.group_by = self.group_by and ' group by ' + self.group_by or ''\n",
"return VAR_2\n"
] | [
"def prepare_args(self):...\n",
"self.parse_args()\n",
"self.sanitize_fields()\n",
"self.extract_tables()\n",
"self.set_optional_columns()\n",
"self.build_conditions()\n",
"args = frappe._dict()\n",
"if self.with_childnames:\n",
"for t in self.tables:\n",
"args.tables = self.tables[0]\n",
"if t != '`tab' + self.doctype + '`':\n",
"for child in self.tables[1:]:\n",
"self.fields.append(t + \".name as '%s:name'\" % t[4:-1])\n",
"args.tables += ' {join} {child} on ({child}.parent = {main}.name)'.format(join\n =self.join, child=child, main=self.tables[0])\n",
"if self.grouped_or_conditions:\n",
"self.conditions.append('({0})'.format(' or '.join(self.grouped_or_conditions)))\n",
"args.conditions = ' and '.join(self.conditions)\n",
"if self.or_conditions:\n",
"args.conditions += (' or ' if args.conditions else '') + ' or '.join(self.\n or_conditions)\n",
"self.set_field_tables()\n",
"args.fields = ', '.join(self.fields)\n",
"self.set_order_by(args)\n",
"self.validate_order_by_and_group_by(args.order_by)\n",
"args.order_by = args.order_by and ' order by ' + args.order_by or ''\n",
"self.validate_order_by_and_group_by(self.group_by)\n",
"args.group_by = self.group_by and ' group by ' + self.group_by or ''\n",
"return args\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"For",
"Assign'",
"Condition",
"For",
"Expr'",
"AugAssign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"AugAssign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(VAR_2: str, VAR_3: str):...\n",
"if not VAR_2 == VAR_3:\n",
"return False\n",
"if len(VAR_2) < 6:\n",
"return False\n",
"if VAR_2.isupper():\n",
"return False\n",
"if VAR_2.islower():\n",
"return False\n",
"return True\n"
] | [
"def check_password_conformity(pw1: str, pw2: str):...\n",
"if not pw1 == pw2:\n",
"return False\n",
"if len(pw1) < 6:\n",
"return False\n",
"if pw1.isupper():\n",
"return False\n",
"if pw1.islower():\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_10(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_21 = self.agent.type_to_payload(\n 'deregisterInstancesFromGoogleLoadBalancerDescription', {\n 'loadBalancerNames': [self.__use_lb_name], 'instanceIds': self.\n use_instance_names[:2], 'region': self.bindings['TEST_GCE_REGION'],\n 'credentials': self.bindings['GCE_CREDENTIALS']})\n",
"VAR_20 = gcp.GceContractBuilder(self.gce_observer)\n",
"VAR_20.new_clause_builder('Instances not in Target Pool', retryable_for_secs=5\n ).list_resources('target-pools', VAR_35=['--region', self.bindings[\n 'TEST_GCE_REGION']]).excludes_pred_list([jc.PathContainsPredicate(\n 'name', self.__use_lb_tp_name), jc.PathElementsContainPredicate(\n 'instances', self.use_instance_names[0]), jc.\n PathElementsContainPredicate('instances', self.use_instance_names[1])])\n",
"return st.OperationContract(self.new_post_operation(title=\n 'deregister_load_balancer_instances', data=payload, VAR_29='ops'),\n contract=builder.build())\n"
] | [
"def deregister_load_balancer_instances(self):...\n",
"\"\"\"docstring\"\"\"\n",
"payload = self.agent.type_to_payload(\n 'deregisterInstancesFromGoogleLoadBalancerDescription', {\n 'loadBalancerNames': [self.__use_lb_name], 'instanceIds': self.\n use_instance_names[:2], 'region': self.bindings['TEST_GCE_REGION'],\n 'credentials': self.bindings['GCE_CREDENTIALS']})\n",
"builder = gcp.GceContractBuilder(self.gce_observer)\n",
"builder.new_clause_builder('Instances not in Target Pool', retryable_for_secs=5\n ).list_resources('target-pools', extra_args=['--region', self.bindings[\n 'TEST_GCE_REGION']]).excludes_pred_list([jc.PathContainsPredicate(\n 'name', self.__use_lb_tp_name), jc.PathElementsContainPredicate(\n 'instances', self.use_instance_names[0]), jc.\n PathElementsContainPredicate('instances', self.use_instance_names[1])])\n",
"return st.OperationContract(self.new_post_operation(title=\n 'deregister_load_balancer_instances', data=payload, path='ops'),\n contract=builder.build())\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"@property...\n",
"return self.exists and not os.access(self.file, os.W_OK)\n"
] | [
"@property...\n",
"return self.exists and not os.access(self.file, os.W_OK)\n"
] | [
0,
7
] | [
"Condition",
"Return'"
] |
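
Label 7 lands on the `os.access(self.file, os.W_OK)` test, which is the textbook check/use race: the answer can go stale before the file is actually opened. That this is what the label means is an inference. An EAFP sketch that asks the question at use time instead:

def is_write_protected(path: str) -> bool:
    try:
        with open(path, 'r+'):
            return False
    except FileNotFoundError:
        # Mirrors `self.exists and ...`: a missing file is not protected.
        return False
    except PermissionError:
        return True
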
[
"def FUNC_0(VAR_6):...\n",
"if call(VAR_6['cmd'][1]['check'], shell=True) == 0:\n",
"return True\n",
"return False\n"
] | [
"def run_component_check(comp):...\n",
"if call(comp['cmd'][1]['check'], shell=True) == 0:\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def __repr__(self):...\n",
"return self.get_name()\n"
] | [
"def __repr__(self):...\n",
"return self.get_name()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_2(VAR_7=None, VAR_8=0, VAR_9=0):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_7 is None:\n",
"return True\n",
"return False\n",
"VAR_21 = re.match('^([^;]+)(; length=([0-9]+))?$', VAR_7, re.IGNORECASE)\n",
"VAR_22 = parse_http_date(VAR_21.group(1))\n",
"VAR_23 = VAR_21.group(3)\n",
"if VAR_23 and int(VAR_23) != VAR_9:\n",
"if int(VAR_8) > VAR_22:\n"
] | [
"def was_modified_since(header=None, mtime=0, size=0):...\n",
"\"\"\"docstring\"\"\"\n",
"if header is None:\n",
"return True\n",
"return False\n",
"matches = re.match('^([^;]+)(; length=([0-9]+))?$', header, re.IGNORECASE)\n",
"header_mtime = parse_http_date(matches.group(1))\n",
"header_len = matches.group(3)\n",
"if header_len and int(header_len) != size:\n",
"if int(mtime) > header_mtime:\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition"
] |
[
"def FUNC_3(VAR_5, VAR_6=''):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_18 = urllib2.Request(VAR_5)\n",
"if VAR_6:\n",
"VAR_18.add_header('User-Agent', VAR_6)\n",
"VAR_20 = VAR_8(VAR_18)\n",
"VAR_0.error('Unable to read from URL %s.' % VAR_5)\n",
"VAR_11 = FUNC_1(VAR_21, VAR_3=True)\n",
"VAR_21 = tempfile.mkstemp(prefix='bibclassify.')[1]\n",
"return None\n",
"os.remove(VAR_21)\n",
"VAR_22 = open(VAR_21, 'w')\n",
"VAR_12 = len(VAR_11)\n",
"VAR_22.write(VAR_20.read())\n",
"VAR_13 = 0\n",
"VAR_22.close()\n",
"for line in VAR_11:\n",
"VAR_13 += len(re.findall('\\\\S+', line))\n",
"VAR_0.info('Remote file has %d lines and %d words.' % (VAR_12, VAR_13))\n",
"return VAR_11\n"
] | [
"def text_lines_from_url(url, user_agent=''):...\n",
"\"\"\"docstring\"\"\"\n",
"request = urllib2.Request(url)\n",
"if user_agent:\n",
"request.add_header('User-Agent', user_agent)\n",
"distant_stream = urlopen(request)\n",
"log.error('Unable to read from URL %s.' % url)\n",
"lines = text_lines_from_local_file(local_file, remote=True)\n",
"local_file = tempfile.mkstemp(prefix='bibclassify.')[1]\n",
"return None\n",
"os.remove(local_file)\n",
"local_stream = open(local_file, 'w')\n",
"line_nb = len(lines)\n",
"local_stream.write(distant_stream.read())\n",
"word_nb = 0\n",
"local_stream.close()\n",
"for line in lines:\n",
"word_nb += len(re.findall('\\\\S+', line))\n",
"log.info('Remote file has %d lines and %d words.' % (line_nb, word_nb))\n",
"return lines\n"
] | [
0,
0,
7,
7,
7,
7,
7,
7,
7,
7,
7,
7,
0,
7,
0,
7,
0,
0,
7,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"For",
"AugAssign'",
"Expr'",
"Return'"
] |
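
Several label-7 lines in this record track the temp-file handling: `tempfile.mkstemp(...)[1]` keeps only the path, leaking the open descriptor and then reopening the file by name. A sketch that adopts the descriptor mkstemp already returned; the download and cleanup logic around it is elided:

import os
import tempfile

def spool_to_tempfile(payload: bytes) -> str:
    fd, path = tempfile.mkstemp(prefix='bibclassify.')
    try:
        with os.fdopen(fd, 'wb') as stream:  # adopts fd; no second open by name
            stream.write(payload)
    except Exception:
        os.remove(path)
        raise
    return path
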
[
"\"\"\"Versioned singleton entity with the global configuration.\"\"\"\n",
"import logging\n",
"from google.appengine.ext import ndb\n",
"from gae_libs.model.versioned_model import VersionedModel\n",
"\"\"\"string\"\"\"\n",
"VAR_0 = ndb.DateTimeProperty(indexed=False, auto_now=True)\n",
"VAR_1 = ndb.StringProperty(indexed=False)\n",
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = VAR_2.GetVersion(VAR_3=version)\n",
"return VAR_7 or VAR_2() if VAR_3 is None else VAR_7\n"
] | [
"\"\"\"Versioned singleton entity with the global configuration.\"\"\"\n",
"import logging\n",
"from google.appengine.ext import ndb\n",
"from gae_libs.model.versioned_model import VersionedModel\n",
"\"\"\"Singleton entity with the global configuration of the service.\n\n All changes are stored in the revision log.\n \"\"\"\n",
"updated_ts = ndb.DateTimeProperty(indexed=False, auto_now=True)\n",
"updated_by = ndb.StringProperty(indexed=False)\n",
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"config_data = cls.GetVersion(version=version)\n",
"return config_data or cls() if version is None else config_data\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_6(self):...\n",
"VAR_2 = CLASS_1.construct_testscript_command('test_program.py')\n",
"VAR_3, VAR_4 = run_shell_command(VAR_2)\n",
"VAR_5 = \"\"\"test_program Z\nnon-interactive mode.\nExiting...\n\"\"\"\n",
"self.assertEqual(VAR_3, VAR_5)\n",
"self.assertEqual(VAR_4, '')\n"
] | [
"def test_run_shell_command_without_stdin(self):...\n",
"command = RunShellCommandTest.construct_testscript_command('test_program.py')\n",
"stdout, stderr = run_shell_command(command)\n",
"expected = \"\"\"test_program Z\nnon-interactive mode.\nExiting...\n\"\"\"\n",
"self.assertEqual(stdout, expected)\n",
"self.assertEqual(stderr, '')\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_8(self):...\n",
"VAR_9 = {}\n",
"VAR_9['inventory_hostname'] = self.name\n",
"VAR_9['inventory_hostname_short'] = self.name.split('.')[0]\n",
"VAR_9['group_names'] = sorted([VAR_10.name for VAR_10 in self.get_groups() if\n VAR_10.name != 'all'])\n",
"return VAR_9\n"
] | [
"def get_magic_vars(self):...\n",
"results = {}\n",
"results['inventory_hostname'] = self.name\n",
"results['inventory_hostname_short'] = self.name.split('.')[0]\n",
"results['group_names'] = sorted([g.name for g in self.get_groups() if g.\n name != 'all'])\n",
"return results\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(VAR_5, VAR_4=None):...\n",
"if not VAR_4:\n",
"VAR_4 = 'This field needs to have a different value than ' + orm[VAR_5\n ].label + '.'\n",
"def FUNC_3(VAR_6, VAR_7):...\n",
"if VAR_7.data is VAR_6[VAR_5].data:\n",
"return FUNC_3\n"
] | [
"def different(fieldname, message=None):...\n",
"if not message:\n",
"message = 'This field needs to have a different value than ' + orm[fieldname\n ].label + '.'\n",
"def _different(form, field):...\n",
"if field.data is form[fieldname].data:\n",
"return _different\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"FunctionDef'",
"Condition",
"Return'"
] |
[
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(self.fields, string_types):\n",
"if self.fields == '*':\n",
"for filter_name in ['filters', 'or_filters']:\n",
"self.fields = ['*']\n",
"self.fields = json.loads(self.fields)\n",
"self.fields = [VAR_34.strip() for VAR_34 in self.fields.split(',')]\n",
"VAR_4 = getattr(self, filter_name)\n",
"if isinstance(VAR_4, string_types):\n",
"VAR_4 = json.loads(VAR_4)\n",
"if isinstance(VAR_4, dict):\n",
"VAR_66 = VAR_4\n",
"setattr(self, filter_name, VAR_4)\n",
"VAR_4 = []\n",
"for key, VAR_5 in iteritems(VAR_66):\n",
"VAR_4.append(make_filter_tuple(self.doctype, key, VAR_5))\n"
] | [
"def parse_args(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(self.fields, string_types):\n",
"if self.fields == '*':\n",
"for filter_name in ['filters', 'or_filters']:\n",
"self.fields = ['*']\n",
"self.fields = json.loads(self.fields)\n",
"self.fields = [f.strip() for f in self.fields.split(',')]\n",
"filters = getattr(self, filter_name)\n",
"if isinstance(filters, string_types):\n",
"filters = json.loads(filters)\n",
"if isinstance(filters, dict):\n",
"fdict = filters\n",
"setattr(self, filter_name, filters)\n",
"filters = []\n",
"for key, value in iteritems(fdict):\n",
"filters.append(make_filter_tuple(self.doctype, key, value))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Condition",
"For",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"For",
"Expr'"
] |
[
"def __init__(self, VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"super(CLASS_5, self).__init__(VAR_2)\n",
"self.message = 'Component %s crashed on localhost' % VAR_2\n"
] | [
"def __init__(self, comp_name):...\n",
"\"\"\"docstring\"\"\"\n",
"super(LocalCrashEvent, self).__init__(comp_name)\n",
"self.message = 'Component %s crashed on localhost' % comp_name\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'"
] |
[
"def FUNC_6():...\n",
"VAR_13.throw(_('Invalid Search Field'), VAR_13.DataError)\n"
] | [
"def _raise_exception():...\n",
"frappe.throw(_('Invalid Search Field'), frappe.DataError)\n"
] | [
0,
4
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_18(VAR_10, **VAR_19):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3[VAR_10] = VAR_19\n"
] | [
"def add_existing_user(user_id, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"created_users[user_id] = kwargs\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'"
] |
[
"def FUNC_13(self, VAR_9, VAR_10=None, VAR_11=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_28 = self.get_and_check_task(VAR_9, VAR_10, VAR_11, VAR_16={\n 'orthophoto_area': Envelope(Cast('orthophoto', GeometryField()))})\n",
"VAR_30 = get_tile_json(VAR_28.name, [\n '/api/projects/{}/tasks/{}/tiles/{{z}}/{{x}}/{{y}}.png'.format(VAR_28.\n project.id, VAR_28.id)], VAR_28.orthophoto_area.extent)\n",
"return Response(VAR_30)\n"
] | [
"def get(self, request, pk=None, project_pk=None):...\n",
"\"\"\"docstring\"\"\"\n",
"task = self.get_and_check_task(request, pk, project_pk, annotate={\n 'orthophoto_area': Envelope(Cast('orthophoto', GeometryField()))})\n",
"json = get_tile_json(task.name, [\n '/api/projects/{}/tasks/{}/tiles/{{z}}/{{x}}/{{y}}.png'.format(task.\n project.id, task.id)], task.orthophoto_area.extent)\n",
"return Response(json)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"import logging\n",
"from datetime import timedelta\n",
"import voluptuous as vol\n",
"from homeassistant.components.sensor import PLATFORM_SCHEMA\n",
"from homeassistant.const import TEMP_CELSIUS, DEVICE_CLASS_HUMIDITY, DEVICE_CLASS_TEMPERATURE, STATE_UNKNOWN\n",
"from homeassistant.helpers.entity import Entity\n",
"from homeassistant.util import Throttle\n",
"import homeassistant.helpers.config_validation as cv\n",
"VAR_0 = logging.getLogger(__name__)\n",
"VAR_1 = 'modules'\n",
"VAR_2 = 'station'\n",
"VAR_3 = ['netatmo']\n",
"VAR_4 = timedelta(seconds=600)\n",
"VAR_5 = {'temperature': ['Temperature', TEMP_CELSIUS, None,\n DEVICE_CLASS_TEMPERATURE], 'co2': ['CO2', 'ppm', 'mdi:cloud', None],\n 'pressure': ['Pressure', 'mbar', 'mdi:gauge', None], 'noise': ['Noise',\n 'dB', 'mdi:volume-high', None], 'humidity': ['Humidity', '%', None,\n DEVICE_CLASS_HUMIDITY], 'rain': ['Rain', 'mm', 'mdi:weather-rainy',\n None], 'sum_rain_1': ['sum_rain_1', 'mm', 'mdi:weather-rainy', None],\n 'sum_rain_24': ['sum_rain_24', 'mm', 'mdi:weather-rainy', None],\n 'battery_vp': ['Battery', '', 'mdi:battery', None], 'battery_lvl': [\n 'Battery_lvl', '', 'mdi:battery', None], 'min_temp': ['Min Temp.',\n TEMP_CELSIUS, 'mdi:thermometer', None], 'max_temp': ['Max Temp.',\n TEMP_CELSIUS, 'mdi:thermometer', None], 'windangle': ['Angle', '',\n 'mdi:compass', None], 'windangle_value': ['Angle Value', 'º',\n 'mdi:compass', None], 'windstrength': ['Strength', 'km/h',\n 'mdi:weather-windy', None], 'gustangle': ['Gust Angle', '',\n 'mdi:compass', None], 'gustangle_value': ['Gust Angle Value', 'º',\n 'mdi:compass', None], 'guststrength': ['Gust Strength', 'km/h',\n 'mdi:weather-windy', None], 'rf_status': ['Radio', '', 'mdi:signal',\n None], 'rf_status_lvl': ['Radio_lvl', '', 'mdi:signal', None],\n 'wifi_status': ['Wifi', '', 'mdi:wifi', None], 'wifi_status_lvl': [\n 'Wifi_lvl', 'dBm', 'mdi:wifi', None]}\n",
"VAR_6 = vol.Schema({vol.Required(cv.string): vol.All(cv.ensure_list, [vol.\n In(VAR_5)])})\n",
"VAR_7 = VAR_7.extend({vol.Optional(VAR_2): cv.string, vol.Optional(VAR_1):\n VAR_6})\n",
"def FUNC_0(VAR_8, VAR_9, VAR_10, VAR_11=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_12 = VAR_8.components.netatmo\n",
"VAR_13 = CLASS_1(VAR_12.NETATMO_AUTH, VAR_9.get(VAR_2, None))\n",
"VAR_14 = []\n",
"import pyatmo\n",
"if VAR_1 in VAR_9:\n",
"return None\n",
"VAR_10(VAR_14, True)\n",
"for VAR_16, monitored_conditions in VAR_9[VAR_1].items():\n",
"for VAR_16 in VAR_13.get_module_names():\n",
"\"\"\"Implementation of a Netatmo sensor.\"\"\"\n",
"if VAR_16 not in VAR_13.get_module_names():\n",
"for variable in VAR_13.station_data.monitoredConditions(VAR_16):\n",
"def __init__(self, VAR_15, VAR_16, VAR_17):...\n",
"VAR_0.error('Module name: \"%s\" not found', VAR_16)\n",
"for variable in monitored_conditions:\n",
"if variable in VAR_5.keys():\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14.append(CLASS_0(VAR_13, VAR_16, variable))\n",
"VAR_14.append(CLASS_0(VAR_13, VAR_16, variable))\n",
"VAR_0.warning('Ignoring unknown var %s for mod %s', variable, VAR_16)\n",
"self._name = 'Netatmo {} {}'.format(VAR_16, VAR_5[VAR_17][0])\n",
"self.netatmo_data = VAR_15\n",
"self.module_name = VAR_16\n",
"self.type = VAR_17\n",
"self._state = None\n",
"self._device_class = VAR_5[self.type][3]\n",
"self._icon = VAR_5[self.type][2]\n",
"self._unit_of_measurement = VAR_5[self.type][1]\n",
"VAR_20 = self.netatmo_data.station_data.moduleByName(module=module_name)['_id']\n",
"self.module_id = VAR_20[1]\n",
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._name\n"
] | [
"\"\"\"\nSupport for the NetAtmo Weather Service.\n\nFor more details about this platform, please refer to the documentation at\nhttps://home-assistant.io/components/sensor.netatmo/\n\"\"\"\n",
"import logging\n",
"from datetime import timedelta\n",
"import voluptuous as vol\n",
"from homeassistant.components.sensor import PLATFORM_SCHEMA\n",
"from homeassistant.const import TEMP_CELSIUS, DEVICE_CLASS_HUMIDITY, DEVICE_CLASS_TEMPERATURE, STATE_UNKNOWN\n",
"from homeassistant.helpers.entity import Entity\n",
"from homeassistant.util import Throttle\n",
"import homeassistant.helpers.config_validation as cv\n",
"_LOGGER = logging.getLogger(__name__)\n",
"CONF_MODULES = 'modules'\n",
"CONF_STATION = 'station'\n",
"DEPENDENCIES = ['netatmo']\n",
"MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=600)\n",
"SENSOR_TYPES = {'temperature': ['Temperature', TEMP_CELSIUS, None,\n DEVICE_CLASS_TEMPERATURE], 'co2': ['CO2', 'ppm', 'mdi:cloud', None],\n 'pressure': ['Pressure', 'mbar', 'mdi:gauge', None], 'noise': ['Noise',\n 'dB', 'mdi:volume-high', None], 'humidity': ['Humidity', '%', None,\n DEVICE_CLASS_HUMIDITY], 'rain': ['Rain', 'mm', 'mdi:weather-rainy',\n None], 'sum_rain_1': ['sum_rain_1', 'mm', 'mdi:weather-rainy', None],\n 'sum_rain_24': ['sum_rain_24', 'mm', 'mdi:weather-rainy', None],\n 'battery_vp': ['Battery', '', 'mdi:battery', None], 'battery_lvl': [\n 'Battery_lvl', '', 'mdi:battery', None], 'min_temp': ['Min Temp.',\n TEMP_CELSIUS, 'mdi:thermometer', None], 'max_temp': ['Max Temp.',\n TEMP_CELSIUS, 'mdi:thermometer', None], 'windangle': ['Angle', '',\n 'mdi:compass', None], 'windangle_value': ['Angle Value', 'º',\n 'mdi:compass', None], 'windstrength': ['Strength', 'km/h',\n 'mdi:weather-windy', None], 'gustangle': ['Gust Angle', '',\n 'mdi:compass', None], 'gustangle_value': ['Gust Angle Value', 'º',\n 'mdi:compass', None], 'guststrength': ['Gust Strength', 'km/h',\n 'mdi:weather-windy', None], 'rf_status': ['Radio', '', 'mdi:signal',\n None], 'rf_status_lvl': ['Radio_lvl', '', 'mdi:signal', None],\n 'wifi_status': ['Wifi', '', 'mdi:wifi', None], 'wifi_status_lvl': [\n 'Wifi_lvl', 'dBm', 'mdi:wifi', None]}\n",
"MODULE_SCHEMA = vol.Schema({vol.Required(cv.string): vol.All(cv.ensure_list,\n [vol.In(SENSOR_TYPES)])})\n",
"PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Optional(CONF_STATION): cv.\n string, vol.Optional(CONF_MODULES): MODULE_SCHEMA})\n",
"def setup_platform(hass, config, add_devices, discovery_info=None):...\n",
"\"\"\"docstring\"\"\"\n",
"netatmo = hass.components.netatmo\n",
"data = NetAtmoData(netatmo.NETATMO_AUTH, config.get(CONF_STATION, None))\n",
"dev = []\n",
"import pyatmo\n",
"if CONF_MODULES in config:\n",
"return None\n",
"add_devices(dev, True)\n",
"for module_name, monitored_conditions in config[CONF_MODULES].items():\n",
"for module_name in data.get_module_names():\n",
"\"\"\"Implementation of a Netatmo sensor.\"\"\"\n",
"if module_name not in data.get_module_names():\n",
"for variable in data.station_data.monitoredConditions(module_name):\n",
"def __init__(self, netatmo_data, module_name, sensor_type):...\n",
"_LOGGER.error('Module name: \"%s\" not found', module_name)\n",
"for variable in monitored_conditions:\n",
"if variable in SENSOR_TYPES.keys():\n",
"\"\"\"docstring\"\"\"\n",
"dev.append(NetAtmoSensor(data, module_name, variable))\n",
"dev.append(NetAtmoSensor(data, module_name, variable))\n",
"_LOGGER.warning('Ignoring unknown var %s for mod %s', variable, module_name)\n",
"self._name = 'Netatmo {} {}'.format(module_name, SENSOR_TYPES[sensor_type][0])\n",
"self.netatmo_data = netatmo_data\n",
"self.module_name = module_name\n",
"self.type = sensor_type\n",
"self._state = None\n",
"self._device_class = SENSOR_TYPES[self.type][3]\n",
"self._icon = SENSOR_TYPES[self.type][2]\n",
"self._unit_of_measurement = SENSOR_TYPES[self.type][1]\n",
"module_id = self.netatmo_data.station_data.moduleByName(module=module_name)[\n '_id']\n",
"self.module_id = module_id[1]\n",
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._name\n"
] | [
0,
0,
7,
0,
0,
0,
0,
7,
0,
0,
0,
0,
0,
7,
7,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Import'",
"Condition",
"Return'",
"Expr'",
"For",
"For",
"Expr'",
"Condition",
"For",
"FunctionDef'",
"Expr'",
"For",
"Condition",
"Docstring",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Docstring",
"Return'"
] |
[
"def __init__(self, VAR_0, VAR_1, VAR_2):...\n",
"super(CLASS_2, self).__init__(VAR_0, VAR_1, VAR_2, 'GET')\n"
] | [
"def __init__(self, connection, args, logger):...\n",
"super(HttpGet, self).__init__(connection, args, logger, 'GET')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def __init__(self, *VAR_2, **VAR_3):...\n",
"super(CLASS_0, self).__init__(*VAR_2, **kwargs)\n",
"self.common = None\n",
"self.configuration.append_config_values(hpcommon.hp3par_opts)\n",
"self.configuration.append_config_values(san.san_opts)\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"super(HP3PARFCDriver, self).__init__(*args, **kwargs)\n",
"self.common = None\n",
"self.configuration.append_config_values(hpcommon.hp3par_opts)\n",
"self.configuration.append_config_values(san.san_opts)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |