lines: sequencelengths [1, 444]
raw_lines: sequencelengths [1, 444]
label: sequencelengths [1, 444]
type: sequencelengths [1, 444]
[ "def FUNC_11(VAR_2, VAR_3, VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = get_visible_tournament_or_404(VAR_3, VAR_2.user)\n", "VAR_8 = FUNC_0(VAR_7, VAR_1)\n", "VAR_26 = VAR_8.game_set.all()\n", "VAR_9 = {'round': VAR_8, 'game_list': VAR_26}\n", "return render(VAR_2, 'games/index.html', VAR_9)\n" ]
[ "def game_index(request, tournament_id, round_num):...\n", "\"\"\"docstring\"\"\"\n", "t = get_visible_tournament_or_404(tournament_id, request.user)\n", "r = get_round_or_404(t, round_num)\n", "the_list = r.game_set.all()\n", "context = {'round': r, 'game_list': the_list}\n", "return render(request, 'games/index.html', context)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def __init__(self, VAR_3, VAR_4):...\n", "Frame.__init__(self, VAR_3)\n", "VAR_5 = Frame(self, bg='#f8f8f8')\n", "VAR_6 = Frame(self, bg='#e7e7e7')\n", "VAR_7 = Frame(self, bg='#f8f8f8')\n", "VAR_7.pack(expand=True, fill='both')\n", "Label(VAR_7, text=\"Trisha's Meal Planner\", font=LARGE_FONT, bg='#f8f8f8',\n fg='#000000').pack(fill='both', pady=20)\n", "VAR_8 = Image.open('recipe_card.jpg')\n", "VAR_9 = ImageTk.PhotoImage(VAR_8)\n", "VAR_10 = Label(VAR_7, image=render, bg='#f8f8f8')\n", "VAR_10.image = VAR_9\n", "VAR_10.pack(fill='both', pady=40)\n", "Button(VAR_7, text='Add A Recipe', highlightbackground='#f8f8f8', command=\n lambda : controller.show_frame(AddARecipe)).pack(fill=Y)\n", "Button(VAR_7, text='Make a Meal Plan', highlightbackground='#f8f8f8',\n command=lambda : controller.show_frame(MakeMealPlan)).pack(fill=Y)\n", "Button(VAR_7, text='View Recipes', highlightbackground='#f8f8f8', command=\n lambda : view_recipes()).pack(fill=Y)\n", "def FUNC_0():...\n", "VAR_7.pack_forget()\n", "VAR_5.pack(expand=True, fill='both')\n", "VAR_12 = 'meal_planner.db'\n", "VAR_20 = conn.cursor()\n", "VAR_21 = VAR_20.execute('SELECT * FROM recipe')\n", "for result in [VAR_21]:\n", "for row in result.fetchall():\n", "conn.close()\n", "VAR_24 = row[0]\n", "for i in range(len(VAR_2)):\n", "VAR_2.append(VAR_24)\n", "VAR_13 = Label(VAR_5, font=MEDIUM_FONT, bg='#f8f8f8', fg='#000000', text=\n recipeNames[i])\n", "def FUNC_1(VAR_11):...\n", "VAR_13.pack()\n", "VAR_5.pack_forget()\n", "VAR_13.bind('<Button-1>', lambda event, x=VAR_2[i]: [FUNC_1(x), VAR_5.\n pack_forget()])\n", "VAR_12 = 'meal_planner.db'\n", "VAR_6.pack(fill='both')\n", "VAR_8 = Image.open('home.jpg')\n", "VAR_9 = ImageTk.PhotoImage(VAR_8)\n", "VAR_10 = Button(VAR_6, image=render, borderwidth=0, highlightthickness=0,\n highlightbackground='#e7e7e7', command=lambda : [frame.pack(expand=True,\n fill='both'), menuFrame.pack_forget(), viewDetailsFrame.pack_forget()])\n", "VAR_10.image = VAR_9\n", "VAR_10.pack(side=LEFT)\n", "VAR_13 = Label(VAR_6, text='View Recipe', font=LARGE_FONT, bg='#e7e7e7', fg\n ='#272822')\n", "VAR_13.pack(side=LEFT, padx=300)\n", "VAR_14 = Frame(self, bg='#f8f8f8')\n", "VAR_14.pack(expand=True, fill='both')\n", "VAR_20 = conn.cursor()\n", "VAR_21 = VAR_20.execute('SELECT * FROM recipe WHERE name = ' + '\"' + VAR_11 +\n '\"')\n", "for result in [VAR_21]:\n", "for row in result.fetchall():\n", "VAR_22 = (\n \"\"\"Name: {} \n Cook time: {} \n Number of Servings: {} \n Ingredients: {} \n Directions: {}\"\"\"\n .format(VAR_24, VAR_25, VAR_26, VAR_28, VAR_29))\n", "VAR_24 = row[0]\n", "Label(VAR_14, text=string, font=MEDIUM_FONT, bg='#f8f8f8', fg='#000000').pack(\n side=LEFT)\n", "VAR_25 = row[1]\n", "conn.close()\n", "VAR_26 = row[2]\n", "Button(VAR_6, text='Delete', highlightbackground='#e7e7e7', command=lambda :\n delete_recipe(name)).pack(side=RIGHT)\n", "VAR_27 = row[3]\n", "def FUNC_2(VAR_11):...\n", "VAR_28 = row[4]\n", "VAR_12 = 'meal_planner.db'\n", "VAR_29 = row[5]\n", "VAR_15 = datetime.datetime.now()\n", "VAR_16 = datetime.date(VAR_15.year, VAR_15.month, VAR_15.day)\n", "VAR_17 = VAR_16.isocalendar()[1]\n", "VAR_18 = 'recipes_' + str(VAR_17)\n", "VAR_20 = conn.cursor()\n", "VAR_20.execute('SELECT recipe FROM ' + VAR_18 + ' WHERE recipe = ' + '\"' +\n VAR_11 + '\"')\n", "VAR_23 = VAR_20.fetchone()\n", "if VAR_23:\n", "print(VAR_23[0])\n", "FUNC_3(VAR_11)\n", "messagebox.showerror('Cannot Delete',\n \"Cannot delete recipe when it's used in the current week's menu.\")\n", "def FUNC_3(VAR_11):...\n", "VAR_19 = 
'\"' + VAR_11 + '\"'\n", "VAR_20 = conn.cursor()\n", "VAR_20.execute('DELETE FROM recipe WHERE name = ' + '\"' + VAR_11 + '\"')\n", "print(VAR_20.rowcount)\n", "if VAR_20.rowcount == 1:\n", "messagebox.showinfo('Success', 'Recipe Deleted.')\n", "if VAR_20.rowcount == 0:\n", "VAR_6.pack_forget()\n", "messagebox.showerror('Cannot Delete', 'Cannot delete recipe, please try again.'\n )\n", "conn.close()\n", "VAR_5.pack(expand=True, fill='both')\n" ]
[ "def __init__(self, parent, controller):...\n", "Frame.__init__(self, parent)\n", "viewRecipeFrame = Frame(self, bg='#f8f8f8')\n", "menuFrame = Frame(self, bg='#e7e7e7')\n", "frame = Frame(self, bg='#f8f8f8')\n", "frame.pack(expand=True, fill='both')\n", "Label(frame, text=\"Trisha's Meal Planner\", font=LARGE_FONT, bg='#f8f8f8',\n fg='#000000').pack(fill='both', pady=20)\n", "load = Image.open('recipe_card.jpg')\n", "render = ImageTk.PhotoImage(load)\n", "img = Label(frame, image=render, bg='#f8f8f8')\n", "img.image = render\n", "img.pack(fill='both', pady=40)\n", "Button(frame, text='Add A Recipe', highlightbackground='#f8f8f8', command=\n lambda : controller.show_frame(AddARecipe)).pack(fill=Y)\n", "Button(frame, text='Make a Meal Plan', highlightbackground='#f8f8f8',\n command=lambda : controller.show_frame(MakeMealPlan)).pack(fill=Y)\n", "Button(frame, text='View Recipes', highlightbackground='#f8f8f8', command=\n lambda : view_recipes()).pack(fill=Y)\n", "def view_recipes():...\n", "frame.pack_forget()\n", "viewRecipeFrame.pack(expand=True, fill='both')\n", "database_file = 'meal_planner.db'\n", "cursor = conn.cursor()\n", "selection = cursor.execute('SELECT * FROM recipe')\n", "for result in [selection]:\n", "for row in result.fetchall():\n", "conn.close()\n", "name = row[0]\n", "for i in range(len(recipeNames)):\n", "recipeNames.append(name)\n", "label = Label(viewRecipeFrame, font=MEDIUM_FONT, bg='#f8f8f8', fg='#000000',\n text=recipeNames[i])\n", "def callback(recipeName):...\n", "label.pack()\n", "viewRecipeFrame.pack_forget()\n", "label.bind('<Button-1>', lambda event, x=recipeNames[i]: [callback(x),\n viewRecipeFrame.pack_forget()])\n", "database_file = 'meal_planner.db'\n", "menuFrame.pack(fill='both')\n", "load = Image.open('home.jpg')\n", "render = ImageTk.PhotoImage(load)\n", "img = Button(menuFrame, image=render, borderwidth=0, highlightthickness=0,\n highlightbackground='#e7e7e7', command=lambda : [frame.pack(expand=True,\n fill='both'), menuFrame.pack_forget(), viewDetailsFrame.pack_forget()])\n", "img.image = render\n", "img.pack(side=LEFT)\n", "label = Label(menuFrame, text='View Recipe', font=LARGE_FONT, bg='#e7e7e7',\n fg='#272822')\n", "label.pack(side=LEFT, padx=300)\n", "viewDetailsFrame = Frame(self, bg='#f8f8f8')\n", "viewDetailsFrame.pack(expand=True, fill='both')\n", "cursor = conn.cursor()\n", "selection = cursor.execute('SELECT * FROM recipe WHERE name = ' + '\"' +\n recipeName + '\"')\n", "for result in [selection]:\n", "for row in result.fetchall():\n", "string = (\n \"\"\"Name: {} \n Cook time: {} \n Number of Servings: {} \n Ingredients: {} \n Directions: {}\"\"\"\n .format(name, time, servings, ingredients, directions))\n", "name = row[0]\n", "Label(viewDetailsFrame, text=string, font=MEDIUM_FONT, bg='#f8f8f8', fg=\n '#000000').pack(side=LEFT)\n", "time = row[1]\n", "conn.close()\n", "servings = row[2]\n", "Button(menuFrame, text='Delete', highlightbackground='#e7e7e7', command=lambda\n : delete_recipe(name)).pack(side=RIGHT)\n", "favorite = row[3]\n", "def delete_recipe(recipeName):...\n", "ingredients = row[4]\n", "database_file = 'meal_planner.db'\n", "directions = row[5]\n", "now = datetime.datetime.now()\n", "dt = datetime.date(now.year, now.month, now.day)\n", "weekNumber = dt.isocalendar()[1]\n", "tableName = 'recipes_' + str(weekNumber)\n", "cursor = conn.cursor()\n", "cursor.execute('SELECT recipe FROM ' + tableName + ' WHERE recipe = ' + '\"' +\n recipeName + '\"')\n", "returnObject = cursor.fetchone()\n", "if returnObject:\n", 
"print(returnObject[0])\n", "actually_delete(recipeName)\n", "messagebox.showerror('Cannot Delete',\n \"Cannot delete recipe when it's used in the current week's menu.\")\n", "def actually_delete(recipeName):...\n", "queryString = '\"' + recipeName + '\"'\n", "cursor = conn.cursor()\n", "cursor.execute('DELETE FROM recipe WHERE name = ' + '\"' + recipeName + '\"')\n", "print(cursor.rowcount)\n", "if cursor.rowcount == 1:\n", "messagebox.showinfo('Success', 'Recipe Deleted.')\n", "if cursor.rowcount == 0:\n", "menuFrame.pack_forget()\n", "messagebox.showerror('Cannot Delete', 'Cannot delete recipe, please try again.'\n )\n", "conn.close()\n", "viewRecipeFrame.pack(expand=True, fill='both')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 0, 4, 0, 4, 0, 4, 4, 0, 0, 0, 0, 4, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "For", "For", "Expr'", "Assign'", "For", "Expr'", "Assign'", "FunctionDef'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "For", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_0(VAR_6):...\n", "return VAR_6 if isinstance(VAR_6, list) else [VAR_6]\n" ]
[ "def to_list(value):...\n", "return value if isinstance(value, list) else [value]\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(self, VAR_1, VAR_2=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_16 = self.get_userentry(VAR_1)\n", "if VAR_16 is not None:\n", "logger.warn('User {0} already exists, skip useradd', VAR_1)\n", "if VAR_2 is not None:\n", "return\n", "VAR_20 = 'pw useradd {0} -e {1} -m'.format(VAR_1, VAR_2)\n", "VAR_20 = 'pw useradd {0} -m'.format(VAR_1)\n", "VAR_17, VAR_18 = shellutil.run_get_output(VAR_20)\n", "if VAR_17 != 0:\n" ]
[ "def useradd(self, username, expiration=None):...\n", "\"\"\"docstring\"\"\"\n", "userentry = self.get_userentry(username)\n", "if userentry is not None:\n", "logger.warn('User {0} already exists, skip useradd', username)\n", "if expiration is not None:\n", "return\n", "cmd = 'pw useradd {0} -e {1} -m'.format(username, expiration)\n", "cmd = 'pw useradd {0} -m'.format(username)\n", "retcode, out = shellutil.run_get_output(cmd)\n", "if retcode != 0:\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Expr'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Condition" ]
[ "def FUNC_21(self, VAR_12):...\n", "return self.accounts[VAR_12]\n" ]
[ "def account_for(self, username):...\n", "return self.accounts[username]\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self, VAR_18, *VAR_15, **VAR_16):...\n", "self.item = VAR_18\n", "CLASS_0.__init__(self, VAR_18, *VAR_15, **kw)\n" ]
[ "def __init__(self, item, *a, **kw):...\n", "self.item = item\n", "Validator.__init__(self, item, *a, **kw)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_23(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = {'SEVERITY': {}, 'CONFIDENCE': {}}\n", "self.check_example('okay.py', VAR_2)\n" ]
[ "def test_okay(self):...\n", "\"\"\"docstring\"\"\"\n", "expect = {'SEVERITY': {}, 'CONFIDENCE': {}}\n", "self.check_example('okay.py', expect)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "def FUNC_1(self):...\n", "self.assertTrue(self.client.login(username='autotest', password='password'))\n" ]
[ "def setUp(self):...\n", "self.assertTrue(self.client.login(username='autotest', password='password'))\n" ]
[ 0, 5 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_5(VAR_8, VAR_9):...\n", "\"\"\"docstring\"\"\"\n", "VAR_16 = User(**user_info)\n", "VAR_16.save()\n", "VAR_17 = VAR_16.id\n", "for VAR_23 in VAR_9.values():\n", "VAR_23['user_id'] = VAR_17\n", "return 'new user and tasks created'\n", "VAR_24 = Task(**task)\n", "VAR_24.save()\n" ]
[ "def initialize_new_task_list(user_info, all_tasks):...\n", "\"\"\"docstring\"\"\"\n", "new_user = User(**user_info)\n", "new_user.save()\n", "user_id = new_user.id\n", "for task in all_tasks.values():\n", "task['user_id'] = user_id\n", "return 'new user and tasks created'\n", "new_task = Task(**task)\n", "new_task.save()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Assign'", "For", "Assign'", "Return'", "Assign'", "Expr'" ]
[ "@classmethod...\n", "VAR_37 = {}\n", "for header in VAR_12:\n", "if header:\n", "return VAR_37\n", "VAR_37.update(header)\n" ]
[ "@classmethod...\n", "merged = {}\n", "for header in headers:\n", "if header:\n", "return merged\n", "merged.update(header)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "For", "Condition", "Return'", "Expr'" ]
[ "def FUNC_5(self):...\n", "self.mock(time, 'time', lambda : 126.0)\n", "VAR_6 = os_utilities.get_state()\n", "VAR_6['sleep_streak'] = 12\n", "for disk in VAR_6['disks'].itervalues():\n", "self.assertGreater(disk.pop('free_mb'), 1.0)\n", "VAR_8 = bot_main.get_state(None, 12)\n", "for disk in VAR_8['disks'].itervalues():\n", "self.assertGreater(disk.pop('free_mb'), 1.0)\n", "self.assertGreater(VAR_8.pop('nb_files_in_temp'), 0)\n", "self.assertGreater(VAR_6.pop('nb_files_in_temp'), 0)\n", "self.assertGreater(VAR_8.pop('uptime'), 0)\n", "self.assertGreater(VAR_6.pop('uptime'), 0)\n", "self.assertEqual(sorted(VAR_6.pop('temp', {})), sorted(VAR_8.pop('temp', {})))\n", "self.assertEqual(VAR_6, VAR_8)\n" ]
[ "def test_get_state(self):...\n", "self.mock(time, 'time', lambda : 126.0)\n", "expected = os_utilities.get_state()\n", "expected['sleep_streak'] = 12\n", "for disk in expected['disks'].itervalues():\n", "self.assertGreater(disk.pop('free_mb'), 1.0)\n", "actual = bot_main.get_state(None, 12)\n", "for disk in actual['disks'].itervalues():\n", "self.assertGreater(disk.pop('free_mb'), 1.0)\n", "self.assertGreater(actual.pop('nb_files_in_temp'), 0)\n", "self.assertGreater(expected.pop('nb_files_in_temp'), 0)\n", "self.assertGreater(actual.pop('uptime'), 0)\n", "self.assertGreater(expected.pop('uptime'), 0)\n", "self.assertEqual(sorted(expected.pop('temp', {})), sorted(actual.pop('temp',\n {})))\n", "self.assertEqual(expected, actual)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "For", "Expr'", "Assign'", "For", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_3(VAR_0):...\n", "VAR_2 = []\n", "VAR_5 = {}\n", "VAR_6 = 's.transaction_date' if VAR_0['based_on'\n ] == 'Sales Order' else 's.posting_date'\n", "VAR_7 = frappe.db.sql('string'.format(VAR_6=date_field, doctype=filters[\n 'based_on']), as_dict=1)\n", "for d in VAR_7:\n", "VAR_5.setdefault(d.item_name, d)\n", "return VAR_5\n" ]
[ "def get_sales_details(filters):...\n", "data = []\n", "item_details_map = {}\n", "date_field = 's.transaction_date' if filters['based_on'\n ] == 'Sales Order' else 's.posting_date'\n", "sales_data = frappe.db.sql(\n \"\"\"\n\t\tselect s.territory, s.customer, si.item_group, si.item_name, si.qty, {date_field} as last_order_date,\n\t\tDATEDIFF(CURDATE(), {date_field}) as days_since_last_order\n\t\tfrom `tab{doctype}` s, `tab{doctype} Item` si\n\t\twhere s.name = si.parent and s.docstatus = 1\n\t\tgroup by si.name order by days_since_last_order \"\"\"\n .format(date_field=date_field, doctype=filters['based_on']), as_dict=1)\n", "for d in sales_data:\n", "item_details_map.setdefault(d.item_name, d)\n", "return item_details_map\n" ]
[ 0, 0, 0, 0, 4, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Return'" ]
[ "@staticmethod...\n", "return VAR_82\n" ]
[ "@staticmethod...\n", "return f\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_16(self, VAR_11):...\n", "\"\"\"docstring\"\"\"\n", "for VAR_38 in sorted(self.r_handler.request.arguments):\n", "VAR_37 = self.r_handler.get_query_argument(VAR_38)\n", "return VAR_11\n", "if VAR_37.isdigit():\n", "VAR_37 = int(VAR_37)\n", "VAR_11[VAR_38] = VAR_37\n" ]
[ "def _add_meta_props(self, response):...\n", "\"\"\"docstring\"\"\"\n", "for prop in sorted(self.r_handler.request.arguments):\n", "prop_value = self.r_handler.get_query_argument(prop)\n", "return response\n", "if prop_value.isdigit():\n", "prop_value = int(prop_value)\n", "response[prop] = prop_value\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "For", "Assign'", "Return'", "Condition", "Assign'", "Assign'" ]
[ "def __del__(self):...\n", "self.close()\n" ]
[ "def __del__(self):...\n", "self.close()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "@app.route('/auth/login', methods=['GET', 'POST'])...\n", "if request.method == 'GET':\n", "return FUNC_4()\n", "VAR_0 = LoginForm(request.form)\n", "if not VAR_0.validate():\n", "return FUNC_5(VAR_0)\n", "VAR_1 = User.query.filter_by(username=form.username.data, password=form.\n password.data).first()\n", "if not VAR_1:\n", "return FUNC_6(VAR_0)\n", "login_user(VAR_1)\n", "return redirect(url_for('index'))\n" ]
[ "@app.route('/auth/login', methods=['GET', 'POST'])...\n", "if request.method == 'GET':\n", "return render_login()\n", "form = LoginForm(request.form)\n", "if not form.validate():\n", "return render_loginForm(form)\n", "user = User.query.filter_by(username=form.username.data, password=form.\n password.data).first()\n", "if not user:\n", "return render_loginInvalid(form)\n", "login_user(user)\n", "return redirect(url_for('index'))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Return'", "Expr'", "Return'" ]
[ "def FUNC_8(VAR_4):...\n", "if not os.path.exists('D:' + VAR_4):\n", "return VAR_4\n", "VAR_28 = 1\n", "while VAR_28 < 101:\n", "VAR_32 = re.sub('\\\\.(.*?)$', ' (%d).\\\\1' % VAR_28, VAR_4)\n", "return ''\n", "if not os.path.exists('D:' + VAR_32):\n", "return VAR_32\n", "VAR_28 = VAR_28 + 1\n" ]
[ "def get_non_duplicate_path(file_path):...\n", "if not os.path.exists('D:' + file_path):\n", "return file_path\n", "duplicate = 1\n", "while duplicate < 101:\n", "new_path = re.sub('\\\\.(.*?)$', ' (%d).\\\\1' % duplicate, file_path)\n", "return ''\n", "if not os.path.exists('D:' + new_path):\n", "return new_path\n", "duplicate = duplicate + 1\n" ]
[ 0, 4, 4, 4, 4, 4, 4, 4, 4, 4 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Return'", "Condition", "Return'", "Assign'" ]
[ "@BaseHandler.check_is_group_user('Announcement Manager')...\n", "if not VAR_6:\n", "if not Announce.by_id(VAR_6, self.sql_session).scalar():\n", "VAR_8 = AttachmentList.by_ann_id(VAR_6, self.sql_session)\n", "VAR_9 = VAR_8.all()\n", "for old_att in VAR_9:\n", "shutil.rmtree('file/%s' % old_att.key)\n", "VAR_8.delete()\n", "Announce.by_id(VAR_6, self.sql_session).delete()\n", "self.write({'success': True})\n" ]
[ "@BaseHandler.check_is_group_user('Announcement Manager')...\n", "if not ann_id:\n", "if not Announce.by_id(ann_id, self.sql_session).scalar():\n", "q = AttachmentList.by_ann_id(ann_id, self.sql_session)\n", "old_atts = q.all()\n", "for old_att in old_atts:\n", "shutil.rmtree('file/%s' % old_att.key)\n", "q.delete()\n", "Announce.by_id(ann_id, self.sql_session).delete()\n", "self.write({'success': True})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Condition", "Assign'", "Assign'", "For", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_7(self, VAR_0, VAR_1, VAR_2, VAR_3):...\n", "VAR_13 = ORMPerson.objects.create()\n", "ORMProfile.objects.create(person=orm_person, VAR_8='u')\n", "VAR_14 = ORMExperience.objects.create(VAR_0=title, VAR_1=description, VAR_2\n =share_id, author=orm_person)\n", "VAR_14.picture = VAR_3\n", "VAR_14.save()\n", "return self\n" ]
[ "def given_an_experience_on_db(self, title, description, share_id, pic):...\n", "orm_person = ORMPerson.objects.create()\n", "ORMProfile.objects.create(person=orm_person, username='u')\n", "experience = ORMExperience.objects.create(title=title, description=\n description, share_id=share_id, author=orm_person)\n", "experience.picture = pic\n", "experience.save()\n", "return self\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_9(self):...\n", "url_helper.urllib2.urlopen(mox.IgnoreArg(), mox.IgnoreArg(), timeout=mox.\n IgnoreArg()).AndRaise(urllib2.HTTPError('url', 400, 'error message',\n None, None))\n", "self._mox.ReplayAll()\n", "self.assertIsNone(url_helper.UrlOpen('url', max_tries=10))\n", "self._mox.VerifyAll()\n" ]
[ "def testUrlOpenHTTPErrorNoRetry(self):...\n", "url_helper.urllib2.urlopen(mox.IgnoreArg(), mox.IgnoreArg(), timeout=mox.\n IgnoreArg()).AndRaise(urllib2.HTTPError('url', 400, 'error message',\n None, None))\n", "self._mox.ReplayAll()\n", "self.assertIsNone(url_helper.UrlOpen('url', max_tries=10))\n", "self._mox.VerifyAll()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_12(self, VAR_3):...\n", "return self.iden_reqid_map.get_key(VAR_3)\n" ]
[ "def get_iden(self, reqid):...\n", "return self.iden_reqid_map.get_key(reqid)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_8(self):...\n", "VAR_3 = DatabaseQuery('DocField').execute(filters={'parent': 'DocType'},\n fields=['fieldname', 'fieldtype'], or_filters=[{'fieldtype': 'Table'},\n {'fieldtype': 'Select'}])\n", "self.assertTrue({'fieldtype': 'Table', 'fieldname': 'fields'} in VAR_3)\n", "self.assertTrue({'fieldtype': 'Select', 'fieldname': 'document_type'} in VAR_3)\n", "self.assertFalse({'fieldtype': 'Check', 'fieldname': 'issingle'} in VAR_3)\n" ]
[ "def test_or_filters(self):...\n", "data = DatabaseQuery('DocField').execute(filters={'parent': 'DocType'},\n fields=['fieldname', 'fieldtype'], or_filters=[{'fieldtype': 'Table'},\n {'fieldtype': 'Select'}])\n", "self.assertTrue({'fieldtype': 'Table', 'fieldname': 'fields'} in data)\n", "self.assertTrue({'fieldtype': 'Select', 'fieldname': 'document_type'} in data)\n", "self.assertFalse({'fieldtype': 'Check', 'fieldname': 'issingle'} in data)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_15(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_8 = self.get_info()\n", "self.save_info_to_db(VAR_8)\n", "VAR_10 = ''\n", "VAR_30 = VAR_8.latitude, VAR_8.longitude\n", "if not VAR_30[0]:\n", "VAR_10 += messages[self.user.language]['no_gps']\n", "VAR_31 = messages[self.user.language]['camera_info']\n", "VAR_32 = VAR_8.date_time, VAR_8.camera, VAR_8.lens, VAR_8.address[self.user\n .language]\n", "for arg in zip(VAR_31, VAR_32):\n", "if arg[1]:\n", "VAR_33 = self.user.language\n", "VAR_10 += f'*{arg[0]}*: {arg[1]}\\n'\n", "VAR_34 = messages[VAR_33]['users with the same feature'].values()\n", "VAR_35 = self.find_num_users_with_same_feature(VAR_8)\n", "for template, VAR_5 in zip(VAR_34, VAR_35):\n", "if VAR_5:\n", "return VAR_30, VAR_10\n", "VAR_10 += f'{template} {VAR_5}\\n'\n" ]
[ "def prepare_answer(self):...\n", "\"\"\"docstring\"\"\"\n", "image_data = self.get_info()\n", "self.save_info_to_db(image_data)\n", "answer = ''\n", "coordinates = image_data.latitude, image_data.longitude\n", "if not coordinates[0]:\n", "answer += messages[self.user.language]['no_gps']\n", "answ_template = messages[self.user.language]['camera_info']\n", "basic_data = (image_data.date_time, image_data.camera, image_data.lens,\n image_data.address[self.user.language])\n", "for arg in zip(answ_template, basic_data):\n", "if arg[1]:\n", "lang = self.user.language\n", "answer += f'*{arg[0]}*: {arg[1]}\\n'\n", "lang_templates = messages[lang]['users with the same feature'].values()\n", "ppl_wth_same_featrs = self.find_num_users_with_same_feature(image_data)\n", "for template, feature in zip(lang_templates, ppl_wth_same_featrs):\n", "if feature:\n", "return coordinates, answer\n", "answer += f'{template} {feature}\\n'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "AugAssign'", "Assign'", "Assign'", "For", "Condition", "Assign'", "AugAssign'", "Assign'", "Assign'", "For", "Condition", "Return'", "AugAssign'" ]
[ "@property...\n", "return self.exists and not os.access(self.file, os.W_OK)\n" ]
[ "@property...\n", "return self.exists and not os.access(self.file, os.W_OK)\n" ]
[ 0, 7 ]
[ "Condition", "Return'" ]
[ "def FUNC_14(self, VAR_10, VAR_11):...\n", "VAR_24 = self.common._cli_run('createhost -add %s %s' % (VAR_10, ' '.join(\n VAR_11)), None)\n" ]
[ "def _modify_3par_fibrechan_host(self, hostname, wwn):...\n", "out = self.common._cli_run('createhost -add %s %s' % (hostname, ' '.join(\n wwn)), None)\n" ]
[ 0, 2 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_1(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_18 = ['hp3par_api_url', 'hp3par_username', 'hp3par_password', 'san_ip',\n 'san_login', 'san_password']\n", "self.common.check_flags(self.configuration, VAR_18)\n" ]
[ "def _check_flags(self):...\n", "\"\"\"docstring\"\"\"\n", "required_flags = ['hp3par_api_url', 'hp3par_username', 'hp3par_password',\n 'san_ip', 'san_login', 'san_password']\n", "self.common.check_flags(self.configuration, required_flags)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "\"\"\"string\"\"\"\n", "import os\n", "VAR_0 = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n", "VAR_1 = 'eumasrxjp9des1)c^0i%=fx6ge-++7e8qaxs8%%an6ait_@d_!'\n", "VAR_2 = True\n", "VAR_3 = []\n", "VAR_4 = ['django.contrib.admin', 'django.contrib.auth',\n 'django.contrib.contenttypes', 'django.contrib.sessions',\n 'django.contrib.messages', 'django.contrib.staticfiles', 'sio']\n", "VAR_5 = ['django.middleware.security.SecurityMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware']\n", "VAR_6 = 'webapps.urls'\n", "VAR_7 = [{'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': {'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages']}}]\n", "VAR_8 = 'webapps.wsgi.application'\n", "VAR_9 = {'default': {'ENGINE': 'django.db.backends.postgresql_psycopg2',\n 'NAME': 'registration', 'USER': 'postgres', 'PASSWORD': '', 'HOST':\n 'localhost', 'PORT': ''}}\n", "VAR_10 = [{'NAME':\n 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'\n }, {'NAME':\n 'django.contrib.auth.password_validation.MinimumLengthValidator'}, {\n 'NAME':\n 'django.contrib.auth.password_validation.CommonPasswordValidator'}, {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}\n ]\n", "VAR_11 = 'en-us'\n", "VAR_12 = 'UTC'\n", "VAR_13 = True\n", "VAR_14 = True\n", "VAR_15 = True\n", "VAR_16 = '/static/'\n" ]
[ "\"\"\"\nDjango settings for webapps project.\n\nGenerated by 'django-admin startproject' using Django 1.10.1.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/1.10/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/1.10/ref/settings/\n\"\"\"\n", "import os\n", "BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n", "SECRET_KEY = 'eumasrxjp9des1)c^0i%=fx6ge-++7e8qaxs8%%an6ait_@d_!'\n", "DEBUG = True\n", "ALLOWED_HOSTS = []\n", "INSTALLED_APPS = ['django.contrib.admin', 'django.contrib.auth',\n 'django.contrib.contenttypes', 'django.contrib.sessions',\n 'django.contrib.messages', 'django.contrib.staticfiles', 'sio']\n", "MIDDLEWARE = ['django.middleware.security.SecurityMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware']\n", "ROOT_URLCONF = 'webapps.urls'\n", "TEMPLATES = [{'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': {'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages']}}]\n", "WSGI_APPLICATION = 'webapps.wsgi.application'\n", "DATABASES = {'default': {'ENGINE': 'django.db.backends.postgresql_psycopg2',\n 'NAME': 'registration', 'USER': 'postgres', 'PASSWORD': '', 'HOST':\n 'localhost', 'PORT': ''}}\n", "AUTH_PASSWORD_VALIDATORS = [{'NAME':\n 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'\n }, {'NAME':\n 'django.contrib.auth.password_validation.MinimumLengthValidator'}, {\n 'NAME':\n 'django.contrib.auth.password_validation.CommonPasswordValidator'}, {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}\n ]\n", "LANGUAGE_CODE = 'en-us'\n", "TIME_ZONE = 'UTC'\n", "USE_I18N = True\n", "USE_L10N = True\n", "USE_TZ = True\n", "STATIC_URL = '/static/'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_5(VAR_4, VAR_5):...\n", "VAR_6 = FUNC_3(VAR_4, VAR_5)\n", "logging.info('access.py: ' + (VAR_6 and 'authorized %s' % VAR_6.description or\n 'not authorized') + ' (token=%r, user=%r)' % (VAR_4.get('token'), VAR_5 and\n VAR_5.email()))\n", "if not VAR_6 and VAR_5:\n", "VAR_6 = CLASS_0(VAR_10=user.nickname(), VAR_2=user.email())\n", "return VAR_6\n" ]
[ "def check_and_log(request, user):...\n", "auth = check_request(request, user)\n", "logging.info('access.py: ' + (auth and 'authorized %s' % auth.description or\n 'not authorized') + ' (token=%r, user=%r)' % (request.get('token'), \n user and user.email()))\n", "if not auth and user:\n", "auth = Authorization(description=user.nickname(), email=user.email())\n", "return auth\n" ]
[ 0, 0, 5, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Condition", "Assign'", "Return'" ]
[ "import new\n", "import MySQLdb\n", "from MySQLdb import Warning\n", "from SQLObjectStore import SQLObjectStore\n", "\"\"\"string\"\"\"\n", "def __init__(self, **VAR_0):...\n", "self._autocommit = VAR_0.pop('autocommit', False)\n", "SQLObjectStore.__init__(self, **kwargs)\n", "def FUNC_0(self, VAR_1, VAR_2=False):...\n", "if not VAR_1.get('db'):\n", "VAR_1['db'] = self._model.sqlDatabaseName()\n", "def FUNC_1(self):...\n", "VAR_0 = self._dbArgs.copy()\n", "self.augmentDatabaseArgs(VAR_0)\n", "VAR_3 = self.dbapiModule().connect(**kwargs)\n", "if self._autocommit:\n", "return VAR_3\n", "VAR_3.autocommit(True)\n" ]
[ "import new\n", "import MySQLdb\n", "from MySQLdb import Warning\n", "from SQLObjectStore import SQLObjectStore\n", "\"\"\"MySQLObjectStore implements an object store backed by a MySQL database.\n\n MySQL notes:\n * MySQL home page: http://www.mysql.com.\n * MySQL version this was developed and tested with: 3.22.34 & 3.23.27\n * The platforms developed and tested with include Linux (Mandrake 7.1)\n and Windows ME.\n * The MySQL-Python DB API 2.0 module used under the hood is MySQLdb\n by Andy Dustman: http://dustman.net/andy/python/MySQLdb/.\n * Newer versions of MySQLdb have autocommit switched off by default.\n\n The connection arguments passed to __init__ are:\n - host\n - user\n - passwd\n - port\n - unix_socket\n - client_flag\n - autocommit\n\n You wouldn't use the 'db' argument, since that is determined by the model.\n\n See the MySQLdb docs or the DB API 2.0 docs for more information.\n http://www.python.org/topics/database/DatabaseAPI-2.0.html\n \"\"\"\n", "def __init__(self, **kwargs):...\n", "self._autocommit = kwargs.pop('autocommit', False)\n", "SQLObjectStore.__init__(self, **kwargs)\n", "def augmentDatabaseArgs(self, args, pool=False):...\n", "if not args.get('db'):\n", "args['db'] = self._model.sqlDatabaseName()\n", "def newConnection(self):...\n", "kwargs = self._dbArgs.copy()\n", "self.augmentDatabaseArgs(kwargs)\n", "conn = self.dbapiModule().connect(**kwargs)\n", "if self._autocommit:\n", "return conn\n", "conn.autocommit(True)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "ImportFrom'", "ImportFrom'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "FunctionDef'", "Condition", "Assign'", "FunctionDef'", "Assign'", "Expr'", "Assign'", "Condition", "Return'", "Expr'" ]
[ "def FUNC_4(self):...\n", "VAR_2 = CLASS_1.construct_testscript_command('test_interactive_program.py')\n", "self.assertEqual(p.stdout.readline(), 'test_program X\\n')\n", "self.assertEqual(p.stdout.readline(), 'Type in a number:\\n')\n", "p.stdin.write('33\\n')\n", "p.stdin.flush()\n", "self.assertEqual(p.stdout.readline(), '33\\n')\n", "self.assertEqual(p.stdout.readline(), 'Exiting program.\\n')\n" ]
[ "def test_run_interactive_shell_command(self):...\n", "command = RunShellCommandTest.construct_testscript_command(\n 'test_interactive_program.py')\n", "self.assertEqual(p.stdout.readline(), 'test_program X\\n')\n", "self.assertEqual(p.stdout.readline(), 'Type in a number:\\n')\n", "p.stdin.write('33\\n')\n", "p.stdin.flush()\n", "self.assertEqual(p.stdout.readline(), '33\\n')\n", "self.assertEqual(p.stdout.readline(), 'Exiting program.\\n')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_7(self, VAR_3):...\n", "if VAR_3.name in self.host_names:\n", "self.hosts.remove(VAR_3)\n", "self._hosts.remove(VAR_3.name)\n", "VAR_3.remove_group(self)\n", "self.clear_hosts_cache()\n" ]
[ "def remove_host(self, host):...\n", "if host.name in self.host_names:\n", "self.hosts.remove(host)\n", "self._hosts.remove(host.name)\n", "host.remove_group(self)\n", "self.clear_hosts_cache()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_9(self):...\n", "VAR_7 = BokChoyTestSuite('', VAR_3=True)\n", "VAR_1 = 'tests'\n", "self.assertEqual(VAR_7.cmd, self._expected_command(VAR_1=name, VAR_3=True))\n" ]
[ "def test_verify_xss(self):...\n", "suite = BokChoyTestSuite('', verify_xss=True)\n", "name = 'tests'\n", "self.assertEqual(suite.cmd, self._expected_command(name=name, verify_xss=True))\n" ]
[ 0, 0, 0, 3 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "\"\"\"Bot interface used in bot_config.py.\"\"\"\n", "import logging\n", "import os\n", "import threading\n", "import time\n", "import os_utilities\n", "from utils import zip_package\n", "VAR_0 = os.path.abspath(zip_package.get_main_script_path())\n", "def __init__(self, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5, VAR_6):...\n", "self._attributes = VAR_2\n", "self._base_dir = VAR_5\n", "self._remote = VAR_1\n", "self._server = VAR_3\n", "self._server_version = VAR_4\n", "self._shutdown_hook = VAR_6\n", "self._timers = []\n", "self._timers_dying = False\n", "self._timers_lock = threading.Lock()\n", "@property...\n", "\"\"\"docstring\"\"\"\n", "return self._base_dir\n" ]
[ "\"\"\"Bot interface used in bot_config.py.\"\"\"\n", "import logging\n", "import os\n", "import threading\n", "import time\n", "import os_utilities\n", "from utils import zip_package\n", "THIS_FILE = os.path.abspath(zip_package.get_main_script_path())\n", "def __init__(self, remote, attributes, server, server_version, base_dir,...\n", "self._attributes = attributes\n", "self._base_dir = base_dir\n", "self._remote = remote\n", "self._server = server\n", "self._server_version = server_version\n", "self._shutdown_hook = shutdown_hook\n", "self._timers = []\n", "self._timers_dying = False\n", "self._timers_lock = threading.Lock()\n", "@property...\n", "\"\"\"docstring\"\"\"\n", "return self._base_dir\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Docstring", "Return'" ]
[ "async def FUNC_5(self):...\n", "if self._loaded:\n", "await self._runon('finish')\n" ]
[ "async def finish(self):...\n", "if self._loaded:\n", "await self._runon('finish')\n" ]
[ 0, 0, 0 ]
[ "AsyncFunctionDef'", "Condition", "Expr'" ]
[ "def FUNC_2(self):...\n", "return ['Afghanistan', 'Albania', 'Algeria', 'American Samoa', 'Andorra',\n 'Angola', 'Anguilla', 'Antarctica', 'Antigua and Barbuda', 'Argentina',\n 'Armenia', 'Aruba', 'Australia', 'Austria', 'Azerbaijan', 'Bahamas',\n 'Bahrain', 'Bangladesh', 'Barbados', 'Belarus', 'Belgium', 'Belize',\n 'Benin', 'Bermuda', 'Bhutan', 'Bolivia', 'Bosnia and Herzegovina',\n 'Botswana', 'Brazil', 'British Indian Ocean Territory',\n 'British Virgin Islands', 'Brunei', 'Bulgaria', 'Burkina Faso',\n 'Burundi', 'Cambodia', 'Cameroon', 'Canada', 'Cape Verde',\n 'Cayman Islands', 'Central African Republic', 'Chad', 'Chile', 'China',\n 'Christmas Island', 'Cocos Islands', 'Colombia', 'Comoros',\n 'Cook Islands', 'Costa Rica', 'Croatia', 'Cuba', 'Curacao', 'Cyprus',\n 'Czech Republic', 'Democratic Republic of the Congo', 'Denmark',\n 'Djibouti', 'Dominica', 'Dominican Republic', 'East Timor', 'Ecuador',\n 'Egypt', 'El Salvador', 'Equatorial Guinea', 'Eritrea', 'Estonia',\n 'Ethiopia', 'Falkland Islands', 'Faroe Islands', 'Fiji', 'Finland',\n 'France', 'French Polynesia', 'Gabon', 'Gambia', 'Georgia', 'Germany',\n 'Ghana', 'Gibraltar', 'Greece', 'Greenland', 'Grenada', 'Guam',\n 'Guatemala', 'Guernsey', 'Guinea', 'Guinea-Bissau', 'Guyana', 'Haiti',\n 'Honduras', 'Hong Kong', 'Hungary', 'Iceland', 'India', 'Indonesia',\n 'Iran', 'Iraq', 'Ireland', 'Isle of Man', 'Israel', 'Italy',\n 'Ivory Coast', 'Jamaica', 'Japan', 'Jersey', 'Jordan', 'Kazakhstan',\n 'Kenya', 'Kiribati', 'Kosovo', 'Kuwait', 'Kyrgyzstan', 'Laos', 'Latvia',\n 'Lebanon', 'Lesotho', 'Liberia', 'Libya', 'Liechtenstein', 'Lithuania',\n 'Luxembourg', 'Macau', 'Macedonia', 'Madagascar', 'Malawi', 'Malaysia',\n 'Maldives', 'Mali', 'Malta', 'Marshall Islands', 'Mauritania',\n 'Mauritius', 'Mayotte', 'Mexico', 'Micronesia', 'Moldova', 'Monaco',\n 'Mongolia', 'Montenegro', 'Montserrat', 'Morocco', 'Mozambique',\n 'Myanmar', 'Namibia', 'Nauru', 'Nepal', 'Netherlands',\n 'Netherlands Antilles', 'New Caledonia', 'New Zealand', 'Nicaragua',\n 'Niger', 'Nigeria', 'Niue', 'North Korea', 'Northern Mariana Islands',\n 'Norway', 'Oman', 'Pakistan', 'Palau', 'Palestine', 'Panama',\n 'Papua New Guinea', 'Paraguay', 'Peru', 'Philippines', 'Pitcairn',\n 'Poland', 'Portugal', 'Puerto Rico', 'Qatar', 'Republic of the Congo',\n 'Reunion', 'Romania', 'Russia', 'Rwanda', 'Saint Barthelemy',\n 'Saint Helena', 'Saint Kitts and Nevis', 'Saint Lucia', 'Saint Martin',\n 'Saint Pierre and Miquelon', 'Saint Vincent and the Grenadines',\n 'Samoa', 'San Marino', 'Sao Tome and Principe', 'Saudi Arabia',\n 'Senegal', 'Serbia', 'Seychelles', 'Sierra Leone', 'Singapore',\n 'Sint Maarten', 'Slovakia', 'Slovenia', 'Solomon Islands', 'Somalia',\n 'South Africa', 'South Korea', 'South Sudan', 'Spain', 'Sri Lanka',\n 'Sudan', 'Suriname', 'Svalbard and Jan Mayen', 'Swaziland', 'Sweden',\n 'Switzerland', 'Syria', 'Taiwan', 'Tajikistan', 'Tanzania', 'Thailand',\n 'Togo', 'Tokelau', 'Tonga', 'Trinidad and Tobago', 'Tunisia', 'Turkey',\n 'Turkmenistan', 'Turks and Caicos Islands', 'Tuvalu',\n 'U.S. Virgin Islands', 'Uganda', 'Ukraine', 'United Arab Emirates',\n 'United Kingdom', 'United States', 'Uruguay', 'Uzbekistan', 'Vanuatu',\n 'Vatican', 'Venezuela', 'Vietnam', 'Wallis and Futuna',\n 'Western Sahara', 'Yemen', 'Zambia', 'Zimbabwe']\n" ]
[ "def country_list(self):...\n", "return ['Afghanistan', 'Albania', 'Algeria', 'American Samoa', 'Andorra',\n 'Angola', 'Anguilla', 'Antarctica', 'Antigua and Barbuda', 'Argentina',\n 'Armenia', 'Aruba', 'Australia', 'Austria', 'Azerbaijan', 'Bahamas',\n 'Bahrain', 'Bangladesh', 'Barbados', 'Belarus', 'Belgium', 'Belize',\n 'Benin', 'Bermuda', 'Bhutan', 'Bolivia', 'Bosnia and Herzegovina',\n 'Botswana', 'Brazil', 'British Indian Ocean Territory',\n 'British Virgin Islands', 'Brunei', 'Bulgaria', 'Burkina Faso',\n 'Burundi', 'Cambodia', 'Cameroon', 'Canada', 'Cape Verde',\n 'Cayman Islands', 'Central African Republic', 'Chad', 'Chile', 'China',\n 'Christmas Island', 'Cocos Islands', 'Colombia', 'Comoros',\n 'Cook Islands', 'Costa Rica', 'Croatia', 'Cuba', 'Curacao', 'Cyprus',\n 'Czech Republic', 'Democratic Republic of the Congo', 'Denmark',\n 'Djibouti', 'Dominica', 'Dominican Republic', 'East Timor', 'Ecuador',\n 'Egypt', 'El Salvador', 'Equatorial Guinea', 'Eritrea', 'Estonia',\n 'Ethiopia', 'Falkland Islands', 'Faroe Islands', 'Fiji', 'Finland',\n 'France', 'French Polynesia', 'Gabon', 'Gambia', 'Georgia', 'Germany',\n 'Ghana', 'Gibraltar', 'Greece', 'Greenland', 'Grenada', 'Guam',\n 'Guatemala', 'Guernsey', 'Guinea', 'Guinea-Bissau', 'Guyana', 'Haiti',\n 'Honduras', 'Hong Kong', 'Hungary', 'Iceland', 'India', 'Indonesia',\n 'Iran', 'Iraq', 'Ireland', 'Isle of Man', 'Israel', 'Italy',\n 'Ivory Coast', 'Jamaica', 'Japan', 'Jersey', 'Jordan', 'Kazakhstan',\n 'Kenya', 'Kiribati', 'Kosovo', 'Kuwait', 'Kyrgyzstan', 'Laos', 'Latvia',\n 'Lebanon', 'Lesotho', 'Liberia', 'Libya', 'Liechtenstein', 'Lithuania',\n 'Luxembourg', 'Macau', 'Macedonia', 'Madagascar', 'Malawi', 'Malaysia',\n 'Maldives', 'Mali', 'Malta', 'Marshall Islands', 'Mauritania',\n 'Mauritius', 'Mayotte', 'Mexico', 'Micronesia', 'Moldova', 'Monaco',\n 'Mongolia', 'Montenegro', 'Montserrat', 'Morocco', 'Mozambique',\n 'Myanmar', 'Namibia', 'Nauru', 'Nepal', 'Netherlands',\n 'Netherlands Antilles', 'New Caledonia', 'New Zealand', 'Nicaragua',\n 'Niger', 'Nigeria', 'Niue', 'North Korea', 'Northern Mariana Islands',\n 'Norway', 'Oman', 'Pakistan', 'Palau', 'Palestine', 'Panama',\n 'Papua New Guinea', 'Paraguay', 'Peru', 'Philippines', 'Pitcairn',\n 'Poland', 'Portugal', 'Puerto Rico', 'Qatar', 'Republic of the Congo',\n 'Reunion', 'Romania', 'Russia', 'Rwanda', 'Saint Barthelemy',\n 'Saint Helena', 'Saint Kitts and Nevis', 'Saint Lucia', 'Saint Martin',\n 'Saint Pierre and Miquelon', 'Saint Vincent and the Grenadines',\n 'Samoa', 'San Marino', 'Sao Tome and Principe', 'Saudi Arabia',\n 'Senegal', 'Serbia', 'Seychelles', 'Sierra Leone', 'Singapore',\n 'Sint Maarten', 'Slovakia', 'Slovenia', 'Solomon Islands', 'Somalia',\n 'South Africa', 'South Korea', 'South Sudan', 'Spain', 'Sri Lanka',\n 'Sudan', 'Suriname', 'Svalbard and Jan Mayen', 'Swaziland', 'Sweden',\n 'Switzerland', 'Syria', 'Taiwan', 'Tajikistan', 'Tanzania', 'Thailand',\n 'Togo', 'Tokelau', 'Tonga', 'Trinidad and Tobago', 'Tunisia', 'Turkey',\n 'Turkmenistan', 'Turks and Caicos Islands', 'Tuvalu',\n 'U.S. Virgin Islands', 'Uganda', 'Ukraine', 'United Arab Emirates',\n 'United Kingdom', 'United States', 'Uruguay', 'Uzbekistan', 'Vanuatu',\n 'Vatican', 'Venezuela', 'Vietnam', 'Wallis and Futuna',\n 'Western Sahara', 'Yemen', 'Zambia', 'Zimbabwe']\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_9(self, VAR_10):...\n", "VAR_22 = _('local_path not supported')\n" ]
[ "def local_path(self, volume):...\n", "msg = _('local_path not supported')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def __call__(self, VAR_2, VAR_3, VAR_4=None):...\n", "if len(VAR_3.strip()) > 0:\n", "return\n", "self.reply(VAR_2, random.choice(self.fnordlist))\n", "return True\n" ]
[ "def __call__(self, msg, arguments, errorSink=None):...\n", "if len(arguments.strip()) > 0:\n", "return\n", "self.reply(msg, random.choice(self.fnordlist))\n", "return True\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Expr'", "Return'" ]
[ "def FUNC_3(VAR_14, VAR_15):...\n", "return FUNC_6(VAR_14, [[VAR_16] for line in VAR_15 for VAR_16 in line], False)\n" ]
[ "def vStrip(dims, files):...\n", "return smartGrid(dims, [[file] for line in files for file in line], False)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_12(self, VAR_1, VAR_62=False, VAR_63=False, VAR_4=None):...\n", "\"\"\"docstring\"\"\"\n", "if not urllib.parse.urlparse(VAR_1).scheme:\n", "if not os.path.isabs(VAR_1) and self.included_stack:\n", "if VAR_1 in self.included:\n", "VAR_108 = os.path.dirname(self.included_stack[-1])\n", "VAR_1 = os.path.abspath(VAR_1)\n", "logger.info('Multiple include of {} ignored'.format(VAR_1))\n", "self.included.append(VAR_1)\n", "VAR_1 = os.path.join(VAR_108, VAR_1)\n", "return\n", "self.included_stack.append(VAR_1)\n", "VAR_83 = self\n", "VAR_96 = self.first_rule\n", "VAR_97, VAR_98 = parse(VAR_1, VAR_4=self.overwrite_shellcmd)\n", "if VAR_63:\n", "print(VAR_97)\n", "sys.path.insert(0, os.path.dirname(VAR_1))\n", "self.linemaps[VAR_1] = VAR_98\n", "exec(compile(VAR_97, VAR_1, 'exec'), self.globals)\n", "if not VAR_62:\n", "self.first_rule = VAR_96\n", "self.included_stack.pop()\n" ]
[ "def include(self, snakefile, overwrite_first_rule=False, print_compilation=...\n", "\"\"\"docstring\"\"\"\n", "if not urllib.parse.urlparse(snakefile).scheme:\n", "if not os.path.isabs(snakefile) and self.included_stack:\n", "if snakefile in self.included:\n", "current_path = os.path.dirname(self.included_stack[-1])\n", "snakefile = os.path.abspath(snakefile)\n", "logger.info('Multiple include of {} ignored'.format(snakefile))\n", "self.included.append(snakefile)\n", "snakefile = os.path.join(current_path, snakefile)\n", "return\n", "self.included_stack.append(snakefile)\n", "workflow = self\n", "first_rule = self.first_rule\n", "code, linemap = parse(snakefile, overwrite_shellcmd=self.overwrite_shellcmd)\n", "if print_compilation:\n", "print(code)\n", "sys.path.insert(0, os.path.dirname(snakefile))\n", "self.linemaps[snakefile] = linemap\n", "exec(compile(code, snakefile, 'exec'), self.globals)\n", "if not overwrite_first_rule:\n", "self.first_rule = first_rule\n", "self.included_stack.pop()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Return'", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "Assign'", "Expr'", "Condition", "Assign'", "Expr'" ]
[ "@app.route('/api/settings', methods=['GET'])...\n", "return controller.settings.to_map()\n" ]
[ "@app.route('/api/settings', methods=['GET'])...\n", "return controller.settings.to_map()\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_0(self, VAR_1, VAR_2=None, VAR_3=False):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = 'string'.format(default_store=store, repo_dir=REPO_DIR, shard_str=\n '/shard_' + self.shard if self.shard else '', exp_text=name,\n a11y_custom_file=\n 'node_modules/edx-custom-a11y-rules/lib/custom_a11y_rules.js', VAR_3=\n verify_xss)\n", "return VAR_6\n" ]
[ "def _expected_command(self, name, store=None, verify_xss=False):...\n", "\"\"\"docstring\"\"\"\n", "expected_statement = (\n \"DEFAULT_STORE={default_store} SCREENSHOT_DIR='{repo_dir}/test_root/log{shard_str}' BOK_CHOY_HAR_DIR='{repo_dir}/test_root/log{shard_str}/hars' BOKCHOY_A11Y_CUSTOM_RULES_FILE='{repo_dir}/{a11y_custom_file}' SELENIUM_DRIVER_LOG_DIR='{repo_dir}/test_root/log{shard_str}' VERIFY_XSS='{verify_xss}' nosetests {repo_dir}/common/test/acceptance/{exp_text} --with-xunit --xunit-file={repo_dir}/reports/bok_choy{shard_str}/xunit.xml --verbosity=2 \"\n .format(default_store=store, repo_dir=REPO_DIR, shard_str='/shard_' +\n self.shard if self.shard else '', exp_text=name, a11y_custom_file=\n 'node_modules/edx-custom-a11y-rules/lib/custom_a11y_rules.js',\n verify_xss=verify_xss))\n", "return expected_statement\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_9(self, VAR_7=None, VAR_8=None, VAR_9=0):...\n", "\"\"\"docstring\"\"\"\n", "if isinstance(VAR_7, (six.string_types, bytes)):\n", "VAR_7 = [VAR_7.decode('utf-8')]\n", "if isinstance(VAR_8, (six.string_types, bytes)):\n", "VAR_8 = [VAR_8.decode('utf-8')]\n", "if VAR_7:\n", "VAR_7 = ''.join(s for s in VAR_7) or True\n", "if VAR_8:\n", "VAR_8 = ' '.join(s for s in VAR_8) or False\n", "VAR_33 = {'status': VAR_9, 'stdout': VAR_7, 'stderr': VAR_8}\n", "return VAR_33\n" ]
[ "def _fmt_output(self, stdout=None, stderr=None, rc=0):...\n", "\"\"\"docstring\"\"\"\n", "if isinstance(stdout, (six.string_types, bytes)):\n", "stdout = [stdout.decode('utf-8')]\n", "if isinstance(stderr, (six.string_types, bytes)):\n", "stderr = [stderr.decode('utf-8')]\n", "if stdout:\n", "stdout = ''.join(s for s in stdout) or True\n", "if stderr:\n", "stderr = ' '.join(s for s in stderr) or False\n", "res = {'status': rc, 'stdout': stdout, 'stderr': stderr}\n", "return res\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def __init__(self, *VAR_3, **VAR_4):...\n", "VAR_11 = VAR_4.pop('execute', self.san_execute)\n", "super(CLASS_0, self).__init__(*VAR_3, VAR_11=execute, **kwargs)\n", "self.configuration.append_config_values(VAR_1)\n", "self.run_local = self.configuration.san_is_local\n", "self.sshpool = None\n" ]
[ "def __init__(self, *args, **kwargs):...\n", "execute = kwargs.pop('execute', self.san_execute)\n", "super(SanDriver, self).__init__(*args, execute=execute, **kwargs)\n", "self.configuration.append_config_values(san_opts)\n", "self.run_local = self.configuration.san_is_local\n", "self.sshpool = None\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'" ]
[ "def __init__(self):...\n", "self.builtins = {'exit': self._builtin_exit, 'pwd': self._builtin_pwd, 'cd':\n self._builtin_cd}\n" ]
[ "def __init__(self):...\n", "self.builtins = {'exit': self._builtin_exit, 'pwd': self._builtin_pwd, 'cd':\n self._builtin_cd}\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_11(self, VAR_13):...\n", "\"\"\"docstring\"\"\"\n", "VAR_26 = self.add_argument_group('ParlAI Image Preprocessing Arguments')\n", "VAR_26.add_argument('--image-size', type=int, default=256, help=\n 'resizing dimension for images')\n", "VAR_26.add_argument('--image-cropsize', type=int, default=224, help=\n 'crop dimension for images')\n" ]
[ "def add_image_args(self, image_mode):...\n", "\"\"\"docstring\"\"\"\n", "parlai = self.add_argument_group('ParlAI Image Preprocessing Arguments')\n", "parlai.add_argument('--image-size', type=int, default=256, help=\n 'resizing dimension for images')\n", "parlai.add_argument('--image-cropsize', type=int, default=224, help=\n 'crop dimension for images')\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_3(self):...\n", "VAR_3 = self.get_root_nodes()\n", "VAR_4 = VAR_3\n", "VAR_7 = False\n", "for index, n in enumerate(VAR_4):\n", "VAR_10 = n['node_object']\n", "return True, VAR_7\n", "VAR_11 = VAR_10.job\n", "if VAR_10.unified_job_template is None:\n", "VAR_7 = True\n", "if not VAR_11:\n", "return False, False\n", "VAR_12 = self.get_dependencies(VAR_10, 'success_nodes')\n", "VAR_13 = self.get_dependencies(VAR_10, 'failure_nodes')\n", "VAR_14 = self.get_dependencies(VAR_10, 'always_nodes')\n", "if not VAR_7 and VAR_11.status != 'successful':\n", "VAR_16 = VAR_12 + VAR_13 + VAR_14\n", "if VAR_11.status in ['canceled', 'error']:\n", "for child in VAR_16:\n", "if VAR_11.status == 'failed':\n", "if child['node_object'].job:\n", "VAR_4.extend(VAR_13 + VAR_14)\n", "if VAR_11.status == 'successful':\n", "VAR_4.extend(VAR_12 + VAR_14)\n", "return False, False\n" ]
[ "def is_workflow_done(self):...\n", "root_nodes = self.get_root_nodes()\n", "nodes = root_nodes\n", "is_failed = False\n", "for index, n in enumerate(nodes):\n", "obj = n['node_object']\n", "return True, is_failed\n", "job = obj.job\n", "if obj.unified_job_template is None:\n", "is_failed = True\n", "if not job:\n", "return False, False\n", "children_success = self.get_dependencies(obj, 'success_nodes')\n", "children_failed = self.get_dependencies(obj, 'failure_nodes')\n", "children_always = self.get_dependencies(obj, 'always_nodes')\n", "if not is_failed and job.status != 'successful':\n", "children_all = children_success + children_failed + children_always\n", "if job.status in ['canceled', 'error']:\n", "for child in children_all:\n", "if job.status == 'failed':\n", "if child['node_object'].job:\n", "nodes.extend(children_failed + children_always)\n", "if job.status == 'successful':\n", "nodes.extend(children_success + children_always)\n", "return False, False\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Assign'", "Condition", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "For", "Condition", "Condition", "Expr'", "Condition", "Expr'", "Return'" ]
[ "def FUNC_25(self, VAR_14):...\n", "" ]
[ "def is_fk(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_19(self):...\n", "\"\"\"docstring\"\"\"\n", "return FUNC_20(self.project)\n" ]
[ "def get_phase_acl(self):...\n", "\"\"\"docstring\"\"\"\n", "return get_project_acl(self.project)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_1(VAR_2, VAR_3, VAR_4):...\n", "VAR_1 = helper.get_header(VAR_4, 'Subject', '')\n", "VAR_18 = FUNC_0(VAR_1)\n", "if not VAR_18:\n", "VAR_19 = VAR_18.group('id')\n", "assert re.match('^[0-9]+$', VAR_19)\n", "VAR_5 = int(VAR_19)\n", "VAR_20 = VAR_29.get_by_id(VAR_2, VAR_5)\n", "if not VAR_20:\n", "return VAR_20\n" ]
[ "def sheet_by_mail(db, uid, message):...\n", "subject = helper.get_header(message, 'Subject', '')\n", "sheet_m = _match_subject(subject)\n", "if not sheet_m:\n", "sheet_id_str = sheet_m.group('id')\n", "assert re.match('^[0-9]+$', sheet_id_str)\n", "sheet_id = int(sheet_id_str)\n", "res = sheet.get_by_id(db, sheet_id)\n", "if not res:\n", "return res\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Assert'", "Assign'", "Assign'", "Condition", "Return'" ]
[ "def __init__(self, VAR_18, VAR_3, *VAR_19, **VAR_5):...\n", "super().__init__(*VAR_19, **kwargs)\n", "self.index = VAR_18\n", "self.base_field = VAR_3\n" ]
[ "def __init__(self, index, base_field, *args, **kwargs):...\n", "super().__init__(*args, **kwargs)\n", "self.index = index\n", "self.base_field = base_field\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'" ]
[ "def __init__(self, VAR_0):...\n", "self.request = VAR_0\n", "self.target_field = VAR_0.POST.get('target_field')\n", "self.new_value = VAR_0.POST.get('new_value')\n" ]
[ "def __init__(self, request):...\n", "self.request = request\n", "self.target_field = request.POST.get('target_field')\n", "self.new_value = request.POST.get('new_value')\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'" ]
[ "from datetime import datetime\n", "from os.path import exists\n", "from sqlite3 import connect\n", "from termcolor import colored\n", "from threading import Lock\n", "from traceback import format_exc, format_stack\n", "VAR_0 = '__LOG_EXCEPTION__'\n", "VAR_1 = '__LOG_TRACE__'\n", "def __init__(self, VAR_2={}, VAR_3=False, VAR_4='campaign-data/db.sqlite3'):...\n", "if not exists(VAR_4):\n", "self.campaign = VAR_2\n", "self.result = {}\n", "self.file = VAR_4\n", "self.lock = Lock()\n", "if VAR_3:\n", "db.__create_result()\n", "def __enter__(self):...\n", "def FUNC_13(VAR_18, VAR_19):...\n", "VAR_6 = {}\n", "for id_, VAR_21 in enumerate(VAR_18.description):\n", "VAR_6[VAR_21[0]] = VAR_19[id_]\n", "return VAR_6\n" ]
[ "from datetime import datetime\n", "from os.path import exists\n", "from sqlite3 import connect\n", "from termcolor import colored\n", "from threading import Lock\n", "from traceback import format_exc, format_stack\n", "log_exception = '__LOG_EXCEPTION__'\n", "log_trace = '__LOG_TRACE__'\n", "def __init__(self, campaign={}, create_result=False, database_file=...\n", "if not exists(database_file):\n", "self.campaign = campaign\n", "self.result = {}\n", "self.file = database_file\n", "self.lock = Lock()\n", "if create_result:\n", "db.__create_result()\n", "def __enter__(self):...\n", "def dict_factory(cursor, row):...\n", "dictionary = {}\n", "for id_, column in enumerate(cursor.description):\n", "dictionary[column[0]] = row[id_]\n", "return dictionary\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "FunctionDef'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "FunctionDef'", "FunctionDef'", "Assign'", "For", "Assign'", "Return'" ]
[ "def FUNC_1(VAR_3, VAR_4):...\n", "if not (VAR_4 and VAR_3.user.is_authenticated()):\n", "return -1\n", "if not VAR_4.course.is_course_staff(VAR_3.user):\n", "VAR_10 = VAR_4.availability\n", "def FUNC_3(VAR_9):...\n", "VAR_11 = VAR_4.USERGROUP\n", "VAR_13, VAR_14, VAR_14, VAR_14 = VAR_3.find(VAR_9)\n", "VAR_12 = VAR_3.user.userprofile.is_external\n", "return VAR_13['passed']\n", "if VAR_10 == VAR_11.EXTERNAL_USERS and not VAR_12 or VAR_10 == VAR_11.INTERNAL_USERS and VAR_12:\n", "return -1\n" ]
[ "def assign_grade(cached_points, diploma_design):...\n", "if not (diploma_design and cached_points.user.is_authenticated()):\n", "return -1\n", "if not diploma_design.course.is_course_staff(cached_points.user):\n", "avail = diploma_design.availability\n", "def is_passed(model):...\n", "opt = diploma_design.USERGROUP\n", "entry, _, _, _ = cached_points.find(model)\n", "external = cached_points.user.userprofile.is_external\n", "return entry['passed']\n", "if avail == opt.EXTERNAL_USERS and not external or avail == opt.INTERNAL_USERS and external:\n", "return -1\n" ]
[ 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Return'", "Condition", "Return'" ]
[ "def FUNC_20(self, VAR_11):...\n", "\"\"\"docstring\"\"\"\n" ]
[ "def extra_attrs(self, attrs):...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Docstring" ]
[ "def FUNC_6(self, VAR_7):...\n", "if not VAR_7:\n", "return {}\n", "def FUNC_11(VAR_12):...\n", "return {'id': VAR_12.id, 'size': VAR_12.members.count(), 'collaborators':\n VAR_12.collaborator_names(VAR_7)}\n" ]
[ "def _generate_groups(self, profile):...\n", "if not profile:\n", "return {}\n", "def group_entry(group):...\n", "return {'id': group.id, 'size': group.members.count(), 'collaborators':\n group.collaborator_names(profile)}\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "FunctionDef'", "Return'" ]
[ "def FUNC_20(self):...\n", "VAR_16 = self.mox.CreateMock(paramiko.SSHClient)\n", "VAR_12 = self.mox.CreateMock(paramiko.Channel)\n", "VAR_17 = self.mox.CreateMock(paramiko.Transport)\n", "self.mox.StubOutWithMock(self.driver, '_get_output')\n", "self.mox.StubOutWithMock(VAR_16, 'get_transport')\n", "self.mox.StubOutWithMock(VAR_12, 'invoke_shell')\n", "VAR_15 = ['Error: test run', '% Error']\n", "VAR_16.get_transport().AndReturn(VAR_17)\n", "VAR_17.open_session().AndReturn(VAR_12)\n", "VAR_12.invoke_shell()\n", "self.driver._get_output(VAR_12).AndReturn(VAR_15)\n", "VAR_18 = 'this is dummy command'\n", "VAR_12.send('stty columns 255' + '\\r')\n", "self.driver._get_output(VAR_12).AndReturn(VAR_15)\n", "VAR_12.send(VAR_18 + '\\r')\n", "self.driver._get_output(VAR_12).AndReturn(VAR_15)\n", "VAR_12.close()\n", "self.mox.ReplayAll()\n", "self.assertRaises(processutils.ProcessExecutionError, self.driver.\n _ssh_execute, VAR_16, VAR_18)\n" ]
[ "def test_ssh_execute_error(self):...\n", "ssh = self.mox.CreateMock(paramiko.SSHClient)\n", "chan = self.mox.CreateMock(paramiko.Channel)\n", "transport = self.mox.CreateMock(paramiko.Transport)\n", "self.mox.StubOutWithMock(self.driver, '_get_output')\n", "self.mox.StubOutWithMock(ssh, 'get_transport')\n", "self.mox.StubOutWithMock(chan, 'invoke_shell')\n", "expected_output = ['Error: test run', '% Error']\n", "ssh.get_transport().AndReturn(transport)\n", "transport.open_session().AndReturn(chan)\n", "chan.invoke_shell()\n", "self.driver._get_output(chan).AndReturn(expected_output)\n", "cmd = 'this is dummy command'\n", "chan.send('stty columns 255' + '\\r')\n", "self.driver._get_output(chan).AndReturn(expected_output)\n", "chan.send(cmd + '\\r')\n", "self.driver._get_output(chan).AndReturn(expected_output)\n", "chan.close()\n", "self.mox.ReplayAll()\n", "self.assertRaises(processutils.ProcessExecutionError, self.driver.\n _ssh_execute, ssh, cmd)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_8(self, VAR_11, VAR_12):...\n", "VAR_23 = datetime.fromtimestamp(VAR_11)\n", "VAR_24 = datetime.fromtimestamp(VAR_12)\n", "return VAR_23.year == VAR_24.year and VAR_23.month == VAR_24.month and VAR_23.day == VAR_24.day\n" ]
[ "def is_timestamps_from_same_day(self, ts1, ts2):...\n", "d1 = datetime.fromtimestamp(ts1)\n", "d2 = datetime.fromtimestamp(ts2)\n", "return d1.year == d2.year and d1.month == d2.month and d1.day == d2.day\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_2(VAR_2, VAR_3, VAR_4, VAR_5=None):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_5 is None:\n", "VAR_5 = ray.DriverID.nil()\n", "VAR_2.raylet_client.push_error(VAR_5, VAR_3, VAR_4, time.time())\n" ]
[ "def push_error_to_driver(worker, error_type, message, driver_id=None):...\n", "\"\"\"docstring\"\"\"\n", "if driver_id is None:\n", "driver_id = ray.DriverID.nil()\n", "worker.raylet_client.push_error(driver_id, error_type, message, time.time())\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Expr'" ]
[ "from flask import Flask, request\n", "from flask_restful import Resource, Api\n", "from sqlalchemy import create_engine\n", "from json import dumps\n", "from flask_jsonpify import jsonify\n", "import sqlite3\n", "import jwt\n", "VAR_0 = Flask(__name__)\n", "VAR_1 = Api(VAR_0)\n", "def FUNC_0(self):...\n", "VAR_3 = VAR_2.execute('SELECT * FROM USERS')\n", "VAR_4 = 0\n", "for row in VAR_3:\n", "VAR_4 = VAR_4 + 1\n", "return {'Number of users': VAR_4}\n" ]
[ "from flask import Flask, request\n", "from flask_restful import Resource, Api\n", "from sqlalchemy import create_engine\n", "from json import dumps\n", "from flask_jsonpify import jsonify\n", "import sqlite3\n", "import jwt\n", "app = Flask(__name__)\n", "api = Api(app)\n", "def get(self):...\n", "query = conn.execute('SELECT * FROM USERS')\n", "i = 0\n", "for row in query:\n", "i = i + 1\n", "return {'Number of users': i}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "For", "Assign'", "Return'" ]
[ "def FUNC_15(self, VAR_34, VAR_29):...\n", "\"\"\"docstring\"\"\"\n" ]
[ "def read(self, fd, size):...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Docstring" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "return list(listfiles(VAR_12, VAR_13=restriction, VAR_14=omit_value))\n" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "return list(listfiles(pattern, restriction=restriction, omit_value=omit_value))\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_27(self):...\n", "VAR_16 = vimsupport.CurrentFiletypes()\n", "VAR_17 = self._user_options['filetype_specific_completion_to_disable']\n", "return not all([(x in VAR_17) for x in VAR_16])\n" ]
[ "def CurrentFiletypeCompletionEnabled(self):...\n", "filetypes = vimsupport.CurrentFiletypes()\n", "filetype_to_disable = self._user_options[\n 'filetype_specific_completion_to_disable']\n", "return not all([(x in filetype_to_disable) for x in filetypes])\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_7(self, VAR_9):...\n", "VAR_2 = FUNC_0(self.view.window())\n", "VAR_6 = self.view.file_name()\n", "VAR_12 = Urtext.meta.NodeMetadata(os.path.join(VAR_2, VAR_6))\n", "VAR_13 = CLASS_0(VAR_6)\n", "if VAR_12.get_tag('title') != 'Untitled':\n", "VAR_19 = VAR_12.get_tag('title')[0].strip()\n", "if VAR_12.get_tag('index') != []:\n", "VAR_13.set_title(VAR_19)\n", "print('setting new index')\n", "VAR_11 = VAR_13.filename\n", "VAR_17 = VAR_12.get_tag('index')[0].strip()\n", "VAR_14 = VAR_13.rename_file()\n", "VAR_13.set_index(VAR_17)\n", "VAR_15 = self.view.window().find_open_file(VAR_11)\n", "if VAR_15:\n", "VAR_15.retarget(os.path.join(VAR_2, VAR_14))\n" ]
[ "def run(self, edit):...\n", "path = get_path(self.view.window())\n", "filename = self.view.file_name()\n", "metadata = Urtext.meta.NodeMetadata(os.path.join(path, filename))\n", "file = UrtextFile(filename)\n", "if metadata.get_tag('title') != 'Untitled':\n", "title = metadata.get_tag('title')[0].strip()\n", "if metadata.get_tag('index') != []:\n", "file.set_title(title)\n", "print('setting new index')\n", "old_filename = file.filename\n", "index = metadata.get_tag('index')[0].strip()\n", "new_filename = file.rename_file()\n", "file.set_index(index)\n", "v = self.view.window().find_open_file(old_filename)\n", "if v:\n", "v.retarget(os.path.join(path, new_filename))\n" ]
[ 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'" ]
[ "def FUNC_21(self, VAR_17, VAR_16):...\n", "\"\"\"docstring\"\"\"\n", "self._check_volume(VAR_16)\n", "VAR_0.warn(_('Volume %s is not found!, it may have been deleted'), VAR_16[\n 'name'])\n", "VAR_0.error(_('Failed to ensure export of volume %s'), VAR_16['name'])\n" ]
[ "def ensure_export(self, context, volume):...\n", "\"\"\"docstring\"\"\"\n", "self._check_volume(volume)\n", "LOG.warn(_('Volume %s is not found!, it may have been deleted'), volume['name']\n )\n", "LOG.error(_('Failed to ensure export of volume %s'), volume['name'])\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'", "Expr'" ]
[ "@property...\n", "return (await self.stats).love_count\n" ]
[ "@property...\n", "return (await self.stats).love_count\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@contextmanager...\n", "VAR_23 = self._validate_executable(VAR_1)\n", "yield VAR_23\n", "self._validated_binaries[VAR_1] = VAR_23\n" ]
[ "@contextmanager...\n", "exe = self._validate_executable(name)\n", "yield exe\n", "self._validated_binaries[name] = exe\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Expr'", "Assign'" ]
[ "def FUNC_3(VAR_1, VAR_2, VAR_3='', VAR_4=None):...\n", "\"\"\"docstring\"\"\"\n", "return FUNC_4(VAR_1, VAR_2['access_token'], VAR_6='', VAR_3=token_type,\n VAR_4=extra_data)\n" ]
[ "def oauth2_token_setter(remote, resp, token_type='', extra_data=None):...\n", "\"\"\"docstring\"\"\"\n", "return token_setter(remote, resp['access_token'], secret='', token_type=\n token_type, extra_data=extra_data)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_39(VAR_101):...\n", "VAR_101.norun = True\n", "return VAR_101\n" ]
[ "def decorate(ruleinfo):...\n", "ruleinfo.norun = True\n", "return ruleinfo\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_10(self):...\n", "self.run_test_case(self.scenario.create_load_balancer())\n" ]
[ "def test_a_create_load_balancer(self):...\n", "self.run_test_case(self.scenario.create_load_balancer())\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_5(self, VAR_9, VAR_10, VAR_8):...\n", "return '%s::{}'.format(self.db_type(VAR_8))\n" ]
[ "def get_placeholder(self, value, compiler, connection):...\n", "return '%s::{}'.format(self.db_type(connection))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_5(self, VAR_3):...\n", "if self.data:\n", "if VAR_3 != self.data[len(self.data) - 1]:\n", "self.data.append(VAR_3)\n", "self.data.append(VAR_3)\n", "if len(self.data) > self.size:\n", "self.data.pop(0)\n" ]
[ "def push(self, item):...\n", "if self.data:\n", "if item != self.data[len(self.data) - 1]:\n", "self.data.append(item)\n", "self.data.append(item)\n", "if len(self.data) > self.size:\n", "self.data.pop(0)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Expr'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_1(VAR_2):...\n", "ensure_server()\n", "logging.disable('INFO')\n", "PixelatedSite.disable_csp_requests()\n", "VAR_0 = AppTestClient()\n", "FUNC_0(VAR_0, UserAgentMode(is_single_user=True))\n", "VAR_0.listenTCP()\n", "VAR_6 = Proxy(proxy_port='8889', app_port='4567')\n", "FeaturesResource.DISABLED_FEATURES.append('autoRefresh')\n", "VAR_2.client = VAR_0\n", "VAR_2.call_to_terminate_proxy = VAR_6.run_on_a_thread()\n", "VAR_7 = AppTestClient()\n", "FUNC_0(VAR_7, UserAgentMode(is_single_user=False))\n", "VAR_7.listenTCP(port=MULTI_USER_PORT)\n", "VAR_2.multi_user_client = VAR_7\n" ]
[ "def before_all(context):...\n", "ensure_server()\n", "logging.disable('INFO')\n", "PixelatedSite.disable_csp_requests()\n", "client = AppTestClient()\n", "start_app_test_client(client, UserAgentMode(is_single_user=True))\n", "client.listenTCP()\n", "proxy = Proxy(proxy_port='8889', app_port='4567')\n", "FeaturesResource.DISABLED_FEATURES.append('autoRefresh')\n", "context.client = client\n", "context.call_to_terminate_proxy = proxy.run_on_a_thread()\n", "multi_user_client = AppTestClient()\n", "start_app_test_client(multi_user_client, UserAgentMode(is_single_user=False))\n", "multi_user_client.listenTCP(port=MULTI_USER_PORT)\n", "context.multi_user_client = multi_user_client\n" ]
[ 0, 0, 0, 0, 0, 0, 5, 5, 0, 0, 5, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'" ]
[ "\"\"\" Easily traverse nested Python data structures \"\"\"\n", "__version__ = '0.2'\n", "import re\n", "\"\"\" Falsey class used to flag item \"missing\" from traversal path \"\"\"\n", "def __bool__(self):...\n", "return False\n" ]
[ "\"\"\" Easily traverse nested Python data structures \"\"\"\n", "__version__ = '0.2'\n", "import re\n", "\"\"\" Falsey class used to flag item \"missing\" from traversal path \"\"\"\n", "def __bool__(self):...\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Assign'", "Import'", "Expr'", "FunctionDef'", "Return'" ]
[ "@tornado.web.authenticated...\n", "if VAR_0:\n", "print('User {} logging out'.format(self.current_user))\n", "self.redirect('/')\n", "if self.current_user in VAR_1:\n", "VAR_1.remove(self.current_user)\n", "self.redirect('/login')\n" ]
[ "@tornado.web.authenticated...\n", "if enable_authentication:\n", "print('User {} logging out'.format(self.current_user))\n", "self.redirect('/')\n", "if self.current_user in authenticated_users:\n", "authenticated_users.remove(self.current_user)\n", "self.redirect('/login')\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Expr'", "Expr'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_7(VAR_8):...\n", "VAR_15 = os.path.join(VAR_1.static_folder, VAR_8)\n", "return json.load(open(VAR_15))\n" ]
[ "def load_json(name):...\n", "filename = os.path.join(app.static_folder, name)\n", "return json.load(open(filename))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_1(self, VAR_7):...\n", "VAR_0.debug('Type checking connection')\n", "if not isinstance(VAR_7, MySQLdb.connections.Connection):\n", "return -1\n", "self._pool.put_nowait(VAR_7)\n", "self._pool.task_done()\n", "VAR_0.info('Successful MySQL connection put request')\n", "return 0\n" ]
[ "def put_connection(self, connection):...\n", "logger.debug('Type checking connection')\n", "if not isinstance(connection, MySQLdb.connections.Connection):\n", "return -1\n", "self._pool.put_nowait(connection)\n", "self._pool.task_done()\n", "logger.info('Successful MySQL connection put request')\n", "return 0\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Condition", "Return'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_2(VAR_87):...\n", "return map(self._rules.__getitem__, filter(self.is_rule, VAR_87))\n" ]
[ "def rules(items):...\n", "return map(self._rules.__getitem__, filter(self.is_rule, items))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "from __future__ import unicode_literals\n", "import frappe, json\n", "from frappe import _\n", "from frappe.utils import cint, formatdate\n", "@frappe.whitelist(allow_guest=True)...\n", "from frappe.www.contact import send_message as website_send_message\n", "VAR_4 = VAR_5 = None\n", "website_send_message(VAR_0, VAR_1, VAR_2)\n", "VAR_5 = frappe.db.sql('string'.format(email_id=sender))\n", "if not VAR_5:\n", "VAR_4 = frappe.db.get_value('Lead', dict(email_id=sender))\n", "VAR_6 = frappe.get_doc(dict(doctype='Opportunity', enquiry_from='Customer' if\n customer else 'Lead', VAR_3='Open', title=subject, contact_email=sender,\n to_discuss=message))\n", "if not VAR_4:\n", "if VAR_5:\n", "VAR_8 = frappe.get_doc(dict(doctype='Lead', email_id=sender, lead_name=\n sender.split('@')[0].title())).insert(ignore_permissions=True)\n", "VAR_6.customer = VAR_5[0][0]\n", "if VAR_4:\n", "VAR_6.insert(ignore_permissions=True)\n", "VAR_6.lead = VAR_4\n", "VAR_6.lead = VAR_8.name\n", "VAR_7 = frappe.get_doc({'doctype': 'Communication', 'subject': VAR_0,\n 'content': VAR_1, 'sender': VAR_2, 'sent_or_received': 'Received',\n 'reference_doctype': 'Opportunity', 'reference_name': VAR_6.name})\n", "VAR_7.insert(ignore_permissions=True)\n", "return 'okay'\n" ]
[ "from __future__ import unicode_literals\n", "import frappe, json\n", "from frappe import _\n", "from frappe.utils import cint, formatdate\n", "@frappe.whitelist(allow_guest=True)...\n", "from frappe.www.contact import send_message as website_send_message\n", "lead = customer = None\n", "website_send_message(subject, message, sender)\n", "customer = frappe.db.sql(\n \"\"\"select distinct dl.link_name from `tabDynamic Link` dl\n\t\tleft join `tabContact` c on dl.parent=c.name where dl.link_doctype='Customer'\n\t\tand c.email_id='{email_id}'\"\"\"\n .format(email_id=sender))\n", "if not customer:\n", "lead = frappe.db.get_value('Lead', dict(email_id=sender))\n", "opportunity = frappe.get_doc(dict(doctype='Opportunity', enquiry_from=\n 'Customer' if customer else 'Lead', status='Open', title=subject,\n contact_email=sender, to_discuss=message))\n", "if not lead:\n", "if customer:\n", "new_lead = frappe.get_doc(dict(doctype='Lead', email_id=sender, lead_name=\n sender.split('@')[0].title())).insert(ignore_permissions=True)\n", "opportunity.customer = customer[0][0]\n", "if lead:\n", "opportunity.insert(ignore_permissions=True)\n", "opportunity.lead = lead\n", "opportunity.lead = new_lead.name\n", "comm = frappe.get_doc({'doctype': 'Communication', 'subject': subject,\n 'content': message, 'sender': sender, 'sent_or_received': 'Received',\n 'reference_doctype': 'Opportunity', 'reference_name': opportunity.name})\n", "comm.insert(ignore_permissions=True)\n", "return 'okay'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "Condition", "ImportFrom'", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_3(VAR_9, VAR_10):...\n", "VAR_15 = VAR_9.search_fields and VAR_9.search_fields.split(',') or []\n", "VAR_16 = [VAR_9.title_field\n ] if VAR_9.title_field and VAR_9.title_field not in VAR_15 else []\n", "VAR_15 = ['name'] + VAR_15 + VAR_16\n", "if not VAR_10 in VAR_15:\n", "VAR_15 = VAR_15 + [VAR_10]\n", "return VAR_15\n" ]
[ "def get_std_fields_list(meta, key):...\n", "sflist = meta.search_fields and meta.search_fields.split(',') or []\n", "title_field = [meta.title_field\n ] if meta.title_field and meta.title_field not in sflist else []\n", "sflist = ['name'] + sflist + title_field\n", "if not key in sflist:\n", "sflist = sflist + [key]\n", "return sflist\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_10(self):...\n", "from invenio.modules.oauthclient.models import RemoteAccount\n", "RemoteAccount.create(1, 'testid', None)\n", "self.assert401(self.client.get(url_for('oauthclient_settings.index'),\n follow_redirects=True))\n", "self.login('admin', '')\n", "VAR_10 = self.client.get(url_for('oauthclient_settings.index'))\n", "self.assert200(VAR_10)\n", "assert 'MyLinkedTestAccount' in VAR_10.data\n", "assert url_for('oauthclient.disconnect', remote_app='test') in VAR_10.data\n", "assert url_for('oauthclient.login', remote_app='full') in VAR_10.data\n", "assert url_for('oauthclient.login', remote_app='test_invalid') in VAR_10.data\n" ]
[ "def test_settings_view(self):...\n", "from invenio.modules.oauthclient.models import RemoteAccount\n", "RemoteAccount.create(1, 'testid', None)\n", "self.assert401(self.client.get(url_for('oauthclient_settings.index'),\n follow_redirects=True))\n", "self.login('admin', '')\n", "res = self.client.get(url_for('oauthclient_settings.index'))\n", "self.assert200(res)\n", "assert 'MyLinkedTestAccount' in res.data\n", "assert url_for('oauthclient.disconnect', remote_app='test') in res.data\n", "assert url_for('oauthclient.login', remote_app='full') in res.data\n", "assert url_for('oauthclient.login', remote_app='test_invalid') in res.data\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "ImportFrom'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Assert'", "Assert'", "Assert'", "Assert'" ]
[ "def FUNC_0(VAR_2, VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "VAR_5 = dict(external_id=resp.get('orcid'), external_method='orcid')\n", "return VAR_5\n" ]
[ "def account_info(remote, resp):...\n", "\"\"\"docstring\"\"\"\n", "account_info = dict(external_id=resp.get('orcid'), external_method='orcid')\n", "return account_info\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "VAR_29 = db.session.query(CLASS_0.id, CLASS_0.username, CLASS_0.\n mapping_level, CLASS_0.role)\n", "if VAR_21.mapping_level:\n", "VAR_29 = VAR_29.filter(CLASS_0.mapping_level == MappingLevel[VAR_21.\n mapping_level.upper()].value)\n", "if VAR_21.username:\n", "VAR_29 = VAR_29.filter(CLASS_0.username.ilike(VAR_21.username.lower() + '%'))\n", "if VAR_21.role:\n", "VAR_29 = VAR_29.filter(CLASS_0.role == UserRole[VAR_21.role.upper()].value)\n", "VAR_30 = VAR_29.order_by(CLASS_0.username).paginate(VAR_21.page, 20, True)\n", "VAR_31 = UserSearchDTO()\n", "for VAR_33 in VAR_30.items:\n", "VAR_38 = ListedUser()\n", "VAR_31.pagination = Pagination(VAR_30)\n", "VAR_38.id = VAR_33.id\n", "return VAR_31\n", "VAR_38.mapping_level = MappingLevel(VAR_33.mapping_level).name\n", "VAR_38.username = VAR_33.username\n", "VAR_38.role = UserRole(VAR_33.role).name\n", "VAR_31.users.append(VAR_38)\n" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "base = db.session.query(User.id, User.username, User.mapping_level, User.role)\n", "if query.mapping_level:\n", "base = base.filter(User.mapping_level == MappingLevel[query.mapping_level.\n upper()].value)\n", "if query.username:\n", "base = base.filter(User.username.ilike(query.username.lower() + '%'))\n", "if query.role:\n", "base = base.filter(User.role == UserRole[query.role.upper()].value)\n", "results = base.order_by(User.username).paginate(query.page, 20, True)\n", "dto = UserSearchDTO()\n", "for result in results.items:\n", "listed_user = ListedUser()\n", "dto.pagination = Pagination(results)\n", "listed_user.id = result.id\n", "return dto\n", "listed_user.mapping_level = MappingLevel(result.mapping_level).name\n", "listed_user.username = result.username\n", "listed_user.role = UserRole(result.role).name\n", "dto.users.append(listed_user)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def __call__(self, VAR_2, VAR_3, VAR_4=None):...\n", "if VAR_3.strip():\n", "return\n", "self.xmpp.config.reload()\n" ]
[ "def __call__(self, msg, arguments, errorSink=None):...\n", "if arguments.strip():\n", "return\n", "self.xmpp.config.reload()\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Expr'" ]
[ "def FUNC_50(self):...\n", "def FUNC_61():...\n", "self.cursor.execute('create table t1 (word varchar (100))')\n", "VAR_39 = set(['a'])\n", "self.cursor.executemany('insert into t1 (word) values (?)', [VAR_39])\n", "self.assertRaises(TypeError, FUNC_61)\n" ]
[ "def test_sets_executemany(self):...\n", "def f():...\n", "self.cursor.execute('create table t1 (word varchar (100))')\n", "words = set(['a'])\n", "self.cursor.executemany('insert into t1 (word) values (?)', [words])\n", "self.assertRaises(TypeError, f)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_0(self):...\n", "self.logger = logging.getLogger('sos_collector')\n", "self.logger.setLevel(logging.DEBUG)\n", "self.logfile = tempfile.NamedTemporaryFile(mode='w+', dir=self.config[\n 'tmp_dir'], delete=False)\n", "VAR_7 = logging.StreamHandler(self.logfile)\n", "VAR_7.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))\n", "VAR_7.setLevel(logging.DEBUG)\n", "self.logger.addHandler(VAR_7)\n", "VAR_8 = logging.StreamHandler(sys.stderr)\n", "VAR_8.setFormatter(logging.Formatter('%(message)s'))\n", "self.console = logging.getLogger('sos_collector_console')\n", "self.console.setLevel(logging.DEBUG)\n", "self.console_log_file = tempfile.NamedTemporaryFile(mode='w+', dir=self.\n config['tmp_dir'], delete=False)\n", "VAR_9 = logging.StreamHandler(self.console_log_file)\n", "VAR_10 = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')\n", "VAR_9.setFormatter(VAR_10)\n", "self.console.addHandler(VAR_9)\n", "VAR_11 = logging.StreamHandler()\n", "VAR_12 = logging.Formatter('%(message)s')\n", "VAR_11.setFormatter(VAR_12)\n", "if self.config['verbose']:\n", "VAR_11.setLevel(logging.DEBUG)\n", "VAR_11.setLevel(logging.INFO)\n", "self.console.addHandler(VAR_11)\n" ]
[ "def _setup_logging(self):...\n", "self.logger = logging.getLogger('sos_collector')\n", "self.logger.setLevel(logging.DEBUG)\n", "self.logfile = tempfile.NamedTemporaryFile(mode='w+', dir=self.config[\n 'tmp_dir'], delete=False)\n", "hndlr = logging.StreamHandler(self.logfile)\n", "hndlr.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))\n", "hndlr.setLevel(logging.DEBUG)\n", "self.logger.addHandler(hndlr)\n", "console = logging.StreamHandler(sys.stderr)\n", "console.setFormatter(logging.Formatter('%(message)s'))\n", "self.console = logging.getLogger('sos_collector_console')\n", "self.console.setLevel(logging.DEBUG)\n", "self.console_log_file = tempfile.NamedTemporaryFile(mode='w+', dir=self.\n config['tmp_dir'], delete=False)\n", "chandler = logging.StreamHandler(self.console_log_file)\n", "cfmt = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')\n", "chandler.setFormatter(cfmt)\n", "self.console.addHandler(chandler)\n", "ui = logging.StreamHandler()\n", "fmt = logging.Formatter('%(message)s')\n", "ui.setFormatter(fmt)\n", "if self.config['verbose']:\n", "ui.setLevel(logging.DEBUG)\n", "ui.setLevel(logging.INFO)\n", "self.console.addHandler(ui)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_1(self):...\n", "settings.SCHEDULER_MODULE = 'mock'\n", "settings.SSH_PRIVATE_KEY = ''\n" ]
[ "def tearDown(self):...\n", "settings.SCHEDULER_MODULE = 'mock'\n", "settings.SSH_PRIVATE_KEY = ''\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_8(self, VAR_9):...\n", "from opennode.oms.endpoint.httprest.auth import IHttpRestAuthenticationUtility\n", "VAR_19 = getUtility(IHttpRestAuthenticationUtility)\n", "VAR_20 = VAR_19.get_basic_auth_credentials(VAR_9)\n", "if VAR_20:\n", "blocking_yield(VAR_19.authenticate(VAR_9, VAR_20, basic_auth=True))\n", "return VAR_19.get_token(VAR_9)\n", "return VAR_19.generate_token(VAR_20)\n" ]
[ "def check_auth(self, request):...\n", "from opennode.oms.endpoint.httprest.auth import IHttpRestAuthenticationUtility\n", "authentication_utility = getUtility(IHttpRestAuthenticationUtility)\n", "credentials = authentication_utility.get_basic_auth_credentials(request)\n", "if credentials:\n", "blocking_yield(authentication_utility.authenticate(request, credentials,\n basic_auth=True))\n", "return authentication_utility.get_token(request)\n", "return authentication_utility.generate_token(credentials)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "ImportFrom'", "Assign'", "Assign'", "Condition", "Expr'", "Return'", "Return'" ]
[ "def FUNC_4(self):...\n", "VAR_2 = CLASS_1.construct_testscript_command('test_interactive_program.py')\n", "self.assertEqual(p.stdout.readline(), 'test_program X\\n')\n", "self.assertEqual(p.stdout.readline(), 'Type in a number:\\n')\n", "p.stdin.write('33\\n')\n", "p.stdin.flush()\n", "self.assertEqual(p.stdout.readline(), '33\\n')\n", "self.assertEqual(p.stdout.readline(), 'Exiting program.\\n')\n" ]
[ "def test_run_interactive_shell_command(self):...\n", "command = RunShellCommandTest.construct_testscript_command(\n 'test_interactive_program.py')\n", "self.assertEqual(p.stdout.readline(), 'test_program X\\n')\n", "self.assertEqual(p.stdout.readline(), 'Type in a number:\\n')\n", "p.stdin.write('33\\n')\n", "p.stdin.flush()\n", "self.assertEqual(p.stdout.readline(), '33\\n')\n", "self.assertEqual(p.stdout.readline(), 'Exiting program.\\n')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_4(self, VAR_11=VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "self.st_db = VAR_11\n", "for VAR_18 in self.st_db.execute(\n", "VAR_59, VAR_60, VAR_61, VAR_62 = VAR_18\n", "return\n", "VAR_63 = self.UniqueFile(VAR_59, VAR_60, VAR_61, VAR_62, self)\n", "self.st_uuid_idx[VAR_59] = VAR_63\n", "self.st_hash_idx[VAR_62] = VAR_63\n" ]
[ "def load_sql(self, db=Database):...\n", "\"\"\"docstring\"\"\"\n", "self.st_db = db\n", "for item in self.st_db.execute(\n", "s_uuid, s_size, s_count, s_hash = item\n", "return\n", "s_fl = self.UniqueFile(s_uuid, s_size, s_count, s_hash, self)\n", "self.st_uuid_idx[s_uuid] = s_fl\n", "self.st_hash_idx[s_hash] = s_fl\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Assign'", "Return'", "Assign'", "Assign'", "Assign'" ]
[ "def __next__(self):...\n", "if self._r_item__iter is None:\n", "VAR_29 = next(self._r_item__iter)\n", "return CLASS_3(VAR_29)\n" ]
[ "def __next__(self):...\n", "if self._r_item__iter is None:\n", "next_value = next(self._r_item__iter)\n", "return Roamer(next_value)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'" ]
[ "def __init__(self, VAR_14, VAR_15):...\n", "super(CLASS_2, self).__init__(VAR_15=mode)\n", "self._accounts = VAR_14\n" ]
[ "def __init__(self, accounts, mode):...\n", "super(StubServicesFactory, self).__init__(mode=mode)\n", "self._accounts = accounts\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self._key_for_tool_version('scala-repl', self.version)\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self._key_for_tool_version('scala-repl', self.version)\n" ]
[ 0, 0, 7 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_39(self, VAR_20):...\n", "for VAR_14 in self.list_properties.keys():\n", "if self.is_relation(VAR_14):\n", "if VAR_20 == self.get_related_model(VAR_14):\n", "return VAR_14\n" ]
[ "def get_related_fk(self, model):...\n", "for col_name in self.list_properties.keys():\n", "if self.is_relation(col_name):\n", "if model == self.get_related_model(col_name):\n", "return col_name\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Condition", "Return'" ]
[ "def FUNC_6(self):...\n", "return [CLASS_2(self, VAR_2, self.object) for VAR_2 in self.smart_list.columns]\n" ]
[ "def fields(self):...\n", "return [SmartListField(self, column, self.object) for column in self.\n smart_list.columns]\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "import os\n", "import sys\n", "import unittest\n", "import threading\n", "VAR_0 = os.path.abspath(__file__)\n", "import test_env_api\n", "test_env_api.setup_test_env()\n", "import bot\n", "def FUNC_0(self):...\n", "VAR_1 = bot.Bot(None, {'dimensions': {'foo': 'bar'}},\n 'https://localhost:1/', '1234-1a2b3c4-tainted-joe', 'base_dir', None)\n", "self.assertEqual({'foo': 'bar'}, VAR_1.dimensions)\n", "self.assertEqual(os.path.join(os.path.dirname(VAR_0), 'swarming_bot.zip'),\n VAR_1.swarming_bot_zip)\n", "self.assertEqual('1234-1a2b3c4-tainted-joe', VAR_1.server_version)\n", "self.assertEqual('base_dir', VAR_1.base_dir)\n", "def FUNC_1(self):...\n", "VAR_1 = bot.Bot(None, {}, 'https://localhost:1/',\n '1234-1a2b3c4-tainted-joe', 'base_dir', None)\n", "VAR_2 = threading.Event()\n", "VAR_1.call_later(0.001, VAR_2.set)\n", "self.assertTrue(VAR_2.wait(1))\n", "def FUNC_2(self):...\n", "VAR_1 = bot.Bot(None, {}, 'https://localhost:1/',\n '1234-1a2b3c4-tainted-joe', 'base_dir', None)\n", "VAR_2 = threading.Event()\n", "VAR_1.call_later(0.1, VAR_2.set)\n", "VAR_1.cancel_all_timers()\n", "self.assertFalse(VAR_2.wait(0.3))\n", "if __name__ == '__main__':\n", "if '-v' in sys.argv:\n", "unittest.TestCase.maxDiff = None\n", "unittest.main()\n" ]
[ "import os\n", "import sys\n", "import unittest\n", "import threading\n", "THIS_FILE = os.path.abspath(__file__)\n", "import test_env_api\n", "test_env_api.setup_test_env()\n", "import bot\n", "def test_bot(self):...\n", "obj = bot.Bot(None, {'dimensions': {'foo': 'bar'}}, 'https://localhost:1/',\n '1234-1a2b3c4-tainted-joe', 'base_dir', None)\n", "self.assertEqual({'foo': 'bar'}, obj.dimensions)\n", "self.assertEqual(os.path.join(os.path.dirname(THIS_FILE),\n 'swarming_bot.zip'), obj.swarming_bot_zip)\n", "self.assertEqual('1234-1a2b3c4-tainted-joe', obj.server_version)\n", "self.assertEqual('base_dir', obj.base_dir)\n", "def test_bot_call_later(self):...\n", "obj = bot.Bot(None, {}, 'https://localhost:1/', '1234-1a2b3c4-tainted-joe',\n 'base_dir', None)\n", "ev = threading.Event()\n", "obj.call_later(0.001, ev.set)\n", "self.assertTrue(ev.wait(1))\n", "def test_bot_call_later_cancel(self):...\n", "obj = bot.Bot(None, {}, 'https://localhost:1/', '1234-1a2b3c4-tainted-joe',\n 'base_dir', None)\n", "ev = threading.Event()\n", "obj.call_later(0.1, ev.set)\n", "obj.cancel_all_timers()\n", "self.assertFalse(ev.wait(0.3))\n", "if __name__ == '__main__':\n", "if '-v' in sys.argv:\n", "unittest.TestCase.maxDiff = None\n", "unittest.main()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "Assign'", "Import'", "Expr'", "Import'", "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Condition", "Condition", "Assign'", "Expr'" ]
[ "def FUNC_13(self, *VAR_12, **VAR_13):...\n", "" ]
[ "def on_connection_close(self, *args, **kwargs):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "@property...\n", "return self._conn.is_client\n" ]
[ "@property...\n", "return self._conn.is_client\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_0():...\n", "VAR_1 = os.path.dirname(os.path.abspath(__file__))\n", "return os.path.join(VAR_1, '..', 'assets')\n" ]
[ "def _get_startup_folder():...\n", "path = os.path.dirname(os.path.abspath(__file__))\n", "return os.path.join(path, '..', 'assets')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_2(VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "return True\n" ]
[ "def check_settings(settings):...\n", "\"\"\"docstring\"\"\"\n", "return True\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_9(self, VAR_8, VAR_10, VAR_11):...\n", "EntityStateManager.set_state_data(VAR_8, VAR_10, VAR_11)\n" ]
[ "def update(self, entity_class, source_data, target_entity):...\n", "EntityStateManager.set_state_data(entity_class, source_data, target_entity)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]