lines: sequencelengths [1, 444]
raw_lines: sequencelengths [1, 444]
label: sequencelengths [1, 444]
type: sequencelengths [1, 444]
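Each record below carries these four fields as parallel per-line sequences: "lines" holds identifier-obfuscated source lines (names rewritten to FUNC_n / VAR_n / CLASS_n), "raw_lines" the matching original source lines, "label" one integer per line, and "type" one AST statement kind per line. A minimal Python sketch of how such a record could be represented and sanity-checked; the Record class and check() helper are illustrative assumptions, not part of the dataset, and the label semantics are not documented here:

from dataclasses import dataclass
from typing import List

@dataclass
class Record:                 # hypothetical container, not defined by the dataset
    lines: List[str]          # obfuscated source lines (FUNC_n / VAR_n / CLASS_n)
    raw_lines: List[str]      # original, de-obfuscated source lines
    label: List[int]          # per-line integer label (semantics not given here)
    type: List[str]           # per-line statement kind, e.g. "FunctionDef'", "Assign'"

def check(rec: Record) -> None:
    # The four sequences are parallel: one entry per source line, with
    # lengths bounded by the header's sequencelengths range [1, 444].
    n = len(rec.lines)
    assert n == len(rec.raw_lines) == len(rec.label) == len(rec.type)
    assert 1 <= n <= 444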
[ "def FUNC_28(VAR_24):...\n", "VAR_33.append(VAR_24)\n", "return 23\n" ]
[ "def exec_python(args):...\n", "calls.append(args)\n", "return 23\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Return'" ]
[ "def FUNC_20(self, VAR_19, VAR_20):...\n", "\"\"\"docstring\"\"\"\n", "for clause in reversed(self.order):\n", "VAR_40 = VAR_20.has_wildcards() - VAR_19.has_wildcards()\n", "VAR_51 = clause.index(VAR_19.name)\n", "if VAR_40 != 0:\n", "VAR_52 = clause.index(VAR_20.name)\n", "return VAR_40\n", "return 0\n", "VAR_39 = VAR_52 - VAR_51\n", "if VAR_39 < 0:\n", "VAR_39 = -1\n", "if VAR_39 > 0:\n", "return VAR_39\n", "VAR_39 = 1\n" ]
[ "def compare(self, rule1, rule2):...\n", "\"\"\"docstring\"\"\"\n", "for clause in reversed(self.order):\n", "wildcard_cmp = rule2.has_wildcards() - rule1.has_wildcards()\n", "i = clause.index(rule1.name)\n", "if wildcard_cmp != 0:\n", "j = clause.index(rule2.name)\n", "return wildcard_cmp\n", "return 0\n", "comp = j - i\n", "if comp < 0:\n", "comp = -1\n", "if comp > 0:\n", "return comp\n", "comp = 1\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "For", "Assign'", "Assign'", "Condition", "Assign'", "Return'", "Return'", "Assign'", "Condition", "Assign'", "Condition", "Return'", "Assign'" ]
[ "\"\"\"string\"\"\"\n", "from __future__ import unicode_literals\n", "from django.conf import settings\n", "from django.conf.urls import include\n", "from django.conf.urls import patterns\n", "from django.conf.urls import url\n", "from api import routers\n", "from api import views\n", "VAR_0 = routers.ApiRouter()\n", "VAR_1 = patterns('', url('^', include(VAR_0.urls)), url(\n '^apps/(?P<id>{})/config/?'.format(settings.APP_URL_REGEX), views.\n AppConfigViewSet.as_view({'get': 'retrieve', 'post': 'create'})), url(\n '^apps/(?P<id>{})/builds/(?P<uuid>[-_\\\\w]+)/?'.format(settings.\n APP_URL_REGEX), views.AppBuildViewSet.as_view({'get': 'retrieve'})),\n url('^apps/(?P<id>{})/builds/?'.format(settings.APP_URL_REGEX), views.\n AppBuildViewSet.as_view({'get': 'list', 'post': 'create'})), url(\n '^apps/(?P<id>{})/releases/v(?P<version>[0-9]+)/?'.format(settings.\n APP_URL_REGEX), views.AppReleaseViewSet.as_view({'get': 'retrieve'})),\n url('^apps/(?P<id>{})/releases/rollback/?'.format(settings.\n APP_URL_REGEX), views.AppReleaseViewSet.as_view({'post': 'rollback'})),\n url('^apps/(?P<id>{})/releases/?'.format(settings.APP_URL_REGEX), views\n .AppReleaseViewSet.as_view({'get': 'list'})), url(\n '^apps/(?P<id>{})/containers/(?P<type>[-_\\\\w]+)/(?P<num>[-_\\\\w]+)/?'.\n format(settings.APP_URL_REGEX), views.AppContainerViewSet.as_view({\n 'get': 'retrieve'})), url(\n '^apps/(?P<id>{})/containers/(?P<type>[-_\\\\w.]+)/?'.format(settings.\n APP_URL_REGEX), views.AppContainerViewSet.as_view({'get': 'list'})),\n url('^apps/(?P<id>{})/containers/?'.format(settings.APP_URL_REGEX),\n views.AppContainerViewSet.as_view({'get': 'list'})), url(\n '^apps/(?P<id>{})/domains/(?P<domain>[-\\\\._\\\\w]+)/?'.format(settings.\n APP_URL_REGEX), views.DomainViewSet.as_view({'delete': 'destroy'})),\n url('^apps/(?P<id>{})/domains/?'.format(settings.APP_URL_REGEX), views.\n DomainViewSet.as_view({'post': 'create', 'get': 'list'})), url(\n '^apps/(?P<id>{})/scale/?'.format(settings.APP_URL_REGEX), views.\n AppViewSet.as_view({'post': 'scale'})), url('^apps/(?P<id>{})/logs/?'.\n format(settings.APP_URL_REGEX), views.AppViewSet.as_view({'get': 'logs'\n })), url('^apps/(?P<id>{})/run/?'.format(settings.APP_URL_REGEX), views\n .AppViewSet.as_view({'post': 'run'})), url(\n '^apps/(?P<id>{})/perms/(?P<username>[-_\\\\w]+)/?'.format(settings.\n APP_URL_REGEX), views.AppPermsViewSet.as_view({'delete': 'destroy'})),\n url('^apps/(?P<id>{})/perms/?'.format(settings.APP_URL_REGEX), views.\n AppPermsViewSet.as_view({'get': 'list', 'post': 'create'})), url(\n '^apps/(?P<id>{})/?'.format(settings.APP_URL_REGEX), views.AppViewSet.\n as_view({'get': 'retrieve', 'delete': 'destroy'})), url('^apps/?',\n views.AppViewSet.as_view({'get': 'list', 'post': 'create'})), url(\n '^keys/(?P<id>.+)/?', views.KeyViewSet.as_view({'get': 'retrieve',\n 'delete': 'destroy'})), url('^keys/?', views.KeyViewSet.as_view({'get':\n 'list', 'post': 'create'})), url('^hooks/push/?', views.PushHookViewSet\n .as_view({'post': 'create'})), url('^hooks/build/?', views.\n BuildHookViewSet.as_view({'post': 'create'})), url('^hooks/config/?',\n views.ConfigHookViewSet.as_view({'post': 'create'})), url(\n '^auth/register/?', views.UserRegistrationView.as_view({'post':\n 'create'})), url('^auth/cancel/?', views.UserCancellationView.as_view({\n 'delete': 'destroy'})), url('^auth/', include('rest_framework.urls',\n namespace='rest_framework')), url('^generate-api-key/',\n 'rest_framework.authtoken.views.obtain_auth_token'), url(\n 
'^admin/perms/(?P<username>[-_\\\\w]+)/?', views.AdminPermsViewSet.\n as_view({'delete': 'destroy'})), url('^admin/perms/?', views.\n AdminPermsViewSet.as_view({'get': 'list', 'post': 'create'})))\n" ]
[ "\"\"\"\nRESTful URL patterns and routing for the Deis API app.\n\n\nApplications\n============\n\n.. http:get:: /api/apps/(string:id)/\n\n Retrieve a :class:`~api.models.App` by its `id`.\n\n.. http:delete:: /api/apps/(string:id)/\n\n Destroy a :class:`~api.models.App` by its `id`.\n\n.. http:get:: /api/apps/\n\n List all :class:`~api.models.App`\\\\s.\n\n.. http:post:: /api/apps/\n\n Create a new :class:`~api.models.App`.\n\n\nApplication Release Components\n------------------------------\n\n.. http:get:: /api/apps/(string:id)/config/\n\n List all :class:`~api.models.Config`\\\\s.\n\n.. http:post:: /api/apps/(string:id)/config/\n\n Create a new :class:`~api.models.Config`.\n\n.. http:get:: /api/apps/(string:id)/builds/(string:uuid)/\n\n Retrieve a :class:`~api.models.Build` by its `uuid`.\n\n.. http:get:: /api/apps/(string:id)/builds/\n\n List all :class:`~api.models.Build`\\\\s.\n\n.. http:post:: /api/apps/(string:id)/builds/\n\n Create a new :class:`~api.models.Build`.\n\n.. http:get:: /api/apps/(string:id)/releases/(int:version)/\n\n Retrieve a :class:`~api.models.Release` by its `version`.\n\n.. http:get:: /api/apps/(string:id)/releases/\n\n List all :class:`~api.models.Release`\\\\s.\n\n.. http:post:: /api/apps/(string:id)/releases/rollback/\n\n Rollback to a previous :class:`~api.models.Release`.\n\n\nApplication Infrastructure\n--------------------------\n\n.. http:get:: /api/apps/(string:id)/containers/(string:type)/(int:num)/\n\n List all :class:`~api.models.Container`\\\\s.\n\n.. http:get:: /api/apps/(string:id)/containers/(string:type)/\n\n List all :class:`~api.models.Container`\\\\s.\n\n.. http:get:: /api/apps/(string:id)/containers/\n\n List all :class:`~api.models.Container`\\\\s.\n\n\nApplication Domains\n-------------------\n\n\n.. http:delete:: /api/apps/(string:id)/domains/(string:hostname)\n\n Destroy a :class:`~api.models.Domain` by its `hostname`\n\n.. http:get:: /api/apps/(string:id)/domains/\n\n List all :class:`~api.models.Domain`\\\\s.\n\n.. http:post:: /api/apps/(string:id)/domains/\n\n Create a new :class:`~api.models.Domain`\\\\s.\n\n\nApplication Actions\n-------------------\n\n.. http:post:: /api/apps/(string:id)/scale/\n\n See also\n :meth:`AppViewSet.scale() <api.views.AppViewSet.scale>`\n\n.. http:get:: /api/apps/(string:id)/logs/\n\n See also\n :meth:`AppViewSet.logs() <api.views.AppViewSet.logs>`\n\n.. http:post:: /api/apps/(string:id)/run/\n\n See also\n :meth:`AppViewSet.run() <api.views.AppViewSet.run>`\n\n\nApplication Sharing\n===================\n\n.. http:delete:: /api/apps/(string:id)/perms/(string:username)/\n\n Destroy an app permission by its `username`.\n\n.. http:get:: /api/apps/(string:id)/perms/\n\n List all permissions granted to this app.\n\n.. http:post:: /api/apps/(string:id)/perms/\n\n Create a new app permission.\n\n\nKeys\n====\n\n.. http:get:: /api/keys/(string:id)/\n\n Retrieve a :class:`~api.models.Key` by its `id`.\n\n.. http:delete:: /api/keys/(string:id)/\n\n Destroy a :class:`~api.models.Key` by its `id`.\n\n.. http:get:: /api/keys/\n\n List all :class:`~api.models.Key`\\\\s.\n\n.. http:post:: /api/keys/\n\n Create a new :class:`~api.models.Key`.\n\n\nAPI Hooks\n=========\n\n.. http:post:: /api/hooks/push/\n\n Create a new :class:`~api.models.Push`.\n\n.. http:post:: /api/hooks/build/\n\n Create a new :class:`~api.models.Build`.\n\n.. http:post:: /api/hooks/config/\n\n Retrieve latest application :class:`~api.models.Config`.\n\n\nAuth\n====\n\n.. http:post:: /api/auth/register/\n\n Create a new User.\n\n.. 
http:delete:: /api/auth/register/\n\n Destroy the logged-in User.\n\n.. http:post:: /api/auth/login\n\n Authenticate for the REST framework.\n\n.. http:post:: /api/auth/logout\n\n Clear authentication for the REST framework.\n\n.. http:get:: /api/generate-api-key/\n\n Generate an API key.\n\n\nAdmin Sharing\n=============\n\n.. http:delete:: /api/admin/perms/(string:username)/\n\n Destroy an admin permission by its `username`.\n\n.. http:get:: /api/admin/perms/\n\n List all admin permissions granted.\n\n.. http:post:: /api/admin/perms/\n\n Create a new admin permission.\n\n\"\"\"\n", "from __future__ import unicode_literals\n", "from django.conf import settings\n", "from django.conf.urls import include\n", "from django.conf.urls import patterns\n", "from django.conf.urls import url\n", "from api import routers\n", "from api import views\n", "router = routers.ApiRouter()\n", "urlpatterns = patterns('', url('^', include(router.urls)), url(\n '^apps/(?P<id>{})/config/?'.format(settings.APP_URL_REGEX), views.\n AppConfigViewSet.as_view({'get': 'retrieve', 'post': 'create'})), url(\n '^apps/(?P<id>{})/builds/(?P<uuid>[-_\\\\w]+)/?'.format(settings.\n APP_URL_REGEX), views.AppBuildViewSet.as_view({'get': 'retrieve'})),\n url('^apps/(?P<id>{})/builds/?'.format(settings.APP_URL_REGEX), views.\n AppBuildViewSet.as_view({'get': 'list', 'post': 'create'})), url(\n '^apps/(?P<id>{})/releases/v(?P<version>[0-9]+)/?'.format(settings.\n APP_URL_REGEX), views.AppReleaseViewSet.as_view({'get': 'retrieve'})),\n url('^apps/(?P<id>{})/releases/rollback/?'.format(settings.\n APP_URL_REGEX), views.AppReleaseViewSet.as_view({'post': 'rollback'})),\n url('^apps/(?P<id>{})/releases/?'.format(settings.APP_URL_REGEX), views\n .AppReleaseViewSet.as_view({'get': 'list'})), url(\n '^apps/(?P<id>{})/containers/(?P<type>[-_\\\\w]+)/(?P<num>[-_\\\\w]+)/?'.\n format(settings.APP_URL_REGEX), views.AppContainerViewSet.as_view({\n 'get': 'retrieve'})), url(\n '^apps/(?P<id>{})/containers/(?P<type>[-_\\\\w.]+)/?'.format(settings.\n APP_URL_REGEX), views.AppContainerViewSet.as_view({'get': 'list'})),\n url('^apps/(?P<id>{})/containers/?'.format(settings.APP_URL_REGEX),\n views.AppContainerViewSet.as_view({'get': 'list'})), url(\n '^apps/(?P<id>{})/domains/(?P<domain>[-\\\\._\\\\w]+)/?'.format(settings.\n APP_URL_REGEX), views.DomainViewSet.as_view({'delete': 'destroy'})),\n url('^apps/(?P<id>{})/domains/?'.format(settings.APP_URL_REGEX), views.\n DomainViewSet.as_view({'post': 'create', 'get': 'list'})), url(\n '^apps/(?P<id>{})/scale/?'.format(settings.APP_URL_REGEX), views.\n AppViewSet.as_view({'post': 'scale'})), url('^apps/(?P<id>{})/logs/?'.\n format(settings.APP_URL_REGEX), views.AppViewSet.as_view({'get': 'logs'\n })), url('^apps/(?P<id>{})/run/?'.format(settings.APP_URL_REGEX), views\n .AppViewSet.as_view({'post': 'run'})), url(\n '^apps/(?P<id>{})/perms/(?P<username>[-_\\\\w]+)/?'.format(settings.\n APP_URL_REGEX), views.AppPermsViewSet.as_view({'delete': 'destroy'})),\n url('^apps/(?P<id>{})/perms/?'.format(settings.APP_URL_REGEX), views.\n AppPermsViewSet.as_view({'get': 'list', 'post': 'create'})), url(\n '^apps/(?P<id>{})/?'.format(settings.APP_URL_REGEX), views.AppViewSet.\n as_view({'get': 'retrieve', 'delete': 'destroy'})), url('^apps/?',\n views.AppViewSet.as_view({'get': 'list', 'post': 'create'})), url(\n '^keys/(?P<id>.+)/?', views.KeyViewSet.as_view({'get': 'retrieve',\n 'delete': 'destroy'})), url('^keys/?', views.KeyViewSet.as_view({'get':\n 'list', 'post': 'create'})), url('^hooks/push/?', 
views.PushHookViewSet\n .as_view({'post': 'create'})), url('^hooks/build/?', views.\n BuildHookViewSet.as_view({'post': 'create'})), url('^hooks/config/?',\n views.ConfigHookViewSet.as_view({'post': 'create'})), url(\n '^auth/register/?', views.UserRegistrationView.as_view({'post':\n 'create'})), url('^auth/cancel/?', views.UserCancellationView.as_view({\n 'delete': 'destroy'})), url('^auth/', include('rest_framework.urls',\n namespace='rest_framework')), url('^generate-api-key/',\n 'rest_framework.authtoken.views.obtain_auth_token'), url(\n '^admin/perms/(?P<username>[-_\\\\w]+)/?', views.AdminPermsViewSet.\n as_view({'delete': 'destroy'})), url('^admin/perms/?', views.\n AdminPermsViewSet.as_view({'get': 'list', 'post': 'create'})))\n" ]
[ 5, 0, 0, 0, 0, 0, 0, 0, 0, 5 ]
[ "Expr'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'" ]
[ "def __init__(self):...\n", "self.content = None\n", "self.filled = False\n" ]
[ "def __init__(self):...\n", "self.content = None\n", "self.filled = False\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_0(VAR_1):...\n", "if not 'now' in VAR_1:\n", "VAR_1['now'] = timezone.now()\n", "return VAR_1['now']\n" ]
[ "def _prepare_now(context):...\n", "if not 'now' in context:\n", "context['now'] = timezone.now()\n", "return context['now']\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_7():...\n", "parse_command_line()\n", "VAR_19 = motor.motor_tornado.MotorClient().news\n", "VAR_4 = VAR_19.articles\n", "VAR_20 = tornado.web.Application([('/', CLASS_1), ('/post/(.+)', CLASS_2),\n ('/new', CLASS_3)], cookie_secret='__THERE_IS_NO_SECRET_COOKIE__',\n template_path=os.path.join(os.path.dirname(__file__), 'templates'),\n static_path=os.path.join(os.path.dirname(__file__), 'static'),\n xsrf_cookies=True, debug=options.debug, VAR_19=db, VAR_4=collection)\n", "print('Listening on http://localhost:{}'.format(options.port))\n", "VAR_20.listen(options.port)\n", "tornado.ioloop.IOLoop.current().start()\n" ]
[ "def main():...\n", "parse_command_line()\n", "db = motor.motor_tornado.MotorClient().news\n", "collection = db.articles\n", "app = tornado.web.Application([('/', MainHandler), ('/post/(.+)',\n PostHandler), ('/new', PostNewHandler)], cookie_secret=\n '__THERE_IS_NO_SECRET_COOKIE__', template_path=os.path.join(os.path.\n dirname(__file__), 'templates'), static_path=os.path.join(os.path.\n dirname(__file__), 'static'), xsrf_cookies=True, debug=options.debug,\n db=db, collection=collection)\n", "print('Listening on http://localhost:{}'.format(options.port))\n", "app.listen(options.port)\n", "tornado.ioloop.IOLoop.current().start()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_6(self):...\n", "VAR_5 = [{'cloudProvider': 'gce', 'asgName': self.__server_group_name,\n 'serverGroupName': self.__server_group_name, 'region': self.TEST_REGION,\n 'zone': self.TEST_ZONE, 'type': 'disableServerGroup', 'regions': [self.\n TEST_REGION], 'zones': [self.TEST_ZONE], 'credentials': self.bindings[\n 'GCE_CREDENTIALS'], 'user': 'integration-tests'}]\n", "VAR_6 = gcp.GceContractBuilder(self.gce_observer)\n", "VAR_6.new_clause_builder('Server Group Disabled', retryable_for_secs=90\n ).list_resources('managed-instance-groups').contains_path_value(\n 'baseInstanceName', self.__server_group_name).excludes_pred_list([jc.\n PathContainsPredicate('baseInstanceName', self.__server_group_name), jc\n .PathContainsPredicate('targetPools', 'https')])\n", "VAR_7 = self.agent.make_json_payload_from_kwargs(VAR_5=job, description=\n 'Server Group Test - disable server group', application=self.TEST_APP)\n", "return st.OperationContract(self.new_post_operation(title=\n 'disable_server_group', data=payload, path=self.__path), contract=\n builder.build())\n" ]
[ "def disable_server_group(self):...\n", "job = [{'cloudProvider': 'gce', 'asgName': self.__server_group_name,\n 'serverGroupName': self.__server_group_name, 'region': self.TEST_REGION,\n 'zone': self.TEST_ZONE, 'type': 'disableServerGroup', 'regions': [self.\n TEST_REGION], 'zones': [self.TEST_ZONE], 'credentials': self.bindings[\n 'GCE_CREDENTIALS'], 'user': 'integration-tests'}]\n", "builder = gcp.GceContractBuilder(self.gce_observer)\n", "builder.new_clause_builder('Server Group Disabled', retryable_for_secs=90\n ).list_resources('managed-instance-groups').contains_path_value(\n 'baseInstanceName', self.__server_group_name).excludes_pred_list([jc.\n PathContainsPredicate('baseInstanceName', self.__server_group_name), jc\n .PathContainsPredicate('targetPools', 'https')])\n", "payload = self.agent.make_json_payload_from_kwargs(job=job, description=\n 'Server Group Test - disable server group', application=self.TEST_APP)\n", "return st.OperationContract(self.new_post_operation(title=\n 'disable_server_group', data=payload, path=self.__path), contract=\n builder.build())\n" ]
[ 0, 0, 0, 1, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_40(VAR_87):...\n", "return filterfalse(self.is_rule, VAR_87)\n" ]
[ "def files(items):...\n", "return filterfalse(self.is_rule, items)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_4(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_5 = jc.Contract()\n", "return st.OperationContract(self.agent.make_delete_app_operation(VAR_1=self\n .bindings, application=self.TEST_APP), VAR_5=contract)\n" ]
[ "def delete_app(self):...\n", "\"\"\"docstring\"\"\"\n", "contract = jc.Contract()\n", "return st.OperationContract(self.agent.make_delete_app_operation(bindings=\n self.bindings, application=self.TEST_APP), contract=contract)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def __str__(self):...\n", "return self.name\n" ]
[ "def __str__(self):...\n", "return self.name\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_30(self, VAR_5):...\n", "def FUNC_32(VAR_19):...\n", "return dict((expr, vimsupport.VimExpressionToPythonType(expr)) for expr in\n VAR_19)\n" ]
[ "def _AddExtraConfDataIfNeeded(self, extra_data):...\n", "def BuildExtraConfData(extra_conf_vim_data):...\n", "return dict((expr, vimsupport.VimExpressionToPythonType(expr)) for expr in\n extra_conf_vim_data)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Return'" ]
[ "def FUNC_4():...\n", "return 'cap multigraph dirtyconfig' + VAR_4\n" ]
[ "def cap():...\n", "return 'cap multigraph dirtyconfig' + LINEBREAK\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "import logging, concurrent.futures\n", "from utils import *\n", "from urllib.parse import urljoin, urlparse\n", "from threading import Lock\n", "def __init__(self, VAR_0, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5=None, VAR_6=...\n", "self.logger = logging.getLogger('fuxploider')\n", "self.postData = VAR_4\n", "self.formUrl = VAR_6\n", "VAR_18 = urlparse(self.formUrl)\n", "self.schema = VAR_18.scheme\n", "self.host = VAR_18.netloc\n", "self.uploadUrl = urljoin(VAR_6, VAR_7)\n", "self.session = VAR_2\n", "self.trueRegex = VAR_1\n", "self.notRegex = VAR_0\n", "self.inputName = VAR_8\n", "self.uploadsFolder = VAR_5\n", "self.size = VAR_3\n", "self.validExtensions = []\n", "self.httpRequests = 0\n", "self.codeExecUrlPattern = None\n", "self.logLock = Lock()\n", "self.stopThreads = False\n", "self.shouldLog = True\n", "def FUNC_0(self, VAR_9):...\n", "self.formUrl = VAR_9\n", "VAR_18 = urlparse(self.formUrl)\n", "self.schema = VAR_18.scheme\n", "self.host = VAR_18.netloc\n", "self.httpRequests = 0\n", "VAR_34 = self.session.get(self.formUrl, headers={'Accept-Encoding': None})\n", "self.logger.critical('%s : Host unreachable (%s)', getHost(VAR_9), e)\n", "VAR_21 = FUNC_7(VAR_34.text)\n", "self.httpRequests += 1\n", "exit()\n", "if len(VAR_21) == 0:\n", "if self.logger.verbosity > 1:\n", "self.logger.critical('No HTML form found here')\n", "if len(VAR_21) > 1:\n", "printSimpleResponseObject(VAR_34)\n", "if self.logger.verbosity > 2:\n", "exit()\n", "self.logger.critical(\n '%s forms found containing file upload inputs, no way to choose which one to test.'\n , len(VAR_21))\n", "if len(VAR_21[0][1]) > 1:\n", "print('\\x1b[36m' + VAR_34.text + '\\x1b[m')\n", "if VAR_34.status_code < 200 or VAR_34.status_code > 300:\n", "exit()\n", "self.logger.critical(\n '%s file inputs found inside the same form, no way to choose which one to test.'\n , len(VAR_21[0]))\n", "self.inputName = VAR_21[0][1][0]['name']\n", "self.logger.critical('Server responded with following status : %s - %s',\n VAR_34.status_code, VAR_34.reason)\n", "exit()\n", "self.logger.debug('Found the following file upload input : %s', self.inputName)\n", "exit()\n", "VAR_22 = VAR_21[0][0]\n", "self.action = VAR_22['action']\n", "self.action = ''\n", "self.uploadUrl = urljoin(self.formUrl, self.action)\n", "self.logger.debug('Using following URL for file upload : %s', self.uploadUrl)\n", "if not self.uploadsFolder and not self.trueRegex:\n", "self.logger.warning(\n 'No uploads folder nor true regex defined, code execution detection will not be possible.'\n )\n", "if not self.uploadsFolder and self.trueRegex:\n", "def FUNC_1(self, VAR_10, VAR_11, VAR_12):...\n", "print(\n 'No uploads path provided, code detection can still be done using true regex capturing group.'\n )\n", "fd.write(VAR_12)\n", "VAR_40 = input(\n 'Do you want to use the True Regex for code execution detection ? 
[Y/n] ')\n", "fd.flush()\n", "if VAR_40.lower().startswith('y') or VAR_40 == '':\n", "fd.seek(0)\n", "VAR_43 = input('Preffix capturing group of the true regex with : ')\n", "self.logger.warning(\n 'Code execution detection will not be possible as there is no path nor regex pattern configured.'\n )\n", "VAR_35 = os.path.basename(fd.name)\n", "VAR_44 = input('Suffix capturing group of the true regex with : ')\n", "if self.shouldLog:\n", "self.codeExecUrlPattern = VAR_43 + '$captGroup$' + VAR_44\n", "self.logger.debug('Sending file %s with mime type : %s', VAR_35, VAR_11)\n", "VAR_30 = self.session.post(self.uploadUrl, files={self.inputName: (filename,\n fd, mime)}, data=self.postData)\n", "self.httpRequests += 1\n", "if self.shouldLog:\n", "if self.logger.verbosity > 1:\n", "return VAR_30, VAR_35\n", "printSimpleResponseObject(VAR_30)\n", "if self.logger.verbosity > 2:\n", "print('\\x1b[36m' + VAR_30.text + '\\x1b[m')\n" ]
[ "import logging, concurrent.futures\n", "from utils import *\n", "from urllib.parse import urljoin, urlparse\n", "from threading import Lock\n", "def __init__(self, notRegex, trueRegex, session, size, postData,...\n", "self.logger = logging.getLogger('fuxploider')\n", "self.postData = postData\n", "self.formUrl = formUrl\n", "url = urlparse(self.formUrl)\n", "self.schema = url.scheme\n", "self.host = url.netloc\n", "self.uploadUrl = urljoin(formUrl, formAction)\n", "self.session = session\n", "self.trueRegex = trueRegex\n", "self.notRegex = notRegex\n", "self.inputName = inputName\n", "self.uploadsFolder = uploadsFolder\n", "self.size = size\n", "self.validExtensions = []\n", "self.httpRequests = 0\n", "self.codeExecUrlPattern = None\n", "self.logLock = Lock()\n", "self.stopThreads = False\n", "self.shouldLog = True\n", "def setup(self, initUrl):...\n", "self.formUrl = initUrl\n", "url = urlparse(self.formUrl)\n", "self.schema = url.scheme\n", "self.host = url.netloc\n", "self.httpRequests = 0\n", "initGet = self.session.get(self.formUrl, headers={'Accept-Encoding': None})\n", "self.logger.critical('%s : Host unreachable (%s)', getHost(initUrl), e)\n", "detectedForms = detectForms(initGet.text)\n", "self.httpRequests += 1\n", "exit()\n", "if len(detectedForms) == 0:\n", "if self.logger.verbosity > 1:\n", "self.logger.critical('No HTML form found here')\n", "if len(detectedForms) > 1:\n", "printSimpleResponseObject(initGet)\n", "if self.logger.verbosity > 2:\n", "exit()\n", "self.logger.critical(\n '%s forms found containing file upload inputs, no way to choose which one to test.'\n , len(detectedForms))\n", "if len(detectedForms[0][1]) > 1:\n", "print('\\x1b[36m' + initGet.text + '\\x1b[m')\n", "if initGet.status_code < 200 or initGet.status_code > 300:\n", "exit()\n", "self.logger.critical(\n '%s file inputs found inside the same form, no way to choose which one to test.'\n , len(detectedForms[0]))\n", "self.inputName = detectedForms[0][1][0]['name']\n", "self.logger.critical('Server responded with following status : %s - %s',\n initGet.status_code, initGet.reason)\n", "exit()\n", "self.logger.debug('Found the following file upload input : %s', self.inputName)\n", "exit()\n", "formDestination = detectedForms[0][0]\n", "self.action = formDestination['action']\n", "self.action = ''\n", "self.uploadUrl = urljoin(self.formUrl, self.action)\n", "self.logger.debug('Using following URL for file upload : %s', self.uploadUrl)\n", "if not self.uploadsFolder and not self.trueRegex:\n", "self.logger.warning(\n 'No uploads folder nor true regex defined, code execution detection will not be possible.'\n )\n", "if not self.uploadsFolder and self.trueRegex:\n", "def uploadFile(self, suffix, mime, payload):...\n", "print(\n 'No uploads path provided, code detection can still be done using true regex capturing group.'\n )\n", "fd.write(payload)\n", "cont = input(\n 'Do you want to use the True Regex for code execution detection ? 
[Y/n] ')\n", "fd.flush()\n", "if cont.lower().startswith('y') or cont == '':\n", "fd.seek(0)\n", "preffixPattern = input('Preffix capturing group of the true regex with : ')\n", "self.logger.warning(\n 'Code execution detection will not be possible as there is no path nor regex pattern configured.'\n )\n", "filename = os.path.basename(fd.name)\n", "suffixPattern = input('Suffix capturing group of the true regex with : ')\n", "if self.shouldLog:\n", "self.codeExecUrlPattern = preffixPattern + '$captGroup$' + suffixPattern\n", "self.logger.debug('Sending file %s with mime type : %s', filename, mime)\n", "fu = self.session.post(self.uploadUrl, files={self.inputName: (filename, fd,\n mime)}, data=self.postData)\n", "self.httpRequests += 1\n", "if self.shouldLog:\n", "if self.logger.verbosity > 1:\n", "return fu, filename\n", "printSimpleResponseObject(fu)\n", "if self.logger.verbosity > 2:\n", "print('\\x1b[36m' + fu.text + '\\x1b[m')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "AugAssign'", "Expr'", "Condition", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Condition", "Expr'", "Condition", "FunctionDef'", "Expr'", "Expr'", "Assign'", "Expr'", "Condition", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "AugAssign'", "Condition", "Condition", "Return'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_39(VAR_101):...\n", "VAR_101.benchmark = VAR_74\n", "return VAR_101\n" ]
[ "def decorate(ruleinfo):...\n", "ruleinfo.benchmark = benchmark\n", "return ruleinfo\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_1():...\n", "VAR_9 = FUNC_0()\n", "VAR_10 = VAR_9.cursor()\n", "VAR_10.execute(\"\"\"\n SELECT 1 FROM people LIMIT 1;\n \"\"\")\n", "VAR_10.execute(\"\"\"\n SELECT 1 FROM people LIMIT 1;\n \"\"\")\n", "VAR_10.fetchone()\n", "VAR_10.fetchone()\n", "VAR_2.debug('people table exists')\n", "VAR_2.debug('people table exists')\n" ]
[ "def check_tables():...\n", "db = db_connect()\n", "cursor = db.cursor()\n", "cursor.execute(\"\"\"\n SELECT 1 FROM people LIMIT 1;\n \"\"\")\n", "cursor.execute(\"\"\"\n SELECT 1 FROM people LIMIT 1;\n \"\"\")\n", "cursor.fetchone()\n", "cursor.fetchone()\n", "logger.debug('people table exists')\n", "logger.debug('people table exists')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_3(self, VAR_21, VAR_23=True):...\n", "\"\"\"docstring\"\"\"\n", "VAR_22 = self.fs\n", "for part in VAR_21.split('/'):\n", "if not len(part):\n", "return VAR_22[VAR_8]\n", "VAR_67 = False\n", "for c in VAR_22[VAR_8]:\n", "if c[VAR_1] == part:\n", "if not VAR_67:\n", "if c[VAR_2] == VAR_11:\n", "VAR_22 = self.getfile(c[VAR_9], VAR_23=follow_symlinks)\n", "VAR_22 = c\n", "VAR_67 = True\n" ]
[ "def get_path(self, path, follow_symlinks=True):...\n", "\"\"\"docstring\"\"\"\n", "cwd = self.fs\n", "for part in path.split('/'):\n", "if not len(part):\n", "return cwd[A_CONTENTS]\n", "ok = False\n", "for c in cwd[A_CONTENTS]:\n", "if c[A_NAME] == part:\n", "if not ok:\n", "if c[A_TYPE] == T_LINK:\n", "cwd = self.getfile(c[A_TARGET], follow_symlinks=follow_symlinks)\n", "cwd = c\n", "ok = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Condition", "Return'", "Assign'", "For", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Assign'" ]
[ "async def FUNC_3(VAR_6):...\n", "VAR_7 = {'username': VAR_0, 'password': 'testing123G'}\n", "VAR_8 = await VAR_6.post('/auth', VAR_7=json.dumps(data))\n", "VAR_9 = await VAR_8.json()\n", "print(VAR_9)\n", "VAR_1 = VAR_9['access_token']\n", "VAR_2 = VAR_9['refresh_token']\n", "assert VAR_1 is not None\n", "assert VAR_2 is not None\n", "assert VAR_8.status == 200\n" ]
[ "async def test_positive_login(test_cli):...\n", "data = {'username': username, 'password': 'testing123G'}\n", "resp = await test_cli.post('/auth', data=json.dumps(data))\n", "resp_json = await resp.json()\n", "print(resp_json)\n", "access_token = resp_json['access_token']\n", "refresh_token = resp_json['refresh_token']\n", "assert access_token is not None\n", "assert refresh_token is not None\n", "assert resp.status == 200\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assert'", "Assert'", "Assert'" ]
[ "def FUNC_4(VAR_9):...\n", "if VAR_9['exception']['TYPE'] == UnknownExtraConf.__name__:\n" ]
[ "def _RaiseExceptionForData(data):...\n", "if data['exception']['TYPE'] == UnknownExtraConf.__name__:\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_0(self, VAR_4):...\n", "return '{:<{}} : {}'.format(self._hostname, self.config['hostlen'] + 1, VAR_4)\n" ]
[ "def _fmt_msg(self, msg):...\n", "return '{:<{}} : {}'.format(self._hostname, self.config['hostlen'] + 1, msg)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self, VAR_10):...\n", "VAR_10 = VAR_10.strip()\n", "def FUNC_2():...\n", "VAR_30 = VAR_10.find(' ')\n", "VAR_24 = VAR_10[:VAR_30]\n", "VAR_42 = ''\n", "VAR_12 = VAR_10[VAR_30 + 1:]\n", "self.emph = 1 if VAR_24.find('*') > -1 else 0\n", "self.uncover = 2 if VAR_24.find('+') > -1 else 0\n", "self.kind = 0\n", "self.resume = False\n", "if VAR_24.find('.') > -1:\n", "self.kind = 1\n", "if VAR_24.find(',') > -1:\n", "super(CLASS_3, self).__init__(slideParser.parse(VAR_12, slideLexer), '%s' +\n self.markers[self.kind] % (self.specs[self.emph + self.uncover], VAR_42\n ), '\\n')\n", "self.kind = 1\n", "if VAR_24.find('=') > -1:\n", "CLASS_2.parsingQ.insert(0, FUNC_2)\n", "self.resume = True\n", "self.kind = 2\n", "VAR_43 = VAR_12.find('=')\n", "if VAR_43 == -1:\n", "VAR_43 = VAR_12.find(' ')\n", "if VAR_43 == -1:\n", "VAR_42 = VAR_12\n", "VAR_42 = VAR_12[:VAR_43]\n", "VAR_12 = ' '\n", "VAR_12 = VAR_12[VAR_43 + 1:]\n" ]
[ "def __init__(self, txt):...\n", "txt = txt.strip()\n", "def innerFunc():...\n", "i = txt.find(' ')\n", "marker = txt[:i]\n", "describee = ''\n", "content = txt[i + 1:]\n", "self.emph = 1 if marker.find('*') > -1 else 0\n", "self.uncover = 2 if marker.find('+') > -1 else 0\n", "self.kind = 0\n", "self.resume = False\n", "if marker.find('.') > -1:\n", "self.kind = 1\n", "if marker.find(',') > -1:\n", "super(ListItem, self).__init__(slideParser.parse(content, slideLexer), '%s' +\n self.markers[self.kind] % (self.specs[self.emph + self.uncover],\n describee), '\\n')\n", "self.kind = 1\n", "if marker.find('=') > -1:\n", "Slide.parsingQ.insert(0, innerFunc)\n", "self.resume = True\n", "self.kind = 2\n", "j = content.find('=')\n", "if j == -1:\n", "j = content.find(' ')\n", "if j == -1:\n", "describee = content\n", "describee = content[:j]\n", "content = ' '\n", "content = content[j + 1:]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_13(self, VAR_9, VAR_10=None, VAR_11=None, VAR_17='', VAR_18='',...\n", "\"\"\"docstring\"\"\"\n", "VAR_28 = self.get_and_check_task(VAR_9, VAR_10, VAR_11)\n", "VAR_29 = VAR_28.get_tile_path(VAR_17, VAR_18, VAR_19)\n", "if os.path.isfile(VAR_29):\n", "VAR_32 = open(VAR_29, 'rb')\n", "return HttpResponse(FileWrapper(VAR_32), content_type='image/png')\n" ]
[ "def get(self, request, pk=None, project_pk=None, z='', x='', y=''):...\n", "\"\"\"docstring\"\"\"\n", "task = self.get_and_check_task(request, pk, project_pk)\n", "tile_path = task.get_tile_path(z, x, y)\n", "if os.path.isfile(tile_path):\n", "tile = open(tile_path, 'rb')\n", "return HttpResponse(FileWrapper(tile), content_type='image/png')\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_1():...\n", "setup(**setup_args)\n" ]
[ "def main():...\n", "setup(**setup_args)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_0(self):...\n", "CLASS_1.ScenarioMaker().when_call_login_email_redirect(\n ).then_response_should_be_a_redirect_to_app_deeplink_with_params()\n" ]
[ "def test_when_called_redirect_view_redirects_to_apps_url(self):...\n", "RedirectLoginEmailTestCase.ScenarioMaker().when_call_login_email_redirect(\n ).then_response_should_be_a_redirect_to_app_deeplink_with_params()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "@property...\n", "VAR_65 = '.' if self._workdir is None else self._workdir\n", "if not os.path.isabs(VAR_65):\n", "return os.path.abspath(os.path.join(self.workflow.basedir, VAR_65))\n", "return VAR_65\n" ]
[ "@property...\n", "workdir = '.' if self._workdir is None else self._workdir\n", "if not os.path.isabs(workdir):\n", "return os.path.abspath(os.path.join(self.workflow.basedir, workdir))\n", "return workdir\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_33(self, VAR_24):...\n", "return self.regex().match(VAR_24) or None\n" ]
[ "def match(self, target):...\n", "return self.regex().match(target) or None\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@VAR_0.route('/login')...\n", "\"\"\"docstring\"\"\"\n", "return FUNC_3('login-action')\n" ]
[ "@authn_views.route('/login')...\n", "\"\"\"docstring\"\"\"\n", "return _authn('login-action')\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_3(VAR_9):...\n", "VAR_13, VAR_14, VAR_14, VAR_14 = VAR_3.find(VAR_9)\n", "return VAR_13['passed']\n" ]
[ "def is_passed(model):...\n", "entry, _, _, _ = cached_points.find(model)\n", "return entry['passed']\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_3(VAR_8, VAR_9, VAR_3='NGTREE', VAR_10='default', VAR_7=True,...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = 'CSV', 'TREE', 'JSON', 'YAML', 'NGTREE'\n", "if VAR_3 in VAR_11:\n", "VAR_0.info('Query: Finding Routed Paths (%s --> %s) for %s', VAR_8, VAR_9,\n nglib.user)\n", "return\n", "if re.search('^\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+$', VAR_8):\n", "VAR_12 = nglib.query.net.get_net(VAR_8, VAR_3='NGTREE')\n", "if re.search('^\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+$', VAR_9):\n", "VAR_8 = VAR_12['_child001']['Name']\n", "VAR_13 = nglib.query.net.get_net(VAR_9, VAR_3='NGTREE')\n", "VAR_19 = nglib.ngtree.get_ngtree('Path', tree_type='L3-PATH')\n", "if VAR_13:\n", "VAR_19['Path'] = VAR_8 + ' -> ' + VAR_9\n", "VAR_9 = VAR_13['_child001']['Name']\n", "VAR_19['Name'] = VAR_19['Path']\n", "VAR_23 = []\n", "VAR_27 = []\n", "VAR_28 = nglib.py2neo_ses.cypher.execute(\n 'MATCH (sn:Network), (dn:Network), rp = allShortestPaths ' +\n '((sn)-[:ROUTED|ROUTED_BY|ROUTED_STANDBY*0..12]-(dn)) ' +\n 'WHERE ALL(v IN rels(rp) WHERE v.vrf = {vrf}) ' +\n 'AND sn.cidr =~ {net1} AND dn.cidr =~ {net2}' +\n 'UNWIND nodes(rp) as r1 UNWIND nodes(rp) as r2 ' +\n 'MATCH (r1)<-[l1:ROUTED]-(n:Network {vrf:{vrf}})-[l2:ROUTED]->(r2) ' +\n 'OPTIONAL MATCH (n)-[:L3toL2]->(v:VLAN) ' +\n 'RETURN DISTINCT r1.name AS r1name, l1.gateway AS r1ip, ' +\n 'r2.name AS r2name, l2.gateway as r2ip, v.vid AS vid, ' +\n 'LENGTH(shortestPath((sn)<-[:ROUTED|ROUTED_BY|ROUTED_STANDBY*0..12]->(r1))) '\n + 'AS distance ORDER BY distance', {'net1': VAR_8, 'net2': VAR_9,\n 'vrf': VAR_10})\n", "VAR_29 = dict()\n", "for rec in VAR_28:\n", "VAR_40 = rec['r1name'], rec['r2name']\n", "for en in VAR_29:\n", "VAR_29[VAR_40] = rec['distance']\n", "if VAR_29[en] < VAR_29[tuple(reversed(en))]:\n", "VAR_27 = sorted(VAR_27, key=lambda tup: (tup[2], tup[0], tup[1]))\n", "VAR_41, VAR_42 = en\n", "for VAR_34 in VAR_27:\n", "VAR_43 = VAR_29[en]\n", "for rec in VAR_28:\n", "if VAR_23:\n", "VAR_27.append((VAR_41, VAR_42, VAR_43))\n", "if VAR_34[0] == rec['r1name'] and VAR_34[1] == rec['r2name']:\n", "VAR_19['Hops'] = len(VAR_23)\n", "if VAR_7:\n", "VAR_20 = nglib.ngtree.get_ngtree('Hop', tree_type='L3-HOP')\n", "VAR_19['Max Hops'] = max([s['distance'] for s in VAR_23])\n", "print('No results found for path between {:} and {:}'.format(VAR_8, VAR_9),\n file=sys.stderr)\n", "VAR_20['From Router'] = rec['r1name']\n", "VAR_19['VRF'] = VAR_10\n", "VAR_20['From IP'] = rec['r1ip']\n", "if VAR_3 == 'CSV':\n", "VAR_20['To Router'] = rec['r2name']\n", "nglib.query.print_dict_csv(VAR_23)\n", "VAR_19 = nglib.query.exp_ngtree(VAR_19, VAR_3)\n", "VAR_20['To IP'] = rec['r2ip']\n", "return VAR_19\n", "VAR_20['VLAN'] = rec['vid']\n", "VAR_43 = rec['distance']\n", "if VAR_43 != 1:\n", "VAR_43 = int((VAR_43 - 1) / 2) + 1\n", "VAR_20['distance'] = VAR_43\n", "VAR_20['Name'] = '#{:} {:}({:}) -> {:}({:})'.format(VAR_43, rec['r1name'],\n rec['r1ip'], rec['r2name'], rec['r2ip'])\n", "if VAR_4:\n", "VAR_54 = FUNC_2(rec['r1name'], rec['r2name'], VAR_7=False)\n", "nglib.ngtree.add_child_ngtree(VAR_19, VAR_20)\n", "for sp in VAR_54:\n", "VAR_23.append(VAR_20)\n", "if '_child' in sp and '_rvlans' in VAR_54[sp]:\n", "VAR_63 = '[^0-9]*' + rec['vid'] + '[^0-9]*'\n", "if re.search(VAR_63, VAR_54[sp]['_rvlans']):\n", "nglib.ngtree.add_child_ngtree(VAR_20, VAR_54[sp])\n" ]
[ "def get_routed_path(net1, net2, rtype='NGTREE', vrf='default', verbose=True,...\n", "\"\"\"docstring\"\"\"\n", "rtypes = 'CSV', 'TREE', 'JSON', 'YAML', 'NGTREE'\n", "if rtype in rtypes:\n", "logger.info('Query: Finding Routed Paths (%s --> %s) for %s', net1, net2,\n nglib.user)\n", "return\n", "if re.search('^\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+$', net1):\n", "n1tree = nglib.query.net.get_net(net1, rtype='NGTREE')\n", "if re.search('^\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+$', net2):\n", "net1 = n1tree['_child001']['Name']\n", "n2tree = nglib.query.net.get_net(net2, rtype='NGTREE')\n", "ngtree = nglib.ngtree.get_ngtree('Path', tree_type='L3-PATH')\n", "if n2tree:\n", "ngtree['Path'] = net1 + ' -> ' + net2\n", "net2 = n2tree['_child001']['Name']\n", "ngtree['Name'] = ngtree['Path']\n", "pathList = []\n", "pathRec = []\n", "rtrp = nglib.py2neo_ses.cypher.execute(\n 'MATCH (sn:Network), (dn:Network), rp = allShortestPaths ' +\n '((sn)-[:ROUTED|ROUTED_BY|ROUTED_STANDBY*0..12]-(dn)) ' +\n 'WHERE ALL(v IN rels(rp) WHERE v.vrf = {vrf}) ' +\n 'AND sn.cidr =~ {net1} AND dn.cidr =~ {net2}' +\n 'UNWIND nodes(rp) as r1 UNWIND nodes(rp) as r2 ' +\n 'MATCH (r1)<-[l1:ROUTED]-(n:Network {vrf:{vrf}})-[l2:ROUTED]->(r2) ' +\n 'OPTIONAL MATCH (n)-[:L3toL2]->(v:VLAN) ' +\n 'RETURN DISTINCT r1.name AS r1name, l1.gateway AS r1ip, ' +\n 'r2.name AS r2name, l2.gateway as r2ip, v.vid AS vid, ' +\n 'LENGTH(shortestPath((sn)<-[:ROUTED|ROUTED_BY|ROUTED_STANDBY*0..12]->(r1))) '\n + 'AS distance ORDER BY distance', {'net1': net1, 'net2': net2, 'vrf':\n vrf})\n", "allpaths = dict()\n", "for rec in rtrp:\n", "p = rec['r1name'], rec['r2name']\n", "for en in allpaths:\n", "allpaths[p] = rec['distance']\n", "if allpaths[en] < allpaths[tuple(reversed(en))]:\n", "pathRec = sorted(pathRec, key=lambda tup: (tup[2], tup[0], tup[1]))\n", "r1, r2 = en\n", "for path in pathRec:\n", "distance = allpaths[en]\n", "for rec in rtrp:\n", "if pathList:\n", "pathRec.append((r1, r2, distance))\n", "if path[0] == rec['r1name'] and path[1] == rec['r2name']:\n", "ngtree['Hops'] = len(pathList)\n", "if verbose:\n", "rtree = nglib.ngtree.get_ngtree('Hop', tree_type='L3-HOP')\n", "ngtree['Max Hops'] = max([s['distance'] for s in pathList])\n", "print('No results found for path between {:} and {:}'.format(net1, net2),\n file=sys.stderr)\n", "rtree['From Router'] = rec['r1name']\n", "ngtree['VRF'] = vrf\n", "rtree['From IP'] = rec['r1ip']\n", "if rtype == 'CSV':\n", "rtree['To Router'] = rec['r2name']\n", "nglib.query.print_dict_csv(pathList)\n", "ngtree = nglib.query.exp_ngtree(ngtree, rtype)\n", "rtree['To IP'] = rec['r2ip']\n", "return ngtree\n", "rtree['VLAN'] = rec['vid']\n", "distance = rec['distance']\n", "if distance != 1:\n", "distance = int((distance - 1) / 2) + 1\n", "rtree['distance'] = distance\n", "rtree['Name'] = '#{:} {:}({:}) -> {:}({:})'.format(distance, rec['r1name'],\n rec['r1ip'], rec['r2name'], rec['r2ip'])\n", "if l2path:\n", "spath = get_switched_path(rec['r1name'], rec['r2name'], verbose=False)\n", "nglib.ngtree.add_child_ngtree(ngtree, rtree)\n", "for sp in spath:\n", "pathList.append(rtree)\n", "if '_child' in sp and '_rvlans' in spath[sp]:\n", "vrgx = '[^0-9]*' + rec['vid'] + '[^0-9]*'\n", "if re.search(vrgx, spath[sp]['_rvlans']):\n", "nglib.ngtree.add_child_ngtree(rtree, spath[sp])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Expr'", "Return'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "For", "Assign'", "Condition", "Assign'", "Assign'", "For", "Assign'", "For", "Condition", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Return'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "For", "Expr'", "Condition", "Assign'", "Condition", "Expr'" ]
[ "@gen.coroutine...\n", "yield VAR_4.insert_many(VAR_5)\n" ]
[ "@gen.coroutine...\n", "yield collection.insert_many(items)\n" ]
[ 0, 0 ]
[ "Condition", "Expr'" ]
[ "def FUNC_13(self, VAR_11, VAR_12, VAR_13, VAR_14):...\n", "\"\"\"docstring\"\"\"\n", "VAR_31 = 'createhost -iscsi -persona %s -domain %s %s %s' % (VAR_14, VAR_13,\n VAR_11, VAR_12)\n", "VAR_32 = self.common._cli_run(VAR_31, None)\n", "if VAR_32 and len(VAR_32) > 1:\n", "return self.common.parse_create_host_error(VAR_11, VAR_32)\n", "return VAR_11\n" ]
[ "def _create_3par_iscsi_host(self, hostname, iscsi_iqn, domain, persona_id):...\n", "\"\"\"docstring\"\"\"\n", "cmd = 'createhost -iscsi -persona %s -domain %s %s %s' % (persona_id,\n domain, hostname, iscsi_iqn)\n", "out = self.common._cli_run(cmd, None)\n", "if out and len(out) > 1:\n", "return self.common.parse_create_host_error(hostname, out)\n", "return hostname\n" ]
[ 0, 0, 2, 2, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_1(self):...\n", "VAR_1 = list(self.uut.process_output(['1.0|0: Info message\\n',\n '2.2|1: Normal message\\n', \"\"\"3.4|2: Major message\n\"\"\"], 'a/file.py', [\n 'original_file_lines_placeholder']))\n", "self.assertEqual(len(VAR_1), 3)\n", "self.assertEqual(VAR_1[0].origin, 'Lint')\n", "self.assertEqual(VAR_1[0].affected_code[0], SourceRange.from_values(\n 'a/file.py', 1, 0))\n", "self.assertEqual(VAR_1[0].severity, RESULT_SEVERITY.INFO)\n", "self.assertEqual(VAR_1[0].message, 'Info message')\n", "self.assertEqual(VAR_1[1].affected_code[0], SourceRange.from_values(\n 'a/file.py', 2, 2))\n", "self.assertEqual(VAR_1[1].severity, RESULT_SEVERITY.NORMAL)\n", "self.assertEqual(VAR_1[1].message, 'Normal message')\n", "self.assertEqual(VAR_1[2].affected_code[0], SourceRange.from_values(\n 'a/file.py', 3, 4))\n", "self.assertEqual(VAR_1[2].severity, RESULT_SEVERITY.MAJOR)\n", "self.assertEqual(VAR_1[2].message, 'Major message')\n" ]
[ "def test_invalid_output(self):...\n", "out = list(self.uut.process_output(['1.0|0: Info message\\n',\n '2.2|1: Normal message\\n', \"\"\"3.4|2: Major message\n\"\"\"], 'a/file.py', [\n 'original_file_lines_placeholder']))\n", "self.assertEqual(len(out), 3)\n", "self.assertEqual(out[0].origin, 'Lint')\n", "self.assertEqual(out[0].affected_code[0], SourceRange.from_values(\n 'a/file.py', 1, 0))\n", "self.assertEqual(out[0].severity, RESULT_SEVERITY.INFO)\n", "self.assertEqual(out[0].message, 'Info message')\n", "self.assertEqual(out[1].affected_code[0], SourceRange.from_values(\n 'a/file.py', 2, 2))\n", "self.assertEqual(out[1].severity, RESULT_SEVERITY.NORMAL)\n", "self.assertEqual(out[1].message, 'Normal message')\n", "self.assertEqual(out[2].affected_code[0], SourceRange.from_values(\n 'a/file.py', 3, 4))\n", "self.assertEqual(out[2].severity, RESULT_SEVERITY.MAJOR)\n", "self.assertEqual(out[2].message, 'Major message')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "@VAR_0.before_app_first_request...\n", "\"\"\"docstring\"\"\"\n", "user_logged_out.connect(oauth_logout_handler)\n", "oauth.init_app(current_app)\n", "for VAR_1, conf in cfg['OAUTHCLIENT_REMOTE_APPS'].items():\n", "if VAR_1 not in oauth.remote_apps:\n", "VAR_4 = oauth.remote_app(VAR_1, **conf['params'])\n", "VAR_4 = oauth.remote_apps[VAR_1]\n", "VAR_4.tokengetter(make_token_getter(VAR_4))\n", "handlers.register(VAR_1, VAR_4.authorized_handler(make_handler(conf.get(\n 'authorized_handler', authorized_default_handler), VAR_4)))\n", "disconnect_handlers.register(VAR_1, make_handler(conf.get(\n 'disconnect_handler', disconnect_handler), VAR_4, with_response=False))\n", "def FUNC_5(VAR_4, *VAR_5, **VAR_6):...\n", "VAR_7 = conf.get('signup_handler', dict())\n", "VAR_8 = make_handler(VAR_7.get('info', FUNC_5), VAR_4, with_response=False)\n", "VAR_9 = make_handler(VAR_7.get('setup', FUNC_5), VAR_4, with_response=False)\n", "VAR_10 = make_handler(VAR_7.get('view', FUNC_5), VAR_4, with_response=False)\n", "signup_handlers.register(VAR_1, dict(info=account_info_handler, setup=\n account_setup_handler, view=account_view_handler))\n" ]
[ "@blueprint.before_app_first_request...\n", "\"\"\"docstring\"\"\"\n", "user_logged_out.connect(oauth_logout_handler)\n", "oauth.init_app(current_app)\n", "for remote_app, conf in cfg['OAUTHCLIENT_REMOTE_APPS'].items():\n", "if remote_app not in oauth.remote_apps:\n", "remote = oauth.remote_app(remote_app, **conf['params'])\n", "remote = oauth.remote_apps[remote_app]\n", "remote.tokengetter(make_token_getter(remote))\n", "handlers.register(remote_app, remote.authorized_handler(make_handler(conf.\n get('authorized_handler', authorized_default_handler), remote)))\n", "disconnect_handlers.register(remote_app, make_handler(conf.get(\n 'disconnect_handler', disconnect_handler), remote, with_response=False))\n", "def dummy_handler(remote, *args, **kargs):...\n", "signup_handler = conf.get('signup_handler', dict())\n", "account_info_handler = make_handler(signup_handler.get('info',\n dummy_handler), remote, with_response=False)\n", "account_setup_handler = make_handler(signup_handler.get('setup',\n dummy_handler), remote, with_response=False)\n", "account_view_handler = make_handler(signup_handler.get('view',\n dummy_handler), remote, with_response=False)\n", "signup_handlers.register(remote_app, dict(info=account_info_handler, setup=\n account_setup_handler, view=account_view_handler))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "For", "Docstring", "Expr'", "Expr'", "For", "Condition", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "@login_required...\n", "" ]
[ "@login_required...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "@VAR_0.route('/level-1')...\n", "if int(current_user.progress) >= 1:\n", "return render_template('ui.html', VAR_6='1', VAR_7='index', level_progress=\n current_user.level1_progress, max_level_progress=3)\n", "return redirect(VAR_4)\n" ]
[ "@app.route('/level-1')...\n", "if int(current_user.progress) >= 1:\n", "return render_template('ui.html', level='1', page='index', level_progress=\n current_user.level1_progress, max_level_progress=3)\n", "return redirect(url_prefix)\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_5(VAR_3):...\n", "VAR_14 = None\n", "VAR_15 = {}\n", "def FUNC_16(VAR_5, VAR_6):...\n", "VAR_38 = 5\n", "VAR_39 = VAR_14 + timedelta(minutes=cache_time) < datetime.now(\n ) if VAR_14 else True\n", "if not VAR_14 or VAR_39 or VAR_5 not in VAR_15:\n", "VAR_14 = datetime.now()\n", "log.info('Returning cached result of %s', VAR_3.__name__)\n", "VAR_50 = VAR_3(VAR_5, VAR_6)\n", "VAR_51 = VAR_14 + timedelta(minutes=cache_time) - datetime.now()\n", "VAR_15[VAR_5] = VAR_50\n", "log.debug('Time to to reevaluate result of %s is %s', VAR_3.__name__, str(\n VAR_51)[:-7])\n", "return VAR_50\n", "return VAR_15[VAR_5]\n" ]
[ "def cache_number_users_with_same_feature(func):...\n", "when_was_called = None\n", "result = {}\n", "def func_launcher(feature, feature_type):...\n", "cache_time = 5\n", "high_time = when_was_called + timedelta(minutes=cache_time) < datetime.now(\n ) if when_was_called else True\n", "if not when_was_called or high_time or feature not in result:\n", "when_was_called = datetime.now()\n", "log.info('Returning cached result of %s', func.__name__)\n", "num_of_users = func(feature, feature_type)\n", "time_left = when_was_called + timedelta(minutes=cache_time) - datetime.now()\n", "result[feature] = num_of_users\n", "log.debug('Time to to reevaluate result of %s is %s', func.__name__, str(\n time_left)[:-7])\n", "return num_of_users\n", "return result[feature]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'", "Return'" ]
[ "def FUNC_8(self):...\n", "clear_cache('writers')\n" ]
[ "def on_update(self):...\n", "clear_cache('writers')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_18(VAR_16):...\n", "VAR_40 = ''\n", "VAR_41 = 1\n", "for item in VAR_16:\n", "if not item[0]:\n", "return VAR_40\n", "VAR_40 += '{}. {}\\n'.format(VAR_41, item[0])\n", "VAR_41 += 1\n" ]
[ "def list_to_ordered_str_list(list_of_gadgets):...\n", "string_roaster = ''\n", "index = 1\n", "for item in list_of_gadgets:\n", "if not item[0]:\n", "return string_roaster\n", "string_roaster += '{}. {}\\n'.format(index, item[0])\n", "index += 1\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Condition", "Return'", "AugAssign'", "AugAssign'" ]
[ "def FUNC_2(VAR_4):...\n", "@VAR_4.cli.command('initdb')...\n", "\"\"\"docstring\"\"\"\n", "FUNC_3()\n", "print('Initialized the database.')\n" ]
[ "def register_cli(app):...\n", "@app.cli.command('initdb')...\n", "\"\"\"docstring\"\"\"\n", "init_db()\n", "print('Initialized the database.')\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Docstring", "Expr'", "Expr'" ]
[ "def __init__(self, VAR_0, VAR_1=1, VAR_2='-', VAR_3=False, VAR_4=...\n", "\"\"\"docstring\"\"\"\n", "assert not VAR_3 or VAR_3 and VAR_1 > 1\n", "assert VAR_4 == 'FirstOrderTransitions' or VAR_4 == 'KOrderPi', 'Error: unknown method to build null model'\n", "assert VAR_0.paths.keys() and max(VAR_0.paths.keys()\n ) >= VAR_1, 'Error: constructing a model of order k requires paths of at least length k'\n", "self.order = VAR_1\n", "self.paths = VAR_0\n", "self.nodes = []\n", "self.separator = VAR_2\n", "self.successors = _co.defaultdict(lambda : set())\n", "self.predecessors = _co.defaultdict(lambda : set())\n", "self.outdegrees = _co.defaultdict(lambda : 0.0)\n", "self.indegrees = _co.defaultdict(lambda : 0.0)\n", "self.edges = _co.defaultdict(lambda : _np.array([0.0, 0.0]))\n", "self.inweights = _co.defaultdict(lambda : _np.array([0.0, 0.0]))\n", "self.outweights = _co.defaultdict(lambda : _np.array([0.0, 0.0]))\n", "if VAR_1 > 1:\n", "VAR_38 = CLASS_1(VAR_0, VAR_1=1)\n", "if not VAR_3:\n", "VAR_13 = VAR_38.getAdjacencyMatrix(VAR_10=True, VAR_11=False, VAR_12=True)\n", "VAR_39 = set()\n", "VAR_41 = list(VAR_38.edges.keys())\n", "VAR_40 = VAR_0.paths[VAR_1].items()\n", "for _ in range(VAR_1 - 1):\n", "if VAR_1 == 0:\n", "VAR_48 = list()\n", "assert (VAR_13 ** VAR_1).sum() == len(VAR_41), 'Expected ' + str((VAR_13 **\n VAR_1).sum()) + ' paths but got ' + str(len(VAR_41))\n", "VAR_39.add('start')\n", "for key, val in VAR_40:\n", "for e1 in VAR_41:\n", "if VAR_4 == 'KOrderPi':\n", "for key, val in VAR_40:\n", "VAR_25 = VAR_2.join(key[0:-1])\n", "self.nodes = list(VAR_39)\n", "for e2 in VAR_38.edges:\n", "VAR_41 = VAR_48\n", "VAR_49 = CLASS_1(VAR_0, VAR_1=k, VAR_2=separator, VAR_3=False)\n", "VAR_36 = VAR_38.getTransitionMatrix(VAR_10=True)\n", "VAR_34 = key[0]\n", "VAR_34 = VAR_2.join(key[1:])\n", "if VAR_1 == 0:\n", "if e1[-1] == e2[0]:\n", "VAR_50 = CLASS_1.getLeadingEigenvector(VAR_49.getTransitionMatrix(VAR_10=\n True), VAR_14=True, VAR_5=lanczosVecs, VAR_6=maxiter)\n", "for VAR_56 in VAR_41:\n", "VAR_39.add(VAR_34)\n", "VAR_39.add(VAR_25)\n", "self.dof_paths = self.vcount() - 2\n", "if VAR_1 == 1:\n", "VAR_56 = e1 + (e2[1],)\n", "VAR_25 = VAR_56[0]\n", "self.edges['start', VAR_34] += val\n", "VAR_39.add(VAR_34)\n", "self.dof_ngrams = self.vcount() - 2\n", "VAR_38 = self\n", "VAR_42 = VAR_38.vcount()\n", "VAR_48.append(VAR_56)\n", "for l in range(1, VAR_1):\n", "self.successors['start'].add(VAR_34)\n", "self.edges[VAR_25, VAR_34] += val\n", "VAR_13 = VAR_38.getAdjacencyMatrix(VAR_10=True, VAR_11=False, VAR_12=True)\n", "self.dof_ngrams = VAR_42 ** VAR_1 * (VAR_42 - 1)\n", "VAR_25 = VAR_25 + VAR_2 + VAR_56[l]\n", "VAR_34 = VAR_56[1]\n", "self.predecessors[VAR_34].add('start')\n", "self.successors[VAR_25].add(VAR_34)\n", "VAR_43 = (VAR_13 ** VAR_1).sum()\n", "for l in range(2, VAR_1 + 1):\n", "self.indegrees[VAR_34] = len(self.predecessors[VAR_34])\n", "self.predecessors[VAR_34].add(VAR_25)\n", "VAR_44 = _np.count_nonzero((VAR_13 ** VAR_1).sum(axis=0))\n", "VAR_34 = VAR_34 + VAR_2 + VAR_56[l]\n", "if VAR_25 not in self.nodes:\n", "self.inweights[VAR_34] += val\n", "self.indegrees[VAR_34] = len(self.predecessors[VAR_34])\n", "self.dof_paths = VAR_43 - VAR_44\n", "self.nodes.append(VAR_25)\n", "if VAR_34 not in self.nodes:\n", "self.outdegrees['start'] = len(self.successors['start'])\n", "self.inweights[VAR_34] += val\n", "self.nodes.append(VAR_34)\n", "if VAR_4 == 'KOrderPi':\n", "self.outweights['start'] += val\n", "self.outdegrees[VAR_25] = len(self.successors[VAR_25])\n", 
"self.edges[VAR_25, VAR_34] = _np.array([0, VAR_50[VAR_49.nodes.index(VAR_34)]])\n", "if VAR_4 == 'FirstOrderTransitions':\n", "self.outweights[VAR_25] += val\n", "self.successors[VAR_25].add(VAR_34)\n", "VAR_55 = VAR_36[VAR_38.nodes.index(VAR_56[-1]), VAR_38.nodes.index(VAR_56[-2])]\n", "self.indegrees[VAR_34] = len(self.predecessors[VAR_34])\n", "self.edges[VAR_25, VAR_34] = _np.array([0, VAR_55])\n", "self.inweights[VAR_34] += self.edges[VAR_25, VAR_34]\n", "self.outdegrees[VAR_25] = len(self.successors[VAR_25])\n", "self.outweights[VAR_25] += self.edges[VAR_25, VAR_34]\n" ]
[ "def __init__(self, paths, k=1, separator='-', nullModel=False, method=...\n", "\"\"\"docstring\"\"\"\n", "assert not nullModel or nullModel and k > 1\n", "assert method == 'FirstOrderTransitions' or method == 'KOrderPi', 'Error: unknown method to build null model'\n", "assert paths.paths.keys() and max(paths.paths.keys()\n ) >= k, 'Error: constructing a model of order k requires paths of at least length k'\n", "self.order = k\n", "self.paths = paths\n", "self.nodes = []\n", "self.separator = separator\n", "self.successors = _co.defaultdict(lambda : set())\n", "self.predecessors = _co.defaultdict(lambda : set())\n", "self.outdegrees = _co.defaultdict(lambda : 0.0)\n", "self.indegrees = _co.defaultdict(lambda : 0.0)\n", "self.edges = _co.defaultdict(lambda : _np.array([0.0, 0.0]))\n", "self.inweights = _co.defaultdict(lambda : _np.array([0.0, 0.0]))\n", "self.outweights = _co.defaultdict(lambda : _np.array([0.0, 0.0]))\n", "if k > 1:\n", "g1 = HigherOrderNetwork(paths, k=1)\n", "if not nullModel:\n", "A = g1.getAdjacencyMatrix(includeSubPaths=True, weighted=False, transposed=True\n )\n", "node_set = set()\n", "possiblePaths = list(g1.edges.keys())\n", "iterator = paths.paths[k].items()\n", "for _ in range(k - 1):\n", "if k == 0:\n", "E_new = list()\n", "assert (A ** k).sum() == len(possiblePaths), 'Expected ' + str((A ** k).sum()\n ) + ' paths but got ' + str(len(possiblePaths))\n", "node_set.add('start')\n", "for key, val in iterator:\n", "for e1 in possiblePaths:\n", "if method == 'KOrderPi':\n", "for key, val in iterator:\n", "v = separator.join(key[0:-1])\n", "self.nodes = list(node_set)\n", "for e2 in g1.edges:\n", "possiblePaths = E_new\n", "g_k = HigherOrderNetwork(paths, k=k, separator=separator, nullModel=False)\n", "T = g1.getTransitionMatrix(includeSubPaths=True)\n", "w = key[0]\n", "w = separator.join(key[1:])\n", "if k == 0:\n", "if e1[-1] == e2[0]:\n", "pi_k = HigherOrderNetwork.getLeadingEigenvector(g_k.getTransitionMatrix(\n includeSubPaths=True), normalized=True, lanczosVecs=lanczosVecs,\n maxiter=maxiter)\n", "for p in possiblePaths:\n", "node_set.add(w)\n", "node_set.add(v)\n", "self.dof_paths = self.vcount() - 2\n", "if k == 1:\n", "p = e1 + (e2[1],)\n", "v = p[0]\n", "self.edges['start', w] += val\n", "node_set.add(w)\n", "self.dof_ngrams = self.vcount() - 2\n", "g1 = self\n", "s = g1.vcount()\n", "E_new.append(p)\n", "for l in range(1, k):\n", "self.successors['start'].add(w)\n", "self.edges[v, w] += val\n", "A = g1.getAdjacencyMatrix(includeSubPaths=True, weighted=False, transposed=True\n )\n", "self.dof_ngrams = s ** k * (s - 1)\n", "v = v + separator + p[l]\n", "w = p[1]\n", "self.predecessors[w].add('start')\n", "self.successors[v].add(w)\n", "paths_k = (A ** k).sum()\n", "for l in range(2, k + 1):\n", "self.indegrees[w] = len(self.predecessors[w])\n", "self.predecessors[w].add(v)\n", "non_zero = _np.count_nonzero((A ** k).sum(axis=0))\n", "w = w + separator + p[l]\n", "if v not in self.nodes:\n", "self.inweights[w] += val\n", "self.indegrees[w] = len(self.predecessors[w])\n", "self.dof_paths = paths_k - non_zero\n", "self.nodes.append(v)\n", "if w not in self.nodes:\n", "self.outdegrees['start'] = len(self.successors['start'])\n", "self.inweights[w] += val\n", "self.nodes.append(w)\n", "if method == 'KOrderPi':\n", "self.outweights['start'] += val\n", "self.outdegrees[v] = len(self.successors[v])\n", "self.edges[v, w] = _np.array([0, pi_k[g_k.nodes.index(w)]])\n", "if method == 'FirstOrderTransitions':\n", "self.outweights[v] += val\n", 
"self.successors[v].add(w)\n", "p_vw = T[g1.nodes.index(p[-1]), g1.nodes.index(p[-2])]\n", "self.indegrees[w] = len(self.predecessors[w])\n", "self.edges[v, w] = _np.array([0, p_vw])\n", "self.inweights[w] += self.edges[v, w]\n", "self.outdegrees[v] = len(self.successors[v])\n", "self.outweights[v] += self.edges[v, w]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assert'", "Assert'", "Assert'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Assign'", "Assert'", "Expr'", "For", "For", "Condition", "For", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "For", "Expr'", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "AugAssign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "For", "Expr'", "AugAssign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "For", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "AugAssign'", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "AugAssign'", "Expr'", "Condition", "AugAssign'", "Assign'", "Assign'", "Condition", "AugAssign'", "Expr'", "Assign'", "Assign'", "Assign'", "AugAssign'", "Assign'", "AugAssign'" ]
[ "def FUNC_2():...\n", "return [FUNC_1, UnionRule(TestTarget, PythonTestsAdaptor), optionable_rule(\n PyTest), optionable_rule(PythonSetup), optionable_rule(SourceRootConfig)]\n" ]
[ "def rules():...\n", "return [run_python_test, UnionRule(TestTarget, PythonTestsAdaptor),\n optionable_rule(PyTest), optionable_rule(PythonSetup), optionable_rule(\n SourceRootConfig)]\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@VAR_4.user_loader...\n", "return CLASS_0(VAR_6)\n" ]
[ "@login_manager.user_loader...\n", "return User(userid)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "import sqlobject\n", "import vdm.sqlobject.base as vdmbase\n", "import vdm.base as vdmbase\n", "VAR_16 = 'name'\n", "from vdm.sqlobject.base import State\n", "from vdm.base import State\n", "VAR_0 = sqlobject.UnicodeCol(alternateID=True)\n", "VAR_1 = sqlobject.MultipleJoin('Package')\n", "VAR_2 = sqlobject.ForeignKey('Package', cascade=True)\n", "VAR_3 = sqlobject.UnicodeCol(default=None)\n", "VAR_4 = sqlobject.UnicodeCol(default=None)\n", "VAR_5 = sqlobject.UnicodeCol(default=None)\n", "VAR_6 = sqlobject.ForeignKey('License', default=None)\n", "VAR_7 = sqlobject.UnicodeCol(default=None)\n", "VAR_2 = sqlobject.ForeignKey('Tag', cascade=True)\n", "VAR_2 = sqlobject.ForeignKey('PackageTag', cascade=True)\n", "VAR_8 = CLASS_1\n", "VAR_9 = vdmbase.get_attribute_names(VAR_8)\n", "VAR_0 = sqlobject.UnicodeCol(alternateID=True)\n", "VAR_10 = [('tags', 'ckan.models.package', 'Tag', 'PackageTag')]\n", "def FUNC_0(self, VAR_11):...\n", "VAR_14 = self.revision.model.tags.get(VAR_11)\n", "VAR_14 = self.transaction.model.tags.create(VAR_0=tagname)\n", "self.tags.create(VAR_14=tag)\n", "VAR_8 = CLASS_2\n", "VAR_0 = sqlobject.UnicodeCol(alternateID=True)\n", "VAR_9 = vdmbase.get_attribute_names(VAR_8)\n", "VAR_10 = [('packages', 'ckan.models.package', 'Package', 'PackageTag')]\n", "@classmethod...\n", "VAR_17 = str(VAR_12)\n", "VAR_18 = \"UPPER(tag.name) LIKE UPPER('%%%s%%')\" % VAR_17\n", "return self.select(VAR_18)\n" ]
[ "import sqlobject\n", "import vdm.sqlobject.base as vdmbase\n", "import vdm.base as vdmbase\n", "_defaultOrder = 'name'\n", "from vdm.sqlobject.base import State\n", "from vdm.base import State\n", "name = sqlobject.UnicodeCol(alternateID=True)\n", "packages = sqlobject.MultipleJoin('Package')\n", "base = sqlobject.ForeignKey('Package', cascade=True)\n", "title = sqlobject.UnicodeCol(default=None)\n", "url = sqlobject.UnicodeCol(default=None)\n", "download_url = sqlobject.UnicodeCol(default=None)\n", "license = sqlobject.ForeignKey('License', default=None)\n", "notes = sqlobject.UnicodeCol(default=None)\n", "base = sqlobject.ForeignKey('Tag', cascade=True)\n", "base = sqlobject.ForeignKey('PackageTag', cascade=True)\n", "sqlobj_version_class = PackageRevision\n", "versioned_attributes = vdmbase.get_attribute_names(sqlobj_version_class)\n", "name = sqlobject.UnicodeCol(alternateID=True)\n", "m2m = [('tags', 'ckan.models.package', 'Tag', 'PackageTag')]\n", "def add_tag_by_name(self, tagname):...\n", "tag = self.revision.model.tags.get(tagname)\n", "tag = self.transaction.model.tags.create(name=tagname)\n", "self.tags.create(tag=tag)\n", "sqlobj_version_class = TagRevision\n", "name = sqlobject.UnicodeCol(alternateID=True)\n", "versioned_attributes = vdmbase.get_attribute_names(sqlobj_version_class)\n", "m2m = [('packages', 'ckan.models.package', 'Package', 'PackageTag')]\n", "@classmethod...\n", "text_query_str = str(text_query)\n", "sql_query = \"UPPER(tag.name) LIKE UPPER('%%%s%%')\" % text_query_str\n", "return self.select(sql_query)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4 ]
[ "Import'", "Import'", "Import'", "Assign'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_4(VAR_9):...\n", "if VAR_9['exception']['TYPE'] == UnknownExtraConf.__name__:\n" ]
[ "def _RaiseExceptionForData(data):...\n", "if data['exception']['TYPE'] == UnknownExtraConf.__name__:\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_6(self, VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "if self.lasttok == 'EXC':\n", "print(VAR_3)\n", "VAR_3.value = VAR_3.value.strip()\n", "VAR_21 = VAR_3.value.upper()\n", "if VAR_21 in self.reserved:\n", "VAR_3.type = VAR_21\n", "if self.lasttok == 'WITH':\n", "self.lasttok = VAR_3.type\n", "VAR_3.type = 'EXC'\n", "self.validate(VAR_3)\n", "return VAR_3\n" ]
[ "def t_ID(self, tok):...\n", "\"\"\"docstring\"\"\"\n", "if self.lasttok == 'EXC':\n", "print(tok)\n", "tok.value = tok.value.strip()\n", "val = tok.value.upper()\n", "if val in self.reserved:\n", "tok.type = val\n", "if self.lasttok == 'WITH':\n", "self.lasttok = tok.type\n", "tok.type = 'EXC'\n", "self.validate(tok)\n", "return tok\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_0(*VAR_0, **VAR_1):...\n", "VAR_2 = requests.Response()\n", "VAR_2.status_code = 200\n", "VAR_2._content_consumed = True\n", "return VAR_2\n" ]
[ "def mock_import_repository_task(*args, **kwargs):...\n", "resp = requests.Response()\n", "resp.status_code = 200\n", "resp._content_consumed = True\n", "return resp\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_26(self, VAR_14, VAR_30):...\n", "VAR_12 = \"%s '%s' '%s'\" % (VAR_5, VAR_30, VAR_14)\n", "FUNC_7(self.session, VAR_12)\n" ]
[ "def start_clone_session(self, comp_name, session_name):...\n", "cmd = \"%s '%s' '%s'\" % (SCRIPT_CLONE_PATH, session_name, comp_name)\n", "send_main_session_command(self.session, cmd)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_35(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = {'SEVERITY': {'LOW': 7}, 'CONFIDENCE': {'HIGH': 7}}\n", "self.check_example('skip.py', VAR_2, VAR_1=True)\n" ]
[ "def test_ignore_skip(self):...\n", "\"\"\"docstring\"\"\"\n", "expect = {'SEVERITY': {'LOW': 7}, 'CONFIDENCE': {'HIGH': 7}}\n", "self.check_example('skip.py', expect, ignore_nosec=True)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "def FUNC_1(VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = block.sysfs_to_devpath(VAR_1)\n", "VAR_12, VAR_13 = util.subp(['dmsetup', 'info', VAR_11, '-C', '-o', 'uuid',\n '--noheadings'], capture=True)\n", "return VAR_12.strip()\n" ]
[ "def get_dmsetup_uuid(device):...\n", "\"\"\"docstring\"\"\"\n", "blockdev = block.sysfs_to_devpath(device)\n", "out, _ = util.subp(['dmsetup', 'info', blockdev, '-C', '-o', 'uuid',\n '--noheadings'], capture=True)\n", "return out.strip()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Return'" ]
[ "import praw\n", "import time\n", "from datetime import datetime\n", "from time import sleep\n", "from rpc_bindings import send, open_account, generate_account, generate_qr, nano_to_raw, receive_all, send_all, check_balance, validate_address, open_or_receive\n", "import mysql.connector\n", "import pprint\n", "VAR_0 = 'string'\n", "VAR_1 = 'string'\n", "VAR_2 = praw.Reddit('bot1')\n", "VAR_3 = VAR_2.subreddit('nano_tipper_z+cryptocurrency247')\n", "VAR_4 = []\n", "VAR_5 = []\n", "VAR_6 = []\n", "VAR_7 = []\n", "VAR_8 = []\n", "VAR_9 = time.time()\n", "VAR_10 = 0.001\n", "VAR_11 = 0.01\n", "VAR_29 = f.read()\n", "VAR_12 = mysql.connector.connect(user='root', password=sql_password, host=\n 'localhost', auth_plugin='mysql_native_password', database='nano_tipper_z')\n", "VAR_13 = VAR_12.cursor()\n", "def FUNC_0():...\n", "VAR_30 = {comment for comment in VAR_3.comments()}\n", "VAR_31 = {VAR_27 for VAR_27 in VAR_2.inbox.unread()}\n", "print('received first stream')\n", "while True:\n", "sleep(6)\n", "VAR_9 = time.time()\n", "VAR_47 = {comment for comment in VAR_3.comments()}\n", "VAR_48 = VAR_47 - VAR_30\n", "VAR_30 = VAR_47\n", "VAR_49 = {VAR_27 for VAR_27 in VAR_2.inbox.unread()}\n", "VAR_50 = VAR_49 - VAR_31\n", "VAR_31 = VAR_49\n", "if len(VAR_48) >= 1:\n", "for new_comment in VAR_48:\n", "if len(VAR_50) >= 1:\n", "print('full name: ', new_comment.name)\n", "for new_message in VAR_50:\n", "yield None\n", "if new_comment.name[:3] == 't1_':\n", "print('full name: ', new_message.name)\n", "yield 'comment', new_comment\n", "if new_message.name[:3] == 't4_':\n", "yield 'message', new_message\n" ]
[ "import praw\n", "import time\n", "from datetime import datetime\n", "from time import sleep\n", "from rpc_bindings import send, open_account, generate_account, generate_qr, nano_to_raw, receive_all, send_all, check_balance, validate_address, open_or_receive\n", "import mysql.connector\n", "import pprint\n", "comment_footer = \"\"\"\n\n\n*Nano Tipper Z Bot v0.1. Replies to this comment might be treated as PM commands. This program is in beta testing,\n and your funds could be lost.*\n\"\"\"\n", "help_text = \"\"\"\nNano Tipper Z Bot v0.1. Use at your own risk, and don't put in more Nano than you're willing to lose.\n\n\nTo perform a command, create a new message with any of the following commands in the message body.\n\n\n'create' - Create a new account if one does not exist\n\n\n'private_key' - (disabled) Retrieve your account private key\n\n\n'new_address' - (disabled) If you feel this address was compromised, create a new account and key\n\n\n'send <amount> <user/address> - Send Nano to a reddit user or an address\n\n\n'receive' - Receive all pending transactions\n\n\n'balance' - Retrieve your account balance. Includes both pocketed and unpocketed transactions.\n\n\n'minimum <amount>' - Sets a minimum amount for receiving tips. Program minimum is 0.001 Nano.\n\n\n'help' - Get this help message\n\n\n\nIf you have any questions or bug fixes, please contact /u/zily88.\n\"\"\"\n", "reddit = praw.Reddit('bot1')\n", "subreddit = reddit.subreddit('nano_tipper_z+cryptocurrency247')\n", "tip_froms = []\n", "tip_parents = []\n", "tip_tos = []\n", "tip_comments = []\n", "tip_amounts = []\n", "last_action = time.time()\n", "program_minimum = 0.001\n", "recipient_minimum = 0.01\n", "sql_password = f.read()\n", "mydb = mysql.connector.connect(user='root', password=sql_password, host=\n 'localhost', auth_plugin='mysql_native_password', database='nano_tipper_z')\n", "mycursor = mydb.cursor()\n", "def stream_comments_messages():...\n", "previous_comments = {comment for comment in subreddit.comments()}\n", "previous_messages = {message for message in reddit.inbox.unread()}\n", "print('received first stream')\n", "while True:\n", "sleep(6)\n", "last_action = time.time()\n", "updated_comments = {comment for comment in subreddit.comments()}\n", "new_comments = updated_comments - previous_comments\n", "previous_comments = updated_comments\n", "updated_messages = {message for message in reddit.inbox.unread()}\n", "new_messages = updated_messages - previous_messages\n", "previous_messages = updated_messages\n", "if len(new_comments) >= 1:\n", "for new_comment in new_comments:\n", "if len(new_messages) >= 1:\n", "print('full name: ', new_comment.name)\n", "for new_message in new_messages:\n", "yield None\n", "if new_comment.name[:3] == 't1_':\n", "print('full name: ', new_message.name)\n", "yield 'comment', new_comment\n", "if new_message.name[:3] == 't4_':\n", "yield 'message', new_message\n" ]
[ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 4, 4, 4, 4, 4, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4 ]
[ "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "For", "Condition", "Expr'", "For", "Expr'", "Condition", "Expr'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_26(self, VAR_14):...\n", "" ]
[ "def get_max_length(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "import os\n", "import unittest\n", "from coalib.bearlib.abstractions.Lint import Lint\n", "from coalib.misc.ContextManagers import prepare_file\n", "from coalib.misc.Shell import escape_path_argument\n", "from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY\n", "from coalib.results.SourceRange import SourceRange\n", "from coalib.settings.Section import Section\n", "def FUNC_0(self):...\n", "VAR_0 = Section('some_name')\n", "self.uut = Lint(VAR_0, None)\n", "def FUNC_1(self):...\n", "VAR_1 = list(self.uut.process_output(['1.0|0: Info message\\n',\n '2.2|1: Normal message\\n', \"\"\"3.4|2: Major message\n\"\"\"], 'a/file.py', [\n 'original_file_lines_placeholder']))\n", "self.assertEqual(len(VAR_1), 3)\n", "self.assertEqual(VAR_1[0].origin, 'Lint')\n", "self.assertEqual(VAR_1[0].affected_code[0], SourceRange.from_values(\n 'a/file.py', 1, 0))\n", "self.assertEqual(VAR_1[0].severity, RESULT_SEVERITY.INFO)\n", "self.assertEqual(VAR_1[0].message, 'Info message')\n", "self.assertEqual(VAR_1[1].affected_code[0], SourceRange.from_values(\n 'a/file.py', 2, 2))\n", "self.assertEqual(VAR_1[1].severity, RESULT_SEVERITY.NORMAL)\n", "self.assertEqual(VAR_1[1].message, 'Normal message')\n", "self.assertEqual(VAR_1[2].affected_code[0], SourceRange.from_values(\n 'a/file.py', 3, 4))\n", "self.assertEqual(VAR_1[2].severity, RESULT_SEVERITY.MAJOR)\n", "self.assertEqual(VAR_1[2].message, 'Major message')\n", "def FUNC_2(self):...\n", "self.uut.output_regex = 'string'\n", "self.uut.severity_map = {'I': RESULT_SEVERITY.INFO}\n", "VAR_1 = list(self.uut.process_output(['info_msg|1.0|2.3|I: Info message\\n'],\n 'a/file.py', ['original_file_lines_placeholder']))\n", "self.assertEqual(len(VAR_1), 1)\n", "self.assertEqual(VAR_1[0].affected_code[0].start.line, 1)\n", "self.assertEqual(VAR_1[0].affected_code[0].start.column, 0)\n", "self.assertEqual(VAR_1[0].affected_code[0].end.line, 2)\n", "self.assertEqual(VAR_1[0].affected_code[0].end.column, 3)\n", "self.assertEqual(VAR_1[0].severity, RESULT_SEVERITY.INFO)\n", "self.assertEqual(VAR_1[0].origin, 'Lint (info_msg)')\n", "def FUNC_3(self):...\n", "VAR_1 = list(self.uut.process_output([\n \"Random line that shouldn't be captured\\n\", '*************\\n'],\n 'a/file.py', ['original_file_lines_placeholder']))\n", "self.assertEqual(len(VAR_1), 0)\n", "def FUNC_4(self):...\n", "self.uut.executable = 'more'\n", "self.uut.use_stdin = True\n", "self.uut.use_stderr = False\n", "self.uut.process_output = lambda output, filename, file: output\n", "VAR_1 = self.uut.lint(file=lines)\n", "self.assertTrue(('abcd\\n', 'efgh\\n') == VAR_1 or ('abcd\\n', 'efgh\\n', '\\n') ==\n VAR_1)\n", "def FUNC_5(self):...\n", "self.uut.executable = 'echo'\n", "self.uut.arguments = 'hello'\n", "self.uut.use_stdin = False\n", "self.uut.use_stderr = True\n", "self.uut.process_output = lambda output, filename, file: output\n", "VAR_1 = self.uut.lint('unused_filename')\n", "self.assertEqual((), VAR_1)\n", "self.uut.use_stderr = False\n", "VAR_1 = self.uut.lint('unused_filename')\n", "self.assertEqual(('hello\\n',), VAR_1)\n", "def FUNC_9(VAR_2):...\n", "assert VAR_2 == 'hello'\n", "VAR_3 = self.uut.warn\n", "self.uut.warn = FUNC_9\n", "self.uut._print_errors(['hello', '\\n'])\n", "self.uut.warn = VAR_3\n", "def FUNC_6(self):...\n", "self.uut.gives_corrected = True\n", "VAR_1 = tuple(self.uut.process_output(['a', 'b'], 'filename', ['a', 'b']))\n", "self.assertEqual((), VAR_1)\n", "VAR_1 = tuple(self.uut.process_output(['a', 'b'], 'filename', ['a']))\n", "self.assertEqual(len(VAR_1), 1)\n", "def 
FUNC_7(self):...\n", "VAR_4 = Lint.executable\n", "VAR_5 = 'invalid_binary_which_doesnt_exist'\n", "Lint.executable = VAR_5\n", "self.assertEqual(Lint.check_prerequisites(), \"'{}' is not installed.\".\n format(VAR_5))\n", "Lint.executable = 'echo'\n", "self.assertTrue(Lint.check_prerequisites())\n", "self.assertTrue(Lint.check_prerequisites())\n", "Lint.executable = VAR_4\n", "def FUNC_8(self):...\n", "self.uut.executable = 'echo'\n", "self.uut.arguments = '-c {config_file}'\n", "self.assertEqual(self.uut._create_command(config_file='configfile').strip(),\n 'echo -c ' + escape_path_argument('configfile'))\n", "def FUNC_8(self):...\n", "self.uut.executable = 'echo'\n", "self.uut.config_file = lambda : ['config line1']\n", "VAR_6 = self.uut.generate_config_file()\n", "self.assertTrue(os.path.isfile(VAR_6))\n", "os.remove(VAR_6)\n", "self.uut.lint('filename')\n" ]
[ "import os\n", "import unittest\n", "from coalib.bearlib.abstractions.Lint import Lint\n", "from coalib.misc.ContextManagers import prepare_file\n", "from coalib.misc.Shell import escape_path_argument\n", "from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY\n", "from coalib.results.SourceRange import SourceRange\n", "from coalib.settings.Section import Section\n", "def setUp(self):...\n", "section = Section('some_name')\n", "self.uut = Lint(section, None)\n", "def test_invalid_output(self):...\n", "out = list(self.uut.process_output(['1.0|0: Info message\\n',\n '2.2|1: Normal message\\n', \"\"\"3.4|2: Major message\n\"\"\"], 'a/file.py', [\n 'original_file_lines_placeholder']))\n", "self.assertEqual(len(out), 3)\n", "self.assertEqual(out[0].origin, 'Lint')\n", "self.assertEqual(out[0].affected_code[0], SourceRange.from_values(\n 'a/file.py', 1, 0))\n", "self.assertEqual(out[0].severity, RESULT_SEVERITY.INFO)\n", "self.assertEqual(out[0].message, 'Info message')\n", "self.assertEqual(out[1].affected_code[0], SourceRange.from_values(\n 'a/file.py', 2, 2))\n", "self.assertEqual(out[1].severity, RESULT_SEVERITY.NORMAL)\n", "self.assertEqual(out[1].message, 'Normal message')\n", "self.assertEqual(out[2].affected_code[0], SourceRange.from_values(\n 'a/file.py', 3, 4))\n", "self.assertEqual(out[2].severity, RESULT_SEVERITY.MAJOR)\n", "self.assertEqual(out[2].message, 'Major message')\n", "def test_custom_regex(self):...\n", "self.uut.output_regex = (\n '(?P<origin>\\\\w+)\\\\|(?P<line>\\\\d+)\\\\.(?P<column>\\\\d+)\\\\|(?P<end_line>\\\\d+)\\\\.(?P<end_column>\\\\d+)\\\\|(?P<severity>\\\\w+): (?P<message>.*)'\n )\n", "self.uut.severity_map = {'I': RESULT_SEVERITY.INFO}\n", "out = list(self.uut.process_output(['info_msg|1.0|2.3|I: Info message\\n'],\n 'a/file.py', ['original_file_lines_placeholder']))\n", "self.assertEqual(len(out), 1)\n", "self.assertEqual(out[0].affected_code[0].start.line, 1)\n", "self.assertEqual(out[0].affected_code[0].start.column, 0)\n", "self.assertEqual(out[0].affected_code[0].end.line, 2)\n", "self.assertEqual(out[0].affected_code[0].end.column, 3)\n", "self.assertEqual(out[0].severity, RESULT_SEVERITY.INFO)\n", "self.assertEqual(out[0].origin, 'Lint (info_msg)')\n", "def test_valid_output(self):...\n", "out = list(self.uut.process_output([\n \"Random line that shouldn't be captured\\n\", '*************\\n'],\n 'a/file.py', ['original_file_lines_placeholder']))\n", "self.assertEqual(len(out), 0)\n", "def test_stdin_input(self):...\n", "self.uut.executable = 'more'\n", "self.uut.use_stdin = True\n", "self.uut.use_stderr = False\n", "self.uut.process_output = lambda output, filename, file: output\n", "out = self.uut.lint(file=lines)\n", "self.assertTrue(('abcd\\n', 'efgh\\n') == out or ('abcd\\n', 'efgh\\n', '\\n') ==\n out)\n", "def test_stderr_output(self):...\n", "self.uut.executable = 'echo'\n", "self.uut.arguments = 'hello'\n", "self.uut.use_stdin = False\n", "self.uut.use_stderr = True\n", "self.uut.process_output = lambda output, filename, file: output\n", "out = self.uut.lint('unused_filename')\n", "self.assertEqual((), out)\n", "self.uut.use_stderr = False\n", "out = self.uut.lint('unused_filename')\n", "self.assertEqual(('hello\\n',), out)\n", "def assert_warn(line):...\n", "assert line == 'hello'\n", "old_warn = self.uut.warn\n", "self.uut.warn = assert_warn\n", "self.uut._print_errors(['hello', '\\n'])\n", "self.uut.warn = old_warn\n", "def test_gives_corrected(self):...\n", "self.uut.gives_corrected = True\n", "out = tuple(self.uut.process_output(['a', 
'b'], 'filename', ['a', 'b']))\n", "self.assertEqual((), out)\n", "out = tuple(self.uut.process_output(['a', 'b'], 'filename', ['a']))\n", "self.assertEqual(len(out), 1)\n", "def test_missing_binary(self):...\n", "old_binary = Lint.executable\n", "invalid_binary = 'invalid_binary_which_doesnt_exist'\n", "Lint.executable = invalid_binary\n", "self.assertEqual(Lint.check_prerequisites(), \"'{}' is not installed.\".\n format(invalid_binary))\n", "Lint.executable = 'echo'\n", "self.assertTrue(Lint.check_prerequisites())\n", "self.assertTrue(Lint.check_prerequisites())\n", "Lint.executable = old_binary\n", "def test_config_file_generator(self):...\n", "self.uut.executable = 'echo'\n", "self.uut.arguments = '-c {config_file}'\n", "self.assertEqual(self.uut._create_command(config_file='configfile').strip(),\n 'echo -c ' + escape_path_argument('configfile'))\n", "def test_config_file_generator(self):...\n", "self.uut.executable = 'echo'\n", "self.uut.config_file = lambda : ['config line1']\n", "config_filename = self.uut.generate_config_file()\n", "self.assertTrue(os.path.isfile(config_filename))\n", "os.remove(config_filename)\n", "self.uut.lint('filename')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assert'", "Assign'", "Assign'", "Expr'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_15(self, VAR_8):...\n", "VAR_32 = []\n", "if VAR_8 == '':\n", "return []\n", "VAR_33 = Urtext.get_all_files(self.view.window())\n", "VAR_34 = []\n", "for file in VAR_33:\n", "if file[-4:] == '.txt':\n", "return VAR_34\n", "VAR_0 = this_file.read()\n", "VAR_31 = re.findall('-> ' + VAR_8.replace('.txt', ''), VAR_0)\n", "for VAR_39 in VAR_31:\n", "VAR_34.append(file)\n" ]
[ "def get_links_to_file(self, filename):...\n", "visited_files = []\n", "if filename == '':\n", "return []\n", "files = Urtext.get_all_files(self.view.window())\n", "links_to_file = []\n", "for file in files:\n", "if file[-4:] == '.txt':\n", "return links_to_file\n", "contents = this_file.read()\n", "links = re.findall('-> ' + filename.replace('.txt', ''), contents)\n", "for link in links:\n", "links_to_file.append(file)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "For", "Condition", "Return'", "Assign'", "Assign'", "For", "Expr'" ]
[ "def FUNC_0(self, VAR_0, VAR_1, VAR_2, VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = VAR_3.pop('kbsite_name', None)\n", "if not VAR_6:\n", "VAR_6 = 'default'\n", "VAR_0.kbsite = models.KegbotSite.objects.get(name=kbsite_name)\n", "return None\n" ]
[ "def process_view(self, request, view_func, view_args, view_kwargs):...\n", "\"\"\"docstring\"\"\"\n", "kbsite_name = view_kwargs.pop('kbsite_name', None)\n", "if not kbsite_name:\n", "kbsite_name = 'default'\n", "request.kbsite = models.KegbotSite.objects.get(name=kbsite_name)\n", "return None\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_20(self, VAR_2):...\n", "if not self.logged_in():\n", "exit('send_cmd called before login')\n", "VAR_11 = FUNC_1()\n", "VAR_12, VAR_13, VAR_14 = VAR_11.exec_command('shrubbery {} {} '.format(self\n .user_creds[0], self.user_creds[1]) + VAR_2)\n", "print('***stdout: ' + VAR_13.read().decode('utf-8'))\n", "print('***stderr: ' + VAR_14.read().decode('utf-8'))\n", "return VAR_13.read().decode('utf-8')\n" ]
[ "def send_cmd(self, command_string):...\n", "if not self.logged_in():\n", "exit('send_cmd called before login')\n", "client = open_ssh_client()\n", "stdin, stdout, stderr = client.exec_command('shrubbery {} {} '.format(self.\n user_creds[0], self.user_creds[1]) + command_string)\n", "print('***stdout: ' + stdout.read().decode('utf-8'))\n", "print('***stderr: ' + stderr.read().decode('utf-8'))\n", "return stdout.read().decode('utf-8')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_1(self):...\n", "return 'submit source %s for task %s (ID %d) %s' % (self.source_path, self.\n task[1], self.task[0], self.url)\n" ]
[ "def describe(self):...\n", "return 'submit source %s for task %s (ID %d) %s' % (self.source_path, self.\n task[1], self.task[0], self.url)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_6(self, VAR_23):...\n", "if VAR_23:\n", "VAR_94 = _force_ascii(VAR_23)\n", "if VAR_94 == VAR_23:\n", "abort(404, 'page not found')\n", "return Tag._by_name(VAR_94)\n" ]
[ "def run(self, name):...\n", "if name:\n", "cleaned = _force_ascii(name)\n", "if cleaned == name:\n", "abort(404, 'page not found')\n", "return Tag._by_name(cleaned)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Expr'", "Return'" ]
[ "\"\"\"\nThis module contains ...\n\"\"\"\n", "from __future__ import division, absolute_import\n", "import cPickle as pickle\n", "import pickle\n", "import os\n", "import time\n", "import fnmatch\n", "import hashlib\n", "import re\n", "import stat\n", "import errno\n", "from twisted.python import log\n", "from cowrie.core.config import CONFIG\n", "VAR_0 = pickle.load(FUNC_14(CONFIG.get('honeypot', 'filesystem_file'), 'rb'))\n", "VAR_1, VAR_2, VAR_3, VAR_4, VAR_5, VAR_6, VAR_7, VAR_8, VAR_9, VAR_10 = list(\n range(0, 10))\n", "VAR_11, VAR_12, VAR_13, VAR_14, VAR_15, VAR_16, VAR_17 = list(range(0, 7))\n", "\"\"\"string\"\"\"\n", "\"\"\"\n raise OSError(errno.ENOENT, os.strerror(errno.ENOENT))\n \"\"\"\n", "\"\"\"\n \"\"\"\n", "def __init__(self, VAR_18, VAR_19):...\n", "self.fs = VAR_18\n", "self.cfg = VAR_19\n", "self.tempfiles = {}\n", "self.filenames = {}\n", "self.newcount = 0\n", "self.init_honeyfs(self.cfg.get('honeypot', 'contents_path'))\n", "def FUNC_0(self, VAR_20):...\n", "\"\"\"docstring\"\"\"\n", "for VAR_21, directories, filenames in os.walk(VAR_20):\n", "for VAR_32 in filenames:\n", "def FUNC_1(self, VAR_21, VAR_22):...\n", "VAR_72 = os.path.join(VAR_21, VAR_32)\n", "\"\"\"docstring\"\"\"\n", "VAR_73 = '/' + os.path.relpath(VAR_72, VAR_20)\n", "VAR_55 = VAR_21.rstrip('/').split('/')\n", "VAR_24 = self.getfile(VAR_73, VAR_23=False)\n", "if VAR_21[0] == '/':\n", "if VAR_24 and VAR_24[VAR_2] == VAR_13:\n", "VAR_22 = []\n", "VAR_22 = [x for x in VAR_22.split('/') if len(x) and x is not None]\n", "self.update_realfile(VAR_24, VAR_72)\n", "while 1:\n", "if not len(VAR_55):\n", "VAR_66 = VAR_55.pop(0)\n", "return '/%s' % ('/'.join(VAR_22),)\n", "if VAR_66 == '..':\n", "if len(VAR_22):\n", "if VAR_66 in ('.', ''):\n", "VAR_22.pop()\n", "VAR_22.append(VAR_66)\n" ]
[ "\"\"\"\nThis module contains ...\n\"\"\"\n", "from __future__ import division, absolute_import\n", "import cPickle as pickle\n", "import pickle\n", "import os\n", "import time\n", "import fnmatch\n", "import hashlib\n", "import re\n", "import stat\n", "import errno\n", "from twisted.python import log\n", "from cowrie.core.config import CONFIG\n", "PICKLE = pickle.load(open(CONFIG.get('honeypot', 'filesystem_file'), 'rb'))\n", "(A_NAME, A_TYPE, A_UID, A_GID, A_SIZE, A_MODE, A_CTIME, A_CONTENTS,\n A_TARGET, A_REALFILE) = list(range(0, 10))\n", "T_LINK, T_DIR, T_FILE, T_BLK, T_CHR, T_SOCK, T_FIFO = list(range(0, 7))\n", "\"\"\"\n 62 ELOOP Too many levels of symbolic links. A path name lookup involved more than 8 symbolic links.\n raise OSError(errno.ELOOP, os.strerror(errno.ENOENT))\n \"\"\"\n", "\"\"\"\n raise OSError(errno.ENOENT, os.strerror(errno.ENOENT))\n \"\"\"\n", "\"\"\"\n \"\"\"\n", "def __init__(self, fs, cfg):...\n", "self.fs = fs\n", "self.cfg = cfg\n", "self.tempfiles = {}\n", "self.filenames = {}\n", "self.newcount = 0\n", "self.init_honeyfs(self.cfg.get('honeypot', 'contents_path'))\n", "def init_honeyfs(self, honeyfs_path):...\n", "\"\"\"docstring\"\"\"\n", "for path, directories, filenames in os.walk(honeyfs_path):\n", "for filename in filenames:\n", "def resolve_path(self, path, cwd):...\n", "realfile_path = os.path.join(path, filename)\n", "\"\"\"docstring\"\"\"\n", "virtual_path = '/' + os.path.relpath(realfile_path, honeyfs_path)\n", "pieces = path.rstrip('/').split('/')\n", "f = self.getfile(virtual_path, follow_symlinks=False)\n", "if path[0] == '/':\n", "if f and f[A_TYPE] == T_FILE:\n", "cwd = []\n", "cwd = [x for x in cwd.split('/') if len(x) and x is not None]\n", "self.update_realfile(f, realfile_path)\n", "while 1:\n", "if not len(pieces):\n", "piece = pieces.pop(0)\n", "return '/%s' % ('/'.join(cwd),)\n", "if piece == '..':\n", "if len(cwd):\n", "if piece in ('.', ''):\n", "cwd.pop()\n", "cwd.append(piece)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Docstring", "For", "For", "FunctionDef'", "Assign'", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Condition", "Assign'", "Return'", "Condition", "Condition", "Condition", "Expr'", "Expr'" ]
[ "@tornado.web.authenticated...\n", "self.write(api.user.generate_token({'email': self.current_user}))\n" ]
[ "@tornado.web.authenticated...\n", "self.write(api.user.generate_token({'email': self.current_user}))\n" ]
[ 0, 0 ]
[ "Condition", "Expr'" ]
[ "@mock.patch('requests.post', FUNC_0)...\n", "\"\"\"docstring\"\"\"\n", "VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_6 = VAR_5.data['id']\n", "VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(VAR_5.data['count'], 1)\n", "VAR_7 = {'image': 'autotest/example'}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_8 = VAR_5.data['uuid']\n", "VAR_9 = VAR_5.data\n", "self.assertEqual(VAR_5.data['image'], VAR_7['image'])\n", "VAR_4 = '/api/apps/{app_id}/builds/{build_id}'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "VAR_10 = VAR_5.data\n", "self.assertEqual(VAR_9, VAR_10)\n", "VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_7 = {'image': 'autotest/example'}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "self.assertIn('x-deis-release', VAR_5._headers)\n", "VAR_11 = VAR_5.data\n", "self.assertEqual(VAR_5.data['image'], VAR_7['image'])\n", "self.assertNotEqual(VAR_10['uuid'], VAR_11['uuid'])\n", "self.assertEqual(self.client.put(VAR_4).status_code, 405)\n", "self.assertEqual(self.client.patch(VAR_4).status_code, 405)\n", "self.assertEqual(self.client.delete(VAR_4).status_code, 405)\n" ]
[ "@mock.patch('requests.post', mock_import_repository_task)...\n", "\"\"\"docstring\"\"\"\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(response.data['count'], 1)\n", "body = {'image': 'autotest/example'}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "build_id = response.data['uuid']\n", "build1 = response.data\n", "self.assertEqual(response.data['image'], body['image'])\n", "url = '/api/apps/{app_id}/builds/{build_id}'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "build2 = response.data\n", "self.assertEqual(build1, build2)\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example'}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "self.assertIn('x-deis-release', response._headers)\n", "build3 = response.data\n", "self.assertEqual(response.data['image'], body['image'])\n", "self.assertNotEqual(build2['uuid'], build3['uuid'])\n", "self.assertEqual(self.client.put(url).status_code, 405)\n", "self.assertEqual(self.client.patch(url).status_code, 405)\n", "self.assertEqual(self.client.delete(url).status_code, 405)\n" ]
[ 0, 0, 0, 5, 0, 0, 0, 5, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 5 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_9(self, VAR_14):...\n", "self.cursor.execute('SELECT * FROM product_type WHERE name = %s;', (VAR_14,))\n", "return self.cursor.fetchone()\n" ]
[ "def find_type_by_name(self, type_name):...\n", "self.cursor.execute('SELECT * FROM product_type WHERE name = %s;', (type_name,)\n )\n", "return self.cursor.fetchone()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Return'" ]
[ "def FUNC_5(VAR_1, *VAR_2, **VAR_3):...\n", "def FUNC_6():...\n", "if not request.method in VAR_0:\n", "return True\n", "if not request.environ.get('AUTH_TYPE') == 'cookie':\n", "return True\n", "if config.get('skip_authentication'):\n", "return True\n", "if request.environ.get('HTTP_REFERER'):\n", "VAR_7 = urlparse(request.environ.get('HTTP_REFERER'))\n", "if request.method == 'GET' and FUNC_4():\n", "VAR_8 = VAR_7.hostname\n", "return True\n", "return False\n", "if VAR_7.port:\n", "VAR_8 += ':' + str(VAR_7.port)\n", "if VAR_8.endswith(request.environ['adhocracy.domain']):\n", "if request.method != 'GET':\n", "return True\n" ]
[ "def _decorate(f, *a, **kw):...\n", "def check():...\n", "if not request.method in methods:\n", "return True\n", "if not request.environ.get('AUTH_TYPE') == 'cookie':\n", "return True\n", "if config.get('skip_authentication'):\n", "return True\n", "if request.environ.get('HTTP_REFERER'):\n", "ref_url = urlparse(request.environ.get('HTTP_REFERER'))\n", "if request.method == 'GET' and has_token():\n", "ref_host = ref_url.hostname\n", "return True\n", "return False\n", "if ref_url.port:\n", "ref_host += ':' + str(ref_url.port)\n", "if ref_host.endswith(request.environ['adhocracy.domain']):\n", "if request.method != 'GET':\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 5, 0 ]
[ "FunctionDef'", "FunctionDef'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Condition", "Assign'", "Condition", "Assign'", "Return'", "Return'", "Condition", "AugAssign'", "Condition", "Condition", "Return'" ]
[ "def FUNC_1(self, VAR_1, VAR_2, VAR_5, VAR_6, VAR_4={}):...\n", "\"\"\"docstring\"\"\"\n", "VAR_22 = self.pool.get('stock.location')\n", "VAR_23 = self.pool.get('account.move')\n", "VAR_24 = self.pool.get('account.move.line')\n", "VAR_25 = VAR_6.get('new_price', 0.0)\n", "VAR_19 = VAR_6.get('stock_output_account', False)\n", "VAR_18 = VAR_6.get('stock_input_account', False)\n", "VAR_20 = VAR_6.get('stock_journal', False)\n", "VAR_17 = self.browse(VAR_1, VAR_2, VAR_5)[0]\n", "VAR_21 = VAR_17.categ_id.property_stock_variation\n", "VAR_26 = VAR_21 and VAR_21.id or False\n", "if not VAR_26:\n", "VAR_27 = []\n", "VAR_28 = VAR_22.search(VAR_1, VAR_2, [('usage', '=', 'internal')])\n", "for rec_id in VAR_5:\n", "for location in VAR_22.browse(VAR_1, VAR_2, VAR_28):\n", "return VAR_27\n", "VAR_49 = VAR_4.copy()\n", "self.write(VAR_1, VAR_2, rec_id, {'standard_price': VAR_25})\n", "VAR_49.update({'location': location.id, 'compute_child': False})\n", "VAR_45 = self.browse(VAR_1, VAR_2, rec_id, VAR_4=c)\n", "VAR_53 = VAR_45.qty_available\n", "VAR_54 = VAR_45.standard_price - VAR_25\n", "if not VAR_54:\n", "if VAR_53:\n", "VAR_56 = location.company_id and location.company_id.id or False\n", "if not VAR_56:\n", "if not VAR_20:\n", "VAR_20 = (VAR_45.categ_id.property_stock_journal and VAR_45.categ_id.\n property_stock_journal.id or False)\n", "if not VAR_20:\n", "VAR_57 = VAR_23.create(VAR_1, VAR_2, {'journal_id': VAR_20, 'company_id':\n VAR_56})\n", "VAR_27.append(VAR_57)\n", "if VAR_54 > 0:\n", "if not VAR_18:\n", "if VAR_54 < 0:\n", "VAR_18 = VAR_45.product_tmpl_id.property_stock_account_input.id\n", "if not VAR_18:\n", "if not VAR_19:\n", "VAR_18 = VAR_45.categ_id.property_stock_account_input_categ.id\n", "if not VAR_18:\n", "VAR_19 = VAR_45.product_tmpl_id.property_stock_account_output.id\n", "if not VAR_19:\n", "VAR_58 = VAR_53 * VAR_54\n", "VAR_19 = VAR_45.categ_id.property_stock_account_output_categ.id\n", "if not VAR_19:\n", "VAR_24.create(VAR_1, VAR_2, {'name': VAR_45.name, 'account_id': VAR_18,\n 'debit': VAR_58, 'move_id': VAR_57})\n", "VAR_58 = VAR_53 * -VAR_54\n", "VAR_24.create(VAR_1, VAR_2, {'name': VAR_45.categ_id.name, 'account_id':\n VAR_26, 'credit': VAR_58, 'move_id': VAR_57})\n", "VAR_24.create(VAR_1, VAR_2, {'name': VAR_45.name, 'account_id': VAR_19,\n 'credit': VAR_58, 'move_id': VAR_57})\n", "VAR_24.create(VAR_1, VAR_2, {'name': VAR_45.categ_id.name, 'account_id':\n VAR_26, 'debit': VAR_58, 'move_id': VAR_57})\n" ]
[ "def do_change_standard_price(self, cr, uid, ids, datas, context={}):...\n", "\"\"\"docstring\"\"\"\n", "location_obj = self.pool.get('stock.location')\n", "move_obj = self.pool.get('account.move')\n", "move_line_obj = self.pool.get('account.move.line')\n", "new_price = datas.get('new_price', 0.0)\n", "stock_output_acc = datas.get('stock_output_account', False)\n", "stock_input_acc = datas.get('stock_input_account', False)\n", "journal_id = datas.get('stock_journal', False)\n", "product_obj = self.browse(cr, uid, ids)[0]\n", "account_variation = product_obj.categ_id.property_stock_variation\n", "account_variation_id = account_variation and account_variation.id or False\n", "if not account_variation_id:\n", "move_ids = []\n", "loc_ids = location_obj.search(cr, uid, [('usage', '=', 'internal')])\n", "for rec_id in ids:\n", "for location in location_obj.browse(cr, uid, loc_ids):\n", "return move_ids\n", "c = context.copy()\n", "self.write(cr, uid, rec_id, {'standard_price': new_price})\n", "c.update({'location': location.id, 'compute_child': False})\n", "product = self.browse(cr, uid, rec_id, context=c)\n", "qty = product.qty_available\n", "diff = product.standard_price - new_price\n", "if not diff:\n", "if qty:\n", "company_id = location.company_id and location.company_id.id or False\n", "if not company_id:\n", "if not journal_id:\n", "journal_id = (product.categ_id.property_stock_journal and product.categ_id.\n property_stock_journal.id or False)\n", "if not journal_id:\n", "move_id = move_obj.create(cr, uid, {'journal_id': journal_id, 'company_id':\n company_id})\n", "move_ids.append(move_id)\n", "if diff > 0:\n", "if not stock_input_acc:\n", "if diff < 0:\n", "stock_input_acc = product.product_tmpl_id.property_stock_account_input.id\n", "if not stock_input_acc:\n", "if not stock_output_acc:\n", "stock_input_acc = product.categ_id.property_stock_account_input_categ.id\n", "if not stock_input_acc:\n", "stock_output_acc = product.product_tmpl_id.property_stock_account_output.id\n", "if not stock_output_acc:\n", "amount_diff = qty * diff\n", "stock_output_acc = product.categ_id.property_stock_account_output_categ.id\n", "if not stock_output_acc:\n", "move_line_obj.create(cr, uid, {'name': product.name, 'account_id':\n stock_input_acc, 'debit': amount_diff, 'move_id': move_id})\n", "amount_diff = qty * -diff\n", "move_line_obj.create(cr, uid, {'name': product.categ_id.name, 'account_id':\n account_variation_id, 'credit': amount_diff, 'move_id': move_id})\n", "move_line_obj.create(cr, uid, {'name': product.name, 'account_id':\n stock_output_acc, 'credit': amount_diff, 'move_id': move_id})\n", "move_line_obj.create(cr, uid, {'name': product.categ_id.name, 'account_id':\n account_variation_id, 'debit': amount_diff, 'move_id': move_id})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "For", "For", "Return'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Condition", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_38(self, VAR_84):...\n", "return [VAR_82 for job in VAR_84.jobs for VAR_82 in job.subworkflow_input if\n job.subworkflow_input[VAR_82] is self]\n" ]
[ "def targets(self, dag):...\n", "return [f for job in dag.jobs for f in job.subworkflow_input if job.\n subworkflow_input[f] is self]\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self):...\n", "self.order = list()\n" ]
[ "def __init__(self):...\n", "self.order = list()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_7(self):...\n", "return self.content\n" ]
[ "def read(self):...\n", "return self.content\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(self):...\n", "return current_user.is_authenticated and current_user.role.role_code in self.roles_allowed\n" ]
[ "def is_accessible(self):...\n", "return current_user.is_authenticated and current_user.role.role_code in self.roles_allowed\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@VAR_0.errorhandler(404)...\n", "VAR_21 = uuid4()\n", "return render_template('404.html', VAR_21=cache_id), 404\n" ]
[ "@app.errorhandler(404)...\n", "cache_id = uuid4()\n", "return render_template('404.html', cache_id=cache_id), 404\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "def FUNC_4(self, VAR_10=None, VAR_11=False, VAR_12=False):...\n", "if VAR_10 is None:\n", "VAR_10 = self.prompt\n", "VAR_20 = ''\n", "VAR_21 = ''\n", "VAR_22 = ''\n", "VAR_23 = 0\n", "while True:\n", "VAR_24 = self.serial.read().decode('utf-8', 'replace')\n", "if not VAR_24:\n", "if self.options.command != 'new':\n", "if self.options.command == 'new':\n", "VAR_21 = VAR_20.replace(VAR_22, '')\n", "if not VAR_11:\n", "self.campaign_data['dut_output' if not self.aux else 'aux_output'] += VAR_24\n", "self.result_data['dut_output' if not self.aux else 'aux_output'] += VAR_24\n", "db.log_event(self.result_data['id'], 'DUT' if not self.aux else 'AUX',\n 'Read timeout', VAR_21)\n", "if self.serial.timeout != self.options.timeout:\n", "if self.options.debug:\n", "VAR_22 += VAR_21\n", "self.serial.timeout = self.options.timeout\n", "if self.options.debug:\n", "print(colored(VAR_24, 'green' if not self.aux else 'cyan'), end='')\n", "VAR_20 += VAR_24\n", "print()\n", "if self.options.command == 'new':\n", "sys.stdout.flush()\n", "if not VAR_11 and VAR_20[-len(VAR_10):] == VAR_10:\n", "db.update_dict('campaign', self.campaign_data)\n", "db.update_dict('result', self.result_data)\n", "if VAR_20[-len('autoboot: '):] == 'autoboot: ' and self.uboot_command:\n", "if VAR_23 and not VAR_12:\n", "self.write('\\n')\n", "if VAR_20[-len('login: '):] == 'login: ':\n", "for message, category in self.error_messages:\n", "return VAR_20\n", "self.write(self.uboot_command + '\\n')\n", "self.write(self.options.username + '\\n')\n", "if VAR_20[-len('Password: '):] == 'Password: ':\n", "if message in VAR_20:\n", "for message, category in self.error_messages:\n", "self.write(self.options.password + '\\n')\n", "if VAR_20[-len(\"can't get kernel image\"):] == \"can't get kernel image\":\n", "if VAR_20[-len(message):] == message:\n", "if not VAR_11 and VAR_23 > 10:\n", "self.write('reset\\n')\n", "if not VAR_11 and not VAR_12:\n", "if not VAR_12 and VAR_20 and VAR_20[-1] == '\\n':\n", "VAR_23 += 1\n", "self.serial.timeout = 30\n", "if self.options.command != 'new' and not VAR_12:\n", "if self.options.command == 'new':\n", "VAR_23 += 1\n", "VAR_21 = VAR_20.replace(VAR_22, '')\n", "db.update_dict('campaign', self.campaign_data)\n", "db.update_dict('result', self.result_data)\n", "db.log_event(self.result_data['id'], 'DUT' if not self.aux else 'AUX',\n category, VAR_21)\n", "VAR_22 += VAR_21\n" ]
[ "def read_until(self, string=None, continuous=False, boot=False):...\n", "if string is None:\n", "string = self.prompt\n", "buff = ''\n", "event_buff = ''\n", "event_buff_logged = ''\n", "errors = 0\n", "while True:\n", "char = self.serial.read().decode('utf-8', 'replace')\n", "if not char:\n", "if self.options.command != 'new':\n", "if self.options.command == 'new':\n", "event_buff = buff.replace(event_buff_logged, '')\n", "if not continuous:\n", "self.campaign_data['dut_output' if not self.aux else 'aux_output'] += char\n", "self.result_data['dut_output' if not self.aux else 'aux_output'] += char\n", "db.log_event(self.result_data['id'], 'DUT' if not self.aux else 'AUX',\n 'Read timeout', event_buff)\n", "if self.serial.timeout != self.options.timeout:\n", "if self.options.debug:\n", "event_buff_logged += event_buff\n", "self.serial.timeout = self.options.timeout\n", "if self.options.debug:\n", "print(colored(char, 'green' if not self.aux else 'cyan'), end='')\n", "buff += char\n", "print()\n", "if self.options.command == 'new':\n", "sys.stdout.flush()\n", "if not continuous and buff[-len(string):] == string:\n", "db.update_dict('campaign', self.campaign_data)\n", "db.update_dict('result', self.result_data)\n", "if buff[-len('autoboot: '):] == 'autoboot: ' and self.uboot_command:\n", "if errors and not boot:\n", "self.write('\\n')\n", "if buff[-len('login: '):] == 'login: ':\n", "for message, category in self.error_messages:\n", "return buff\n", "self.write(self.uboot_command + '\\n')\n", "self.write(self.options.username + '\\n')\n", "if buff[-len('Password: '):] == 'Password: ':\n", "if message in buff:\n", "for message, category in self.error_messages:\n", "self.write(self.options.password + '\\n')\n", "if buff[-len(\"can't get kernel image\"):] == \"can't get kernel image\":\n", "if buff[-len(message):] == message:\n", "if not continuous and errors > 10:\n", "self.write('reset\\n')\n", "if not continuous and not boot:\n", "if not boot and buff and buff[-1] == '\\n':\n", "errors += 1\n", "self.serial.timeout = 30\n", "if self.options.command != 'new' and not boot:\n", "if self.options.command == 'new':\n", "errors += 1\n", "event_buff = buff.replace(event_buff_logged, '')\n", "db.update_dict('campaign', self.campaign_data)\n", "db.update_dict('result', self.result_data)\n", "db.log_event(self.result_data['id'], 'DUT' if not self.aux else 'AUX',\n category, event_buff)\n", "event_buff_logged += event_buff\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Condition", "AugAssign'", "AugAssign'", "Expr'", "Condition", "Condition", "AugAssign'", "Assign'", "Condition", "Expr'", "AugAssign'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Condition", "Condition", "Expr'", "Condition", "For", "Return'", "Expr'", "Expr'", "Condition", "Condition", "For", "Expr'", "Condition", "Condition", "Condition", "Expr'", "Condition", "Condition", "AugAssign'", "Assign'", "Condition", "Condition", "AugAssign'", "Assign'", "Expr'", "Expr'", "Expr'", "AugAssign'" ]
[ "from django.core.exceptions import ObjectDoesNotExist\n", "from rest_framework import exceptions\n", "from app import models\n", "def FUNC_0(VAR_0, VAR_1, VAR_2=('view_project',)):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = models.Project.objects.get(pk=project_pk, deleting=False)\n", "return VAR_6\n", "for perm in VAR_2:\n", "if not VAR_0.user.has_perm(perm, VAR_6):\n" ]
[ "from django.core.exceptions import ObjectDoesNotExist\n", "from rest_framework import exceptions\n", "from app import models\n", "def get_and_check_project(request, project_pk, perms=('view_project',)):...\n", "\"\"\"docstring\"\"\"\n", "project = models.Project.objects.get(pk=project_pk, deleting=False)\n", "return project\n", "for perm in perms:\n", "if not request.user.has_perm(perm, project):\n" ]
[ 1, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Docstring", "Assign'", "Return'", "For", "Condition" ]
[ "def FUNC_12(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_29 = self.config_file()\n", "VAR_21 = ''\n", "if VAR_29 is not None:\n", "for VAR_34, line in enumerate(VAR_29):\n", "return VAR_21\n", "VAR_29[VAR_34] = line if line.endswith('\\n') else line + '\\n'\n", "VAR_32, VAR_21 = tempfile.mkstemp()\n", "os.close(VAR_32)\n", "conf_file.writelines(VAR_29)\n" ]
[ "def generate_config_file(self):...\n", "\"\"\"docstring\"\"\"\n", "config_lines = self.config_file()\n", "config_file = ''\n", "if config_lines is not None:\n", "for i, line in enumerate(config_lines):\n", "return config_file\n", "config_lines[i] = line if line.endswith('\\n') else line + '\\n'\n", "config_fd, config_file = tempfile.mkstemp()\n", "os.close(config_fd)\n", "conf_file.writelines(config_lines)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "For", "Return'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "import json\n", "from django import template\n", "from django.db.models import Max, Min\n", "from django.template.loader import render_to_string\n", "from django.utils import timezone\n", "from django.utils.translation import ugettext_lazy as _\n", "from course.models import CourseModule\n", "from lib.errors import TagUsageError\n", "from ..cache.content import CachedContent\n", "from ..cache.points import CachedPoints\n", "from ..exercise_summary import UserExerciseSummary\n", "from ..models import LearningObjectDisplay, LearningObject, Submission, BaseExercise\n", "VAR_0 = template.Library()\n", "def FUNC_0(VAR_1):...\n", "if not 'now' in VAR_1:\n", "VAR_1['now'] = timezone.now()\n", "return VAR_1['now']\n" ]
[ "import json\n", "from django import template\n", "from django.db.models import Max, Min\n", "from django.template.loader import render_to_string\n", "from django.utils import timezone\n", "from django.utils.translation import ugettext_lazy as _\n", "from course.models import CourseModule\n", "from lib.errors import TagUsageError\n", "from ..cache.content import CachedContent\n", "from ..cache.points import CachedPoints\n", "from ..exercise_summary import UserExerciseSummary\n", "from ..models import LearningObjectDisplay, LearningObject, Submission, BaseExercise\n", "register = template.Library()\n", "def _prepare_now(context):...\n", "if not 'now' in context:\n", "context['now'] = timezone.now()\n", "return context['now']\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_2(self, VAR_2):...\n", "if not self.get_options().transitive:\n", "VAR_2 = self.context.target_roots\n", "VAR_3 = tuple(target.sources_snapshot(scheduler=self.context._scheduler) for\n target in VAR_2)\n", "VAR_4 = {f.path for snapshot in VAR_3 for f in snapshot.files}\n", "VAR_13 = os.path.join(tmpdir, 'input_files_list')\n", "for input_file in sorted(VAR_4):\n", "list_file_out.write(input_file)\n", "VAR_14 = self.context._scheduler.capture_snapshots((PathGlobsAndRoot(\n PathGlobs(('input_files_list',)), text_type(tmpdir)),))[0]\n", "list_file_out.write('\\n')\n", "VAR_5, VAR_6 = ClocBinary.global_instance().hackily_snapshot(self.context)\n", "VAR_7 = self.context._scheduler.merge_directories(tuple(s.directory_digest for\n s in VAR_3 + (VAR_6, VAR_14)))\n", "VAR_8 = ('/usr/bin/perl', VAR_5, '--skip-uniqueness', '--ignored=ignored',\n '--list-file=input_files_list', '--report-file=report')\n", "VAR_9 = ExecuteProcessRequest(argv=cmd, VAR_4=directory_digest,\n output_files=('ignored', 'report'), description='cloc')\n", "VAR_10 = self.context.execute_process_synchronously(VAR_9, 'cloc', (\n WorkUnitLabel.TOOL,))\n", "VAR_11 = self.context._scheduler.product_request(FilesContent, [VAR_10.\n output_directory_digest])[0].dependencies\n", "VAR_12 = {fc.path: fc.content.decode('utf-8') for fc in VAR_11}\n", "for line in VAR_12['report'].split('\\n'):\n", "yield line\n", "if self.get_options().ignored:\n", "yield 'Ignored the following files:'\n", "for line in VAR_12['ignored'].split('\\n'):\n", "yield line\n" ]
[ "def console_output(self, targets):...\n", "if not self.get_options().transitive:\n", "targets = self.context.target_roots\n", "input_snapshots = tuple(target.sources_snapshot(scheduler=self.context.\n _scheduler) for target in targets)\n", "input_files = {f.path for snapshot in input_snapshots for f in snapshot.files}\n", "list_file = os.path.join(tmpdir, 'input_files_list')\n", "for input_file in sorted(input_files):\n", "list_file_out.write(input_file)\n", "list_file_snapshot = self.context._scheduler.capture_snapshots((\n PathGlobsAndRoot(PathGlobs(('input_files_list',)), text_type(tmpdir)),))[0]\n", "list_file_out.write('\\n')\n", "cloc_path, cloc_snapshot = ClocBinary.global_instance().hackily_snapshot(self\n .context)\n", "directory_digest = self.context._scheduler.merge_directories(tuple(s.\n directory_digest for s in input_snapshots + (cloc_snapshot,\n list_file_snapshot)))\n", "cmd = ('/usr/bin/perl', cloc_path, '--skip-uniqueness', '--ignored=ignored',\n '--list-file=input_files_list', '--report-file=report')\n", "req = ExecuteProcessRequest(argv=cmd, input_files=directory_digest,\n output_files=('ignored', 'report'), description='cloc')\n", "exec_result = self.context.execute_process_synchronously(req, 'cloc', (\n WorkUnitLabel.TOOL,))\n", "files_content_tuple = self.context._scheduler.product_request(FilesContent,\n [exec_result.output_directory_digest])[0].dependencies\n", "files_content = {fc.path: fc.content.decode('utf-8') for fc in\n files_content_tuple}\n", "for line in files_content['report'].split('\\n'):\n", "yield line\n", "if self.get_options().ignored:\n", "yield 'Ignored the following files:'\n", "for line in files_content['ignored'].split('\\n'):\n", "yield line\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Condition", "Expr'", "For", "Expr'" ]
[ "def FUNC_1(self):...\n", "return 'submit sources %s for task %s (ID %d) %s' % (repr(self.filenames),\n self.task[1], self.task[0], self.url)\n" ]
[ "def describe(self):...\n", "return 'submit sources %s for task %s (ID %d) %s' % (repr(self.filenames),\n self.task[1], self.task[0], self.url)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_18(self, VAR_19):...\n", "" ]
[ "def select_source(self, source_file_path):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_25(self, VAR_25):...\n", "self.__logger.debug('Traversing %s->%s with %s' % (self._src_prx, self.\n _tgt_prx, VAR_25))\n" ]
[ "def __log_run(self, visitor):...\n", "self.__logger.debug('Traversing %s->%s with %s' % (self._src_prx, self.\n _tgt_prx, visitor))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_15(self, **VAR_12):...\n", "\"\"\"docstring\"\"\"\n", "self.sessionhandler.data_out(self, **kwargs)\n" ]
[ "def data_out(self, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "self.sessionhandler.data_out(self, **kwargs)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'" ]
[ "def FUNC_3(self):...\n", "self.assertFalse(self.course.is_teacher(self.user))\n", "self.assertFalse(self.current_course_instance.is_assistant(self.user))\n", "self.assertFalse(self.current_course_instance.is_teacher(self.user))\n", "self.assertFalse(self.current_course_instance.is_course_staff(self.user))\n", "self.assertEquals(0, len(self.current_course_instance.\n get_course_staff_profiles()))\n", "self.current_course_instance.assistants.add(self.user.userprofile)\n", "self.assertFalse(self.course.is_teacher(self.user))\n", "self.assertTrue(self.current_course_instance.is_assistant(self.user))\n", "self.assertFalse(self.current_course_instance.is_teacher(self.user))\n", "self.assertTrue(self.current_course_instance.is_course_staff(self.user))\n", "self.assertEquals(1, len(self.current_course_instance.\n get_course_staff_profiles()))\n", "self.course.teachers.add(self.user.userprofile)\n", "self.assertTrue(self.course.is_teacher(self.user))\n", "self.assertTrue(self.current_course_instance.is_assistant(self.user))\n", "self.assertTrue(self.current_course_instance.is_teacher(self.user))\n", "self.assertTrue(self.current_course_instance.is_course_staff(self.user))\n", "self.assertEquals(1, len(self.current_course_instance.\n get_course_staff_profiles()))\n", "self.assertEquals('testUser', self.current_course_instance.\n get_course_staff_profiles()[0].shortname)\n", "self.current_course_instance.assistants.clear()\n", "self.assertTrue(self.course.is_teacher(self.user))\n", "self.assertFalse(self.current_course_instance.is_assistant(self.user))\n", "self.assertTrue(self.current_course_instance.is_teacher(self.user))\n", "self.assertTrue(self.current_course_instance.is_course_staff(self.user))\n", "self.assertEquals(1, len(self.current_course_instance.\n get_course_staff_profiles()))\n", "self.course.teachers.clear()\n", "self.assertFalse(self.course.is_teacher(self.user))\n", "self.assertFalse(self.current_course_instance.is_assistant(self.user))\n", "self.assertFalse(self.current_course_instance.is_teacher(self.user))\n", "self.assertFalse(self.current_course_instance.is_course_staff(self.user))\n", "self.assertEquals(0, len(self.current_course_instance.\n get_course_staff_profiles()))\n" ]
[ "def test_course_staff(self):...\n", "self.assertFalse(self.course.is_teacher(self.user))\n", "self.assertFalse(self.current_course_instance.is_assistant(self.user))\n", "self.assertFalse(self.current_course_instance.is_teacher(self.user))\n", "self.assertFalse(self.current_course_instance.is_course_staff(self.user))\n", "self.assertEquals(0, len(self.current_course_instance.\n get_course_staff_profiles()))\n", "self.current_course_instance.assistants.add(self.user.userprofile)\n", "self.assertFalse(self.course.is_teacher(self.user))\n", "self.assertTrue(self.current_course_instance.is_assistant(self.user))\n", "self.assertFalse(self.current_course_instance.is_teacher(self.user))\n", "self.assertTrue(self.current_course_instance.is_course_staff(self.user))\n", "self.assertEquals(1, len(self.current_course_instance.\n get_course_staff_profiles()))\n", "self.course.teachers.add(self.user.userprofile)\n", "self.assertTrue(self.course.is_teacher(self.user))\n", "self.assertTrue(self.current_course_instance.is_assistant(self.user))\n", "self.assertTrue(self.current_course_instance.is_teacher(self.user))\n", "self.assertTrue(self.current_course_instance.is_course_staff(self.user))\n", "self.assertEquals(1, len(self.current_course_instance.\n get_course_staff_profiles()))\n", "self.assertEquals('testUser', self.current_course_instance.\n get_course_staff_profiles()[0].shortname)\n", "self.current_course_instance.assistants.clear()\n", "self.assertTrue(self.course.is_teacher(self.user))\n", "self.assertFalse(self.current_course_instance.is_assistant(self.user))\n", "self.assertTrue(self.current_course_instance.is_teacher(self.user))\n", "self.assertTrue(self.current_course_instance.is_course_staff(self.user))\n", "self.assertEquals(1, len(self.current_course_instance.\n get_course_staff_profiles()))\n", "self.course.teachers.clear()\n", "self.assertFalse(self.course.is_teacher(self.user))\n", "self.assertFalse(self.current_course_instance.is_assistant(self.user))\n", "self.assertFalse(self.current_course_instance.is_teacher(self.user))\n", "self.assertFalse(self.current_course_instance.is_course_staff(self.user))\n", "self.assertEquals(0, len(self.current_course_instance.\n get_course_staff_profiles()))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "@utils.synchronized('3par', external=True)...\n", "self.common.client_login()\n", "self.common.delete_snapshot(VAR_8)\n", "self.common.client_logout()\n" ]
[ "@utils.synchronized('3par', external=True)...\n", "self.common.client_login()\n", "self.common.delete_snapshot(snapshot)\n", "self.common.client_logout()\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Expr'", "Expr'" ]
[ "import logging\n", "import tempfile\n", "import uuid\n", "from odoo import api, registry, fields, models, _\n", "from odoo.tools import mod10r\n", "from odoo.tools.config import config\n", "from odoo.addons.base_geoengine.fields import GeoPoint\n", "from odoo.addons.base_geoengine import fields as geo_fields\n", "VAR_0 = ['street', 'street2', 'street3', 'zip', 'city', 'state_id',\n 'country_id']\n", "VAR_1 = logging.getLogger(__name__)\n", "import pyminizip\n", "VAR_1.warning('Please install python dependencies.', exc_info=True)\n", "\"\"\"string\"\"\"\n", "import csv\n", "VAR_2 = 'res.partner'\n", "from smb.SMBConnection import SMBConnection\n", "def FUNC_0(self):...\n", "from smb.smb_structs import OperationFailure\n", "\"\"\"docstring\"\"\"\n", "return [('no', _('No receipt')), ('default', _('Default')), ('only_email',\n _('Only email')), ('paper', _('On paper'))]\n" ]
[ "import logging\n", "import tempfile\n", "import uuid\n", "from odoo import api, registry, fields, models, _\n", "from odoo.tools import mod10r\n", "from odoo.tools.config import config\n", "from odoo.addons.base_geoengine.fields import GeoPoint\n", "from odoo.addons.base_geoengine import fields as geo_fields\n", "ADDRESS_FIELDS = ['street', 'street2', 'street3', 'zip', 'city', 'state_id',\n 'country_id']\n", "logger = logging.getLogger(__name__)\n", "import pyminizip\n", "logger.warning('Please install python dependencies.', exc_info=True)\n", "\"\"\" This class upgrade the partners to match Compassion needs.\n It also synchronize all changes with the MySQL server of GP.\n \"\"\"\n", "import csv\n", "_inherit = 'res.partner'\n", "from smb.SMBConnection import SMBConnection\n", "def _get_receipt_types(self):...\n", "from smb.smb_structs import OperationFailure\n", "\"\"\"docstring\"\"\"\n", "return [('no', _('No receipt')), ('default', _('Default')), ('only_email',\n _('Only email')), ('paper', _('On paper'))]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Import'", "Expr'", "Expr'", "Import'", "Assign'", "ImportFrom'", "FunctionDef'", "ImportFrom'", "Docstring", "Return'" ]
[ "def FUNC_4(self):...\n", "VAR_8 = 'test_foo.py:FooTest'\n", "VAR_7 = BokChoyTestSuite('', test_spec=spec)\n", "VAR_1 = 'tests/{}'.format(VAR_8)\n", "self.assertEqual(VAR_7.cmd, self._expected_command(VAR_1=name))\n" ]
[ "def test_class_spec(self):...\n", "spec = 'test_foo.py:FooTest'\n", "suite = BokChoyTestSuite('', test_spec=spec)\n", "name = 'tests/{}'.format(spec)\n", "self.assertEqual(suite.cmd, self._expected_command(name=name))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_7(self):...\n", "vimsupport.PostVimMessage('Restarting ycmd server...')\n", "self._user_notified_about_crash = False\n", "self._ServerCleanup()\n", "self._SetupServer()\n" ]
[ "def RestartServer(self):...\n", "vimsupport.PostVimMessage('Restarting ycmd server...')\n", "self._user_notified_about_crash = False\n", "self._ServerCleanup()\n", "self._SetupServer()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_3(VAR_2, VAR_3):...\n", "VAR_15 = set()\n", "for level in VAR_2:\n", "VAR_24 = level[0]\n", "return VAR_15\n", "VAR_11 = level[1]\n", "VAR_15 |= VAR_0[VAR_24].get_parameters(VAR_11, VAR_3)\n" ]
[ "def get_rule_parameters(levellist, doc):...\n", "ret = set()\n", "for level in levellist:\n", "leveltype = level[0]\n", "return ret\n", "levelfields = level[1]\n", "ret |= FnLevel[leveltype].get_parameters(levelfields, doc)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Return'", "Assign'", "AugAssign'" ]
[ "def FUNC_6(self, VAR_23):...\n", "VAR_82 = CLASS_46.run(self, VAR_23)\n", "if not VAR_82 or not hasattr(VAR_82, 'email') or not VAR_82.email:\n", "return self.error(errors.NO_EMAIL_FOR_USER)\n", "return VAR_82\n" ]
[ "def run(self, name):...\n", "user = VExistingUname.run(self, name)\n", "if not user or not hasattr(user, 'email') or not user.email:\n", "return self.error(errors.NO_EMAIL_FOR_USER)\n", "return user\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_9(VAR_20, VAR_21, VAR_22=None):...\n", "VAR_45 = VAR_5[VAR_20]\n", "VAR_21 = str(VAR_21)\n", "VAR_22 = VAR_22 or ''\n", "VAR_46 = VAR_22, VAR_21\n", "VAR_0.info('Removing %s from targets[%s]', repr(VAR_46), VAR_20)\n", "VAR_45.remove(VAR_46)\n" ]
[ "def remove_target(domain, id_, tuser=None):...\n", "tlist = targets[domain]\n", "id_ = str(id_)\n", "tuser = tuser or ''\n", "t = tuser, id_\n", "logger.info('Removing %s from targets[%s]', repr(t), domain)\n", "tlist.remove(t)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_4(self):...\n", "VAR_6 = Script.objects.annotate(num_ets=Count('extractedtext')).filter(\n num_ets__lt=100).filter(script_type='EX').first()\n", "VAR_2 = ExtractedText.objects.filter(qa_group=None).filter(extraction_script\n =scr).filter(data_document__data_group__group_type__code='CO').first().pk\n", "VAR_1 = self.client.get(f'/qa/extractedtext/{VAR_2}/')\n", "VAR_6 = ExtractedText.objects.get(VAR_2=pk).extraction_script\n", "VAR_4 = QAGroup.objects.filter(extraction_script=scr).count()\n", "self.assertTrue(VAR_4 == 1)\n", "self.assertTrue(VAR_6.qa_begun)\n", "VAR_7 = QAGroup.objects.get(extraction_script=scr)\n", "VAR_3 = ExtractedText.objects.get(VAR_2=pk)\n", "self.assertTrue(VAR_3.qa_group == VAR_7)\n", "VAR_1 = self.client.get(f'/qa/extractionscript/')\n", "self.assertIn(f\"'/qa/extractionscript/{VAR_6.pk}/'> Continue QA\".encode(),\n VAR_1.content)\n", "VAR_6 = Script.objects.annotate(num_ets=Count('extractedtext')).filter(\n num_ets__gt=100).first()\n", "VAR_2 = ExtractedText.objects.filter(extraction_script=scr).first().pk\n", "VAR_1 = self.client.get(f'/qa/extractedtext/{VAR_2}/')\n", "VAR_6 = ExtractedText.objects.get(VAR_2=pk).extraction_script\n", "VAR_7 = QAGroup.objects.get(extraction_script=scr)\n", "VAR_8 = ExtractedText.objects.filter(qa_group=new_group).count()\n", "self.assertTrue(VAR_8 > 100)\n", "VAR_2 = ExtractedText.objects.filter(extraction_script_id=scr.id).filter(\n qa_group=None).first().pk\n", "VAR_1 = self.client.get(f'/qa/extractedtext/{VAR_2}/')\n", "self.assertGreater(ExtractedText.objects.filter(qa_group=new_group).count(),\n VAR_8)\n" ]
[ "def test_data_document_qa(self):...\n", "scr = Script.objects.annotate(num_ets=Count('extractedtext')).filter(\n num_ets__lt=100).filter(script_type='EX').first()\n", "pk = ExtractedText.objects.filter(qa_group=None).filter(extraction_script=scr\n ).filter(data_document__data_group__group_type__code='CO').first().pk\n", "response = self.client.get(f'/qa/extractedtext/{pk}/')\n", "scr = ExtractedText.objects.get(pk=pk).extraction_script\n", "group_count = QAGroup.objects.filter(extraction_script=scr).count()\n", "self.assertTrue(group_count == 1)\n", "self.assertTrue(scr.qa_begun)\n", "new_group = QAGroup.objects.get(extraction_script=scr)\n", "et = ExtractedText.objects.get(pk=pk)\n", "self.assertTrue(et.qa_group == new_group)\n", "response = self.client.get(f'/qa/extractionscript/')\n", "self.assertIn(f\"'/qa/extractionscript/{scr.pk}/'> Continue QA\".encode(),\n response.content)\n", "scr = Script.objects.annotate(num_ets=Count('extractedtext')).filter(\n num_ets__gt=100).first()\n", "pk = ExtractedText.objects.filter(extraction_script=scr).first().pk\n", "response = self.client.get(f'/qa/extractedtext/{pk}/')\n", "scr = ExtractedText.objects.get(pk=pk).extraction_script\n", "new_group = QAGroup.objects.get(extraction_script=scr)\n", "initial_qa_count = ExtractedText.objects.filter(qa_group=new_group).count()\n", "self.assertTrue(initial_qa_count > 100)\n", "pk = ExtractedText.objects.filter(extraction_script_id=scr.id).filter(qa_group\n =None).first().pk\n", "response = self.client.get(f'/qa/extractedtext/{pk}/')\n", "self.assertGreater(ExtractedText.objects.filter(qa_group=new_group).count(),\n initial_qa_count)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "@renderer...\n", "if self._error_msg is not None:\n", "return VAR_8(self._error_msg)\n", "return VAR_8('')\n" ]
[ "@renderer...\n", "if self._error_msg is not None:\n", "return tag(self._error_msg)\n", "return tag('')\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_2(self, VAR_6, VAR_7):...\n", "VAR_14 = \"SELECT Pass FROM user WHERE Email = '\" + VAR_6 + \"'\"\n", "self.cursor.execute(VAR_14)\n", "VAR_15 = self.cursor.fetchone()\n", "VAR_4 = ' '.join(map(str, VAR_15))\n", "return check_password_hash(VAR_4, VAR_7)\n" ]
[ "def verify_account(self, email, user_password):...\n", "query = \"SELECT Pass FROM user WHERE Email = '\" + email + \"'\"\n", "self.cursor.execute(query)\n", "fetch = self.cursor.fetchone()\n", "password = ' '.join(map(str, fetch))\n", "return check_password_hash(password, user_password)\n" ]
[ 0, 4, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "Return'" ]
[ "from argparse import ArgumentParser\n", "import utilities\n", "VAR_0 = ArgumentParser(description=\n 'The Dynamic Robust Single Event Upset Simulator was created by Ed Carlisle IV'\n , epilog=\n 'Begin by creating a new campaign with \"%(prog)s new APPLICATION\". Then run injections with \"%(prog)s inject\".'\n )\n", "VAR_0.add_argument('-C', '--campaign', action='store', type=int, metavar=\n 'ID', dest='campaign_id', default=0, help=\n 'campaign to use, defaults to last campaign created')\n", "VAR_0.add_argument('-D', '--debug', action='store_true', dest='debug', help\n ='display device output for parallel injections')\n", "VAR_0.add_argument('-T', '--timeout', action='store', type=int, metavar=\n 'SECONDS', dest='timeout', default=300, help=\n 'device read timeout [default=300]')\n", "VAR_0.add_argument('--serial', action='store', metavar='PORT', dest=\n 'dut_serial_port', help=\n 'DUT serial port [p2020 default=/dev/ttyUSB1] [a9 default=/dev/ttyACM0] (overridden by Simics)'\n )\n", "VAR_0.add_argument('--baud', action='store', type=int, metavar='RATE', dest\n ='dut_baud_rate', default=115200, help=\n 'DUT serial port baud rate [default=115200]')\n", "VAR_0.add_argument('--scp', action='store', type=int, metavar='PORT', dest=\n 'dut_scp_port', default=22, help=\n 'DUT scp port [default=22] (overridden by Simics)')\n", "VAR_0.add_argument('--prompt', action='store', metavar='PROMPT', dest=\n 'dut_prompt', help=\n 'DUT console prompt [p2020 default=root@p2020rdb:~#] [a9 default=[root@ZED]#] (overridden by Simics)'\n )\n", "VAR_0.add_argument('--user', action='store', dest='username', default=\n 'root', help='device username')\n", "VAR_0.add_argument('--pass', action='store', dest='password', default=\n 'chrec', help='device password')\n", "VAR_0.add_argument('--uboot', action='store', metavar='COMMAND', dest=\n 'dut_uboot', default='', help='DUT u-boot command')\n", "VAR_0.add_argument('--aux_serial', action='store', metavar='PORT', dest=\n 'aux_serial_port', help=\n 'AUX serial port [p2020 default=/dev/ttyUSB1] [a9 default=/dev/ttyACM0] (overridden by Simics)'\n )\n", "VAR_0.add_argument('--aux_baud', action='store', type=int, metavar='RATE',\n dest='aux_baud_rate', default=115200, help=\n 'AUX serial port baud rate [default=115200]')\n", "VAR_0.add_argument('--aux_scp', action='store', type=int, metavar='PORT',\n dest='aux_scp_port', default=22, help=\n 'AUX scp port [default=22] (overridden by Simics)')\n", "VAR_0.add_argument('--aux_prompt', action='store', metavar='PROMPT', dest=\n 'aux_prompt', help=\n 'AUX console prompt [p2020 default=root@p2020rdb:~#] [a9 default=[root@ZED]#] (overridden by Simics)'\n )\n", "VAR_0.add_argument('--aux_uboot', action='store', metavar='COMMAND', dest=\n 'aux_uboot', default='', help='AUX u-boot command')\n", "VAR_0.add_argument('--debugger_ip', action='store', metavar='ADDRESS', dest\n ='debugger_ip_address', default='10.42.0.50', help=\n 'debugger ip address [default=10.42.0.50] (ignored by Simics and ZedBoards)'\n )\n", "VAR_0.add_argument('--no_jtag', action='store_false', dest='jtag', help=\n 'do not connect to jtag debugger (ignored by Simics)')\n", "VAR_1 = VAR_0.add_subparsers(title='commands', description=\n 'Run \"%(prog)s COMMAND -h\" to get additional help for each command',\n metavar='COMMAND', dest='command')\n", "VAR_2 = VAR_1.add_parser('new', aliases=['n'], help='create a new campaign',\n description='create a new campaign')\n", "VAR_2.add_argument('application', action='store', metavar='APPLICATION',\n help='application to run on 
device')\n", "VAR_2.add_argument('-A', '--arch', action='store', choices=('a9', 'p2020'),\n dest='architecture', default='p2020', help=\n 'target architecture [default=p2020]')\n", "VAR_2.add_argument('-t', '--timing', action='store', type=int, dest=\n 'iterations', default=5, help=\n 'number of timing iterations to run [default=5]')\n", "VAR_2.add_argument('-a', '--args', action='store', nargs='+', dest=\n 'arguments', help='arguments for application')\n", "VAR_2.add_argument('-d', '--dir', action='store', dest='directory', default\n ='fiapps', help='directory to look for files [default=fiapps]')\n", "VAR_2.add_argument('-f', '--files', action='store', nargs='+', metavar=\n 'FILE', dest='files', help='files to copy to device')\n", "VAR_2.add_argument('-o', '--output', action='store', dest='file', default=\n 'result.dat', help='target application output file [default=result.dat]')\n", "VAR_2.add_argument('-x', '--aux', action='store_true', dest='use_aux', help\n ='use auxiliary device during testing')\n", "VAR_2.add_argument('-y', '--aux_app', action='store', metavar='APPLICATION',\n dest='aux_application', help='target application for auxiliary device')\n", "VAR_2.add_argument('-z', '--aux_args', action='store', metavar='ARGUMENTS',\n dest='aux_arguments', help='arguments for auxiliary application')\n", "VAR_2.add_argument('-F', '--aux_files', action='store', nargs='+', metavar=\n 'FILE', dest='aux_files', help='files to copy to auxiliary device')\n", "VAR_2.add_argument('-O', '--aux_output', action='store_true', dest=\n 'use_aux_output', help='use output file from auxiliary device')\n", "VAR_2.add_argument('-k', '--kill_dut', action='store_true', dest='kill_dut',\n help='send ctrl-c to DUT after auxiliary device completes execution')\n", "VAR_2.add_argument('-s', '--simics', action='store_true', dest='use_simics',\n help='use Simics simulator')\n", "VAR_3 = VAR_2.add_argument_group('Simics campaigns',\n 'Additional options for Simics campaigns only')\n", "VAR_3.add_argument('-c', '--checkpoints', action='store', type=int, metavar\n ='CHECKPOINTS', dest='checkpoints', default=50, help=\n 'number of gold checkpoints to target for creation (actual number of checkpoints may be different) [default=50]'\n )\n", "VAR_2.set_defaults(func=utilities.create_campaign)\n", "VAR_4 = VAR_1.add_parser('inject', aliases=['i', 'I', 'inj'], help=\n 'perform fault injections on a campaign', description=\n 'perform fault injections on a campaign')\n", "VAR_4.add_argument('-n', '--iterations', action='store', type=int, dest=\n 'iterations', help='number of iterations to perform [default=infinite]')\n", "VAR_4.add_argument('-i', '--injections', action='store', type=int, dest=\n 'injections', default=1, help=\n 'number of injections per iteration [default=1]')\n", "VAR_4.add_argument('-t', '--targets', action='store', nargs='+', metavar=\n 'TARGET', dest='selected_targets', help='list of targets for injection')\n", "VAR_4.add_argument('-p', '--processes', action='store', type=int, dest=\n 'processes', default=1, help=\n 'number of injections to perform in parallel (only supported for ZedBoards and Simics)'\n )\n", "VAR_5 = VAR_4.add_argument_group('Simics campaigns',\n 'Additional options for Simics campaigns only')\n", "VAR_5.add_argument('-a', '--compare_all', action='store_true', dest=\n 'compare_all', help=\n 'monitor all checkpoints (only last by default), IMPORTANT: do NOT use with \"-p\" or \"--processes\" when using this option for the first time in a campaign'\n )\n", 
"VAR_4.set_defaults(func=utilities.inject_campaign)\n", "VAR_6 = VAR_1.add_parser('supervise', aliases=['s', 'S'], help=\n 'run interactive supervisor', description='run interactive supervisor')\n", "VAR_6.add_argument('-w', '--wireshark', action='store_true', dest='capture',\n help='run remote packet capture')\n", "VAR_6.set_defaults(func=utilities.launch_supervisor)\n", "VAR_7 = VAR_1.add_parser('log', aliases=['l'], help=\n 'start the log web server', description='start the log web server')\n", "VAR_7.add_argument('-p', '--port', action='store', type=int, dest='port',\n default=8000, help='log web server port [default=8000]')\n", "VAR_7.set_defaults(func=utilities.view_logs)\n", "VAR_8 = VAR_1.add_parser('zedboards', aliases=['z', 'Z'], help=\n 'print information about attached ZedBoards', description=\n 'print information about attached ZedBoards')\n", "VAR_8.set_defaults(func=utilities.print_zedboard_info)\n", "VAR_9 = VAR_1.add_parser('list', aliases=['L', 'ls'], help='list campaigns',\n description='list campaigns')\n", "VAR_9.set_defaults(func=utilities.list_campaigns)\n", "VAR_10 = VAR_1.add_parser('delete', aliases=['d', 'D'], description=\n 'delete results and campaigns', help='delete results and campaigns')\n", "VAR_10.add_argument('delete', action='store', choices=('all', 'results',\n 'campaign'), help=\n 'delete {results} for the selected campaign, delete selected {campaign} and its results, or delete {all} campaigns and results'\n )\n", "VAR_10.set_defaults(func=utilities.delete)\n", "VAR_11 = VAR_1.add_parser('merge', aliases=['m', 'M'], help=\n 'merge campaigns', description='merge campaigns')\n", "VAR_11.add_argument('directory', action='store', metavar='DIRECTORY', help=\n 'merge campaigns from external directory into the local directory')\n", "VAR_11.set_defaults(func=utilities.merge_campaigns)\n", "VAR_12 = VAR_1.add_parser('openocd', aliases=['o', 'O'], help=\n 'launch openocd for DUT (only supported for ZedBoards)', description=\n 'launch openocd for DUT (only supported for ZedBoards)')\n", "VAR_12.set_defaults(func=utilities.launch_openocd)\n", "VAR_13 = VAR_1.add_parser('regenerate', aliases=['r', 'R'], help=\n 'regenerate injected state and launch in Simics (only supported for Simics campaigns)'\n , description=\n 'regenerate injected state and launch in Simics (only supported for Simics campaigns)'\n )\n", "VAR_13.add_argument('result_id', action='store', metavar='RESULT_ID', help=\n 'result to regenerate')\n", "VAR_13.set_defaults(func=utilities.regenerate)\n", "VAR_14 = VAR_1.add_parser('update', aliases=['u', 'U'], help=\n 'update gold checkpoint dependency paths (only supported for Simics campaigns)'\n , description=\n 'update gold checkpoint dependency paths (only supported for Simics campaigns)'\n )\n", "VAR_14.set_defaults(func=utilities.update_dependencies)\n", "VAR_15 = VAR_1.add_parser('backup', aliases=['b', 'B'], help=\n 'backup the results database', description='backup the results database')\n", "VAR_15.set_defaults(func=utilities.backup_database)\n", "VAR_16 = VAR_0.parse_args()\n", "if VAR_16.command is None:\n", "VAR_0.print_help()\n", "if VAR_16.command != 'new':\n", "if not VAR_16.campaign_id:\n", "if VAR_16.command == 'new' or VAR_16.campaign_id:\n", "VAR_16.campaign_id = utilities.get_last_campaign()\n", "if VAR_16.campaign_id:\n", "if VAR_16.architecture == 'p2020':\n", "if VAR_16.command == 'new' and VAR_16.arguments:\n", "VAR_16.architecture = utilities.get_campaign_data(VAR_16.campaign_id)[\n 'architecture']\n", "if VAR_16.dut_serial_port 
is None:\n", "if VAR_16.architecture == 'a9':\n", "VAR_16.arguments = ' '.join(VAR_16.arguments)\n", "VAR_16.func(VAR_16)\n", "VAR_16.dut_serial_port = '/dev/ttyUSB1'\n", "if VAR_16.dut_prompt is None:\n", "if VAR_16.dut_serial_port is None:\n", "VAR_16.dut_prompt = 'root@p2020rdb:~#'\n", "if VAR_16.aux_serial_port is None:\n", "VAR_16.dut_serial_port = '/dev/ttyACM0'\n", "if VAR_16.dut_prompt is None:\n", "VAR_16.aux_serial_port = '/dev/ttyUSB0'\n", "if VAR_16.aux_prompt is None:\n", "VAR_16.dut_prompt = '[root@ZED]#'\n", "if VAR_16.aux_serial_port is None:\n", "VAR_16.aux_prompt = 'root@p2020rdb:~#'\n", "VAR_16.aux_serial_port = '/dev/ttyACM1'\n", "if VAR_16.aux_prompt is None:\n", "VAR_16.aux_prompt = '[root@ZED]#'\n" ]
[ "from argparse import ArgumentParser\n", "import utilities\n", "parser = ArgumentParser(description=\n 'The Dynamic Robust Single Event Upset Simulator was created by Ed Carlisle IV'\n , epilog=\n 'Begin by creating a new campaign with \"%(prog)s new APPLICATION\". Then run injections with \"%(prog)s inject\".'\n )\n", "parser.add_argument('-C', '--campaign', action='store', type=int, metavar=\n 'ID', dest='campaign_id', default=0, help=\n 'campaign to use, defaults to last campaign created')\n", "parser.add_argument('-D', '--debug', action='store_true', dest='debug',\n help='display device output for parallel injections')\n", "parser.add_argument('-T', '--timeout', action='store', type=int, metavar=\n 'SECONDS', dest='timeout', default=300, help=\n 'device read timeout [default=300]')\n", "parser.add_argument('--serial', action='store', metavar='PORT', dest=\n 'dut_serial_port', help=\n 'DUT serial port [p2020 default=/dev/ttyUSB1] [a9 default=/dev/ttyACM0] (overridden by Simics)'\n )\n", "parser.add_argument('--baud', action='store', type=int, metavar='RATE',\n dest='dut_baud_rate', default=115200, help=\n 'DUT serial port baud rate [default=115200]')\n", "parser.add_argument('--scp', action='store', type=int, metavar='PORT', dest\n ='dut_scp_port', default=22, help=\n 'DUT scp port [default=22] (overridden by Simics)')\n", "parser.add_argument('--prompt', action='store', metavar='PROMPT', dest=\n 'dut_prompt', help=\n 'DUT console prompt [p2020 default=root@p2020rdb:~#] [a9 default=[root@ZED]#] (overridden by Simics)'\n )\n", "parser.add_argument('--user', action='store', dest='username', default=\n 'root', help='device username')\n", "parser.add_argument('--pass', action='store', dest='password', default=\n 'chrec', help='device password')\n", "parser.add_argument('--uboot', action='store', metavar='COMMAND', dest=\n 'dut_uboot', default='', help='DUT u-boot command')\n", "parser.add_argument('--aux_serial', action='store', metavar='PORT', dest=\n 'aux_serial_port', help=\n 'AUX serial port [p2020 default=/dev/ttyUSB1] [a9 default=/dev/ttyACM0] (overridden by Simics)'\n )\n", "parser.add_argument('--aux_baud', action='store', type=int, metavar='RATE',\n dest='aux_baud_rate', default=115200, help=\n 'AUX serial port baud rate [default=115200]')\n", "parser.add_argument('--aux_scp', action='store', type=int, metavar='PORT',\n dest='aux_scp_port', default=22, help=\n 'AUX scp port [default=22] (overridden by Simics)')\n", "parser.add_argument('--aux_prompt', action='store', metavar='PROMPT', dest=\n 'aux_prompt', help=\n 'AUX console prompt [p2020 default=root@p2020rdb:~#] [a9 default=[root@ZED]#] (overridden by Simics)'\n )\n", "parser.add_argument('--aux_uboot', action='store', metavar='COMMAND', dest=\n 'aux_uboot', default='', help='AUX u-boot command')\n", "parser.add_argument('--debugger_ip', action='store', metavar='ADDRESS',\n dest='debugger_ip_address', default='10.42.0.50', help=\n 'debugger ip address [default=10.42.0.50] (ignored by Simics and ZedBoards)'\n )\n", "parser.add_argument('--no_jtag', action='store_false', dest='jtag', help=\n 'do not connect to jtag debugger (ignored by Simics)')\n", "subparsers = parser.add_subparsers(title='commands', description=\n 'Run \"%(prog)s COMMAND -h\" to get additional help for each command',\n metavar='COMMAND', dest='command')\n", "new_campaign = subparsers.add_parser('new', aliases=['n'], help=\n 'create a new campaign', description='create a new campaign')\n", "new_campaign.add_argument('application', action='store', metavar=\n 
'APPLICATION', help='application to run on device')\n", "new_campaign.add_argument('-A', '--arch', action='store', choices=('a9',\n 'p2020'), dest='architecture', default='p2020', help=\n 'target architecture [default=p2020]')\n", "new_campaign.add_argument('-t', '--timing', action='store', type=int, dest=\n 'iterations', default=5, help=\n 'number of timing iterations to run [default=5]')\n", "new_campaign.add_argument('-a', '--args', action='store', nargs='+', dest=\n 'arguments', help='arguments for application')\n", "new_campaign.add_argument('-d', '--dir', action='store', dest='directory',\n default='fiapps', help='directory to look for files [default=fiapps]')\n", "new_campaign.add_argument('-f', '--files', action='store', nargs='+',\n metavar='FILE', dest='files', help='files to copy to device')\n", "new_campaign.add_argument('-o', '--output', action='store', dest='file',\n default='result.dat', help=\n 'target application output file [default=result.dat]')\n", "new_campaign.add_argument('-x', '--aux', action='store_true', dest=\n 'use_aux', help='use auxiliary device during testing')\n", "new_campaign.add_argument('-y', '--aux_app', action='store', metavar=\n 'APPLICATION', dest='aux_application', help=\n 'target application for auxiliary device')\n", "new_campaign.add_argument('-z', '--aux_args', action='store', metavar=\n 'ARGUMENTS', dest='aux_arguments', help=\n 'arguments for auxiliary application')\n", "new_campaign.add_argument('-F', '--aux_files', action='store', nargs='+',\n metavar='FILE', dest='aux_files', help='files to copy to auxiliary device')\n", "new_campaign.add_argument('-O', '--aux_output', action='store_true', dest=\n 'use_aux_output', help='use output file from auxiliary device')\n", "new_campaign.add_argument('-k', '--kill_dut', action='store_true', dest=\n 'kill_dut', help=\n 'send ctrl-c to DUT after auxiliary device completes execution')\n", "new_campaign.add_argument('-s', '--simics', action='store_true', dest=\n 'use_simics', help='use Simics simulator')\n", "new_simics_campaign = new_campaign.add_argument_group('Simics campaigns',\n 'Additional options for Simics campaigns only')\n", "new_simics_campaign.add_argument('-c', '--checkpoints', action='store',\n type=int, metavar='CHECKPOINTS', dest='checkpoints', default=50, help=\n 'number of gold checkpoints to target for creation (actual number of checkpoints may be different) [default=50]'\n )\n", "new_campaign.set_defaults(func=utilities.create_campaign)\n", "inject = subparsers.add_parser('inject', aliases=['i', 'I', 'inj'], help=\n 'perform fault injections on a campaign', description=\n 'perform fault injections on a campaign')\n", "inject.add_argument('-n', '--iterations', action='store', type=int, dest=\n 'iterations', help='number of iterations to perform [default=infinite]')\n", "inject.add_argument('-i', '--injections', action='store', type=int, dest=\n 'injections', default=1, help=\n 'number of injections per iteration [default=1]')\n", "inject.add_argument('-t', '--targets', action='store', nargs='+', metavar=\n 'TARGET', dest='selected_targets', help='list of targets for injection')\n", "inject.add_argument('-p', '--processes', action='store', type=int, dest=\n 'processes', default=1, help=\n 'number of injections to perform in parallel (only supported for ZedBoards and Simics)'\n )\n", "inject_simics = inject.add_argument_group('Simics campaigns',\n 'Additional options for Simics campaigns only')\n", "inject_simics.add_argument('-a', '--compare_all', action='store_true', dest\n 
='compare_all', help=\n 'monitor all checkpoints (only last by default), IMPORTANT: do NOT use with \"-p\" or \"--processes\" when using this option for the first time in a campaign'\n )\n", "inject.set_defaults(func=utilities.inject_campaign)\n", "supervise = subparsers.add_parser('supervise', aliases=['s', 'S'], help=\n 'run interactive supervisor', description='run interactive supervisor')\n", "supervise.add_argument('-w', '--wireshark', action='store_true', dest=\n 'capture', help='run remote packet capture')\n", "supervise.set_defaults(func=utilities.launch_supervisor)\n", "log_viewer = subparsers.add_parser('log', aliases=['l'], help=\n 'start the log web server', description='start the log web server')\n", "log_viewer.add_argument('-p', '--port', action='store', type=int, dest=\n 'port', default=8000, help='log web server port [default=8000]')\n", "log_viewer.set_defaults(func=utilities.view_logs)\n", "zedboards = subparsers.add_parser('zedboards', aliases=['z', 'Z'], help=\n 'print information about attached ZedBoards', description=\n 'print information about attached ZedBoards')\n", "zedboards.set_defaults(func=utilities.print_zedboard_info)\n", "list_campaigns = subparsers.add_parser('list', aliases=['L', 'ls'], help=\n 'list campaigns', description='list campaigns')\n", "list_campaigns.set_defaults(func=utilities.list_campaigns)\n", "delete = subparsers.add_parser('delete', aliases=['d', 'D'], description=\n 'delete results and campaigns', help='delete results and campaigns')\n", "delete.add_argument('delete', action='store', choices=('all', 'results',\n 'campaign'), help=\n 'delete {results} for the selected campaign, delete selected {campaign} and its results, or delete {all} campaigns and results'\n )\n", "delete.set_defaults(func=utilities.delete)\n", "merge = subparsers.add_parser('merge', aliases=['m', 'M'], help=\n 'merge campaigns', description='merge campaigns')\n", "merge.add_argument('directory', action='store', metavar='DIRECTORY', help=\n 'merge campaigns from external directory into the local directory')\n", "merge.set_defaults(func=utilities.merge_campaigns)\n", "openocd = subparsers.add_parser('openocd', aliases=['o', 'O'], help=\n 'launch openocd for DUT (only supported for ZedBoards)', description=\n 'launch openocd for DUT (only supported for ZedBoards)')\n", "openocd.set_defaults(func=utilities.launch_openocd)\n", "regenerate = subparsers.add_parser('regenerate', aliases=['r', 'R'], help=\n 'regenerate injected state and launch in Simics (only supported for Simics campaigns)'\n , description=\n 'regenerate injected state and launch in Simics (only supported for Simics campaigns)'\n )\n", "regenerate.add_argument('result_id', action='store', metavar='RESULT_ID',\n help='result to regenerate')\n", "regenerate.set_defaults(func=utilities.regenerate)\n", "update = subparsers.add_parser('update', aliases=['u', 'U'], help=\n 'update gold checkpoint dependency paths (only supported for Simics campaigns)'\n , description=\n 'update gold checkpoint dependency paths (only supported for Simics campaigns)'\n )\n", "update.set_defaults(func=utilities.update_dependencies)\n", "backup = subparsers.add_parser('backup', aliases=['b', 'B'], help=\n 'backup the results database', description='backup the results database')\n", "backup.set_defaults(func=utilities.backup_database)\n", "options = parser.parse_args()\n", "if options.command is None:\n", "parser.print_help()\n", "if options.command != 'new':\n", "if not options.campaign_id:\n", "if options.command == 'new' or 
options.campaign_id:\n", "options.campaign_id = utilities.get_last_campaign()\n", "if options.campaign_id:\n", "if options.architecture == 'p2020':\n", "if options.command == 'new' and options.arguments:\n", "options.architecture = utilities.get_campaign_data(options.campaign_id)[\n 'architecture']\n", "if options.dut_serial_port is None:\n", "if options.architecture == 'a9':\n", "options.arguments = ' '.join(options.arguments)\n", "options.func(options)\n", "options.dut_serial_port = '/dev/ttyUSB1'\n", "if options.dut_prompt is None:\n", "if options.dut_serial_port is None:\n", "options.dut_prompt = 'root@p2020rdb:~#'\n", "if options.aux_serial_port is None:\n", "options.dut_serial_port = '/dev/ttyACM0'\n", "if options.dut_prompt is None:\n", "options.aux_serial_port = '/dev/ttyUSB0'\n", "if options.aux_prompt is None:\n", "options.dut_prompt = '[root@ZED]#'\n", "if options.aux_serial_port is None:\n", "options.aux_prompt = 'root@p2020rdb:~#'\n", "options.aux_serial_port = '/dev/ttyACM1'\n", "if options.aux_prompt is None:\n", "options.aux_prompt = '[root@ZED]#'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Condition", "Condition", "Condition", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'" ]
[ "def FUNC_39(VAR_101):...\n", "VAR_101.resources = VAR_76, VAR_52\n", "return VAR_101\n" ]
[ "def decorate(ruleinfo):...\n", "ruleinfo.resources = args, resources\n", "return ruleinfo\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_0(self):...\n", "if self.label:\n", "return self.label\n", "if self.model_field:\n", "return self.model_field.verbose_name.title()\n", "if self.field_name == '__str__':\n", "return self.model._meta.verbose_name.title()\n", "VAR_10 = getattr(self.model, self.field_name)\n", "return self.field_name.title()\n", "if callable(VAR_10) and getattr(VAR_10, 'short_description', False):\n", "return VAR_10.short_description\n", "return self.field_name.replace('_', ' ').title()\n" ]
[ "def get_title(self):...\n", "if self.label:\n", "return self.label\n", "if self.model_field:\n", "return self.model_field.verbose_name.title()\n", "if self.field_name == '__str__':\n", "return self.model._meta.verbose_name.title()\n", "field = getattr(self.model, self.field_name)\n", "return self.field_name.title()\n", "if callable(field) and getattr(field, 'short_description', False):\n", "return field.short_description\n", "return self.field_name.replace('_', ' ').title()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Assign'", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_0(self, VAR_1=True):...\n", "self.is_api = VAR_1\n", "self.assets = Environment(os.path.join(os.path.dirname(__file__),\n '../static'), '/static')\n", "VAR_11 = Bundle('css/bootstrap.min.css', 'css/material.min.css', Bundle(\n 'css/schoolcms.css', 'css/dropdown.css', filters='cssmin'),\n 'outdatedbrowser/outdatedbrowser.min.css', output='dict/plugin.min.css')\n", "VAR_12 = Bundle(Bundle('outdatedbrowser/outdatedbrowser.min.js',\n 'react-0.13.2/react-with-addons.min.js', 'js/jquery-2.1.3.min.js',\n 'js/bootstrap.min.js', 'js/react-bootstrap.min.js',\n 'js/react-mini-router.min.js', 'js/marked.min.js', 'js/material.min.js',\n 'js/isMobile.min.js', 'js/moment-with-locales.min.js', 'js/dropdown.js',\n filters='jsmin'), Bundle('schoolcms/init.jsx', 'schoolcms/mixin/*.jsx',\n 'schoolcms/component/*.jsx', 'schoolcms/page/*.jsx', filters=('react',\n 'jsmin')), output='dict/plugin.min.js')\n", "self.assets.register('css_all', VAR_11)\n", "self.assets.register('js_all', VAR_12)\n" ]
[ "def initialize(self, is_api=True):...\n", "self.is_api = is_api\n", "self.assets = Environment(os.path.join(os.path.dirname(__file__),\n '../static'), '/static')\n", "css_all = Bundle('css/bootstrap.min.css', 'css/material.min.css', Bundle(\n 'css/schoolcms.css', 'css/dropdown.css', filters='cssmin'),\n 'outdatedbrowser/outdatedbrowser.min.css', output='dict/plugin.min.css')\n", "js_all = Bundle(Bundle('outdatedbrowser/outdatedbrowser.min.js',\n 'react-0.13.2/react-with-addons.min.js', 'js/jquery-2.1.3.min.js',\n 'js/bootstrap.min.js', 'js/react-bootstrap.min.js',\n 'js/react-mini-router.min.js', 'js/marked.min.js', 'js/material.min.js',\n 'js/isMobile.min.js', 'js/moment-with-locales.min.js', 'js/dropdown.js',\n filters='jsmin'), Bundle('schoolcms/init.jsx', 'schoolcms/mixin/*.jsx',\n 'schoolcms/component/*.jsx', 'schoolcms/page/*.jsx', filters=('react',\n 'jsmin')), output='dict/plugin.min.js')\n", "self.assets.register('css_all', css_all)\n", "self.assets.register('js_all', js_all)\n" ]
[ 0, 0, 5, 5, 5, 5, 5 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_24(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_8 = 'os-chmod-{}.py'\n", "if six.PY2:\n", "VAR_8 = VAR_8.format('py2')\n", "VAR_8 = VAR_8.format('py3')\n", "VAR_2 = {'SEVERITY': {'MEDIUM': 2, 'HIGH': 8}, 'CONFIDENCE': {'MEDIUM': 1,\n 'HIGH': 9}}\n", "self.check_example(VAR_8, VAR_2)\n" ]
[ "def test_os_chmod(self):...\n", "\"\"\"docstring\"\"\"\n", "filename = 'os-chmod-{}.py'\n", "if six.PY2:\n", "filename = filename.format('py2')\n", "filename = filename.format('py3')\n", "expect = {'SEVERITY': {'MEDIUM': 2, 'HIGH': 8}, 'CONFIDENCE': {'MEDIUM': 1,\n 'HIGH': 9}}\n", "self.check_example(filename, expect)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def __init__(self, VAR_1, VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "super(CLASS_1, self).__init__(VAR_1, VAR_2)\n" ]
[ "def __init__(self, pid, comp_name):...\n", "\"\"\"docstring\"\"\"\n", "super(LocalComponentMonitoringJob, self).__init__(pid, comp_name)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'" ]
[ "def __init__(self, VAR_3):...\n", "self._data = VAR_3\n" ]
[ "def __init__(self, data):...\n", "self._data = data\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_6(self):...\n", "self.login()\n", "while True:\n", "VAR_29 = random.random()\n", "VAR_30 = random.choice(self.tasks)\n", "if VAR_29 < 0.1 and self.submissions_path is not None:\n", "self.do_step(SubmitRandomRequest(self.browser, VAR_30, VAR_10=self.base_url,\n VAR_11=self.submissions_path))\n", "if VAR_29 < 0.6 and VAR_30[2] != []:\n", "self.do_step(TaskStatementRequest(self.browser, VAR_30[1], random.choice(\n VAR_30[2]), VAR_10=self.base_url))\n", "self.do_step(TaskRequest(self.browser, VAR_30[1], VAR_10=self.base_url))\n" ]
[ "def act(self):...\n", "self.login()\n", "while True:\n", "choice = random.random()\n", "task = random.choice(self.tasks)\n", "if choice < 0.1 and self.submissions_path is not None:\n", "self.do_step(SubmitRandomRequest(self.browser, task, base_url=self.base_url,\n submissions_path=self.submissions_path))\n", "if choice < 0.6 and task[2] != []:\n", "self.do_step(TaskStatementRequest(self.browser, task[1], random.choice(task\n [2]), base_url=self.base_url))\n", "self.do_step(TaskRequest(self.browser, task[1], base_url=self.base_url))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Expr'" ]
[ "def __init__(self, VAR_0, VAR_1=None, VAR_2=None, VAR_3=False, VAR_4=False,...\n", "super(CLASS_0, self).__init__(*VAR_5, **kwargs)\n", "self.rel_model = VAR_0\n", "self.reference = VAR_1 or VAR_0._meta.fields['id']\n", "self.related_name = VAR_2\n", "self.on_delete = VAR_3\n", "self.on_update = VAR_4\n" ]
[ "def __init__(self, rel_model, reference=None, related_name=None, on_delete=...\n", "super(ForeignKeyField, self).__init__(*args, **kwargs)\n", "self.rel_model = rel_model\n", "self.reference = reference or rel_model._meta.fields['id']\n", "self.related_name = related_name\n", "self.on_delete = on_delete\n", "self.on_update = on_update\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_2(self):...\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "VAR_1 = {'name': self.volume_name, 'size': 1}\n", "self.driver._eql_execute('volume', 'create', VAR_1['name'], '%sG' % VAR_1[\n 'size'], 'pool', self.configuration.eqlx_pool, 'thin-provision').AndReturn(\n ['iSCSI target name is %s.' % self.fake_iqn])\n", "self.mox.ReplayAll()\n", "VAR_2 = self.driver.create_volume(VAR_1)\n", "self.assertEqual(VAR_2, self._model_update)\n" ]
[ "def test_create_volume(self):...\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "volume = {'name': self.volume_name, 'size': 1}\n", "self.driver._eql_execute('volume', 'create', volume['name'], '%sG' % volume\n ['size'], 'pool', self.configuration.eqlx_pool, 'thin-provision'\n ).AndReturn(['iSCSI target name is %s.' % self.fake_iqn])\n", "self.mox.ReplayAll()\n", "model_update = self.driver.create_volume(volume)\n", "self.assertEqual(model_update, self._model_update)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "@VAR_2.route('/tournament_wins')...\n", "if VAR_0 == None:\n", "FUNC_16()\n", "VAR_5 = request.args.get('tag', default=None)\n", "VAR_19 = request.args.get('date', default=None)\n", "if VAR_5 and VAR_19:\n", "VAR_8 = 'string'.format(VAR_5, VAR_5, VAR_19)\n", "return ''\n", "VAR_7 = VAR_0.exec(VAR_8)\n", "VAR_8 = 'string'.format(VAR_5, VAR_5, VAR_19)\n", "VAR_7 = VAR_7 + VAR_0.exec(VAR_8)\n", "VAR_7 = [r for r in VAR_7]\n", "VAR_7.sort(key=lambda x: int(x[1]))\n", "def FUNC_18(VAR_37):...\n", "VAR_37 = VAR_37.replace('[', '')\n", "VAR_37 = VAR_37.replace(']', '')\n", "VAR_45, VAR_46 = VAR_37.split(',')\n", "VAR_37 = '{} - {}'.format(VAR_45, VAR_46)\n", "return VAR_37\n" ]
[ "@endpoints.route('/tournament_wins')...\n", "if db == None:\n", "init()\n", "tag = request.args.get('tag', default=None)\n", "date = request.args.get('date', default=None)\n", "if tag and date:\n", "sql = (\n \"select player1, place, date, score from matches join placings on matches.url=placings.url and matches.player1=placings.player where winner='{}' and player2='{}' and date='{}';\"\n .format(tag, tag, date))\n", "return ''\n", "data = db.exec(sql)\n", "sql = (\n \"select player2, place, date, score from matches join placings on matches.url=placings.url and matches.player2=placings.player where winner='{}' and player1='{}' and date='{}';\"\n .format(tag, tag, date))\n", "data = data + db.exec(sql)\n", "data = [r for r in data]\n", "data.sort(key=lambda x: int(x[1]))\n", "def reformat(score):...\n", "score = score.replace('[', '')\n", "score = score.replace(']', '')\n", "win, loss = score.split(',')\n", "score = '{} - {}'.format(win, loss)\n", "return score\n" ]
[ 0, 0, 0, 0, 0, 0, 4, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "@property...\n", "if self.redirection_source.encoding:\n", "return self.redirection_source.encoding\n", "return 'utf8'\n" ]
[ "@property...\n", "if self.redirection_source.encoding:\n", "return self.redirection_source.encoding\n", "return 'utf8'\n" ]
[ 0, 0, 0, 6 ]
[ "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_19(self, VAR_16, VAR_20, VAR_21=False, **VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "self._eql_execute('volume', 'select', VAR_16['name'], 'access', 'delete', '1')\n", "VAR_0.error(_('Failed to terminate connection to volume %s'), VAR_16['name'])\n" ]
[ "def terminate_connection(self, volume, connector, force=False, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "self._eql_execute('volume', 'select', volume['name'], 'access', 'delete', '1')\n", "LOG.error(_('Failed to terminate connection to volume %s'), volume['name'])\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'" ]