INSTRUCTION
stringlengths 1
46.3k
| RESPONSE
stringlengths 75
80.2k
|
---|---|
Callback for self._client.new_job
|
def _callback(self, bjobid, result, grade, problems, tests, custom, archive, stdout, stderr):
""" Callback for self._client.new_job """
self._jobs_done[str(bjobid)] = (result, grade, problems, tests, custom, archive, stdout, stderr)
self._waiting_jobs.remove(str(bjobid))
|
Get the result of a task. Must only be called ONCE, AFTER the task is done (after a successful call to is_done).
:return a tuple (result, grade, problems, tests, custom, archive)
result is itself a tuple containing the result string and the main feedback (i.e. ('success', 'You succeeded')
grade is a number between 0 and 100 indicating the grade of the users
problems is a dict of tuple, in the form {'problemid': result}
test is a dict of tests made in the container
custom is a dict containing random things set in the container
archive is either None or a bytes containing a tgz archive of files from the job
|
def get_result(self, bjobid):
    """
    Return the result of a finished job and forget it.

    Must only be called ONCE, AFTER the job is done (after a successful call to is_done).

    :param bjobid: id of the batch job whose result is retrieved
    :return: a tuple (result, grade, problems, tests, custom, archive, stdout, stderr)
        result is itself a tuple containing the result string and the main feedback (i.e. ('success', 'You succeeded'))
        grade is a number between 0 and 100 indicating the grade of the user
        problems is a dict of tuple, in the form {'problemid': result}
        tests is a dict of tests made in the container
        custom is a dict containing random things set in the container
        archive is either None or a bytes containing a tgz archive of files from the job
    :raises KeyError: if the job is unknown or its result was already retrieved
    """
    # dict.pop retrieves and removes the entry in a single lookup,
    # instead of the previous read-then-del pair of lookups.
    return self._jobs_done.pop(str(bjobid))
|
GET request
|
def POST_AUTH(self, courseid): # pylint: disable=arguments-differ
    """ POST request: stream a tgz archive of the submissions selected in the form. """
    # NOTE(review): the original docstring said "GET request" but this is the POST handler.
    course, __ = self.get_course_and_check_rights(courseid)
    user_input = web.input(tasks=[], aggregations=[], users=[])
    # All three selection parameters are mandatory, and the requested layout
    # must be one of the formats this page advertises.
    if "filter_type" not in user_input or "type" not in user_input or "format" not in user_input or user_input.format not in self.valid_formats():
        raise web.notfound()
    tasks = list(course.get_tasks().keys())
    # Reject any requested task id that does not belong to this course
    for i in user_input.tasks:
        if i not in tasks:
            raise web.notfound()
    # Load submissions
    submissions, aggregations = self.get_selected_submissions(course, user_input.filter_type, user_input.tasks,
                                                              user_input.users, user_input.aggregations, user_input.type)
    self._logger.info("Downloading %d submissions from course %s", len(submissions), courseid)
    web.header('Content-Type', 'application/x-gzip', unique=True)
    web.header('Content-Disposition', 'attachment; filename="submissions.tgz"', unique=True)
    # The format string (e.g. "taskid/username") describes the archive folder
    # hierarchy; it is reversed before being handed to the archive builder.
    return self.submission_manager.get_submission_archive(submissions, list(reversed(user_input.format.split('/'))), aggregations)
|
GET request
|
def GET_AUTH(self, courseid): # pylint: disable=arguments-differ
    """ GET request: either stream one submission as a tgz archive, or render the download page. """
    course, __ = self.get_course_and_check_rights(courseid)
    user_input = web.input()
    # First, check for a particular submission
    if "submission" in user_input:
        # Only finished submissions ("done" or "error") can be downloaded
        submission = self.database.submissions.find_one({"_id": ObjectId(user_input.submission),
                                                         "courseid": course.get_id(),
                                                         "status": {"$in": ["done", "error"]}})
        if submission is None:
            raise web.notfound()
        self._logger.info("Downloading submission %s - %s - %s - %s", submission['_id'], submission['courseid'],
                          submission['taskid'], submission['username'])
        web.header('Content-Type', 'application/x-gzip', unique=True)
        web.header('Content-Disposition', 'attachment; filename="submissions.tgz"', unique=True)
        return self.submission_manager.get_submission_archive([submission], [], {})
    # Else, display the complete page
    tasks, user_data, aggregations, tutored_aggregations,\
        tutored_users, checked_tasks, checked_users, show_aggregations = self.show_page_params(course, user_input)
    # Default to the first advertised format unless the query names a valid one
    chosen_format = self.valid_formats()[0]
    if "format" in user_input and user_input.format in self.valid_formats():
        chosen_format = user_input.format
        # An aggregation-based layout implies showing the aggregation column
        if "aggregation" in chosen_format:
            show_aggregations = True
    return self.template_helper.get_renderer().course_admin.download(course, tasks, user_data, aggregations,
                                                                     tutored_aggregations, tutored_users,
                                                                     checked_tasks, checked_users,
                                                                     self.valid_formats(), chosen_format,
                                                                     show_aggregations)
|
Allow to connect through a LDAP service
Available configuration:
::
plugins:
- plugin_module": "inginious.frontend.plugins.auth.ldap_auth",
host: "ldap.test.be",
port: 0,
encryption: "ssl",
base_dn: "o=test,c=be",
request: "(uid={})",
name: "LDAP Login"
*host*
The host of the ldap server
*encryption*
Encryption method used to connect to the LDAP server
Can be either "none", "ssl" or "tls"
*request*
Request made to the server in order to find the dn of the user. The characters "{}" will be replaced by the login name.
|
def init(plugin_manager, _, _2, conf):
    """
    Plugin entry point: register an LDAP-based authentication method.

    Recognised configuration keys:
        host        host of the LDAP server
        port        server port; 0 means "use the protocol default"
        encryption  "none", "ssl" or "tls"
        base_dn     base distinguished name for the search
        request     search filter; the "{}" placeholder is replaced by the login name
        name        display name of the authentication method
    """
    chosen_encryption = conf.get("encryption", "none")
    if chosen_encryption not in ("none", "ssl", "tls"):
        raise Exception("Unknown encryption method {}".format(chosen_encryption))
    # Normalise the config in place: "none" becomes None for the LDAP client
    if chosen_encryption == "none":
        conf["encryption"] = None
    # A port of 0 (or missing) means the protocol default
    if conf.get("port", 0) == 0:
        conf["port"] = None
    auth_method = LdapAuthMethod(conf.get("id"), conf.get('name', 'LDAP'), conf.get("imlink", ""), conf)
    plugin_manager.add_page(r'/auth/page/([^/]+)', LDAPAuthenticationPage)
    plugin_manager.register_auth_method(auth_method)
|
>>> from re import compile
>>> atomics = (True, 1, 1.0, '', None, compile(''), datetime.now(), b'')
>>> any(needs_encode(i) for i in atomics)
False
>>> needs_encode([1, 2, 3])
False
>>> needs_encode([])
False
>>> needs_encode([1, [2, 3]])
False
>>> needs_encode({})
False
>>> needs_encode({'1': {'2': 3}})
False
>>> needs_encode({'1': [2]})
False
>>> needs_encode(b'1')
False
Objects that don't round trip need encoding::
>>> needs_encode(tuple())
True
>>> needs_encode(set())
True
>>> needs_encode([1, [set()]])
True
>>> needs_encode({'1': {'2': set()}})
True
Mongo rejects dicts with non-string keys so they need encoding too::
>>> needs_encode({1: 2})
True
>>> needs_encode({'1': {None: True}})
True
|
def needs_encode(obj):
    '''
    Tell whether *obj* must be encoded before being stored in Mongo.

    Atomic values (those listed in the module-level ``atomic_types``)
    round-trip as-is and return False, as do lists and dicts built
    recursively from them, provided every dict key is of a type listed in
    ``valid_key_types``. Everything else (tuples, sets, dicts with
    non-string keys, ...) returns True.

    >>> needs_encode([1, [2, 3]])
    False
    >>> needs_encode(tuple())
    True
    >>> needs_encode({1: 2})
    True
    '''
    kind = type(obj)
    if kind in atomic_types:
        return False
    if kind is list:
        return any(needs_encode(element) for element in obj)
    if kind is dict:
        # A dict is storable only if every key has a valid type and
        # every value is itself storable.
        for key, value in obj.items():
            if type(key) not in valid_key_types:
                return True
            if needs_encode(value):
                return True
        return False
    return True
|
Removes all sessions older than ``timeout`` seconds.
Called automatically on every session access.
|
def cleanup(self, timeout):
    '''
    Remove every session whose last access time is older than
    ``timeout`` seconds. Invoked automatically on each session access.
    '''
    # Sessions accessed before this instant are considered expired
    oldest_allowed = time() - timeout
    self.collection.remove({_atime: {'$lt': oldest_allowed}})
|
Loads the plugin manager. Must be done after the initialisation of the client
|
def load(self, client, webpy_app, course_factory, task_factory, database, user_manager, submission_manager, config):
    """ Load the plugin manager. Must happen after the client has been initialised. """
    self._app = webpy_app
    self._task_factory = task_factory
    self._database = database
    self._user_manager = user_manager
    self._submission_manager = submission_manager
    self._loaded = True
    # Import and initialise every configured plugin module, in order
    for plugin_conf in config:
        plugin = importlib.import_module(plugin_conf["plugin_module"])
        plugin.init(self, course_factory, client, plugin_conf)
|
Add a new page to the web application. Only available after the Plugin Manager has been loaded
|
def add_page(self, pattern, classname):
    """ Register a new page on the web application. Only usable once the Plugin Manager has been loaded. """
    if self._loaded:
        self._app.add_mapping(pattern, classname)
    else:
        raise PluginManagerNotLoadedException()
|
Add a task file manager. Only available after the Plugin Manager has been loaded
|
def add_task_file_manager(self, task_file_manager):
    """ Register an additional task file manager. Only usable once the Plugin Manager has been loaded. """
    if self._loaded:
        self._task_factory.add_custom_task_file_manager(task_file_manager)
    else:
        raise PluginManagerNotLoadedException()
|
Register a new authentication method
name
the name of the authentication method, typically displayed by the webapp
input_to_display
Only available after the Plugin Manager has been loaded
|
def register_auth_method(self, auth_method):
    """
    Register a new authentication method with the user manager.

    The method object typically carries a name (displayed by the webapp)
    and the input fields to display. Only usable once the Plugin Manager
    has been loaded.
    """
    if self._loaded:
        self._user_manager.register_auth_method(auth_method)
    else:
        raise PluginManagerNotLoadedException()
|
Create a zip file containing all information about a given course in database and then remove it from db
|
def dump_course(self, courseid):
    """ Create a zip file containing all information about a given course in database and then remove it from db"""
    # Backups are named after the current timestamp, one folder per course
    filepath = os.path.join(self.backup_dir, courseid, datetime.datetime.now().strftime("%Y%m%d.%H%M%S") + ".zip")
    if not os.path.exists(os.path.dirname(filepath)):
        os.makedirs(os.path.dirname(filepath))
    with zipfile.ZipFile(filepath, "w", allowZip64=True) as zipf:
        # Serialize the three course collections as JSON inside the archive
        aggregations = self.database.aggregations.find({"courseid": courseid})
        zipf.writestr("aggregations.json", bson.json_util.dumps(aggregations), zipfile.ZIP_DEFLATED)
        user_tasks = self.database.user_tasks.find({"courseid": courseid})
        zipf.writestr("user_tasks.json", bson.json_util.dumps(user_tasks), zipfile.ZIP_DEFLATED)
        submissions = self.database.submissions.find({"courseid": courseid})
        zipf.writestr("submissions.json", bson.json_util.dumps(submissions), zipfile.ZIP_DEFLATED)
        # The cursor was consumed by dumps(); rewind it to iterate again
        submissions.rewind()
        for submission in submissions:
            # "input" and "archive" may reference GridFS blobs: copy their
            # raw content into the archive under <key>/<objectid>.data
            for key in ["input", "archive"]:
                if key in submission and type(submission[key]) == bson.objectid.ObjectId:
                    infile = self.submission_manager.get_gridfs().get(submission[key])
                    zipf.writestr(key + "/" + str(submission[key]) + ".data", infile.read(), zipfile.ZIP_DEFLATED)
    self._logger.info("Course %s dumped to backup directory.", courseid)
    # Only wipe the database once the backup archive has been fully written
    self.wipe_course(courseid)
|
Restores a course of given courseid to a date specified in backup (format : YYYYMMDD.HHMMSS)
|
def restore_course(self, courseid, backup):
    """ Restores a course of given courseid to a date specified in backup (format : YYYYMMDD.HHMMSS) """
    # Current data is dropped first: restore is a full replacement
    self.wipe_course(courseid)
    filepath = os.path.join(self.backup_dir, courseid, backup + ".zip")
    with zipfile.ZipFile(filepath, "r") as zipf:
        aggregations = bson.json_util.loads(zipf.read("aggregations.json").decode("utf-8"))
        if len(aggregations) > 0:
            self.database.aggregations.insert(aggregations)
        user_tasks = bson.json_util.loads(zipf.read("user_tasks.json").decode("utf-8"))
        if len(user_tasks) > 0:
            self.database.user_tasks.insert(user_tasks)
        submissions = bson.json_util.loads(zipf.read("submissions.json").decode("utf-8"))
        for submission in submissions:
            # Re-upload GridFS blobs and replace the stale ObjectIds with the new ones
            for key in ["input", "archive"]:
                if key in submission and type(submission[key]) == bson.objectid.ObjectId:
                    submission[key] = self.submission_manager.get_gridfs().put(zipf.read(key + "/" + str(submission[key]) + ".data"))
        if len(submissions) > 0:
            self.database.submissions.insert(submissions)
    # NOTE(review): Collection.insert is deprecated in modern pymongo in
    # favour of insert_many; left unchanged to match the installed driver.
    self._logger.info("Course %s restored from backup directory.", courseid)
|
Erase all course data
|
def delete_course(self, courseid):
    """ Erase all data related to a course: database content, course folder and backups. """
    # Wipes the course (delete database)
    self.wipe_course(courseid)
    # Deletes the course from the factory (entire folder)
    self.course_factory.delete_course(courseid)
    # Removes every backup archive kept for this course
    course_backup_dir = os.path.join(self.backup_dir, courseid)
    if os.path.exists(os.path.dirname(course_backup_dir)):
        for archive in glob.glob(os.path.join(course_backup_dir, '*.zip')):
            os.remove(archive)
    self._logger.info("Course %s files erased.", courseid)
|
GET request
|
def GET_AUTH(self, courseid): # pylint: disable=arguments-differ
    """ GET request: stream a backup zip if ?download= is given, otherwise render the danger-zone page. """
    course, __ = self.get_course_and_check_rights(courseid, allow_all_staff=False)
    data = web.input()
    if "download" in data:
        filepath = os.path.join(self.backup_dir, courseid, data["download"] + '.zip')
        # NOTE(review): this checks that the *parent directory* of the zip
        # exists, not the zip file itself — a missing file would only fail at
        # open() below. Confirm whether os.path.exists(filepath) was intended.
        if not os.path.exists(os.path.dirname(filepath)):
            raise web.notfound()
        web.header('Content-Type', 'application/zip', unique=True)
        web.header('Content-Disposition', 'attachment; filename="' + data["download"] + '.zip' + '"', unique=True)
        return open(filepath, 'rb')
    else:
        return self.page(course)
|
POST request
|
def POST_AUTH(self, courseid): # pylint: disable=arguments-differ
    """ POST request: handle the danger-zone actions (wipe, restore, delete all course data). """
    course, __ = self.get_course_and_check_rights(courseid, allow_all_staff=False)
    msg = ""
    error = False
    data = web.input()
    # The session token acts as a CSRF guard; it is regenerated on each page view
    if not data.get("token", "") == self.user_manager.session_token():
        msg = _("Operation aborted due to invalid token.")
        error = True
    elif "wipeall" in data:
        # The user must retype the course id as a confirmation
        if not data.get("courseid", "") == courseid:
            msg = _("Wrong course id.")
            error = True
        else:
            try:
                # dump_course backs everything up to a zip, then wipes the db
                self.dump_course(courseid)
                msg = _("All course data have been deleted.")
            except:
                # NOTE(review): bare except swallows all failure details;
                # consider logging the exception before reporting the error.
                msg = _("An error occurred while dumping course from database.")
                error = True
    elif "restore" in data:
        if "backupdate" not in data:
            msg = "No backup date selected."
            error = True
        else:
            try:
                # strptime both validates the date format and produces the
                # value echoed back in the confirmation message
                dt = datetime.datetime.strptime(data["backupdate"], "%Y%m%d.%H%M%S")
                self.restore_course(courseid, data["backupdate"])
                msg = _("Course restored to date : {}.").format(dt.strftime("%Y-%m-%d %H:%M:%S"))
            except:
                msg = _("An error occurred while restoring backup.")
                error = True
    elif "deleteall" in data:
        # Same retype-the-course-id confirmation as for wipeall
        if not data.get("courseid", "") == courseid:
            msg = _("Wrong course id.")
            error = True
        else:
            try:
                self.delete_course(courseid)
                # NOTE(review): web.seeother(...) builds a redirect exception
                # but it is not raised here, so the redirect is likely never
                # sent and the page below is rendered instead — confirm intent.
                web.seeother(self.app.get_homepath() + '/index')
            except:
                msg = _("An error occurred while deleting the course data.")
                error = True
    return self.page(course, msg, error)
|
Get all data and display the page
|
def page(self, course, msg="", error=False):
    """ Gather all data and render the danger-zone page. """
    # Generate a fresh random token for this page view; it must match on POST
    token = hashlib.sha512(str(random.getrandbits(256)).encode("utf-8")).hexdigest()
    self.user_manager.set_session_token(token)
    backup_list = self.get_backup_list(course)
    renderer = self.template_helper.get_renderer()
    return renderer.course_admin.danger_zone(course, token, backup_list, msg, error)
|
Show BasicCodeProblem and derivatives
|
def show_input(self, template_helper, language, seed):
    """ Render the input widget for a BasicCodeProblem (and derivatives). """
    translation = self._translations.get(language, gettext.NullTranslations())
    header = ParsableText(self.gettext(language, self._header), "rst", translation=translation)
    renderer = DisplayableCodeProblem.get_renderer(template_helper)
    return str(renderer.tasks.code(self.get_id(), header, 8, 0, self._language, self._optional, self._default))
|
Show InputBox
|
def show_input(self, template_helper, language, seed):
    """ Render the input widget for a single-line code problem (InputBox). """
    translation = self._translations.get(language, gettext.NullTranslations())
    header = ParsableText(self.gettext(language, self._header), "rst", translation=translation)
    renderer = DisplayableCodeSingleLineProblem.get_renderer(template_helper)
    return str(renderer.tasks.single_line_code(self.get_id(), header, "text", 0, self._optional, self._default))
|
Show FileBox
|
def show_input(self, template_helper, language, seed):
    """ Render the upload widget for a file problem (FileBox). """
    translation = self._translations.get(language, gettext.NullTranslations())
    header = ParsableText(self.gettext(language, self._header), "rst", translation=translation)
    renderer = DisplayableFileProblem.get_renderer(template_helper)
    return str(renderer.tasks.file(self.get_id(), header, self._max_size, self._allowed_exts))
|
Show multiple choice problems
|
def show_input(self, template_helper, language, seed):
    """ Show multiple choice problems.

    Picks up to ``self._limit`` choices (0 means "show them all"),
    shuffled deterministically from the task id, problem id and seed so a
    given student always sees the same ordering for the same seed.
    """
    choices = []
    limit = self._limit
    if limit == 0:
        limit = len(self._choices)  # no limit
    # Deterministic RNG: same task/problem/seed triple -> same shuffle
    rand = Random("{}#{}#{}".format(self.get_task().get_id(), self.get_id(), seed))
    # Ensure that the choices are random
    # we *do* need to copy the choices here
    random_order_choices = list(self._choices)
    rand.shuffle(random_order_choices)
    if self._multiple:
        # take only the valid choices in the first pass
        # (all valid answers must be displayed, regardless of the limit)
        for entry in random_order_choices:
            if entry['valid']:
                choices.append(entry)
                limit = limit - 1
        # take everything else in a second pass
        for entry in random_order_choices:
            if limit == 0:
                break
            if not entry['valid']:
                choices.append(entry)
                limit = limit - 1
    else:
        # need to have ONE valid entry
        # first fill with invalid entries, keeping one slot for a valid one
        for entry in random_order_choices:
            if not entry['valid'] and limit > 1:
                choices.append(entry)
                limit = limit - 1
        for entry in random_order_choices:
            if entry['valid'] and limit > 0:
                choices.append(entry)
                limit = limit - 1
    # Shuffle again so the valid answers are not grouped at the end
    rand.shuffle(choices)
    header = ParsableText(self.gettext(language, self._header), "rst",
                          translation=self._translations.get(language, gettext.NullTranslations()))
    # The last argument lets the template translate/render each choice's text lazily
    return str(DisplayableMultipleChoiceProblem.get_renderer(template_helper).tasks.multiple_choice(
        self.get_id(), header, self._multiple, choices,
        lambda text: ParsableText(self.gettext(language, text) if text else "", "rst",
                                  translation=self._translations.get(language, gettext.NullTranslations()))))
|
Show MatchProblem
|
def show_input(self, template_helper, language, seed):
    """ Render the input widget for a match problem. """
    translation = self._translations.get(language, gettext.NullTranslations())
    header = ParsableText(self.gettext(language, self._header), "rst", translation=translation)
    renderer = DisplayableMatchProblem.get_renderer(template_helper)
    return str(renderer.tasks.match(self.get_id(), header))
|
Set submission whose id is `submissionid` to selected grading submission for the given course/task.
Returns a boolean indicating whether the operation was successful or not.
|
def set_selected_submission(self, course, task, submissionid):
    """ Set submission whose id is `submissionid` to selected grading submission for the given course/task.
    Returns a boolean indicating whether the operation was successful or not.
    """
    submission = self.submission_manager.get_submission(submissionid)
    # Do not continue if submission does not exist or is not owned by current user
    if not submission:
        return False
    # Check if the submission if from this task/course!
    if submission["taskid"] != task.get_id() or submission["courseid"] != course.get_id():
        return False
    is_staff = self.user_manager.has_staff_rights_on_course(course, self.user_manager.session_username())
    # Do not enable submission selection after deadline
    if not task.get_accessible_time().is_open() and not is_staff:
        return False
    # Only allow to set submission if the student must choose their best submission themselves
    if task.get_evaluate() != 'student' and not is_staff:
        return False
    # Check if task is done per group/team
    if task.is_group_task() and not is_staff:
        # Fetch only the aggregation group that contains the current user
        group = self.database.aggregations.find_one(
            {"courseid": task.get_course_id(), "groups.students": self.user_manager.session_username()},
            {"groups": {"$elemMatch": {"students": self.user_manager.session_username()}}})
        students = group["groups"][0]["students"]
    else:
        students = [self.user_manager.session_username()]
    # Check if group/team is the same
    # NOTE(review): this compares the list `students` with submission["username"],
    # which presumably is also a list of usernames for group submissions — confirm.
    if students == submission["username"]:
        # Mark this submission as the evaluated one for every member of the group
        self.database.user_tasks.update_many(
            {"courseid": task.get_course_id(), "taskid": task.get_id(), "username": {"$in": students}},
            {"$set": {"submissionid": submission['_id'],
                      "grade": submission['grade'],
                      "succeeded": submission["result"] == "success"}})
        return True
    else:
        return False
|
GET request
|
def GET(self, courseid, taskid, isLTI):
    """ GET request: either stream a previously-submitted file, or render the task page. """
    username = self.user_manager.session_username()
    # Fetch the course
    try:
        course = self.course_factory.get_course(courseid)
    except exceptions.CourseNotFoundException as ex:
        raise web.notfound(str(ex))
    # In LTI mode the user is auto-registered on first access
    if isLTI and not self.user_manager.course_is_user_registered(course):
        self.user_manager.course_register_user(course, force=True)
    if not self.user_manager.course_is_open_to_user(course, username, isLTI):
        return self.template_helper.get_renderer().course_unavailable()
    # Fetch the task
    try:
        # Only keep the tasks visible to this user; prev/next navigation is computed on this subset
        tasks = OrderedDict((tid, t) for tid, t in course.get_tasks().items() if self.user_manager.task_is_visible_by_user(t, username, isLTI))
        task = tasks[taskid]
    except exceptions.TaskNotFoundException as ex:
        raise web.notfound(str(ex))
    if not self.user_manager.task_is_visible_by_user(task, username, isLTI):
        return self.template_helper.get_renderer().task_unavailable()
    # Compute previous and next taskid
    keys = list(tasks.keys())
    index = keys.index(taskid)
    previous_taskid = keys[index - 1] if index > 0 else None
    next_taskid = keys[index + 1] if index < len(keys) - 1 else None
    self.user_manager.user_saw_task(username, courseid, taskid)
    is_staff = self.user_manager.has_staff_rights_on_course(course, username)
    userinput = web.input()
    if "submissionid" in userinput and "questionid" in userinput:
        # Download a previously submitted file
        submission = self.submission_manager.get_submission(userinput["submissionid"], user_check=not is_staff)
        if submission is None:
            raise web.notfound()
        sinput = self.submission_manager.get_input_from_submission(submission, True)
        if userinput["questionid"] not in sinput:
            raise web.notfound()
        if isinstance(sinput[userinput["questionid"]], dict):
            # File uploaded previously
            mimetypes.init()
            # NOTE(review): guess_type may return (None, None); the header
            # would then be set to None — confirm acceptable downstream.
            mime_type = mimetypes.guess_type(urllib.request.pathname2url(sinput[userinput["questionid"]]['filename']))
            web.header('Content-Type', mime_type[0])
            return sinput[userinput["questionid"]]['value']
        else:
            # Other file, download it as text
            web.header('Content-Type', 'text/plain')
            return sinput[userinput["questionid"]]
    else:
        # Generate random inputs and save it into db
        # Seed depends on user/task/course; adding time.time() forces a fresh
        # draw when the task regenerates its random input on every view
        random.seed(str(username if username is not None else "") + taskid + courseid + str(
            time.time() if task.regenerate_input_random() else ""))
        random_input_list = [random.random() for i in range(task.get_number_input_random())]
        user_task = self.database.user_tasks.find_one_and_update(
            {
                "courseid": task.get_course_id(),
                "taskid": task.get_id(),
                "username": self.user_manager.session_username()
            },
            {
                "$set": {"random": random_input_list}
            },
            return_document=ReturnDocument.AFTER
        )
        # Currently-evaluated submission for this user, if any
        submissionid = user_task.get('submissionid', None)
        eval_submission = self.database.submissions.find_one({'_id': ObjectId(submissionid)}) if submissionid else None
        students = [self.user_manager.session_username()]
        if task.is_group_task() and not self.user_manager.has_admin_rights_on_course(course, username):
            # Fetch only the aggregation group containing the current user
            group = self.database.aggregations.find_one(
                {"courseid": task.get_course_id(), "groups.students": self.user_manager.session_username()},
                {"groups": {"$elemMatch": {"students": self.user_manager.session_username()}}})
            if group is not None and len(group["groups"]) > 0:
                students = group["groups"][0]["students"]
            # we don't care for the other case, as the student won't be able to submit.
        submissions = self.submission_manager.get_user_submissions(task) if self.user_manager.session_logged_in() else []
        user_info = self.database.users.find_one({"username": username})
        # Display the task itself
        return self.template_helper.get_renderer().task(user_info, course, task, submissions,
                                                        students, eval_submission, user_task, previous_taskid, next_taskid, self.webterm_link, random_input_list)
|
POST a new submission
|
def POST(self, courseid, taskid, isLTI):
    """ POST a new submission, or dispatch one of the @action sub-requests
    (submit / check / load_submission_input / kill / set_submission). """
    username = self.user_manager.session_username()
    course = self.course_factory.get_course(courseid)
    if not self.user_manager.course_is_open_to_user(course, username, isLTI):
        return self.template_helper.get_renderer().course_unavailable()
    task = course.get_task(taskid)
    if not self.user_manager.task_is_visible_by_user(task, username, isLTI):
        return self.template_helper.get_renderer().task_unavailable()
    self.user_manager.user_saw_task(username, courseid, taskid)
    is_staff = self.user_manager.has_staff_rights_on_course(course, username)
    is_admin = self.user_manager.has_admin_rights_on_course(course, username)
    userinput = web.input()
    if "@action" in userinput and userinput["@action"] == "submit":
        # Verify rights
        if not self.user_manager.task_can_user_submit(task, username, isLTI):
            return json.dumps({"status": "error", "text": _("You are not allowed to submit for this task.")})
        # Retrieve input random and check still valid
        # (the page the user submitted from must match the stored random inputs)
        random_input = self.database.user_tasks.find_one({"courseid": task.get_course_id(), "taskid": task.get_id(), "username": username}, { "random": 1 })
        random_input = random_input["random"] if "random" in random_input else []
        for i in range(0, len(random_input)):
            s = "@random_" + str(i)
            if s not in userinput or float(userinput[s]) != random_input[i]:
                return json.dumps({"status": "error", "text": _("Your task has been regenerated. This current task is outdated.")})
        # Reparse user input with array for multiple choices
        init_var = {
            problem.get_id(): problem.input_type()()
            for problem in task.get_problems() if problem.input_type() in [dict, list]
        }
        userinput = task.adapt_input_for_backend(web.input(**init_var))
        if not task.input_is_consistent(userinput, self.default_allowed_file_extensions, self.default_max_file_size):
            web.header('Content-Type', 'application/json')
            return json.dumps({"status": "error", "text": _("Please answer to all the questions and verify the extensions of the files "
                                                            "you want to upload. Your responses were not tested.")})
        del userinput['@action']
        # Get debug info if the current user is an admin
        debug = is_admin
        if "@debug-mode" in userinput:
            if userinput["@debug-mode"] == "ssh" and debug:
                debug = "ssh"
            del userinput['@debug-mode']
        # Start the submission
        try:
            submissionid, oldsubids = self.submission_manager.add_job(task, userinput, debug)
            web.header('Content-Type', 'application/json')
            return json.dumps({"status": "ok", "submissionid": str(submissionid), "remove": oldsubids, "text": _("<b>Your submission has been sent...</b>")})
        except Exception as ex:
            web.header('Content-Type', 'application/json')
            return json.dumps({"status": "error", "text": str(ex)})
    elif "@action" in userinput and userinput["@action"] == "check" and "submissionid" in userinput:
        # Polling endpoint: report the state of a pending submission
        result = self.submission_manager.get_submission(userinput['submissionid'], user_check=not is_staff)
        if result is None:
            web.header('Content-Type', 'application/json')
            return json.dumps({'status': "error", "text": _("Internal error")})
        elif self.submission_manager.is_done(result, user_check=not is_staff):
            web.header('Content-Type', 'application/json')
            result = self.submission_manager.get_input_from_submission(result)
            result = self.submission_manager.get_feedback_from_submission(result, show_everything=is_staff)
            # user_task always exists as we called user_saw_task before
            user_task = self.database.user_tasks.find_one({
                "courseid":task.get_course_id(),
                "taskid": task.get_id(),
                "username": {"$in": result["username"]}
            })
            default_submissionid = user_task.get('submissionid', None)
            if default_submissionid is None:
                # This should never happen, as user_manager.update_user_stats is called whenever a submission is done.
                return json.dumps({'status': "error", "text": _("Internal error")})
            return self.submission_to_json(task, result, is_admin, False, default_submissionid == result['_id'], tags=task.get_tags())
        else:
            web.header('Content-Type', 'application/json')
            return self.submission_to_json(task, result, is_admin)
    elif "@action" in userinput and userinput["@action"] == "load_submission_input" and "submissionid" in userinput:
        # Reload an old submission (input + feedback) into the page
        submission = self.submission_manager.get_submission(userinput["submissionid"], user_check=not is_staff)
        submission = self.submission_manager.get_input_from_submission(submission)
        submission = self.submission_manager.get_feedback_from_submission(submission, show_everything=is_staff)
        if not submission:
            raise web.notfound()
        web.header('Content-Type', 'application/json')
        return self.submission_to_json(task, submission, is_admin, True, tags=task.get_tags())
    elif "@action" in userinput and userinput["@action"] == "kill" and "submissionid" in userinput:
        self.submission_manager.kill_running_submission(userinput["submissionid"]) # ignore return value
        web.header('Content-Type', 'application/json')
        return json.dumps({'status': 'done'})
    elif "@action" in userinput and userinput["@action"] == "set_submission" and "submissionid" in userinput:
        # Choose which submission counts for grading (student-evaluated tasks only)
        web.header('Content-Type', 'application/json')
        if task.get_evaluate() != 'student':
            return json.dumps({'status': "error"})
        if self.set_selected_submission(course, task, userinput["submissionid"]):
            return json.dumps({'status': 'done'})
        else:
            return json.dumps({'status': 'error'})
    else:
        raise web.notfound()
|
Converts a submission to json (keeps only needed fields)
|
def submission_to_json(self, task, data, debug, reloading=False, replace=False, tags={}):
    """ Converts a submission to json (keeps only needed fields).

    :param task: the task the submission belongs to
    :param data: the raw submission document
    :param debug: truthy for admins; includes the full document and auto-tags
    :param reloading: True when reloading an old submission into the page
    :param replace: True when this submission replaces the evaluated one
    :param tags: task tags as returned by task.get_tags()
    """
    # NOTE(review): `tags={}` is a mutable default argument; it is never
    # mutated below so behaviour is safe, but a None default would be cleaner.
    # A submission with an active SSH debug server only reports its endpoint
    if "ssh_host" in data:
        return json.dumps({'status': "waiting", 'text': "<b>SSH server active</b>",
                           'ssh_host': data["ssh_host"], 'ssh_port': data["ssh_port"],
                           'ssh_password': data["ssh_password"]})
    # Here we are waiting. Let's send some useful information.
    waiting_data = self.submission_manager.get_job_queue_info(data["jobid"]) if "jobid" in data else None
    if waiting_data is not None and not reloading:
        # nb_tasks_before == -1 means the job is already being graded
        nb_tasks_before, approx_wait_time = waiting_data
        wait_time = round(approx_wait_time)
        if nb_tasks_before == -1 and wait_time <= 0:
            text = _("<b>INGInious is currently grading your answers.<b/> (almost done)")
        elif nb_tasks_before == -1:
            text = _("<b>INGInious is currently grading your answers.<b/> (Approx. wait time: {} seconds)").format(
                wait_time)
        elif nb_tasks_before == 0:
            text = _("<b>You are next in the waiting queue!</b>")
        elif nb_tasks_before == 1:
            text = _("<b>There is one task in front of you in the waiting queue.</b>")
        else:
            text = _("<b>There are {} tasks in front of you in the waiting queue.</b>").format(nb_tasks_before)
        return json.dumps({'status': "waiting", 'text': text})
    tojson = {
        'status': data['status'],
        'result': data.get('result', 'crash'),
        'id': str(data["_id"]),
        'submitted_on': str(data['submitted_on']),
        'grade': str(data.get("grade", 0.0)),
        'replace': replace and not reloading # Replace the evaluated submission
    }
    if "text" in data:
        tojson["text"] = data["text"]
    if "problems" in data:
        tojson["problems"] = data["problems"]
    if debug:
        tojson["debug"] = data
    # Build the human-readable status message from the grading result
    if tojson['status'] == 'waiting':
        tojson["text"] = _("<b>Your submission has been sent...</b>")
    elif tojson["result"] == "failed":
        tojson["text"] = _("There are some errors in your answer. Your score is {score}%.").format(score=data["grade"])
    elif tojson["result"] == "success":
        tojson["text"] = _("Your answer passed the tests! Your score is {score}%.").format(score=data["grade"])
    elif tojson["result"] == "timeout":
        tojson["text"] = _("Your submission timed out. Your score is {score}%.").format(score=data["grade"])
    elif tojson["result"] == "overflow":
        tojson["text"] = _("Your submission made an overflow. Your score is {score}%.").format(score=data["grade"])
    elif tojson["result"] == "killed":
        tojson["text"] = _("Your submission was killed.")
    else:
        tojson["text"] = _("An internal error occurred. Please retry later. "
                           "If the error persists, send an email to the course administrator.")
    # Prepend the submission id and append the container-provided feedback text
    tojson["text"] = "<b>" + tojson["text"] + " " + _("[Submission #{submissionid}]").format(submissionid=data["_id"]) + "</b>" + data.get("text", "")
    tojson["text"] = self.plugin_manager.call_hook_recursive("feedback_text", task=task, submission=data, text=tojson["text"])["text"]
    if reloading:
        # Set status='ok' because we are reloading an old submission.
        tojson["status"] = 'ok'
        # And also include input
        tojson["input"] = data.get('input', {})
    if "tests" in data:
        tojson["tests"] = {}
        if tags:
            for tag in tags[0]+tags[1]: # Tags only visible for admins should not appear in the json for students.
                if (tag.is_visible_for_student() or debug) and tag.get_id() in data["tests"]:
                    tojson["tests"][tag.get_id()] = data["tests"][tag.get_id()]
        if debug: #We add also auto tags when we are admin
            for tag in data["tests"]:
                if tag.startswith("*auto-tag-"):
                    tojson["tests"][tag] = data["tests"][tag]
    # allow plugins to insert javascript to be run in the browser after the submission is loaded
    tojson["feedback_script"] = "".join(self.plugin_manager.call_hook("feedback_script", task=task, submission=data))
    return json.dumps(tojson, default=str)
|
GET request
|
def GET(self, courseid, taskid, path):  # pylint: disable=arguments-differ
    """
    Serve a file from the public folder of a task (or from the course-wide
    "$common" pseudo-task).

    :param courseid: id of the course
    :param taskid: id of the task, or "$common" for course-wide files
    :param path: path of the requested file, relative to the public folder
    """
    try:
        course = self.course_factory.get_course(courseid)
        if not self.user_manager.course_is_open_to_user(course):
            return self.template_helper.get_renderer().course_unavailable()
        # Normalize the (unquoted) path to neutralize ".." traversal attempts
        path_norm = posixpath.normpath(urllib.parse.unquote(path))
        if taskid == "$common":
            public_folder = course.get_fs().from_subfolder("$common").from_subfolder("public")
        else:
            task = course.get_task(taskid)
            if not self.user_manager.task_is_visible_by_user(task):  # ignore LTI check here
                return self.template_helper.get_renderer().task_unavailable()
            public_folder = task.get_fs().from_subfolder("public")
        (method, mimetype_or_none, file_or_url) = public_folder.distribute(path_norm, False)
        if method == "local":
            web.header('Content-Type', mimetype_or_none)
            return file_or_url
        elif method == "url":
            raise web.redirect(file_or_url)
        else:
            raise web.notfound()
    except web.HTTPError as error_or_redirect:
        # web.py signals redirects/404s via exceptions: let them pass through untouched
        raise error_or_redirect
    except Exception:
        # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt are not
        # swallowed; other errors are hidden behind a 404 outside of debug mode.
        if web.config.debug:
            raise
        else:
            raise web.notfound()
|
Add a job in the queue
:param username:
:param courseid:
:param taskid:
:param consumer_key:
:param service_url:
:param result_id:
|
def add(self, username, courseid, taskid, consumer_key, service_url, result_id):
    """ Queue a grade send-back job for the LIS outcome service.

    :param username: user whose grade must be sent back
    :param courseid: course id
    :param taskid: task id
    :param consumer_key: LTI consumer key
    :param service_url: LIS outcome service URL
    :param result_id: LIS result sourcedid
    """
    query = {
        "username": username,
        "courseid": courseid,
        "taskid": taskid,
        "service_url": service_url,
        "consumer_key": consumer_key,
        "result_id": result_id,
    }
    # Upsert the entry and reset its attempt counter; asking for the BEFORE
    # document tells us whether the entry already existed.
    previous = self._database.lis_outcome_queue.find_one_and_update(
        query, {"$set": {"nb_attempt": 0}},
        return_document=ReturnDocument.BEFORE, upsert=True)
    if previous is None:  # the entry was just created: schedule it
        self._add_to_queue(self._database.lis_outcome_queue.find_one(query))
|
Increment the number of attempt for an entry and
:param mongo_id:
:return:
|
def _increment_attempt(self, mongo_id):
    """ Increase the attempt counter of a queue entry and re-schedule it.

    :param mongo_id: MongoDB ``_id`` of the entry
    """
    updated_entry = self._database.lis_outcome_queue.find_one_and_update(
        {"_id": mongo_id}, {"$inc": {"nb_attempt": 1}})
    self._add_to_queue(updated_entry)
|
Checks if user is authenticated and calls POST_AUTH or performs login and calls GET_AUTH.
Otherwise, returns the login template.
|
def GET(self):
    """
    Entry point of the LTI login page: if an INGInious account is already bound
    to the current LTI identity, connect it; otherwise show the login template.
    """
    # A valid LTI session (created by the basic-launch-request) is mandatory
    data = self.user_manager.session_lti_info()
    if data is None:
        raise web.notfound()
    try:
        # data["task"] is a (courseid, taskid) pair; verify the consumer key
        # is one of the keys registered for this course
        course = self.course_factory.get_course(data["task"][0])
        if data["consumer_key"] not in course.lti_keys().keys():
            raise Exception()
    except:
        # Unknown course or consumer key: the LTI data cannot be trusted
        return self.template_helper.get_renderer().lti_bind(False, "", None, "Invalid LTI data")
    # Look up an INGInious account already bound to this (course, consumer, LTI user)
    user_profile = self.database.users.find_one({"ltibindings." + data["task"][0] + "." + data["consumer_key"]: data["username"]})
    if user_profile:
        # A binding exists: transparently authenticate the user
        self.user_manager.connect_user(user_profile["username"], user_profile["realname"], user_profile["email"], user_profile["language"])
    if self.user_manager.session_logged_in():
        # Authenticated (possibly just above): go straight to the task page
        raise web.seeother(self.app.get_homepath() + "/lti/task")
    return self.template_helper.get_renderer().lti_login(False)
|
Verify and parse the data for the LTI basic launch
|
def _parse_lti_data(self, courseid, taskid):
    """ Verify and parse the data for the LTI basic launch.

    Validates the OAuth signature of the basic-launch-request against the
    course's registered LTI keys, then creates an LTI session.

    :param courseid: id of the course referenced by the launch request
    :param taskid: id of the task referenced by the launch request
    :return: a tuple (session_id, loggedin)
    :raises web.notfound: when the course does not exist
    :raises web.forbidden: when the request cannot be validated or lacks
        the outcome-service parameters required to send grades back
    """
    post_input = web.webapi.rawinput("POST")
    self.logger.debug('_parse_lti_data:' + str(post_input))
    try:
        course = self.course_factory.get_course(courseid)
    except exceptions.CourseNotFoundException as ex:
        raise web.notfound(str(ex))
    try:
        # Verify the OAuth 1.0 signature of the launch request
        test = LTIWebPyToolProvider.from_webpy_request()
        validator = LTIValidator(self.database.nonce, course.lti_keys())
        verified = test.is_valid_request(validator)
    except Exception:
        self.logger.exception("...")
        self.logger.info("Error while validating LTI request for %s", str(post_input))
        raise web.forbidden(_("Error while validating LTI request"))
    if verified:
        self.logger.debug('parse_lit_data for %s', str(post_input))
        user_id = post_input["user_id"]
        # Roles come as a comma-separated list; default to Student
        roles = post_input.get("roles", "Student").split(",")
        realname = self._find_realname(post_input)
        email = post_input.get("lis_person_contact_email_primary", "")
        # Outcome-service parameters, required to push the grade back to the consumer
        lis_outcome_service_url = post_input.get("lis_outcome_service_url", None)
        outcome_result_id = post_input.get("lis_result_sourcedid", None)
        consumer_key = post_input["oauth_consumer_key"]
        if course.lti_send_back_grade():
            if lis_outcome_service_url is None or outcome_result_id is None:
                self.logger.info('Error: lis_outcome_service_url is None but lti_send_back_grade is True')
                raise web.forbidden(_("In order to send grade back to the TC, INGInious needs the parameters lis_outcome_service_url and "
                                      "lis_outcome_result_id in the LTI basic-launch-request. Please contact your administrator."))
        else:
            # Grade send-back disabled for this course: discard outcome parameters
            lis_outcome_service_url = None
            outcome_result_id = None
        # Purely informative fields about the tool consumer
        tool_name = post_input.get('tool_consumer_instance_name', 'N/A')
        tool_desc = post_input.get('tool_consumer_instance_description', 'N/A')
        tool_url = post_input.get('tool_consumer_instance_url', 'N/A')
        context_title = post_input.get('context_title', 'N/A')
        context_label = post_input.get('context_label', 'N/A')
        session_id = self.user_manager.create_lti_session(user_id, roles, realname, email, courseid, taskid, consumer_key,
                                                          lis_outcome_service_url, outcome_result_id, tool_name, tool_desc, tool_url,
                                                          context_title, context_label)
        loggedin = self.user_manager.attempt_lti_login()
        return session_id, loggedin
    else:
        self.logger.info("Couldn't validate LTI request")
        raise web.forbidden(_("Couldn't validate LTI request"))
|
Returns the most appropriate name to identify the user
|
def _find_realname(self, post_input):
""" Returns the most appropriate name to identify the user """
# First, try the full name
if "lis_person_name_full" in post_input:
return post_input["lis_person_name_full"]
if "lis_person_name_given" in post_input and "lis_person_name_family" in post_input:
return post_input["lis_person_name_given"] + post_input["lis_person_name_family"]
# Then the email
if "lis_person_contact_email_primary" in post_input:
return post_input["lis_person_contact_email_primary"]
# Then only part of the full name
if "lis_person_name_family" in post_input:
return post_input["lis_person_name_family"]
if "lis_person_name_given" in post_input:
return post_input["lis_person_name_given"]
return post_input["user_id"]
|
GET request
|
def GET_AUTH(self):  # pylint: disable=arguments-differ
    """ Display the authentication bindings of the current user. """
    methods = self.user_manager.get_auth_methods()
    profile = self.database.users.find_one({"username": self.user_manager.session_username()})
    current_bindings = profile.get("bindings", {})
    return self.template_helper.get_renderer().preferences.bindings(current_bindings, methods, "", False)
|
POST request
|
def POST_AUTH(self):  # pylint: disable=arguments-differ
    """ POST request: add or revoke an authentication binding for the current user. """
    msg = ""
    error = False
    user_data = self.database.users.find_one({"username": self.user_manager.session_username()})
    if not user_data:
        raise web.notfound()
    user_input = web.input()
    auth_methods = self.user_manager.get_auth_methods()
    if "auth_binding" in user_input:
        # Start a binding with a new authentication method
        auth_binding = user_input["auth_binding"]
        if auth_binding not in auth_methods.keys():
            error = True
            msg = _("Incorrect authentication binding.")
        elif auth_binding not in user_data.get("bindings", {}):
            raise web.seeother("/auth/signin/" + auth_binding)
    elif "revoke_auth_binding" in user_input:
        # Revoke an existing binding, but never leave the account without a way to log in
        auth_id = user_input["revoke_auth_binding"]
        if auth_id not in auth_methods.keys():
            error = True
            msg = _("Incorrect authentication binding.")
        elif len(user_data.get("bindings", {}).keys()) > 1 or "password" in user_data:
            user_data = self.database.users.find_one_and_update({"username": self.user_manager.session_username()},
                                                                {"$unset": {"bindings." + auth_id: 1}})
            # find_one_and_update returns the pre-update document by default:
            # drop the revoked binding locally so the rendered page is up-to-date.
            user_data.get("bindings", {}).pop(auth_id, None)
        else:
            error = True
            msg = _("You must set a password before removing all bindings.")
    bindings = user_data.get("bindings", {})
    return self.template_helper.get_renderer().preferences.bindings(bindings, auth_methods, msg, error)
|
Compute statistics about submissions and tags.
This function returns a tuple of lists following the format describe below:
(
[('Number of submissions', 13), ('Evaluation submissions', 2), …],
[(<tag>, '61%', '50%'), (<tag>, '76%', '100%'), …]
)
|
def compute_statistics(tasks, data, ponderation):
    """
    Compute statistics about submissions and tags.
    Returns a tuple (global_stats, tag_stats) in the following format:
    (
        [('Number of submissions', 13), ('Evaluation submissions', 2), …],
        [(<tag>, '61%', '50%'), (<tag>, '76%', '100%'), …]
    )
    """
    # counters[tag][username][taskid] = [submissions, submissions_with_tag_ok,
    #                                    evaluation_submissions, evaluation_with_tag_ok]
    counters = {}
    for submission in data:
        task = tasks[submission["taskid"]]
        username = "".join(submission["username"])
        for tag in task.get_tags()[0] + task.get_tags()[1]:
            cell = counters.setdefault(tag, {}).setdefault(username, {}).setdefault(submission["taskid"], [0, 0, 0, 0])
            tag_ok = "tests" in submission and tag.get_id() in submission["tests"] and submission["tests"][tag.get_id()]
            cell[0] += 1
            if tag_ok:
                cell[1] += 1
            if submission["best"]:
                cell[2] += 1
                if tag_ok:
                    cell[3] += 1
    output = []
    for tag, per_user in counters.items():
        if not ponderation:
            # Aggregate all submissions of all students together
            totals = [0, 0, 0, 0]
            for tasks_of_user in per_user.values():
                for cell in tasks_of_user.values():
                    for i in range(4):
                        totals[i] += cell[i]
            output.append((tag, 100 * safe_div(totals[1], totals[0]), 100 * safe_div(totals[3], totals[2])))
        else:
            # Ponderation: average the per-(student, task) success ratios instead
            ratios_all, ratios_best = [], []
            for tasks_of_user in per_user.values():
                for cell in tasks_of_user.values():
                    ratios_all.append(safe_div(cell[1], cell[0]))
                    ratios_best.append(safe_div(cell[3], cell[2]))
            output.append((tag, 100 * safe_div(sum(ratios_all), len(ratios_all)),
                           100 * safe_div(sum(ratios_best), len(ratios_best))))
    return (fast_stats(data), output)
|
Compute base statistics about submissions
|
def fast_stats(data):
    """ Compute base statistics (total / evaluation / succeeded / failed) about submissions. """
    nb_total = len(data)
    nb_best = sum(1 for s in data if "best" in s and s["best"])
    nb_best_ok = sum(1 for s in data
                     if "best" in s and s["best"] and "result" in s and s["result"] == "success")
    return [
        (_("Number of submissions"), nb_total),
        (_("Evaluation submissions (Total)"), nb_best),
        (_("Evaluation submissions (Succeeded)"), nb_best_ok),
        (_("Evaluation submissions (Failed)"), nb_best - nb_best_ok),
        # add here new common statistics
    ]
|
GET request
|
def GET_AUTH(self, courseid, f=None, t=None):  # pylint: disable=arguments-differ
    """ GET request: display course statistics for an optional date range.

    :param courseid: id of the course
    :param f: start of the range, "YYYY-MM-DDTHH:MM" prefix (optional)
    :param t: end of the range, same format (optional)
    """
    course, __ = self.get_course_and_check_rights(courseid)
    tasks = course.get_tasks()
    now = datetime.now().replace(minute=0, second=0, microsecond=0)
    error = None
    if f is None and t is None:
        # Default view: the last two weeks
        daterange = [now - timedelta(days=14), now]
    else:
        try:
            # Only the "YYYY-MM-DDTHH:MM" prefix of the input is significant
            daterange = [datetime.strptime(x[0:16], "%Y-%m-%dT%H:%M") for x in (f, t)]
        except (ValueError, TypeError):
            # ValueError: malformed date; TypeError: only one of f/t given
            error = "Invalid dates"
            daterange = [now - timedelta(days=14), now]
    stats_tasks = self._tasks_stats(courseid, tasks, daterange)
    stats_users = self._users_stats(courseid, daterange)
    stats_graph = self._graph_stats(courseid, daterange)
    return self.template_helper.get_renderer().course_admin.stats(course, stats_graph, stats_tasks, stats_users, daterange, error)
|
Register a type of message to be sent.
After this message has been sent, if the answer is received, callback_recv is called.
If the remote server becomes unreachable, calls callback_abrt.
:param send_msg: class of message to be sent
:param recv_msg: message that the server should send in response
:param get_key: receive a `send_msg` or `recv_msg` as input, and returns the "key" (global identifier) of the message
:param coroutine_recv: callback called (on the event loop) when the transaction succeed, with, as input, `recv_msg` and eventually other args
given to .send
:param coroutine_abrt: callback called (on the event loop) when the transaction fails, with, as input, `recv_msg` and eventually other args
given to .send
:param inter_msg: a list of `(message_class, coroutine_recv)`, that can be received during the resolution of the transaction but will not
finalize it. `get_key` is used on these `message_class` to get the key of the transaction.
|
def _register_transaction(self, send_msg, recv_msg, coroutine_recv, coroutine_abrt, get_key=None, inter_msg=None):
"""
Register a type of message to be sent.
After this message has been sent, if the answer is received, callback_recv is called.
If the remote server becomes dones, calls callback_abrt.
:param send_msg: class of message to be sent
:param recv_msg: message that the server should send in response
:param get_key: receive a `send_msg` or `recv_msg` as input, and returns the "key" (global identifier) of the message
:param coroutine_recv: callback called (on the event loop) when the transaction succeed, with, as input, `recv_msg` and eventually other args
given to .send
:param coroutine_abrt: callback called (on the event loop) when the transaction fails, with, as input, `recv_msg` and eventually other args
given to .send
:param inter_msg: a list of `(message_class, coroutine_recv)`, that can be received during the resolution of the transaction but will not
finalize it. `get_key` is used on these `message_class` to get the key of the transaction.
"""
if get_key is None:
get_key = lambda x: None
if inter_msg is None:
inter_msg = []
# format is (other_msg, get_key, recv_handler, abrt_handler,responsible_for)
# where responsible_for is the list of classes whose transaction will be killed when this message is received.
self._msgs_registered[send_msg.__msgtype__] = ([recv_msg.__msgtype__] + [x.__msgtype__ for x, _ in inter_msg], get_key, None, None, [])
self._msgs_registered[recv_msg.__msgtype__] = (
[], get_key, coroutine_recv, coroutine_abrt, [recv_msg.__msgtype__] + [x.__msgtype__ for x, _ in inter_msg])
self._transactions[recv_msg.__msgtype__] = {}
for msg_class, handler in inter_msg:
self._msgs_registered[msg_class.__msgtype__] = ([], get_key, handler, None, [])
self._transactions[msg_class.__msgtype__] = {}
|
Create a transaction with the distant server
:param msg: message to be sent
:param args: args to be sent to the coroutines given to `register_transaction`
:param kwargs: kwargs to be sent to the coroutines given to `register_transaction`
|
async def _create_transaction(self, msg, *args, **kwargs):
    """
    Open (or join) a transaction with the distant server.
    :param msg: message to be sent
    :param args: args forwarded to the coroutines given to `_register_transaction`
    :param kwargs: kwargs forwarded to the coroutines given to `_register_transaction`
    """
    expected_msgs, get_key, _recv, _abrt, _resp = self._msgs_registered[msg.__msgtype__]
    key = get_key(msg)
    if key in self._transactions[expected_msgs[0]]:
        # A request with the same key is already in flight: just piggyback on it
        for msgtype in expected_msgs:
            self._transactions[msgtype][key].append((args, kwargs))
    else:
        # Otherwise register ourselves as the first waiter and actually send the message
        for msgtype in expected_msgs:
            self._transactions[msgtype][key] = [(args, kwargs)]
        await ZMQUtils.send(self._socket, msg)
|
Task that ensures Pings are sent periodically to the distant server
:return:
|
async def _do_ping(self):
    """
    Periodically ping the distant server; trigger a reconnection once more
    than 10 pings remain unanswered.
    """
    try:
        while True:
            await asyncio.sleep(1)
            if self._ping_count > 10:
                # Too many unanswered pings: assume the server is gone
                await self._reconnect()
            else:
                self._ping_count += 1
                await ZMQUtils.send(self._socket, Ping())
    except (asyncio.CancelledError, KeyboardInterrupt):
        return
|
Called when the remote server is inaccessible and the connection has to be restarted
|
async def _reconnect(self):
    """
    Called when the remote server is inaccessible and the connection has to be restarted.
    Aborts every pending transaction, runs the disconnection hook, cancels the
    restartable tasks and re-runs the whole start sequence on a fresh socket.
    """
    # 1. Close all transactions
    for msg_class in self._transactions:
        _1, _2, _3, coroutine_abrt, _4 = self._msgs_registered[msg_class]
        if coroutine_abrt is not None:
            # Notify every waiter of every pending key that the transaction is aborted
            for key in self._transactions[msg_class]:
                for args, kwargs in self._transactions[msg_class][key]:
                    self._loop.create_task(coroutine_abrt(key, *args, **kwargs))
        self._transactions[msg_class] = {}
    # 2. Call on_disconnect
    await self._on_disconnect()
    # 3. Stop tasks
    for task in self._restartable_tasks:
        task.cancel()
    self._restartable_tasks = []
    # 4. Restart socket
    self._socket.disconnect(self._router_addr)
    # 5. Re-do start sequence
    await self.client_start()
|
Starts the client
|
async def client_start(self):
    """ Connect the socket, run the connection hook and spawn the restartable loops. """
    await self._start_socket()
    await self._on_connect()
    self._ping_count = 0
    # Keep references to the loops so that _reconnect() can cancel them later
    task_for_socket = self._loop.create_task(self._run_socket())
    task_for_ping = self._loop.create_task(self._do_ping())
    self._restartable_tasks += [task_for_ping, task_for_socket]
|
GET request
|
def GET_AUTH(self, courseid):  # pylint: disable=arguments-differ
    """ Display the WebDAV access page of the course (admins only). """
    course, _unused = self.get_course_and_check_rights(courseid)
    return self.page(course)
|
Get all data and display the page
|
def page(self, course):
    """ Gather the WebDAV URL and the user's credentials, then render the page. """
    if not self.webdav_host:
        # The feature is disabled when no WebDAV host is configured
        raise web.notfound()
    dav_url = "/".join([self.webdav_host, course.get_id()])
    username = self.user_manager.session_username()
    apikey = self.user_manager.session_api_key()
    return self.template_helper.get_renderer().course_admin.webdav(course, dav_url, username, apikey)
|
Open existing feedback file
|
def load_feedback():
    """ Open the existing feedback file and return its content as a dict.

    Returns an empty dict when the file does not exist yet, and a "crash"
    feedback when its content is not valid JSON (i.e. was tampered with).
    """
    if os.path.exists(_feedback_file):
        # Context manager guarantees the handle is closed even on read errors
        with open(_feedback_file, 'r') as f:
            cont = f.read()
    else:
        cont = '{}'
    try:
        result = json.loads(cont) if cont else {}
    except ValueError:
        result = {"result":"crash", "text":"Feedback file has been modified by user !"}
    return result
|
Save feedback file
|
def save_feedback(rdict):
    """ Serialize *rdict* as JSON and write it to the feedback file.

    :param rdict: feedback dict, as returned by load_feedback()
    """
    # Ensure the output folder exists (no-op if already there)
    os.makedirs(_feedback_dir, exist_ok=True)
    # Context manager guarantees the handle is flushed and closed
    with open(_feedback_file, 'w') as f:
        f.write(json.dumps(rdict))
|
Set problem specific result value
|
def set_problem_result(result, problem_id):
    """ Set the result ("success", "failed", ...) of a specific problem.

    The per-problem entry is stored as a [result, feedback] pair; a plain
    string value (feedback only) is upgraded to that pair.

    :param result: result string for the problem
    :param problem_id: id of the problem
    """
    rdict = load_feedback()
    problems = rdict.setdefault('problems', {})
    cur_val = problems.get(problem_id, '')
    # Keep any feedback text already stored for this problem
    problems[problem_id] = [result, cur_val] if isinstance(cur_val, str) else [result, cur_val[1]]
    save_feedback(rdict)
|
Set global feedback in case of error
|
def set_global_feedback(feedback, append=False):
    """ Set (or append to) the global feedback text shown to the student. """
    rdict = load_feedback()
    if append:
        rdict['text'] = rdict.get('text', '') + feedback
    else:
        rdict['text'] = feedback
    save_feedback(rdict)
|
Set problem specific feedback
|
def set_problem_feedback(feedback, problem_id, append=False):
    """ Set (or append to) the feedback text of a specific problem.

    The stored value may be either a plain feedback string or a
    [result, feedback] pair; both formats are preserved.

    :param feedback: feedback text
    :param problem_id: id of the problem
    :param append: when True, append to the existing feedback instead of replacing it
    """
    rdict = load_feedback()
    problems = rdict.setdefault('problems', {})
    cur_val = problems.get(problem_id, '')
    if isinstance(cur_val, str):
        # Plain string: replace or extend the feedback text directly
        problems[problem_id] = cur_val + feedback if append else feedback
    else:
        # [result, feedback] pair: keep the result, update the feedback
        problems[problem_id] = [cur_val[0], cur_val[1] + feedback if append else feedback]
    save_feedback(rdict)
|
Set the tag 'tag' to the value True or False.
:param value: should be a boolean
:param tag: should be the id of the tag. Can not starts with '*auto-tag-'
|
def set_tag(tag, value):
    """
    Set the tag 'tag' to the value True or False in the "tests" dict.
    :param value: should be a boolean
    :param tag: should be the id of the tag; ids reserved for automatic tags
        (starting with '*auto-tag-') are silently ignored
    """
    if tag.startswith("*auto-tag-"):
        return
    rdict = load_feedback()
    rdict.setdefault("tests", {})[tag] = (value == True)
    save_feedback(rdict)
|
Add a tag with generated id.
:param value: everything working with the str() function
|
def tag(value):
    """
    Add an automatic tag whose id is derived from the hash of *value*.
    :param value: anything that can be converted with str()
    """
    text = str(value)
    rdict = load_feedback()
    auto_tags = rdict.setdefault("tests", {})
    auto_tags["*auto-tag-" + str(hash(text))] = text
    save_feedback(rdict)
|
Set a custom value to be given back in the feedback
:param custom_name: name/key of the entry to be placed in the custom dict
:param custom_val: content of the entry to be placed in the custom dict
|
def set_custom_value(custom_name, custom_val):
    """
    Store an entry in the "custom" dict given back with the feedback.
    :param custom_name: name/key of the entry
    :param custom_val: content of the entry
    """
    rdict = load_feedback()
    rdict.setdefault("custom", {})[custom_name] = custom_val
    save_feedback(rdict)
|
Parse a template, using the given parameters, and set it as the feedback message.
tpl_name must indicate a file. Given that XX_XX is the lang code of the current user ('en_US' or 'fr_FR', for example),
this function will search template file in different locations, in the following order:
- [current_dir]/tpl_name.XX_XX.tpl
- [task_dir]/lang/XX_XX/tpl_name.tpl (this is the preferred way, as it contributes to store all translations in the same folder)
- [current_dir]/tpl_name.tpl
Note that you can indicate "../otherdir/mytpl" to force the function to search in the "../otherdir" directory. Simply omit the final ".tpl".
If no file is found or a parsing exception occurred, an error is displayed as feedback message, and False is returned.
If everything went well, True is returned.
The parsing uses Jinja2.
Parameters is a dictionnary that will be given to the Jinja template.
|
def set_feedback_from_tpl(tpl_name, parameters, problem_id=None, append=False):
    """ Parse a template, using the given parameters, and set it as the feedback message.

    tpl_name must indicate a file. Given that XX_XX is the lang code of the current user ('en_US' or 'fr_FR', for example),
    this function will search template file in different locations, in the following order:
    - [current_dir]/tpl_name.XX_XX.tpl
    - [task_dir]/lang/XX_XX/tpl_name.tpl (this is the preferred way, as it contributes to store all translations in the same folder)
    - [current_dir]/tpl_name.tpl
    Note that you can indicate "../otherdir/mytpl" to force the function to search in the "../otherdir" directory. Simply omit the final ".tpl".
    If no file is found or a parsing exception occurred, an error is displayed as feedback message, and False is returned.
    If everything went well, True is returned.
    The parsing uses Jinja2.
    Parameters is a dictionnary that will be given to the Jinja template.
    """
    inginious.lang.init()  # make the gettext machinery (and "_") available
    lang = get_lang()
    tpl_location = None
    # Candidate paths, most specific (localized) first
    possible_locations = [".".join([tpl_name, lang, "tpl"]),
                          os.path.join(inginious.lang.get_lang_dir_path(), lang, tpl_name) + ".tpl",
                          ".".join([tpl_name, "tpl"])]
    for path in possible_locations:
        if os.path.exists(path):
            tpl_location = path
            break
    if tpl_location is None:
        # No template found: put an RST error block in the feedback instead
        output = """
.. error::
    Unable to find template named %s. Please contact your administrator.
""" % tpl_name
        if problem_id is None:
            set_global_feedback(output, append)
        else:
            set_problem_feedback(output, problem_id, append)
        return False
    try:
        template = Template(open(tpl_location, 'r').read())
        parameters.update({"_": _})  # expose the translation function to the template
        output = template.render(parameters)
        valid = True
    except Exception:
        # Rendering failed: ship the traceback as an indented RST literal block
        output = """
.. error::
    An error occured while parsing the feedback template. Here is the full error:
    ::
"""
        output += "\n".join(["\t\t"+line for line in traceback.format_exc().split("\n")])
        output += "\n\tPlease contact your administrator.\n"
        valid = False
    if problem_id is None:
        set_global_feedback(output, append)
    else:
        set_problem_feedback(output, problem_id, append)
    return valid
|
Displays a BIG warning
|
def _display_big_warning(self, content):
    """ Print an eye-catching warning block surrounded by blank lines. """
    banner = [
        "",
        BOLD + WARNING + "--- WARNING ---" + ENDC,
        WARNING + content + ENDC,
        "",
    ]
    for line in banner:
        print(line)
|
Run the installator
|
def run(self):
    """ Run the installator.

    Interactively walks the user through backend, MongoDB, task-directory,
    container, misc, backup and authentication configuration, then writes
    the resulting options as YAML to the configuration file.
    """
    self._display_header("BACKEND CONFIGURATION")
    options = {}
    # Loop until a usable backend configuration is obtained (or forced)
    while True:
        options = {}
        backend = self.ask_backend()
        if backend == "local":
            self._display_info("Backend chosen: local. Testing the configuration.")
            options = self._ask_local_config()
            if not self.test_local_docker_conf():
                self._display_error(
                    "An error occurred while testing the configuration. Please make sure you are able do run `docker info` in "
                    "your command line, and environment parameters like DOCKER_HOST are correctly set.")
                # The user may force a possibly-broken local configuration
                if self._ask_boolean("Would you like to continue anyway?", False):
                    break
            else:
                break
        else:
            # Manual backend: only record the address; the rest is left to the user
            self._display_warning(
                "Backend chosen: manual. As it is a really advanced feature, you will have to configure it yourself in "
                "the configuration file, at the end of the setup process.")
            options = {"backend": backend}
            break
    self._display_header("MONGODB CONFIGURATION")
    mongo_opt = self.configure_mongodb()
    options.update(mongo_opt)
    self._display_header("TASK DIRECTORY")
    task_directory_opt = self.configure_task_directory()
    options.update(task_directory_opt)
    self._display_header("CONTAINERS")
    self.configure_containers(options)
    self._display_header("MISC")
    misc_opt = self.configure_misc()
    options.update(misc_opt)
    # Re-open the database with the final options; needed for authentication setup
    database = self.try_mongodb_opts(options["mongo_opt"]["host"], options["mongo_opt"]["database"])
    self._display_header("BACKUP DIRECTORY")
    backup_directory_opt = self.configure_backup_directory()
    options.update(backup_directory_opt)
    self._display_header("AUTHENTIFICATION")
    auth_opts = self.configure_authentication(database)
    options.update(auth_opts)
    self._display_info("You may want to add additional plugins to the configuration file.")
    self._display_header("REMOTE DEBUGGING - IN BROWSER")
    self._display_info(
        "If you want to activate the remote debugging of task in the users' browser, you have to install separately "
        "INGInious-xterm, which is available on Github, according to the parameters you have given for the hostname and the "
        "port range given in the configuration of the remote debugging.")
    self._display_info(
        "You can leave the following question empty to disable this feature; remote debugging will still be available, "
        "but not in the browser.")
    webterm = self._ask_with_default(
        "Please indicate the link to your installation of INGInious-xterm (for example: "
        "https://your-hostname.com:8080).", "")
    if webterm != "":
        options["webterm"] = webterm
    self._display_header("END")
    file_dir = self._config_path or os.path.join(os.getcwd(), self.configuration_filename())
    try:
        yaml.dump(options, open(file_dir, "w"))
        self._display_info("Successfully written the configuration file")
    except:
        # Writing failed (permissions, read-only FS, ...): dump to stdout instead
        self._display_error("Cannot write the configuration file on disk. Here is the content of the file")
        print(yaml.dump(options))
|
Ask some parameters about the local configuration
|
def _ask_local_config(self):
    """ Ask some parameters about the local configuration.

    Prompts for the maximum concurrency, the external hostname and the
    remote-debugging port range; empty answers keep the defaults.
    :return: a dict fragment {"backend": "local", "local-config": {...}}
    """
    options = {"backend": "local", "local-config": {}}
    # Concurrency
    while True:
        concurrency = self._ask_with_default(
            "Maximum concurrency (number of tasks running simultaneously). Leave it empty to use the number of "
            "CPU of your host.", "")
        if concurrency == "":
            break
        try:
            concurrency = int(concurrency)
        except ValueError:  # narrowed from a bare except: only int() can fail here
            self._display_error("Invalid number")
            continue
        if concurrency <= 0:
            self._display_error("Invalid number")
            continue
        options["local-config"]["concurrency"] = concurrency
        break
    # Debug hostname
    hostname = self._ask_with_default(
        "What is the external hostname/address of your machine? You can leave this empty and let INGInious "
        "autodetect it.", "")
    if hostname != "":
        options["local-config"]["debug_host"] = hostname
    self._display_info(
        "You can now enter the port range for the remote debugging feature of INGInious. Please verify that these "
        "ports are open in your firewall. You can leave this parameters empty, the default is 64100-64200")
    # Debug port range
    port_range = None
    while True:
        start_port = self._ask_with_default("Beginning of the range", "")
        if start_port == "":
            break
        try:
            start_port = int(start_port)
        except ValueError:
            self._display_error("Invalid number")
            continue
        end_port = self._ask_with_default("End of the range", str(start_port + 100))
        try:
            end_port = int(end_port)
        except ValueError:
            self._display_error("Invalid number")
            continue
        if start_port > end_port:
            self._display_error("Invalid range")
            continue
        port_range = str(start_port) + "-" + str(end_port)
        # A valid range was entered: stop asking (previously the loop required
        # an extra empty answer before it would exit)
        break
    if port_range is not None:
        options["local-config"]["debug_ports"] = port_range
    return options
|
Ask the user to choose the backend
|
def ask_backend(self):
    """ Ask the user to choose the backend ("local" docker daemon or a manual address). """
    uses_local_daemon = self._ask_boolean(
        "Do you have a local docker daemon (on Linux), do you use docker-machine via a local machine, or do you use "
        "Docker for macOS?", True)
    if not uses_local_daemon:
        self._display_info(
            "You will have to run inginious-backend and inginious-agent yourself. Please run the commands without argument "
            "and/or read the documentation for more info")
        return self._display_question("Please enter the address of your backend")
    self._display_info("If you use docker-machine on macOS, please see "
                       "http://inginious.readthedocs.io/en/latest/install_doc/troubleshooting.html")
    return "local"
|
Try MongoDB configuration
|
def try_mongodb_opts(self, host="localhost", database_name='INGInious'):
    """ Try MongoDB configuration.

    Attempts to create a client, open the database and initialize GridFS.
    :return: the database object on success, None (after a warning) otherwise.
    NOTE(review): MongoClient connects lazily, so a wrong host may only fail
    at a later step — confirm against the pymongo version in use.
    """
    try:
        mongo_client = MongoClient(host=host)
    except Exception as e:
        self._display_warning("Cannot connect to MongoDB on host %s: %s" % (host, str(e)))
        return None
    try:
        database = mongo_client[database_name]
    except Exception as e:
        self._display_warning("Cannot access database %s: %s" % (database_name, str(e)))
        return None
    try:
        # GridFS is used by INGInious to store archives; check it is available too
        GridFS(database)
    except Exception as e:
        self._display_warning("Cannot access gridfs %s: %s" % (database_name, str(e)))
        return None
    return database
|
Configure MongoDB
|
def configure_mongodb(self):
    """ Configure MongoDB.

    First tries the default (localhost / "INGInious") configuration; the user
    is only prompted when it fails or when they explicitly ask to edit it.
    :return: a dict fragment {"mongo_opt": {"host": ..., "database": ...}}
    """
    self._display_info("Trying default configuration")
    host = "localhost"
    database_name = "INGInious"
    should_ask = True
    if self.try_mongodb_opts(host, database_name):
        should_ask = self._ask_boolean(
            "Successfully connected to MongoDB. Do you want to edit the configuration anyway?", False)
    else:
        self._display_info("Cannot guess configuration for MongoDB.")
    # Prompt repeatedly until a working (or explicitly forced) configuration is given
    while should_ask:
        self._display_question(
            "Please enter the MongoDB host. If you need to enter a password, here is the syntax:")
        self._display_question("mongodb://USERNAME:PASSWORD@HOST:PORT/AUTHENTIFICATION_DATABASE")
        host = self._ask_with_default("MongoDB host", host)
        database_name = self._ask_with_default("Database name", database_name)
        if not self.try_mongodb_opts(host, database_name):
            # Connection failed: the user may still force these settings
            if self._ask_boolean("Cannot connect to MongoDB. Would you like to continue anyway?", False):
                break
        else:
            self._display_info("Successfully connected to MongoDB")
            break
    return {"mongo_opt": {"host": host, "database": database_name}}
|
Configure task directory
|
def configure_task_directory(self):
    """ Configure task directory.

    Asks for the directory where courses/tasks are stored and optionally
    downloads the demonstration tasks into it.
    :return: a dict fragment {"tasks_directory": path}
    """
    self._display_question(
        "Please choose a directory in which to store the course/task files. By default, the tool will put them in the current "
        "directory")
    task_directory = None
    while task_directory is None:
        task_directory = self._ask_with_default("Task directory", ".")
        if not os.path.exists(task_directory):
            self._display_error("Path does not exists")
            if self._ask_boolean("Would you like to retry?", True):
                task_directory = None  # loop and ask again
    if os.path.exists(task_directory):
        self._display_question("Demonstration tasks can be downloaded to let you discover INGInious.")
        if self._ask_boolean("Would you like to download them ?", True):
            try:
                filename, _ = urllib.request.urlretrieve(
                    "https://api.github.com/repos/UCL-INGI/INGInious-demo-tasks/tarball")
                with tarfile.open(filename, mode="r:gz") as thetarfile:
                    # Strip the top-level GitHub folder ("UCL-INGI-...-<sha>") from every member
                    members = thetarfile.getmembers()
                    commonpath = os.path.commonpath([tarinfo.name for tarinfo in members])
                    for member in members:
                        member.name = member.name[len(commonpath) + 1:]
                        if member.name:
                            # NOTE(review): tarfile.extract does not sanitize "../" member
                            # paths; the archive comes from GitHub, but confirm this is acceptable.
                            thetarfile.extract(member, task_directory)
                self._display_info("Successfully downloaded and copied demonstration tasks.")
            except Exception as e:
                self._display_error("An error occurred while copying the directory: %s" % str(e))
    else:
        self._display_warning("Skipping copying the 'test' course because the task dir does not exists")
    return {"tasks_directory": task_directory}
|
Download the chosen containers on all the agents
|
def download_containers(self, to_download, current_options):
""" Download the chosen containers on all the agents """
if current_options["backend"] == "local":
self._display_info("Connecting to the local Docker daemon...")
try:
docker_connection = docker.from_env()
except:
self._display_error("Cannot connect to local Docker daemon. Skipping download.")
return
for image in to_download:
try:
self._display_info("Downloading image %s. This can take some time." % image)
docker_connection.images.pull(image + ":latest")
except Exception as e:
self._display_error("An error occurred while pulling the image: %s." % str(e))
else:
self._display_warning(
"This installation tool does not support the backend configuration directly, if it's not local. You will have to "
"pull the images by yourself. Here is the list: %s" % str(to_download))
|
Configures the container dict
|
def configure_containers(self, current_options):
""" Configures the container dict """
containers = [
("default", "Default container. For Bash and Python 2 tasks"),
("cpp", "Contains gcc and g++ for compiling C++"),
("java7", "Contains Java 7"),
("java8scala", "Contains Java 8 and Scala"),
("mono", "Contains Mono, which allows to run C#, F# and many other languages"),
("oz", "Contains Mozart 2, an implementation of the Oz multi-paradigm language, made for education"),
("php", "Contains PHP 5"),
("pythia0compat", "Compatibility container for Pythia 0"),
("pythia1compat", "Compatibility container for Pythia 1"),
("r", "Can run R scripts"),
("sekexe", "Can run an user-mode-linux for advanced tasks")
]
default_download = ["default"]
self._display_question(
"The tool will now propose to download some base container image for multiple languages.")
self._display_question(
"Please note that the download of these images can take a lot of time, so choose only the images you need")
to_download = []
for container_name, description in containers:
if self._ask_boolean("Download %s (%s) ?" % (container_name, description),
container_name in default_download):
to_download.append("ingi/inginious-c-%s" % container_name)
self.download_containers(to_download, current_options)
wants = self._ask_boolean("Do you want to manually add some images?", False)
while wants:
image = self._ask_with_default("Container image name (leave this field empty to skip)", "")
if image == "":
break
self._display_info("Configuration of the containers done.")
|
Configure backup directory
|
def configure_backup_directory(self):
""" Configure backup directory """
self._display_question("Please choose a directory in which to store the backup files. By default, the tool will them in the current "
"directory")
backup_directory = None
while backup_directory is None:
backup_directory = self._ask_with_default("Backup directory", ".")
if not os.path.exists(backup_directory):
self._display_error("Path does not exists")
if self._ask_boolean("Would you like to retry?", True):
backup_directory = None
return {"backup_directory": backup_directory}
|
Configures the LDAP plugin
|
def ldap_plugin(self):
""" Configures the LDAP plugin """
name = self._ask_with_default("Authentication method name (will be displayed on the login page)", "LDAP")
prefix = self._ask_with_default("Prefix to append to the username before db storage. Usefull when you have more than one auth method with "
"common usernames.", "")
ldap_host = self._ask_with_default("LDAP Host", "ldap.your.domain.com")
encryption = 'none'
while True:
encryption = self._ask_with_default("Encryption (either 'ssl', 'tls', or 'none')", 'none')
if encryption not in ['none', 'ssl', 'tls']:
self._display_error("Invalid value")
else:
break
base_dn = self._ask_with_default("Base DN", "ou=people,c=com")
request = self._ask_with_default("Request to find a user. '{}' will be replaced by the username", "uid={}")
require_cert = self._ask_boolean("Require certificate validation?", encryption is not None)
return {
"plugin_module": "inginious.frontend.plugins.auth.ldap_auth",
"host": ldap_host,
"encryption": encryption,
"base_dn": base_dn,
"request": request,
"prefix": prefix,
"name": name,
"require_cert": require_cert
}
|
Configure the authentication
|
def configure_authentication(self, database):
""" Configure the authentication """
options = {"plugins": [], "superadmins": []}
self._display_info("We will now create the first user.")
username = self._ask_with_default("Enter the login of the superadmin", "superadmin")
realname = self._ask_with_default("Enter the name of the superadmin", "INGInious SuperAdmin")
email = self._ask_with_default("Enter the email address of the superadmin", "[email protected]")
password = self._ask_with_default("Enter the password of the superadmin", "superadmin")
database.users.insert({"username": username,
"realname": realname,
"email": email,
"password": hashlib.sha512(password.encode("utf-8")).hexdigest(),
"bindings": {},
"language": "en"})
options["superadmins"].append(username)
while True:
if not self._ask_boolean("Would you like to add another auth method?", False):
break
self._display_info("You can choose an authentication plugin between:")
self._display_info("- 1. LDAP auth plugin. This plugin allows to connect to a distant LDAP host.")
plugin = self._ask_with_default("Enter the corresponding number to your choice", '1')
if plugin not in ['1']:
continue
elif plugin == '1':
options["plugins"].append(self.ldap_plugin())
return options
|
Ensures that the app is properly closed
|
def _close_app(app, mongo_client, client):
""" Ensures that the app is properly closed """
app.stop()
client.close()
mongo_client.close()
|
:param config: the configuration dict
:return: A new app
|
def get_app(config):
    """
    Build and wire the INGInious frontend web application.

    :param config: the configuration dict
    :return: a tuple ``(wsgi application, close function)``
    """
    # First, disable debug. It will be enabled in the configuration, later.
    web.config.debug = False
    config = _put_configuration_defaults(config)
    # Connect to MongoDB and get a GridFS handle for stored files
    mongo_client = MongoClient(host=config.get('mongo_opt', {}).get('host', 'localhost'))
    database = mongo_client[config.get('mongo_opt', {}).get('database', 'INGInious')]
    gridfs = GridFS(database)
    # Init database if needed
    db_version = database.db_version.find_one({})
    if db_version is None:
        # Fresh database: create the indexes used by submission/user_tasks queries
        database.submissions.ensure_index([("username", pymongo.ASCENDING)])
        database.submissions.ensure_index([("courseid", pymongo.ASCENDING)])
        database.submissions.ensure_index([("courseid", pymongo.ASCENDING), ("taskid", pymongo.ASCENDING)])
        database.submissions.ensure_index([("submitted_on", pymongo.DESCENDING)])  # sort speed
        database.user_tasks.ensure_index(
            [("username", pymongo.ASCENDING), ("courseid", pymongo.ASCENDING), ("taskid", pymongo.ASCENDING)],
            unique=True)
        database.user_tasks.ensure_index([("username", pymongo.ASCENDING), ("courseid", pymongo.ASCENDING)])
        database.user_tasks.ensure_index([("courseid", pymongo.ASCENDING), ("taskid", pymongo.ASCENDING)])
        database.user_tasks.ensure_index([("courseid", pymongo.ASCENDING)])
        database.user_tasks.ensure_index([("username", pymongo.ASCENDING)])
    appli = CookieLessCompatibleApplication(MongoStore(database, 'sessions'))
    # Init gettext
    available_languages = {
        "en": "English",
        "fr": "Français"
    }
    for lang in available_languages.keys():
        appli.add_translation(lang, gettext.translation('messages', get_root_path() + '/frontend/i18n', [lang]))
    # Expose the translation function globally as "_"
    builtins.__dict__['_'] = appli.gettext
    if config.get("maintenance", False):
        # Maintenance mode: build a minimal app serving only the maintenance pages
        # and return early (no backend client, no plugins).
        template_helper = TemplateHelper(PluginManager(), None,
                                         'frontend/templates',
                                         'frontend/templates/layout',
                                         'frontend/templates/layout_lti',
                                         config.get('use_minified_js', True))
        template_helper.add_to_template_globals("get_homepath", appli.get_homepath)
        template_helper.add_to_template_globals("_", _)
        appli.template_helper = template_helper
        appli.init_mapping(urls_maintenance)
        return appli.wsgifunc(), appli.stop
    default_allowed_file_extensions = config['allowed_file_extensions']
    default_max_file_size = config['max_file_size']
    zmq_context, __ = start_asyncio_and_zmq(config.get('debug_asyncio', False))
    # Init the different parts of the app
    plugin_manager = PluginManager()
    # Create the FS provider
    if "fs" in config:
        fs_provider = filesystem_from_config_dict(config["fs"])
    else:
        task_directory = config["tasks_directory"]
        fs_provider = LocalFSProvider(task_directory)
    # Map problem-type name -> displayable problem class
    default_problem_types = {
        problem_type.get_type(): problem_type for problem_type in [DisplayableCodeProblem,
                                                                   DisplayableCodeSingleLineProblem,
                                                                   DisplayableFileProblem,
                                                                   DisplayableMultipleChoiceProblem,
                                                                   DisplayableMatchProblem]
    }
    course_factory, task_factory = create_factories(fs_provider, default_problem_types, plugin_manager, WebAppCourse, WebAppTask)
    user_manager = UserManager(appli.get_session(), database, config.get('superadmins', []))
    update_pending_jobs(database)
    client = create_arch(config, fs_provider, zmq_context, course_factory)
    lti_outcome_manager = LTIOutcomeManager(database, user_manager, course_factory)
    submission_manager = WebAppSubmissionManager(client, user_manager, database, gridfs, plugin_manager, lti_outcome_manager)
    template_helper = TemplateHelper(plugin_manager, user_manager, 'frontend/templates',
                                     'frontend/templates/layout',
                                     'frontend/templates/layout_lti',
                                     config.get('use_minified_js', True))
    # Init web mail
    smtp_conf = config.get('smtp', None)
    if smtp_conf is not None:
        web.config.smtp_server = smtp_conf["host"]
        web.config.smtp_port = int(smtp_conf["port"])
        web.config.smtp_starttls = bool(smtp_conf.get("starttls", False))
        web.config.smtp_username = smtp_conf.get("username", "")
        web.config.smtp_password = smtp_conf.get("password", "")
        web.config.smtp_sendername = smtp_conf.get("sendername", "[email protected]")
    # Add some helpers for the templates
    template_helper.add_to_template_globals("_", _)
    template_helper.add_to_template_globals("str", str)
    template_helper.add_to_template_globals("available_languages", available_languages)
    template_helper.add_to_template_globals("get_homepath", appli.get_homepath)
    template_helper.add_to_template_globals("allow_registration", config.get("allow_registration", True))
    template_helper.add_to_template_globals("user_manager", user_manager)
    template_helper.add_to_template_globals("default_allowed_file_extensions", default_allowed_file_extensions)
    template_helper.add_to_template_globals("default_max_file_size", default_max_file_size)
    template_helper.add_other("course_admin_menu",
                              lambda course, current: course_admin_utils.get_menu(course, current, template_helper.get_renderer(False),
                                                                                  plugin_manager, user_manager))
    template_helper.add_other("preferences_menu",
                              lambda current: preferences_utils.get_menu(appli, current, template_helper.get_renderer(False),
                                                                         plugin_manager, user_manager))
    # Not found page
    appli.notfound = lambda: web.notfound(template_helper.get_renderer().notfound('Page not found'))
    # Enable stacktrace display if needed
    web_debug = config.get('web_debug', False)
    appli.internalerror = internalerror_generator(template_helper.get_renderer(False))
    if web_debug is True:
        web.config.debug = True
        appli.internalerror = debugerror
    elif isinstance(web_debug, str):
        # A string value is used as the address that receives error e-mails
        web.config.debug = False
        appli.internalerror = emailerrors(web_debug, appli.internalerror)
    # Insert the needed singletons into the application, to allow pages to call them
    appli.plugin_manager = plugin_manager
    appli.course_factory = course_factory
    appli.task_factory = task_factory
    appli.submission_manager = submission_manager
    appli.user_manager = user_manager
    appli.template_helper = template_helper
    appli.database = database
    appli.gridfs = gridfs
    appli.default_allowed_file_extensions = default_allowed_file_extensions
    appli.default_max_file_size = default_max_file_size
    appli.backup_dir = config.get("backup_directory", './backup')
    appli.webterm_link = config.get("webterm", None)
    appli.lti_outcome_manager = lti_outcome_manager
    appli.allow_registration = config.get("allow_registration", True)
    appli.allow_deletion = config.get("allow_deletion", True)
    appli.available_languages = available_languages
    appli.welcome_page = config.get("welcome_page", None)
    appli.static_directory = config.get("static_directory", "./static")
    appli.webdav_host = config.get("webdav_host", None)
    # Init the mapping of the app
    appli.init_mapping(urls)
    # Loads plugins
    plugin_manager.load(client, appli, course_factory, task_factory, database, user_manager, submission_manager, config.get("plugins", []))
    # Start the inginious.backend
    client.start()
    return appli.wsgifunc(), lambda: _close_app(appli, mongo_client, client)
|
Must be called when the agent is starting
|
    async def _init_clean(self):
        """ Must be called when the agent is starting """
        # Data about running containers
        self._containers_running = {}  # container_id -> (job message, container temp path, results future)
        self._container_for_job = {}  # job_id -> container_id
        self._student_containers_running = {}  # container_id -> (job_id, parent container_id, socket_id, write_stream)
        self._student_containers_for_job = {}  # job_id -> set of student container_ids
        self._containers_killed = dict()  # container_id -> kill reason (e.g. "overflow", "killed")
        # Delete tmp_dir, and recreate-it again
        try:
            await self._ashutil.rmtree(self._tmp_dir)
        except OSError:
            pass
        try:
            await self._aos.mkdir(self._tmp_dir)
        except OSError:
            pass
        # Docker
        self._docker = AsyncProxy(DockerInterface())
        # Auto discover containers
        self._logger.info("Discovering containers")
        self._containers = await self._docker.get_containers()
        self._assigned_external_ports = {}  # container_id : [external_ports]
        if self._address_host is None and len(self._containers) != 0:
            # Use any known container image to guess the externally reachable IP
            # of this host (needed for remote SSH debugging).
            self._logger.info("Guessing external host IP")
            self._address_host = await self._docker.get_host_ip(next(iter(self._containers.values()))["id"])
        if self._address_host is None:
            self._logger.warning(
                "Cannot find external host IP. Please indicate it in the configuration. Remote SSH debug has been deactivated.")
            self._external_ports = None
        else:
            self._logger.info("External address for SSH remote debug is %s", self._address_host)
        # Watchers
        self._timeout_watcher = TimeoutWatcher(self._docker)
|
Must be called when the agent is closing
|
async def _end_clean(self):
""" Must be called when the agent is closing """
await self._timeout_watcher.clean()
async def close_and_delete(container_id):
try:
await self._docker.remove_container(container_id)
except:
pass
for container_id in self._containers_running:
await close_and_delete(container_id)
for container_id in self._student_containers_running:
await close_and_delete(container_id)
|
Get raw docker events and convert them to more readable objects, and then give them to self._docker_events_subscriber
|
    async def _watch_docker_events(self):
        """ Get raw docker events and convert them to more readable objects, and then give them to self._docker_events_subscriber """
        try:
            # Only subscribe to "die" (container exited) and "oom" (out-of-memory) events
            source = AsyncIteratorWrapper(self._docker.sync.event_stream(filters={"event": ["die", "oom"]}))
            async for i in source:
                if i["Type"] == "container" and i["status"] == "die":
                    # A container exited: route it to the proper closing handler
                    container_id = i["id"]
                    try:
                        retval = int(i["Actor"]["Attributes"]["exitCode"])
                    except asyncio.CancelledError:
                        raise
                    except:
                        self._logger.exception("Cannot parse exitCode for container %s", container_id)
                        retval = -1  # sentinel: unknown exit code, treated as a crash downstream
                    if container_id in self._containers_running:
                        self._create_safe_task(self.handle_job_closing(container_id, retval))
                    elif container_id in self._student_containers_running:
                        self._create_safe_task(self.handle_student_job_closing(container_id, retval))
                elif i["Type"] == "container" and i["status"] == "oom":
                    # A container hit its memory limit: kill it and remember why,
                    # so the closing handler can report an "overflow" result.
                    container_id = i["id"]
                    if container_id in self._containers_running or container_id in self._student_containers_running:
                        self._logger.info("Container %s did OOM, killing it", container_id)
                        self._containers_killed[container_id] = "overflow"
                        try:
                            self._create_safe_task(self._docker.kill_container(container_id))
                        except asyncio.CancelledError:
                            raise
                        except:  # this call can sometimes fail, and that is normal.
                            pass
                else:
                    # Only the two subscribed event kinds are expected here
                    raise TypeError(str(i))
        except asyncio.CancelledError:
            pass
        except:
            self._logger.exception("Exception in _watch_docker_events")
|
Synchronous part of _new_job. Creates needed directories, copy files, and starts the container.
|
    def __new_job_sync(self, message, future_results):
        """
        Synchronous part of _new_job. Creates needed directories, copy files, and starts the container.

        :param message: the BackendNewJob message describing the job
        :param future_results: future that will eventually receive the container results
        :return: dict describing the started container, consumed by handle_running_container
        :raises CannotCreateJobException: when the job cannot be started on this agent
        """
        course_id = message.course_id
        task_id = message.task_id
        debug = message.debug
        environment_name = message.environment
        enable_network = message.enable_network
        time_limit = message.time_limit
        hard_time_limit = message.hard_time_limit or time_limit * 3
        mem_limit = message.mem_limit
        course_fs = self.tasks_fs.from_subfolder(course_id)
        task_fs = course_fs.from_subfolder(task_id)
        if not course_fs.exists() or not task_fs.exists():
            self._logger.warning("Task %s/%s unavailable on this agent", course_id, task_id)
            raise CannotCreateJobException('Task unavailable on agent. Please retry later, the agents should synchronize soon. '
                                           'If the error persists, please contact your course administrator.')
        # Check for realistic memory limit value
        if mem_limit < 20:
            mem_limit = 20  # enforce a 20MB floor
        elif mem_limit > self._max_memory_per_slot:
            self._logger.warning("Task %s/%s ask for too much memory (%dMB)! Available: %dMB", course_id, task_id,
                                 mem_limit, self._max_memory_per_slot)
            raise CannotCreateJobException('Not enough memory on agent (available: %dMB). Please contact your course administrator.' % self._max_memory_per_slot)
        if environment_name not in self._containers:
            self._logger.warning("Task %s/%s ask for an unknown environment %s (not in aliases)", course_id, task_id,
                                 environment_name)
            raise CannotCreateJobException('Unknown container. Please contact your course administrator.')
        environment = self._containers[environment_name]["id"]
        ports_needed = self._containers[environment_name]["ports"]
        if debug == "ssh" and 22 not in ports_needed:
            # SSH remote debugging needs port 22 exposed
            ports_needed.append(22)
        ports = {}  # internal port -> reserved external port
        if len(ports_needed) > 0:
            # Jobs exposing ports get a fixed 30-minute limit
            time_limit = 30 * 60
            hard_time_limit = 30 * 60
        for p in ports_needed:
            if len(self._external_ports) == 0:
                self._logger.warning("User asked for a port but no one are available")
                raise CannotCreateJobException('No ports are available right now. Please retry later.')
            ports[p] = self._external_ports.pop()
        # Create directories for storing all the data for the job
        try:
            container_path = tempfile.mkdtemp(dir=self._tmp_dir)
        except Exception as e:
            self._logger.error("Cannot make container temp directory! %s", str(e), exc_info=True)
            # Roll back: release the external ports reserved above
            for p in ports:
                self._external_ports.add(ports[p])
            raise CannotCreateJobException('Cannot make container temp directory.')
        task_path = path_join(container_path, 'task')  # tmp_dir/id/task/
        course_path = path_join(container_path, 'course')
        sockets_path = path_join(container_path, 'sockets')  # tmp_dir/id/socket/
        student_path = path_join(task_path, 'student')  # tmp_dir/id/task/student/
        systemfiles_path = path_join(task_path, 'systemfiles')  # tmp_dir/id/task/systemfiles/
        course_common_path = path_join(course_path, 'common')
        course_common_student_path = path_join(course_path, 'common', 'student')
        # Create the needed directories
        os.mkdir(sockets_path)
        os.chmod(container_path, 0o777)
        os.chmod(sockets_path, 0o777)
        os.mkdir(course_path)
        # TODO: avoid copy
        task_fs.copy_from(None, task_path)
        os.chmod(task_path, 0o777)
        if not os.path.exists(student_path):
            os.mkdir(student_path)
            os.chmod(student_path, 0o777)
        # Copy common and common/student if needed
        # TODO: avoid copy
        if course_fs.from_subfolder("$common").exists():
            course_fs.from_subfolder("$common").copy_from(None, course_common_path)
        else:
            os.mkdir(course_common_path)
        if course_fs.from_subfolder("$common").from_subfolder("student").exists():
            course_fs.from_subfolder("$common").from_subfolder("student").copy_from(None, course_common_student_path)
        else:
            os.mkdir(course_common_student_path)
        # Run the container
        try:
            container_id = self._docker.sync.create_container(environment, enable_network, mem_limit, task_path,
                                                              sockets_path, course_common_path,
                                                              course_common_student_path, ports)
        except Exception as e:
            self._logger.warning("Cannot create container! %s", str(e), exc_info=True)
            # Roll back: delete the temp directory and release the reserved ports
            shutil.rmtree(container_path)
            for p in ports:
                self._external_ports.add(ports[p])
            raise CannotCreateJobException('Cannot create container.')
        # Store info
        self._containers_running[container_id] = message, container_path, future_results
        self._container_for_job[message.job_id] = container_id
        self._student_containers_for_job[message.job_id] = set()
        if len(ports) != 0:
            self._assigned_external_ports[container_id] = list(ports.values())
        try:
            # Start the container
            self._docker.sync.start_container(container_id)
        except Exception as e:
            self._logger.warning("Cannot start container! %s", str(e), exc_info=True)
            # Roll back: delete the temp directory and release the reserved ports
            shutil.rmtree(container_path)
            for p in ports:
                self._external_ports.add(ports[p])
            raise CannotCreateJobException('Cannot start container')
        return {
            "job_id": message.job_id,
            "container_id": container_id,
            "inputdata": message.inputdata,
            "debug": debug,
            "ports": ports,
            "orig_env": environment_name,
            "orig_memory_limit": mem_limit,
            "orig_time_limit": time_limit,
            "orig_hard_time_limit": hard_time_limit,
            "sockets_path": sockets_path,
            "student_path": student_path,
            "systemfiles_path": systemfiles_path,
            "course_common_student_path": course_common_student_path
        }
|
Handles a new job: starts the grading container
|
async def new_job(self, message: BackendNewJob):
"""
Handles a new job: starts the grading container
"""
self._logger.info("Received request for jobid %s", message.job_id)
future_results = asyncio.Future()
out = await self._loop.run_in_executor(None, lambda: self.__new_job_sync(message, future_results))
self._create_safe_task(self.handle_running_container(**out, future_results=future_results))
await self._timeout_watcher.register_container(out["container_id"], out["orig_time_limit"], out["orig_hard_time_limit"])
|
Creates a new student container.
:param write_stream: stream on which to write the return value of the container (with a correctly formatted msgpack message)
|
    async def create_student_container(self, job_id, parent_container_id, sockets_path, student_path, systemfiles_path,
                                       course_common_student_path, socket_id, environment_name, memory_limit,
                                       time_limit, hard_time_limit, share_network, write_stream):
        """
        Creates a new student container.
        :param write_stream: stream on which to write the return value of the container (with a correctly formatted msgpack message)
        """
        try:
            self._logger.debug("Starting new student container... %s %s %s %s", environment_name, memory_limit, time_limit, hard_time_limit)
            if environment_name not in self._containers:
                self._logger.warning("Student container asked for an unknown environment %s (not in aliases)", environment_name)
                # retval 254 tells the grading container that the student container could not start
                await self._write_to_container_stdin(write_stream, {"type": "run_student_retval", "retval": 254, "socket_id": socket_id})
                return
            environment = self._containers[environment_name]["id"]
            try:
                socket_path = path_join(sockets_path, str(socket_id) + ".sock")
                container_id = await self._docker.create_container_student(parent_container_id, environment, share_network,
                                                                          memory_limit, student_path, socket_path,
                                                                          systemfiles_path, course_common_student_path)
            except Exception as e:
                self._logger.exception("Cannot create student container!")
                await self._write_to_container_stdin(write_stream, {"type": "run_student_retval", "retval": 254, "socket_id": socket_id})
                if isinstance(e, asyncio.CancelledError):
                    # cancellation must still propagate after reporting the failure
                    raise
                return
            # Register the container before starting it so docker close events can be routed
            self._student_containers_for_job[job_id].add(container_id)
            self._student_containers_running[container_id] = job_id, parent_container_id, socket_id, write_stream
            # send to the container that the sibling has started
            await self._write_to_container_stdin(write_stream, {"type": "run_student_started", "socket_id": socket_id})
            try:
                await self._docker.start_container(container_id)
            except Exception as e:
                self._logger.exception("Cannot start student container!")
                await self._write_to_container_stdin(write_stream, {"type": "run_student_retval", "retval": 254, "socket_id": socket_id})
                if isinstance(e, asyncio.CancelledError):
                    raise
                return
            # Verify the time limit
            await self._timeout_watcher.register_container(container_id, time_limit, hard_time_limit)
        except asyncio.CancelledError:
            raise
        except:
            self._logger.exception("Exception in create_student_container")
|
Send a message to the stdin of a container, with the right data
:param write_stream: asyncio write stream to the stdin of the container
:param message: dict to be msgpacked and sent
|
    async def _write_to_container_stdin(self, write_stream, message):
        """
        Send a message to the stdin of a container, with the right data
        :param write_stream: asyncio write stream to the stdin of the container
        :param message: dict to be msgpacked and sent
        """
        # NOTE(review): the `encoding` kwarg was removed in msgpack >= 1.0 —
        # confirm the pinned msgpack version still supports it.
        msg = msgpack.dumps(message, encoding="utf8", use_bin_type=True)
        self._logger.debug("Sending %i bytes to container", len(msg))
        # Length-prefixed framing: a 4-byte unsigned int, then the msgpack payload
        write_stream.write(struct.pack('I', len(msg)))
        write_stream.write(msg)
        await write_stream.drain()
|
Talk with a container. Sends the initial input. Allows to start student containers
|
    async def handle_running_container(self, job_id, container_id, inputdata, debug, ports, orig_env,
                                       orig_memory_limit, orig_time_limit, orig_hard_time_limit, sockets_path,
                                       student_path, systemfiles_path, course_common_student_path, future_results):
        """
        Talk with a container. Sends the initial input. Allows to start student containers.

        Reads the docker attach stream, demultiplexes stdout/stderr, reassembles
        the length-prefixed msgpack messages sent by the container, and reacts to
        them ("run_student", "ssh_key", "result"). The final result (or None) is
        put into future_results.
        """
        sock = await self._docker.attach_to_container(container_id)
        try:
            read_stream, write_stream = await asyncio.open_connection(sock=sock.get_socket())
        except asyncio.CancelledError:
            raise
        except:
            self._logger.exception("Exception occurred while creating read/write stream to container")
            return None
        # Send hello msg
        await self._write_to_container_stdin(write_stream, {"type": "start", "input": inputdata, "debug": debug})
        result = None
        buffer = bytearray()  # accumulates stdout bytes until a full framed message is available
        try:
            while not read_stream.at_eof():
                msg_header = await read_stream.readexactly(8)
                outtype, length = struct.unpack_from('>BxxxL', msg_header)  # format imposed by docker in the attach endpoint
                if length != 0:
                    content = await read_stream.readexactly(length)
                if outtype == 1:  # stdout
                    buffer += content
                if outtype == 2:  # stderr
                    self._logger.debug("Received stderr from containers:\n%s", content)
                # 4 first bytes are the length of the message. If we have a complete message...
                while len(buffer) > 4 and len(buffer) >= 4+struct.unpack('I',buffer[0:4])[0]:
                    msg_encoded = buffer[4:4 + struct.unpack('I', buffer[0:4])[0]]  # ... get it
                    buffer = buffer[4 + struct.unpack('I', buffer[0:4])[0]:]  # ... withdraw it from the buffer
                    try:
                        msg = msgpack.unpackb(msg_encoded, encoding="utf8", use_list=False)
                        self._logger.debug("Received msg %s from container %s", msg["type"], container_id)
                        if msg["type"] == "run_student":
                            # start a new student container
                            # limits are capped by the original job's own limits
                            environment = msg["environment"] or orig_env
                            memory_limit = min(msg["memory_limit"] or orig_memory_limit, orig_memory_limit)
                            time_limit = min(msg["time_limit"] or orig_time_limit, orig_time_limit)
                            hard_time_limit = min(msg["hard_time_limit"] or orig_hard_time_limit, orig_hard_time_limit)
                            share_network = msg["share_network"]
                            socket_id = msg["socket_id"]
                            assert "/" not in socket_id  # ensure task creator do not try to break the agent :-(
                            self._create_safe_task(self.create_student_container(job_id, container_id, sockets_path, student_path,
                                                                                 systemfiles_path, course_common_student_path,
                                                                                 socket_id, environment, memory_limit, time_limit,
                                                                                 hard_time_limit, share_network, write_stream))
                        elif msg["type"] == "ssh_key":
                            # send the data to the backend (and client)
                            self._logger.info("%s %s", container_id, str(msg))
                            await self.send_ssh_job_info(job_id, self._address_host, ports[22], msg["ssh_key"])
                        elif msg["type"] == "result":
                            # last message containing the results of the container
                            result = msg["result"]
                    except:
                        self._logger.exception("Received incorrect message from container %s (job id %s)", container_id, job_id)
        except asyncio.IncompleteReadError:
            self._logger.debug("Container output ended with an IncompleteReadError; It was probably killed.")
        except asyncio.CancelledError:
            # still publish whatever we have before propagating cancellation
            write_stream.close()
            sock.close_socket()
            future_results.set_result(result)
            raise
        except:
            self._logger.exception("Exception while reading container %s output", container_id)
        write_stream.close()
        sock.close_socket()
        future_results.set_result(result)
        if not result:
            self._logger.warning("Container %s has not given any result", container_id)
|
Handle a closing student container. Do some cleaning, verify memory limits, timeouts, ... and returns data to the associated grading
container
|
    async def handle_student_job_closing(self, container_id, retval):
        """
        Handle a closing student container. Do some cleaning, verify memory limits, timeouts, ... and returns data to the associated grading
        container

        :param container_id: docker id of the student container that exited
        :param retval: exit code reported by docker (may be overridden below for kills)
        """
        try:
            self._logger.debug("Closing student %s", container_id)
            try:
                job_id, parent_container_id, socket_id, write_stream = self._student_containers_running[container_id]
                del self._student_containers_running[container_id]
            except asyncio.CancelledError:
                raise
            except:
                self._logger.warning("Student container %s that has finished(p1) was not launched by this agent", str(container_id), exc_info=True)
                return
            # Delete remaining student containers
            if job_id in self._student_containers_for_job:  # if it does not exists, then the parent container has closed
                self._student_containers_for_job[job_id].remove(container_id)
            # Translate a kill into the dedicated return codes
            killed = await self._timeout_watcher.was_killed(container_id)
            if container_id in self._containers_killed:
                killed = self._containers_killed[container_id]
                del self._containers_killed[container_id]
            if killed == "timeout":
                retval = 253  # killed for exceeding the time limit
            elif killed == "overflow":
                retval = 252  # killed for exceeding the memory limit
            try:
                await self._write_to_container_stdin(write_stream, {"type": "run_student_retval", "retval": retval, "socket_id": socket_id})
            except asyncio.CancelledError:
                raise
            except:
                pass  # parent container closed
            # Do not forget to remove the container
            try:
                await self._docker.remove_container(container_id)
            except asyncio.CancelledError:
                raise
            except:
                pass  # ignore
        except asyncio.CancelledError:
            raise
        except:
            self._logger.exception("Exception in handle_student_job_closing")
|
Handles `kill` messages. Kill things.
|
async def kill_job(self, message: BackendKillJob):
""" Handles `kill` messages. Kill things. """
try:
if message.job_id in self._container_for_job:
self._containers_killed[self._container_for_job[message.job_id]] = "killed"
await self._docker.kill_container(self._container_for_job[message.job_id])
else:
self._logger.warning("Cannot kill container for job %s because it is not running", str(message.job_id))
except asyncio.CancelledError:
raise
except:
self._logger.exception("Exception in handle_kill_job")
|
Handle a closing student container. Do some cleaning, verify memory limits, timeouts, ... and returns data to the backend
|
async def handle_job_closing(self, container_id, retval):
    """
    Handle a closing student container. Do some cleaning, verify memory limits, timeouts, ... and returns data to the backend

    :param container_id: id of the grading container that just stopped
    :param retval: exit code reported for the container; -1 means the container crashed
    """
    try:
        self._logger.debug("Closing %s", container_id)
        try:
            # Retrieve and drop the bookkeeping entry for this container
            message, container_path, future_results = self._containers_running[container_id]
            del self._containers_running[container_id]
        except asyncio.CancelledError:
            raise
        except:
            # Typically a KeyError: the container was not started (or already cleaned up) by this agent
            self._logger.warning("Container %s that has finished(p1) was not launched by this agent", str(container_id), exc_info=True)
            return
        # Close sub containers
        for student_container_id_loop in self._student_containers_for_job[message.job_id]:
            # little hack to ensure the value of student_container_id_loop is copied into the closure
            async def close_and_delete(student_container_id=student_container_id_loop):
                try:
                    await self._docker.kill_container(student_container_id)
                    await self._docker.remove_container(student_container_id)
                except asyncio.CancelledError:
                    raise
                except:
                    pass  # ignore
            self._create_safe_task(close_and_delete(student_container_id_loop))
        del self._student_containers_for_job[message.job_id]
        # Allow other container to reuse the external ports this container has finished to use
        if container_id in self._assigned_external_ports:
            for p in self._assigned_external_ports[container_id]:
                self._external_ports.add(p)
            del self._assigned_external_ports[container_id]
        # Verify if the container was killed, either by the client, by an OOM or by a timeout
        killed = await self._timeout_watcher.was_killed(container_id)
        if container_id in self._containers_killed:
            # An explicit kill (from kill_job) takes precedence over the watcher's verdict
            killed = self._containers_killed[container_id]
            del self._containers_killed[container_id]
        # Default values sent back to the backend if nothing better is available
        stdout = ""
        stderr = ""
        result = "crash" if retval == -1 else None
        error_msg = None
        grade = None
        problems = {}
        custom = {}
        tests = {}
        archive = None
        state = ""
        if killed is not None:
            result = killed
        # If everything did well, continue to retrieve the status from the container
        if result is None:
            # Get logs back
            try:
                return_value = await future_results
                # Accepted types for return dict
                accepted_types = {"stdout": str, "stderr": str, "result": str, "text": str, "grade": float,
                                  "problems": dict, "custom": dict, "tests": dict, "state": str, "archive": str}
                keys_fct = {"problems": id_checker, "custom": id_checker, "tests": id_checker_tests}
                # Check dict content: every key must have the right type, and sub-dict keys must
                # pass the id checkers (custom can contain anything, so it is exempted below)
                for key, item in return_value.items():
                    if not isinstance(item, accepted_types[key]):
                        raise Exception("Feedback file is badly formatted.")
                    elif accepted_types[key] == dict and key != "custom": #custom can contain anything:
                        for sub_key, sub_item in item.items():
                            if not keys_fct[key](sub_key) or isinstance(sub_item, dict):
                                raise Exception("Feedback file is badly formatted.")
                # Set output fields
                stdout = return_value.get("stdout", "")
                stderr = return_value.get("stderr", "")
                result = return_value.get("result", "error")
                error_msg = return_value.get("text", "")
                grade = return_value.get("grade", None)
                problems = return_value.get("problems", {})
                custom = return_value.get("custom", {})
                tests = return_value.get("tests", {})
                state = return_value.get("state", "")
                archive = return_value.get("archive", None)
                if archive is not None:
                    # Archive is transmitted base64-encoded inside the feedback dict
                    archive = base64.b64decode(archive)
            except Exception as e:
                self._logger.exception("Cannot get back output of container %s! (%s)", container_id, str(e))
                result = "crash"
                error_msg = 'The grader did not return a readable output : {}'.format(str(e))
        # Default values
        if error_msg is None:
            error_msg = ""
        if grade is None:
            # No explicit grade: 100 on success, 0 otherwise
            if result == "success":
                grade = 100.0
            else:
                grade = 0.0
        # Remove container
        try:
            await self._docker.remove_container(container_id)
        except asyncio.CancelledError:
            raise
        except:
            pass
        # Delete folders
        try:
            await self._ashutil.rmtree(container_path)
        except PermissionError:
            self._logger.debug("Cannot remove old container path!")
            pass  # todo: run a docker container to force removal
        # Return!
        await self.send_job_result(message.job_id, result, error_msg, grade, problems, tests, custom, state, archive, stdout, stderr)
        # Do not forget to remove data from internal state
        del self._container_for_job[message.job_id]
    except asyncio.CancelledError:
        raise
    except:
        self._logger.exception("Exception in handle_job_closing")
|
Get the available student and tutor lists for aggregation edition
|
def get_user_lists(self, course, aggregationid=''):
    """ Get the available student and tutor lists for aggregation edition.

    :param course: the course object
    :param aggregationid: if given, also compute the students not belonging to that aggregation
    :return: (student_list, tutor_list, users_info), or
             (student_list, tutor_list, other_students, users_info) when aggregationid is set.
             student_list maps each username to its aggregation entry (with a "grouped" flag),
             users_info maps usernames to user info tuples (or None if unknown).
    """
    tutor_list = course.get_staff()

    # Determine student list and if they are grouped
    student_list = list(self.database.aggregations.aggregate([
        {"$match": {"courseid": course.get_id()}},
        {"$unwind": "$students"},
        {"$project": {
            "classroom": "$_id",
            "students": 1,
            # "grouped" is True when the student appears in at least one group of his aggregation
            "grouped": {
                "$anyElementTrue": {
                    "$map": {
                        "input": "$groups.students",
                        "as": "group",
                        "in": {
                            "$anyElementTrue": {
                                "$map": {
                                    "input": "$$group",
                                    "as": "groupmember",
                                    "in": {"$eq": ["$$groupmember", "$students"]}
                                }
                            }
                        }
                    }
                }
            }
        }}
    ]))

    # Index the aggregation entries by username (dict comprehension instead of dict([...]))
    student_list = {student["students"]: student for student in student_list}
    users_info = self.user_manager.get_users_info(list(student_list.keys()) + tutor_list)

    if aggregationid:
        # Order the non-registered students: users with a known real name first ("0" prefix),
        # then unknown users sorted by their username ("1" prefix)
        other_students = [entry['students'] for entry in student_list.values()
                          if entry['classroom'] != ObjectId(aggregationid)]
        other_students = sorted(other_students, key=lambda val: (("0" + users_info[val][0]) if users_info[val] else ("1" + val)))
        return student_list, tutor_list, other_students, users_info
    else:
        return student_list, tutor_list, users_info
|
Update aggregation and returns a list of errored students
|
def update_aggregation(self, course, aggregationid, new_data):
    """ Update aggregation and returns a list of errored students.

    Creates the aggregation when aggregationid is the string 'None', otherwise updates the
    existing one. Students that cannot be (re)assigned are returned in the second element.

    :param course: the course object
    :param aggregationid: ObjectId string of the aggregation, or 'None' to create a new one
    :param new_data: dict with at least 'students', 'tutors', 'groups', 'description', 'default'
    :return: (updated aggregation document, list of usernames that could not be applied)
    """
    student_list = self.user_manager.get_course_registered_users(course, False)

    # If aggregation is new
    if aggregationid == 'None':
        # Remove _id for correct insertion
        del new_data['_id']
        new_data["courseid"] = course.get_id()

        # Insert the new aggregation
        result = self.database.aggregations.insert_one(new_data)

        # Retrieve new aggregation id
        aggregationid = result.inserted_id
        new_data['_id'] = result.inserted_id
        aggregation = new_data
    else:
        aggregation = self.database.aggregations.find_one({"_id": ObjectId(aggregationid), "courseid": course.get_id()})

    # Check tutors: keep only actual staff members of the course
    new_data["tutors"] = [tutor for tutor in new_data["tutors"] if tutor in course.get_staff()]

    students, groups, errored_students = [], [], []

    # Check the students
    for student in new_data["students"]:
        if student in student_list:
            # Remove user from the other aggregation (both from its groups and its student list)
            self.database.aggregations.find_one_and_update({"courseid": course.get_id(), "groups.students": student},
                                                           {"$pull": {"groups.$.students": student, "students": student}})
            self.database.aggregations.find_one_and_update({"courseid": course.get_id(), "students": student}, {"$pull": {"students": student}})
            students.append(student)
        else:
            # Check if user can be registered
            user_info = self.user_manager.get_user_info(student)
            if user_info is None or student in aggregation["tutors"]:
                errored_students.append(student)
            else:
                students.append(student)

    # Students removed from this aggregation go back to the course's default aggregation
    removed_students = [student for student in aggregation["students"] if student not in new_data["students"]]
    self.database.aggregations.find_one_and_update({"courseid": course.get_id(), "default": True},
                                                   {"$push": {"students": {"$each": removed_students}}})

    new_data["students"] = students

    # Check the groups: drop members not in the aggregation and groups over their size limit
    for group in new_data["groups"]:
        group["students"] = [student for student in group["students"] if student in new_data["students"]]
        if len(group["students"]) <= group["size"]:
            groups.append(group)

    new_data["groups"] = groups

    # Check for default aggregation: only one default per course, so unset the previous one
    if new_data['default']:
        self.database.aggregations.find_one_and_update({"courseid": course.get_id(), "default": True},
                                                       {"$set": {"default": False}})

    aggregation = self.database.aggregations.find_one_and_update(
        {"_id": ObjectId(aggregationid)},
        {"$set": {"description": new_data["description"],
                  "students": students, "tutors": new_data["tutors"],
                  "groups": groups, "default": new_data['default']}}, return_document=ReturnDocument.AFTER)

    return aggregation, errored_students
|
Edit a aggregation
|
def GET_AUTH(self, courseid, aggregationid=''):  # pylint: disable=arguments-differ
    """ Render the aggregation edition page (not available for LTI courses). """
    course = self.get_course_and_check_rights(courseid, allow_all_staff=True)[0]
    if not course.is_lti():
        return self.display_page(course, aggregationid)
    raise web.notfound()
|
Edit a aggregation
|
def POST_AUTH(self, courseid, aggregationid=''): # pylint: disable=arguments-differ
    """ Edit a aggregation.

    Handles deletion of aggregations, upload of a YAML aggregation file, and inline JSON
    edition of the aggregations of a course. Not available for LTI courses.
    """
    course, __ = self.get_course_and_check_rights(courseid, allow_all_staff=True)
    if course.is_lti():
        raise web.notfound()

    msg=''
    error = False
    errored_students = []
    data = web.input(delete=[], tutors=[], groups=[], aggregationfile={})

    if len(data["delete"]):
        # NOTE(review): msg/error are overwritten on each loop iteration, so only the outcome
        # of the last deleted id is reported to the user
        for classid in data["delete"]:
            # Get the aggregation
            aggregation = self.database.aggregations.find_one({"_id": ObjectId(classid), "courseid": courseid}) if ObjectId.is_valid(classid) else None

            if aggregation is None:
                msg = _("Classroom with id {} not found.").format(classid) if course.use_classrooms() else _("Team with id {} not found.").format(classid)
                error = True
            elif aggregation['default'] and aggregationid:
                msg = _("You can't remove your default classroom.")
                error = True
            else:
                # Move the deleted aggregation's students back to the default one
                self.database.aggregations.find_one_and_update({"courseid": courseid, "default": True},
                                                               {"$push": {
                                                                   "students": {"$each": aggregation["students"]}
                                                               }})

                self.database.aggregations.delete_one({"_id": ObjectId(classid)})
                msg = _("Classroom updated.")

        # If the currently edited aggregation was deleted, go back to the list page
        if aggregationid and aggregationid in data["delete"]:
            raise web.seeother(self.app.get_homepath() + "/admin/" + courseid + "/aggregations")

    try:
        if "upload" in data:
            # File upload replaces all the aggregations of the course
            self.database.aggregations.delete_many({"courseid": course.get_id()})
            aggregations = custom_yaml.load(data["aggregationfile"].file)
        else:
            aggregations = json.loads(data["aggregations"])

        for index, new_aggregation in enumerate(aggregations):
            # In case of file upload, no id specified
            new_aggregation['_id'] = new_aggregation['_id'] if '_id' in new_aggregation else 'None'

            # In case of no aggregation usage, set the first entry default
            new_aggregation["default"] = not aggregationid and index == 0

            # If no groups field set, create group from class students if in groups only mode
            if "groups" not in new_aggregation:
                new_aggregation["groups"] = [] if aggregationid else [{'size': len(new_aggregation['students']),
                                                                       'students': new_aggregation['students']}]

            # Update the aggregation
            aggregation, errors = self.update_aggregation(course, new_aggregation['_id'], new_aggregation)

            # If file upload was done, get the default aggregation id
            if course.use_classrooms() and aggregation['default']:
                aggregationid = aggregation['_id']
            errored_students += errors

        if len(errored_students) > 0:
            msg = _("Changes couldn't be applied for following students :") + "<ul>"
            for student in errored_students:
                msg += "<li>" + student + "</li>"
            msg += "</ul>"
            error = True
        elif not error:
            msg = _("Classroom updated.") if course.use_classrooms() else _("Teams updated.")
    except:
        msg = _('An error occurred while parsing the data.')
        error = True

    # Display the page
    return self.display_page(course, aggregationid, msg, error)
|
Parse course registration or course creation and display the course list page
|
def POST_AUTH(self):  # pylint: disable=arguments-differ
    """ Parse course registration or course creation and display the course list page.

    Two actions are supported: registering the current user to an existing course
    ("register_courseid" [+ optional "register_password"]), or, for superadmins only,
    creating a new course ("new_courseid"). The resulting success flag (True/False/None)
    is forwarded to show_page.
    """
    username = self.user_manager.session_username()
    user_info = self.database.users.find_one({"username": username})
    user_input = web.input()
    success = None

    # Handle registration to a course
    if "register_courseid" in user_input and user_input["register_courseid"] != "":
        try:
            course = self.course_factory.get_course(user_input["register_courseid"])
            if not course.is_registration_possible(user_info):
                success = False
            else:
                success = self.user_manager.course_register_user(course, username, user_input.get("register_password", None))
        except Exception:  # unknown course id, broken course description, ...: registration failed
            success = False
    # Handle creation of a new course (superadmin only)
    elif "new_courseid" in user_input and self.user_manager.user_is_superadmin():
        try:
            courseid = user_input["new_courseid"]
            self.course_factory.create_course(courseid, {"name": courseid, "accessible": False})
            success = True
        except Exception:  # invalid or already-existing course id: creation failed
            success = False

    return self.show_page(success)
|
Display main course list page
|
def show_page(self, success):
    """ Display main course list page.

    :param success: True/False/None flag from a previous registration/creation attempt,
                    forwarded to the template
    :return: the rendered "mycourses" template
    """
    username = self.user_manager.session_username()
    user_info = self.database.users.find_one({"username": username})
    all_courses = self.course_factory.get_all_courses()

    # Courses the user is registered to and that are currently open to him, sorted by display name
    open_courses = {courseid: course for courseid, course in all_courses.items()
                    if self.user_manager.course_is_open_to_user(course, username, False) and
                    self.user_manager.course_is_user_registered(course, username)}
    open_courses = OrderedDict(sorted(open_courses.items(), key=lambda x: x[1].get_name(self.user_manager.session_language())))

    # Last submissions of the user within those courses; drop the ones whose task no longer loads
    last_submissions = self.submission_manager.get_user_last_submissions(5, {"courseid": {"$in": list(open_courses.keys())}})
    except_free_last_submissions = []
    for submission in last_submissions:
        try:
            submission["task"] = open_courses[submission['courseid']].get_task(submission['taskid'])
            except_free_last_submissions.append(submission)
        except Exception:  # task was deleted/renamed: simply hide that submission
            pass

    # Courses the user could register to, sorted by display name
    registerable_courses = {courseid: course for courseid, course in all_courses.items() if
                            not self.user_manager.course_is_user_registered(course, username) and
                            course.is_registration_possible(user_info)}
    registerable_courses = OrderedDict(sorted(registerable_courses.items(), key=lambda x: x[1].get_name(self.user_manager.session_language())))

    return self.template_helper.get_renderer().mycourses(open_courses, registerable_courses, except_free_last_submissions, success)
|
Init asyncio and ZMQ. Starts a daemon thread in which the asyncio loops run.
:return: a ZMQ context and a Thread object (as a tuple)
|
def start_asyncio_and_zmq(debug_asyncio=False):
    """ Init asyncio and ZMQ. Starts a daemon thread in which the asyncio loops run.
    :param debug_asyncio: if True, enable asyncio debug mode on the created loop
    :return: a ZMQ context and a Thread object (as a tuple)
    """
    event_loop = ZMQEventLoop()
    asyncio.set_event_loop(event_loop)
    if debug_asyncio:
        event_loop.set_debug(True)

    zmq_context = Context()
    # The loop runs in a daemon thread so it never prevents process shutdown
    thread = threading.Thread(target=_run_asyncio, args=(event_loop, zmq_context), daemon=True)
    thread.start()

    return zmq_context, thread
|
Run asyncio (should be called in a thread) and close the loop and the zmq context when the thread ends
:param loop:
:param zmq_context:
:return:
|
def _run_asyncio(loop, zmq_context):
    """
    Run asyncio (should be called in a thread) and close the loop and the zmq context when the thread ends
    :param loop: the asyncio event loop to run forever in this thread
    :param zmq_context: the ZMQ context to destroy once the loop stops
    :return: None
    """
    try:
        asyncio.set_event_loop(loop)
        loop.run_forever()
    except:
        # Deliberate broad catch: this runs in a daemon thread, any exception simply stops the loop
        pass
    finally:
        loop.close()
        # Give pending ZMQ messages up to 1000 ms to flush before destroying the context
        zmq_context.destroy(1000)
|
Restarts an agent when it is cancelled
|
async def _restart_on_cancel(logger, agent):
    """ Runs the agent forever: each time its task gets cancelled, log it (with traceback) and start it again. """
    while True:
        try:
            await agent.run()
        except asyncio.CancelledError:
            # Any other exception propagates; only cancellation triggers a restart
            logger.exception("Restarting agent")
|
Helper that can start a simple complete INGInious arch locally if needed, or a client to a remote backend.
Intended to be used on command line, makes uses of exit() and the logger inginious.frontend.
:param configuration: configuration dict
:param tasks_fs: FileSystemProvider to the courses/tasks folders
:param context: a ZMQ context
:param course_factory: The course factory to be used by the frontend
:param is_testing: boolean
:return: a Client object
|
def create_arch(configuration, tasks_fs, context, course_factory):
    """ Helper that can start a simple complete INGInious arch locally if needed, or a client to a remote backend.
    Intended to be used on command line, makes uses of exit() and the logger inginious.frontend.

    :param configuration: configuration dict
    :param tasks_fs: FileSystemProvider to the courses/tasks folders
    :param context: a ZMQ context
    :param course_factory: The course factory to be used by the frontend
    :return: a Client object
    """
    logger = logging.getLogger("inginious.frontend")
    backend_link = configuration.get("backend", "local")
    if backend_link == "local":
        logger.info("Starting a simple arch (backend, docker-agent and mcq-agent) locally")

        local_config = configuration.get("local-config", {})
        concurrency = local_config.get("concurrency", multiprocessing.cpu_count())
        debug_host = local_config.get("debug_host", None)
        debug_ports = local_config.get("debug_ports", None)
        tmp_dir = local_config.get("tmp_dir", "./agent_tmp")

        if debug_ports is not None:
            try:
                # The option is a string of the form "begin-end"
                debug_ports = debug_ports.split("-")
                debug_ports = range(int(debug_ports[0]), int(debug_ports[1]))
            except (ValueError, IndexError):
                # Narrowed from a bare except: only malformed values should abort here
                logger.error("debug_ports should be in the format 'begin-end', for example '1000-2000'")
                exit(1)
        else:
            debug_ports = range(64100, 64111)

        client = Client(context, "inproc://backend_client")
        backend = Backend(context, "inproc://backend_agent", "inproc://backend_client")
        agent_docker = DockerAgent(context, "inproc://backend_agent", "Docker - Local agent", concurrency, tasks_fs, debug_host, debug_ports, tmp_dir)
        agent_mcq = MCQAgent(context, "inproc://backend_agent", "MCQ - Local agent", 1, tasks_fs, course_factory)

        # Keep the agents and backend running forever, restarting them on cancellation
        asyncio.ensure_future(_restart_on_cancel(logger, agent_docker))
        asyncio.ensure_future(_restart_on_cancel(logger, agent_mcq))
        asyncio.ensure_future(_restart_on_cancel(logger, backend))
    elif backend_link in ["remote", "remote_manuel", "docker_machine"]:  # old-style config
        logger.error("Value '%s' for the 'backend' option is configuration.yaml is not supported anymore. \n"
                     "Have a look at the 'update' section of the INGInious documentation in order to upgrade your configuration.yaml", backend_link)
        exit(1)
        return None  # ... pycharm returns a warning else :-(
    else:
        logger.info("Creating a client to backend at %s", backend_link)
        client = Client(context, backend_link)

    # check for old-style configuration entries
    old_style_configs = ["agents", 'containers', "machines", "docker_daemons"]
    for c in old_style_configs:
        if c in configuration:
            logger.warning("Option %s in configuration.yaml is not used anymore.\n"
                           "Have a look at the 'update' section of the INGInious documentation in order to upgrade your configuration.yaml", c)

    return client
|
Checks if user is authenticated and calls GET_AUTH or performs logout.
Otherwise, returns the login template.
|
def GET(self, *args, **kwargs):
    """
    Checks if user is authenticated and calls GET_AUTH or performs logout.
    Otherwise, returns the login template.
    """
    if not self.user_manager.session_logged_in():
        # Anonymous user: show the login page
        return self.template_helper.get_renderer().auth(self.user_manager.get_auth_methods(), False)

    # Session without a username: force profile completion first (except on the profile page itself)
    if not self.user_manager.session_username() and not self.__class__.__name__ == "ProfilePage":
        raise web.seeother("/preferences/profile")
    if not self.is_lti_page and self.user_manager.session_lti_info() is not None:  # lti session
        # An LTI session reaching a non-LTI page is logged out and sent to the login page
        self.user_manager.disconnect_user()
        return self.template_helper.get_renderer().auth(self.user_manager.get_auth_methods(), False)
    return self.GET_AUTH(*args, **kwargs)
|
Checks if user is authenticated and calls POST_AUTH or performs login and calls GET_AUTH.
Otherwise, returns the login template.
|
def POST(self, *args, **kwargs):
    """
    Checks if user is authenticated and calls POST_AUTH or performs login and calls GET_AUTH.
    Otherwise, returns the login template.
    """
    if self.user_manager.session_logged_in():
        # Session without a username: force profile completion first (except on the profile page itself)
        if not self.user_manager.session_username() and not self.__class__.__name__ == "ProfilePage":
            raise web.seeother("/preferences/profile")
        if not self.is_lti_page and self.user_manager.session_lti_info() is not None: # lti session
            self.user_manager.disconnect_user()
            # NOTE(review): this branch calls get_auth_methods_fields() while GET and the other
            # branches call get_auth_methods() -- confirm this asymmetry is intentional
            return self.template_helper.get_renderer().auth(self.user_manager.get_auth_methods_fields(), False)
        return self.POST_AUTH(*args, **kwargs)
    else:
        user_input = web.input()
        # Anonymous POST carrying credentials: try to log in, then behave as a GET on success
        if "login" in user_input and "password" in user_input:
            if self.user_manager.auth_user(user_input["login"].strip(), user_input["password"]) is not None:
                return self.GET_AUTH(*args, **kwargs)
            else:
                # Wrong credentials: re-display the login template with the error flag set
                return self.template_helper.get_renderer().auth(self.user_manager.get_auth_methods(), True)
        else:
            return self.template_helper.get_renderer().auth(self.user_manager.get_auth_methods(), False)
|
Translate a /-separated PATH to the local filename syntax.
Components that mean special things to the local file system
(e.g. drive or directory names) are ignored. (XXX They should
probably be diagnosed.)
|
def translate_path(self, path):
    """Translate a /-separated PATH to the local filename syntax.
    Components that mean special things to the local file system
    (e.g. drive or directory names) are ignored. (XXX They should
    probably be diagnosed.)
    """
    # Drop query string and fragment, in that order
    for separator in ('?', '#'):
        path = path.split(separator, 1)[0]
    # Don't forget explicit trailing slash when normalizing. Issue17324
    had_trailing_slash = path.rstrip().endswith('/')
    normalized = posixpath.normpath(urllib.parse.unquote(path))

    result = self.base_path
    for component in normalized.split('/'):
        if not component:
            continue
        # Strip drive letters and extra directory separators the OS might interpret
        component = os.path.splitdrive(component)[1]
        component = os.path.split(component)[1]
        if component not in (os.curdir, os.pardir):
            result = os.path.join(result, component)
    if had_trailing_slash:
        result += '/'
    return result
|
Normalize the path
|
def normpath(self, path):
    """ Normalize the path (unquote + posix normalization), preserving a trailing slash. """
    normalized = posixpath.normpath(urllib.parse.unquote(path))
    return normalized + "/" if path.endswith("/") else normalized
|
Helper for the GET methods of the two following classes
|
def _get_submissions(course_factory, submission_manager, user_manager, translations, courseid, taskid, with_input, submissionid=None):
    """
    Helper for the GET methods of the two following classes.

    Fetches either all submissions of the current user for a task, or one specific
    submission, and serializes them into API-friendly dicts.

    :param course_factory: factory used to load the course
    :param submission_manager: manager used to load submissions and build feedback
    :param user_manager: manager used for rights checks and session info
    :param translations: dict mapping language codes to gettext translations
    :param courseid: id of the course
    :param taskid: id of the task
    :param with_input: if True, include the submission input (files base64-encoded)
    :param submissionid: if given, restrict the output to this single submission
    :return: tuple (200, list of serialized submission dicts)
    :raises APINotFound: unknown course/task/submission
    :raises APIForbidden: user not registered to the course
    """
    try:
        course = course_factory.get_course(courseid)
    except:
        raise APINotFound("Course not found")

    # NOTE(review): unlike other call sites, no username is passed here -- presumably
    # course_is_open_to_user falls back to the session user; verify against its signature
    if not user_manager.course_is_open_to_user(course, lti=False):
        raise APIForbidden("You are not registered to this course")

    try:
        task = course.get_task(taskid)
    except:
        raise APINotFound("Task not found")

    if submissionid is None:
        submissions = submission_manager.get_user_submissions(task)
    else:
        try:
            submissions = [submission_manager.get_submission(submissionid)]
        except:
            raise APINotFound("Submission not found")

        # Ensure the submission actually belongs to the requested course/task
        if submissions[0]["taskid"] != task.get_id() or submissions[0]["courseid"] != course.get_id():
            raise APINotFound("Submission not found")

    output = []

    for submission in submissions:
        # Staff get the full feedback; everyone gets it translated to their session language
        submission = submission_manager.get_feedback_from_submission(
            submission,
            show_everything=user_manager.has_staff_rights_on_course(course, user_manager.session_username()),
            translation=translations.get(user_manager.session_language(), gettext.NullTranslations())
        )

        data = {
            "id": str(submission["_id"]),
            "submitted_on": str(submission["submitted_on"]),
            "status": submission["status"]
        }

        if with_input:
            data["input"] = submission_manager.get_input_from_submission(submission, True)

            # base64 encode file to allow JSON encoding
            for d in data["input"]:
                if isinstance(d, dict) and d.keys() == {"filename", "value"}:
                    d["value"] = base64.b64encode(d["value"]).decode("utf8")

        if submission["status"] == "done":
            data["grade"] = submission.get("grade", 0)
            data["result"] = submission.get("result", "crash")
            data["feedback"] = submission.get("text", "")
            data["problems_feedback"] = submission.get("problems", {})

        output.append(data)

    return 200, output
|
List all the submissions that the connected user made. Returns list of the form
::
[
{
"id": "submission_id1",
"submitted_on": "date",
"status" : "done", #can be "done", "waiting", "error" (execution status of the task).
"grade": 0.0,
"input": {}, #the input data. File are base64 encoded.
"result" : "success" #only if status=done. Result of the execution.
"feedback": "" #only if status=done. the HTML global feedback for the task
"problems_feedback": #only if status=done. HTML feedback per problem. Some pid may be absent.
{
"pid1": "feedback1",
#...
}
}
#...
]
If you use the endpoint /api/v0/courses/the_course_id/tasks/the_task_id/submissions/submissionid,
this dict will contain one entry or the page will return 404 Not Found.
|
def API_GET(self, courseid, taskid, submissionid):  # pylint: disable=arguments-differ
    """
    List all the submissions that the connected user made. Returns list of the form

    ::

        [
            {
                "id": "submission_id1",
                "submitted_on": "date",
                "status" : "done",          #can be "done", "waiting", "error" (execution status of the task).
                "grade": 0.0,
                "input": {},                #the input data. File are base64 encoded.
                "result" : "success"        #only if status=done. Result of the execution.
                "feedback": ""              #only if status=done. the HTML global feedback for the task
                "problems_feedback":        #only if status=done. HTML feedback per problem. Some pid may be absent.
                {
                    "pid1": "feedback1",
                    #...
                }
            }
            #...
        ]

    If you use the endpoint /api/v0/courses/the_course_id/tasks/the_task_id/submissions/submissionid,
    this dict will contain one entry or the page will return 404 Not Found.
    """
    include_input = "input" in web.input()
    return _get_submissions(self.course_factory, self.submission_manager, self.user_manager,
                            self.app._translations, courseid, taskid, include_input, submissionid)
|
Creates a new submissions. Takes as (POST) input the key of the subproblems, with the value assigned each time.
Returns
- an error 400 Bad Request if all the input is not (correctly) given,
- an error 403 Forbidden if you are not allowed to create a new submission for this task
- an error 404 Not found if the course/task id not found
- an error 500 Internal server error if the grader is not available,
- 200 Ok, with {"submissionid": "the submission id"} as output.
|
def API_POST(self, courseid, taskid):  # pylint: disable=arguments-differ
    """
    Creates a new submissions. Takes as (POST) input the key of the subproblems, with the value assigned each time.

    Returns

    - an error 400 Bad Request if all the input is not (correctly) given,
    - an error 403 Forbidden if you are not allowed to create a new submission for this task
    - an error 404 Not found if the course/task id not found
    - an error 500 Internal server error if the grader is not available,
    - 200 Ok, with {"submissionid": "the submission id"} as output.
    """
    try:
        course = self.course_factory.get_course(courseid)
    except:
        raise APINotFound("Course not found")

    username = self.user_manager.session_username()

    if not self.user_manager.course_is_open_to_user(course, username, False):
        raise APIForbidden("You are not registered to this course")

    try:
        task = course.get_task(taskid)
    except:
        raise APINotFound("Task not found")

    self.user_manager.user_saw_task(username, courseid, taskid)

    # Verify rights
    if not self.user_manager.task_can_user_submit(task, username, False):
        raise APIForbidden("You are not allowed to submit for this task")

    # Pre-initialize multi-valued problem inputs so web.input parses them as dict/list
    init_var = {
        problem.get_id(): problem.input_type()()
        for problem in task.get_problems() if problem.input_type() in [dict, list]
    }
    user_input = task.adapt_input_for_backend(web.input(**init_var))

    if not task.input_is_consistent(user_input, self.default_allowed_file_extensions, self.default_max_file_size):
        raise APIInvalidArguments()

    # Get debug info if the current user is an admin
    debug = self.user_manager.has_admin_rights_on_course(course, username)

    # Start the submission
    try:
        submissionid, _ = self.submission_manager.add_job(task, user_input, debug)
        return 200, {"submissionid": str(submissionid)}
    except Exception as ex:
        # Grader unavailable or any other failure while queuing the job
        raise APIError(500, str(ex))
|
GET request
|
def GET_AUTH(self, courseid, taskid):  # pylint: disable=arguments-differ
    """ Handle GET: render the task statistics page after checking admin rights. """
    return self.page(*self.get_course_and_check_rights(courseid, taskid))
|
Get all data and display the page
|
def page(self, course, task):
    """ Get all data and display the page.

    Gathers per-student and per-aggregation (classroom/team) statistics for the given task,
    optionally exports them as CSV (?csv=students or ?csv=aggregations), and renders the
    task info page.
    """
    # Registered users sorted by real name (unknown users last, sorted by empty key)
    user_list = self.user_manager.get_course_registered_users(course, False)
    users = OrderedDict(sorted(list(self.user_manager.get_users_info(user_list).items()),
                               key=lambda k: k[1][0] if k[1] is not None else ""))

    individual_results = list(self.database.user_tasks.find({"courseid": course.get_id(), "taskid": task.get_id(),
                                                             "username": {"$in": user_list}}))

    # One entry per registered user, defaulting to "notviewed" until results say otherwise
    individual_data = OrderedDict([(username, {"username": username, "realname": user[0] if user is not None else "",
                                               "email": user[1] if user is not None else "",
                                               "url": self.individual_submission_url_generator(task, username),
                                               "tried": 0, "grade": 0, "status": "notviewed"})
                                   for username, user in users.items()])

    for user in individual_results:
        individual_data[user["username"]]["tried"] = user["tried"]
        if user["tried"] == 0:
            individual_data[user["username"]]["status"] = "notattempted"
        elif user["succeeded"]:
            individual_data[user["username"]]["status"] = "succeeded"
        else:
            individual_data[user["username"]]["status"] = "failed"
        individual_data[user["username"]]["grade"] = user["grade"]

    # Aggregated statistics per classroom/team: best grade, attempts, success
    aggregation_data = OrderedDict()
    for aggregation in self.user_manager.get_course_aggregations(course):
        aggregation_data[aggregation['_id']] = {"_id": aggregation['_id'], "description": aggregation['description'],
                                                "url": self.aggregation_submission_url_generator(task, aggregation),
                                                "tried": 0, "grade": 0, "status": "notviewed",
                                                "tutors": aggregation["tutors"], "groups": aggregation["groups"]}

        aggregation_results = list(self.database.submissions.aggregate(
            [
                {
                    "$match":
                        {
                            "courseid": course.get_id(),
                            "taskid": task.get_id(),
                            "username": {"$in": aggregation["students"]}
                        }
                },
                {
                    "$group":
                        {
                            "_id": "$taskid",
                            "tried": {"$sum": 1},
                            "succeeded": {"$sum": {"$cond": [{"$eq": ["$result", "success"]}, 1, 0]}},
                            "grade": {"$max": "$grade"}
                        }
                }
            ]))

        for g in aggregation_results:
            aggregation_data[aggregation['_id']]["tried"] = g["tried"]
            if g["tried"] == 0:
                aggregation_data[aggregation['_id']]["status"] = "notattempted"
            elif g["succeeded"]:
                aggregation_data[aggregation['_id']]["status"] = "succeeded"
            else:
                aggregation_data[aggregation['_id']]["status"] = "failed"
            aggregation_data[aggregation['_id']]["grade"] = g["grade"]

    # Split aggregations between those tutored by the current user and the others
    my_aggregations, other_aggregations = [], []
    for aggregation in aggregation_data.values():
        if self.user_manager.session_username() in aggregation["tutors"]:
            my_aggregations.append(aggregation)
        else:
            other_aggregations.append(aggregation)

    # CSV export shortcuts
    if "csv" in web.input() and web.input()["csv"] == "students":
        return make_csv(list(individual_data.values()))
    elif "csv" in web.input() and web.input()["csv"] == "aggregations":
        return make_csv(list(aggregation_data.values()))

    return self.template_helper.get_renderer().course_admin.task_info(course, task, individual_data.values(), [my_aggregations, other_aggregations])
|
Returns true if users can register for this course
|
def is_registration_possible(self, user_info):
    """ Returns true if users can register for this course """
    # The course must be open, registrations must be open, and the user must pass the ACL
    if not self.get_accessibility().is_open():
        return False
    if not self._registration.is_open():
        return False
    return self.is_user_accepted_by_access_control(user_info)
|
Return the AccessibleTime object associated with the accessibility of this course
|
def get_accessibility(self, plugin_override=True):
    """ Return the AccessibleTime object associated with the accessibility of this course """
    # Plugins may override the accessibility via the 'course_accessibility' hook
    hook_values = self._hook_manager.call_hook('course_accessibility', course=self, default=self._accessible)
    if plugin_override and len(hook_values):
        return hook_values[0]
    return self._accessible
|
Returns True if the user is allowed by the ACL
|
def is_user_accepted_by_access_control(self, user_info):
    """ Returns True if the user is allowed by the ACL """
    acl_method = self.get_access_control_method()
    # No ACL configured: everyone is accepted
    if acl_method is None:
        return True
    # No user info available: reject
    if not user_info:
        return False
    if acl_method == "username":
        return user_info["username"] in self.get_access_control_list()
    if acl_method == "email":
        return user_info["email"] in self.get_access_control_list()
    if acl_method == "binding":
        # Truthy (non-empty) intersection means at least one accepted auth binding
        return set(user_info["bindings"].keys()).intersection(set(self.get_access_control_list()))
    return False
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.