DELETE request
def DELETE(self, *args, **kwargs):
    """ DELETE request """
    return self._handle_api(self.API_DELETE, args, kwargs)
PATCH request
def PATCH(self, *args, **kwargs):
    """ PATCH request """
    return self._handle_api(self.API_PATCH, args, kwargs)
HEAD request
def HEAD(self, *args, **kwargs):
    """ HEAD request """
    return self._handle_api(self.API_HEAD, args, kwargs)
OPTIONS request
def OPTIONS(self, *args, **kwargs):
    """ OPTIONS request """
    return self._handle_api(self.API_OPTIONS, args, kwargs)
Handle a call to a subclass and convert the output to an appropriate value
def _handle_api(self, handler, handler_args, handler_kwargs):
    """ Handle a call to a subclass and convert the output to an appropriate value """
    try:
        status_code, return_value = handler(*handler_args, **handler_kwargs)
    except APIError as error:
        return error.send()
    web.ctx.status = _convert_http_status(status_code)
    return _api_convert_output(return_value)
Guess the methods implemented by the subclass
def _guess_available_methods(self):
    """ Guess the methods implemented by the subclass """
    available_methods = []
    for m in ["GET", "POST", "PUT", "DELETE", "PATCH", "HEAD", "OPTIONS"]:
        self_method = getattr(type(self), "API_{}".format(m))
        super_method = getattr(APIPage, "API_{}".format(m))
        if self_method != super_method:
            available_methods.append(m)
    return available_methods
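For illustration, a minimal sketch of a subclass (the name HelloAPI is hypothetical; the APIPage base class and its helpers are assumed from the code above). Only API_GET is overridden, so _guess_available_methods() would return ["GET"]:

class HelloAPI(APIPage):
    """ Hypothetical endpoint that only implements GET """
    def API_GET(self, *args, **kwargs):  # overrides APIPage.API_GET
        # returning (status_code, value) matches what _handle_api expects
        return 200, {"message": "hello"}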
Verify that the user is authenticated
def _verify_authentication(self, handler, args, kwargs):
    """ Verify that the user is authenticated """
    if not self.user_manager.session_logged_in():
        raise APIForbidden()
    return handler(*args, **kwargs)
Send the API Exception to the client
def send(self):
    """ Send the API Exception to the client """
    web.ctx.status = _convert_http_status(self.status_code)
    return _api_convert_output(self.return_value)
Callback called by Client when a job is done. Updates the submission in the database with the data returned after the completion of the job
def _job_done_callback(self, submissionid, task, result, grade, problems, tests, custom, state, archive, stdout, stderr, newsub=True):
    """ Callback called by Client when a job is done. Updates the submission in the database with the data returned after the completion of the job """
    submission = self.get_submission(submissionid, False)
    submission = self.get_input_from_submission(submission)

    data = {
        "status": ("done" if result[0] == "success" or result[0] == "failed" else "error"),  # error only if error was made by INGInious
        "result": result[0],
        "grade": grade,
        "text": result[1],
        "tests": tests,
        "problems": problems,
        "archive": (self._gridfs.put(archive) if archive is not None else None),
        "custom": custom,
        "state": state,
        "stdout": stdout,
        "stderr": stderr
    }

    unset_obj = {
        "jobid": "",
        "ssh_host": "",
        "ssh_port": "",
        "ssh_password": ""
    }

    # Save submission to database
    submission = self._database.submissions.find_one_and_update(
        {"_id": submission["_id"]},
        {"$set": data, "$unset": unset_obj},
        return_document=ReturnDocument.AFTER
    )

    self._hook_manager.call_hook("submission_done", submission=submission, archive=archive, newsub=newsub)

    for username in submission["username"]:
        self._user_manager.update_user_stats(username, task, submission, result[0], grade, state, newsub)

    if "outcome_service_url" in submission and "outcome_result_id" in submission and "outcome_consumer_key" in submission:
        for username in submission["username"]:
            self._lti_outcome_manager.add(username,
                                          submission["courseid"],
                                          submission["taskid"],
                                          submission["outcome_consumer_key"],
                                          submission["outcome_service_url"],
                                          submission["outcome_result_id"])
Called before any new submission is inserted into the database. Allows you to modify obj, the new document that will be inserted into the database. Should be overridden in subclasses.

:param task: Task related to the submission
:param inputdata: input of the student
:param debug: True, False or "ssh". See add_job.
:param obj: the new document that will be inserted
def _before_submission_insertion(self, task, inputdata, debug, obj):
    """
    Called before any new submission is inserted into the database. Allows you to modify obj, the new document that will be inserted
    into the database. Should be overridden in subclasses.

    :param task: Task related to the submission
    :param inputdata: input of the student
    :param debug: True, False or "ssh". See add_job.
    :param obj: the new document that will be inserted
    """
    username = self._user_manager.session_username()

    if task.is_group_task() and not self._user_manager.has_staff_rights_on_course(task.get_course(), username):
        group = self._database.aggregations.find_one(
            {"courseid": task.get_course_id(), "groups.students": username},
            {"groups": {"$elemMatch": {"students": username}}})
        obj.update({"username": group["groups"][0]["students"]})
    else:
        obj.update({"username": [username]})

    lti_info = self._user_manager.session_lti_info()
    if lti_info is not None and task.get_course().lti_send_back_grade():
        outcome_service_url = lti_info["outcome_service_url"]
        outcome_result_id = lti_info["outcome_result_id"]
        outcome_consumer_key = lti_info["consumer_key"]

        # safety check
        if outcome_result_id is None or outcome_service_url is None:
            self._logger.error("outcome_result_id or outcome_service_url is None, but grade needs to be sent back to TC! Ignoring.")
            return

        obj.update({"outcome_service_url": outcome_service_url,
                    "outcome_result_id": outcome_result_id,
                    "outcome_consumer_key": outcome_consumer_key})
Called after any new submission is inserted into the database, but before starting the job. Should be overridden in subclasses.

:param task: Task related to the submission
:param inputdata: input of the student
:param debug: True, False or "ssh". See add_job.
:param submission: the new document that was inserted (does not contain _id)
:param submissionid: submission id of the submission
def _after_submission_insertion(self, task, inputdata, debug, submission, submissionid):
    """
    Called after any new submission is inserted into the database, but before starting the job. Should be overridden in subclasses.

    :param task: Task related to the submission
    :param inputdata: input of the student
    :param debug: True, False or "ssh". See add_job.
    :param submission: the new document that was inserted (does not contain _id)
    :param submissionid: submission id of the submission
    """
    # If we are submitting for a group, send the group (user list joined with ",") as username
    if "group" not in [p.get_id() for p in task.get_problems()]:  # do not overwrite
        username = self._user_manager.session_username()
        if task.is_group_task() and not self._user_manager.has_staff_rights_on_course(task.get_course(), username):
            group = self._database.aggregations.find_one(
                {"courseid": task.get_course_id(), "groups.students": username},
                {"groups": {"$elemMatch": {"students": username}}})
            inputdata["username"] = ','.join(group["groups"][0]["students"])

    return self._delete_exceeding_submissions(self._user_manager.session_username(), task)
Replay a submission: add the same job in the queue, keeping submission id, submission date and input data.

:param submission: Submission to replay
:param copy: If copy is true, the submission will be copied to admin submissions before replay
:param debug: If debug is true, more debug data will be saved
def replay_job(self, task, submission, copy=False, debug=False):
    """
    Replay a submission: add the same job in the queue, keeping submission id, submission date and input data.

    :param submission: Submission to replay
    :param copy: If copy is true, the submission will be copied to admin submissions before replay
    :param debug: If debug is true, more debug data will be saved
    """
    if not self._user_manager.session_logged_in():
        raise Exception("A user must be logged in to submit an object")

    # Don't enable ssh debug
    ssh_callback = lambda host, port, password: self._handle_ssh_callback(submission["_id"], host, port, password)

    # Load input data and add username to dict
    inputdata = bson.BSON.decode(self._gridfs.get(submission["input"]).read())

    if not copy:
        submissionid = submission["_id"]
        username = submission["username"][0]  # TODO: this may be inconsistent with add_job

        # Remove the submission archive: it will be regenerated
        if submission.get("archive", None) is not None:
            self._gridfs.delete(submission["archive"])
    else:
        del submission["_id"]
        username = self._user_manager.session_username()
        submission["username"] = [username]
        submission["submitted_on"] = datetime.now()
        inputdata["@username"] = username
        inputdata["@lang"] = self._user_manager.session_language()
        submission["input"] = self._gridfs.put(bson.BSON.encode(inputdata))
        submission["tests"] = {}  # Be sure tags are reinitialized
        submissionid = self._database.submissions.insert(submission)

    jobid = self._client.new_job(task, inputdata,
                                 (lambda result, grade, problems, tests, custom, state, archive, stdout, stderr:
                                  self._job_done_callback(submissionid, task, result, grade, problems, tests, custom,
                                                          state, archive, stdout, stderr, copy)),
                                 "Frontend - {}".format(submission["username"]), debug, ssh_callback)

    # Clean the submission document in db
    self._database.submissions.update(
        {"_id": submission["_id"]},
        {"$set": {"jobid": jobid, "status": "waiting", "response_type": task.get_response_type()},
         "$unset": {"result": "", "grade": "", "text": "", "tests": "", "problems": "", "archive": "", "state": "", "custom": ""}
         })

    if not copy:
        self._logger.info("Replaying submission %s - %s - %s - %s", submission["username"], submission["courseid"],
                          submission["taskid"], submission["_id"])
    else:
        self._logger.info("Copying submission %s - %s - %s - %s as %s", submission["username"], submission["courseid"],
                          submission["taskid"], submission["_id"], self._user_manager.session_username())
Get a submission from the database
def get_submission(self, submissionid, user_check=True):
    """ Get a submission from the database """
    sub = self._database.submissions.find_one({'_id': ObjectId(submissionid)})
    if user_check and not self.user_is_submission_owner(sub):
        return None
    return sub
Add a job to the queue and return a submission id.

:param task: Task instance
:type task: inginious.frontend.tasks.WebAppTask
:param inputdata: the input as a dictionary
:type inputdata: dict
:param debug: If debug is true, more debug data will be saved
:type debug: bool or string
:returns: the new submission id and the removed submission id
def add_job(self, task, inputdata, debug=False):
    """
    Add a job to the queue and return a submission id.

    :param task: Task instance
    :type task: inginious.frontend.tasks.WebAppTask
    :param inputdata: the input as a dictionary
    :type inputdata: dict
    :param debug: If debug is true, more debug data will be saved
    :type debug: bool or string
    :returns: the new submission id and the removed submission id
    """
    if not self._user_manager.session_logged_in():
        raise Exception("A user must be logged in to submit an object")

    username = self._user_manager.session_username()

    # Prevent student from submitting several submissions together
    waiting_submission = self._database.submissions.find_one({
        "courseid": task.get_course_id(),
        "taskid": task.get_id(),
        "username": username,
        "status": "waiting"})
    if waiting_submission is not None:
        raise Exception("A submission is already pending for this task!")

    obj = {
        "courseid": task.get_course_id(),
        "taskid": task.get_id(),
        "status": "waiting",
        "submitted_on": datetime.now(),
        "username": [username],
        "response_type": task.get_response_type()
    }

    # Send additional data to the client in inputdata. For now, the username and the language.
    # New fields can be added with the new_submission hook
    inputdata["@username"] = username
    inputdata["@lang"] = self._user_manager.session_language()

    # Retrieve input random
    states = self._database.user_tasks.find_one(
        {"courseid": task.get_course_id(), "taskid": task.get_id(), "username": username},
        {"random": 1, "state": 1})
    inputdata["@random"] = states["random"] if "random" in states else []
    inputdata["@state"] = states["state"] if "state" in states else ""

    self._hook_manager.call_hook("new_submission", submission=obj, inputdata=inputdata)

    obj["input"] = self._gridfs.put(bson.BSON.encode(inputdata))

    self._before_submission_insertion(task, inputdata, debug, obj)
    submissionid = self._database.submissions.insert(obj)
    to_remove = self._after_submission_insertion(task, inputdata, debug, obj, submissionid)

    ssh_callback = lambda host, port, password: self._handle_ssh_callback(submissionid, host, port, password)

    jobid = self._client.new_job(task, inputdata,
                                 (lambda result, grade, problems, tests, custom, state, archive, stdout, stderr:
                                  self._job_done_callback(submissionid, task, result, grade, problems, tests, custom,
                                                          state, archive, stdout, stderr, True)),
                                 "Frontend - {}".format(username), debug, ssh_callback)

    self._database.submissions.update(
        {"_id": submissionid, "status": "waiting"},
        {"$set": {"jobid": jobid}}
    )

    self._logger.info("New submission from %s - %s - %s/%s - %s",
                      self._user_manager.session_username(), self._user_manager.session_email(),
                      task.get_course_id(), task.get_id(), web.ctx['ip'])

    return submissionid, to_remove
Deletes exceeding submissions from the database, to keep the database relatively small
def _delete_exceeding_submissions(self, username, task, max_submissions_bound=-1):
    """ Deletes exceeding submissions from the database, to keep the database relatively small """
    if max_submissions_bound <= 0:
        max_submissions = task.get_stored_submissions()
    elif task.get_stored_submissions() <= 0:
        max_submissions = max_submissions_bound
    else:
        max_submissions = min(max_submissions_bound, task.get_stored_submissions())

    if max_submissions <= 0:
        return []

    tasks = list(self._database.submissions.find(
        {"username": username, "courseid": task.get_course_id(), "taskid": task.get_id()},
        projection=["_id", "status", "result", "grade", "submitted_on"],
        sort=[('submitted_on', pymongo.ASCENDING)]))

    # List the entries to keep
    to_keep = set()

    if task.get_evaluate() == 'best':
        # Find the best submission: the "done" submission with the highest grade
        idx_best = -1
        for idx, val in enumerate(tasks):
            if val["status"] == "done":
                if idx_best == -1 or tasks[idx_best]["grade"] < val["grade"]:
                    idx_best = idx

        # Always keep the best submission
        if idx_best != -1:
            to_keep.add(tasks[idx_best]["_id"])
    elif task.get_evaluate() == 'student':
        user_task = self._database.user_tasks.find_one({
            "courseid": task.get_course_id(),
            "taskid": task.get_id(),
            "username": username
        })

        submissionid = user_task.get('submissionid', None)
        if submissionid:
            to_keep.add(submissionid)

    # Always keep running submissions
    for val in tasks:
        if val["status"] == "waiting":
            to_keep.add(val["_id"])

    # Fill the remaining slots with the most recent submissions
    while len(to_keep) < max_submissions and len(tasks) > 0:
        to_keep.add(tasks.pop()["_id"])

    to_delete = {val["_id"] for val in tasks}.difference(to_keep)
    self._database.submissions.delete_many({"_id": {"$in": list(to_delete)}})

    return list(map(str, to_delete))
Get the input of a submission. If only_input is False, returns the full submission with a dictionary object at the key "input". Else, returns only the dictionary.
def get_input_from_submission(self, submission, only_input=False):
    """
    Get the input of a submission. If only_input is False, returns the full submission with a dictionary object at the key "input".
    Else, returns only the dictionary.
    """
    inp = bson.BSON.decode(self._gridfs.get(submission['input']).read())
    if only_input:
        return inp
    else:
        submission["input"] = inp
        return submission
Get the feedback of a submission. If only_feedback is True, returns only the feedback (the "text" and "problems" fields). Else, returns the full submission with the feedback parsed. If show_everything is True, feedback normally hidden is shown.
def get_feedback_from_submission(self, submission, only_feedback=False, show_everything=False, translation=gettext.NullTranslations()):
    """
    Get the feedback of a submission. If only_feedback is True, returns only the feedback (the "text" and "problems" fields).
    Else, returns the full submission with the feedback parsed.
    If show_everything is True, feedback normally hidden is shown.
    """
    if only_feedback:
        submission = {"text": submission.get("text", None),
                      "problems": dict(submission.get("problems", {})),
                      # keep response_type so that the feedback below can still be parsed
                      "response_type": submission.get("response_type")}
    if "text" in submission:
        submission["text"] = ParsableText(submission["text"], submission["response_type"], show_everything, translation).parse()
    if "problems" in submission:
        for problem in submission["problems"]:
            if isinstance(submission["problems"][problem], str):  # fallback for old-style submissions
                submission["problems"][problem] = (submission.get('result', 'crash'),
                                                   ParsableText(submission["problems"][problem],
                                                                submission["response_type"],
                                                                show_everything, translation).parse())
            else:  # new-style submission
                submission["problems"][problem] = (submission["problems"][problem][0],
                                                   ParsableText(submission["problems"][problem][1],
                                                                submission["response_type"],
                                                                show_everything, translation).parse())
    return submission
Tells if a submission is running/in queue
def is_running(self, submissionid, user_check=True):
    """ Tells if a submission is running/in queue """
    submission = self.get_submission(submissionid, user_check)
    return submission["status"] == "waiting"
Tells if a submission is done and its result is available
def is_done(self, submissionid_or_submission, user_check=True):
    """ Tells if a submission is done and its result is available """
    # TODO: not a very nice way to avoid too many database calls. Should be refactored.
    if isinstance(submissionid_or_submission, dict):
        submission = submissionid_or_submission
    else:
        submission = self.get_submission(submissionid_or_submission, False)
    if user_check and not self.user_is_submission_owner(submission):
        return None
    return submission["status"] == "done" or submission["status"] == "error"
Attempt to kill the remote job associated with this submission id.

:param submissionid: the submission id
:param user_check: Check if the current user owns this submission
:return: True if the job was killed, False if an error occurred
def kill_running_submission(self, submissionid, user_check=True):
    """
    Attempt to kill the remote job associated with this submission id.

    :param submissionid: the submission id
    :param user_check: Check if the current user owns this submission
    :return: True if the job was killed, False if an error occurred
    """
    submission = self.get_submission(submissionid, user_check)
    if not submission:
        return False
    if "jobid" not in submission:
        return False
    return self._client.kill_job(submission["jobid"])
Returns true if the current user is the owner of this submission, false otherwise
def user_is_submission_owner(self, submission):
    """ Returns true if the current user is the owner of this submission, false otherwise """
    if not self._user_manager.session_logged_in():
        raise Exception("A user must be logged in to verify if he owns a submission")
    return self._user_manager.session_username() in submission["username"]
Get all the user's submissions for a given task
def get_user_submissions(self, task):
    """ Get all the user's submissions for a given task """
    if not self._user_manager.session_logged_in():
        raise Exception("A user must be logged in to get his submissions")
    cursor = self._database.submissions.find({"username": self._user_manager.session_username(),
                                              "taskid": task.get_id(), "courseid": task.get_course_id()})
    cursor.sort([("submitted_on", -1)])
    return list(cursor)
Get last submissions of a user
def get_user_last_submissions(self, limit=5, request=None):
    """ Get last submissions of a user """
    if request is None:
        request = {}
    request.update({"username": self._user_manager.session_username()})

    # Before, submissions were first sorted by submission date, then grouped
    # and then resorted by submission date before limiting. Actually, grouping
    # and pushing, keeping the max date, followed by result filtering is much more
    # efficient
    data = self._database.submissions.aggregate([
        {"$match": request},
        {"$group": {"_id": {"courseid": "$courseid", "taskid": "$taskid"},
                    "submitted_on": {"$max": "$submitted_on"},
                    "submissions": {"$push": {
                        "_id": "$_id",
                        "result": "$result",
                        "status": "$status",
                        "courseid": "$courseid",
                        "taskid": "$taskid",
                        "submitted_on": "$submitted_on"
                    }},
                    }},
        {"$project": {
            "submitted_on": 1,
            "submissions": {
                # This could be replaced by $filter if mongo v3.2 is set as dependency
                "$setDifference": [
                    {"$map": {
                        "input": "$submissions",
                        "as": "submission",
                        "in": {
                            "$cond": [{"$eq": ["$submitted_on", "$$submission.submitted_on"]}, "$$submission", False]
                        }
                    }},
                    [False]
                ]
            }
        }},
        {"$sort": {"submitted_on": pymongo.DESCENDING}},
        {"$limit": limit}
    ])

    return [item["submissions"][0] for item in data]
:param submissions: a list of submissions
:param sub_folders: possible values:
    []: put all submissions in /
    ['taskid']: put all submissions for each task in a different directory /taskid/
    ['username']: put all submissions for each user in a different directory /username/
    ['taskid','username']: /taskid/username/
    ['username','taskid']: /username/taskid/
:return: a file-like object containing a tgz archive of all the submissions
def get_submission_archive(self, submissions, sub_folders, aggregations, archive_file=None):
    """
    :param submissions: a list of submissions
    :param sub_folders: possible values:
        []: put all submissions in /
        ['taskid']: put all submissions for each task in a different directory /taskid/
        ['username']: put all submissions for each user in a different directory /username/
        ['taskid','username']: /taskid/username/
        ['username','taskid']: /username/taskid/
    :return: a file-like object containing a tgz archive of all the submissions
    """
    tmpfile = archive_file if archive_file is not None else tempfile.TemporaryFile()
    tar = tarfile.open(fileobj=tmpfile, mode='w:gz')

    for submission in submissions:
        submission = self.get_input_from_submission(submission)

        submission_yaml = io.BytesIO(inginious.common.custom_yaml.dump(submission).encode('utf-8'))

        # Considering multiple single submissions for each user
        for username in submission["username"]:
            # Compute base path in the tar file
            base_path = "/"
            for sub_folder in sub_folders:
                if sub_folder == 'taskid':
                    base_path = submission['taskid'] + base_path
                elif sub_folder == 'username':
                    base_path = '_' + '-'.join(submission['username']) + base_path
                    base_path = base_path[1:]
                elif sub_folder == 'aggregation':
                    if username in aggregations:
                        if aggregations[username] is None:
                            # If classrooms are not used, and user is not grouped, his classroom is replaced by None
                            base_path = '_' + '-'.join(submission['username']) + base_path
                            base_path = base_path[1:]
                        else:
                            base_path = (aggregations[username]["description"] +
                                         " (" + str(aggregations[username]["_id"]) + ")").replace(" ", "_") + base_path
                base_path = '/' + base_path
            base_path = base_path[1:]

            submission_yaml_fname = base_path + str(submission["_id"]) + '/submission.test'

            # Avoid putting the same submission twice in the same place
            if submission_yaml_fname not in tar.getnames():
                info = tarfile.TarInfo(name=submission_yaml_fname)
                info.size = submission_yaml.getbuffer().nbytes
                info.mtime = time.mktime(submission["submitted_on"].timetuple())

                # Add file in tar archive
                tar.addfile(info, fileobj=submission_yaml)

                # If there is an archive, add it too
                if 'archive' in submission and submission['archive'] is not None and submission['archive'] != "":
                    subfile = self._gridfs.get(submission['archive'])
                    subtar = tarfile.open(fileobj=subfile, mode="r:gz")

                    for member in subtar.getmembers():
                        subtarfile = subtar.extractfile(member)
                        member.name = base_path + str(submission["_id"]) + "/archive/" + member.name
                        tar.addfile(member, subtarfile)

                    subtar.close()
                    subfile.close()

                # If there are files that were uploaded by the student, add them
                if submission['input'] is not None:
                    for pid, problem in submission['input'].items():
                        # If problem is a dict, it is a file (from the specification of the problems)
                        if isinstance(problem, dict):
                            # Get the extension (match extensions with more than one dot too)
                            DOUBLE_EXTENSIONS = ['.tar.gz', '.tar.bz2', '.tar.bz', '.tar.xz']
                            ext = ""
                            if not problem['filename'].endswith(tuple(DOUBLE_EXTENSIONS)):
                                _, ext = os.path.splitext(problem['filename'])
                            else:
                                for t_ext in DOUBLE_EXTENSIONS:
                                    if problem['filename'].endswith(t_ext):
                                        ext = t_ext

                            subfile = io.BytesIO(problem['value'])
                            taskfname = base_path + str(submission["_id"]) + '/uploaded_files/' + pid + ext

                            # Generate file info
                            info = tarfile.TarInfo(name=taskfname)
                            info.size = subfile.getbuffer().nbytes
                            info.mtime = time.mktime(submission["submitted_on"].timetuple())

                            # Add file in tar archive
                            tar.addfile(info, fileobj=subfile)

    # Close tarfile and put tempfile cursor at 0
    tar.close()
    tmpfile.seek(0)
    return tmpfile
Handles the creation of a remote ssh server
def _handle_ssh_callback(self, submission_id, host, port, password):
    """ Handles the creation of a remote ssh server """
    if host is not None:  # ignore late calls (a bit hacky, but...)
        obj = {
            "ssh_host": host,
            "ssh_port": port,
            "ssh_password": password
        }
        self._database.submissions.update_one({"_id": submission_id}, {"$set": obj})
Returns a dictionary of {"fs_name": fs_class}, for each usable FileSystemProvider
def get_filesystems_providers():
    """ Returns a dictionary of {"fs_name": fs_class}, for each usable FileSystemProvider """
    providers = {"local": LocalFSProvider}
    plugged_providers = pkg_resources.iter_entry_points("inginious.filesystems")
    for pp in plugged_providers:
        providers[pp.name] = pp.load()
    return providers
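A third-party provider registers itself on the "inginious.filesystems" entry point group. A minimal sketch of what such a registration might look like in a plugin's setup.py (the package, module and class names below are hypothetical):

from setuptools import setup

setup(
    name="inginious-fs-s3",        # hypothetical plugin package
    version="0.1",
    py_modules=["s3_fs"],
    entry_points={
        "inginious.filesystems": [
            # get_filesystems_providers() would then expose it as providers["s3"]
            "s3 = s3_fs:S3FSProvider",
        ],
    },
)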
Given a dict containing an entry "module" which contains a FSProvider identifier, parses the configuration and returns a fs_provider. Exits if there is an error.
def filesystem_from_config_dict(config_fs):
    """ Given a dict containing an entry "module" which contains a FSProvider identifier, parses the configuration and returns a fs_provider.
        Exits if there is an error.
    """
    if "module" not in config_fs:
        print("Key 'module' should be defined for the filesystem provider ('fs' configuration option)", file=sys.stderr)
        exit(1)

    filesystem_providers = get_filesystems_providers()
    if config_fs["module"] not in filesystem_providers:
        print("Unknown filesystem provider " + config_fs["module"], file=sys.stderr)
        exit(1)

    fs_class = filesystem_providers[config_fs["module"]]
    fs_args_needed = fs_class.get_needed_args()

    fs_args = {}
    for arg_name, (arg_type, arg_required, _) in fs_args_needed.items():
        if arg_name in config_fs:
            fs_args[arg_name] = arg_type(config_fs[arg_name])
        elif arg_required:
            print("fs option {} is required".format(arg_name), file=sys.stderr)
            exit(1)

    try:
        return fs_class.init_from_args(**fs_args)
    except:
        print("Unable to load class " + config_fs["module"], file=sys.stderr)
        raise
Given a partially configured argparse parser, containing all the wanted data BUT the filesystem, this function will configure the parser to get the correct FS from the commandline, and return a tuple (args, filesystem_provider).
def get_args_and_filesystem(parser):
    """ Given a partially configured argparse parser, containing all the wanted data BUT the filesystem, this function will configure
        the parser to get the correct FS from the commandline, and return a tuple (args, filesystem_provider).
    """
    filesystem_providers = get_filesystems_providers()
    fs_group = parser.add_mutually_exclusive_group()
    fs_group.add_argument("--tasks", help="Path to the task directory. "
                                          "By default, it is ./tasks. You must ensure that this directory is synchronized at any time "
                                          "with the backend and the client. Either this option or --fs must be indicated, but not both.",
                          type=str, default="./tasks")
    fs_group.add_argument("--fs", help="(advanced users only) Name of the FSProvider to use. Using a FSProvider will add new args to be filled. "
                                       "Either --fs or --tasks must be filled, but not both.",
                          type=str, choices=filesystem_providers.keys())
    parser.add_argument("--fs-help", help="Display help to fill arguments for --fs. Only checked if --fs is filled.", action="store_true")

    # Partial parsing of the args, to get the value of --fs
    args = parser.parse_known_args()[0]

    # check fs
    if args.fs is not None:
        fs_class = filesystem_providers[args.fs]
        fs_args_needed = fs_class.get_needed_args()
        fs_args_group = parser.add_argument_group("FSProvider arguments")
        for arg_name, (arg_type, arg_required, arg_desc) in fs_args_needed.items():
            fs_args_group.add_argument("--fs-" + arg_name, type=arg_type, help=arg_desc, required=arg_required)

        if args.fs_help:
            parser.parse_args(["--help"])

        args = parser.parse_args()

        returned_args = {}
        for arg_name in fs_args_needed:
            val = getattr(args, ("fs-" + arg_name).replace("-", "_"))
            if val is not None:
                returned_args[arg_name] = val

        try:
            fsprovider = fs_class.init_from_args(**returned_args)
        except:
            print("Unable to load class " + args.fs, file=sys.stderr)
            raise
    else:
        # Verify that everything can finally be parsed
        args = parser.parse_args()
        fsprovider = LocalFSProvider(args.tasks)

    return args, fsprovider
Close all the running tasks watching for a container timeout. All references to containers are removed: any attempt to call was_killed after a call to clean() will return None.
async def clean(self):
    """ Close all the running tasks watching for a container timeout. All references to containers are removed:
        any attempt to call was_killed after a call to clean() will return None.
    """
    for x in self._running_asyncio_tasks:
        x.cancel()
    self._container_had_error = set()
    self._watching = set()
    self._running_asyncio_tasks = set()
This method has to be called *once, and only once* for each container registered in `register_container`.

:param container_id: the container id to check
:return: a string containing "timeout" if the container was killed. None if it was not (std format for container errors)
async def was_killed(self, container_id):
    """
    This method has to be called *once, and only once* for each container registered in `register_container`.

    :param container_id: the container id to check
    :return: a string containing "timeout" if the container was killed. None if it was not (std format for container errors)
    """
    if container_id in self._watching:
        self._watching.remove(container_id)
    if container_id in self._container_had_error:
        self._container_had_error.remove(container_id)
        return "timeout"
    return None
Check timeout with docker stats

:param container_id: the container id to watch
:param timeout: in seconds (cpu time)
async def _handle_container_timeout(self, container_id, timeout):
    """
    Check timeout with docker stats

    :param container_id: the container id to watch
    :param timeout: in seconds (cpu time)
    """
    try:
        docker_stats = await self._docker_interface.get_stats(container_id)
        source = AsyncIteratorWrapper(docker_stats)
        nano_timeout = timeout * (10 ** 9)
        async for upd in source:
            if upd is None:
                await self._kill_it_with_fire(container_id)
                return  # the stats stream ended; stop here instead of reading from a None update
            self._logger.debug("%i", upd['cpu_stats']['cpu_usage']['total_usage'])
            if upd['cpu_stats']['cpu_usage']['total_usage'] > nano_timeout:
                self._logger.info("Killing container %s as it used %i CPU seconds (max was %i)",
                                  container_id, int(upd['cpu_stats']['cpu_usage']['total_usage'] / (10 ** 9)), timeout)
                await self._kill_it_with_fire(container_id)
                return
    except asyncio.CancelledError:
        pass
    except:
        self._logger.exception("Exception in _handle_container_timeout")
Kills a container (should be called with loop.call_later(hard_timeout, ...)) and displays a message in the log

:param container_id: the container id to kill
:param hard_timeout: the wall time limit, in seconds
async def _handle_container_hard_timeout(self, container_id, hard_timeout):
    """
    Kills a container (should be called with loop.call_later(hard_timeout, ...)) and displays a message in the log

    :param container_id: the container id to kill
    :param hard_timeout: the wall time limit, in seconds
    """
    if container_id in self._watching:
        self._logger.info("Killing container %s as it used its %i wall time seconds", container_id, hard_timeout)
        await self._kill_it_with_fire(container_id)
Kill a container, with fire.
async def _kill_it_with_fire(self, container_id):
    """ Kill a container, with fire. """
    if container_id in self._watching:
        self._watching.remove(container_id)
        self._container_had_error.add(container_id)
        try:
            await self._docker_interface.kill_container(container_id)
        except:  # the container may already be dead
            pass
:param ignore_session: Ignore the cookieless session_id that should be put in the URL
:param force_cookieless: Force the cookieless session; the link will include the session_creator if needed.
def get_homepath(self, ignore_session=False, force_cookieless=False):
    """
    :param ignore_session: Ignore the cookieless session_id that should be put in the URL
    :param force_cookieless: Force the cookieless session; the link will include the session_creator if needed.
    """
    if not ignore_session and self._session.get("session_id") is not None and self._session.get("cookieless", False):
        return web.ctx.homepath + "/@" + self._session.get("session_id") + "@"
    elif not ignore_session and force_cookieless:
        return web.ctx.homepath + "/@@"
    else:
        return web.ctx.homepath
Load the session from the store. session_id can be:
- None: load from cookie
- '': create a new cookieless session_id
- a string which is the session_id to be used.
def load(self, session_id=None):
    """
    Load the session from the store. session_id can be:
    - None: load from cookie
    - '': create a new cookieless session_id
    - a string which is the session_id to be used.
    """
    if session_id is None:
        cookie_name = self._config.cookie_name
        self._data["session_id"] = web.cookies().get(cookie_name)
        self._data["cookieless"] = False
    else:
        if session_id == '':
            self._data["session_id"] = None  # will be created
        else:
            self._data["session_id"] = session_id
        self._data["cookieless"] = True

    # protection against session_id tampering
    if self._data["session_id"] and not self._valid_session_id(self._data["session_id"]):
        self._data["session_id"] = None

    self._check_expiry()
    if self._data["session_id"]:
        d = self.store[self._data["session_id"]]
        self.update(d)
        self._validate_ip()

    if not self._data["session_id"]:
        self._data["session_id"] = self._generate_session_id()
        if self._initializer:
            if isinstance(self._initializer, dict):
                self.update(deepcopy(self._initializer))
            elif hasattr(self._initializer, '__call__'):
                self._initializer()

    self._data["ip"] = web.ctx.ip
Generate a random id for session
def _generate_session_id(self):
    """ Generate a random id for session """
    while True:
        rand = os.urandom(16)
        now = time.time()
        secret_key = self._config.secret_key
        session_id = hashlib.sha1(("%s%s%s%s" % (rand, now, utils.safestr(web.ctx.ip), secret_key)).encode("utf-8"))
        session_id = session_id.hexdigest()
        if session_id not in self.store:
            break
    return session_id
Cleanup the stored sessions
def _cleanup(self):
    """ Cleanup the stored sessions """
    current_time = time.time()
    timeout = self._config.timeout
    if current_time - self._last_cleanup_time > timeout:
        self.store.cleanup(timeout)
        self._last_cleanup_time = current_time
Called when an expired session is accessed
def expired(self):
    """ Called when an expired session is accessed """
    self._data["_killed"] = True
    self.save()
    raise SessionExpired(self._config.expired_message)
Delete account from DB
def delete_account(self, data):
    """ Delete account from DB """
    error = False
    msg = ""
    username = self.user_manager.session_username()

    # Check input format
    result = self.database.users.find_one_and_delete({"username": username, "email": data.get("delete_email", "")})
    if not result:
        error = True
        msg = _("The specified email is incorrect.")
    else:
        self.database.submissions.remove({"username": username})
        self.database.user_tasks.remove({"username": username})

        all_courses = self.course_factory.get_all_courses()
        for courseid, course in all_courses.items():
            if self.user_manager.course_is_open_to_user(course, username):
                self.user_manager.course_unregister_user(course, username)

        self.user_manager.disconnect_user()
        raise web.seeother("/index")

    return msg, error
GET request
def GET_AUTH(self):  # pylint: disable=arguments-differ
    """ GET request """
    userdata = self.database.users.find_one({"username": self.user_manager.session_username()})
    if not userdata or not self.app.allow_deletion:
        raise web.notfound()
    return self.template_helper.get_renderer().preferences.delete("", False)
POST request
def POST_AUTH(self):  # pylint: disable=arguments-differ
    """ POST request """
    userdata = self.database.users.find_one({"username": self.user_manager.session_username()})
    if not userdata or not self.app.allow_deletion:
        raise web.notfound()

    msg = ""
    error = False
    data = web.input()
    if "delete" in data:
        msg, error = self.delete_account(data)

    return self.template_helper.get_renderer().preferences.delete(msg, error)
Serialize a Python object into a YAML stream. If stream is None, return the produced string instead. Dict keys are produced in the order in which they appear in OrderedDicts.

Safe version. If objects are not "conventional" objects, they will be dumped converted to string with the str() function. They will then not be recovered when loading with the load() function.
def dump(data, stream=None, **kwds):
    """
    Serialize a Python object into a YAML stream.
    If stream is None, return the produced string instead.
    Dict keys are produced in the order in which they appear in OrderedDicts.

    Safe version.
    If objects are not "conventional" objects, they will be dumped converted to string with the str() function.
    They will then not be recovered when loading with the load() function.
    """
    # Display OrderedDicts correctly
    class OrderedDumper(SafeDumper):
        pass

    def _dict_representer(dumper, data):
        return dumper.represent_mapping(
            original_yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
            list(data.items()))

    # Display long strings correctly
    def _long_str_representer(dumper, data):
        if data.find("\n") != -1:
            # Drop some unneeded data
            # \t are forbidden in YAML
            data = data.replace("\t", " ")
            # empty spaces at end of line are always useless in INGInious, and forbidden in YAML
            data = "\n".join([p.rstrip() for p in data.split("\n")])
            return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
        else:
            return dumper.represent_scalar('tag:yaml.org,2002:str', data)

    # Default representation for some odd objects
    def _default_representer(dumper, data):
        return _long_str_representer(dumper, str(data))

    OrderedDumper.add_representer(str, _long_str_representer)
    OrderedDumper.add_representer(OrderedDict, _dict_representer)
    OrderedDumper.add_representer(None, _default_representer)

    s = original_yaml.dump(data, stream, OrderedDumper,
                           encoding='utf-8', allow_unicode=True, default_flow_style=False, indent=4, **kwds)
    if s is not None:
        return s.decode('utf-8')
    else:
        return
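A quick sketch of how this dump could be used (assuming the function above is importable as inginious.common.custom_yaml.dump, the module path referenced elsewhere in this code):

from collections import OrderedDict
from inginious.common import custom_yaml

doc = OrderedDict([
    ("name", "Task 1"),
    ("context", "First line\nSecond line"),  # multi-line string: emitted in block (|) style
])
# Keys come out in insertion order; the multi-line value uses the '|' literal style.
print(custom_yaml.dump(doc))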
Util to remove parsable text from a dict, recursively
def _check_for_parsable_text(self, val):
    """ Util to remove parsable text from a dict, recursively """
    if isinstance(val, ParsableText):
        return val.original_content()
    if isinstance(val, list):
        for key, val2 in enumerate(val):
            val[key] = self._check_for_parsable_text(val2)
        return val
    if isinstance(val, dict):
        for key, val2 in val.items():
            val[key] = self._check_for_parsable_text(val2)
    return val
List tasks available to the connected client. Returns a dict in the form::

    {
        "taskid1":
        {
            "name": "Name of the task",       # the name of the task
            "authors": [],
            "deadline": "",
            "status": "succeeded",            # can be "succeeded", "failed", "notattempted" or "notviewed"
            "grade": 0.0,
            "grade_weight": 0.0,
            "context": "",                    # context of the task, in RST
            "problems":                       # dict of the subproblems
            {
                # see the format of task.yaml for the content of the dict. Contains everything but
                # responses of multiple-choice and match problems.
            }
        }
        #...
    }

If you use the endpoint /api/v0/courses/the_course_id/tasks/the_task_id, this dict will contain one entry or the page will return 404 Not Found.
def API_GET(self, courseid, taskid=None):  # pylint: disable=arguments-differ
    """
    List tasks available to the connected client. Returns a dict in the form::

        {
            "taskid1":
            {
                "name": "Name of the task",       # the name of the task
                "authors": [],
                "deadline": "",
                "status": "succeeded",            # can be "succeeded", "failed", "notattempted" or "notviewed"
                "grade": 0.0,
                "grade_weight": 0.0,
                "context": "",                    # context of the task, in RST
                "problems":                       # dict of the subproblems
                {
                    # see the format of task.yaml for the content of the dict. Contains everything but
                    # responses of multiple-choice and match problems.
                }
            }
            #...
        }

    If you use the endpoint /api/v0/courses/the_course_id/tasks/the_task_id, this dict will contain one entry or the page will return
    404 Not Found.
    """
    try:
        course = self.course_factory.get_course(courseid)
    except:
        raise APINotFound("Course not found")

    if not self.user_manager.course_is_open_to_user(course, lti=False):
        raise APIForbidden("You are not registered to this course")

    if taskid is None:
        tasks = course.get_tasks()
    else:
        try:
            tasks = {taskid: course.get_task(taskid)}
        except:
            raise APINotFound("Task not found")

    output = []
    for taskid, task in tasks.items():
        task_cache = self.user_manager.get_task_cache(self.user_manager.session_username(), task.get_course_id(), task.get_id())

        data = {
            "id": taskid,
            "name": task.get_name(self.user_manager.session_language()),
            "authors": task.get_authors(self.user_manager.session_language()),
            "deadline": task.get_deadline(),
            "status": "notviewed" if task_cache is None
                      else "notattempted" if task_cache["tried"] == 0
                      else "succeeded" if task_cache["succeeded"] else "failed",
            "grade": task_cache.get("grade", 0.0) if task_cache is not None else 0.0,
            "grade_weight": task.get_grading_weight(),
            "context": task.get_context(self.user_manager.session_language()).original_content(),
            "problems": []
        }

        for problem in task.get_problems():
            pcontent = problem.get_original_content()
            pcontent["id"] = problem.get_id()
            if pcontent["type"] == "match":
                del pcontent["answer"]
            if pcontent["type"] == "multiple_choice":
                pcontent["choices"] = {key: val["text"] for key, val in enumerate(pcontent["choices"])}
            pcontent = self._check_for_parsable_text(pcontent)
            data["problems"].append(pcontent)

        output.append(data)

    return 200, output
Open existing input file
def load_input():
    """ Open existing input file """
    with open(_input_file, 'r') as file:
        result = json.loads(file.read().strip('\0').strip())
    return result
Returns the specified problem answer.

problem: problem id

Returns a string, or bytes if a file was loaded.
def get_input(problem):
    """
    Returns the specified problem answer.

    problem: problem id

    Returns a string, or bytes if a file was loaded.
    """
    input_data = load_input()
    pbsplit = problem.split(":")
    problem_input = input_data['input'][pbsplit[0]]
    if isinstance(problem_input, dict) and "filename" in problem_input and "value" in problem_input:
        if len(pbsplit) > 1 and pbsplit[1] == 'filename':
            return problem_input["filename"]
        else:
            with open(problem_input["value"], 'rb') as f:
                return f.read()
    else:
        return problem_input
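For example, inside a grading container one might read a text answer and an uploaded file like this (the problem ids "q1" and "upload" are hypothetical):

answer = get_input("q1")               # string typed by the student
fname = get_input("upload:filename")   # original filename of an uploaded file
blob = get_input("upload")             # bytes of the uploaded file itself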
Parses a template file.
Replaces all occurrences of @@problem_id@@ by the value of the 'problem_id' key in data dictionary.

input_filename: file to parse
output_filename: if not specified, overwrite input file
def parse_template(input_filename, output_filename=''):
    """
    Parses a template file.
    Replaces all occurrences of @@problem_id@@ by the value of the 'problem_id' key in data dictionary.

    input_filename: file to parse
    output_filename: if not specified, overwrite input file
    """
    data = load_input()

    with open(input_filename, 'rb') as file:
        template = file.read().decode("utf-8")

    # Check if 'input' is in data
    if 'input' not in data:
        raise ValueError("Could not find 'input' in data")

    # Parse template
    for field in data['input']:
        subs = ["filename", "value"] if isinstance(data['input'][field], dict) and "filename" in data['input'][field] \
                                        and "value" in data['input'][field] else [""]
        for sub in subs:
            displayed_field = field + (":" if sub else "") + sub
            regex = re.compile("@([^@]*)@" + displayed_field + '@([^@]*)@')
            for prefix, postfix in set(regex.findall(template)):
                if sub == "value":
                    with open(data['input'][field][sub], 'rb') as f:
                        text = f.read().decode('utf-8')
                elif sub:
                    text = data['input'][field][sub]
                else:
                    text = data['input'][field]
                rep = "\n".join([prefix + v + postfix for v in text.splitlines()])
                template = template.replace("@{0}@{1}@{2}@".format(prefix, displayed_field, postfix), rep)

    if output_filename == '':
        output_filename = input_filename

    # Ensure directory of resulting file exists
    try:
        os.makedirs(os.path.dirname(output_filename))
    except OSError:
        pass

    # Write file
    with open(output_filename, 'wb') as file:
        file.write(template.encode("utf-8"))
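To illustrate the marker syntax: a marker @P@field@S@ is replaced by the student's answer, with prefix P and suffix S repeated on every line of the answer. A hypothetical call (file names and problem id "q1" are assumptions):

# template.py contains the line:
#     @ans_line = @q1@@
# If the student answered "1\n2", the parsed file contains:
#     ans_line = 1
#     ans_line = 2
parse_template("template.py", "student/template.py")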
Send a ClientGetQueue message to the backend, if one is not already sent
async def _ask_queue_update(self):
    """ Send a ClientGetQueue message to the backend, if one is not already sent """
    try:
        while True:
            await asyncio.sleep(self._queue_update_timer)
            if self._queue_update_last_attempt == 0 or self._queue_update_last_attempt > self._queue_update_last_attempt_max:
                if self._queue_update_last_attempt:
                    self._logger.error("Asking for a job queue update despite previous update not yet received")
                else:
                    self._logger.debug("Asking for a job queue update")
                self._queue_update_last_attempt = 1
                await self._simple_send(ClientGetQueue())
            else:
                self._logger.error("Not asking for a job queue update as previous update not yet received")
    except asyncio.CancelledError:
        return
    except KeyboardInterrupt:
        return
Returns a function that is only callable once; any other call will do nothing
def _callable_once(func):
    """ Returns a function that is only callable once; any other call will do nothing """

    def once(*args, **kwargs):
        if not once.called:
            once.called = True
            return func(*args, **kwargs)

    once.called = False
    return once
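A small usage sketch of _callable_once:

greet = _callable_once(lambda name: print("hello", name))
greet("alice")  # prints "hello alice"
greet("bob")    # does nothing: the wrapped function already ran once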
Handles a BackendGetQueue containing a snapshot of the job queue
async def _handle_job_queue_update(self, message: BackendGetQueue):
    """ Handles a BackendGetQueue containing a snapshot of the job queue """
    self._logger.debug("Received job queue update")
    self._queue_update_last_attempt = 0
    self._queue_cache = message

    # Do some precomputation
    new_job_queue_cache = {}  # format is job_id: (nb_jobs_before, max_remaining_time)
    for (job_id, is_local, _, _2, _3, _4, max_end) in message.jobs_running:
        if is_local:
            new_job_queue_cache[job_id] = (-1, max_end - time.time())

    wait_time = 0
    nb_tasks = 0
    for (job_id, is_local, _, _2, timeout) in message.jobs_waiting:
        if timeout > 0:
            wait_time += timeout
        if is_local:
            new_job_queue_cache[job_id] = (nb_tasks, wait_time)
        nb_tasks += 1

    self._queue_job_cache = new_job_queue_cache
Add a new job. Every callback will be called once and only once.

:type task: Task
:param inputdata: input from the student
:type inputdata: Storage or dict
:param callback: a function that will be called asynchronously in the client's process, with the results.
    Its signature must be (result, grade, problems, tests, custom, state, archive, stdout, stderr), where:

    - result is itself a tuple containing the result string and the main feedback (i.e. ('success', 'You succeeded'));
    - grade is a number between 0 and 100 indicating the grade of the user;
    - problems is a dict of tuples, in the form {'problemid': result};
    - tests is a dict of tests made in the container;
    - custom is a dict containing random things set in the container;
    - state is the state saved in the container;
    - archive is either None or a bytes containing a tgz archive of files from the job;
    - stdout and stderr are the standard output and error of the job.
:type callback: __builtin__.function or __builtin__.instancemethod
:param launcher_name: for informational use
:type launcher_name: str
:param debug: Either True (outputs more info), False (default), or "ssh" (starts a remote ssh server. ssh_callback needs to be defined)
:type debug: bool or string
:param ssh_callback: a callback function that will be called with (host, port, password), the needed credentials to connect to the
    remote ssh server. May be called with host, port, password being None, meaning no session was open.
:type ssh_callback: __builtin__.function or __builtin__.instancemethod or None
:return: the new job id
def new_job(self, task, inputdata, callback, launcher_name="Unknown", debug=False, ssh_callback=None):
    """
    Add a new job. Every callback will be called once and only once.

    :type task: Task
    :param inputdata: input from the student
    :type inputdata: Storage or dict
    :param callback: a function that will be called asynchronously in the client's process, with the results.
        Its signature must be (result, grade, problems, tests, custom, state, archive, stdout, stderr), where:

        - result is itself a tuple containing the result string and the main feedback (i.e. ('success', 'You succeeded'));
        - grade is a number between 0 and 100 indicating the grade of the user;
        - problems is a dict of tuples, in the form {'problemid': result};
        - tests is a dict of tests made in the container;
        - custom is a dict containing random things set in the container;
        - state is the state saved in the container;
        - archive is either None or a bytes containing a tgz archive of files from the job;
        - stdout and stderr are the standard output and error of the job.
    :type callback: __builtin__.function or __builtin__.instancemethod
    :param launcher_name: for informational use
    :type launcher_name: str
    :param debug: Either True (outputs more info), False (default), or "ssh" (starts a remote ssh server. ssh_callback needs to be defined)
    :type debug: bool or string
    :param ssh_callback: a callback function that will be called with (host, port, password), the needed credentials to connect to the
        remote ssh server. May be called with host, port, password being None, meaning no session was open.
    :type ssh_callback: __builtin__.function or __builtin__.instancemethod or None
    :return: the new job id
    """
    job_id = str(uuid.uuid4())

    if debug == "ssh" and ssh_callback is None:
        self._logger.error("SSH callback not set in %s/%s", task.get_course_id(), task.get_id())
        callback(("crash", "SSH callback not set."), 0.0, {}, {}, "", {}, None, "", "")
        return

    # wrap ssh_callback to ensure it is called at most once, and that it can always be called to simplify code
    ssh_callback = _callable_once(ssh_callback if ssh_callback is not None else lambda _1, _2, _3: None)

    environment = task.get_environment()
    if environment not in self._available_containers:
        self._logger.warning("Env %s not available for task %s/%s", environment, task.get_course_id(), task.get_id())
        ssh_callback(None, None, None)  # ssh_callback must be called once
        callback(("crash", "Environment not available."), 0.0, {}, {}, "", {}, None, "", "")
        return

    enable_network = task.allow_network_access_grading()

    try:
        limits = task.get_limits()
        time_limit = int(limits.get('time', 20))
        hard_time_limit = int(limits.get('hard_time', 3 * time_limit))
        mem_limit = int(limits.get('memory', 200))
    except:
        self._logger.exception("Cannot retrieve limits for task %s/%s", task.get_course_id(), task.get_id())
        ssh_callback(None, None, None)  # ssh_callback must be called once
        callback(("crash", "Error while reading task limits"), 0.0, {}, {}, "", {}, None, "", "")
        return

    msg = ClientNewJob(job_id, task.get_course_id(), task.get_id(), inputdata, environment, enable_network,
                       time_limit, hard_time_limit, mem_limit, debug, launcher_name)
    self._loop.call_soon_threadsafe(asyncio.ensure_future,
                                    self._create_transaction(msg, task=task, callback=callback, ssh_callback=ssh_callback))

    return job_id
Kills a running job
def kill_job(self, job_id):
    """ Kills a running job """
    self._loop.call_soon_threadsafe(asyncio.ensure_future, self._simple_send(ClientKillJob(job_id)))
Display main course list page
def show_page(self):
    """ Display main course list page """
    username = self.user_manager.session_username()
    user_info = self.database.users.find_one({"username": username})

    all_courses = self.course_factory.get_all_courses()

    # Display
    open_courses = {courseid: course for courseid, course in all_courses.items() if course.is_open_to_non_staff()}
    open_courses = OrderedDict(sorted(iter(open_courses.items()),
                                      key=lambda x: x[1].get_name(self.user_manager.session_language())))

    return self.template_helper.get_renderer().index(open_courses, user_info)
Generates rst codeblock for given text and language
def get_codeblock(language, text):
    """ Generates rst codeblock for given text and language """
    rst = "\n\n.. code-block:: " + language + "\n\n"
    for line in text.splitlines():
        rst += "\t" + line + "\n"
    rst += "\n"
    return rst
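For instance:

print(get_codeblock("python", "x = 1\nprint(x)"))
# .. code-block:: python
#
#     x = 1
#     print(x)
# (each code line is actually indented with a tab)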
Generates rst raw block for given image filename and format
def get_imageblock(filename, format=''):
    """ Generates rst raw block for given image filename and format """
    _, extension = os.path.splitext(filename)
    with open(filename, "rb") as image_file:
        encoded_string = base64.b64encode(image_file.read()).decode('utf-8')
    return '\n\n.. raw:: html\n\n\t<img src="data:image/' + (format if format else extension[1:]) + ';base64,' + encoded_string + '">\n'
Generates rst admonition block given a bootstrap alert css class, title, and text
def get_admonition(cssclass, title, text):
    """ Generates rst admonition block given a bootstrap alert css class, title, and text """
    rst = ("\n\n.. admonition:: " + title + "\n") if title else "\n\n.. note:: \n"
    rst += "\t:class: alert alert-" + cssclass + "\n\n"
    for line in text.splitlines():
        rst += "\t" + line + "\n"
    rst += "\n"
    return rst
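For instance, to render a Bootstrap "warning" alert:

print(get_admonition("warning", "Careful", "Do not divide by zero."))
# .. admonition:: Careful
#     :class: alert alert-warning
#
#     Do not divide by zero.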
Indent (or de-indent) text
def indent_block(amount, text, indent_char='\t'):
    """ Indent (or de-indent) text """
    rst = ""
    for line in text.splitlines():
        if amount > 0:
            rst += indent_char * amount + line + "\n"
        else:
            rst += ''.join([c for i, c in enumerate(line)
                            if (c == indent_char and (i + 1) > abs(amount)) or (not c == indent_char)]) + "\n"
    return rst
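For instance:

indent_block(2, "a\nb", indent_char=" ")     # "  a\n  b\n"
indent_block(-1, "\t\tx", indent_char="\t")  # "\tx\n": drops the first tab of each line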
From a bytestring given by a (distant) call to Message.dump(), retrieve the original message

:param bmessage: bytestring given by a .dump() call on a message
:return: the original message
def load(cls, bmessage):
    """
    From a bytestring given by a (distant) call to Message.dump(), retrieve the original message

    :param bmessage: bytestring given by a .dump() call on a message
    :return: the original message
    """
    message_dict = msgpack.loads(bmessage, encoding="utf8", use_list=False)
    try:
        obj = MessageMeta._registered_messages[message_dict["type"]].__new__(MessageMeta._registered_messages[message_dict["type"]])
        object.__setattr__(obj, "__dict__", message_dict)
    except:
        raise TypeError("Unknown message type") from None
    if not obj._verify():  # pylint: disable=protected-access
        raise TypeError("Invalid message content")
    return obj
Install gettext with the default parameters
def init():
    """ Install gettext with the default parameters """
    if "_" not in builtins.__dict__:  # avoid installing the language twice
        os.environ["LANGUAGE"] = inginious.input.get_lang()
        gettext.install("messages", get_lang_dir_path())
Returns {"authenticated": false} or {"authenticated": true, "username": "your_username"} (always 200 OK)
def API_GET(self):  # pylint: disable=arguments-differ
    """ Returns {"authenticated": false} or {"authenticated": true, "username": "your_username"} (always 200 OK) """
    if self.user_manager.session_logged_in():
        return 200, {"authenticated": True, "username": self.user_manager.session_username()}
    else:
        return 200, {"authenticated": False}
Authenticates the remote client. Takes as input:

    auth_method_id
        an id for an auth method as returned by /api/v0/auth_methods
    input_key_1
        the first input key and its value
    input_key_2
        the second input key and its value
    ...
        ...

Response: a dict in the form {"status": "success"} (200 OK) or {"status": "error"} (403 Forbidden)
def API_POST(self):  # pylint: disable=arguments-differ
    """
    Authenticates the remote client. Takes as input:

        auth_method_id
            an id for an auth method as returned by /api/v0/auth_methods
        input_key_1
            the first input key and its value
        input_key_2
            the second input key and its value
        ...
            ...

    Response: a dict in the form {"status": "success"} (200 OK) or {"status": "error"} (403 Forbidden)
    """
    user_input = web.input()
    if "auth_method_id" not in user_input:
        raise APIInvalidArguments()

    try:
        auth_method_id = int(user_input["auth_method_id"])
    except:
        raise APIInvalidArguments()

    del user_input["auth_method_id"]

    try:
        if "login" in user_input and "password" in user_input and \
                self.user_manager.auth_user(user_input["login"].strip(), user_input["password"]) is not None:
            return 200, {"status": "success", "username": self.user_manager.session_username()}
    except:
        pass

    return 403, {"status": "error"}
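From the client side, a session-based login against this endpoint could look like the following sketch. The base URL and the endpoint path "/authentication" are assumptions for illustration; requests is used for brevity:

import requests

session = requests.Session()  # keeps the session cookie between calls
base = "https://example.com/api/v0"  # hypothetical deployment URL

r = session.post(base + "/authentication",
                 data={"auth_method_id": 0, "login": "jdoe", "password": "secret"})
print(r.status_code, r.json())  # 200 {"status": "success", ...} or 403 {"status": "error"}

# The GET endpoint above then reports the authenticated session:
print(session.get(base + "/authentication").json())  # {"authenticated": true, "username": "jdoe"}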
Returns all the auth methods available. (200 OK)

Response: a list of auth methods. Each entry of the list is an auth method, represented by:

    id
        id of the auth method
    name
        the name of the authentication method, typically displayed by the webapp
    input
        a dictionary containing as key the name of the input (in the HTML sense of name), and, as value,
        a dictionary containing two fields:

        name
            the placeholder for the input
        type
            text or password
def API_GET(self):
    """
    Returns all the auth methods available. (200 OK)

    Response: a list of auth methods. Each entry of the list is an auth method, represented by:

        id
            id of the auth method
        name
            the name of the authentication method, typically displayed by the webapp
        input
            a dictionary containing as key the name of the input (in the HTML sense of name), and, as value,
            a dictionary containing two fields:

            name
                the placeholder for the input
            type
                text or password
    """
    to_display = []
    for key, (name, data) in self.user_manager.get_auth_methods().items():
        to_display.append({
            "id": key,
            "name": name,
            "input": [{"id": ik, "name": iv["placeholder"], "type": iv["type"]} for ik, iv in data["input"].items()]
        })
    return 200, to_display
GET request
def GET_AUTH(self, courseid):  # pylint: disable=arguments-differ
    """ GET request """
    course, __ = self.get_course_and_check_rights(courseid, allow_all_staff=False)
    return self.page(course)
POST request
def POST_AUTH(self, courseid):  # pylint: disable=arguments-differ
    """ POST request """
    course, __ = self.get_course_and_check_rights(courseid, allow_all_staff=False)

    errors = []
    course_content = {}
    try:
        data = web.input()
        course_content = self.course_factory.get_course_descriptor_content(courseid)
        course_content['name'] = data['name']
        if course_content['name'] == "":
            errors.append(_('Invalid name'))
        course_content['description'] = data['description']
        course_content['admins'] = list(map(str.strip, data['admins'].split(',')))
        if not self.user_manager.user_is_superadmin() and self.user_manager.session_username() not in course_content['admins']:
            errors.append(_('You cannot remove yourself from the administrators of this course'))
        course_content['tutors'] = list(map(str.strip, data['tutors'].split(',')))
        if len(course_content['tutors']) == 1 and course_content['tutors'][0].strip() == "":
            course_content['tutors'] = []

        course_content['groups_student_choice'] = True if data["groups_student_choice"] == "true" else False

        if course_content.get('use_classrooms', True) != (data['use_classrooms'] == "true"):
            self.database.aggregations.delete_many({"courseid": course.get_id()})

        course_content['use_classrooms'] = True if data["use_classrooms"] == "true" else False

        if data["accessible"] == "custom":
            course_content['accessible'] = "{}/{}".format(data["accessible_start"], data["accessible_end"])
        elif data["accessible"] == "true":
            course_content['accessible'] = True
        else:
            course_content['accessible'] = False

        try:
            AccessibleTime(course_content['accessible'])
        except:
            errors.append(_('Invalid accessibility dates'))

        course_content['allow_unregister'] = True if data["allow_unregister"] == "true" else False
        course_content['allow_preview'] = True if data["allow_preview"] == "true" else False

        if data["registration"] == "custom":
            course_content['registration'] = "{}/{}".format(data["registration_start"], data["registration_end"])
        elif data["registration"] == "true":
            course_content['registration'] = True
        else:
            course_content['registration'] = False

        try:
            AccessibleTime(course_content['registration'])
        except:
            errors.append(_('Invalid registration dates'))

        course_content['registration_password'] = data['registration_password']
        if course_content['registration_password'] == "":
            course_content['registration_password'] = None

        course_content['registration_ac'] = data['registration_ac']
        if course_content['registration_ac'] not in ["None", "username", "binding", "email"]:
            errors.append(_('Invalid ACL value'))
        if course_content['registration_ac'] == "None":
            course_content['registration_ac'] = None
        course_content['registration_ac_list'] = data['registration_ac_list'].splitlines()

        course_content['is_lti'] = 'lti' in data and data['lti'] == "true"
        course_content['lti_keys'] = dict([x.split(":") for x in data['lti_keys'].splitlines() if x])

        for lti_key in course_content['lti_keys'].keys():
            if not re.match("^[a-zA-Z0-9]*$", lti_key):
                errors.append(_("LTI keys must be alphanumerical."))

        course_content['lti_send_back_grade'] = 'lti_send_back_grade' in data and data['lti_send_back_grade'] == "true"
    except:
        errors.append(_('User returned an invalid form.'))

    if len(errors) == 0:
        self.course_factory.update_course_descriptor_content(courseid, course_content)
        errors = None
        course, __ = self.get_course_and_check_rights(courseid, allow_all_staff=False)  # don't forget to reload the modified course

    return self.page(course, errors, errors is None)
Get all data and display the page
def page(self, course, errors=None, saved=False):
    """ Get all data and display the page """
    return self.template_helper.get_renderer().course_admin.settings(course, errors, saved)
Copy src to dest, recursively and with file overwrite.
def _recursive_overwrite(self, src, dest):
    """ Copy src to dest, recursively and with file overwrite. """
    if os.path.isdir(src):
        if not os.path.isdir(dest):
            os.makedirs(dest)
        files = os.listdir(src)
        for f in files:
            self._recursive_overwrite(os.path.join(src, f), os.path.join(dest, f))
    else:
        shutil.copyfile(src, dest, follow_symlinks=False)
Init the plugin

Available configuration:
::

    plugins:
        - plugin_module: inginious.frontend.plugins.git_repo
          repo_directory: "./repo_submissions"
def init(plugin_manager, _, _2, config):
    """
        Init the plugin

        Available configuration:
        ::

            plugins:
                - plugin_module: inginious.frontend.plugins.git_repo
                  repo_directory: "./repo_submissions"
    """
    submission_git_saver = SubmissionGitSaver(plugin_manager, config)
    submission_git_saver.daemon = True
    submission_git_saver.start()
Add a new submission to the repo (added to a queue; saved asynchronously)
def add(self, submission, archive, _):
    """ Add a new submission to the repo (added to a queue; saved asynchronously) """
    self.queue.put((submission, submission["result"], submission["grade"], submission["problems"],
                    submission["tests"], submission["custom"], archive))
saves a new submission in the repo (done async)
def save(self, submission, result, grade, problems, tests, custom, archive):  # pylint: disable=unused-argument
    """ saves a new submission in the repo (done async) """
    # Save submission to repo
    self._logger.info("Save submission " + str(submission["_id"]) + " to git repo")

    # Verify that the directory for the course exists
    if not os.path.exists(os.path.join(self.repopath, submission["courseid"])):
        os.mkdir(os.path.join(self.repopath, submission["courseid"]))
    # Idem with the task
    if not os.path.exists(os.path.join(self.repopath, submission["courseid"], submission["taskid"])):
        os.mkdir(os.path.join(self.repopath, submission["courseid"], submission["taskid"]))
    # Idem with the username, but empty it
    dirname = os.path.join(self.repopath, submission["courseid"], submission["taskid"],
                           str.join("-", submission["username"]))
    if os.path.exists(dirname):
        shutil.rmtree(dirname)
    os.mkdir(dirname)

    # Now we can put the input, the output and the zip
    open(os.path.join(dirname, 'submitted_on'), "w+").write(str(submission["submitted_on"]))
    open(os.path.join(dirname, 'input.yaml'), "w+").write(inginious.common.custom_yaml.dump(submission["input"]))
    result_obj = {
        "result": result[0],
        "text": result[1],
        "problems": problems
    }
    open(os.path.join(dirname, 'result.yaml'), "w+").write(inginious.common.custom_yaml.dump(result_obj))
    if archive is not None:
        os.mkdir(os.path.join(dirname, 'output'))
        tar = tarfile.open(mode='r:gz', fileobj=io.BytesIO(archive))
        tar.extractall(os.path.join(dirname, 'output'))
        tar.close()

    self.git.add('--all', '.')
    title = " - ".join([str(submission["courseid"]) + "/" + str(submission["taskid"]),
                        str(submission["_id"]),
                        str.join("-", submission["username"]),
                        ("success" if result[0] == "success" else "failed")])
    self.git.commit('-m', title)
Returns the description of this tag. translated=True can be used to avoid getting garbage when calling _() with an empty string, since the description of a tag CAN be empty.
def get_description(self, translated=False):
    """
        Returns the description of this tag.
        translated=True can be used to avoid getting garbage when calling _() with an empty string,
        since the description of a tag CAN be empty.
    """
    if translated and self._description != "":
        return _(self._description)
    return self._description
Return a textual description of the type
def get_type_as_str(self):
    """ Return a textual description of the type """
    if self.get_type() == 0:
        return _("Skill")
    elif self.get_type() == 1:
        return _("Misconception")
    elif self.get_type() == 2:
        return _("Category")
    else:
        return _("Unknown type")
Build a tuple of lists of Tag objects based on the tag_dict. The tuple contains 3 lists:

- the first list contains skill tags
- the second list contains misconception tags
- the third list contains category tags
def create_tags_from_dict(cls, tag_dict):
    """
        Build a tuple of lists of Tag objects based on the tag_dict. The tuple contains 3 lists:

        - the first list contains skill tags
        - the second list contains misconception tags
        - the third list contains category tags
    """
    tag_list_common = []
    tag_list_misconception = []
    tag_list_organisational = []
    for tag in tag_dict:
        try:
            id = tag_dict[tag]["id"]
            name = tag_dict[tag]["name"]
            visible = tag_dict[tag]["visible"]
            description = tag_dict[tag]["description"]
            type = tag_dict[tag]["type"]
            if type == 2:
                tag_list_organisational.insert(int(tag), Tag(id, name, description, visible, type))
            elif type == 1:
                tag_list_misconception.insert(int(tag), Tag(id, name, description, visible, type))
            else:
                tag_list_common.insert(int(tag), Tag(id, name, description, visible, type))
        except KeyError:
            pass
    return tag_list_common, tag_list_misconception, tag_list_organisational
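A sketch of the expected tag_dict shape, derived from the keys read above (the ids and names are made up, and the method is assumed to live on Tag, as its cls parameter suggests):

# Illustrative input: keys "0", "1", ... index the tags; each value carries the
# fields read by create_tags_from_dict. All ids/names here are invented.
tag_dict = {
    "0": {"id": "rec", "name": "Recursion", "visible": True, "description": "", "type": 0},
    "1": {"id": "offbyone", "name": "Off-by-one", "visible": False, "description": "", "type": 1},
    "2": {"id": "week1", "name": "Week 1", "visible": True, "description": "", "type": 2},
}
skills, misconceptions, categories = Tag.create_tags_from_dict(tag_dict)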
Check that the tags arg only contains valid types for tags

:param tags: output of create_tags_from_dict
:return: True if correct format, False otherwise
def check_format(cls, tags):
    """
        Check that the tags arg only contains valid types for tags

        :param tags: output of create_tags_from_dict
        :return: True if correct format, False otherwise
    """
    common, _, _ = tags
    for tag in common:
        if tag.get_type() != 0:  # Unknown type -> incorrect
            return False
    return True
GET request
def GET_AUTH(self, courseid, aggregationid):  # pylint: disable=arguments-differ
    """ GET request """
    course, __ = self.get_course_and_check_rights(courseid)

    if course.is_lti():
        raise web.notfound()

    return self.page(course, aggregationid)
Get all data and display the page
def page(self, course, aggregationid):
    """ Get all data and display the page """
    aggregation = self.database.aggregations.find_one({"_id": ObjectId(aggregationid)})

    data = list(self.database.submissions.aggregate(
        [
            {
                "$match":
                    {
                        "courseid": course.get_id(),
                        "username": {"$in": aggregation["students"]}
                    }
            },
            {
                "$group":
                    {
                        "_id": "$taskid",
                        "tried": {"$sum": 1},
                        "succeeded": {"$sum": {"$cond": [{"$eq": ["$result", "success"]}, 1, 0]}},
                        "grade": {"$max": "$grade"}
                    }
            }
        ]))

    tasks = course.get_tasks()
    result = dict([(taskid, {"taskid": taskid, "name": tasks[taskid].get_name(self.user_manager.session_language()),
                             "tried": 0, "status": "notviewed", "grade": 0,
                             "url": self.submission_url_generator(aggregationid, taskid)}) for taskid in tasks])

    for taskdata in data:
        if taskdata["_id"] in result:
            result[taskdata["_id"]]["tried"] = taskdata["tried"]
            if taskdata["tried"] == 0:
                result[taskdata["_id"]]["status"] = "notattempted"
            elif taskdata["succeeded"]:
                result[taskdata["_id"]]["status"] = "succeeded"
            else:
                result[taskdata["_id"]]["status"] = "failed"
            result[taskdata["_id"]]["grade"] = taskdata["grade"]

    if "csv" in web.input():
        return make_csv(result)

    results = sorted(list(result.values()), key=lambda result: (tasks[result["taskid"]].get_order(), result["taskid"]))
    return self.template_helper.get_renderer().course_admin.aggregation_info(course, aggregation, results)
Runs the agent. Answers the requests made by the backend. May raise an asyncio.CancelledError, in which case the agent should clean itself up and restart completely.
async def run(self):
    """
        Runs the agent. Answers the requests made by the backend.
        May raise an asyncio.CancelledError, in which case the agent should clean itself up and restart completely.
    """
    self._logger.info("Agent started")
    self.__backend_socket.connect(self.__backend_addr)

    # Tell the backend we are up and have `concurrency` threads available
    self._logger.info("Saying hello to the backend")
    await ZMQUtils.send(self.__backend_socket, AgentHello(self.__friendly_name, self.__concurrency, self.environments))
    self.__last_ping = time.time()

    run_listen = self._loop.create_task(self.__run_listen())
    self._loop.call_later(1, self._create_safe_task, self.__check_last_ping(run_listen))

    await run_listen
Check if the last ping is too old. If it is, kill the run_listen task
async def __check_last_ping(self, run_listen):
    """ Check if the last ping is too old. If it is, kill the run_listen task """
    if self.__last_ping < time.time() - 10:
        self._logger.warning("Last ping too old. Restarting the agent.")
        run_listen.cancel()
        self.__cancel_remaining_safe_tasks()
    else:
        self._loop.call_later(1, self._create_safe_task, self.__check_last_ping(run_listen))
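The watchdog pattern used here can be reduced to a few lines; a standalone sketch with illustrative names (not the agent's actual API):

import asyncio
import time

# Standalone sketch of the same self-rescheduling watchdog pattern.
async def watchdog(get_last_ping, task, loop, timeout=10):
    if get_last_ping() < time.time() - timeout:
        task.cancel()  # the peer has gone silent: kill the listener task
    else:
        # re-arm the check in one second, exactly like the agent does above
        loop.call_later(1, asyncio.ensure_future, watchdog(get_last_ping, task, loop, timeout))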
Listen to the backend
async def __run_listen(self):
    """ Listen to the backend """
    while True:
        message = await ZMQUtils.recv(self.__backend_socket)
        await self.__handle_backend_message(message)
Dispatch messages received from the backend to the right handlers
async def __handle_backend_message(self, message):
    """ Dispatch messages received from the backend to the right handlers """
    message_handlers = {
        BackendNewJob: self.__handle_new_job,
        BackendKillJob: self.kill_job,
        Ping: self.__handle_ping
    }
    try:
        func = message_handlers[message.__class__]
    except:
        raise TypeError("Unknown message type %s" % message.__class__)
    self._create_safe_task(func(message))
Handle a Ping message. Pong the backend
async def __handle_ping(self, _: Ping):
    """ Handle a Ping message. Pong the backend """
    self.__last_ping = time.time()
    await ZMQUtils.send(self.__backend_socket, Pong())
Send info about the SSH debug connection to the backend/client. Must be called *at most once* for each job.

:exception JobNotRunningException: raised when the job is not running anymore (send_job_result already called)
:exception TooManyCallsException: raised when this function has been called more than once
async def send_ssh_job_info(self, job_id: BackendJobId, host: str, port: int, key: str):
    """
        Send info about the SSH debug connection to the backend/client. Must be called *at most once* for each job.

        :exception JobNotRunningException: raised when the job is not running anymore (send_job_result already called)
        :exception TooManyCallsException: raised when this function has been called more than once
    """
    if job_id not in self.__running_job:
        raise JobNotRunningException()
    if self.__running_job[job_id]:
        raise TooManyCallsException()
    self.__running_job[job_id] = True  # now we have sent ssh info
    await ZMQUtils.send(self.__backend_socket, AgentJobSSHDebug(job_id, host, port, key))
Send the result of a job back to the backend. Must be called *once and only once* for each job.

:exception JobNotRunningException: raised when send_job_result is called more than once for a given job_id
async def send_job_result(self, job_id: BackendJobId, result: str, text: str = "", grade: float = None,
                          problems: Dict[str, SPResult] = None, tests: Dict[str, Any] = None,
                          custom: Dict[str, Any] = None, state: str = "", archive: Optional[bytes] = None,
                          stdout: Optional[str] = None, stderr: Optional[str] = None):
    """
        Send the result of a job back to the backend. Must be called *once and only once* for each job.

        :exception JobNotRunningException: raised when send_job_result is called more than once for a given job_id
    """
    if job_id not in self.__running_job:
        raise JobNotRunningException()
    del self.__running_job[job_id]

    if grade is None:
        if result == "success":
            grade = 100.0
        else:
            grade = 0.0
    if problems is None:
        problems = {}
    if custom is None:
        custom = {}
    if tests is None:
        tests = {}

    await ZMQUtils.send(self.__backend_socket, AgentJobDone(job_id, (result, text), round(grade, 2), problems,
                                                            tests, custom, state, archive, stdout, stderr))
Calls self._loop.create_task with a safe (== with logged exception) coroutine. When run() ends, these tasks are automatically cancelled
def _create_safe_task(self, coroutine):
    """
        Calls self._loop.create_task with a safe (== with logged exception) coroutine.
        When run() ends, these tasks are automatically cancelled.
    """
    task = self._loop.create_task(coroutine)
    self.__asyncio_tasks_running.add(task)
    task.add_done_callback(self.__remove_safe_task)
Returns the content of a CSV file with the data of the dict/list data
def make_csv(data):
    """ Returns the content of a CSV file with the data of the dict/list data """
    # Convert sub-dicts to new cols
    for entry in data:
        rval = entry
        if isinstance(data, dict):
            rval = data[entry]
        todel = []
        toadd = {}
        for key, val in rval.items():
            if isinstance(val, dict):
                for key2, val2 in val.items():
                    toadd[str(key) + "[" + str(key2) + "]"] = val2
                todel.append(key)
        for k in todel:
            del rval[k]
        for k, v in toadd.items():
            rval[k] = v

    # Convert everything to CSV
    columns = set()
    output = [[]]
    if isinstance(data, dict):
        output[0].append("id")
        for entry in data:
            for col in data[entry]:
                columns.add(col)
    else:
        for entry in data:
            for col in entry:
                columns.add(col)

    columns = sorted(columns)

    for col in columns:
        output[0].append(col)

    if isinstance(data, dict):
        for entry in data:
            new_output = [str(entry)]
            for col in columns:
                new_output.append(str(data[entry][col]) if col in data[entry] else "")
            output.append(new_output)
    else:
        for entry in data:
            new_output = []
            for col in columns:
                new_output.append(str(entry[col]) if col in entry else "")
            output.append(new_output)

    csv_string = io.StringIO()
    csv_writer = UnicodeWriter(csv_string)
    for row in output:
        csv_writer.writerow(row)
    csv_string.seek(0)
    web.header('Content-Type', 'text/csv; charset=utf-8')
    web.header('Content-disposition', 'attachment; filename=export.csv')
    return csv_string.read()
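For example (a sketch: the data is made up, and since make_csv sets HTTP headers through web.py, it must run inside a request context):

# Sketch: dict input keyed by id; sub-dicts are flattened into "key[subkey]" columns.
data = {"task1": {"tried": 3, "grade": {"max": 100.0}},
        "task2": {"tried": 1, "grade": {"max": 40.0}}}
csv_text = make_csv(data)
# -> id,grade[max],tried
#    task1,100.0,3
#    task2,40.0,1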
Returns the HTML of the menu used in the administration. ``current`` is the current page of the section
def get_menu(course, current, renderer, plugin_manager, user_manager):
    """ Returns the HTML of the menu used in the administration. ``current`` is the current page of the section """
    default_entries = []
    if user_manager.has_admin_rights_on_course(course):
        default_entries += [("settings", "<i class='fa fa-cog fa-fw'></i>&nbsp; " + _("Course settings"))]

    default_entries += [("stats", "<i class='fa fa-area-chart fa-fw'></i>&nbsp; " + _("Stats")),
                        ("students", "<i class='fa fa-user fa-fw'></i>&nbsp; " + _("Students"))]

    if not course.is_lti():
        default_entries += [("aggregations", "<i class='fa fa-group fa-fw'></i>&nbsp; " +
                             (_("Classrooms") if course.use_classrooms() else _("Teams")))]

    default_entries += [("tasks", "<i class='fa fa-tasks fa-fw'></i>&nbsp; " + _("Tasks")),
                        ("submissions", "<i class='fa fa-search fa-fw'></i>&nbsp; " + _("View submissions")),
                        ("download", "<i class='fa fa-download fa-fw'></i>&nbsp; " + _("Download submissions"))]

    if user_manager.has_admin_rights_on_course(course):
        if web.ctx.app_stack[0].webdav_host:
            default_entries += [("webdav", "<i class='fa fa-folder-open fa-fw'></i>&nbsp; " + _("WebDAV access"))]
        default_entries += [("replay", "<i class='fa fa-refresh fa-fw'></i>&nbsp; " + _("Replay submissions")),
                            ("danger", "<i class='fa fa-bomb fa-fw'></i>&nbsp; " + _("Danger zone"))]

    # Hook should return a tuple (link, name) where link is the relative link from the index of the course administration.
    additional_entries = [entry for entry in plugin_manager.call_hook('course_admin_menu', course=course) if entry is not None]

    return renderer.course_admin.menu(course, default_entries + additional_entries, current)
Returns the course with id ``courseid`` and the task with id ``taskid``, and verifies the rights of the user.
Raises web.notfound() when there is no such course or if the user has not enough rights.

:param courseid: the course on which to check rights
:param taskid: if not None, also returns the task with id ``taskid``
:param allow_all_staff: allow admins AND tutors to see the page. If False, allow only admins.
:returns: (Course, Task)
def get_course_and_check_rights(self, courseid, taskid=None, allow_all_staff=True):
    """
        Returns the course with id ``courseid`` and the task with id ``taskid``, and verifies the rights of the user.
        Raises web.notfound() when there is no such course or if the user has not enough rights.

        :param courseid: the course on which to check rights
        :param taskid: if not None, also returns the task with id ``taskid``
        :param allow_all_staff: allow admins AND tutors to see the page. If False, allow only admins.
        :returns: (Course, Task)
    """
    try:
        course = self.course_factory.get_course(courseid)
        if allow_all_staff:
            if not self.user_manager.has_staff_rights_on_course(course):
                raise web.notfound()
        else:
            if not self.user_manager.has_admin_rights_on_course(course):
                raise web.notfound()

        if taskid is None:
            return course, None
        else:
            return course, course.get_task(taskid)
    except:
        raise web.notfound()
Returns the submissions that have been selected by the admin

:param course: course
:param filter_type: users or aggregations
:param selected_tasks: selected task ids
:param users: selected usernames
:param aggregations: selected aggregations
:param stype: single or all submissions
:return:
def get_selected_submissions(self, course, filter_type, selected_tasks, users, aggregations, stype):
    """
        Returns the submissions that have been selected by the admin

        :param course: course
        :param filter_type: users or aggregations
        :param selected_tasks: selected task ids
        :param users: selected usernames
        :param aggregations: selected aggregations
        :param stype: single or all submissions
        :return:
    """
    if filter_type == "users":
        self._validate_list(users)
        aggregations = list(self.database.aggregations.find({"courseid": course.get_id(),
                                                             "students": {"$in": users}}))
        # Tweak if not using classrooms: classroom['students'] may contain ungrouped users
        aggregations = dict([(username,
                              aggregation if course.use_classrooms() or
                              (len(aggregation['groups']) and username in aggregation['groups'][0]["students"]) else None
                              ) for aggregation in aggregations for username in users])
    else:
        self._validate_list(aggregations)
        aggregations = list(self.database.aggregations.find({"_id": {"$in": [ObjectId(cid) for cid in aggregations]}}))

        # Tweak if not using classrooms: classroom['students'] may contain ungrouped users
        aggregations = dict([(username,
                              aggregation if course.use_classrooms() or
                              (len(aggregation['groups']) and username in aggregation['groups'][0]["students"]) else None
                              ) for aggregation in aggregations for username in aggregation["students"]])

    if stype == "single":
        user_tasks = list(self.database.user_tasks.find({"username": {"$in": list(aggregations.keys())},
                                                         "taskid": {"$in": selected_tasks},
                                                         "courseid": course.get_id()}))

        submissionsid = [user_task['submissionid'] for user_task in user_tasks if user_task['submissionid'] is not None]
        submissions = list(self.database.submissions.find({"_id": {"$in": submissionsid}}))
    else:
        submissions = list(self.database.submissions.find({"username": {"$in": list(aggregations.keys())},
                                                           "taskid": {"$in": selected_tasks},
                                                           "courseid": course.get_id(),
                                                           "status": {"$in": ["done", "error"]}}))
    return submissions, aggregations
Writes a row to the CSV file
def writerow(self, row):
    """ Writes a row to the CSV file """
    self.writer.writerow(row)
    # Fetch UTF-8 output from the queue ...
    data = self.queue.getvalue()
    # write to the target stream
    self.stream.write(data)
    # empty queue
    self.queue.truncate(0)
    self.queue.seek(0)
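A minimal sketch of the buffered-writer shape this method belongs to (the constructor is an assumption; the real UnicodeWriter may differ):

import csv
import io

# Assumed shape of the wrapper class writerow() above belongs to: rows are
# staged through an in-memory queue, then flushed to the target stream.
class UnicodeWriter:
    def __init__(self, stream):
        self.queue = io.StringIO()           # staging buffer
        self.writer = csv.writer(self.queue) # csv writer over the buffer
        self.stream = stream                 # final destination stream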
GET request
def GET_AUTH(self, courseid):  # pylint: disable=arguments-differ
    """ GET request """
    course, __ = self.get_course_and_check_rights(courseid)

    if self.user_manager.session_username() in course.get_tutors():
        raise web.seeother(self.app.get_homepath() + '/admin/{}/tasks'.format(courseid))
    else:
        raise web.seeother(self.app.get_homepath() + '/admin/{}/settings'.format(courseid))
Get the default renderer
def get_renderer(self, with_layout=True):
    """ Get the default renderer """
    if with_layout and self.is_lti():
        return self._default_renderer_lti
    elif with_layout:
        return self._default_renderer
    else:
        return self._default_renderer_nolayout
Create a template renderer on templates in the directory specified, and returns it.

:param dir_path: the path to the template dir. If it is not absolute, it will be taken from the root of the inginious package.
:param layout: can either be True (use the base layout of the running app), False (use no layout at all), or the path to the layout to use. If this path is relative, it is taken from the INGInious package root.
def get_custom_renderer(self, dir_path, layout=True):
    """
        Create a template renderer on templates in the directory specified, and returns it.

        :param dir_path: the path to the template dir. If it is not absolute, it will be taken from the root of the
            inginious package.
        :param layout: can either be True (use the base layout of the running app), False (use no layout at all), or
            the path to the layout to use. If this path is relative, it is taken from the INGInious package root.
    """
    # if dir_path/base is an absolute path, os.path.join(something, an_absolute_path) returns an_absolute_path.
    root_path = inginious.get_root_path()

    if isinstance(layout, str):
        layout_path = os.path.join(root_path, layout)
    elif layout is True:
        layout_path = os.path.join(root_path, self._layout)
    else:
        layout_path = None

    return web.template.render(os.path.join(root_path, dir_path),
                               globals=self._template_globals,
                               base=layout_path)
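Typical usage from a plugin might look like this (a sketch: the template directory and template names are assumptions):

# Sketch: render plugin templates with the app layout; paths/names are illustrative.
renderer = template_helper.get_custom_renderer("frontend/plugins/myplugin/templates")
page_html = renderer.mypage()  # renders mypage.html inside the base layout

# Or without any layout, e.g. for an HTML fragment returned over AJAX:
fragment = template_helper.get_custom_renderer("frontend/plugins/myplugin/templates",
                                               layout=False).fragment()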
Add javascript links for the current page and for the plugins
def _javascript_helper(self, position):
    """ Add javascript links for the current page and for the plugins """
    if position not in ["header", "footer"]:
        position = "footer"

    # Load javascript files from plugins
    if position == "header":
        entries = [entry for entry in self._plugin_manager.call_hook("javascript_header") if entry is not None]
    else:
        entries = [entry for entry in self._plugin_manager.call_hook("javascript_footer") if entry is not None]
    # Load javascript for the current page
    entries += self._get_ctx()["javascript"][position]
    entries = ["<script src='" + entry + "' type='text/javascript' charset='utf-8'></script>" for entry in entries]
    return "\n".join(entries)
Add CSS links for the current page and for the plugins
def _css_helper(self):
    """ Add CSS links for the current page and for the plugins """
    entries = [entry for entry in self._plugin_manager.call_hook("css") if entry is not None]
    # Load CSS for the current page
    entries += self._get_ctx()["css"]
    entries = ["<link href='" + entry + "' rel='stylesheet'>" for entry in entries]
    return "\n".join(entries)
Get web.ctx object for the Template helper
def _get_ctx(self):
    """ Get web.ctx object for the Template helper """
    if self._WEB_CTX_KEY not in web.ctx:
        web.ctx[self._WEB_CTX_KEY] = {
            "javascript": {"footer": [], "header": []},
            "css": []
        }
    return web.ctx.get(self._WEB_CTX_KEY)
A generic hook that links the TemplateHelper with PluginManager
def _generic_hook(self, name, **kwargs):
    """ A generic hook that links the TemplateHelper with PluginManager """
    entries = [entry for entry in self._plugin_manager.call_hook(name, **kwargs) if entry is not None]
    return "\n".join(entries)
Make a json dump of `data` that can be used directly in a `<script>` tag. Available as json() inside templates
def _json_safe_dump(self, data):
    """
        Make a json dump of `data` that can be used directly in a `<script>` tag.
        Available as json() inside templates.
    """
    return json.dumps(data).replace(u'<', u'\\u003c') \
        .replace(u'>', u'\\u003e') \
        .replace(u'&', u'\\u0026') \
        .replace(u"'", u'\\u0027')
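The escaping matters because a raw </script> inside a JSON string would otherwise terminate the enclosing <script> tag; for example:

# Illustration of the escaping above (template_helper is the owning instance):
payload = "</script><script>alert(1)</script>"
safe = template_helper._json_safe_dump(payload)  # exposed as json() in templates
# safe == '"\\u003c/script\\u003e\\u003cscript\\u003ealert(1)\\u003c/script\\u003e"'
# The browser now parses it as inert string data instead of a closing tag.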
GET request
def GET_AUTH(self, courseid):  # pylint: disable=arguments-differ
    """ GET request """
    course, __ = self.get_course_and_check_rights(courseid)

    if course.is_lti():
        raise web.notfound()

    if "download" in web.input():
        web.header('Content-Type', 'text/x-yaml', unique=True)
        web.header('Content-Disposition', 'attachment; filename="aggregations.yaml"', unique=True)
        if course.use_classrooms():
            aggregations = [{"default": aggregation["default"],
                             "description": aggregation["description"],
                             "groups": aggregation["groups"],
                             "students": aggregation["students"],
                             "tutors": aggregation["tutors"]}
                            for aggregation in self.user_manager.get_course_aggregations(course)]
        else:
            aggregations = [{"default": aggregation["default"],
                             "description": aggregation["description"],
                             "groups": aggregation["groups"],
                             "students": aggregation["students"],
                             "tutors": aggregation["tutors"]}
                            for aggregation in self.user_manager.get_course_aggregations(course)
                            if len(aggregation["groups"]) > 0]

        return yaml.dump(aggregations)

    return self.page(course)
POST request
def POST_AUTH(self, courseid):  # pylint: disable=arguments-differ
    """ POST request """
    course, __ = self.get_course_and_check_rights(courseid)

    if course.is_lti():
        raise web.notfound()

    error = False
    try:
        if self.user_manager.has_admin_rights_on_course(course):
            data = web.input()
            if 'classroom' in data:
                default = True if self.database.aggregations.find_one({"courseid": courseid,
                                                                       "default": True}) is None else False
                self.database.aggregations.insert({"default": default,
                                                   "courseid": courseid,
                                                   "students": [],
                                                   "tutors": [],
                                                   "groups": [],
                                                   "description": data['classroom']})
                msg = _("New classroom created.")
            elif 'default' in data:
                self.database.aggregations.find_one_and_update({"courseid": courseid, "default": True},
                                                               {"$set": {"default": False}})
                self.database.aggregations.find_one_and_update({"_id": ObjectId(data['default'])},
                                                               {"$set": {"default": True}})
                msg = _("Default classroom changed.")
            else:  # default, but with no classroom detected
                msg = _("Invalid classroom selected.")
        else:
            msg = _("You have no rights to add/change classrooms")
            error = True
    except:
        msg = _('User returned an invalid form.')
        error = True

    return self.page(course, msg, error)
Get all data and display the page
def page(self, course, msg="", error=False):
    """ Get all data and display the page """
    aggregations = OrderedDict()
    taskids = list(course.get_tasks().keys())

    for aggregation in self.user_manager.get_course_aggregations(course):
        aggregations[aggregation['_id']] = dict(list(aggregation.items()) +
                                                [("tried", 0),
                                                 ("done", 0),
                                                 ("url", self.submission_url_generator(aggregation['_id']))
                                                 ])

        data = list(self.database.submissions.aggregate(
            [
                {
                    "$match":
                        {
                            "courseid": course.get_id(),
                            "taskid": {"$in": taskids},
                            "username": {"$in": aggregation["students"]}
                        }
                },
                {
                    "$group":
                        {
                            "_id": "$taskid",
                            "tried": {"$sum": 1},
                            "done": {"$sum": {"$cond": [{"$eq": ["$result", "success"]}, 1, 0]}}
                        }
                },
            ]))

        for c in data:
            aggregations[aggregation['_id']]["tried"] += 1 if c["tried"] else 0
            aggregations[aggregation['_id']]["done"] += 1 if c["done"] else 0

    my_aggregations, other_aggregations = [], []
    for aggregation in aggregations.values():
        if self.user_manager.session_username() in aggregation["tutors"]:
            my_aggregations.append(aggregation)
        else:
            other_aggregations.append(aggregation)

    if "csv" in web.input():
        return make_csv(data)

    return self.template_helper.get_renderer().course_admin.aggregation_list(course,
                                                                              [my_aggregations, other_aggregations],
                                                                              msg, error)
Runs a new job. It works exactly like the Client class, except that there is no callback
def new_job(self, task, inputdata, launcher_name="Unknown", debug=False):
    """ Runs a new job. It works exactly like the Client class, except that there is no callback """
    bjobid = uuid.uuid4()
    self._waiting_jobs.append(str(bjobid))
    self._client.new_job(task, inputdata,
                         (lambda result, grade, problems, tests, custom, archive, stdout, stderr:
                          self._callback(bjobid, result, grade, problems, tests, custom, archive, stdout, stderr)),
                         launcher_name, debug)
    return bjobid
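A hedged usage sketch (illustrative only: `runner` stands for whatever object defines new_job() above, and the task object and input dict are assumptions):

# Illustrative usage: `runner` owns new_job(); `task` is a loaded task object.
inputdata = {"question1": "print('hello')"}
bjobid = runner.new_job(task, inputdata, launcher_name="replay")
# The result arrives later through self._callback(bjobid, ...); the caller
# matches it against the returned bjobid rather than passing a callback.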