idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
async def handle_client_get_queue(self, client_addr, _: ClientGetQueue):
    """Handles a ClientGetQueue message. Sends back info about the job queue."""
    # Running jobs: (job_id, is this client's job, agent friendly name,
    #                "course/task", launcher, start time, hard end time).
    jobs_running = list()
    for backend_job_id, content in self._job_running.items():
        jobs_running.append((content[1].job_id, backend_job_id[0] == client_addr,
                             self._registered_agents[content[0]],
                             content[1].course_id + "/" + content[1].task_id,
                             content[1].launcher, int(content[2]),
                             int(content[2]) + content[1].time_limit))

    # Waiting jobs: (job_id, is this client's job, "course/task", launcher, time limit).
    jobs_waiting = list()
    for job_client_addr, msg in self._waiting_jobs.items():
        if isinstance(msg, ClientNewJob):
            jobs_waiting.append((msg.job_id, job_client_addr[0] == client_addr,
                                 msg.course_id + "/" + msg.task_id, msg.launcher,
                                 msg.time_limit))

    await ZMQUtils.send_with_addr(self._client_socket, client_addr,
                                  BackendGetQueue(jobs_running, jobs_waiting))
async def update_queue(self):
    """Send waiting jobs to available agents."""
    # Agents popped from the list that cannot run any waiting job are parked
    # here and restored at the end, so the while loop terminates.
    not_found_for_agent = []

    while len(self._available_agents) > 0 and len(self._waiting_jobs) > 0:
        agent_addr = self._available_agents.pop(0)

        # Find the first waiting job whose environment is available on this agent.
        found = False
        client_addr, job_id, job_msg = None, None, None
        for (client_addr, job_id), job_msg in self._waiting_jobs.items():
            if job_msg.environment in self._containers_on_agent[agent_addr]:
                found = True
                break

        if not found:
            self._logger.debug("Nothing to do for agent %s", agent_addr)
            not_found_for_agent.append(agent_addr)
            continue

        # Move the job from the waiting queue to the running set.
        del self._waiting_jobs[(client_addr, job_id)]
        job_id = (client_addr, job_msg.job_id)
        self._job_running[job_id] = (agent_addr, job_msg, time.time())
        self._logger.info("Sending job %s %s to agent %s", client_addr, job_msg.job_id, agent_addr)
        await ZMQUtils.send_with_addr(self._agent_socket, agent_addr,
                                      BackendNewJob(job_id, job_msg.course_id, job_msg.task_id,
                                                    job_msg.inputdata, job_msg.environment,
                                                    job_msg.enable_network, job_msg.time_limit,
                                                    job_msg.hard_time_limit, job_msg.mem_limit,
                                                    job_msg.debug))

    # Give back the agents that had nothing to do.
    self._available_agents += not_found_for_agent
async def handle_agent_hello(self, agent_addr, message: AgentHello):
    """Handle an AgentHello message. Adds the agent to the list of available agents."""
    self._logger.info("Agent %s (%s) said hello", agent_addr, message.friendly_name)

    if agent_addr in self._registered_agents:
        # Agent reconnected without unregistering; clean up its old state first.
        await self._delete_agent(agent_addr)

    self._registered_agents[agent_addr] = message.friendly_name
    # One entry per job slot: the agent appears once per job it can run concurrently.
    self._available_agents.extend([agent_addr for _ in range(0, message.available_job_slots)])
    self._containers_on_agent[agent_addr] = message.available_containers.keys()
    self._ping_count[agent_addr] = 0

    # _containers maps container name -> (image id, created timestamp, [agents having it]).
    for container_name, container_info in message.available_containers.items():
        if container_name in self._containers:
            if self._containers[container_name][0] == container_info["id"]:
                # Same container version: just register this agent for it.
                self._logger.debug("Registering container %s for agent %s", container_name, str(agent_addr))
                self._containers[container_name][2].append(agent_addr)
            elif self._containers[container_name][1] > container_info["created"]:
                # Registered version is newer than the agent's: keep the registered one.
                self._logger.warning("Container %s has multiple version: \n"
                                     "\t Currently registered agents have version %s (%i)\n"
                                     "\t New agent %s has version %s (%i)",
                                     container_name,
                                     self._containers[container_name][0], self._containers[container_name][1],
                                     str(agent_addr), container_info["id"], container_info["created"])
                self._containers[container_name][2].append(agent_addr)
            else:
                # Agent's version is newer: replace the registered entry, keeping the agent list.
                self._logger.warning("Container %s has multiple version: \n"
                                     "\t Currently registered agents have version %s (%i)\n"
                                     "\t New agent %s has version %s (%i)",
                                     container_name,
                                     self._containers[container_name][0], self._containers[container_name][1],
                                     str(agent_addr), container_info["id"], container_info["created"])
                self._containers[container_name] = (container_info["id"], container_info["created"],
                                                    self._containers[container_name][2] + [agent_addr])
        else:
            # First time this container is seen.
            self._logger.debug("Registering container %s for agent %s", container_name, str(agent_addr))
            self._containers[container_name] = (container_info["id"], container_info["created"], [agent_addr])

    # The new agent may allow waiting jobs to start.
    await self.update_queue()
    await self.send_container_update_to_client(self._registered_clients)
async def handle_agent_job_started(self, agent_addr, message: AgentJobStarted):
    """Handle an AgentJobStarted message. Forwards the notification to the client."""
    client_addr, client_job_id = message.job_id
    self._logger.debug("Job %s %s started on agent %s", client_addr, client_job_id, agent_addr)
    notification = BackendJobStarted(client_job_id)
    await ZMQUtils.send_with_addr(self._client_socket, client_addr, notification)
async def handle_agent_job_done(self, agent_addr, message: AgentJobDone):
    """Handle an AgentJobDone message. Sends the result back to the client and starts a new job if needed."""
    if agent_addr in self._registered_agents:
        self._logger.info("Job %s %s finished on agent %s", message.job_id[0], message.job_id[1], agent_addr)
        del self._job_running[message.job_id]
        await ZMQUtils.send_with_addr(self._client_socket, message.job_id[0],
                                      BackendJobDone(message.job_id[1], message.result, message.grade,
                                                     message.problems, message.tests, message.custom,
                                                     message.state, message.archive, message.stdout,
                                                     message.stderr))
        # The agent freed a job slot; make it available again.
        self._available_agents.append(agent_addr)
    else:
        self._logger.warning("Job result %s %s from non-registered agent %s",
                             message.job_id[0], message.job_id[1], agent_addr)
    await self.update_queue()
async def handle_agent_job_ssh_debug(self, _, message: AgentJobSSHDebug):
    """Handle an AgentJobSSHDebug message. Forwards the SSH credentials to the client."""
    client_addr, client_job_id = message.job_id
    debug_info = BackendJobSSHDebug(client_job_id, message.host, message.port, message.password)
    await ZMQUtils.send_with_addr(self._client_socket, client_addr, debug_info)
async def _do_ping(self):
    """Ping the agents, and remove the ones that stopped responding."""
    # Iterate over a copy: _delete_agent mutates _registered_agents.
    for agent_addr, friendly_name in list(self._registered_agents.items()):
        try:
            ping_count = self._ping_count.get(agent_addr, 0)
            if ping_count > 5:
                # Too many unanswered pings: consider the agent dead.
                self._logger.warning("Agent %s (%s) does not respond: removing from list.",
                                     agent_addr, friendly_name)
                delete_agent = True
            else:
                self._ping_count[agent_addr] = ping_count + 1
                await ZMQUtils.send_with_addr(self._agent_socket, agent_addr, Ping())
                delete_agent = False
        except Exception:
            # Fix: was a bare `except:`, which in async code also swallows
            # asyncio.CancelledError and prevents clean task cancellation.
            self._logger.exception("Failed to send ping to agent %s (%s). Removing it from list.",
                                   agent_addr, friendly_name)
            delete_agent = True

        if delete_agent:
            try:
                await self._delete_agent(agent_addr)
            except Exception:
                # Same fix as above: do not swallow CancelledError.
                self._logger.exception("Failed to delete agent %s (%s)!", agent_addr, friendly_name)

    # Reschedule the next ping round in one second.
    self._loop.call_later(1, self._create_safe_task, self._do_ping())
async def _delete_agent(self, agent_addr):
    """Deletes an agent: drops its job slots, unregisters it, and recovers its jobs."""
    del self._registered_agents[agent_addr]
    remaining_slots = [slot for slot in self._available_agents if slot != agent_addr]
    self._available_agents = remaining_slots
    await self._recover_jobs(agent_addr)
async def _recover_jobs(self, agent_addr):
    """Recover the jobs sent to a crashed agent: report them as crashed to their clients."""
    # Iterate over a reversed snapshot so entries can be deleted while iterating.
    for (client_addr, job_id), (agent, job_msg, _) in reversed(list(self._job_running.items())):
        if agent == agent_addr:
            await ZMQUtils.send_with_addr(self._client_socket, client_addr,
                                          BackendJobDone(job_id, ("crash", "Agent restarted"),
                                                         0.0, {}, {}, {}, "", None, None, None))
            del self._job_running[(client_addr, job_id)]
    await self.update_queue()
def parse_date(date, default=None):
    """
    Parse a date string in one of the supported formats.

    :param date: the string to parse; an empty string yields ``default``.
    :param default: value returned when ``date`` is empty; if None, an
                    exception is raised instead.
    :raises Exception: when the string matches no known format.
    """
    if date == "":
        if default is None:
            raise Exception("Unknown format for " + date)
        return default

    known_formats = ("%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M", "%Y-%m-%d %H", "%Y-%m-%d",
                     "%d/%m/%Y %H:%M:%S", "%d/%m/%Y %H:%M", "%d/%m/%Y %H", "%d/%m/%Y")
    for fmt in known_formats:
        try:
            return datetime.strptime(date, fmt)
        except ValueError:
            continue
    raise Exception("Unknown format for " + date)
def GET(self):
    """Handles GET request: account activation or display of the password-reset form."""
    # Registration pages are only for anonymous users, and only when enabled.
    if self.user_manager.session_logged_in() or not self.app.allow_registration:
        raise web.notfound()

    error = False
    reset = None
    msg = ""
    data = web.input()

    if "activate" in data:
        msg, error = self.activate_user(data)
    elif "reset" in data:
        msg, error, reset = self.get_reset_data(data)

    return self.template_helper.get_renderer().register(reset, msg, error)
def get_reset_data(self, data):
    """
    Look up the user matching the given reset hash.

    Returns (msg, error, reset) where reset is a dict with the user's info,
    or None when the hash is invalid.
    """
    user = self.database.users.find_one({"reset": data["reset"]})
    if user is None:
        return "Invalid reset hash.", True, None
    reset_info = {"hash": data["reset"],
                  "username": user["username"],
                  "realname": user["realname"]}
    return "", False, reset_info
def register_user(self, data):
    """Parses input and registers a new user. Returns (msg, error)."""
    error = False
    msg = ""

    email_re = re.compile(
        r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"
        r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"'
        r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE)

    # Input validation.
    if re.match(r"^[-_|~0-9A-Z]{4,}$", data["username"], re.IGNORECASE) is None:
        error = True
        msg = _("Invalid username format.")
    elif email_re.match(data["email"]) is None:
        error = True
        msg = _("Invalid email format.")
    elif len(data["passwd"]) < 6:
        error = True
        msg = _("Password too short.")
    elif data["passwd"] != data["passwd2"]:
        error = True
        msg = _("Passwords don't match !")

    if not error:
        # Username and email must both be unique.
        existing_user = self.database.users.find_one({"$or": [{"username": data["username"]},
                                                              {"email": data["email"]}]})
        if existing_user is not None:
            error = True
            if existing_user["username"] == data["username"]:
                msg = _("This username is already taken !")
            else:
                msg = _("This email address is already in use !")
        else:
            passwd_hash = hashlib.sha512(data["passwd"].encode("utf-8")).hexdigest()
            activate_hash = hashlib.sha512(str(random.getrandbits(256)).encode("utf-8")).hexdigest()
            self.database.users.insert({"username": data["username"],
                                        "realname": data["realname"],
                                        "email": data["email"],
                                        "password": passwd_hash,
                                        "activate": activate_hash,
                                        "bindings": {},
                                        "language": self.user_manager._session.get("language", "en")})
            try:
                # NOTE(review): `_()` is called with no message id below — the mail
                # body string appears to have been lost; verify against upstream source.
                web.sendmail(web.config.smtp_sendername, data["email"], _("Welcome on INGInious"),
                             _() + web.ctx.home + "/register?activate=" + activate_hash)
                msg = _("You are succesfully registered. An email has been sent to you for activation.")
            except:
                error = True
                msg = _("Something went wrong while sending you activation email. Please contact the administrator.")

    return msg, error
def lost_passwd(self, data):
    """Send a reset link to the user to recover their password. Returns (msg, error)."""
    error = False
    msg = ""

    email_re = re.compile(
        r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"
        r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"'
        r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE)

    if email_re.match(data["recovery_email"]) is None:
        error = True
        msg = _("Invalid email format.")

    if not error:
        reset_hash = hashlib.sha512(str(random.getrandbits(256)).encode("utf-8")).hexdigest()
        user = self.database.users.find_one_and_update({"email": data["recovery_email"]},
                                                       {"$set": {"reset": reset_hash}})
        if user is None:
            error = True
            msg = _("This email address was not found in database.")
        else:
            try:
                # NOTE(review): `_()` is called with no message id below — the mail
                # body string appears to have been lost; verify against upstream source.
                web.sendmail(web.config.smtp_sendername, data["recovery_email"],
                             _("INGInious password recovery"),
                             _().format(realname=user["realname"]) + web.ctx.home + "/register?reset=" + reset_hash)
                msg = _("An email has been sent to you to reset your password.")
            except:
                error = True
                msg = _("Something went wrong while sending you reset email. Please contact the administrator.")

    return msg, error
def reset_passwd(self, data):
    """Reset the user password. Returns (msg, error)."""
    error = False
    msg = ""

    # Input validation.
    if len(data["passwd"]) < 6:
        error = True
        msg = _("Password too short.")
    elif data["passwd"] != data["passwd2"]:
        error = True
        msg = _("Passwords don't match !")

    if not error:
        passwd_hash = hashlib.sha512(data["passwd"].encode("utf-8")).hexdigest()
        # Also unset "activate": resetting the password proves email ownership.
        user = self.database.users.find_one_and_update({"reset": data["reset_hash"]},
                                                       {"$set": {"password": passwd_hash},
                                                        "$unset": {"reset": True, "activate": True}})
        if user is None:
            error = True
            msg = _("Invalid reset hash.")
        else:
            msg = _("Your password has been successfully changed.")

    return msg, error
def POST(self):
    """Handles POST request: registration, lost-password, and password-reset forms."""
    # Registration pages are only for anonymous users, and only when enabled.
    if self.user_manager.session_logged_in() or not self.app.allow_registration:
        raise web.notfound()

    reset = None
    msg = ""
    error = False
    data = web.input()

    if "register" in data:
        msg, error = self.register_user(data)
    elif "lostpasswd" in data:
        msg, error = self.lost_passwd(data)
    elif "resetpasswd" in data:
        # Validate the reset hash before actually changing the password.
        msg, error, reset = self.get_reset_data(data)
        if reset:
            msg, error = self.reset_passwd(data)
        if not error:
            reset = None

    return self.template_helper.get_renderer().register(reset, msg, error)
def get_readable_tasks(self, course):
    """Returns the list of all available tasks in a course."""
    course_fs = self._filesystem.from_subfolder(course.get_id())
    folders = course_fs.list(folders=True, files=False, recursive=False)
    # Folder names end with a separator; strip it to get the task id.
    return [folder[:-1] for folder in folders
            if self._task_file_exists(course_fs.from_subfolder(folder))]
def _task_file_exists(self, task_fs):
    """Returns True if a task file (task.<ext>) exists in this directory."""
    return any(task_fs.exists("task.{}".format(ext))
               for ext in self.get_available_task_file_extensions())
def delete_all_possible_task_files(self, courseid, taskid):
    """Deletes all possible task files in the directory, to allow changing the task format."""
    if not id_checker(courseid):
        raise InvalidNameException("Course with invalid name: " + courseid)
    if not id_checker(taskid):
        raise InvalidNameException("Task with invalid name: " + taskid)
    task_fs = self.get_task_fs(courseid, taskid)
    for ext in self.get_available_task_file_extensions():
        try:
            task_fs.delete("task." + ext)
        except:
            # Best-effort: the file for this extension may simply not exist.
            pass
def prepare_request(settings):
    """Prepare a SAML request dict for python3-saml from the web.py context."""
    # Set the assertion consumer service URL dynamically from the current request.
    settings["sp"]["assertionConsumerService"] = {
        "url": web.ctx.homedomain + web.ctx.homepath + "/auth/callback/" + settings["id"],
        "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
    }
    data = web.input()
    return {
        'https': 'on' if web.ctx.protocol == 'https' else 'off',
        'http_host': web.ctx.environ["SERVER_NAME"],
        'server_port': web.ctx.environ["SERVER_PORT"],
        'script_name': web.ctx.homepath,
        # web.input() mixes GET and POST parameters; pass a copy for each.
        'get_data': data.copy(),
        'post_data': data.copy(),
        'query_string': web.ctx.query
    }
40,920 | def _checkpath ( self , path ) : if path . startswith ( "/" ) or ".." in path or path . strip ( ) != path : raise NotFoundException ( ) | Checks that a given path is valid . If it s not raises NotFoundException |
def API_GET(self, courseid=None):
    """
    List courses available to the connected client (or a single course when
    courseid is given). Returns (200, list of course dicts).
    """
    output = []

    if courseid is None:
        courses = self.course_factory.get_all_courses()
    else:
        try:
            courses = {courseid: self.course_factory.get_course(courseid)}
        except:
            raise APINotFound("Course not found")

    username = self.user_manager.session_username()
    user_info = self.database.users.find_one({"username": username})

    for courseid, course in courses.items():
        # Show a course if the user is registered, or could register.
        if self.user_manager.course_is_open_to_user(course, username, False) or course.is_registration_possible(user_info):
            data = {
                "id": courseid,
                "name": course.get_name(self.user_manager.session_language()),
                "require_password": course.is_password_needed_for_registration(),
                "is_registered": self.user_manager.course_is_open_to_user(course, username, False)
            }
            # Task list and grade are only visible to registered users.
            if self.user_manager.course_is_open_to_user(course, username, False):
                data["tasks"] = {taskid: task.get_name(self.user_manager.session_language())
                                 for taskid, task in course.get_tasks().items()}
                data["grade"] = self.user_manager.get_course_cache(username, course)["grade"]
            output.append(data)

    return 200, output
def _api_convert_output(return_value):
    """Convert the output to the serialization format the client asks for (Content-Type header)."""
    content_type = web.ctx.environ.get('CONTENT_TYPE', 'text/json')

    if "text/json" in content_type:
        web.header('Content-Type', 'text/json; charset=utf-8')
        return json.dumps(return_value)
    if "text/html" in content_type:
        # HTML: YAML dump wrapped in <pre>, escaped for safety.
        web.header('Content-Type', 'text/html; charset=utf-8')
        dump = yaml.dump(return_value)
        return "<pre>" + web.websafe(dump) + "</pre>"
    if "text/yaml" in content_type or "text/x-yaml" in content_type or "application/yaml" in content_type or "application/x-yaml" in content_type:
        web.header('Content-Type', 'text/yaml; charset=utf-8')
        dump = yaml.dump(return_value)
        return dump

    # Default: JSON.
    web.header('Content-Type', 'text/json; charset=utf-8')
    return json.dumps(return_value)
def _handle_api(self, handler, handler_args, handler_kwargs):
    """Handle a call to a subclass handler and convert the output to an appropriate HTTP response."""
    try:
        status_code, return_value = handler(*handler_args, **handler_kwargs)
    except APIError as error:
        # APIError knows how to serialize itself.
        return error.send()

    web.ctx.status = _convert_http_status(status_code)
    return _api_convert_output(return_value)
def _guess_available_methods(self):
    """Guess the HTTP methods implemented by the subclass (those overriding APIPage's defaults)."""
    candidates = ["GET", "POST", "PUT", "DELETE", "PATCH", "HEAD", "OPTIONS"]
    return [verb for verb in candidates
            if getattr(type(self), "API_{}".format(verb)) != getattr(APIPage, "API_{}".format(verb))]
def _verify_authentication(self, handler, args, kwargs):
    """Verify that the user is authenticated before dispatching to the handler."""
    if self.user_manager.session_logged_in():
        return handler(*args, **kwargs)
    raise APIForbidden()
def send(self):
    """Send the API exception to the client as an HTTP response."""
    status = _convert_http_status(self.status_code)
    web.ctx.status = status
    return _api_convert_output(self.return_value)
def _job_done_callback(self, submissionid, task, result, grade, problems, tests, custom, state, archive, stdout, stderr, newsub=True):
    """
    Callback called by Client when a job is done. Updates the submission in the
    database with the data returned after the completion of the job.
    """
    submission = self.get_submission(submissionid, False)
    submission = self.get_input_from_submission(submission)

    data = {
        # "done" covers both success and failure; anything else is an internal error.
        "status": ("done" if result[0] == "success" or result[0] == "failed" else "error"),
        "result": result[0],
        "grade": grade,
        "text": result[1],
        "tests": tests,
        "problems": problems,
        "archive": (self._gridfs.put(archive) if archive is not None else None),
        "custom": custom,
        "state": state,
        "stdout": stdout,
        "stderr": stderr
    }

    # Job is finished: drop the transient job/SSH-debug fields.
    unset_obj = {"jobid": "", "ssh_host": "", "ssh_port": "", "ssh_password": ""}

    submission = self._database.submissions.find_one_and_update({"_id": submission["_id"]},
                                                                {"$set": data, "$unset": unset_obj},
                                                                return_document=ReturnDocument.AFTER)

    self._hook_manager.call_hook("submission_done", submission=submission, archive=archive, newsub=newsub)

    for username in submission["username"]:
        self._user_manager.update_user_stats(username, task, submission, result[0], grade, state, newsub)

    # Send the grade back to the LTI Tool Consumer if the submission came from LTI.
    if "outcome_service_url" in submission and "outcome_result_id" in submission and "outcome_consumer_key" in submission:
        for username in submission["username"]:
            self._lti_outcome_manager.add(username, submission["courseid"], submission["taskid"],
                                          submission["outcome_consumer_key"],
                                          submission["outcome_service_url"],
                                          submission["outcome_result_id"])
def _before_submission_insertion(self, task, inputdata, debug, obj):
    """
    Called before any new submission is inserted into the database. Allows
    modifying obj, the new document that will be inserted. Should be
    overridden in subclasses.
    """
    username = self._user_manager.session_username()

    if task.is_group_task() and not self._user_manager.has_staff_rights_on_course(task.get_course(), username):
        # Group task: the submission belongs to every student in the user's group.
        group = self._database.aggregations.find_one(
            {"courseid": task.get_course_id(), "groups.students": username},
            {"groups": {"$elemMatch": {"students": username}}})
        obj.update({"username": group["groups"][0]["students"]})
    else:
        obj.update({"username": [username]})

    lti_info = self._user_manager.session_lti_info()
    if lti_info is not None and task.get_course().lti_send_back_grade():
        outcome_service_url = lti_info["outcome_service_url"]
        outcome_result_id = lti_info["outcome_result_id"]
        outcome_consumer_key = lti_info["consumer_key"]

        if outcome_result_id is None or outcome_service_url is None:
            # Missing LTI outcome info: cannot send the grade back; skip silently.
            self._logger.error("outcome_result_id or outcome_service_url is None, but grade needs to be sent back to TC! Ignoring.")
            return

        obj.update({"outcome_service_url": outcome_service_url,
                    "outcome_result_id": outcome_result_id,
                    "outcome_consumer_key": outcome_consumer_key})
def get_submission(self, submissionid, user_check=True):
    """Get a submission from the database; returns None if user_check fails."""
    submission = self._database.submissions.find_one({'_id': ObjectId(submissionid)})
    if not user_check or self.user_is_submission_owner(submission):
        return submission
    return None
def _delete_exceeding_submissions(self, username, task, max_submissions_bound=-1):
    """
    Deletes exceeding submissions from the database, to keep the database
    relatively small. Returns the list of deleted submission ids (as strings).
    """
    # Effective limit: the tighter of the task setting and the optional bound.
    if max_submissions_bound <= 0:
        max_submissions = task.get_stored_submissions()
    elif task.get_stored_submissions() <= 0:
        max_submissions = max_submissions_bound
    else:
        max_submissions = min(max_submissions_bound, task.get_stored_submissions())

    # 0 or negative means "keep everything".
    if max_submissions <= 0:
        return []

    tasks = list(self._database.submissions.find(
        {"username": username, "courseid": task.get_course_id(), "taskid": task.get_id()},
        projection=["_id", "status", "result", "grade", "submitted_on"],
        sort=[('submitted_on', pymongo.ASCENDING)]))

    # Submission ids that must never be deleted.
    to_keep = set([])

    if task.get_evaluate() == 'best':
        # Keep the best (highest-grade, finished) submission.
        idx_best = -1
        for idx, val in enumerate(tasks):
            if val["status"] == "done":
                if idx_best == -1 or tasks[idx_best]["grade"] < val["grade"]:
                    idx_best = idx
        if idx_best != -1:
            to_keep.add(tasks[idx_best]["_id"])
    elif task.get_evaluate() == 'student':
        # Keep the submission the student explicitly selected.
        user_task = self._database.user_tasks.find_one(
            {"courseid": task.get_course_id(), "taskid": task.get_id(), "username": username})
        submissionid = user_task.get('submissionid', None)
        if submissionid:
            to_keep.add(submissionid)

    # Always keep submissions still being graded.
    for val in tasks:
        if val["status"] == "waiting":
            to_keep.add(val["_id"])

    # Fill the remaining slots with the most recent submissions.
    while len(to_keep) < max_submissions and len(tasks) > 0:
        to_keep.add(tasks.pop()["_id"])

    to_delete = {val["_id"] for val in tasks}.difference(to_keep)
    self._database.submissions.delete_many({"_id": {"$in": list(to_delete)}})

    return list(map(str, to_delete))
def is_done(self, submissionid_or_submission, user_check=True):
    """Tells if a submission is done and its result is available."""
    if isinstance(submissionid_or_submission, dict):
        submission = submissionid_or_submission
    else:
        submission = self.get_submission(submissionid_or_submission, False)
    if user_check and not self.user_is_submission_owner(submission):
        return None
    return submission["status"] in ("done", "error")
def user_is_submission_owner(self, submission):
    """Returns True if the current user is an owner of this submission, False otherwise."""
    if not self._user_manager.session_logged_in():
        raise Exception("A user must be logged in to verify if he owns a jobid")
    owners = submission["username"]
    return self._user_manager.session_username() in owners
def get_user_submissions(self, task):
    """Get all the current user's submissions for a given task, most recent first."""
    if not self._user_manager.session_logged_in():
        raise Exception("A user must be logged in to get his submissions")
    query = {"username": self._user_manager.session_username(),
             "taskid": task.get_id(),
             "courseid": task.get_course_id()}
    cursor = self._database.submissions.find(query)
    cursor.sort([("submitted_on", -1)])
    return list(cursor)
def get_user_last_submissions(self, limit=5, request=None):
    """Get the last submissions of the current user, one per (course, task)."""
    if request is None:
        request = {}
    request.update({"username": self._user_manager.session_username()})

    # Pipeline: group by (courseid, taskid), then keep only the submission whose
    # submitted_on equals the group's max (the others are mapped to False and removed).
    data = self._database.submissions.aggregate([
        {"$match": request},
        {"$group": {"_id": {"courseid": "$courseid", "taskid": "$taskid"},
                    "submitted_on": {"$max": "$submitted_on"},
                    "submissions": {"$push": {"_id": "$_id", "result": "$result", "status": "$status",
                                              "courseid": "$courseid", "taskid": "$taskid",
                                              "submitted_on": "$submitted_on"}},
                    }},
        {"$project": {"submitted_on": 1,
                      "submissions": {"$setDifference": [
                          {"$map": {"input": "$submissions", "as": "submission",
                                    "in": {"$cond": [{"$eq": ["$submitted_on", "$$submission.submitted_on"]},
                                                     "$$submission", False]}}},
                          [False]]}}},
        {"$sort": {"submitted_on": pymongo.DESCENDING}},
        {"$limit": limit}
    ])
    return [item["submissions"][0] for item in data]
def _handle_ssh_callback(self, submission_id, host, port, password):
    """Handles the creation of a remote ssh debug server: stores the credentials on the submission."""
    if host is None:
        return
    credentials = {"ssh_host": host, "ssh_port": port, "ssh_password": password}
    self._database.submissions.update_one({"_id": submission_id}, {"$set": credentials})
def filesystem_from_config_dict(config_fs):
    """
    Given a dict containing an entry "module" with a FSProvider identifier,
    parse the configuration and return a fs_provider. Exits the process on error.
    """
    if "module" not in config_fs:
        print("Key 'module' should be defined for the filesystem provider ('fs' configuration option)", file=sys.stderr)
        exit(1)

    filesystem_providers = get_filesystems_providers()
    if config_fs["module"] not in filesystem_providers:
        print("Unknown filesystem provider " + config_fs["module"], file=sys.stderr)
        exit(1)

    fs_class = filesystem_providers[config_fs["module"]]
    fs_args_needed = fs_class.get_needed_args()
    fs_args = {}
    # Coerce each configured argument with the type declared by the provider.
    for arg_name, (arg_type, arg_required, _) in fs_args_needed.items():
        if arg_name in config_fs:
            fs_args[arg_name] = arg_type(config_fs[arg_name])
        elif arg_required:
            print("fs option {} is required".format(arg_name), file=sys.stderr)
            exit(1)

    try:
        return fs_class.init_from_args(**fs_args)
    except:
        print("Unable to load class " + config_fs["module"], file=sys.stderr)
        raise
async def _kill_it_with_fire(self, container_id):
    """Kill a container, with fire. Marks it as errored and stops watching it."""
    if container_id in self._watching:
        self._watching.remove(container_id)
        self._container_had_error.add(container_id)
        try:
            await self._docker_interface.kill_container(container_id)
        except Exception:
            # Fix: was a bare `except:`, which in async code also swallows
            # asyncio.CancelledError. Best-effort: the container may already be dead.
            pass
def _cleanup(self):
    """Cleanup the stored sessions, at most once per timeout interval."""
    now = time.time()
    timeout = self._config.timeout
    if now - self._last_cleanup_time > timeout:
        self.store.cleanup(timeout)
        self._last_cleanup_time = now
def expired(self):
    """Called when an expired session is accessed: mark it killed, persist, and abort."""
    self._data["_killed"] = True
    self.save()
    raise SessionExpired(self._config.expired_message)
def delete_account(self, data):
    """Delete the current user's account from the DB. Returns (msg, error) on failure; redirects on success."""
    error = False
    msg = ""
    username = self.user_manager.session_username()
    # The email must match as a confirmation before deleting anything.
    result = self.database.users.find_one_and_delete({"username": username,
                                                      "email": data.get("delete_email", "")})
    if not result:
        error = True
        msg = _("The specified email is incorrect.")
    else:
        # Remove all the user's data, unregister them everywhere, and log out.
        self.database.submissions.remove({"username": username})
        self.database.user_tasks.remove({"username": username})
        all_courses = self.course_factory.get_all_courses()
        for courseid, course in all_courses.items():
            if self.user_manager.course_is_open_to_user(course, username):
                self.user_manager.course_unregister_user(course, username)
        self.user_manager.disconnect_user()
        raise web.seeother("/index")
    return msg, error
def dump(data, stream=None, **kwds):
    """
    Serialize a Python object into a YAML stream.
    If stream is None, return the produced string instead.
    Dict keys are produced in the order in which they appear in OrderedDicts.
    """
    class OrderedDumper(SafeDumper):
        pass

    def _dict_representer(dumper, data):
        # Preserve OrderedDict key order in the output mapping.
        return dumper.represent_mapping(original_yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                        list(data.items()))

    def _long_str_representer(dumper, data):
        if data.find("\n") != -1:
            # Multi-line strings: drop tabs and trailing blanks so the
            # literal block style ('|') can represent them.
            # NOTE(review): tab-replacement width was collapsed in this source — verify upstream.
            data = data.replace("\t", "    ")
            data = "\n".join([p.rstrip() for p in data.split("\n")])
            return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
        else:
            return dumper.represent_scalar('tag:yaml.org,2002:str', data)

    def _default_representer(dumper, data):
        return _long_str_representer(dumper, str(data))

    # Fix: the str representer was registered twice; once is enough.
    OrderedDumper.add_representer(str, _long_str_representer)
    OrderedDumper.add_representer(OrderedDict, _dict_representer)
    OrderedDumper.add_representer(None, _default_representer)

    s = original_yaml.dump(data, stream, OrderedDumper, encoding='utf-8', allow_unicode=True,
                           default_flow_style=False, indent=4, **kwds)
    if s is not None:
        return s.decode('utf-8')
    else:
        return
def _check_for_parsable_text(self, val):
    """Util to replace ParsableText instances by their original content, recursively."""
    if isinstance(val, ParsableText):
        return val.original_content()
    if isinstance(val, list):
        for idx, item in enumerate(val):
            val[idx] = self._check_for_parsable_text(item)
        return val
    if isinstance(val, dict):
        for key in val:
            val[key] = self._check_for_parsable_text(val[key])
    return val
def API_GET(self, courseid, taskid=None):
    """
    List tasks available to the connected client (or a single task when taskid
    is given). Returns (200, list of task dicts).
    """
    try:
        course = self.course_factory.get_course(courseid)
    except:
        raise APINotFound("Course not found")

    if not self.user_manager.course_is_open_to_user(course, lti=False):
        raise APIForbidden("You are not registered to this course")

    if taskid is None:
        tasks = course.get_tasks()
    else:
        try:
            tasks = {taskid: course.get_task(taskid)}
        except:
            raise APINotFound("Task not found")

    output = []
    for taskid, task in tasks.items():
        task_cache = self.user_manager.get_task_cache(self.user_manager.session_username(),
                                                      task.get_course_id(), task.get_id())

        data = {
            "id": taskid,
            "name": task.get_name(self.user_manager.session_language()),
            "authors": task.get_authors(self.user_manager.session_language()),
            "deadline": task.get_deadline(),
            "status": "notviewed" if task_cache is None else "notattempted" if task_cache["tried"] == 0 else "succeeded" if task_cache["succeeded"] else "failed",
            "grade": task_cache.get("grade", 0.0) if task_cache is not None else 0.0,
            "grade_weight": task.get_grading_weight(),
            "context": task.get_context(self.user_manager.session_language()).original_content(),
            "problems": []
        }

        for problem in task.get_problems():
            pcontent = problem.get_original_content()
            pcontent["id"] = problem.get_id()
            if pcontent["type"] == "match":
                # Never leak the expected answer to the client.
                del pcontent["answer"]
            if pcontent["type"] == "multiple_choice":
                pcontent["choices"] = {key: val["text"] for key, val in enumerate(pcontent["choices"])}
            pcontent = self._check_for_parsable_text(pcontent)
            data["problems"].append(pcontent)

        output.append(data)

    return 200, output
40,944 | def load_input ( ) : file = open ( _input_file , 'r' ) result = json . loads ( file . read ( ) . strip ( '\0' ) . strip ( ) ) file . close ( ) return result | Open existing input file |
40,945 | def parse_template ( input_filename , output_filename = '' ) : data = load_input ( ) with open ( input_filename , 'rb' ) as file : template = file . read ( ) . decode ( "utf-8" ) if not 'input' in data : raise ValueError ( "Could not find 'input' in data" ) for field in data [ 'input' ] : subs = [ "filename" , "value" ] if isinstance ( data [ 'input' ] [ field ] , dict ) and "filename" in data [ 'input' ] [ field ] and "value" in data [ 'input' ] [ field ] else [ "" ] for sub in subs : displayed_field = field + ( ":" if sub else "" ) + sub regex = re . compile ( "@([^@]*)@" + displayed_field + '@([^@]*)@' ) for prefix , postfix in set ( regex . findall ( template ) ) : if sub == "value" : text = open ( data [ 'input' ] [ field ] [ sub ] , 'rb' ) . read ( ) . decode ( 'utf-8' ) elif sub : text = data [ 'input' ] [ field ] [ sub ] else : text = data [ 'input' ] [ field ] rep = "\n" . join ( [ prefix + v + postfix for v in text . splitlines ( ) ] ) template = template . replace ( "@{0}@{1}@{2}@" . format ( prefix , displayed_field , postfix ) , rep ) if output_filename == '' : output_filename = input_filename try : os . makedirs ( os . path . dirname ( output_filename ) ) except OSError as e : pass with open ( output_filename , 'wb' ) as file : file . write ( template . encode ( "utf-8" ) ) | Parses a template file . Replaces all occurrences of
40,946 | def _callable_once ( func ) : def once ( * args , ** kwargs ) : if not once . called : once . called = True return func ( * args , ** kwargs ) once . called = False return once | Returns a function that is only callable once ; any other call will do nothing |
40,947 | async def _ask_queue_update ( self ) : try : while True : await asyncio . sleep ( self . _queue_update_timer ) if self . _queue_update_last_attempt == 0 or self . _queue_update_last_attempt > self . _queue_update_last_attempt_max : if self . _queue_update_last_attempt : self . _logger . error ( "Asking for a job queue update despite previous update not yet received" ) else : self . _logger . debug ( "Asking for a job queue update" ) self . _queue_update_last_attempt = 1 await self . _simple_send ( ClientGetQueue ( ) ) else : self . _logger . error ( "Not asking for a job queue update as previous update not yet received" ) except asyncio . CancelledError : return except KeyboardInterrupt : return | Send a ClientGetQueue message to the backend if one is not already sent |
40,948 | async def _handle_job_queue_update ( self , message : BackendGetQueue ) : self . _logger . debug ( "Received job queue update" ) self . _queue_update_last_attempt = 0 self . _queue_cache = message new_job_queue_cache = { } for ( job_id , is_local , _ , _2 , _3 , _4 , max_end ) in message . jobs_running : if is_local : new_job_queue_cache [ job_id ] = ( - 1 , max_end - time . time ( ) ) wait_time = 0 nb_tasks = 0 for ( job_id , is_local , _ , _2 , timeout ) in message . jobs_waiting : if timeout > 0 : wait_time += timeout if is_local : new_job_queue_cache [ job_id ] = ( nb_tasks , wait_time ) nb_tasks += 1 self . _queue_job_cache = new_job_queue_cache | Handles a BackendGetQueue containing a snapshot of the job queue |
40,949 | def new_job ( self , task , inputdata , callback , launcher_name = "Unknown" , debug = False , ssh_callback = None ) : job_id = str ( uuid . uuid4 ( ) ) if debug == "ssh" and ssh_callback is None : self . _logger . error ( "SSH callback not set in %s/%s" , task . get_course_id ( ) , task . get_id ( ) ) callback ( ( "crash" , "SSH callback not set." ) , 0.0 , { } , { } , { } , None , "" , "" ) return ssh_callback = _callable_once ( ssh_callback if ssh_callback is not None else lambda _1 , _2 , _3 : None ) environment = task . get_environment ( ) if environment not in self . _available_containers : self . _logger . warning ( "Env %s not available for task %s/%s" , environment , task . get_course_id ( ) , task . get_id ( ) ) ssh_callback ( None , None , None ) callback ( ( "crash" , "Environment not available." ) , 0.0 , { } , { } , "" , { } , None , "" , "" ) return enable_network = task . allow_network_access_grading ( ) try : limits = task . get_limits ( ) time_limit = int ( limits . get ( 'time' , 20 ) ) hard_time_limit = int ( limits . get ( 'hard_time' , 3 * time_limit ) ) mem_limit = int ( limits . get ( 'memory' , 200 ) ) except : self . _logger . exception ( "Cannot retrieve limits for task %s/%s" , task . get_course_id ( ) , task . get_id ( ) ) ssh_callback ( None , None , None ) callback ( ( "crash" , "Error while reading task limits" ) , 0.0 , { } , { } , "" , { } , None , "" , "" ) return msg = ClientNewJob ( job_id , task . get_course_id ( ) , task . get_id ( ) , inputdata , environment , enable_network , time_limit , hard_time_limit , mem_limit , debug , launcher_name ) self . _loop . call_soon_threadsafe ( asyncio . ensure_future , self . _create_transaction ( msg , task = task , callback = callback , ssh_callback = ssh_callback ) ) return job_id | Add a new job . Every callback will be called once and only once . |
40,950 | def kill_job ( self , job_id ) : self . _loop . call_soon_threadsafe ( asyncio . ensure_future , self . _simple_send ( ClientKillJob ( job_id ) ) ) | Kills a running job |
40,951 | def get_codeblock ( language , text ) : rst = "\n\n.. code-block:: " + language + "\n\n" for line in text . splitlines ( ) : rst += "\t" + line + "\n" rst += "\n" return rst | Generates rst codeblock for given text and language |
40,952 | def get_imageblock ( filename , format = '' ) : _ , extension = os . path . splitext ( filename ) with open ( filename , "rb" ) as image_file : encoded_string = base64 . b64encode ( image_file . read ( ) ) . decode ( 'utf-8' ) return '\n\n.. raw:: html\n\n\t<img src="data:image/' + ( format if format else extension [ 1 : ] ) + ';base64,' + encoded_string + '">\n' | Generates rst raw block for given image filename and format |
40,953 | def get_admonition ( cssclass , title , text ) : rst = ( "\n\n.. admonition:: " + title + "\n" ) if title else "\n\n.. note:: \n" rst += "\t:class: alert alert-" + cssclass + "\n\n" for line in text . splitlines ( ) : rst += "\t" + line + "\n" rst += "\n" return rst | Generates rst admonition block given a bootstrap alert css class title and text |
40,954 | def init ( ) : if "_" not in builtins . __dict__ : os . environ [ "LANGUAGE" ] = inginious . input . get_lang ( ) if inginious . DEBUG : gettext . install ( "messages" , get_lang_dir_path ( ) ) else : gettext . install ( "messages" , get_lang_dir_path ( ) ) | Install gettext with the default parameters |
40,955 | def _recursive_overwrite ( self , src , dest ) : if os . path . isdir ( src ) : if not os . path . isdir ( dest ) : os . makedirs ( dest ) files = os . listdir ( src ) for f in files : self . _recursive_overwrite ( os . path . join ( src , f ) , os . path . join ( dest , f ) ) else : shutil . copyfile ( src , dest , follow_symlinks = False ) | Copy src to dest recursively and with file overwrite . |
40,956 | def init ( plugin_manager , _ , _2 , config ) : submission_git_saver = SubmissionGitSaver ( plugin_manager , config ) submission_git_saver . daemon = True submission_git_saver . start ( ) | Init the plugin |
40,957 | def get_type_as_str ( self ) : if self . get_type ( ) == 0 : return _ ( "Skill" ) elif self . get_type ( ) == 1 : return _ ( "Misconception" ) elif self . get_type ( ) == 2 : return _ ( "Category" ) else : return _ ( "Unknown type" ) | Return a textual description of the type |
40,958 | def create_tags_from_dict ( cls , tag_dict ) : tag_list_common = [ ] tag_list_misconception = [ ] tag_list_organisational = [ ] for tag in tag_dict : try : id = tag_dict [ tag ] [ "id" ] name = tag_dict [ tag ] [ "name" ] visible = tag_dict [ tag ] [ "visible" ] description = tag_dict [ tag ] [ "description" ] type = tag_dict [ tag ] [ "type" ] if type == 2 : tag_list_organisational . insert ( int ( tag ) , Tag ( id , name , description , visible , type ) ) elif type == 1 : tag_list_misconception . insert ( int ( tag ) , Tag ( id , name , description , visible , type ) ) else : tag_list_common . insert ( int ( tag ) , Tag ( id , name , description , visible , type ) ) except KeyError : pass return tag_list_common , tag_list_misconception , tag_list_organisational | Build a tuple of list of Tag objects based on the tag_dict . The tuple contains 3 lists . - The first list contains skill tags - The second list contains misconception tags - The third list contains category tags |
40,959 | async def run ( self ) : self . _logger . info ( "Agent started" ) self . __backend_socket . connect ( self . __backend_addr ) self . _logger . info ( "Saying hello to the backend" ) await ZMQUtils . send ( self . __backend_socket , AgentHello ( self . __friendly_name , self . __concurrency , self . environments ) ) self . __last_ping = time . time ( ) run_listen = self . _loop . create_task ( self . __run_listen ( ) ) self . _loop . call_later ( 1 , self . _create_safe_task , self . __check_last_ping ( run_listen ) ) await run_listen | Runs the agent . Answer to the requests made by the Backend . May raise an asyncio . CancelledError in which case the agent should clean itself and restart completely . |
40,960 | async def __check_last_ping ( self , run_listen ) : if self . __last_ping < time . time ( ) - 10 : self . _logger . warning ( "Last ping too old. Restarting the agent." ) run_listen . cancel ( ) self . __cancel_remaining_safe_tasks ( ) else : self . _loop . call_later ( 1 , self . _create_safe_task , self . __check_last_ping ( run_listen ) ) | Check if the last timeout is too old . If it is kills the run_listen task |
40,961 | async def __run_listen ( self ) : while True : message = await ZMQUtils . recv ( self . __backend_socket ) await self . __handle_backend_message ( message ) | Listen to the backend |
40,962 | async def __handle_ping ( self , _ : Ping ) : self . __last_ping = time . time ( ) await ZMQUtils . send ( self . __backend_socket , Pong ( ) ) | Handle a Ping message . Pong the backend |
40,963 | def get_menu ( course , current , renderer , plugin_manager , user_manager ) : default_entries = [ ] if user_manager . has_admin_rights_on_course ( course ) : default_entries += [ ( "settings" , "<i class='fa fa-cog fa-fw'></i> " + _ ( "Course settings" ) ) ] default_entries += [ ( "stats" , "<i class='fa fa-area-chart fa-fw'></i> " + _ ( "Stats" ) ) , ( "students" , "<i class='fa fa-user fa-fw'></i> " + _ ( "Students" ) ) ] if not course . is_lti ( ) : default_entries += [ ( "aggregations" , "<i class='fa fa-group fa-fw'></i> " + ( _ ( "Classrooms" ) if course . use_classrooms ( ) else _ ( "Teams" ) ) ) ] default_entries += [ ( "tasks" , "<i class='fa fa-tasks fa-fw'></i> " + _ ( "Tasks" ) ) , ( "submissions" , "<i class='fa fa-search fa-fw'></i> " + _ ( "View submissions" ) ) , ( "download" , "<i class='fa fa-download fa-fw'></i> " + _ ( "Download submissions" ) ) ] if user_manager . has_admin_rights_on_course ( course ) : if web . ctx . app_stack [ 0 ] . webdav_host : default_entries += [ ( "webdav" , "<i class='fa fa-folder-open fa-fw'></i> " + _ ( "WebDAV access" ) ) ] default_entries += [ ( "replay" , "<i class='fa fa-refresh fa-fw'></i> " + _ ( "Replay submissions" ) ) , ( "danger" , "<i class='fa fa-bomb fa-fw'></i> " + _ ( "Danger zone" ) ) ] additional_entries = [ entry for entry in plugin_manager . call_hook ( 'course_admin_menu' , course = course ) if entry is not None ] return renderer . course_admin . menu ( course , default_entries + additional_entries , current ) | Returns the HTML of the menu used in the administration . current is the current page of section |
40,964 | def writerow ( self , row ) : self . writer . writerow ( row ) data = self . queue . getvalue ( ) self . stream . write ( data ) self . queue . truncate ( 0 ) self . queue . seek ( 0 ) | Writes a row to the CSV file |
40,965 | def get_renderer ( self , with_layout = True ) : if with_layout and self . is_lti ( ) : return self . _default_renderer_lti elif with_layout : return self . _default_renderer else : return self . _default_renderer_nolayout | Get the default renderer |
40,966 | def _javascript_helper ( self , position ) : if position not in [ "header" , "footer" ] : position = "footer" if position == "header" : entries = [ entry for entry in self . _plugin_manager . call_hook ( "javascript_header" ) if entry is not None ] else : entries = [ entry for entry in self . _plugin_manager . call_hook ( "javascript_footer" ) if entry is not None ] entries += self . _get_ctx ( ) [ "javascript" ] [ position ] entries = [ "<script src='" + entry + "' type='text/javascript' charset='utf-8'></script>" for entry in entries ] return "\n" . join ( entries ) | Add javascript links for the current page and for the plugins |
40,967 | def _css_helper ( self ) : entries = [ entry for entry in self . _plugin_manager . call_hook ( "css" ) if entry is not None ] entries += self . _get_ctx ( ) [ "css" ] entries = [ "<link href='" + entry + "' rel='stylesheet'>" for entry in entries ] return "\n" . join ( entries ) | Add CSS links for the current page and for the plugins |
40,968 | def _get_ctx ( self ) : if self . _WEB_CTX_KEY not in web . ctx : web . ctx [ self . _WEB_CTX_KEY ] = { "javascript" : { "footer" : [ ] , "header" : [ ] } , "css" : [ ] } return web . ctx . get ( self . _WEB_CTX_KEY ) | Get web . ctx object for the Template helper |
40,969 | def _generic_hook ( self , name , ** kwargs ) : entries = [ entry for entry in self . _plugin_manager . call_hook ( name , ** kwargs ) if entry is not None ] return "\n" . join ( entries ) | A generic hook that links the TemplateHelper with PluginManager |
40,970 | def new_job ( self , task , inputdata , launcher_name = "Unknown" , debug = False ) : bjobid = uuid . uuid4 ( ) self . _waiting_jobs . append ( str ( bjobid ) ) self . _client . new_job ( task , inputdata , ( lambda result , grade , problems , tests , custom , archive , stdout , stderr : self . _callback ( bjobid , result , grade , problems , tests , custom , archive , stdout , stderr ) ) , launcher_name , debug ) return bjobid | Runs a new job . It works exactly like the Client class except that there is no callback
40,971 | def _callback ( self , bjobid , result , grade , problems , tests , custom , archive , stdout , stderr ) : self . _jobs_done [ str ( bjobid ) ] = ( result , grade , problems , tests , custom , archive , stdout , stderr ) self . _waiting_jobs . remove ( str ( bjobid ) ) | Callback for self . _client . new_job |
40,972 | def init ( plugin_manager , _ , _2 , conf ) : encryption = conf . get ( "encryption" , "none" ) if encryption not in [ "none" , "ssl" , "tls" ] : raise Exception ( "Unknown encryption method {}" . format ( encryption ) ) if encryption == "none" : conf [ "encryption" ] = None if conf . get ( "port" , 0 ) == 0 : conf [ "port" ] = None the_method = LdapAuthMethod ( conf . get ( "id" ) , conf . get ( 'name' , 'LDAP' ) , conf . get ( "imlink" , "" ) , conf ) plugin_manager . add_page ( r'/auth/page/([^/]+)' , LDAPAuthenticationPage ) plugin_manager . register_auth_method ( the_method ) | Allow to connect through a LDAP service |
40,973 | def cleanup ( self , timeout ) : cutoff = time ( ) - timeout self . collection . remove ( { _atime : { '$lt' : cutoff } } ) | Removes all sessions older than timeout seconds . Called automatically on every session access . |
40,974 | def load ( self , client , webpy_app , course_factory , task_factory , database , user_manager , submission_manager , config ) : self . _app = webpy_app self . _task_factory = task_factory self . _database = database self . _user_manager = user_manager self . _submission_manager = submission_manager self . _loaded = True for entry in config : module = importlib . import_module ( entry [ "plugin_module" ] ) module . init ( self , course_factory , client , entry ) | Loads the plugin manager . Must be done after the initialisation of the client |
40,975 | def add_page ( self , pattern , classname ) : if not self . _loaded : raise PluginManagerNotLoadedException ( ) self . _app . add_mapping ( pattern , classname ) | Add a new page to the web application . Only available after that the Plugin Manager is loaded |
40,976 | def add_task_file_manager ( self , task_file_manager ) : if not self . _loaded : raise PluginManagerNotLoadedException ( ) self . _task_factory . add_custom_task_file_manager ( task_file_manager ) | Add a task file manager . Only available after that the Plugin Manager is loaded |
40,977 | def register_auth_method ( self , auth_method ) : if not self . _loaded : raise PluginManagerNotLoadedException ( ) self . _user_manager . register_auth_method ( auth_method ) | Register a new authentication method |
40,978 | def dump_course ( self , courseid ) : filepath = os . path . join ( self . backup_dir , courseid , datetime . datetime . now ( ) . strftime ( "%Y%m%d.%H%M%S" ) + ".zip" ) if not os . path . exists ( os . path . dirname ( filepath ) ) : os . makedirs ( os . path . dirname ( filepath ) ) with zipfile . ZipFile ( filepath , "w" , allowZip64 = True ) as zipf : aggregations = self . database . aggregations . find ( { "courseid" : courseid } ) zipf . writestr ( "aggregations.json" , bson . json_util . dumps ( aggregations ) , zipfile . ZIP_DEFLATED ) user_tasks = self . database . user_tasks . find ( { "courseid" : courseid } ) zipf . writestr ( "user_tasks.json" , bson . json_util . dumps ( user_tasks ) , zipfile . ZIP_DEFLATED ) submissions = self . database . submissions . find ( { "courseid" : courseid } ) zipf . writestr ( "submissions.json" , bson . json_util . dumps ( submissions ) , zipfile . ZIP_DEFLATED ) submissions . rewind ( ) for submission in submissions : for key in [ "input" , "archive" ] : if key in submission and type ( submission [ key ] ) == bson . objectid . ObjectId : infile = self . submission_manager . get_gridfs ( ) . get ( submission [ key ] ) zipf . writestr ( key + "/" + str ( submission [ key ] ) + ".data" , infile . read ( ) , zipfile . ZIP_DEFLATED ) self . _logger . info ( "Course %s dumped to backup directory." , courseid ) self . wipe_course ( courseid ) | Create a zip file containing all information about a given course in database and then remove it from db |
40,979 | def delete_course ( self , courseid ) : self . wipe_course ( courseid ) self . course_factory . delete_course ( courseid ) filepath = os . path . join ( self . backup_dir , courseid ) if os . path . exists ( os . path . dirname ( filepath ) ) : for backup in glob . glob ( os . path . join ( filepath , '*.zip' ) ) : os . remove ( backup ) self . _logger . info ( "Course %s files erased." , courseid ) | Erase all course data |
40,980 | def show_input ( self , template_helper , language , seed ) : header = ParsableText ( self . gettext ( language , self . _header ) , "rst" , translation = self . _translations . get ( language , gettext . NullTranslations ( ) ) ) return str ( DisplayableCodeProblem . get_renderer ( template_helper ) . tasks . code ( self . get_id ( ) , header , 8 , 0 , self . _language , self . _optional , self . _default ) ) | Show BasicCodeProblem and derivatives |
40,981 | def show_input ( self , template_helper , language , seed ) : choices = [ ] limit = self . _limit if limit == 0 : limit = len ( self . _choices ) rand = Random ( "{}#{}#{}" . format ( self . get_task ( ) . get_id ( ) , self . get_id ( ) , seed ) ) random_order_choices = list ( self . _choices ) rand . shuffle ( random_order_choices ) if self . _multiple : for entry in random_order_choices : if entry [ 'valid' ] : choices . append ( entry ) limit = limit - 1 for entry in random_order_choices : if limit == 0 : break if not entry [ 'valid' ] : choices . append ( entry ) limit = limit - 1 else : for entry in random_order_choices : if not entry [ 'valid' ] and limit > 1 : choices . append ( entry ) limit = limit - 1 for entry in random_order_choices : if entry [ 'valid' ] and limit > 0 : choices . append ( entry ) limit = limit - 1 rand . shuffle ( choices ) header = ParsableText ( self . gettext ( language , self . _header ) , "rst" , translation = self . _translations . get ( language , gettext . NullTranslations ( ) ) ) return str ( DisplayableMultipleChoiceProblem . get_renderer ( template_helper ) . tasks . multiple_choice ( self . get_id ( ) , header , self . _multiple , choices , lambda text : ParsableText ( self . gettext ( language , text ) if text else "" , "rst" , translation = self . _translations . get ( language , gettext . NullTranslations ( ) ) ) ) ) | Show multiple choice problems |
40,982 | def _parse_lti_data ( self , courseid , taskid ) : post_input = web . webapi . rawinput ( "POST" ) self . logger . debug ( '_parse_lti_data:' + str ( post_input ) ) try : course = self . course_factory . get_course ( courseid ) except exceptions . CourseNotFoundException as ex : raise web . notfound ( str ( ex ) ) try : test = LTIWebPyToolProvider . from_webpy_request ( ) validator = LTIValidator ( self . database . nonce , course . lti_keys ( ) ) verified = test . is_valid_request ( validator ) except Exception : self . logger . exception ( "..." ) self . logger . info ( "Error while validating LTI request for %s" , str ( post_input ) ) raise web . forbidden ( _ ( "Error while validating LTI request" ) ) if verified : self . logger . debug ( 'parse_lit_data for %s' , str ( post_input ) ) user_id = post_input [ "user_id" ] roles = post_input . get ( "roles" , "Student" ) . split ( "," ) realname = self . _find_realname ( post_input ) email = post_input . get ( "lis_person_contact_email_primary" , "" ) lis_outcome_service_url = post_input . get ( "lis_outcome_service_url" , None ) outcome_result_id = post_input . get ( "lis_result_sourcedid" , None ) consumer_key = post_input [ "oauth_consumer_key" ] if course . lti_send_back_grade ( ) : if lis_outcome_service_url is None or outcome_result_id is None : self . logger . info ( 'Error: lis_outcome_service_url is None but lti_send_back_grade is True' ) raise web . forbidden ( _ ( "In order to send grade back to the TC, INGInious needs the parameters lis_outcome_service_url and " "lis_outcome_result_id in the LTI basic-launch-request. Please contact your administrator." ) ) else : lis_outcome_service_url = None outcome_result_id = None tool_name = post_input . get ( 'tool_consumer_instance_name' , 'N/A' ) tool_desc = post_input . get ( 'tool_consumer_instance_description' , 'N/A' ) tool_url = post_input . get ( 'tool_consumer_instance_url' , 'N/A' ) context_title = post_input . 
get ( 'context_title' , 'N/A' ) context_label = post_input . get ( 'context_label' , 'N/A' ) session_id = self . user_manager . create_lti_session ( user_id , roles , realname , email , courseid , taskid , consumer_key , lis_outcome_service_url , outcome_result_id , tool_name , tool_desc , tool_url , context_title , context_label ) loggedin = self . user_manager . attempt_lti_login ( ) return session_id , loggedin else : self . logger . info ( "Couldn't validate LTI request" ) raise web . forbidden ( _ ( "Couldn't validate LTI request" ) ) | Verify and parse the data for the LTI basic launch |
40,983 | def _find_realname ( self , post_input ) : if "lis_person_name_full" in post_input : return post_input [ "lis_person_name_full" ] if "lis_person_name_given" in post_input and "lis_person_name_family" in post_input : return post_input [ "lis_person_name_given" ] + post_input [ "lis_person_name_family" ] if "lis_person_contact_email_primary" in post_input : return post_input [ "lis_person_contact_email_primary" ] if "lis_person_name_family" in post_input : return post_input [ "lis_person_name_family" ] if "lis_person_name_given" in post_input : return post_input [ "lis_person_name_given" ] return post_input [ "user_id" ] | Returns the most appropriate name to identify the user |
40,984 | def fast_stats ( data ) : total_submission = len ( data ) total_submission_best = 0 total_submission_best_succeeded = 0 for submission in data : if "best" in submission and submission [ "best" ] : total_submission_best = total_submission_best + 1 if "result" in submission and submission [ "result" ] == "success" : total_submission_best_succeeded += 1 statistics = [ ( _ ( "Number of submissions" ) , total_submission ) , ( _ ( "Evaluation submissions (Total)" ) , total_submission_best ) , ( _ ( "Evaluation submissions (Succeeded)" ) , total_submission_best_succeeded ) , ( _ ( "Evaluation submissions (Failed)" ) , total_submission_best - total_submission_best_succeeded ) , ] return statistics | Compute base statistics about submissions |
40,985 | def _register_transaction ( self , send_msg , recv_msg , coroutine_recv , coroutine_abrt , get_key = None , inter_msg = None ) : if get_key is None : get_key = lambda x : None if inter_msg is None : inter_msg = [ ] self . _msgs_registered [ send_msg . __msgtype__ ] = ( [ recv_msg . __msgtype__ ] + [ x . __msgtype__ for x , _ in inter_msg ] , get_key , None , None , [ ] ) self . _msgs_registered [ recv_msg . __msgtype__ ] = ( [ ] , get_key , coroutine_recv , coroutine_abrt , [ recv_msg . __msgtype__ ] + [ x . __msgtype__ for x , _ in inter_msg ] ) self . _transactions [ recv_msg . __msgtype__ ] = { } for msg_class , handler in inter_msg : self . _msgs_registered [ msg_class . __msgtype__ ] = ( [ ] , get_key , handler , None , [ ] ) self . _transactions [ msg_class . __msgtype__ ] = { } | Register a type of message to be sent . After this message has been sent , if the answer is received , callback_recv is called . If the remote server becomes unreachable , callback_abrt is called .
40,986 | async def _reconnect ( self ) : for msg_class in self . _transactions : _1 , _2 , _3 , coroutine_abrt , _4 = self . _msgs_registered [ msg_class ] if coroutine_abrt is not None : for key in self . _transactions [ msg_class ] : for args , kwargs in self . _transactions [ msg_class ] [ key ] : self . _loop . create_task ( coroutine_abrt ( key , * args , ** kwargs ) ) self . _transactions [ msg_class ] = { } await self . _on_disconnect ( ) for task in self . _restartable_tasks : task . cancel ( ) self . _restartable_tasks = [ ] self . _socket . disconnect ( self . _router_addr ) await self . client_start ( ) | Called when the remote server is inaccessible and the connection has to be restarted
40,987 | async def client_start ( self ) : await self . _start_socket ( ) await self . _on_connect ( ) self . _ping_count = 0 task_socket = self . _loop . create_task ( self . _run_socket ( ) ) task_ping = self . _loop . create_task ( self . _do_ping ( ) ) self . _restartable_tasks . append ( task_ping ) self . _restartable_tasks . append ( task_socket ) | Starts the client |
40,988 | async def _run_socket ( self ) : try : while True : message = await ZMQUtils . recv ( self . _socket ) msg_class = message . __msgtype__ if msg_class in self . _handlers_registered : self . _loop . create_task ( self . _handlers_registered [ msg_class ] ( message ) ) elif msg_class in self . _transactions : _1 , get_key , coroutine_recv , _2 , responsible = self . _msgs_registered [ msg_class ] key = get_key ( message ) if key in self . _transactions [ msg_class ] : for args , kwargs in self . _transactions [ msg_class ] [ key ] : self . _loop . create_task ( coroutine_recv ( message , * args , ** kwargs ) ) for key2 in responsible : del self . _transactions [ key2 ] [ key ] else : raise Exception ( "Received message %s for an unknown transaction %s" , msg_class , key ) else : raise Exception ( "Received unknown message %s" , msg_class ) except asyncio . CancelledError : return except KeyboardInterrupt : return | Task that runs this client . |
40,989 | def load_feedback ( ) : result = { } if os . path . exists ( _feedback_file ) : f = open ( _feedback_file , 'r' ) cont = f . read ( ) f . close ( ) else : cont = '{}' try : result = json . loads ( cont ) if cont else { } except ValueError as e : result = { "result" : "crash" , "text" : "Feedback file has been modified by user !" } return result | Open existing feedback file |
40,990 | def save_feedback ( rdict ) : if not os . path . exists ( _feedback_dir ) : os . makedirs ( _feedback_dir ) jcont = json . dumps ( rdict ) f = open ( _feedback_file , 'w' ) f . write ( jcont ) f . close ( ) | Save feedback file |
40,991 | def set_problem_result ( result , problem_id ) : rdict = load_feedback ( ) if not 'problems' in rdict : rdict [ 'problems' ] = { } cur_val = rdict [ 'problems' ] . get ( problem_id , '' ) rdict [ 'problems' ] [ problem_id ] = [ result , cur_val ] if type ( cur_val ) == str else [ result , cur_val [ 1 ] ] save_feedback ( rdict ) | Set problem specific result value |
40,992 | def set_global_feedback ( feedback , append = False ) : rdict = load_feedback ( ) rdict [ 'text' ] = rdict . get ( 'text' , '' ) + feedback if append else feedback save_feedback ( rdict ) | Set global feedback in case of error |
40,993 | def set_problem_feedback ( feedback , problem_id , append = False ) : rdict = load_feedback ( ) if not 'problems' in rdict : rdict [ 'problems' ] = { } cur_val = rdict [ 'problems' ] . get ( problem_id , '' ) rdict [ 'problems' ] [ problem_id ] = ( cur_val + feedback if append else feedback ) if type ( cur_val ) == str else [ cur_val [ 0 ] , ( cur_val [ 1 ] + feedback if append else feedback ) ] save_feedback ( rdict ) | Set problem specific feedback |
40,994 | def _display_big_warning ( self , content ) : print ( "" ) print ( BOLD + WARNING + "--- WARNING ---" + ENDC ) print ( WARNING + content + ENDC ) print ( "" ) | Displays a BIG warning |
40,995 | def _ask_local_config ( self ) : options = { "backend" : "local" , "local-config" : { } } while True : concurrency = self . _ask_with_default ( "Maximum concurrency (number of tasks running simultaneously). Leave it empty to use the number of " "CPU of your host." , "" ) if concurrency == "" : break try : concurrency = int ( concurrency ) except : self . _display_error ( "Invalid number" ) continue if concurrency <= 0 : self . _display_error ( "Invalid number" ) continue options [ "local-config" ] [ "concurrency" ] = concurrency break hostname = self . _ask_with_default ( "What is the external hostname/address of your machine? You can leave this empty and let INGInious " "autodetect it." , "" ) if hostname != "" : options [ "local-config" ] [ "debug_host" ] = hostname self . _display_info ( "You can now enter the port range for the remote debugging feature of INGInious. Please verify that these " "ports are open in your firewall. You can leave this parameters empty, the default is 64100-64200" ) port_range = None while True : start_port = self . _ask_with_default ( "Beginning of the range" , "" ) if start_port != "" : try : start_port = int ( start_port ) except : self . _display_error ( "Invalid number" ) continue end_port = self . _ask_with_default ( "End of the range" , str ( start_port + 100 ) ) try : end_port = int ( end_port ) except : self . _display_error ( "Invalid number" ) continue if start_port > end_port : self . _display_error ( "Invalid range" ) continue port_range = str ( start_port ) + "-" + str ( end_port ) else : break if port_range != None : options [ "local-config" ] [ "debug_ports" ] = port_range return options | Ask some parameters about the local configuration |
40,996 | def ask_backend ( self ) : response = self . _ask_boolean ( "Do you have a local docker daemon (on Linux), do you use docker-machine via a local machine, or do you use " "Docker for macOS?" , True ) if ( response ) : self . _display_info ( "If you use docker-machine on macOS, please see " "http://inginious.readthedocs.io/en/latest/install_doc/troubleshooting.html" ) return "local" else : self . _display_info ( "You will have to run inginious-backend and inginious-agent yourself. Please run the commands without argument " "and/or read the documentation for more info" ) return self . _display_question ( "Please enter the address of your backend" ) | Ask the user to choose the backend |
def try_mongodb_opts(self, host="localhost", database_name='INGInious'):
    """Try MongoDB configuration.

    Checks, in order, that a client can be created for *host*, that
    *database_name* can be selected on it, and that GridFS can be initialised
    on that database. On the first failure a warning is displayed and ``None``
    is returned.

    :param host: MongoDB host to connect to
    :param database_name: name of the database to open
    :return: the database object, or ``None`` on any failure
    """
    # Step 1: create a client for the MongoDB server.
    try:
        client = MongoClient(host=host)
    except Exception as e:
        self._display_warning("Cannot connect to MongoDB on host %s: %s" % (host, str(e)))
        return None

    # Step 2: select the requested database on that client.
    try:
        db = client[database_name]
    except Exception as e:
        self._display_warning("Cannot access database %s: %s" % (database_name, str(e)))
        return None

    # Step 3: make sure GridFS can be initialised on the database.
    try:
        GridFS(db)
    except Exception as e:
        self._display_warning("Cannot access gridfs %s: %s" % (database_name, str(e)))
        return None

    return db
def configure_task_directory(self):
    """Configure task directory.

    Asks the user for a directory in which course/task files will be stored,
    looping until an existing path is given (or the user declines to retry),
    then optionally downloads the demonstration tasks into it.

    :return: dict with the ``"tasks_directory"`` configuration entry
    """
    self._display_question(
        "Please choose a directory in which to store the course/task files. By default, the tool will put them in the current "
        "directory")
    task_directory = None
    while task_directory is None:
        task_directory = self._ask_with_default("Task directory", ".")
        if not os.path.exists(task_directory):
            self._display_error("Path does not exist")
            # If the user declines to retry, we keep the non-existent path and
            # skip the demo-task download below.
            if self._ask_boolean("Would you like to retry?", True):
                task_directory = None

    if os.path.exists(task_directory):
        self._display_question("Demonstration tasks can be downloaded to let you discover INGInious.")
        if self._ask_boolean("Would you like to download them ?", True):
            self._download_demo_tasks(task_directory)
    else:
        self._display_warning("Skipping copying the 'test' course because the task dir does not exist")

    return {"tasks_directory": task_directory}

def _download_demo_tasks(self, task_directory):
    """Download the INGInious demo-task tarball from GitHub and extract it into *task_directory*."""
    try:
        filename, _ = urllib.request.urlretrieve("https://api.github.com/repos/UCL-INGI/INGInious-demo-tasks/tarball")
        with tarfile.open(filename, mode="r:gz") as thetarfile:
            members = thetarfile.getmembers()
            # GitHub tarballs wrap everything in a "<org>-<repo>-<sha>/" top
            # directory; strip that common prefix from every member name.
            commonpath = os.path.commonpath([tarinfo.name for tarinfo in members])
            for member in members:
                member.name = member.name[len(commonpath) + 1:]
                if member.name:
                    # NOTE(review): archive comes from a trusted GitHub URL;
                    # consider extract(..., filter="data") on Python >= 3.12.
                    thetarfile.extract(member, task_directory)
        self._display_info("Successfully downloaded and copied demonstration tasks.")
    except Exception as e:
        self._display_error("An error occurred while copying the directory: %s" % str(e))
def download_containers(self, to_download, current_options):
    """Download the chosen containers on all the agents.

    For the local backend, pulls each image (tagged ``:latest``) through the
    local Docker daemon. For any other backend the user is told to pull the
    images manually.

    :param to_download: iterable of image names (without tag) to pull
    :param current_options: current configuration dict; only ``"backend"`` is read
    """
    if current_options["backend"] == "local":
        self._display_info("Connecting to the local Docker daemon...")
        try:
            docker_connection = docker.from_env()
        except Exception:  # was a bare "except:", which also swallowed KeyboardInterrupt/SystemExit
            self._display_error("Cannot connect to local Docker daemon. Skipping download.")
            return

        for image in to_download:
            # A failure on one image is reported but does not stop the others.
            try:
                self._display_info("Downloading image %s. This can take some time." % image)
                docker_connection.images.pull(image + ":latest")
            except Exception as e:
                self._display_error("An error occurred while pulling the image: %s." % str(e))
    else:
        self._display_warning(
            "This installation tool does not support the backend configuration directly, if it's not local. You will have to "
            "pull the images by yourself. Here is the list: %s" % str(to_download))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.