idx int64 0 251k | question stringlengths 53 3.53k | target stringlengths 5 1.23k | len_question int64 20 893 | len_target int64 3 238 |
|---|---|---|---|---|
228,900 | def _css_helper ( self ) : entries = [ entry for entry in self . _plugin_manager . call_hook ( "css" ) if entry is not None ] # Load javascript for the current page entries += self . _get_ctx ( ) [ "css" ] entries = [ "<link href='" + entry + "' rel='stylesheet'>" for entry in entries ] return "\n" . join ( entries ) | Add CSS links for the current page and for the plugins | 92 | 11 |
228,901 | def _get_ctx ( self ) : if self . _WEB_CTX_KEY not in web . ctx : web . ctx [ self . _WEB_CTX_KEY ] = { "javascript" : { "footer" : [ ] , "header" : [ ] } , "css" : [ ] } return web . ctx . get ( self . _WEB_CTX_KEY ) | Get web . ctx object for the Template helper | 92 | 10 |
228,902 | def _generic_hook ( self , name , * * kwargs ) : entries = [ entry for entry in self . _plugin_manager . call_hook ( name , * * kwargs ) if entry is not None ] return "\n" . join ( entries ) | A generic hook that links the TemplateHelper with PluginManager | 58 | 11 |
228,903 | def new_job ( self , task , inputdata , launcher_name = "Unknown" , debug = False ) : bjobid = uuid . uuid4 ( ) self . _waiting_jobs . append ( str ( bjobid ) ) self . _client . new_job ( task , inputdata , ( lambda result , grade , problems , tests , custom , archive , stdout , stderr : self . _callback ( bjobid , result , grade , problems , tests , custom , archive , stdout , stderr ) ) , launcher_name , debug ) return bjobid | Runs a new job . It works exactly like the Client class instead that there is no callback | 129 | 19 |
228,904 | def _callback ( self , bjobid , result , grade , problems , tests , custom , archive , stdout , stderr ) : self . _jobs_done [ str ( bjobid ) ] = ( result , grade , problems , tests , custom , archive , stdout , stderr ) self . _waiting_jobs . remove ( str ( bjobid ) ) | Callback for self . _client . new_job | 82 | 10 |
228,905 | def init ( plugin_manager , _ , _2 , conf ) : encryption = conf . get ( "encryption" , "none" ) if encryption not in [ "none" , "ssl" , "tls" ] : raise Exception ( "Unknown encryption method {}" . format ( encryption ) ) if encryption == "none" : conf [ "encryption" ] = None if conf . get ( "port" , 0 ) == 0 : conf [ "port" ] = None the_method = LdapAuthMethod ( conf . get ( "id" ) , conf . get ( 'name' , 'LDAP' ) , conf . get ( "imlink" , "" ) , conf ) plugin_manager . add_page ( r'/auth/page/([^/]+)' , LDAPAuthenticationPage ) plugin_manager . register_auth_method ( the_method ) | Allow to connect through a LDAP service | 190 | 8 |
228,906 | def cleanup ( self , timeout ) : cutoff = time ( ) - timeout self . collection . remove ( { _atime : { '$lt' : cutoff } } ) | Removes all sessions older than timeout seconds . Called automatically on every session access . | 36 | 16 |
228,907 | def load ( self , client , webpy_app , course_factory , task_factory , database , user_manager , submission_manager , config ) : self . _app = webpy_app self . _task_factory = task_factory self . _database = database self . _user_manager = user_manager self . _submission_manager = submission_manager self . _loaded = True for entry in config : module = importlib . import_module ( entry [ "plugin_module" ] ) module . init ( self , course_factory , client , entry ) | Loads the plugin manager . Must be done after the initialisation of the client | 127 | 16 |
228,908 | def add_page ( self , pattern , classname ) : if not self . _loaded : raise PluginManagerNotLoadedException ( ) self . _app . add_mapping ( pattern , classname ) | Add a new page to the web application . Only available after that the Plugin Manager is loaded | 44 | 18 |
228,909 | def add_task_file_manager ( self , task_file_manager ) : if not self . _loaded : raise PluginManagerNotLoadedException ( ) self . _task_factory . add_custom_task_file_manager ( task_file_manager ) | Add a task file manager . Only available after that the Plugin Manager is loaded | 58 | 15 |
228,910 | def register_auth_method ( self , auth_method ) : if not self . _loaded : raise PluginManagerNotLoadedException ( ) self . _user_manager . register_auth_method ( auth_method ) | Register a new authentication method | 47 | 5 |
228,911 | def dump_course ( self , courseid ) : filepath = os . path . join ( self . backup_dir , courseid , datetime . datetime . now ( ) . strftime ( "%Y%m%d.%H%M%S" ) + ".zip" ) if not os . path . exists ( os . path . dirname ( filepath ) ) : os . makedirs ( os . path . dirname ( filepath ) ) with zipfile . ZipFile ( filepath , "w" , allowZip64 = True ) as zipf : aggregations = self . database . aggregations . find ( { "courseid" : courseid } ) zipf . writestr ( "aggregations.json" , bson . json_util . dumps ( aggregations ) , zipfile . ZIP_DEFLATED ) user_tasks = self . database . user_tasks . find ( { "courseid" : courseid } ) zipf . writestr ( "user_tasks.json" , bson . json_util . dumps ( user_tasks ) , zipfile . ZIP_DEFLATED ) submissions = self . database . submissions . find ( { "courseid" : courseid } ) zipf . writestr ( "submissions.json" , bson . json_util . dumps ( submissions ) , zipfile . ZIP_DEFLATED ) submissions . rewind ( ) for submission in submissions : for key in [ "input" , "archive" ] : if key in submission and type ( submission [ key ] ) == bson . objectid . ObjectId : infile = self . submission_manager . get_gridfs ( ) . get ( submission [ key ] ) zipf . writestr ( key + "/" + str ( submission [ key ] ) + ".data" , infile . read ( ) , zipfile . ZIP_DEFLATED ) self . _logger . info ( "Course %s dumped to backup directory." , courseid ) self . wipe_course ( courseid ) | Create a zip file containing all information about a given course in database and then remove it from db | 442 | 19 |
228,912 | def delete_course ( self , courseid ) : # Wipes the course (delete database) self . wipe_course ( courseid ) # Deletes the course from the factory (entire folder) self . course_factory . delete_course ( courseid ) # Removes backup filepath = os . path . join ( self . backup_dir , courseid ) if os . path . exists ( os . path . dirname ( filepath ) ) : for backup in glob . glob ( os . path . join ( filepath , '*.zip' ) ) : os . remove ( backup ) self . _logger . info ( "Course %s files erased." , courseid ) | Erase all course data | 145 | 5 |
228,913 | def show_input ( self , template_helper , language , seed ) : header = ParsableText ( self . gettext ( language , self . _header ) , "rst" , translation = self . _translations . get ( language , gettext . NullTranslations ( ) ) ) return str ( DisplayableCodeProblem . get_renderer ( template_helper ) . tasks . code ( self . get_id ( ) , header , 8 , 0 , self . _language , self . _optional , self . _default ) ) | Show BasicCodeProblem and derivatives | 116 | 6 |
228,914 | def show_input ( self , template_helper , language , seed ) : choices = [ ] limit = self . _limit if limit == 0 : limit = len ( self . _choices ) # no limit rand = Random ( "{}#{}#{}" . format ( self . get_task ( ) . get_id ( ) , self . get_id ( ) , seed ) ) # Ensure that the choices are random # we *do* need to copy the choices here random_order_choices = list ( self . _choices ) rand . shuffle ( random_order_choices ) if self . _multiple : # take only the valid choices in the first pass for entry in random_order_choices : if entry [ 'valid' ] : choices . append ( entry ) limit = limit - 1 # take everything else in a second pass for entry in random_order_choices : if limit == 0 : break if not entry [ 'valid' ] : choices . append ( entry ) limit = limit - 1 else : # need to have ONE valid entry for entry in random_order_choices : if not entry [ 'valid' ] and limit > 1 : choices . append ( entry ) limit = limit - 1 for entry in random_order_choices : if entry [ 'valid' ] and limit > 0 : choices . append ( entry ) limit = limit - 1 rand . shuffle ( choices ) header = ParsableText ( self . gettext ( language , self . _header ) , "rst" , translation = self . _translations . get ( language , gettext . NullTranslations ( ) ) ) return str ( DisplayableMultipleChoiceProblem . get_renderer ( template_helper ) . tasks . multiple_choice ( self . get_id ( ) , header , self . _multiple , choices , lambda text : ParsableText ( self . gettext ( language , text ) if text else "" , "rst" , translation = self . _translations . get ( language , gettext . NullTranslations ( ) ) ) ) ) | Show multiple choice problems | 439 | 4 |
228,915 | def _parse_lti_data ( self , courseid , taskid ) : post_input = web . webapi . rawinput ( "POST" ) self . logger . debug ( '_parse_lti_data:' + str ( post_input ) ) try : course = self . course_factory . get_course ( courseid ) except exceptions . CourseNotFoundException as ex : raise web . notfound ( str ( ex ) ) try : test = LTIWebPyToolProvider . from_webpy_request ( ) validator = LTIValidator ( self . database . nonce , course . lti_keys ( ) ) verified = test . is_valid_request ( validator ) except Exception : self . logger . exception ( "..." ) self . logger . info ( "Error while validating LTI request for %s" , str ( post_input ) ) raise web . forbidden ( _ ( "Error while validating LTI request" ) ) if verified : self . logger . debug ( 'parse_lit_data for %s' , str ( post_input ) ) user_id = post_input [ "user_id" ] roles = post_input . get ( "roles" , "Student" ) . split ( "," ) realname = self . _find_realname ( post_input ) email = post_input . get ( "lis_person_contact_email_primary" , "" ) lis_outcome_service_url = post_input . get ( "lis_outcome_service_url" , None ) outcome_result_id = post_input . get ( "lis_result_sourcedid" , None ) consumer_key = post_input [ "oauth_consumer_key" ] if course . lti_send_back_grade ( ) : if lis_outcome_service_url is None or outcome_result_id is None : self . logger . info ( 'Error: lis_outcome_service_url is None but lti_send_back_grade is True' ) raise web . forbidden ( _ ( "In order to send grade back to the TC, INGInious needs the parameters lis_outcome_service_url and " "lis_outcome_result_id in the LTI basic-launch-request. Please contact your administrator." ) ) else : lis_outcome_service_url = None outcome_result_id = None tool_name = post_input . get ( 'tool_consumer_instance_name' , 'N/A' ) tool_desc = post_input . get ( 'tool_consumer_instance_description' , 'N/A' ) tool_url = post_input . get ( 'tool_consumer_instance_url' , 'N/A' ) context_title = post_input . 
get ( 'context_title' , 'N/A' ) context_label = post_input . get ( 'context_label' , 'N/A' ) session_id = self . user_manager . create_lti_session ( user_id , roles , realname , email , courseid , taskid , consumer_key , lis_outcome_service_url , outcome_result_id , tool_name , tool_desc , tool_url , context_title , context_label ) loggedin = self . user_manager . attempt_lti_login ( ) return session_id , loggedin else : self . logger . info ( "Couldn't validate LTI request" ) raise web . forbidden ( _ ( "Couldn't validate LTI request" ) ) | Verify and parse the data for the LTI basic launch | 785 | 12 |
228,916 | def _find_realname ( self , post_input ) : # First, try the full name if "lis_person_name_full" in post_input : return post_input [ "lis_person_name_full" ] if "lis_person_name_given" in post_input and "lis_person_name_family" in post_input : return post_input [ "lis_person_name_given" ] + post_input [ "lis_person_name_family" ] # Then the email if "lis_person_contact_email_primary" in post_input : return post_input [ "lis_person_contact_email_primary" ] # Then only part of the full name if "lis_person_name_family" in post_input : return post_input [ "lis_person_name_family" ] if "lis_person_name_given" in post_input : return post_input [ "lis_person_name_given" ] return post_input [ "user_id" ] | Returns the most appropriate name to identify the user | 227 | 9 |
228,917 | def fast_stats ( data ) : total_submission = len ( data ) total_submission_best = 0 total_submission_best_succeeded = 0 for submission in data : if "best" in submission and submission [ "best" ] : total_submission_best = total_submission_best + 1 if "result" in submission and submission [ "result" ] == "success" : total_submission_best_succeeded += 1 statistics = [ ( _ ( "Number of submissions" ) , total_submission ) , ( _ ( "Evaluation submissions (Total)" ) , total_submission_best ) , ( _ ( "Evaluation submissions (Succeeded)" ) , total_submission_best_succeeded ) , ( _ ( "Evaluation submissions (Failed)" ) , total_submission_best - total_submission_best_succeeded ) , # add here new common statistics ] return statistics | Compute base statistics about submissions | 215 | 6 |
228,918 | def _register_transaction ( self , send_msg , recv_msg , coroutine_recv , coroutine_abrt , get_key = None , inter_msg = None ) : if get_key is None : get_key = lambda x : None if inter_msg is None : inter_msg = [ ] # format is (other_msg, get_key, recv_handler, abrt_handler,responsible_for) # where responsible_for is the list of classes whose transaction will be killed when this message is received. self . _msgs_registered [ send_msg . __msgtype__ ] = ( [ recv_msg . __msgtype__ ] + [ x . __msgtype__ for x , _ in inter_msg ] , get_key , None , None , [ ] ) self . _msgs_registered [ recv_msg . __msgtype__ ] = ( [ ] , get_key , coroutine_recv , coroutine_abrt , [ recv_msg . __msgtype__ ] + [ x . __msgtype__ for x , _ in inter_msg ] ) self . _transactions [ recv_msg . __msgtype__ ] = { } for msg_class , handler in inter_msg : self . _msgs_registered [ msg_class . __msgtype__ ] = ( [ ] , get_key , handler , None , [ ] ) self . _transactions [ msg_class . __msgtype__ ] = { } | Register a type of message to be sent . After this message has been sent if the answer is received callback_recv is called . If the remote server becomes dones calls callback_abrt . | 326 | 40 |
228,919 | async def _reconnect ( self ) : # 1. Close all transactions for msg_class in self . _transactions : _1 , _2 , _3 , coroutine_abrt , _4 = self . _msgs_registered [ msg_class ] if coroutine_abrt is not None : for key in self . _transactions [ msg_class ] : for args , kwargs in self . _transactions [ msg_class ] [ key ] : self . _loop . create_task ( coroutine_abrt ( key , * args , * * kwargs ) ) self . _transactions [ msg_class ] = { } # 2. Call on_disconnect await self . _on_disconnect ( ) # 3. Stop tasks for task in self . _restartable_tasks : task . cancel ( ) self . _restartable_tasks = [ ] # 4. Restart socket self . _socket . disconnect ( self . _router_addr ) # 5. Re-do start sequence await self . client_start ( ) | Called when the remote server is innacessible and the connection has to be restarted | 232 | 19 |
228,920 | async def client_start ( self ) : await self . _start_socket ( ) await self . _on_connect ( ) self . _ping_count = 0 # Start the loops, and don't forget to add them to the list of asyncio task to close when the client restarts task_socket = self . _loop . create_task ( self . _run_socket ( ) ) task_ping = self . _loop . create_task ( self . _do_ping ( ) ) self . _restartable_tasks . append ( task_ping ) self . _restartable_tasks . append ( task_socket ) | Starts the client | 138 | 4 |
228,921 | async def _run_socket ( self ) : try : while True : message = await ZMQUtils . recv ( self . _socket ) msg_class = message . __msgtype__ if msg_class in self . _handlers_registered : # If a handler is registered, give the message to it self . _loop . create_task ( self . _handlers_registered [ msg_class ] ( message ) ) elif msg_class in self . _transactions : # If there are transaction associated, check if the key is ok _1 , get_key , coroutine_recv , _2 , responsible = self . _msgs_registered [ msg_class ] key = get_key ( message ) if key in self . _transactions [ msg_class ] : # key exists; call all the coroutines for args , kwargs in self . _transactions [ msg_class ] [ key ] : self . _loop . create_task ( coroutine_recv ( message , * args , * * kwargs ) ) # remove all transaction parts for key2 in responsible : del self . _transactions [ key2 ] [ key ] else : # key does not exist raise Exception ( "Received message %s for an unknown transaction %s" , msg_class , key ) else : raise Exception ( "Received unknown message %s" , msg_class ) except asyncio . CancelledError : return except KeyboardInterrupt : return | Task that runs this client . | 315 | 6 |
228,922 | def load_feedback ( ) : result = { } if os . path . exists ( _feedback_file ) : f = open ( _feedback_file , 'r' ) cont = f . read ( ) f . close ( ) else : cont = '{}' try : result = json . loads ( cont ) if cont else { } except ValueError as e : result = { "result" : "crash" , "text" : "Feedback file has been modified by user !" } return result | Open existing feedback file | 111 | 4 |
228,923 | def save_feedback ( rdict ) : # Check for output folder if not os . path . exists ( _feedback_dir ) : os . makedirs ( _feedback_dir ) jcont = json . dumps ( rdict ) f = open ( _feedback_file , 'w' ) f . write ( jcont ) f . close ( ) | Save feedback file | 78 | 3 |
228,924 | def set_problem_result ( result , problem_id ) : rdict = load_feedback ( ) if not 'problems' in rdict : rdict [ 'problems' ] = { } cur_val = rdict [ 'problems' ] . get ( problem_id , '' ) rdict [ 'problems' ] [ problem_id ] = [ result , cur_val ] if type ( cur_val ) == str else [ result , cur_val [ 1 ] ] save_feedback ( rdict ) | Set problem specific result value | 114 | 5 |
228,925 | def set_global_feedback ( feedback , append = False ) : rdict = load_feedback ( ) rdict [ 'text' ] = rdict . get ( 'text' , '' ) + feedback if append else feedback save_feedback ( rdict ) | Set global feedback in case of error | 57 | 7 |
228,926 | def set_problem_feedback ( feedback , problem_id , append = False ) : rdict = load_feedback ( ) if not 'problems' in rdict : rdict [ 'problems' ] = { } cur_val = rdict [ 'problems' ] . get ( problem_id , '' ) rdict [ 'problems' ] [ problem_id ] = ( cur_val + feedback if append else feedback ) if type ( cur_val ) == str else [ cur_val [ 0 ] , ( cur_val [ 1 ] + feedback if append else feedback ) ] save_feedback ( rdict ) | Set problem specific feedback | 136 | 4 |
228,927 | def _display_big_warning ( self , content ) : print ( "" ) print ( BOLD + WARNING + "--- WARNING ---" + ENDC ) print ( WARNING + content + ENDC ) print ( "" ) | Displays a BIG warning | 46 | 5 |
228,928 | def _ask_local_config ( self ) : options = { "backend" : "local" , "local-config" : { } } # Concurrency while True : concurrency = self . _ask_with_default ( "Maximum concurrency (number of tasks running simultaneously). Leave it empty to use the number of " "CPU of your host." , "" ) if concurrency == "" : break try : concurrency = int ( concurrency ) except : self . _display_error ( "Invalid number" ) continue if concurrency <= 0 : self . _display_error ( "Invalid number" ) continue options [ "local-config" ] [ "concurrency" ] = concurrency break # Debug hostname hostname = self . _ask_with_default ( "What is the external hostname/address of your machine? You can leave this empty and let INGInious " "autodetect it." , "" ) if hostname != "" : options [ "local-config" ] [ "debug_host" ] = hostname self . _display_info ( "You can now enter the port range for the remote debugging feature of INGInious. Please verify that these " "ports are open in your firewall. You can leave this parameters empty, the default is 64100-64200" ) # Debug port range port_range = None while True : start_port = self . _ask_with_default ( "Beginning of the range" , "" ) if start_port != "" : try : start_port = int ( start_port ) except : self . _display_error ( "Invalid number" ) continue end_port = self . _ask_with_default ( "End of the range" , str ( start_port + 100 ) ) try : end_port = int ( end_port ) except : self . _display_error ( "Invalid number" ) continue if start_port > end_port : self . _display_error ( "Invalid range" ) continue port_range = str ( start_port ) + "-" + str ( end_port ) else : break if port_range != None : options [ "local-config" ] [ "debug_ports" ] = port_range return options | Ask some parameters about the local configuration | 476 | 7 |
228,929 | def ask_backend ( self ) : response = self . _ask_boolean ( "Do you have a local docker daemon (on Linux), do you use docker-machine via a local machine, or do you use " "Docker for macOS?" , True ) if ( response ) : self . _display_info ( "If you use docker-machine on macOS, please see " "http://inginious.readthedocs.io/en/latest/install_doc/troubleshooting.html" ) return "local" else : self . _display_info ( "You will have to run inginious-backend and inginious-agent yourself. Please run the commands without argument " "and/or read the documentation for more info" ) return self . _display_question ( "Please enter the address of your backend" ) | Ask the user to choose the backend | 182 | 7 |
228,930 | def try_mongodb_opts ( self , host = "localhost" , database_name = 'INGInious' ) : try : mongo_client = MongoClient ( host = host ) except Exception as e : self . _display_warning ( "Cannot connect to MongoDB on host %s: %s" % ( host , str ( e ) ) ) return None try : database = mongo_client [ database_name ] except Exception as e : self . _display_warning ( "Cannot access database %s: %s" % ( database_name , str ( e ) ) ) return None try : GridFS ( database ) except Exception as e : self . _display_warning ( "Cannot access gridfs %s: %s" % ( database_name , str ( e ) ) ) return None return database | Try MongoDB configuration | 179 | 4 |
228,931 | def configure_task_directory ( self ) : self . _display_question ( "Please choose a directory in which to store the course/task files. By default, the tool will put them in the current " "directory" ) task_directory = None while task_directory is None : task_directory = self . _ask_with_default ( "Task directory" , "." ) if not os . path . exists ( task_directory ) : self . _display_error ( "Path does not exists" ) if self . _ask_boolean ( "Would you like to retry?" , True ) : task_directory = None if os . path . exists ( task_directory ) : self . _display_question ( "Demonstration tasks can be downloaded to let you discover INGInious." ) if self . _ask_boolean ( "Would you like to download them ?" , True ) : try : filename , _ = urllib . request . urlretrieve ( "https://api.github.com/repos/UCL-INGI/INGInious-demo-tasks/tarball" ) with tarfile . open ( filename , mode = "r:gz" ) as thetarfile : members = thetarfile . getmembers ( ) commonpath = os . path . commonpath ( [ tarinfo . name for tarinfo in members ] ) for member in members : member . name = member . name [ len ( commonpath ) + 1 : ] if member . name : thetarfile . extract ( member , task_directory ) self . _display_info ( "Successfully downloaded and copied demonstration tasks." ) except Exception as e : self . _display_error ( "An error occurred while copying the directory: %s" % str ( e ) ) else : self . _display_warning ( "Skipping copying the 'test' course because the task dir does not exists" ) return { "tasks_directory" : task_directory } | Configure task directory | 420 | 4 |
228,932 | def download_containers ( self , to_download , current_options ) : if current_options [ "backend" ] == "local" : self . _display_info ( "Connecting to the local Docker daemon..." ) try : docker_connection = docker . from_env ( ) except : self . _display_error ( "Cannot connect to local Docker daemon. Skipping download." ) return for image in to_download : try : self . _display_info ( "Downloading image %s. This can take some time." % image ) docker_connection . images . pull ( image + ":latest" ) except Exception as e : self . _display_error ( "An error occurred while pulling the image: %s." % str ( e ) ) else : self . _display_warning ( "This installation tool does not support the backend configuration directly, if it's not local. You will have to " "pull the images by yourself. Here is the list: %s" % str ( to_download ) ) | Download the chosen containers on all the agents | 218 | 8 |
228,933 | def configure_containers ( self , current_options ) : containers = [ ( "default" , "Default container. For Bash and Python 2 tasks" ) , ( "cpp" , "Contains gcc and g++ for compiling C++" ) , ( "java7" , "Contains Java 7" ) , ( "java8scala" , "Contains Java 8 and Scala" ) , ( "mono" , "Contains Mono, which allows to run C#, F# and many other languages" ) , ( "oz" , "Contains Mozart 2, an implementation of the Oz multi-paradigm language, made for education" ) , ( "php" , "Contains PHP 5" ) , ( "pythia0compat" , "Compatibility container for Pythia 0" ) , ( "pythia1compat" , "Compatibility container for Pythia 1" ) , ( "r" , "Can run R scripts" ) , ( "sekexe" , "Can run an user-mode-linux for advanced tasks" ) ] default_download = [ "default" ] self . _display_question ( "The tool will now propose to download some base container image for multiple languages." ) self . _display_question ( "Please note that the download of these images can take a lot of time, so choose only the images you need" ) to_download = [ ] for container_name , description in containers : if self . _ask_boolean ( "Download %s (%s) ?" % ( container_name , description ) , container_name in default_download ) : to_download . append ( "ingi/inginious-c-%s" % container_name ) self . download_containers ( to_download , current_options ) wants = self . _ask_boolean ( "Do you want to manually add some images?" , False ) while wants : image = self . _ask_with_default ( "Container image name (leave this field empty to skip)" , "" ) if image == "" : break self . _display_info ( "Configuration of the containers done." ) | Configures the container dict | 460 | 5 |
228,934 | def configure_backup_directory ( self ) : self . _display_question ( "Please choose a directory in which to store the backup files. By default, the tool will them in the current " "directory" ) backup_directory = None while backup_directory is None : backup_directory = self . _ask_with_default ( "Backup directory" , "." ) if not os . path . exists ( backup_directory ) : self . _display_error ( "Path does not exists" ) if self . _ask_boolean ( "Would you like to retry?" , True ) : backup_directory = None return { "backup_directory" : backup_directory } | Configure backup directory | 146 | 4 |
228,935 | def ldap_plugin ( self ) : name = self . _ask_with_default ( "Authentication method name (will be displayed on the login page)" , "LDAP" ) prefix = self . _ask_with_default ( "Prefix to append to the username before db storage. Usefull when you have more than one auth method with " "common usernames." , "" ) ldap_host = self . _ask_with_default ( "LDAP Host" , "ldap.your.domain.com" ) encryption = 'none' while True : encryption = self . _ask_with_default ( "Encryption (either 'ssl', 'tls', or 'none')" , 'none' ) if encryption not in [ 'none' , 'ssl' , 'tls' ] : self . _display_error ( "Invalid value" ) else : break base_dn = self . _ask_with_default ( "Base DN" , "ou=people,c=com" ) request = self . _ask_with_default ( "Request to find a user. '{}' will be replaced by the username" , "uid={}" ) require_cert = self . _ask_boolean ( "Require certificate validation?" , encryption is not None ) return { "plugin_module" : "inginious.frontend.plugins.auth.ldap_auth" , "host" : ldap_host , "encryption" : encryption , "base_dn" : base_dn , "request" : request , "prefix" : prefix , "name" : name , "require_cert" : require_cert } | Configures the LDAP plugin | 360 | 6 |
228,936 | def configure_authentication ( self , database ) : options = { "plugins" : [ ] , "superadmins" : [ ] } self . _display_info ( "We will now create the first user." ) username = self . _ask_with_default ( "Enter the login of the superadmin" , "superadmin" ) realname = self . _ask_with_default ( "Enter the name of the superadmin" , "INGInious SuperAdmin" ) email = self . _ask_with_default ( "Enter the email address of the superadmin" , "superadmin@inginious.org" ) password = self . _ask_with_default ( "Enter the password of the superadmin" , "superadmin" ) database . users . insert ( { "username" : username , "realname" : realname , "email" : email , "password" : hashlib . sha512 ( password . encode ( "utf-8" ) ) . hexdigest ( ) , "bindings" : { } , "language" : "en" } ) options [ "superadmins" ] . append ( username ) while True : if not self . _ask_boolean ( "Would you like to add another auth method?" , False ) : break self . _display_info ( "You can choose an authentication plugin between:" ) self . _display_info ( "- 1. LDAP auth plugin. This plugin allows to connect to a distant LDAP host." ) plugin = self . _ask_with_default ( "Enter the corresponding number to your choice" , '1' ) if plugin not in [ '1' ] : continue elif plugin == '1' : options [ "plugins" ] . append ( self . ldap_plugin ( ) ) return options | Configure the authentication | 386 | 4 |
228,937 | def _close_app ( app , mongo_client , client ) : app . stop ( ) client . close ( ) mongo_client . close ( ) | Ensures that the app is properly closed | 34 | 9 |
228,938 | async def _init_clean ( self ) : # Data about running containers self . _containers_running = { } self . _container_for_job = { } self . _student_containers_running = { } self . _student_containers_for_job = { } self . _containers_killed = dict ( ) # Delete tmp_dir, and recreate-it again try : await self . _ashutil . rmtree ( self . _tmp_dir ) except OSError : pass try : await self . _aos . mkdir ( self . _tmp_dir ) except OSError : pass # Docker self . _docker = AsyncProxy ( DockerInterface ( ) ) # Auto discover containers self . _logger . info ( "Discovering containers" ) self . _containers = await self . _docker . get_containers ( ) self . _assigned_external_ports = { } # container_id : [external_ports] if self . _address_host is None and len ( self . _containers ) != 0 : self . _logger . info ( "Guessing external host IP" ) self . _address_host = await self . _docker . get_host_ip ( next ( iter ( self . _containers . values ( ) ) ) [ "id" ] ) if self . _address_host is None : self . _logger . warning ( "Cannot find external host IP. Please indicate it in the configuration. Remote SSH debug has been deactivated." ) self . _external_ports = None else : self . _logger . info ( "External address for SSH remote debug is %s" , self . _address_host ) # Watchers self . _timeout_watcher = TimeoutWatcher ( self . _docker ) | Must be called when the agent is starting | 388 | 8 |
228,939 | async def _end_clean ( self ) : await self . _timeout_watcher . clean ( ) async def close_and_delete ( container_id ) : try : await self . _docker . remove_container ( container_id ) except : pass for container_id in self . _containers_running : await close_and_delete ( container_id ) for container_id in self . _student_containers_running : await close_and_delete ( container_id ) | Must be called when the agent is closing | 105 | 8 |
228,940 | async def _watch_docker_events ( self ) : try : source = AsyncIteratorWrapper ( self . _docker . sync . event_stream ( filters = { "event" : [ "die" , "oom" ] } ) ) async for i in source : if i [ "Type" ] == "container" and i [ "status" ] == "die" : container_id = i [ "id" ] try : retval = int ( i [ "Actor" ] [ "Attributes" ] [ "exitCode" ] ) except asyncio . CancelledError : raise except : self . _logger . exception ( "Cannot parse exitCode for container %s" , container_id ) retval = - 1 if container_id in self . _containers_running : self . _create_safe_task ( self . handle_job_closing ( container_id , retval ) ) elif container_id in self . _student_containers_running : self . _create_safe_task ( self . handle_student_job_closing ( container_id , retval ) ) elif i [ "Type" ] == "container" and i [ "status" ] == "oom" : container_id = i [ "id" ] if container_id in self . _containers_running or container_id in self . _student_containers_running : self . _logger . info ( "Container %s did OOM, killing it" , container_id ) self . _containers_killed [ container_id ] = "overflow" try : self . _create_safe_task ( self . _docker . kill_container ( container_id ) ) except asyncio . CancelledError : raise except : # this call can sometimes fail, and that is normal. pass else : raise TypeError ( str ( i ) ) except asyncio . CancelledError : pass except : self . _logger . exception ( "Exception in _watch_docker_events" ) | Get raw docker events and convert them to more readable objects and then give them to self . _docker_events_subscriber | 432 | 26 |
228,941 | async def handle_student_job_closing ( self , container_id , retval ) : try : self . _logger . debug ( "Closing student %s" , container_id ) try : job_id , parent_container_id , socket_id , write_stream = self . _student_containers_running [ container_id ] del self . _student_containers_running [ container_id ] except asyncio . CancelledError : raise except : self . _logger . warning ( "Student container %s that has finished(p1) was not launched by this agent" , str ( container_id ) , exc_info = True ) return # Delete remaining student containers if job_id in self . _student_containers_for_job : # if it does not exists, then the parent container has closed self . _student_containers_for_job [ job_id ] . remove ( container_id ) killed = await self . _timeout_watcher . was_killed ( container_id ) if container_id in self . _containers_killed : killed = self . _containers_killed [ container_id ] del self . _containers_killed [ container_id ] if killed == "timeout" : retval = 253 elif killed == "overflow" : retval = 252 try : await self . _write_to_container_stdin ( write_stream , { "type" : "run_student_retval" , "retval" : retval , "socket_id" : socket_id } ) except asyncio . CancelledError : raise except : pass # parent container closed # Do not forget to remove the container try : await self . _docker . remove_container ( container_id ) except asyncio . CancelledError : raise except : pass # ignore except asyncio . CancelledError : raise except : self . _logger . exception ( "Exception in handle_student_job_closing" ) | Handle a closing student container . Do some cleaning verify memory limits timeouts ... and returns data to the associated grading container | 425 | 23 |
228,942 | async def kill_job ( self , message : BackendKillJob ) : try : if message . job_id in self . _container_for_job : self . _containers_killed [ self . _container_for_job [ message . job_id ] ] = "killed" await self . _docker . kill_container ( self . _container_for_job [ message . job_id ] ) else : self . _logger . warning ( "Cannot kill container for job %s because it is not running" , str ( message . job_id ) ) except asyncio . CancelledError : raise except : self . _logger . exception ( "Exception in handle_kill_job" ) | Handles kill messages . Kill things . | 153 | 8 |
228,943 | def get_user_lists ( self , course , aggregationid = '' ) : tutor_list = course . get_staff ( ) # Determine student list and if they are grouped student_list = list ( self . database . aggregations . aggregate ( [ { "$match" : { "courseid" : course . get_id ( ) } } , { "$unwind" : "$students" } , { "$project" : { "classroom" : "$_id" , "students" : 1 , "grouped" : { "$anyElementTrue" : { "$map" : { "input" : "$groups.students" , "as" : "group" , "in" : { "$anyElementTrue" : { "$map" : { "input" : "$$group" , "as" : "groupmember" , "in" : { "$eq" : [ "$$groupmember" , "$students" ] } } } } } } } } } ] ) ) student_list = dict ( [ ( student [ "students" ] , student ) for student in student_list ] ) users_info = self . user_manager . get_users_info ( list ( student_list . keys ( ) ) + tutor_list ) if aggregationid : # Order the non-registered students other_students = [ student_list [ entry ] [ 'students' ] for entry in student_list . keys ( ) if not student_list [ entry ] [ 'classroom' ] == ObjectId ( aggregationid ) ] other_students = sorted ( other_students , key = lambda val : ( ( "0" + users_info [ val ] [ 0 ] ) if users_info [ val ] else ( "1" + val ) ) ) return student_list , tutor_list , other_students , users_info else : return student_list , tutor_list , users_info | Get the available student and tutor lists for aggregation edition | 414 | 10 |
228,944 | def update_aggregation ( self , course , aggregationid , new_data ) : student_list = self . user_manager . get_course_registered_users ( course , False ) # If aggregation is new if aggregationid == 'None' : # Remove _id for correct insertion del new_data [ '_id' ] new_data [ "courseid" ] = course . get_id ( ) # Insert the new aggregation result = self . database . aggregations . insert_one ( new_data ) # Retrieve new aggregation id aggregationid = result . inserted_id new_data [ '_id' ] = result . inserted_id aggregation = new_data else : aggregation = self . database . aggregations . find_one ( { "_id" : ObjectId ( aggregationid ) , "courseid" : course . get_id ( ) } ) # Check tutors new_data [ "tutors" ] = [ tutor for tutor in new_data [ "tutors" ] if tutor in course . get_staff ( ) ] students , groups , errored_students = [ ] , [ ] , [ ] # Check the students for student in new_data [ "students" ] : if student in student_list : # Remove user from the other aggregation self . database . aggregations . find_one_and_update ( { "courseid" : course . get_id ( ) , "groups.students" : student } , { "$pull" : { "groups.$.students" : student , "students" : student } } ) self . database . aggregations . find_one_and_update ( { "courseid" : course . get_id ( ) , "students" : student } , { "$pull" : { "students" : student } } ) students . append ( student ) else : # Check if user can be registered user_info = self . user_manager . get_user_info ( student ) if user_info is None or student in aggregation [ "tutors" ] : errored_students . append ( student ) else : students . append ( student ) removed_students = [ student for student in aggregation [ "students" ] if student not in new_data [ "students" ] ] self . database . aggregations . find_one_and_update ( { "courseid" : course . 
get_id ( ) , "default" : True } , { "$push" : { "students" : { "$each" : removed_students } } } ) new_data [ "students" ] = students # Check the groups for group in new_data [ "groups" ] : group [ "students" ] = [ student for student in group [ "students" ] if student in new_data [ "students" ] ] if len ( group [ "students" ] ) <= group [ "size" ] : groups . append ( group ) new_data [ "groups" ] = groups # Check for default aggregation if new_data [ 'default' ] : self . database . aggregations . find_one_and_update ( { "courseid" : course . get_id ( ) , "default" : True } , { "$set" : { "default" : False } } ) aggregation = self . database . aggregations . find_one_and_update ( { "_id" : ObjectId ( aggregationid ) } , { "$set" : { "description" : new_data [ "description" ] , "students" : students , "tutors" : new_data [ "tutors" ] , "groups" : groups , "default" : new_data [ 'default' ] } } , return_document = ReturnDocument . AFTER ) return aggregation , errored_students | Update aggregation and returns a list of errored students | 821 | 10 |
228,945 | def POST_AUTH ( self ) : # pylint: disable=arguments-differ username = self . user_manager . session_username ( ) user_info = self . database . users . find_one ( { "username" : username } ) user_input = web . input ( ) success = None # Handle registration to a course if "register_courseid" in user_input and user_input [ "register_courseid" ] != "" : try : course = self . course_factory . get_course ( user_input [ "register_courseid" ] ) if not course . is_registration_possible ( user_info ) : success = False else : success = self . user_manager . course_register_user ( course , username , user_input . get ( "register_password" , None ) ) except : success = False elif "new_courseid" in user_input and self . user_manager . user_is_superadmin ( ) : try : courseid = user_input [ "new_courseid" ] self . course_factory . create_course ( courseid , { "name" : courseid , "accessible" : False } ) success = True except : success = False return self . show_page ( success ) | Parse course registration or course creation and display the course list page | 277 | 13 |
228,946 | async def _restart_on_cancel ( logger , agent ) : while True : try : await agent . run ( ) except asyncio . CancelledError : logger . exception ( "Restarting agent" ) pass | Restarts an agent when it is cancelled | 48 | 8 |
228,947 | def GET ( self , * args , * * kwargs ) : if self . user_manager . session_logged_in ( ) : if not self . user_manager . session_username ( ) and not self . __class__ . __name__ == "ProfilePage" : raise web . seeother ( "/preferences/profile" ) if not self . is_lti_page and self . user_manager . session_lti_info ( ) is not None : #lti session self . user_manager . disconnect_user ( ) return self . template_helper . get_renderer ( ) . auth ( self . user_manager . get_auth_methods ( ) , False ) return self . GET_AUTH ( * args , * * kwargs ) else : return self . template_helper . get_renderer ( ) . auth ( self . user_manager . get_auth_methods ( ) , False ) | Checks if user is authenticated and calls GET_AUTH or performs logout . Otherwise returns the login template . | 206 | 23 |
228,948 | def normpath ( self , path ) : path2 = posixpath . normpath ( urllib . parse . unquote ( path ) ) if path . endswith ( "/" ) : path2 += "/" return path2 | Normalize the path | 50 | 4 |
228,949 | def _get_submissions ( course_factory , submission_manager , user_manager , translations , courseid , taskid , with_input , submissionid = None ) : try : course = course_factory . get_course ( courseid ) except : raise APINotFound ( "Course not found" ) if not user_manager . course_is_open_to_user ( course , lti = False ) : raise APIForbidden ( "You are not registered to this course" ) try : task = course . get_task ( taskid ) except : raise APINotFound ( "Task not found" ) if submissionid is None : submissions = submission_manager . get_user_submissions ( task ) else : try : submissions = [ submission_manager . get_submission ( submissionid ) ] except : raise APINotFound ( "Submission not found" ) if submissions [ 0 ] [ "taskid" ] != task . get_id ( ) or submissions [ 0 ] [ "courseid" ] != course . get_id ( ) : raise APINotFound ( "Submission not found" ) output = [ ] for submission in submissions : submission = submission_manager . get_feedback_from_submission ( submission , show_everything = user_manager . has_staff_rights_on_course ( course , user_manager . session_username ( ) ) , translation = translations . get ( user_manager . session_language ( ) , gettext . NullTranslations ( ) ) ) data = { "id" : str ( submission [ "_id" ] ) , "submitted_on" : str ( submission [ "submitted_on" ] ) , "status" : submission [ "status" ] } if with_input : data [ "input" ] = submission_manager . get_input_from_submission ( submission , True ) # base64 encode file to allow JSON encoding for d in data [ "input" ] : if isinstance ( d , dict ) and d . keys ( ) == { "filename" , "value" } : d [ "value" ] = base64 . b64encode ( d [ "value" ] ) . decode ( "utf8" ) if submission [ "status" ] == "done" : data [ "grade" ] = submission . get ( "grade" , 0 ) data [ "result" ] = submission . get ( "result" , "crash" ) data [ "feedback" ] = submission . get ( "text" , "" ) data [ "problems_feedback" ] = submission . get ( "problems" , { } ) output . 
append ( data ) return 200 , output | Helper for the GET methods of the two following classes | 577 | 10 |
228,950 | def API_GET ( self , courseid , taskid , submissionid ) : # pylint: disable=arguments-differ with_input = "input" in web . input ( ) return _get_submissions ( self . course_factory , self . submission_manager , self . user_manager , self . app . _translations , courseid , taskid , with_input , submissionid ) | List all the submissions that the connected user made . Returns list of the form | 89 | 15 |
228,951 | def is_registration_possible ( self , user_info ) : return self . get_accessibility ( ) . is_open ( ) and self . _registration . is_open ( ) and self . is_user_accepted_by_access_control ( user_info ) | Returns true if users can register for this course | 63 | 9 |
228,952 | def get_accessibility ( self , plugin_override = True ) : vals = self . _hook_manager . call_hook ( 'course_accessibility' , course = self , default = self . _accessible ) return vals [ 0 ] if len ( vals ) and plugin_override else self . _accessible | Return the AccessibleTime object associated with the accessibility of this course | 70 | 13 |
228,953 | def is_user_accepted_by_access_control ( self , user_info ) : if self . get_access_control_method ( ) is None : return True elif not user_info : return False elif self . get_access_control_method ( ) == "username" : return user_info [ "username" ] in self . get_access_control_list ( ) elif self . get_access_control_method ( ) == "email" : return user_info [ "email" ] in self . get_access_control_list ( ) elif self . get_access_control_method ( ) == "binding" : return set ( user_info [ "bindings" ] . keys ( ) ) . intersection ( set ( self . get_access_control_list ( ) ) ) return False | Returns True if the user is allowed by the ACL | 180 | 10 |
228,954 | def allow_unregister ( self , plugin_override = True ) : vals = self . _hook_manager . call_hook ( 'course_allow_unregister' , course = self , default = self . _allow_unregister ) return vals [ 0 ] if len ( vals ) and plugin_override else self . _allow_unregister | Returns True if students can unregister from course | 78 | 9 |
228,955 | def get_name ( self , language ) : return self . gettext ( language , self . _name ) if self . _name else "" | Return the name of this course | 30 | 6 |
228,956 | def get_description ( self , language ) : description = self . gettext ( language , self . _description ) if self . _description else '' return ParsableText ( description , "rst" , self . _translations . get ( language , gettext . NullTranslations ( ) ) ) | Returns the course description | 63 | 4 |
228,957 | def get_all_tags_names_as_list ( self , admin = False , language = "en" ) : if admin : if self . _all_tags_cache_list_admin != { } and language in self . _all_tags_cache_list_admin : return self . _all_tags_cache_list_admin [ language ] #Cache hit else : if self . _all_tags_cache_list != { } and language in self . _all_tags_cache_list : return self . _all_tags_cache_list [ language ] #Cache hit #Cache miss, computes everything s_stud = set ( ) s_admin = set ( ) ( common , _ , org ) = self . get_all_tags ( ) for tag in common + org : # Is tag_name_with_translation correct by doing that like that ? tag_name_with_translation = self . gettext ( language , tag . get_name ( ) ) if tag . get_name ( ) else "" s_admin . add ( tag_name_with_translation ) if tag . is_visible_for_student ( ) : s_stud . add ( tag_name_with_translation ) self . _all_tags_cache_list_admin [ language ] = natsorted ( s_admin , key = lambda y : y . lower ( ) ) self . _all_tags_cache_list [ language ] = natsorted ( s_stud , key = lambda y : y . lower ( ) ) if admin : return self . _all_tags_cache_list_admin [ language ] return self . _all_tags_cache_list [ language ] | Computes and cache two list containing all tags name sorted by natural order on name | 363 | 16 |
228,958 | def update_all_tags_cache ( self ) : self . _all_tags_cache = None self . _all_tags_cache_list = { } self . _all_tags_cache_list_admin = { } self . _organisational_tags_to_task = { } self . get_all_tags ( ) self . get_all_tags_names_as_list ( ) self . get_organisational_tags_to_task ( ) | Force the cache refreshing | 104 | 4 |
228,959 | def get_app ( config ) : mongo_client = MongoClient ( host = config . get ( 'mongo_opt' , { } ) . get ( 'host' , 'localhost' ) ) database = mongo_client [ config . get ( 'mongo_opt' , { } ) . get ( 'database' , 'INGInious' ) ] # Create the FS provider if "tasks_directory" not in config : raise RuntimeError ( "WebDav access is only supported if INGInious is using a local filesystem to access tasks" ) fs_provider = LocalFSProvider ( config [ "tasks_directory" ] ) course_factory , task_factory = create_factories ( fs_provider , { } , None , WebAppCourse , WebAppTask ) user_manager = UserManager ( MongoStore ( database , 'sessions' ) , database , config . get ( 'superadmins' , [ ] ) ) config = dict ( wsgidav_app . DEFAULT_CONFIG ) config [ "provider_mapping" ] = { "/" : INGIniousFilesystemProvider ( course_factory , task_factory ) } config [ "domaincontroller" ] = INGIniousDAVDomainController ( user_manager , course_factory ) config [ "verbose" ] = 0 app = wsgidav_app . WsgiDAVApp ( config ) return app | Init the webdav app | 313 | 6 |
228,960 | def getDomainRealm ( self , inputURL , environ ) : # we don't get the realm here, its already been resolved in # request_resolver if inputURL . startswith ( "/" ) : inputURL = inputURL [ 1 : ] parts = inputURL . split ( "/" ) return parts [ 0 ] | Resolve a relative url to the appropriate realm name . | 70 | 11 |
228,961 | def isRealmUser ( self , realmname , username , environ ) : try : course = self . course_factory . get_course ( realmname ) ok = self . user_manager . has_admin_rights_on_course ( course , username = username ) return ok except : return False | Returns True if this username is valid for the realm False otherwise . | 65 | 13 |
228,962 | def getRealmUserPassword ( self , realmname , username , environ ) : return self . user_manager . get_user_api_key ( username , create = True ) | Return the password for the given username for the realm . | 39 | 11 |
228,963 | def getResourceInst ( self , path , environ ) : self . _count_getResourceInst += 1 fp = self . _locToFilePath ( path ) if not os . path . exists ( fp ) : return None if os . path . isdir ( fp ) : return FolderResource ( path , environ , fp ) return FileResource ( path , environ , fp ) | Return info dictionary for path . | 86 | 6 |
228,964 | def contains_is_html ( cls , data ) : for key , val in data . items ( ) : if isinstance ( key , str ) and key . endswith ( "IsHTML" ) : return True if isinstance ( val , ( OrderedDict , dict ) ) and cls . contains_is_html ( val ) : return True return False | Detect if the problem has at least one xyzIsHTML key | 79 | 13 |
228,965 | def parse_problem ( self , problem_content ) : del problem_content [ "@order" ] return self . task_factory . get_problem_types ( ) . get ( problem_content [ "type" ] ) . parse_problem ( problem_content ) | Parses a problem modifying some data | 57 | 8 |
228,966 | def wipe_task ( self , courseid , taskid ) : submissions = self . database . submissions . find ( { "courseid" : courseid , "taskid" : taskid } ) for submission in submissions : for key in [ "input" , "archive" ] : if key in submission and type ( submission [ key ] ) == bson . objectid . ObjectId : self . submission_manager . get_gridfs ( ) . delete ( submission [ key ] ) self . database . aggregations . remove ( { "courseid" : courseid , "taskid" : taskid } ) self . database . user_tasks . remove ( { "courseid" : courseid , "taskid" : taskid } ) self . database . submissions . remove ( { "courseid" : courseid , "taskid" : taskid } ) self . _logger . info ( "Task %s/%s wiped." , courseid , taskid ) | Wipe the data associated to the taskid from DB | 208 | 11 |
228,967 | def _exception_free_callback ( self , callback , * args , * * kwargs ) : try : return callback ( * args , * * kwargs ) except Exception : self . _logger . exception ( "An exception occurred while calling a hook! " , exc_info = True ) return None | A wrapper that remove all exceptions raised from hooks | 67 | 9 |
228,968 | def add_hook ( self , name , callback , prio = 0 ) : hook_list = self . _hooks . get ( name , [ ] ) add = ( lambda * args , * * kwargs : self . _exception_free_callback ( callback , * args , * * kwargs ) ) , - prio pos = bisect . bisect_right ( list ( x [ 1 ] for x in hook_list ) , - prio ) hook_list [ pos : pos ] = [ add ] self . _hooks [ name ] = hook_list | Add a new hook that can be called with the call_hook function . prio is the priority . Higher priority hooks are called before lower priority ones . This function does not enforce a particular order between hooks with the same priorities . | 125 | 46 |
228,969 | def input_is_consistent ( self , task_input , default_allowed_extension , default_max_size ) : for problem in self . _problems : if not problem . input_is_consistent ( task_input , default_allowed_extension , default_max_size ) : return False return True | Check if an input for a task is consistent . Return true if this is case false else | 70 | 18 |
228,970 | def get_limits ( self ) : vals = self . _hook_manager . call_hook ( 'task_limits' , course = self . get_course ( ) , task = self , default = self . _limits ) return vals [ 0 ] if len ( vals ) else self . _limits | Return the limits of this task | 66 | 6 |
228,971 | def allow_network_access_grading ( self ) : vals = self . _hook_manager . call_hook ( 'task_network_grading' , course = self . get_course ( ) , task = self , default = self . _network_grading ) return vals [ 0 ] if len ( vals ) else self . _network_grading | Return True if the grading container should have access to the network | 76 | 12 |
228,972 | def _create_task_problem ( self , problemid , problem_content , task_problem_types ) : # Basic checks if not id_checker ( problemid ) : raise Exception ( "Invalid problem _id: " + problemid ) if problem_content . get ( 'type' , "" ) not in task_problem_types : raise Exception ( "Invalid type for problem " + problemid ) return task_problem_types . get ( problem_content . get ( 'type' , "" ) ) ( self , problemid , problem_content , self . _translations ) | Creates a new instance of the right class for a given problem . | 124 | 14 |
228,973 | def course_menu ( course , template_helper ) : scoreboards = course . get_descriptor ( ) . get ( 'scoreboard' , [ ] ) if scoreboards != [ ] : return str ( template_helper . get_custom_renderer ( 'frontend/plugins/scoreboard' , layout = False ) . course_menu ( course ) ) else : return None | Displays the link to the scoreboards on the course page if the plugin is activated for this course | 84 | 20 |
228,974 | def task_menu ( course , task , template_helper ) : scoreboards = course . get_descriptor ( ) . get ( 'scoreboard' , [ ] ) try : tolink = [ ] for sid , scoreboard in enumerate ( scoreboards ) : if task . get_id ( ) in scoreboard [ "content" ] : tolink . append ( ( sid , scoreboard [ "name" ] ) ) if tolink : return str ( template_helper . get_custom_renderer ( 'frontend/plugins/scoreboard' , layout = False ) . task_menu ( course , tolink ) ) return None except : return None | Displays the link to the scoreboards on the task page if the plugin is activated for this course and the task is used in scoreboards | 139 | 28 |
228,975 | def get_user_lists ( self , course , classroomid ) : tutor_list = course . get_staff ( ) # Determine if user is grouped or not in the classroom student_list = list ( self . database . classrooms . aggregate ( [ { "$match" : { "_id" : ObjectId ( classroomid ) } } , { "$unwind" : "$students" } , { "$project" : { "students" : 1 , "grouped" : { "$anyElementTrue" : { "$map" : { "input" : "$groups.students" , "as" : "group" , "in" : { "$anyElementTrue" : { "$map" : { "input" : "$$group" , "as" : "groupmember" , "in" : { "$eq" : [ "$$groupmember" , "$students" ] } } } } } } } } } ] ) ) student_list = dict ( [ ( student [ "students" ] , student ) for student in student_list ] ) other_students = [ entry [ 'students' ] for entry in list ( self . database . classrooms . aggregate ( [ { "$match" : { "courseid" : course . get_id ( ) , "_id" : { "$ne" : ObjectId ( classroomid ) } } } , { "$unwind" : "$students" } , { "$project" : { "_id" : 0 , "students" : 1 } } ] ) ) ] users_info = self . user_manager . get_users_info ( other_students + list ( student_list . keys ( ) ) + tutor_list ) # Order the non-registered students other_students = sorted ( other_students , key = lambda val : ( ( "0" + users_info [ val ] [ 0 ] ) if users_info [ val ] else ( "1" + val ) ) ) return student_list , tutor_list , other_students , users_info | Get the available student and tutor lists for classroom edition | 439 | 10 |
228,976 | def update_classroom ( self , course , classroomid , new_data ) : student_list , tutor_list , other_students , _ = self . get_user_lists ( course , classroomid ) # Check tutors new_data [ "tutors" ] = [ tutor for tutor in map ( str . strip , new_data [ "tutors" ] ) if tutor in tutor_list ] students , groups , errored_students = [ ] , [ ] , [ ] new_data [ "students" ] = map ( str . strip , new_data [ "students" ] ) # Check the students for student in new_data [ "students" ] : if student in student_list : students . append ( student ) else : if student in other_students : # Remove user from the other classroom self . database . classrooms . find_one_and_update ( { "courseid" : course . get_id ( ) , "groups.students" : student } , { "$pull" : { "groups.$.students" : student , "students" : student } } ) self . database . classrooms . find_one_and_update ( { "courseid" : course . get_id ( ) , "students" : student } , { "$pull" : { "students" : student } } ) students . append ( student ) else : # Check if user can be registered user_info = self . user_manager . get_user_info ( student ) if user_info is None or student in tutor_list : errored_students . append ( student ) else : students . append ( student ) removed_students = [ student for student in student_list if student not in new_data [ "students" ] ] self . database . classrooms . find_one_and_update ( { "courseid" : course . get_id ( ) , "default" : True } , { "$push" : { "students" : { "$each" : removed_students } } } ) new_data [ "students" ] = students # Check the groups for group in new_data [ "groups" ] : group [ "students" ] = [ student for student in map ( str . strip , group [ "students" ] ) if student in new_data [ "students" ] ] if len ( group [ "students" ] ) <= group [ "size" ] : groups . append ( group ) new_data [ "groups" ] = groups classroom = self . database . classrooms . 
find_one_and_update ( { "_id" : ObjectId ( classroomid ) } , { "$set" : { "description" : new_data [ "description" ] , "students" : students , "tutors" : new_data [ "tutors" ] , "groups" : groups } } , return_document = ReturnDocument . AFTER ) return classroom , errored_students | Update classroom and returns a list of errored students | 644 | 10 |
228,977 | def parse ( self , debug = False ) : if self . _parsed is None : try : if self . _mode == "html" : self . _parsed = self . html ( self . _content , self . _show_everything , self . _translation ) else : self . _parsed = self . rst ( self . _content , self . _show_everything , self . _translation , debug = debug ) except Exception as e : if debug : raise BaseException ( "Parsing failed" ) from e else : self . _parsed = self . _translation . gettext ( "<b>Parsing failed</b>: <pre>{}</pre>" ) . format ( html . escape ( self . _content ) ) return self . _parsed | Returns parsed text | 172 | 3 |
228,978 | def POST_AUTH ( self , courseid , taskid ) : # pylint: disable=arguments-differ if not id_checker ( taskid ) : raise Exception ( "Invalid task id" ) self . get_course_and_check_rights ( courseid , allow_all_staff = False ) request = web . input ( file = { } ) if request . get ( "action" ) == "upload" and request . get ( 'path' ) is not None and request . get ( 'file' ) is not None : return self . action_upload ( courseid , taskid , request . get ( 'path' ) , request . get ( 'file' ) ) elif request . get ( "action" ) == "edit_save" and request . get ( 'path' ) is not None and request . get ( 'content' ) is not None : return self . action_edit_save ( courseid , taskid , request . get ( 'path' ) , request . get ( 'content' ) ) else : return self . show_tab_file ( courseid , taskid ) | Upload or modify a file | 241 | 5 |
228,979 | def show_tab_file ( self , courseid , taskid , error = None ) : return self . template_helper . get_renderer ( False ) . course_admin . edit_tabs . files ( self . course_factory . get_course ( courseid ) , taskid , self . get_task_filelist ( self . task_factory , courseid , taskid ) , error ) | Return the file tab | 90 | 4 |
228,980 | def action_edit ( self , courseid , taskid , path ) : wanted_path = self . verify_path ( courseid , taskid , path ) if wanted_path is None : return "Internal error" try : content = self . task_factory . get_task_fs ( courseid , taskid ) . get ( wanted_path ) . decode ( "utf-8" ) return json . dumps ( { "content" : content } ) except : return json . dumps ( { "error" : "not-readable" } ) | Edit a file | 117 | 3 |
228,981 | def action_edit_save ( self , courseid , taskid , path , content ) : wanted_path = self . verify_path ( courseid , taskid , path ) if wanted_path is None : return json . dumps ( { "error" : True } ) try : self . task_factory . get_task_fs ( courseid , taskid ) . put ( wanted_path , content . encode ( "utf-8" ) ) return json . dumps ( { "ok" : True } ) except : return json . dumps ( { "error" : True } ) | Save an edited file | 125 | 4 |
228,982 | def action_download ( self , courseid , taskid , path ) : wanted_path = self . verify_path ( courseid , taskid , path ) if wanted_path is None : raise web . notfound ( ) task_fs = self . task_factory . get_task_fs ( courseid , taskid ) ( method , mimetype_or_none , file_or_url ) = task_fs . distribute ( wanted_path ) if method == "local" : web . header ( 'Content-Type' , mimetype_or_none ) return file_or_url elif method == "url" : raise web . redirect ( file_or_url ) else : raise web . notfound ( ) | Download a file or a directory | 158 | 6 |
228,983 | def write_json_or_yaml ( file_path , content ) : with codecs . open ( file_path , "w" , "utf-8" ) as f : f . write ( get_json_or_yaml ( file_path , content ) ) | Write JSON or YAML depending on the file extension . | 60 | 12 |
228,984 | def get_json_or_yaml ( file_path , content ) : if os . path . splitext ( file_path ) [ 1 ] == ".json" : return json . dumps ( content , sort_keys = False , indent = 4 , separators = ( ',' , ': ' ) ) else : return inginious . common . custom_yaml . dump ( content ) | Generate JSON or YAML depending on the file extension . | 85 | 13 |
228,985 | def _set_session ( self , username , realname , email , language ) : self . _session . loggedin = True self . _session . email = email self . _session . username = username self . _session . realname = realname self . _session . language = language self . _session . token = None if "lti" not in self . _session : self . _session . lti = None | Init the session . Preserves potential LTI information . | 90 | 11 |
228,986 | def _destroy_session ( self ) : self . _session . loggedin = False self . _session . email = None self . _session . username = None self . _session . realname = None self . _session . token = None self . _session . lti = None | Destroy the session | 60 | 3 |
228,987 | def create_lti_session ( self , user_id , roles , realname , email , course_id , task_id , consumer_key , outcome_service_url , outcome_result_id , tool_name , tool_desc , tool_url , context_title , context_label ) : self . _destroy_session ( ) # don't forget to destroy the current session (cleans the threaded dict from web.py) self . _session . load ( '' ) # creates a new cookieless session session_id = self . _session . session_id self . _session . lti = { "email" : email , "username" : user_id , "realname" : realname , "roles" : roles , "task" : ( course_id , task_id ) , "outcome_service_url" : outcome_service_url , "outcome_result_id" : outcome_result_id , "consumer_key" : consumer_key , "context_title" : context_title , "context_label" : context_label , "tool_description" : tool_desc , "tool_name" : tool_name , "tool_url" : tool_url } return session_id | Creates an LTI cookieless session . Returns the new session id | 269 | 15 |
228,988 | def user_saw_task ( self , username , courseid , taskid ) : self . _database . user_tasks . update ( { "username" : username , "courseid" : courseid , "taskid" : taskid } , { "$setOnInsert" : { "username" : username , "courseid" : courseid , "taskid" : taskid , "tried" : 0 , "succeeded" : False , "grade" : 0.0 , "submissionid" : None , "state" : "" } } , upsert = True ) | Set in the database that the user has viewed this task | 129 | 11 |
def update_user_stats(self, username, task, submission, result_str, grade, state, newsub):
    """Update the per-user task statistics cache with a new submission.

    :param username: the user that submitted
    :param task: the Task object (used for its evaluation mode and ids)
    :param submission: submission dict; must contain "courseid", "taskid", "_id"
    :param result_str: textual result, compared against "success"
    :param grade: numeric grade of this submission
    :param state: task state string to cache
    :param newsub: True for a brand-new submission, False when an existing
                   submission's outcome was (re)computed
    """
    # Make sure a user_tasks row exists for this (user, course, task) triple.
    self.user_saw_task(username, submission["courseid"], submission["taskid"])
    if newsub:
        # Count the attempt; find_one_and_update returns the PRE-update document.
        old_submission = self._database.user_tasks.find_one_and_update(
            {"username": username, "courseid": submission["courseid"], "taskid": submission["taskid"]},
            {"$inc": {"tried": 1, "tokens.amount": 1}})
        # Check if the submission is the default download.
        # NOTE(review): the 'best' branch assumes old_submission is not None —
        # this holds only because user_saw_task() upserted the row above; confirm.
        set_default = task.get_evaluate() == 'last' or \
                      (task.get_evaluate() == 'student' and old_submission is None) or \
                      (task.get_evaluate() == 'best' and old_submission.get('grade', 0.0) <= grade)
        if set_default:
            # Cache this submission as the default one for the task.
            self._database.user_tasks.find_one_and_update(
                {"username": username, "courseid": submission["courseid"], "taskid": submission["taskid"]},
                {"$set": {"succeeded": result_str == "success", "grade": grade, "state": state, "submissionid": submission['_id']}})
    else:
        old_submission = self._database.user_tasks.find_one(
            {"username": username, "courseid": submission["courseid"], "taskid": submission["taskid"]})
        if task.get_evaluate() == 'best':  # if best, update cache consequently (with best submission)
            # Re-query the best finished submission: highest grade, then most recent.
            def_sub = list(self._database.submissions.find(
                {"username": username, "courseid": task.get_course_id(), "taskid": task.get_id(), "status": "done"})
                .sort([("grade", pymongo.DESCENDING), ("submitted_on", pymongo.DESCENDING)]).limit(1))
            if len(def_sub) > 0:
                self._database.user_tasks.find_one_and_update(
                    {"username": username, "courseid": submission["courseid"], "taskid": submission["taskid"]},
                    {"$set": {"succeeded": def_sub[0]["result"] == "success", "grade": def_sub[0]["grade"], "state": def_sub[0]["state"], "submissionid": def_sub[0]['_id']}})
        elif old_submission["submissionid"] == submission["_id"]:  # otherwise, update cache if needed
            # The recomputed submission is the cached default one: refresh its result.
            self._database.user_tasks.find_one_and_update(
                {"username": username, "courseid": submission["courseid"], "taskid": submission["taskid"]},
                {"$set": {"succeeded": submission["result"] == "success", "grade": submission["grade"], "state": submission["state"]}})
def get_course_aggregations(self, course):
    """Return the list of this course's aggregations, naturally sorted
    by their description field."""
    cursor = self._database.aggregations.find({"courseid": course.get_id()})
    return natsorted(list(cursor), key=lambda aggregation: aggregation["description"])
def get_accessible_time(self, plugin_override=True):
    """Get the accessible time of this task.

    Asks the 'task_accessibility' hook first; when at least one plugin
    answered and plugin_override is True, the first plugin value wins.
    Otherwise the task's own accessibility setting is returned.
    """
    hook_results = self._hook_manager.call_hook(
        'task_accessibility', course=self.get_course(), task=self,
        default=self._accessible)
    if plugin_override and len(hook_results):
        return hook_results[0]
    return self._accessible
def get_deadline(self):
    """Return a human-readable deadline string for this task.

    Always-open tasks get "No deadline", closed tasks "It's too late";
    otherwise the soft end date is formatted as dd/mm/YYYY HH:MM:SS.
    """
    if self.get_accessible_time().is_always_accessible():
        return _("No deadline")
    if self.get_accessible_time().is_never_accessible():
        return _("It's too late")
    # Prefer to show the soft deadline rather than the hard one
    return self.get_accessible_time().get_soft_end_date().strftime("%d/%m/%Y %H:%M:%S")
def get_authors(self, language):
    """Return this task's author(s) translated for *language*, or the
    empty string when no author is set."""
    if not self._author:
        return ""
    return self.gettext(language, self._author)
def adapt_input_for_backend(self, input_data):
    """Adapt the web input for the backend by letting every subproblem
    transform the data in turn, in declaration order."""
    adapted = input_data
    for subproblem in self._problems:
        adapted = subproblem.adapt_input_for_backend(adapted)
    return adapted
def get_users(self, course):
    """Return an OrderedDict of the course's registered users, sorted by
    real name; users with no info sort first (empty-string key)."""
    registered = self.user_manager.get_course_registered_users(course)
    infos = self.user_manager.get_users_info(registered)

    def _realname_key(item):
        # item is (username, info-tuple-or-None); missing info sorts first.
        info = item[1]
        return info[0] if info is not None else ""

    return OrderedDict(sorted(list(infos.items()), key=_realname_key))
def get_course(self, courseid):
    """Return the course with the given id, or raise a 404 page.

    :param courseid: id of the course to load
    :raises web.notfound: when the course cannot be loaded
    """
    try:
        course = self.course_factory.get_course(courseid)
    except Exception:
        # Was a bare `except:`, which would also swallow SystemExit and
        # KeyboardInterrupt; narrowed so only real errors become a 404.
        raise web.notfound()
    return course
def show_page(self, course):
    """Prepares and shows the course page: task list, the user's last
    submissions, per-task completion data and the overall course grade."""
    username = self.user_manager.session_username()
    if not self.user_manager.course_is_open_to_user(course, lti=False):
        return self.template_helper.get_renderer().course_unavailable()
    else:
        tasks = course.get_tasks()
        # Last 5 submissions of this user for the course, decorated with the task name.
        last_submissions = self.submission_manager.get_user_last_submissions(
            5, {"courseid": course.get_id(), "taskid": {"$in": list(tasks.keys())}})
        for submission in last_submissions:
            submission["taskname"] = tasks[submission['taskid']].get_name(self.user_manager.session_language())
        tasks_data = {}
        user_tasks = self.database.user_tasks.find(
            {"username": username, "courseid": course.get_id(), "taskid": {"$in": list(tasks.keys())}})
        is_admin = self.user_manager.has_staff_rights_on_course(course, username)
        # tasks_score = [earned weighted score, total available weight]
        tasks_score = [0.0, 0.0]
        for taskid, task in tasks.items():
            # Admins see every task; students only those whose window has started.
            tasks_data[taskid] = {"visible": task.get_accessible_time().after_start() or is_admin,
                                  "succeeded": False, "grade": 0.0}
            tasks_score[1] += task.get_grading_weight() if tasks_data[taskid]["visible"] else 0
        # Merge cached per-user results into tasks_data and accumulate the score.
        for user_task in user_tasks:
            tasks_data[user_task["taskid"]]["succeeded"] = user_task["succeeded"]
            tasks_data[user_task["taskid"]]["grade"] = user_task["grade"]
            weighted_score = user_task["grade"] * tasks[user_task["taskid"]].get_grading_weight()
            tasks_score[0] += weighted_score if tasks_data[user_task["taskid"]]["visible"] else 0
        course_grade = round(tasks_score[0] / tasks_score[1]) if tasks_score[1] > 0 else 0
        tag_list = course.get_all_tags_names_as_list(is_admin, self.user_manager.session_language())
        user_info = self.database.users.find_one({"username": username})
        return self.template_helper.get_renderer().course(
            user_info, course, last_submissions, tasks, tasks_data, course_grade, tag_list)
def mavparms(logfile):
    """Extract MAVLink parameters from a log file into the global `parms`
    dict, printing changed values when args.changesOnly is set.

    :param logfile: path of the telemetry log to read
    """
    # BUG FIX: the original opened `filename`, which is undefined in this
    # scope (NameError); the function parameter `logfile` is what was meant.
    mlog = mavutil.mavlink_connection(logfile)
    while True:
        try:
            m = mlog.recv_match(type=['PARAM_VALUE', 'PARM'])
            if m is None:
                return
        except Exception:
            # Truncated/corrupt log: stop parsing quietly.
            return
        if m.get_type() == 'PARAM_VALUE':
            pname = str(m.param_id).strip()
            value = m.param_value
        else:  # 'PARM' message variant uses Name/Value fields
            pname = m.Name
            value = m.Value
        if len(pname) > 0:
            if args.changesOnly is True and pname in parms and parms[pname] != value:
                print("%s %-15s %.6f -> %.6f" % (time.asctime(time.localtime(m._timestamp)),
                                                 pname, parms[pname], value))
            parms[pname] = value
def send(self, mavmsg, force_mavlink1=False):
    """Pack and send a MAVLink message.

    Writes the packed bytes to the underlying file, advances the 8-bit
    sequence counter, updates traffic statistics, and finally invokes the
    optional send callback with the original message object.
    """
    packet = mavmsg.pack(self, force_mavlink1=force_mavlink1)
    self.file.write(packet)
    # Sequence number wraps around at 256 (one byte on the wire).
    self.seq = (self.seq + 1) % 256
    self.total_packets_sent = self.total_packets_sent + 1
    self.total_bytes_sent = self.total_bytes_sent + len(packet)
    if self.send_callback:
        self.send_callback(mavmsg, *self.send_callback_args, **self.send_callback_kwargs)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.