idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
44,700 | def render_to_console ( self , message : str , ** kwargs ) : rendered = templating . render ( message , ** kwargs ) return self . write_to_console ( rendered ) | Renders the specified message to the console using Jinja2 template rendering with the kwargs as render variables . The message will also be dedented prior to rendering in the same fashion as other Cauldron template rendering actions . |
44,701 | def last_update_time ( self ) -> float : stdout = self . stdout_interceptor stderr = self . stderr_interceptor return max ( [ self . _last_update_time , stdout . last_write_time if stdout else 0 , stderr . last_write_time if stderr else 0 , ] ) | The last time at which the report was modified . |
44,702 | def results_cache_path ( self ) -> str : if not self . project : return '' return os . path . join ( self . project . results_path , '.cache' , 'steps' , '{}.json' . format ( self . id ) ) | Location where step report is cached between sessions to prevent loss of display data between runs . |
44,703 | def clear ( self ) -> 'Report' : self . body = [ ] self . data = SharedCache ( ) self . files = SharedCache ( ) self . _last_update_time = time . time ( ) return self | Clear all user - data stored in this instance and reset it to its originally loaded state |
44,704 | def append_body ( self , dom : str ) : self . flush_stdout ( ) self . body . append ( dom ) self . _last_update_time = time . time ( ) | Appends the specified HTML - formatted DOM string to the currently stored report body for the step . |
44,705 | def flush_stdout ( self ) : try : contents = self . stdout_interceptor . flush_all ( ) except Exception : return if len ( contents ) > 0 : self . body . append ( render_texts . preformatted_text ( contents ) ) self . _last_update_time = time . time ( ) return contents | Empties the standard out redirect buffer and renders the contents to the body as a preformatted text box . |
44,706 | def import_class ( klass ) : mod = __import__ ( klass . rpartition ( '.' ) [ 0 ] ) for segment in klass . split ( '.' ) [ 1 : - 1 ] : mod = getattr ( mod , segment ) return getattr ( mod , klass . rpartition ( '.' ) [ 2 ] ) | Import the named class and return that class |
44,707 | def create ( project : 'projects.Project' , include_path : str ) -> COMPONENT : source_path = environ . paths . clean ( os . path . join ( project . source_directory , include_path ) ) if not os . path . exists ( source_path ) : return COMPONENT ( [ ] , [ ] ) if os . path . isdir ( source_path ) : glob_path = os . path . join ( source_path , '**' , '*' ) include_paths = glob . iglob ( glob_path , recursive = True ) else : include_paths = [ source_path ] destination_path = os . path . join ( project . output_directory , include_path ) return COMPONENT ( includes = filter ( lambda web_include : web_include is not None , map ( functools . partial ( to_web_include , project ) , include_paths ) ) , files = [ file_io . FILE_COPY_ENTRY ( source = source_path , destination = destination_path ) ] ) | Creates a COMPONENT instance for the project component specified by the include path |
44,708 | def create_many ( project : 'projects.Project' , include_paths : typing . List [ str ] ) -> COMPONENT : return definitions . merge_components ( * map ( functools . partial ( create , project ) , include_paths ) ) | Creates a single COMPONENT instance for all of the specified project include paths |
44,709 | def to_web_include ( project : 'projects.Project' , file_path : str ) -> WEB_INCLUDE : if not file_path . endswith ( '.css' ) and not file_path . endswith ( '.js' ) : return None slug = file_path [ len ( project . source_directory ) : ] url = '/{}' . format ( slug ) . replace ( '\\' , '/' ) . replace ( '//' , '/' ) return WEB_INCLUDE ( name = ':project:{}' . format ( url ) , src = url ) | Converts the given file_path into a WEB_INCLUDE instance that represents the deployed version of this file to be loaded into the results project page |
44,710 | def reformat ( source : str ) -> str : value = source if source else '' return dedent ( value . strip ( '\n' ) ) . strip ( ) | Formats the source string to strip newlines on both ends and dedents the the entire string |
44,711 | def get_environment ( ) -> Environment : env = JINJA_ENVIRONMENT loader = env . loader resource_path = environ . configs . make_path ( 'resources' , 'templates' , override_key = 'template_path' ) if not loader : env . filters [ 'id' ] = get_id env . filters [ 'latex' ] = get_latex if not loader or resource_path not in loader . searchpath : env . loader = FileSystemLoader ( resource_path ) return env | Returns the jinja2 templating environment updated with the most recent cauldron environment configurations |
44,712 | def render ( template : typing . Union [ str , Template ] , ** kwargs ) : if not hasattr ( template , 'render' ) : template = get_environment ( ) . from_string ( textwrap . dedent ( template ) ) return template . render ( cauldron_template_uid = make_template_uid ( ) , ** kwargs ) | Renders a template string using Jinja2 and the Cauldron templating environment . |
44,713 | def render_file ( path : str , ** kwargs ) : with open ( path , 'r' ) as f : contents = f . read ( ) return get_environment ( ) . from_string ( contents ) . render ( cauldron_template_uid = make_template_uid ( ) , ** kwargs ) | Renders a file at the specified absolute path . The file can reside anywhere on the local disk as Cauldron s template environment path searching is ignored . |
44,714 | def render_template ( template_name : str , ** kwargs ) : return get_environment ( ) . get_template ( template_name ) . render ( cauldron_template_uid = make_template_uid ( ) , ** kwargs ) | Renders the template file with the given filename from within Cauldron s template environment folder . |
44,715 | def clean ( path : str ) -> str : if not path or path == '.' : path = os . curdir if path . startswith ( '~' ) : path = os . path . expanduser ( path ) return os . path . realpath ( os . path . abspath ( path ) ) | Cleans the specified path by expanding shorthand elements redirecting to the real path for symbolic links and removing any relative components to return a complete absolute path to the specified location . |
44,716 | def package ( * args : str ) -> str : return clean ( os . path . join ( os . path . dirname ( __file__ ) , '..' , * args ) ) | Creates an absolute path to a file or folder within the cauldron package using the relative path elements specified by the args . |
44,717 | def confirm ( question : str , default : bool = True ) -> bool : result = input ( '{question} [{yes}/{no}]:' . format ( question = question , yes = '(Y)' if default else 'Y' , no = 'N' if default else '(N)' ) ) if not result : return default if result [ 0 ] . lower ( ) in [ 'y' , 't' , '1' ] : return True return False | Requests confirmation of the specified question and returns that result |
44,718 | def fetch_last ( response : Response ) -> typing . Union [ str , None ] : recent_paths = environ . configs . fetch ( 'recent_paths' , [ ] ) if len ( recent_paths ) < 1 : response . fail ( code = 'NO_RECENT_PROJECTS' , message = 'No projects have been opened recently' ) . console ( ) return None return recent_paths [ 0 ] | Returns the last opened project path if such a path exists |
44,719 | def of_project ( project : 'projects.Project' ) -> dict : source_directory = project . source_directory libraries_status = [ { } if d . startswith ( source_directory ) else of_directory ( d ) for d in project . library_directories ] return dict ( project = of_directory ( source_directory ) , libraries = libraries_status ) | Returns the file status information for every file within the project source directory and its shared library folders . |
44,720 | def of_file ( path : str , root_directory : str = None ) -> dict : slug = ( path if root_directory is None else path [ len ( root_directory ) : ] . lstrip ( os . sep ) ) if not os . path . exists ( path ) or os . path . isdir ( path ) : return dict ( size = - 1 , modified = - 1 , path = slug ) size = os . path . getsize ( path ) modified = max ( os . path . getmtime ( path ) , os . path . getctime ( path ) ) return dict ( modified = modified , path = slug , size = size ) | Returns a dictionary containing status information for the specified file including when its name relative to the root directory when it was last modified and its size . |
44,721 | def of_directory ( directory : str , root_directory : str = None ) -> dict : glob_path = os . path . join ( directory , '**/*' ) root = root_directory if root_directory else directory results = filter ( lambda result : ( result [ 'modified' ] != - 1 ) , [ of_file ( path , root ) for path in glob . iglob ( glob_path , recursive = True ) ] ) return dict ( [ ( result [ 'path' ] , result ) for result in results ] ) | Returns a dictionary containing status entries recursively for all files within the specified directory and its descendant directories . |
44,722 | def run_local ( context : cli . CommandContext , project : projects . Project , project_steps : typing . List [ projects . ProjectStep ] , force : bool , continue_after : bool , single_step : bool , limit : int , print_status : bool , skip_library_reload : bool = False ) -> environ . Response : skip_reload = ( skip_library_reload or environ . modes . has ( environ . modes . TESTING ) ) if not skip_reload : runner . reload_libraries ( ) environ . log_header ( 'RUNNING' , 5 ) steps_run = [ ] if single_step : ps = project_steps [ 0 ] if len ( project_steps ) > 0 else None force = force or ( single_step and bool ( ps is not None ) ) steps_run = runner . section ( response = context . response , project = project , starting = ps , limit = 1 , force = force ) elif continue_after or len ( project_steps ) == 0 : ps = project_steps [ 0 ] if len ( project_steps ) > 0 else None steps_run = runner . complete ( context . response , project , ps , force = force , limit = limit ) else : for ps in project_steps : steps_run += runner . section ( response = context . response , project = project , starting = ps , limit = max ( 1 , limit ) , force = force or ( limit < 1 and len ( project_steps ) < 2 ) , skips = steps_run + [ ] ) project . write ( ) environ . log_blanks ( ) step_changes = [ ] for ps in steps_run : step_changes . append ( dict ( name = ps . definition . name , action = 'updated' , step = writing . step_writer . serialize ( ps ) . _asdict ( ) ) ) context . response . update ( step_changes = step_changes ) if print_status or context . response . failed : context . response . update ( project = project . kernel_serialize ( ) ) return context . response | Execute the run command locally within this cauldron environment |
44,723 | def loadalldatas ( ) : dependency_order = [ 'common' , 'profiles' , 'blog' , 'democomments' ] for app in dependency_order : project . recursive_load ( os . path . join ( paths . project_paths . manage_root , app ) ) | Loads all demo fixtures . |
44,724 | def _insert_timestamp ( self , slug , max_length = 255 ) : timestamp = str ( int ( time . time ( ) ) ) ts_len = len ( timestamp ) + 1 while len ( slug ) + ts_len > max_length : slug = '-' . join ( slug . split ( '-' ) [ : - 1 ] ) slug = '-' . join ( [ slug , timestamp ] ) return slug | Appends a timestamp integer to the given slug yet ensuring the result is less than the specified max_length . |
44,725 | def _slugify_title ( self ) : self . slug = slugify ( self . title ) while len ( self . slug ) > 255 : self . slug = '-' . join ( self . slug . split ( '-' ) [ : - 1 ] ) if Entry . objects . filter ( slug = self . slug ) . exclude ( id = self . id ) . exists ( ) : self . slug = self . _insert_timestamp ( self . slug ) | Slugify the Entry title but ensure it s less than the maximum number of characters . This method also ensures that a slug is unique by appending a timestamp to any duplicate slugs . |
44,726 | def run ( self ) : self . signals ( ) with self . listener ( ) : for job in self . jobs ( ) : if not job : self . jid = None self . title ( 'Sleeping for %fs' % self . interval ) time . sleep ( self . interval ) else : self . jid = job . jid self . title ( 'Working on %s (%s)' % ( job . jid , job . klass_name ) ) with Worker . sandbox ( self . sandbox ) : job . sandbox = self . sandbox job . process ( ) if self . shutdown : break | Run jobs popping one after another |
44,727 | def initialize ( ) : cauldron_module = get_cauldron_module ( ) if cauldron_module is not None : return cauldron_module sys . path . append ( ROOT_DIRECTORY ) cauldron_module = get_cauldron_module ( ) if cauldron_module is not None : return cauldron_module raise ImportError ( ' ' . join ( ( 'Unable to import cauldron.' 'The package was not installed in a known location.' ) ) ) | Initializes the cauldron library by confirming that it can be imported by the importlib library . If the attempt to import it fails the system path will be modified and the attempt retried . If both attempts fail an import error will be raised . |
44,728 | def run ( arguments : typing . List [ str ] = None ) : initialize ( ) from cauldron . invoke import parser from cauldron . invoke import invoker args = parser . parse ( arguments ) exit_code = invoker . run ( args . get ( 'command' ) , args ) sys . exit ( exit_code ) | Executes the cauldron command |
44,729 | def author_display ( author , * args ) : url = getattr ( author , 'get_absolute_url' , lambda : None ) ( ) short_name = getattr ( author , 'get_short_name' , lambda : six . text_type ( author ) ) ( ) if url : return mark_safe ( '<a href="{}">{}</a>' . format ( url , short_name ) ) else : return short_name | Returns either the linked or not - linked profile name . |
44,730 | def in_project_directory ( ) -> bool : current_directory = os . path . realpath ( os . curdir ) project_path = os . path . join ( current_directory , 'cauldron.json' ) return os . path . exists ( project_path ) and os . path . isfile ( project_path ) | Returns whether or not the current working directory is a Cauldron project directory which contains a cauldron . json file . |
44,731 | def load_shared_data ( path : typing . Union [ str , None ] ) -> dict : if path is None : return dict ( ) if not os . path . exists ( path ) : raise FileNotFoundError ( 'No such shared data file "{}"' . format ( path ) ) try : with open ( path , 'r' ) as fp : data = json . load ( fp ) except Exception : raise IOError ( 'Unable to read shared data file "{}"' . format ( path ) ) if not isinstance ( data , dict ) : raise ValueError ( 'Shared data must load into a dictionary object' ) return data | Load shared data from a JSON file stored on disk |
44,732 | def run_version ( args : dict ) -> int : version = environ . package_settings . get ( 'version' , 'unknown' ) print ( 'VERSION: {}' . format ( version ) ) return 0 | Displays the current version |
44,733 | def run_batch ( args : dict ) -> int : batcher . run_project ( project_directory = args . get ( 'project_directory' ) , log_path = args . get ( 'logging_path' ) , output_directory = args . get ( 'output_directory' ) , shared_data = load_shared_data ( args . get ( 'shared_data_path' ) ) ) return 0 | Runs a batch operation for the given arguments |
44,734 | def run_shell ( args : dict ) -> int : if args . get ( 'project_directory' ) : return run_batch ( args ) shell = CauldronShell ( ) if in_project_directory ( ) : shell . cmdqueue . append ( 'open "{}"' . format ( os . path . realpath ( os . curdir ) ) ) shell . cmdloop ( ) return 0 | Run the shell sub command |
44,735 | def parse ( args : typing . List [ str ] = None , arg_parser : ArgumentParser = None ) -> dict : parser = arg_parser or create_parser ( ) return vars ( parser . parse_args ( args ) ) | Parses the arguments for the cauldron server |
44,736 | def create_parser ( arg_parser : ArgumentParser = None ) -> ArgumentParser : parser = arg_parser or ArgumentParser ( ) parser . description = 'Cauldron kernel server' parser . add_argument ( '-p' , '--port' , dest = 'port' , type = int , default = 5010 ) parser . add_argument ( '-d' , '--debug' , dest = 'debug' , default = False , action = 'store_true' ) parser . add_argument ( '-v' , '--version' , dest = 'version' , default = False , action = 'store_true' ) parser . add_argument ( '-c' , '--code' , dest = 'authentication_code' , type = str , default = '' ) parser . add_argument ( '-n' , '--name' , dest = 'host' , type = str , default = None ) return parser | Creates an argument parser populated with the arg formats for the server command . |
44,737 | def match_rules ( tree , rules , fun = None , multi = False ) : if multi : context = match_rules_context_multi ( tree , rules ) else : context = match_rules_context ( tree , rules ) if not context : return None if fun : args = fun . __code__ . co_varnames if multi : res = [ ] for c in context : action_context = { } for arg in args : if arg in c : action_context [ arg ] = c [ arg ] res . append ( fun ( ** action_context ) ) return res else : action_context = { } for arg in args : if arg in context : action_context [ arg ] = context [ arg ] return fun ( ** action_context ) else : return context | Matches a Tree structure with the given query rules . |
44,738 | def initialize ( spark_home_path : str = None ) : if not spark_home_path : spark_home_path = os . environ . get ( 'SPARK_HOME' ) spark_home_path = environ . paths . clean ( spark_home_path ) if not os . path . exists ( spark_home_path ) : raise FileNotFoundError ( errno . ENOENT , os . strerror ( errno . ENOENT ) , spark_home_path ) spark_python_path = os . path . join ( spark_home_path , 'python' ) if not os . path . exists ( spark_python_path ) : raise FileNotFoundError ( errno . ENOENT , os . strerror ( errno . ENOENT ) , spark_python_path ) spark_pylib_path = os . path . join ( spark_python_path , 'lib' ) if not os . path . exists ( spark_pylib_path ) : raise FileNotFoundError ( errno . ENOENT , os . strerror ( errno . ENOENT ) , spark_python_path ) lib_glob = os . path . join ( spark_pylib_path , '*.zip' ) lib_sources = [ path for path in glob . iglob ( lib_glob ) ] unload ( ) for p in lib_sources : if p not in sys . path : sys . path . append ( p ) spark_environment . update ( dict ( spark_home_path = spark_home_path , spark_python_path = spark_python_path , spark_pylib_path = spark_pylib_path , libs = lib_sources ) ) | Registers and initializes the PySpark library dependencies so that the pyspark package can be imported and used within the notebook . |
44,739 | def is_remote_project ( self ) -> bool : project_path = environ . paths . clean ( self . source_directory ) return project_path . find ( 'cd-remote-project' ) != - 1 | Whether or not this project is remote |
44,740 | def library_directories ( self ) -> typing . List [ str ] : def listify ( value ) : return [ value ] if isinstance ( value , str ) else list ( value ) is_local_project = not self . is_remote_project folders = [ f for f in listify ( self . settings . fetch ( 'library_folders' , [ 'libs' ] ) ) if is_local_project or not f . startswith ( '..' ) ] folders . append ( '../__cauldron_shared_libs' ) folders . append ( self . source_directory ) return [ environ . paths . clean ( os . path . join ( self . source_directory , folder ) ) for folder in folders ] | The list of directories to all of the library locations |
44,741 | def results_path ( self ) -> str : def possible_paths ( ) : yield self . _results_path yield self . settings . fetch ( 'path_results' ) yield environ . configs . fetch ( 'results_directory' ) yield environ . paths . results ( self . uuid ) return next ( p for p in possible_paths ( ) if p is not None ) | The path where the project results will be written |
44,742 | def output_directory ( self ) -> str : return os . path . join ( self . results_path , 'reports' , self . uuid , 'latest' ) | Returns the directory where the project results files will be written |
44,743 | def refresh ( self , force : bool = False ) -> bool : lm = self . last_modified is_newer = lm is not None and lm >= os . path . getmtime ( self . source_path ) if not force and is_newer : return False old_definition = self . settings . fetch ( None ) new_definition = definitions . load_project_definition ( self . source_directory ) if not force and old_definition == new_definition : return False self . settings . clear ( ) . put ( ** new_definition ) old_step_definitions = old_definition . get ( 'steps' , [ ] ) new_step_definitions = new_definition . get ( 'steps' , [ ] ) if not force and old_step_definitions == new_step_definitions : return True old_steps = self . steps self . steps = [ ] for step_data in new_step_definitions : matches = [ s for s in old_step_definitions if s == step_data ] if len ( matches ) > 0 : index = old_step_definitions . index ( matches [ 0 ] ) self . steps . append ( old_steps [ index ] ) else : self . add_step ( step_data ) self . last_modified = time . time ( ) return True | Loads the cauldron . json definition file for the project and populates the project with the loaded data . Any existing data will be overwritten if the new definition file differs from the previous one . |
44,744 | def preformatted_text ( source : str ) -> str : environ . abort_thread ( ) if not source : return '' source = render_utils . html_escape ( source ) return '<pre class="preformatted-textbox">{text}</pre>' . format ( text = str ( textwrap . dedent ( source ) ) ) | Renders preformatted text box |
44,745 | def markdown ( source : str = None , source_path : str = None , preserve_lines : bool = False , font_size : float = None , ** kwargs ) -> dict : environ . abort_thread ( ) library_includes = [ ] rendered = textwrap . dedent ( templating . render_file ( source_path , ** kwargs ) if source_path else templating . render ( source or '' , ** kwargs ) ) if md is None : raise ImportError ( 'Unable to import the markdown package' ) offset = 0 while offset < len ( rendered ) : bound_chars = '$$' start_index = rendered . find ( bound_chars , offset ) if start_index < 0 : break inline = rendered [ start_index + 2 ] != '$' bound_chars = '$$' if inline else '$$$' end_index = rendered . find ( bound_chars , start_index + len ( bound_chars ) ) if end_index < 0 : break end_index += len ( bound_chars ) chunk = rendered [ start_index : end_index ] . strip ( '$' ) . strip ( ) . replace ( '@' , '\\' ) if inline : chunk = chunk . replace ( '\\' , '\\\\' ) chunk = latex ( chunk , inline ) rendered = '{pre}{gap}{latex}{gap}{post}' . format ( pre = rendered [ : start_index ] , latex = chunk , post = rendered [ end_index : ] , gap = '' if inline else '\n\n' ) if 'katex' not in library_includes : library_includes . append ( 'katex' ) offset = end_index extensions = [ 'markdown.extensions.extra' , 'markdown.extensions.admonition' , 'markdown.extensions.sane_lists' , 'markdown.extensions.nl2br' if preserve_lines else None ] body = templating . render_template ( 'markdown-block.html' , text = md . markdown ( rendered , extensions = [ e for e in extensions if e is not None ] ) , font_size = font_size ) pattern = re . compile ( 'src="(?P<url>[^"]+)"' ) body = pattern . sub ( r'data-src="\g<url>"' , body ) return dict ( body = body , library_includes = library_includes , rendered = rendered ) | Renders a markdown file with support for Jinja2 templating . Any keyword arguments will be passed to Jinja2 for templating prior to rendering the markdown to HTML for display within the notebook . |
44,746 | def populate_extra_files ( ) : out = [ 'cauldron/settings.json' ] for entry in glob . iglob ( 'cauldron/resources/examples/**/*' , recursive = True ) : out . append ( entry ) for entry in glob . iglob ( 'cauldron/resources/templates/**/*' , recursive = True ) : out . append ( entry ) for entry in glob . iglob ( 'cauldron/resources/web/**/*' , recursive = True ) : out . append ( entry ) return out | Creates a list of non - python data files to include in package distribution |
44,747 | def create_rename_entry ( step : 'projects.ProjectStep' , insertion_index : int = None , stash_path : str = None ) -> typing . Union [ None , STEP_RENAME ] : project = step . project name = step . definition . name name_parts = naming . explode_filename ( name , project . naming_scheme ) index = project . index_of_step ( name ) name_index = index if insertion_index is not None and insertion_index <= index : name_index += 1 name_parts [ 'index' ] = name_index new_name = naming . assemble_filename ( scheme = project . naming_scheme , ** name_parts ) if name == new_name : return None if not stash_path : fd , stash_path = tempfile . mkstemp ( prefix = '{}-{}--{}--' . format ( step . reference_id , name , new_name ) ) os . close ( fd ) return STEP_RENAME ( id = step . reference_id , index = index , old_name = name , new_name = new_name , old_path = step . source_path , stash_path = stash_path , new_path = os . path . join ( step . project . source_directory , new_name ) ) | Creates a STEP_RENAME for the given ProjectStep instance |
44,748 | def assemble_url ( endpoint : str , remote_connection : 'environ.RemoteConnection' = None ) -> str : url_root = ( remote_connection . url if remote_connection else environ . remote_connection . url ) url_root = url_root if url_root else 'localhost:5010' parts = [ 'http://' if not url_root . startswith ( 'http' ) else '' , url_root . rstrip ( '/' ) , '/' , endpoint . lstrip ( '/' ) ] return '' . join ( parts ) | Assembles a fully - resolved remote connection URL from the given endpoint and remote_connection structure . If the remote_connection is omitted the global remote_connection object stored in the environ module will be used in its place . |
44,749 | def parse_http_response ( http_response : HttpResponse ) -> 'environ.Response' : try : response = environ . Response . deserialize ( http_response . json ( ) ) except Exception as error : response = environ . Response ( ) . fail ( code = 'INVALID_REMOTE_RESPONSE' , error = error , message = 'Invalid HTTP response from remote connection' ) . console ( whitespace = 1 ) . response response . http_response = http_response return response | Returns a Cauldron response object parsed from the serialized JSON data specified in the http_response argument . If the response doesn t contain valid Cauldron response data an error Cauldron response object is returned instead . |
44,750 | def send_request ( endpoint : str , data : dict = None , remote_connection : 'environ.RemoteConnection' = None , method : str = None , timeout : int = 10 , max_retries : int = 10 , ** kwargs ) -> 'environ.Response' : if max_retries < 0 : return environ . Response ( ) . fail ( code = 'COMMUNICATION_ERROR' , error = None , message = 'Unable to communicate with the remote kernel.' ) . console ( whitespace = 1 ) . response url = assemble_url ( endpoint , remote_connection ) retriable_errors = ( requests . ConnectionError , requests . HTTPError , requests . Timeout ) default_method = 'POST' if data is not None else 'GET' try : http_response = requests . request ( method = method or default_method , url = url , json = data , timeout = 10 , ** kwargs ) except retriable_errors : return send_request ( endpoint = endpoint , data = data , remote_connection = remote_connection , method = method , timeout = timeout , max_retries = max_retries - 1 , ** kwargs ) return parse_http_response ( http_response ) | Sends a request to the remote kernel specified by the RemoteConnection object and processes the result . If the request fails or times out it will be retried until the max retries is reached . After that a failed response will be returned instead . |
44,751 | def view ( route : str ) : project = cauldron . project . get_internal_project ( ) results_path = project . results_path if project else None if not project or not results_path : return '' , 204 path = os . path . join ( results_path , route ) if not os . path . exists ( path ) : return '' , 204 return flask . send_file ( path , mimetype = mimetypes . guess_type ( path ) [ 0 ] , cache_timeout = - 1 ) | Retrieves the contents of the file specified by the view route if it exists . |
44,752 | def save ( project : 'projects.Project' , write_list : typing . List [ tuple ] = None ) -> typing . List [ tuple ] : try : writes = ( to_write_list ( project ) if write_list is None else write_list . copy ( ) ) except Exception as err : raise environ . systems . remove ( project . output_directory ) os . makedirs ( project . output_directory ) file_io . deploy ( writes ) return writes | Computes the file write list for the current state of the project if no write_list was specified in the arguments and then writes each entry in that list to disk . |
44,753 | def add_library_path ( path : str ) -> bool : if not os . path . exists ( path ) : return False if path not in sys . path : sys . path . append ( path ) return True | Adds the path to the Python system path if not already added and the path exists . |
44,754 | def remove_library_path ( path : str ) -> bool : if path in sys . path : sys . path . remove ( path ) return True return False | Removes the path from the Python system path if it is found in the system paths . |
44,755 | def reload_libraries ( library_directories : list = None ) : directories = library_directories or [ ] project = cauldron . project . get_internal_project ( ) if project : directories += project . library_directories if not directories : return def reload_module ( path : str , library_directory : str ) : path = os . path . dirname ( path ) if path . endswith ( '__init__.py' ) else path start_index = len ( library_directory ) + 1 end_index = - 3 if path . endswith ( '.py' ) else None package_path = path [ start_index : end_index ] module = sys . modules . get ( package_path . replace ( os . sep , '.' ) ) return importlib . reload ( module ) if module is not None else None def reload_library ( directory : str ) -> list : if not add_library_path ( directory ) : remove_library_path ( directory ) return [ ] glob_path = os . path . join ( directory , '**' , '*.py' ) return [ reload_module ( path , directory ) for path in glob . glob ( glob_path , recursive = True ) ] return [ reloaded_module for directory in directories for reloaded_module in reload_library ( directory ) if reload_module is not None ] | Reload the libraries stored in the project s local and shared library directories |
44,756 | def complete ( response : Response , project : typing . Union [ Project , None ] , starting : ProjectStep = None , force : bool = False , limit : int = - 1 ) -> list : if project is None : project = cauldron . project . get_internal_project ( ) starting_index = 0 if starting : starting_index = project . steps . index ( starting ) count = 0 steps_run = [ ] for ps in project . steps : if 0 < limit <= count : break if ps . index < starting_index : continue if not force and not ps . is_dirty ( ) : if limit < 1 : environ . log ( '[{}]: Nothing to update' . format ( ps . definition . name ) ) continue count += 1 steps_run . append ( ps ) success = source . run_step ( response , project , ps , force = True ) if not success or project . stop_condition . halt : return steps_run return steps_run | Runs the entire project writes the results files and returns the URL to the report file |
44,757 | def elapsed_time ( self ) -> float : current_time = datetime . utcnow ( ) start = self . start_time or current_time end = self . end_time or current_time return ( end - start ) . total_seconds ( ) | The number of seconds that has elapsed since the step started running if the step is still running . Or if the step has already finished running the amount of time that elapsed during the last execution of the step . |
44,758 | def get_elapsed_timestamp ( self ) -> str : t = self . elapsed_time minutes = int ( t / 60 ) seconds = int ( t - ( 60 * minutes ) ) millis = int ( 100 * ( t - int ( t ) ) ) return '{:>02d}:{:>02d}.{:<02d}' . format ( minutes , seconds , millis ) | A human - readable version of the elapsed time for the last execution of the step . The value is derived from the ProjectStep . elapsed_time property . |
44,759 | def get_dom ( self ) -> str : if self . is_running : return self . dumps ( ) if self . dom is not None : return self . dom dom = self . dumps ( ) self . dom = dom return dom | Retrieves the current value of the DOM for the step |
44,760 | def dumps ( self ) -> str : code_file_path = os . path . join ( self . project . source_directory , self . filename ) code = dict ( filename = self . filename , path = code_file_path , code = render . code_file ( code_file_path ) ) if not self . is_running : self . report . flush_stdout ( ) body = self . report . body [ : ] if self . is_running : body . append ( self . report . read_stdout ( ) ) body = '' . join ( body ) has_body = len ( body ) > 0 and ( body . find ( '<div' ) != - 1 or body . find ( '<span' ) != - 1 or body . find ( '<p' ) != - 1 or body . find ( '<pre' ) != - 1 or body . find ( '<h' ) != - 1 or body . find ( '<ol' ) != - 1 or body . find ( '<ul' ) != - 1 or body . find ( '<li' ) != - 1 ) std_err = ( self . report . read_stderr ( ) if self . is_running else self . report . flush_stderr ( ) ) . strip ( '\n' ) . rstrip ( ) is_visible = self . is_visible or self . is_running or self . error dom = templating . render_template ( 'step-body.html' , last_display_update = self . report . last_update_time , elapsed_time = self . get_elapsed_timestamp ( ) , code = code , body = body , has_body = has_body , id = self . definition . name , title = self . report . title , subtitle = self . report . subtitle , summary = self . report . summary , error = self . error , index = self . index , is_running = self . is_running , is_visible = is_visible , progress_message = self . progress_message , progress = int ( round ( max ( 0 , min ( 100 , 100 * self . progress ) ) ) ) , sub_progress_message = self . sub_progress_message , sub_progress = int ( round ( max ( 0 , min ( 100 , 100 * self . sub_progress ) ) ) ) , std_err = std_err ) if not self . is_running : self . dom = dom return dom | Writes the step information to an HTML - formatted string |
44,761 | def retry ( * excepts ) : @ decorator . decorator def new_func ( func , job ) : try : func ( job ) except tuple ( excepts ) : job . retry ( ) return new_func | A decorator to specify a bunch of exceptions that should be caught and the job retried . It turns out this comes up with relative frequency |
44,762 | def tracked ( self ) : results = json . loads ( self . client ( 'track' ) ) results [ 'jobs' ] = [ Job ( self , ** job ) for job in results [ 'jobs' ] ] return results | Return an array of job objects that are being tracked |
44,763 | def tagged ( self , tag , offset = 0 , count = 25 ) : return json . loads ( self . client ( 'tag' , 'get' , tag , offset , count ) ) | Return the paginated jids of jobs tagged with a tag |
44,764 | def failed ( self , group = None , start = 0 , limit = 25 ) : if not group : return json . loads ( self . client ( 'failed' ) ) else : results = json . loads ( self . client ( 'failed' , group , start , limit ) ) results [ 'jobs' ] = self . get ( * results [ 'jobs' ] ) return results | If no group is provided this returns a JSON blob of the counts of the various types of failures known . If a type is provided returns paginated job objects affected by that kind of failure . |
44,765 | def get ( self , * jids ) : if jids : return [ Job ( self . client , ** j ) for j in json . loads ( self . client ( 'multiget' , * jids ) ) ] return [ ] | Return jobs objects for all the jids |
44,766 | def title ( cls , message = None ) : if message == None : return getproctitle ( ) else : setproctitle ( 'qless-py-worker %s' % message ) logger . info ( message ) | Set the title of the process |
44,767 | def divide ( cls , jobs , count ) : jobs = list ( zip ( * zip_longest ( * [ iter ( jobs ) ] * count ) ) ) jobs = jobs or [ ( ) ] * count for index in range ( count ) : jobs [ index ] = [ j for j in jobs [ index ] if j != None ] return jobs | Divide up the provided jobs into count evenly - sized groups |
44,768 | def clean ( cls , path ) : for pth in os . listdir ( path ) : pth = os . path . abspath ( os . path . join ( path , pth ) ) if os . path . isdir ( pth ) : logger . debug ( 'Removing directory %s' % pth ) shutil . rmtree ( pth ) else : logger . debug ( 'Removing file %s' % pth ) os . remove ( pth ) | Clean up all the files in a provided path |
44,769 | def sandbox ( cls , path ) : try : os . makedirs ( path ) logger . debug ( 'Making %s' % path ) except OSError : if not os . path . isdir ( path ) : raise finally : cls . clean ( path ) try : yield finally : cls . clean ( path ) | Ensures path exists before yielding cleans up after |
44,770 | def resumable ( self ) : jids = self . client . workers [ self . client . worker_name ] [ 'jobs' ] jobs = self . client . jobs . get ( * jids ) queue_names = set ( [ queue . name for queue in self . queues ] ) return [ job for job in jobs if job . queue_name in queue_names ] | Find all the jobs that we d previously been working on |
44,771 | def jobs ( self ) : for job in self . resume : try : if job . heartbeat ( ) : yield job except exceptions . LostLockException : logger . exception ( 'Cannot resume %s' % job . jid ) while True : seen = False for queue in self . queues : job = queue . pop ( ) if job : seen = True yield job if not seen : yield None | Generator for all the jobs |
44,772 | def listener ( self ) : channels = [ 'ql:w:' + self . client . worker_name ] listener = Listener ( self . client . redis , channels ) thread = threading . Thread ( target = self . listen , args = ( listener , ) ) thread . start ( ) try : yield finally : listener . unlisten ( ) thread . join ( ) | Listen for pubsub messages relevant to this worker in a thread |
44,773 | def listen ( self , listener ) : for message in listener . listen ( ) : try : data = json . loads ( message [ 'data' ] ) if data [ 'event' ] in ( 'canceled' , 'lock_lost' , 'put' ) : self . kill ( data [ 'jid' ] ) except : logger . exception ( 'Pubsub error' ) | Listen for events that affect our ownership of a job |
44,774 | def signals ( self , signals = ( 'QUIT' , 'USR1' , 'USR2' ) ) : for sig in signals : signal . signal ( getattr ( signal , 'SIG' + sig ) , self . handler ) | Register our signal handler |
44,775 | def truncate_entry_titles ( apps , schema_editor ) : Entry = apps . get_model ( "andablog" , "Entry" ) for entry in Entry . objects . all ( ) : while len ( entry . title ) > TITLE_LENGTH : entry . title = ' ' . join ( entry . title . split ( ) [ : - 1 ] ) entry . save ( ) | This function will truncate the values of Entry . title so they are 255 characters or less . |
44,776 | def process ( self , job ) : sandbox = self . sandboxes . pop ( 0 ) try : with Worker . sandbox ( sandbox ) : job . sandbox = sandbox job . process ( ) finally : self . greenlets . pop ( job . jid , None ) self . sandboxes . append ( sandbox ) | Process a job |
44,777 | def kill ( self , jid ) : greenlet = self . greenlets . get ( jid ) if greenlet is not None : logger . warn ( 'Lost ownership of %s' % jid ) greenlet . kill ( ) | Stop the greenlet processing the provided jid |
44,778 | def run ( self ) : self . signals ( ) with self . listener ( ) : try : generator = self . jobs ( ) while not self . shutdown : self . pool . wait_available ( ) job = next ( generator ) if job : job . klass greenlet = gevent . Greenlet ( self . process , job ) self . greenlets [ job . jid ] = greenlet self . pool . start ( greenlet ) else : logger . debug ( 'Sleeping for %fs' % self . interval ) gevent . sleep ( self . interval ) except StopIteration : logger . info ( 'Exhausted jobs' ) finally : logger . info ( 'Waiting for greenlets to finish' ) self . pool . join ( ) | Work on jobs |
44,779 | def init_db_conn ( connection_name , connection_string , scopefunc = None ) : engine = create_engine ( connection_string ) session = scoped_session ( sessionmaker ( ) , scopefunc = scopefunc ) session . configure ( bind = engine ) pool . connections [ connection_name ] = Connection ( engine , session ) | Initialize a postgresql connection by each connection string defined in the configuration file |
44,780 | def initialize ( g , app ) : if 'DATABASES' in app . config and 'POSTGRESQL' in app . config [ 'DATABASES' ] : for k , v in app . config [ 'DATABASES' ] [ 'POSTGRESQL' ] . items ( ) : init_db_conn ( k , v ) if 'test' not in sys . argv : @ app . before_request def before_request ( ) : from flask import _app_ctx_stack for k , v in app . config [ 'DATABASES' ] [ 'POSTGRESQL' ] . items ( ) : init_db_conn ( k , v , scopefunc = _app_ctx_stack ) g . postgresql_pool = pool @ app . teardown_request def teardown_request ( exception ) : pool = getattr ( g , 'postgresql_pool' , None ) if pool is not None : for k , v in pool . connections . items ( ) : v . session . remove ( ) else : @ app . before_request def before_request ( ) : for k , v in app . config [ 'DATABASES' ] [ 'POSTGRESQL' ] . items ( ) : init_db_conn ( k , v ) g . postgresql_pool = pool | If postgresql url is defined in configuration params a scoped session will be created |
44,781 | def set_request ( self , r ) : for k in self . environments . keys ( ) : self . environments [ k ] . globals [ 'REQUEST' ] = r | Appends request object to the globals dict |
44,782 | def json_response ( self , status = 200 , data = { } , headers = { } ) : mimetype = 'application/json' header_dict = { } for k , v in headers . items ( ) : header_dict [ k ] = v return Response ( json . dumps ( data ) , status = status , mimetype = mimetype , headers = header_dict ) | To set flask to inject specific headers on response request such as CORS_ORIGIN headers |
44,783 | def template_response ( self , template_name , headers = { } , ** values ) : response = make_response ( self . render_template ( template_name , ** values ) ) for field , value in headers . items ( ) : response . headers . set ( field , value ) return response | Constructs a response allowing custom template name and content_type |
44,784 | def describe_key_pairs ( ) : region_keys = { } for r in boto3 . client ( 'ec2' , 'us-west-2' ) . describe_regions ( ) [ 'Regions' ] : region = r [ 'RegionName' ] client = boto3 . client ( 'ec2' , region_name = region ) try : pairs = client . describe_key_pairs ( ) if pairs : region_keys [ region ] = pairs except Exception as e : app . logger . info ( e ) return region_keys | Returns all key pairs for region |
44,785 | def init_app ( module , BASE_DIR , ** kwargs ) : global app def init_config ( ) : if 'FLASK_PHILO_SETTINGS_MODULE' not in os . environ : raise ConfigurationError ( 'No settings has been defined' ) app . config [ 'BASE_DIR' ] = BASE_DIR for v in dir ( default_settings ) : if not v . startswith ( '_' ) : app . config [ v ] = getattr ( default_settings , v ) app . debug = app . config [ 'DEBUG' ] settings = importlib . import_module ( os . environ [ 'FLASK_PHILO_SETTINGS_MODULE' ] ) for v in dir ( settings ) : if not v . startswith ( '_' ) : app . config [ v ] = getattr ( settings , v ) def init_urls ( ) : urls_module = importlib . import_module ( app . config [ 'URLS' ] ) for route in urls_module . URLS : app . add_url_rule ( route [ 0 ] , view_func = route [ 1 ] . as_view ( route [ 2 ] ) ) def init_logging ( ) : hndlr = logging . StreamHandler ( ) formatter = logging . Formatter ( '%(asctime)s - %(name)s - %(levelname)s - %(message)s' ) hndlr . setFormatter ( formatter ) app . logger . addHandler ( hndlr ) log_level = app . config [ 'LOG_LEVEL' ] app . logger . setLevel ( getattr ( logging , log_level ) ) def init_flask_oauthlib ( ) : oauth . init_app ( app ) def init_cors ( app ) : if 'CORS' in app . config : CORS ( app , resources = app . config [ 'CORS' ] , supports_credentials = app . config . get ( "CORS_SUPPORT_CREDENTIALS" , False ) , allow_headers = app . config . get ( "CORS_ALLOW_HEADERS" , "Content-Type,Authorization,accept-language,accept" ) ) init_db ( g , app ) init_logging ( ) init_urls ( ) init_flask_oauthlib ( ) init_jinja2 ( g , app ) init_cors ( app ) app = Flask ( module ) init_config ( ) return app | Initalize an app call this method once from start_app |
44,786 | def execute_command ( cmd , ** kwargs ) : cmd_dict = { c : 'flask_philo.commands_flask_philo.' + c for c in dir ( commands_flask_philo ) if not c . startswith ( '_' ) and c != 'os' } try : import console_commands for cm in console_commands . __all__ : if not cm . startswith ( '_' ) : cmd_dict [ cm ] = 'console_commands.' + cm except Exception : pass if cmd not in cmd_dict : raise ConfigurationError ( 'command {} does not exists' . format ( cmd ) ) cmd_module = importlib . import_module ( cmd_dict [ cmd ] ) kwargs [ 'app' ] = app cmd_module . run ( ** kwargs ) | execute a console command |
44,787 | def _convert ( self , value ) : if isinstance ( value , PasswordHash ) : return value elif isinstance ( value , str ) : value = value . encode ( 'utf-8' ) return PasswordHash . new ( value , self . rounds ) elif value is not None : raise TypeError ( 'Cannot convert {} to a PasswordHash' . format ( type ( value ) ) ) | Returns a PasswordHash from the given string . |
44,788 | def initialize ( g , app ) : if 'DATABASES' in app . config and 'REDIS' in app . config [ 'DATABASES' ] : for k , v in app . config [ 'DATABASES' ] [ 'REDIS' ] . items ( ) : init_db_conn ( k , ** v ) @ app . before_request def before_request ( ) : for k , v in app . config [ 'DATABASES' ] [ 'REDIS' ] . items ( ) : init_db_conn ( k , ** v ) g . redis_pool = redis_pool if 'test' not in sys . argv : @ app . teardown_request def teardown_request ( exception ) : pool = getattr ( g , 'redis_pool' , None ) if pool is not None : pool . close ( ) | If redis connection parameters are defined in configuration params a session will be created |
44,789 | def _initialize_from_dict ( self , data ) : self . _json = data self . _validate ( ) for name , value in self . _json . items ( ) : if name in self . _properties : if '$ref' in self . _properties [ name ] : if 'decimal' in self . _properties [ name ] [ '$ref' ] : value = Decimal ( value ) if 'format' in self . _properties [ name ] : format = self . _properties [ name ] [ 'format' ] if 'date-time' == format : value = utils . string_to_datetime ( value ) elif 'date' == format : value = utils . string_to_date ( value ) setattr ( self , name , value ) | Loads serializer from a request object |
44,790 | def _initialize_from_model ( self , model ) : for name , value in model . __dict__ . items ( ) : if name in self . _properties : setattr ( self , name , value ) | Loads a model from |
44,791 | def update ( self ) : obj = self . __model__ . objects . get_for_update ( id = self . id ) for name , value in self . __dict__ . items ( ) : if name in self . _properties : setattr ( obj , name , value ) obj . update ( ) return obj | Finds record and update it based in serializer values |
44,792 | def to_json ( self ) : data = { } for k , v in self . __dict__ . items ( ) : if not k . startswith ( '_' ) : if isinstance ( v , datetime ) : v = utils . datetime_to_string ( v ) elif isinstance ( v , date ) : v = utils . date_to_string ( v ) elif isinstance ( v , uuid . UUID ) : v = str ( v ) elif isinstance ( v , Decimal ) : v = str ( v ) data [ k ] = v return data | Returns a json representation |
44,793 | def OPERATING_SYSTEM ( stats , info ) : info . append ( ( 'architecture' , platform . machine ( ) . lower ( ) ) ) info . append ( ( 'distribution' , "%s;%s" % ( platform . linux_distribution ( ) [ 0 : 2 ] ) ) ) info . append ( ( 'system' , "%s;%s" % ( platform . system ( ) , platform . release ( ) ) ) ) | General information about the operating system . |
44,794 | def SESSION_TIME ( stats , info ) : duration = time . time ( ) - stats . started_time secs = int ( duration ) msecs = int ( ( duration - secs ) * 1000 ) info . append ( ( 'session_time' , '%d.%d' % ( secs , msecs ) ) ) | Total time of this session . |
44,795 | def PYTHON_VERSION ( stats , info ) : version = sys . version . replace ( ' \n' , ' ' ) . replace ( '\n' , ' ' ) python = ';' . join ( [ str ( c ) for c in sys . version_info ] + [ version ] ) info . append ( ( 'python' , python ) ) | Python interpreter version . |
44,796 | def read_config ( self ) : if self . enabled and not os . path . isdir ( self . location ) : try : os . makedirs ( self . location , 0o700 ) except OSError : logger . warning ( "Couldn't create %s, usage statistics won't be " "collected" , self . location ) self . status = Stats . ERRORED status_file = os . path . join ( self . location , 'status' ) if self . enabled and os . path . exists ( status_file ) : with open ( status_file , 'r' ) as fp : status = fp . read ( ) . strip ( ) if status == 'ENABLED' : self . status = Stats . ENABLED elif status == 'DISABLED' : self . status = Stats . DISABLED | Reads the configuration . |
44,797 | def write_config ( self , enabled ) : status_file = os . path . join ( self . location , 'status' ) with open ( status_file , 'w' ) as fp : if enabled is Stats . ENABLED : fp . write ( 'ENABLED' ) elif enabled is Stats . DISABLED : fp . write ( 'DISABLED' ) else : raise ValueError ( "Unknown reporting state %r" % enabled ) | Writes the configuration . |
44,798 | def enable_reporting ( self ) : if self . status == Stats . ENABLED : return if not self . enableable : logger . critical ( "Can't enable reporting" ) return self . status = Stats . ENABLED self . write_config ( self . status ) | Call this method to explicitly enable reporting . |
44,799 | def disable_reporting ( self ) : if self . status == Stats . DISABLED : return if not self . disableable : logger . critical ( "Can't disable reporting" ) return self . status = Stats . DISABLED self . write_config ( self . status ) if os . path . exists ( self . location ) : old_reports = [ f for f in os . listdir ( self . location ) if f . startswith ( 'report_' ) ] for old_filename in old_reports : fullname = os . path . join ( self . location , old_filename ) os . remove ( fullname ) logger . info ( "Deleted %d pending reports" , len ( old_reports ) ) | Call this method to explicitly disable reporting . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.