idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
18,200
def to_csv(self, path, mode=WRITE_MODE, dialect='excel', compression=None,
           newline='', **fmtparams):
    """Save the sequence to a csv file at ``path``.

    Each element of the sequence must itself be an iterable; its items
    become the cells of one csv row.
    """
    # Binary modes manage their own line endings: disable newline translation.
    if 'b' in mode:
        newline = None
    with universal_write_open(path, mode=mode, compression=compression,
                              newline=newline) as output:
        writer = csv.writer(output, dialect=dialect, **fmtparams)
        for row in self:
            writer.writerow([six.u(str(cell)) for cell in row])
Saves the sequence to a csv file . Each element should be an iterable which will be expanded to the elements of each row .
18,201
def _to_sqlite3_by_table(self, conn, table_name):
    """Insert every element of the sequence into ``table_name`` on ``conn``.

    Elements may be dicts, namedtuples, tuples or lists; the target table
    must already exist.
    """
    insert_tmpl = 'INSERT INTO {} ({}) VALUES ({})'

    def _insert_item(item):
        # One branch per supported element shape.
        if isinstance(item, dict):
            columns = ', '.join(item.keys())
            marks = ', '.join('?' * len(item))
            conn.execute(insert_tmpl.format(table_name, columns, marks),
                         tuple(item.values()))
        elif is_namedtuple(item):
            columns = ', '.join(item._fields)
            marks = ', '.join('?' * len(item))
            conn.execute(insert_tmpl.format(table_name, columns, marks), item)
        elif isinstance(item, (list, tuple)):
            marks = ', '.join('?' * len(item))
            conn.execute('INSERT INTO {} VALUES ({})'.format(table_name, marks),
                         item)
        else:
            raise TypeError(
                'item must be one of dict, namedtuple, tuple or list got {}'
                .format(type(item)))

    self.for_each(_insert_item)
Saves the sequence to the specified table of sqlite3 database . Each element can be a dictionary namedtuple tuple or list . Target table must be created in advance .
18,202
def to_sqlite3(self, conn, target, *args, **kwargs):
    """Save the sequence to a sqlite3 database.

    ``target`` is either an INSERT query or a bare table name (the table
    must exist already).  ``conn`` is an open Connection/Cursor or a
    database file path.

    :raises ValueError: if ``conn`` is neither a path nor a connection.
    """
    # NOTE(review): 'update into' is not SQL; kept as-is since callers may
    # rely on the existing matching behavior -- confirm before tightening.
    insert_regex = re.compile(r'(insert|update)\s+into', flags=re.IGNORECASE)
    if insert_regex.match(target):
        insert_f = self._to_sqlite3_by_query
    else:
        insert_f = self._to_sqlite3_by_table
    if isinstance(conn, (sqlite3.Connection, sqlite3.Cursor)):
        insert_f(conn, target)
        conn.commit()
    elif isinstance(conn, str):
        with sqlite3.connect(conn, *args, **kwargs) as input_conn:
            insert_f(input_conn, target)
            input_conn.commit()
    else:
        # Fixed: message previously read "must be a must be a".
        raise ValueError('conn must be a file path or sqlite3 Connection/Cursor')
Saves the sequence to sqlite3 database . Target table must be created in advance . The table schema is inferred from the elements in the sequence if only target table name is supplied .
18,203
def to_pandas(self, columns=None):
    """Convert the sequence to a pandas DataFrame.

    Delegates to ``pandas.DataFrame.from_records``; ``columns`` optionally
    names the resulting columns.
    """
    import pandas
    records = self.to_list()
    return pandas.DataFrame.from_records(records, columns=columns)
Converts sequence to a pandas DataFrame using pandas . DataFrame . from_records
18,204
def open(self, path, delimiter=None, mode='r', buffering=-1, encoding=None,
         errors=None, newline=None):
    """Read the file at ``path`` into a sequence.

    When ``delimiter`` is given, the whole file is split on it; otherwise
    the sequence iterates over lines.

    :raises ValueError: if ``mode`` contains characters other than r, b, t.
    """
    # Only read modes are supported; 'w'/'a'/'+' are rejected up front.
    if not re.match('^[rbt]{1,3}$', mode):
        # Fixed: message previously read "must be only have r, b, and t".
        raise ValueError('mode argument must only contain r, b, and t')
    file_open = get_read_function(path, self.disable_compression)
    file = file_open(path, mode=mode, buffering=buffering, encoding=encoding,
                     errors=errors, newline=newline)
    if delimiter is None:
        return self(file)
    return self(''.join(list(file)).split(delimiter))
Reads and parses input files as defined .
18,205
def csv(self, csv_file, dialect='excel', **fmt_params):
    """Read and parse a csv stream or file into a sequence.

    :raises ValueError: if ``csv_file`` is neither a path nor an iterator.
    """
    if isinstance(csv_file, str):
        opener = get_read_function(csv_file, self.disable_compression)
        input_file = opener(csv_file)
    elif hasattr(csv_file, 'next') or hasattr(csv_file, '__next__'):
        input_file = csv_file
    else:
        raise ValueError('csv_file must be a file path or implement the iterator interface')
    reader = csvapi.reader(input_file, dialect=dialect, **fmt_params)
    # Cache so the underlying file is consumed exactly once.
    return self(reader).cache(delete_lineage=True)
Reads and parses the input of a csv stream or file .
18,206
def jsonl(self, jsonl_file):
    """Read and parse a jsonl file (one JSON document per line)."""
    if isinstance(jsonl_file, str):
        opener = get_read_function(jsonl_file, self.disable_compression)
        input_file = opener(jsonl_file)
    else:
        input_file = jsonl_file
    # Cache so the underlying stream is consumed exactly once.
    return self(input_file).map(jsonapi.loads).cache(delete_lineage=True)
Reads and parses the input of a jsonl file stream or file .
18,207
def json(self, json_file):
    """Read and parse a json file path or file handle.

    A top-level list becomes a sequence of elements; a top-level object
    becomes a sequence of (key, value) items.

    :raises ValueError: if ``json_file`` is neither a path nor readable.
    """
    if isinstance(json_file, str):
        opener = get_read_function(json_file, self.disable_compression)
        json_input = jsonapi.load(opener(json_file))
    elif hasattr(json_file, 'read'):
        json_input = jsonapi.load(json_file)
    else:
        raise ValueError('json_file must be a file path or implement the iterator interface')
    if isinstance(json_input, list):
        return self(json_input)
    return self(six.viewitems(json_input))
Reads and parses the input of a json file handler or file .
18,208
def sqlite3(self, conn, sql, parameters=None, *args, **kwargs):
    """Read input by querying a sqlite database.

    ``conn`` is an open Connection/Cursor or a database file path.

    :raises ValueError: if ``conn`` is neither a path nor a connection.
    """
    if parameters is None:
        parameters = ()
    if isinstance(conn, (sqlite3api.Connection, sqlite3api.Cursor)):
        return self(conn.execute(sql, parameters))
    elif isinstance(conn, str):
        with sqlite3api.connect(conn, *args, **kwargs) as input_conn:
            return self(input_conn.execute(sql, parameters))
    else:
        # Fixed: message previously read "must be a must be a".
        raise ValueError('conn must be a file path or sqlite3 Connection/Cursor')
Reads input by querying from a sqlite database .
18,209
def full(self):
    """Return True if the queue holds at least ``maxsize`` items.

    A non-positive ``maxsize`` means the queue is unbounded, so it is
    never full.
    """
    maxsize = self._parent._maxsize
    if maxsize <= 0:
        return False
    return self.qsize() >= maxsize
Return True if there are maxsize items in the queue .
18,210
async def join(self):
    """Block until every item in the queue has been gotten and processed.

    Re-checks the unfinished-task counter under the synchronous mutex each
    time the 'finished' event wakes us.
    """
    parent = self._parent
    while True:
        with parent._sync_mutex:
            if parent._unfinished_tasks == 0:
                return
        await parent._finished.wait()
Block until all items in the queue have been gotten and processed .
18,211
def save(self, to_dir, compressionlevel=9):
    """Save compressed compiled dictionary data into ``to_dir``.

    Creates the directory (mode 0755) when it does not yet exist.

    :raises Exception: if ``to_dir`` exists but is not a directory.
    """
    if os.path.exists(to_dir):
        if not os.path.isdir(to_dir):
            raise Exception('Not a directory : %s' % to_dir)
    else:
        os.makedirs(to_dir, mode=int('0755', 8))
    _save(os.path.join(to_dir, FILE_USER_FST_DATA),
          self.compiledFST[0], compressionlevel)
    _save(os.path.join(to_dir, FILE_USER_ENTRIES_DATA),
          pickle.dumps(self.entries), compressionlevel)
Save compressed compiled dictionary data .
18,212
def analyze(self, text):
    """Run ``text`` through the char filters, the tokenizer, then the
    token filters, returning the resulting token stream."""
    filtered = text
    for char_filter in self.char_filters:
        filtered = char_filter.filter(filtered)
    token_stream = self.tokenizer.tokenize(filtered, stream=True, wakati=False)
    for token_filter in self.token_filters:
        token_stream = token_filter.filter(token_stream)
    return token_stream
Analyze the input text with custom CharFilters , Tokenizer and TokenFilters .
18,213
def compileFST(fst):
    """Convert an FST to a byte array representing its arcs."""
    arcs = []
    address = {}  # state id -> byte offset (from the end) of that state's arcs
    pos = 0
    for (num, s) in enumerate(fst.dictionary.values()):
        # Transition arcs, emitted in reverse label order.
        for i, (c, v) in enumerate(sorted(s.trans_map.items(), reverse=True)):
            bary = bytearray()
            flag = 0
            output_size, output = 0, bytes()
            if i == 0:
                flag += FLAG_LAST_ARC
            if v['output']:
                flag += FLAG_ARC_HAS_OUTPUT
                output_size = len(v['output'])
                output = v['output']
            bary += pack('b', flag)
            # Labels are single bytes; 'B' on py3 (int), 'c' on py2 (str of len 1).
            if PY3:
                bary += pack('B', c)
            else:
                bary += pack('c', c)
            if output_size > 0:
                bary += pack('I', output_size)
                bary += output
            # Target state must already have been emitted (states are
            # processed in a topological-ish order).
            next_addr = address.get(v['state'].id)
            assert next_addr is not None
            # +4 accounts for the 'I' target field written just below.
            target = (pos + len(bary) + 4) - next_addr
            assert target > 0
            bary += pack('I', target)
            if PY3:
                arcs.append(bytes(bary))
            else:
                arcs.append(b''.join(chr(b) for b in bary))
            pos += len(bary)
        # Final arc for accepting states.
        if s.is_final():
            bary = bytearray()
            flag = FLAG_FINAL_ARC
            output_count = 0
            if s.final_output and any(len(e) > 0 for e in s.final_output):
                flag += FLAG_ARC_HAS_FINAL_OUTPUT
                output_count = len(s.final_output)
            if not s.trans_map:
                flag += FLAG_LAST_ARC
            bary += pack('b', flag)
            if output_count:
                bary += pack('I', output_count)
            for out in s.final_output:
                output_size = len(out)
                bary += pack('I', output_size)
                if output_size:
                    bary += out
            if PY3:
                arcs.append(bytes(bary))
            else:
                arcs.append(b''.join(chr(b) for b in bary))
            pos += len(bary)
        address[s.id] = pos
    logger.debug('compiled arcs size: %d' % len(arcs))
    # Arcs were collected back-to-front; reverse so offsets resolve forward.
    arcs.reverse()
    return b''.join(arcs)
Convert an FST to a byte array representing its arcs .
18,214
def tokenize(self, text, stream=False, wakati=False, baseform_unk=True, dotfile=''):
    """Tokenize the input text.

    Returns a generator in streaming mode, otherwise a list of tokens.
    Dotfile output is only honored for non-streaming input shorter than
    ``Tokenizer.MAX_CHUNK_SIZE``.
    """
    # A wakati-only tokenizer forces wakati output.
    if self.wakati:
        wakati = True
    if stream:
        return self.__tokenize_stream(text, wakati, baseform_unk, '')
    if dotfile and len(text) < Tokenizer.MAX_CHUNK_SIZE:
        return list(self.__tokenize_stream(text, wakati, baseform_unk, dotfile))
    return list(self.__tokenize_stream(text, wakati, baseform_unk, ''))
Tokenize the input text .
18,215
def get_ip(request, real_ip_only=False, right_most_proxy=False):
    """Return the client's best-matched IP address from request.META, or None.

    Headers are scanned in IPWARE_META_PRECEDENCE_ORDER; the first public
    IP wins immediately.  Otherwise (unless ``real_ip_only``) the best
    non-public candidate seen so far is remembered, preferring
    non-loopback addresses over loopback ones.
    """
    best_matched_ip = None
    warnings.warn('get_ip is deprecated and will be removed in 3.0.',
                  DeprecationWarning)
    for key in defs.IPWARE_META_PRECEDENCE_ORDER:
        # Fall back to the dash-separated header name form.
        value = request.META.get(key, request.META.get(key.replace('_', '-'), '')).strip()
        # NOTE(review): value is always a str after .strip(), so the
        # `is not None` check is redundant but harmless.
        if value is not None and value != '':
            ips = [ip.strip().lower() for ip in value.split(',')]
            # Proxy chains may list the client last instead of first.
            if right_most_proxy and len(ips) > 1:
                ips = reversed(ips)
            for ip_str in ips:
                if ip_str and is_valid_ip(ip_str):
                    if not ip_str.startswith(NON_PUBLIC_IP_PREFIX):
                        return ip_str
                    if not real_ip_only:
                        loopback = defs.IPWARE_LOOPBACK_PREFIX
                        if best_matched_ip is None:
                            best_matched_ip = ip_str
                        elif best_matched_ip.startswith(loopback) and not ip_str.startswith(loopback):
                            # Prefer a non-loopback private IP over loopback.
                            best_matched_ip = ip_str
    return best_matched_ip
Returns client s best - matched ip - address or None
18,216
def get_real_ip(request, right_most_proxy=False):
    """Deprecated: return the client's best-matched externally-routable IP,
    or None.  Thin wrapper over ``get_ip(real_ip_only=True)``."""
    warnings.warn('get_real_ip is deprecated and will be removed in 3.0.',
                  DeprecationWarning)
    return get_ip(request, real_ip_only=True,
                  right_most_proxy=right_most_proxy)
Returns client s best - matched real externally - routable ip - address or None
18,217
def is_valid_ipv6(ip_str):
    """Return True if ``ip_str`` parses as an IPv6 address."""
    try:
        socket.inet_pton(socket.AF_INET6, ip_str)
        return True
    except socket.error:
        return False
Check the validity of an IPv6 address
18,218
def get_request_meta(request, key):
    """Return the stripped value of ``key`` from request.META, or None.

    Falls back to the dash-separated form of ``key`` when the underscored
    form is absent; an empty (or whitespace-only) value yields None.
    """
    fallback = request.META.get(key.replace('_', '-'), '')
    value = request.META.get(key, fallback).strip()
    if value == '':
        return None
    return value
Given a key it returns a cleaned up version of the value from request . META or None
18,219
def get_ips_from_string(ip_str):
    """Split a comma-separated string into a list of candidate IPs.

    Returns ``(ip_list, count)`` when both the first and last entries are
    valid IPs, otherwise ``([], 0)``.
    """
    candidates = [part.strip().lower()
                  for part in ip_str.split(',') if part.strip()]
    if candidates and is_valid_ip(candidates[0]) and is_valid_ip(candidates[-1]):
        return candidates, len(candidates)
    return [], 0
Given a string it returns a list of one or more valid IP addresses
18,220
def parse_args(sys_argv, usage):
    """Parse the script's command-line arguments.

    Expects exactly two positional arguments and returns them as the pair
    ``(template, context)``.
    """
    parser = OptionParser(usage=usage)
    _options, positional = parser.parse_args(sys_argv[1:])
    template, context = positional
    return template, context
Parse the script s command - line arguments and return the pair ( template , context ) .
18,221
def render(template, context=None, **kwargs):
    """Render ``template`` with ``context`` using a fresh Renderer."""
    return Renderer().render(template, context, **kwargs)
Return the given template string rendered using the given context .
18,222
def _get_value(context, key):
    """Look up ``key`` on a context item; return ``_NOT_FOUND`` on failure.

    Dicts are checked by key.  Non-builtin objects are checked by
    attribute, with callable attributes invoked and their result returned.
    """
    if isinstance(context, dict):
        if key in context:
            return context[key]
    elif type(context).__module__ != _BUILTIN_MODULE:
        # Builtins (str, int, ...) never resolve keys via attributes.
        try:
            attr = getattr(context, key)
        except AttributeError:
            pass
        else:
            return attr() if callable(attr) else attr
    return _NOT_FOUND
Retrieve a key s value from a context item .
18,223
def create(*context, **kwargs):
    """Build a ContextStack from context-like items.

    None items are skipped; ContextStack items are flattened in; keyword
    arguments are pushed last (highest precedence).
    """
    stack = ContextStack()
    for item in context:
        if item is None:
            continue
        if isinstance(item, ContextStack):
            stack._stack.extend(item._stack)
        else:
            stack.push(item)
    if kwargs:
        stack.push(kwargs)
    return stack
Build a ContextStack instance from a sequence of context - like items .
18,224
def get(self, name):
    """Resolve a dotted ``name`` against the context stack.

    '.' resolves to the top item.  Raises KeyNotFoundError when the first
    part or any later part of the dotted path cannot be resolved.
    """
    if name == '.':
        try:
            return self.top()
        except IndexError:
            raise KeyNotFoundError(".", "empty context stack")
    parts = name.split('.')
    try:
        value = self._get_simple(parts[0])
    except KeyNotFoundError:
        raise KeyNotFoundError(name, "first part")
    for part in parts[1:]:
        value = _get_value(value, part)
        if value is _NOT_FOUND:
            raise KeyNotFoundError(name, "missing %s" % repr(part))
    return value
Resolve a dotted name against the current context stack .
18,225
def _get_simple(self, name):
    """Return the first hit for a non-dotted ``name``, searching the stack
    from the most recently pushed item down.

    :raises KeyNotFoundError: when no item resolves ``name``.
    """
    for item in reversed(self._stack):
        value = _get_value(item, name)
        if value is not _NOT_FOUND:
            return value
    raise KeyNotFoundError(name, "part missing")
Query the stack for a non - dotted name .
18,226
def _to_unicode_soft(self, s):
    """Convert ``s`` to unicode, leaving unicode (sub)instances untouched
    so subclass markup survives."""
    return s if isinstance(s, unicode) else self.unicode(s)
Convert a basestring to unicode preserving any unicode subclass .
18,227
def unicode(self, b, encoding=None):
    """Decode byte string ``b`` using ``string_encoding`` (unless an
    explicit ``encoding`` is given) and ``decode_errors``."""
    chosen = self.string_encoding if encoding is None else encoding
    return unicode(b, chosen, self.decode_errors)
Convert a byte string to unicode using string_encoding and decode_errors .
18,228
def _make_loader(self):
    """Build a Loader mirroring this renderer's current attributes."""
    return Loader(
        file_encoding=self.file_encoding,
        extension=self.file_extension,
        to_unicode=self.unicode,
        search_dirs=self.search_dirs,
    )
Create a Loader instance using current attributes .
18,229
def _make_load_template(self):
    """Return a callable mapping a template name to its template string."""
    loader = self._make_loader()

    def load_template(template_name):
        return loader.load_name(template_name)

    return load_template
Return a function that loads a template by name .
18,230
def _make_load_partial(self):
    """Return a callable that loads a partial by name.

    Falls back to template loading when no partials mapping is configured.
    """
    if self.partials is None:
        return self._make_load_template()
    partials = self.partials

    def load_partial(name):
        template = partials.get(name)
        if template is None:
            raise TemplateNotFoundError(
                "Name %s not found in partials: %s" % (repr(name), type(partials)))
        return self._to_unicode_hard(template)

    return load_partial
Return a function that loads a partial by name .
18,231
def _is_missing_tags_strict(self):
    """Return True for strict missing-tag handling, False for ignore.

    :raises Exception: for any other ``missing_tags`` value.
    """
    val = self.missing_tags
    if val == MissingTags.strict:
        return True
    if val == MissingTags.ignore:
        return False
    raise Exception("Unsupported 'missing_tags' value: %s" % repr(val))
Return whether missing_tags is set to strict .
18,232
def _make_render_engine(self):
    """Assemble a RenderEngine wired to this renderer's helpers."""
    # Argument order preserves the original evaluation order of the
    # resolve_* factories.
    return RenderEngine(
        literal=self._to_unicode_hard,
        escape=self._escape_to_unicode,
        resolve_context=self._make_resolve_context(),
        resolve_partial=self._make_resolve_partial(),
        to_str=self.str_coerce,
    )
Return a RenderEngine instance for rendering .
18,233
def _render_object(self, obj, *context, **kwargs):
    """Render the template associated with ``obj``, using ``obj`` itself
    as the innermost context item."""
    loader = self._make_loader()
    if isinstance(obj, TemplateSpec):
        template = SpecLoader(loader).load(obj)
    else:
        template = loader.load_object(obj)
    return self._render_string(template, obj, *context, **kwargs)
Render the template associated with the given object .
18,234
def render_name(self, template_name, *context, **kwargs):
    """Render the named template with the given context."""
    template = self._make_loader().load_name(template_name)
    return self._render_string(template, *context, **kwargs)
Render the template with the given name using the given context .
18,235
def render_path(self, template_path, *context, **kwargs):
    """Render the template file at ``template_path`` with the given context."""
    template = self._make_loader().read(template_path)
    return self._render_string(template, *context, **kwargs)
Render the template at the given path using the given context .
18,236
def _render_string(self, template, *context, **kwargs):
    """Render a template string with the given context."""
    unicode_template = self._to_unicode_hard(template)

    def render_func(engine, stack):
        return engine.render(unicode_template, stack)

    return self._render_final(render_func, *context, **kwargs)
Render the given template string using the given context .
18,237
def render(self, template, *context, **kwargs):
    """Render a template string, a ParsedTemplate, or a view object."""
    if is_string(template):
        return self._render_string(template, *context, **kwargs)
    if isinstance(template, ParsedTemplate):
        def render_func(engine, stack):
            return template.render(engine, stack)
        return self._render_final(render_func, *context, **kwargs)
    # Anything else is treated as a view object with an associated template.
    return self._render_object(template, *context, **kwargs)
Render the given template string view template or parsed template .
18,238
def read(path):
    """Read a text file and return its contents as a unicode string,
    decoded with FILE_ENCODING."""
    # Context manager replaces the original manual try/finally close.
    with open(path, 'rb') as f:
        return f.read().decode(FILE_ENCODING)
Read and return the contents of a text file as a unicode string .
18,239
def strip_html_comments(text):
    """Remove every line of ``text`` that begins with an HTML comment
    opener ('<!--'); other lines are kept verbatim."""
    kept = [line for line in text.splitlines(True)
            if not line.startswith("<!--")]
    return "".join(kept)
Strip HTML comments from a unicode string .
18,240
def convert_md_to_rst(md_path, rst_temp_path):
    """Convert a Markdown file to reStructuredText via pandoc.

    Writes the converted output to ``rst_temp_path`` and returns its
    contents.  Exits the process when pandoc did not produce output.
    """
    command = "pandoc --write=rst --output=%s %s" % (rst_temp_path, md_path)
    # Fixed: the original print had an unterminated string literal;
    # reconstructed to report all three values it referenced.
    print("converting with pandoc: %s to %s\n-->%s" % (md_path, rst_temp_path, command))
    if os.path.exists(rst_temp_path):
        os.remove(rst_temp_path)
    os.system(command)
    if not os.path.exists(rst_temp_path):
        s = ("Error running: %s\n"
             " Did you install pandoc per the %s docstring?" % (command, __file__))
        sys.exit(s)
    return read(rst_temp_path)
Convert the contents of a file from Markdown to reStructuredText .
18,241
def get_object_directory(self, obj):
    """Return the directory of the module that defines ``obj``'s class.

    Returns None when the module (or its file) cannot be determined,
    e.g. for builtins.
    """
    if not hasattr(obj, '__module__'):
        return None
    module = sys.modules[obj.__module__]
    if not hasattr(module, '__file__'):
        return None
    return os.path.dirname(module.__file__)
Return the directory containing an object s defining class .
18,242
def make_template_name(self, obj):
    """Return the canonical template name for ``obj``.

    The CamelCase class name becomes snake_case, e.g.
    HelloWorld -> hello_world.
    """
    class_name = obj.__class__.__name__
    underscored = re.sub('[A-Z]', lambda m: '_' + m.group(0).lower(), class_name)
    # Drop the leading underscore produced by the first capital letter.
    return underscored[1:]
Return the canonical template name for an object instance .
18,243
def make_file_name(self, template_name, template_extension=None):
    """Return the file name for ``template_name``.

    ``template_extension`` defaults to the locator's configured extension;
    pass False to omit the extension entirely.
    """
    if template_extension is None:
        template_extension = self.template_extension
    if template_extension is False:
        return template_name
    return template_name + os.path.extsep + template_extension
Generate and return the file name for the given template name .
18,244
def _find_path ( self , search_dirs , file_name ) : for dir_path in search_dirs : file_path = os . path . join ( dir_path , file_name ) if os . path . exists ( file_path ) : return file_path return None
Search for the given file and return the path .
18,245
def _find_path_required(self, search_dirs, file_name):
    """Like ``_find_path`` but raises TemplateNotFoundError when the file
    cannot be found in any search directory."""
    path = self._find_path(search_dirs, file_name)
    if path is None:
        raise TemplateNotFoundError(
            'File %s not found in dirs: %s' % (repr(file_name), repr(search_dirs)))
    return path
Return the path to a template with the given file name .
18,246
def find_name(self, template_name, search_dirs):
    """Return the path to the template called ``template_name``."""
    file_name = self.make_file_name(template_name)
    return self._find_path_required(search_dirs, file_name)
Return the path to a template with the given name .
18,247
def find_object(self, obj, search_dirs, file_name=None):
    """Return the path to the template associated with ``obj``.

    The directory containing obj's defining module, when known, is
    searched before ``search_dirs``.
    """
    if file_name is None:
        template_name = self.make_template_name(obj)
        file_name = self.make_file_name(template_name)
    dir_path = self.get_object_directory(obj)
    if dir_path is not None:
        search_dirs = [dir_path] + search_dirs
    return self._find_path_required(search_dirs, file_name)
Return the path to a template associated with the given object .
18,248
def unicode(self, s, encoding=None):
    """Return ``s`` as a plain unicode string.

    Unicode input is copied into a plain ``unicode`` (dropping any
    subclass); byte strings are decoded via ``to_unicode``.
    """
    if isinstance(s, unicode):
        return unicode(s)
    return self.to_unicode(s, encoding)
Convert a string to unicode using the given encoding and return it .
18,249
def read(self, path, encoding=None):
    """Read the template file at ``path`` and return it as unicode,
    decoding with ``file_encoding`` unless ``encoding`` is given."""
    raw = common.read(path)
    if encoding is None:
        encoding = self.file_encoding
    return self.unicode(raw, encoding)
Read the template at the given path and return it as a unicode string .
18,250
def load_file(self, file_name):
    """Locate ``file_name`` in the search dirs and return its contents."""
    path = self._make_locator().find_file(file_name, self.search_dirs)
    return self.read(path)
Find and return the template with the given file name .
18,251
def load_name(self, name):
    """Locate the template named ``name`` and return its contents."""
    path = self._make_locator().find_name(name, self.search_dirs)
    return self.read(path)
Find and return the template with the given template name .
18,252
def load_object(self, obj):
    """Locate the template associated with ``obj`` and return its contents."""
    path = self._make_locator().find_object(obj, self.search_dirs)
    return self.read(path)
Find and return the template associated to the given object .
18,253
def parse(template, delimiters=None):
    """Parse a unicode template string into a ParsedTemplate.

    :raises Exception: when ``template`` is not exactly ``unicode``.
    """
    if type(template) is not unicode:
        raise Exception("Template is not unicode: %s" % type(template))
    return _Parser(delimiters).parse(template)
Parse a unicode template string and return a ParsedTemplate instance .
18,254
def parse(self, template):
    """Parse a template string and return a ParsedTemplate instance.

    Walks the template with the compiled tag regex, maintaining a stack of
    open sections; standalone (non-interpolating) tags swallow their
    surrounding line endings per the mustache spec.
    """
    self._compile_delimiters()
    start_index = 0
    content_end_index, parsed_section, section_key = None, None, None
    parsed_template = ParsedTemplate()
    states = []  # one (tag_type, end_index, section_key, template) per open section
    while True:
        match = self._template_re.search(template, start_index)
        if match is None:
            break
        match_index = match.start()
        end_index = match.end()
        matches = match.groupdict()
        # Normalize delimiter-change ('{{=...=}}') and raw ('{{{...}}}') tags.
        if matches['change'] is not None:
            matches.update(tag='=', tag_key=matches['delims'])
        elif matches['raw'] is not None:
            matches.update(tag='&', tag_key=matches['raw_name'])
        tag_type = matches['tag']
        tag_key = matches['tag_key']
        leading_whitespace = matches['whitespace']
        # Standalone-tag detection: does the tag occupy a whole line?
        did_tag_begin_line = match_index == 0 or template[match_index - 1] in END_OF_LINE_CHARACTERS
        did_tag_end_line = end_index == len(template) or template[end_index] in END_OF_LINE_CHARACTERS
        is_tag_interpolating = tag_type in ['', '&']
        if did_tag_begin_line and did_tag_end_line and not is_tag_interpolating:
            # Standalone non-interpolating tag: consume the trailing \r\n.
            if end_index < len(template):
                end_index += template[end_index] == '\r' and 1 or 0
            if end_index < len(template):
                end_index += template[end_index] == '\n' and 1 or 0
        elif leading_whitespace:
            # Keep the whitespace as literal content before the tag.
            match_index += len(leading_whitespace)
            leading_whitespace = ''
        if start_index != match_index:
            parsed_template.add(template[start_index:match_index])
        start_index = end_index
        if tag_type in ('#', '^'):
            # Section open: push current state and start a fresh subtree.
            state = (tag_type, end_index, section_key, parsed_template)
            states.append(state)
            section_key, parsed_template = tag_key, ParsedTemplate()
            continue
        if tag_type == '/':
            if tag_key != section_key:
                raise ParsingError("Section end tag mismatch: %s != %s" % (tag_key, section_key))
            # Section close: pop the enclosing state and wrap the subtree.
            parsed_section = parsed_template
            (tag_type, section_start_index, section_key, parsed_template) = states.pop()
            node = self._make_section_node(template, tag_type, tag_key, parsed_section, section_start_index, match_index)
        else:
            node = self._make_interpolation_node(tag_type, tag_key, leading_whitespace)
        parsed_template.add(node)
    # Trailing literal content after the last tag.
    if start_index != len(template):
        parsed_template.add(template[start_index:])
    return parsed_template
Parse a template string starting at some index .
18,255
def _make_interpolation_node(self, tag_type, tag_key, leading_whitespace):
    """Create a non-section parse-tree node for ``tag_type``.

    Delimiter-change tags also update the parser's active delimiters.

    :raises Exception: for an unknown tag symbol.
    """
    if tag_type == '!':
        return _CommentNode()
    if tag_type == '=':
        delimiters = tag_key.split()
        self._change_delimiters(delimiters)
        return _ChangeNode(delimiters)
    if tag_type == '':
        return _EscapeNode(tag_key)
    if tag_type == '&':
        return _LiteralNode(tag_key)
    if tag_type == '>':
        return _PartialNode(tag_key, leading_whitespace)
    raise Exception("Invalid symbol for interpolation tag: %s" % repr(tag_type))
Create and return a non - section node for the parse tree .
18,256
def _make_section_node(self, template, tag_type, tag_key, parsed_section,
                       section_start_index, section_end_index):
    """Create a section ('#') or inverted ('^') parse-tree node.

    :raises Exception: for an unknown section tag symbol.
    """
    if tag_type == '#':
        return _SectionNode(tag_key, parsed_section, self._delimiters,
                            template, section_start_index, section_end_index)
    if tag_type == '^':
        return _InvertedNode(tag_key, parsed_section)
    raise Exception("Invalid symbol for section tag: %s" % repr(tag_type))
Create and return a section node for the parse tree .
18,257
def _find(self, spec):
    """Return the template path for a TemplateSpec instance.

    An explicit ``template_path`` wins; otherwise the path is located
    relative to the spec's class module or via the loader's search dirs.
    """
    if spec.template_path is not None:
        return spec.template_path
    dir_path, file_name = self._find_relative(spec)
    locator = self.loader._make_locator()
    if dir_path is None:
        return locator.find_object(spec, self.loader.search_dirs,
                                   file_name=file_name)
    obj_dir = locator.get_object_directory(spec)
    return os.path.join(obj_dir, dir_path, file_name)
Find and return the path to the template associated to the instance .
18,258
def load(self, spec):
    """Return the template for a TemplateSpec, preferring its inline
    ``template`` attribute over loading from disk."""
    if spec.template is not None:
        return self.loader.unicode(spec.template, spec.template_encoding)
    return self.loader.read(self._find(spec), spec.template_encoding)
Find and return the template associated to a TemplateSpec instance .
18,259
def fetch_string(self, context, name):
    """Resolve ``name`` from ``context`` and coerce the result to a string.

    Callables are invoked and their result rendered as a template;
    non-strings are converted via ``to_str``.
    """
    val = self.resolve_context(context, name)
    if callable(val):
        return self._render_value(val(), context)
    if is_string(val):
        return val
    return self.to_str(val)
Get a value from the given context as a basestring instance .
18,260
def fetch_section_data(self, context, name):
    """Resolve a section value and normalize it to a list.

    Falsy values become []; strings, dicts and non-iterables are wrapped
    in a single-item list; other iterables pass through unchanged.
    """
    data = self.resolve_context(context, name)
    if not data:
        return []
    try:
        iter(data)
    except TypeError:
        return [data]
    if is_string(data) or isinstance(data, dict):
        return [data]
    return data
Fetch the value of a section as a list .
18,261
def _render_value(self, val, context, delimiters=None):
    """Render an arbitrary value as a template string.

    Non-strings are coerced via ``to_str``; non-unicode strings are
    promoted via ``literal`` before rendering.
    """
    if not is_string(val):
        val = self.to_str(val)
    if type(val) is not unicode:
        val = self.literal(val)
    return self.render(val, context, delimiters)
Render an arbitrary value .
18,262
def render(self, template, context_stack, delimiters=None):
    """Parse a unicode template string and render it against the stack."""
    return parse(template, delimiters).render(self, context_stack)
Render a unicode template string and return as unicode .
18,263
def evaluate(self, script):
    """Run ``script`` in the page frame via whichever web backend is active."""
    if WEBENGINE:
        runner = self.dom.runJavaScript
    else:
        runner = self.dom.evaluateJavaScript
    return runner("{}".format(script))
Evaluate script in page frame .
18,264
def set_input_value(self, selector, value):
    """Set the value attribute of the element matched by ``selector``.

    NOTE(review): selector and value are interpolated into the script
    without escaping; callers must not pass untrusted input.
    """
    script = ('document.querySelector("%s").setAttribute("value", "%s")'
              % (selector, value))
    self.evaluate(script)
Set the value of the input matched by given selector .
18,265
def main():
    """Manual smoke test: show a NotebookClient pointed at google.com."""
    from spyder.utils.qthelpers import qapplication
    app = qapplication()
    client = NotebookClient(plugin=None, name='')
    client.show()
    client.set_url('http://google.com')
    sys.exit(app.exec_())
Simple test .
18,266
def show_kernel_error(self, error):
    """Render a kernel-startup ``error`` in the client's HTML view."""
    eol = sourcecode.get_eol_chars(error)
    if eol:
        error = error.replace(eol, '<br>')
    # Non-breaking hyphen entity keeps tracebacks from wrapping mid-token.
    error = error.replace('-', '&#8209')
    message = _("An error occurred while starting the kernel")
    page = Template(KERNEL_ERROR).substitute(
        css_path=CSS_PATH, message=message, error=error)
    self.setHtml(page)
Show kernel initialization errors .
18,267
def show_loading_page(self):
    """Display the loading animation while the kernel starts."""
    loading_img = get_image_path('loading_sprites.png')
    # The HTML view needs forward slashes even on Windows.
    if os.name == 'nt':
        loading_img = loading_img.replace('\\', '/')
    message = _("Connecting to kernel...")
    page = Template(LOADING).substitute(
        css_path=CSS_PATH, loading_img=loading_img, message=message)
    self.setHtml(page)
Show a loading animation while the kernel is starting .
18,268
def register(self, server_info):
    """Compute path/url/token attributes from notebook ``server_info``."""
    self.path = os.path.relpath(self.filename,
                                start=server_info['notebook_dir'])
    # URLs always use forward slashes, even on Windows.
    if os.name == 'nt':
        self.path = self.path.replace('\\', '/')
    self.server_url = server_info['url']
    self.token = server_info['token']
    url = url_path_join(self.server_url, 'notebooks', url_escape(self.path))
    self.file_url = self.add_token(url)
Register attributes that can be computed with the server info .
18,269
def go_to(self, url_or_text):
    """Load the given page url (plain string or QUrl) in the notebook widget."""
    url = QUrl(url_or_text) if is_text_string(url_or_text) else url_or_text
    self.notebookwidget.load(url)
Go to page url .
18,270
def get_short_name(self):
    """Return the notebook's base name, elided when longer than 20 chars."""
    sname = osp.splitext(osp.basename(self.filename))[0]
    if len(sname) > 20:
        metrics = QFontMetrics(QFont())
        sname = metrics.elidedText(sname, Qt.ElideRight, 110)
    return sname
Get a short name for the notebook .
18,271
def get_kernel_id(self):
    """Return the kernel id serving this notebook, or None if not found."""
    sessions_req = requests.get(self.get_session_url()).content.decode()
    sessions = json.loads(sessions_req)
    # Session paths use forward slashes, even on Windows.
    path = self.path.replace('\\', '/') if os.name == 'nt' else self.path
    for session in sessions:
        notebook_path = session.get('notebook', {}).get('path')
        if notebook_path is not None and notebook_path == path:
            return session['kernel']['id']
Get the kernel id of the client .
18,272
def shutdown_kernel(self):
    """Ask the notebook server to shut down this client's kernel,
    warning the user when the server refuses."""
    kernel_id = self.get_kernel_id()
    if not kernel_id:
        return
    delete_url = self.add_token(
        url_path_join(self.server_url, 'api/kernels/', kernel_id))
    response = requests.delete(delete_url)
    if response.status_code != 204:
        QMessageBox.warning(
            self, _("Server error"),
            _("The Jupyter Notebook server "
              "failed to shutdown the kernel "
              "associated with this notebook. "
              "If you want to shut it down, "
              "you'll have to close Spyder."))
Shutdown the kernel of the client .
18,273
def register_plugin(self):
    """Register this plugin in Spyder's main window."""
    self.focus_changed.connect(self.main.plugin_focus_changed)
    self.main.add_dockwidget(self)
    self.ipyconsole = self.main.ipyconsole
    self.create_new_client(give_focus=False)
    icon_path = os.path.join(PACKAGE_PATH, 'images', 'icon.svg')
    self.main.add_to_fileswitcher(self, self.tabwidget, self.clients,
                                  QIcon(icon_path))
    self.recent_notebook_menu.aboutToShow.connect(self.setup_menu_actions)
Register plugin in Spyder s main window .
18,274
def check_compatibility(self):
    """Return ``(ok, message)``; the plugin requires Qt5/PyQt5."""
    if PYQT4 or PYSIDE:
        message = _("You are working with Qt4 and in order to use this "
                    "plugin you need to have Qt5.<br><br>"
                    "Please update your Qt and/or PyQt packages to "
                    "meet this requirement.")
        return False, message
    return True, ''
Check compatibility for PyQt and QtWebEngine .
18,275
def update_notebook_actions(self):
    """Update actions of the recent notebooks menu."""
    self.clear_recent_notebooks_action.setEnabled(bool(self.recent_notebooks))
    client = self.get_current_client()
    # Saving and console actions only make sense for a real notebook,
    # not for the welcome page.
    enable = bool(client) and client.get_filename() != WELCOME
    self.save_as_action.setEnabled(enable)
    self.open_console_action.setEnabled(enable)
    self.options_menu.clear()
    add_actions(self.options_menu, self.menu_actions)
Update actions of the recent notebooks menu .
18,276
def add_to_recent(self, notebook):
    """Record *notebook* as the most recent entry, keeping at most 20."""
    if notebook in self.recent_notebooks:
        return
    # Newest first; drop anything beyond the 20-entry cap.
    self.recent_notebooks = ([notebook] + self.recent_notebooks)[:20]
Add an entry to recent notebooks . We only maintain the list of the 20 most recent notebooks .
18,277
def get_focus_client(self):
    """Return the notebook client that currently has keyboard focus, if any."""
    focused = QApplication.focusWidget()
    for client in self.get_clients():
        # Focus may sit on the client container or its inner web widget.
        if focused is client or focused is client.notebookwidget:
            return client
Return current notebook with focus if any .
18,278
def get_current_client(self):
    """Return the currently selected notebook client, or None."""
    try:
        current = self.tabwidget.currentWidget()
    except AttributeError:
        # The tab widget may not exist yet during startup/teardown.
        return None
    if current is not None:
        return current
Return the currently selected notebook .
18,279
def get_current_client_name(self, short=False):
    """Return the current client's file name (short form if *short* is True)."""
    client = self.get_current_client()
    if not client:
        return None
    return client.get_short_name() if short else client.get_filename()
Get the current client name .
18,280
def create_new_client(self, filename=None, give_focus=True):
    """Create a new notebook or load a pre-existing one.

    NOTE(review): give_focus is currently unused in this body — confirm
    whether it was meant to be forwarded somewhere.
    """
    # With no filename, create an empty "untitledN.ipynb" in the
    # temporary notebooks directory.
    if not filename:
        if not osp.isdir(NOTEBOOK_TMPDIR):
            os.makedirs(NOTEBOOK_TMPDIR)
        nb_name = 'untitled' + str(self.untitled_num) + '.ipynb'
        filename = osp.join(NOTEBOOK_TMPDIR, nb_name)
        nb_contents = nbformat.v4.new_notebook()
        nbformat.write(nb_contents, filename)
        self.untitled_num += 1
    if not self.testing:
        # Persist Spyder's PYTHONPATH so the notebook kernel can see it.
        CONF.set('main', 'spyder_pythonpath',
                 self.main.get_spyder_pythonpath())
    try:
        server_info = nbopen(filename)
    except (subprocess.CalledProcessError, NBServerError):
        QMessageBox.critical(
            self,
            _("Server error"),
            _("The Jupyter Notebook server failed to start or it is "
              "taking too much time to do it. Please start it in a "
              "system terminal with the command 'jupyter notebook' to "
              "check for errors."))
        # Roll back the untitled counter and fall back to the welcome tab.
        self.untitled_num -= 1
        self.create_welcome_client()
        return
    welcome_client = self.create_welcome_client()
    client = NotebookClient(self, filename)
    self.add_tab(client)
    # Only user notebooks (not temporary untitled ones) go into "recent".
    if NOTEBOOK_TMPDIR not in filename:
        self.add_to_recent(filename)
        self.setup_menu_actions()
    client.register(server_info)
    client.load_notebook()
    if welcome_client and not self.testing:
        self.tabwidget.setCurrentIndex(0)
Create a new notebook or load a pre - existing one .
18,281
def create_welcome_client(self):
    """Create a welcome client with some instructions.

    Only created when no tabs exist yet. Returns the new client, or None
    if a tab is already open.
    """
    if self.tabwidget.count() == 0:
        # Use a context manager so the file handle is closed promptly;
        # the original open(WELCOME).read() relied on garbage collection
        # to close the file.
        with open(WELCOME) as welcome_file:
            welcome = welcome_file.read()
        client = NotebookClient(self, WELCOME, ini_message=welcome)
        self.add_tab(client)
        return client
Create a welcome client with some instructions .
18,282
def save_as(self, name=None, close=False):
    """Save the current notebook under a new name and reopen it."""
    client = self.get_current_client()
    client.save()
    source_path = client.get_filename()
    # Suggest either the caller-provided name or the current base name.
    suggested = name if name else osp.basename(source_path)
    filename, _selfilter = getsavefilename(self, _("Save notebook"),
                                           suggested, FILES_FILTER)
    if not filename:
        return
    # Copy the notebook contents to the new location, then reopen it.
    contents = nbformat.read(source_path, as_version=4)
    nbformat.write(contents, filename)
    if not close:
        self.close_client(save=True)
    self.create_new_client(filename=filename)
Save notebook as .
18,283
def open_notebook(self, filenames=None):
    """Open one or more notebooks, prompting with a file dialog if needed."""
    if not filenames:
        filenames, _selfilter = getopenfilenames(self, _("Open notebook"),
                                                 '', FILES_FILTER)
    # An empty/cancelled selection opens nothing.
    for filename in filenames or []:
        self.create_new_client(filename=filename)
Open a notebook from file .
18,284
def open_console(self, client=None):
    """Open an IPython console for *client* (default: the current one)."""
    if not client:
        client = self.get_current_client()
    if self.ipyconsole is None:
        return
    kernel_id = client.get_kernel_id()
    if not kernel_id:
        QMessageBox.critical(
            self,
            _('Error opening console'),
            _('There is no kernel associated to this notebook.'))
        return
    self.ipyconsole._create_client_for_kernel(kernel_id, None, None, None)
    # Lock the console tab's name to the notebook's short name.
    ipyclient = self.ipyconsole.get_current_client()
    ipyclient.allow_rename = False
    self.ipyconsole.rename_client_tab(ipyclient, client.get_short_name())
Open an IPython console for the given client or the current one .
18,285
def add_tab(self, widget):
    """Append *widget* as a new tab, show the pane, and give it focus."""
    self.clients.append(widget)
    tabs = self.tabwidget
    index = tabs.addTab(widget, widget.get_short_name())
    tabs.setCurrentIndex(index)
    tabs.setTabToolTip(index, widget.get_filename())
    if self.dockwidget and not self.ismaximized:
        # Make the plugin pane visible and bring it to the front.
        self.dockwidget.setVisible(True)
        self.dockwidget.raise_()
    self.activateWindow()
    widget.notebookwidget.setFocus()
Add tab .
18,286
def set_stack_index(self, index, instance):
    """Switch to tab *index*, but only when the request targets this plugin."""
    if instance != self:
        return
    self.tabwidget.setCurrentIndex(index)
Set the index of the current notebook .
18,287
def get_version(module='spyder_notebook'):
    """Read the package version from *module*/_version.py.

    Parses the VERSION_INFO tuple literally (without importing the module)
    and joins it into a dotted string, e.g. "0.1.4".

    Raises:
        RuntimeError: if no VERSION_INFO line is found (previously this
            fell through to a confusing bare NameError on ``version``).
    """
    with open(os.path.join(HERE, module, '_version.py'), 'r') as f:
        data = f.read()
    for line in data.split('\n'):
        if line.startswith('VERSION_INFO'):
            # literal_eval parses the tuple safely (no code execution).
            version_tuple = ast.literal_eval(line.split('=')[-1].strip())
            return '.'.join(map(str, version_tuple))
    raise RuntimeError(
        'VERSION_INFO not found in {}/_version.py'.format(module))
Get version .
18,288
def find_best_server(filename):
    """Return the running server whose root dir best matches *filename*.

    "Best" is the server whose notebook_dir is the longest prefix of the
    file's path; None if no running server contains it.
    """
    # NOTE(review): plain startswith() can match across path-component
    # boundaries (e.g. '/home/ab' vs root '/home/a') — confirm acceptable.
    candidates = [info for info in notebookapp.list_running_servers()
                  if filename.startswith(info['notebook_dir'])]
    if not candidates:
        return None
    return max(candidates, key=lambda info: len(info['notebook_dir']))
Find the best server to open a notebook with .
18,289
def nbopen(filename):
    """Open a notebook using the best available server.

    Reuses a running Jupyter server whose root contains *filename*; if
    none exists, spawns a new one and waits (up to ~25 s) for it to show
    up in the running-server list.

    Raises:
        NBServerError: if the new server never becomes visible.
    """
    filename = osp.abspath(filename)
    home_dir = get_home_dir()
    server_info = find_best_server(filename)
    if server_info is not None:
        print("Using existing server at", server_info['notebook_dir'])
        return server_info
    else:
        # Serve from the home dir when possible so sibling notebooks are
        # reachable; otherwise serve from the file's own directory.
        if filename.startswith(home_dir):
            nbdir = home_dir
        else:
            nbdir = osp.dirname(filename)
        print("Starting new server")
        command = [sys.executable, '-m', 'notebook', '--no-browser',
                   '--notebook-dir={}'.format(nbdir),
                   '--NotebookApp.password=',
                   "--KernelSpecManager.kernel_spec_class='{}'".format(
                       KERNELSPEC)]
        # 0x08000000 is Windows CREATE_NO_WINDOW — presumably to keep the
        # server from opening a console window.
        if os.name == 'nt':
            creation_flag = 0x08000000
        else:
            creation_flag = 0
        if DEV:
            # In development, make the in-repo spyder importable by the server.
            env = os.environ.copy()
            env["PYTHONPATH"] = osp.dirname(get_module_path('spyder'))
            proc = subprocess.Popen(command, creationflags=creation_flag,
                                    env=env)
        else:
            proc = subprocess.Popen(command, creationflags=creation_flag)

        # Ensure the server (and any kernels it spawned) die with us.
        def kill_server_and_childs(pid):
            ps_proc = psutil.Process(pid)
            for child in ps_proc.children(recursive=True):
                child.kill()
            ps_proc.kill()
        atexit.register(kill_server_and_childs, proc.pid)

        # Poll for the new server: 100 tries x 0.25 s = 25 s max.
        for _x in range(100):
            server_info = find_best_server(filename)
            if server_info is not None:
                break
            else:
                time.sleep(0.25)
        if server_info is None:
            raise NBServerError()
        return server_info
Open a notebook using the best available server .
18,290
def profile(*args, **kwargs):
    """Decorator factory that wraps a Flask endpoint for measurement.

    *args/**kwargs are accepted for call-style compatibility but unused.
    """
    if not _is_initialized():
        raise Exception(
            "before measuring anything, you need to call init_app()")
    # Applying the decorator simply wraps the endpoint function.
    return wrapHttpEndpoint
HTTP endpoint decorator
18,291
def registerInternalRouters(app):
    """Register the flask-profiler dashboard and its JSON API on *app*.

    These are the endpoints used to display measurements in the
    flask-profiler dashboard, all guarded by HTTP auth.
    """
    urlPath = CONF.get("endpointRoot", "flask-profiler")
    fp = Blueprint('flask-profiler', __name__,
                   url_prefix="/" + urlPath,
                   static_folder="static/dist/",
                   static_url_path='/static/dist')

    # NOTE(review): the .format(urlPath) calls below are no-ops (the route
    # strings contain no placeholders) — left as-is.
    @fp.route("/".format(urlPath))
    @auth.login_required
    def index():
        # Single-page dashboard app.
        return fp.send_static_file("index.html")

    @fp.route("/api/measurements/".format(urlPath))
    @auth.login_required
    def filterMeasurements():
        args = dict(request.args.items())
        measurements = collection.filter(args)
        return jsonify({"measurements": list(measurements)})

    @fp.route("/api/measurements/grouped".format(urlPath))
    @auth.login_required
    def getMeasurementsSummary():
        args = dict(request.args.items())
        measurements = collection.getSummary(args)
        return jsonify({"measurements": list(measurements)})

    @fp.route("/api/measurements/<measurementId>".format(urlPath))
    @auth.login_required
    def getContext(measurementId):
        return jsonify(collection.get(measurementId))

    @fp.route("/api/measurements/timeseries/".format(urlPath))
    @auth.login_required
    def getRequestsTimeseries():
        args = dict(request.args.items())
        return jsonify({"series": collection.getTimeseries(args)})

    @fp.route("/api/measurements/methodDistribution/".format(urlPath))
    @auth.login_required
    def getMethodDistribution():
        args = dict(request.args.items())
        return jsonify(
            {"distribution": collection.getMethodDistribution(args)})

    @fp.route("/db/dumpDatabase")
    @auth.login_required
    def dumpDatabase():
        # Served as a file download of the summarized measurements.
        response = jsonify({"summary": collection.getSummary()})
        response.headers["Content-Disposition"] = \
            "attachment; filename=dump.json"
        return response

    @fp.route("/db/deleteDatabase")
    @auth.login_required
    def deleteDatabase():
        response = jsonify({"status": collection.truncate()})
        return response

    @fp.after_request
    def x_robots_tag_header(response):
        # Keep the profiler UI out of search-engine indexes.
        response.headers['X-Robots-Tag'] = 'noindex, nofollow'
        return response

    app.register_blueprint(fp)
These are the endpoints which are used to display measurements in the flask - profiler dashboard .
18,292
def line_showLog(self):
    """SAS Kernel magic: display the SAS log of the previous submission."""
    if self.kernel.mva is None:
        # No SAS session means there is no log to show.
        print("Can't show log because no session exists")
        return
    return self.kernel.Display(HTML(self.kernel.cachedlog))
SAS Kernel magic to show the SAS log for the previous submitted code . This magic is only available within the SAS Kernel
18,293
def _which_display(self, log: str, output: str) -> HTML:
    """Decide whether to show the LST output or the (colorized) log.

    The log is scanned for ERROR lines; the presence of errors and of
    meaningful LST output determines what the cell displays.
    """
    lines = re.split(r'[\n]\s*', log)
    i = 0
    elog = []
    for line in lines:
        i += 1
        e = []
        if line.startswith('ERROR'):
            logger.debug("In ERROR Condition")
            # Keep a window of ~15 lines before and after each error line.
            e = lines[(max(i - 15, 0)):(min(i + 16, len(lines)))]
        elog = elog + e
    tlog = '\n'.join(elog)
    logger.debug("elog count: " + str(len(elog)))
    logger.debug("tlog: " + str(tlog))
    color_log = highlight(log, SASLogLexer(),
                          HtmlFormatter(full=True, style=SASLogStyle,
                                        lineseparator="<br>"))
    # Cache the colorized log so the show-log magic can redisplay it later.
    self.cachedlog = color_log
    if len(elog) == 0 and len(output) > self.lst_len:
        # No errors and real LST output: show the output.
        debug1 = 1
        logger.debug("DEBUG1: " + str(debug1) + " no error and LST output ")
        return HTML(output)
    elif len(elog) == 0 and len(output) <= self.lst_len:
        # No errors and no meaningful LST: show the log.
        debug1 = 2
        logger.debug("DEBUG1: " + str(debug1) + " no error and no LST")
        return HTML(color_log)
    elif len(elog) > 0 and len(output) <= self.lst_len:
        # Errors and no LST: show the log.
        debug1 = 3
        logger.debug("DEBUG1: " + str(debug1) + " error and no LST")
        return HTML(color_log)
    else:
        # Errors and LST: show both.
        debug1 = 4
        logger.debug("DEBUG1: " + str(debug1) + " errors and LST")
        return HTML(color_log + output)
Determines if the log or lst should be returned as the results for the cell based on parsing the log looking for errors and the presence of lst output .
18,294
def do_execute_direct(self, code: str, silent: bool = False) -> [str, dict]:
    """Take code from the Jupyter cell and submit it to the SAS server.

    Handles the empty-cell fast path, lazy session startup, obfuscated
    code decoding, the show-log sentinel commands, and automatic restart
    when the SAS process dies.
    """
    if not code.strip():
        return {'status': 'ok', 'execution_count': self.execution_count,
                'payload': [], 'user_expressions': {}}
    # Lazily start the SAS session on first use.
    if self.mva is None:
        self._allow_stdin = True
        self._start_sas()
    if self.lst_len < 0:
        self._get_lst_len()
    # Base64-decode code submitted in obfuscated form.
    if code.startswith('Obfuscated SAS Code'):
        logger.debug("decoding string")
        tmp1 = code.split()
        decode = base64.b64decode(tmp1[-1])
        code = decode.decode('utf-8')
    # Normal submission path (not one of the show-log sentinel commands).
    if code.startswith('showSASLog_11092015') == False and code.startswith("CompleteshowSASLog_11092015") == False:
        logger.debug("code type: " + str(type(code)))
        logger.debug("code length: " + str(len(code)))
        logger.debug("code string: " + code)
        if code.startswith("/*SASKernelTest*/"):
            res = self.mva.submit(code, "text")
        else:
            res = self.mva.submit(code, prompt=self.promptDict)
            self.promptDict = {}
        # A dead SAS process triggers a session restart.
        if res['LOG'].find("SAS process has terminated unexpectedly") > -1:
            print(res['LOG'], '\n' "Restarting SAS session on your behalf")
            self.do_shutdown(True)
            return res['LOG']
        output = res['LST']
        log = res['LOG']
        return self._which_display(log, output)
    # Sentinel: return the complete colorized session log.
    elif code.startswith("CompleteshowSASLog_11092015") == True and code.startswith('showSASLog_11092015') == False:
        full_log = highlight(self.mva.saslog(), SASLogLexer(),
                             HtmlFormatter(full=True, style=SASLogStyle,
                                           lineseparator="<br>",
                                           title="Full SAS Log"))
        return full_log.replace('\n', ' ')
    # Sentinel: return the cached log of the last submission.
    else:
        return self.cachedlog.replace('\n', ' ')
This is the main method that takes code from the Jupyter cell and submits it to the SAS server
18,295
def get_completions(self, info):
    """Get completions from kernel for procs and statements."""
    # Cursor position relative to the current line.
    if info['line_num'] > 1:
        relstart = info['column'] - (info['help_pos'] - info['start'])
    else:
        relstart = info['start']
    seg = info['line'][:relstart]
    # If the token just before the cursor is "proc", complete proc names.
    if relstart > 0 and re.match('(?i)proc', seg.rsplit(None, 1)[-1]):
        potentials = re.findall('(?i)^' + info['obj'] + '.*',
                                self.strproclist, re.MULTILINE)
        return potentials
    else:
        # Otherwise decide whether we are inside a PROC or a DATA step by
        # looking at which keyword occurs last before the cursor.
        lastproc = info['code'].lower()[:info['help_pos']].rfind('proc')
        lastdata = info['code'].lower()[:info['help_pos']].rfind('data ')
        proc = False
        data = False
        if lastproc + lastdata == -2:
            # Neither "proc" nor "data" appears before the cursor.
            pass
        else:
            if lastproc > lastdata:
                proc = True
            else:
                data = True
        if proc:
            # 'p' selects proc-level keywords, 's' statement-level ones,
            # depending on whether the proc statement is still open.
            lastsemi = info['code'].rfind(';')
            mykey = 's'
            if lastproc > lastsemi:
                mykey = 'p'
            procer = re.search('(?i)proc\s\w+', info['code'][lastproc:])
            method = procer.group(0).split(' ')[-1].upper() + mykey
            mylist = self.compglo[method][0]
            potentials = re.findall('(?i)' + info['obj'] + '.+',
                                    '\n'.join(str(x) for x in mylist),
                                    re.MULTILINE)
            return potentials
        elif data:
            lastsemi = info['code'].rfind(';')
            mykey = 's'
            if lastproc > lastsemi:
                mykey = 'p'
            mylist = self.compglo['DATA' + mykey][0]
            potentials = re.findall('(?i)^' + info['obj'] + '.*',
                                    '\n'.join(str(x) for x in mylist),
                                    re.MULTILINE)
            return potentials
        else:
            # No context: return an empty suggestion list.
            potentials = ['']
            return potentials
Get completions from kernel for procs and statements .
18,296
def do_shutdown(self, restart):
    """Shut down the kernel gracefully, persisting the history cache."""
    print("in shutdown function")
    if self.hist_file:
        # Keep only the most recent max_hist_cache entries.
        text = '\n'.join(self.hist_cache[-self.max_hist_cache:])
        with open(self.hist_file, 'wb') as fid:
            fid.write(text.encode('utf-8'))
    if self.mva:
        self.mva._endsas()
        self.mva = None
    if restart:
        self.Print("Restarting kernel...")
        self.reload_magics()
        self.restart_kernel()
        self.Print("Done!")
    return {'status': 'ok', 'restart': restart}
Shut down the app gracefully saving history .
18,297
def get_globals(self):
    """Get enriched globals for evaluation in the debugger context."""
    # Start from the shell's initial globals or the paused frame's globals.
    if self.shell:
        globals_ = dict(_initial_globals)
    else:
        globals_ = dict(self.current_frame.f_globals)
    # "_" holds the last evaluated object, like in the REPL.
    globals_['_'] = self.db.last_obj
    if cut is not None:
        globals_.setdefault('cut', cut)
    # NOTE(review): the ' wdb' key (leading space) looks garbled by
    # extraction — confirm the intended name against upstream wdb sources.
    globals_[' wdb'] = self.db
    globals_.update(self.current_locals)
    for var, val in self.db.extra_vars.items():
        globals_[var] = val
    self.db.extra_items = {}
    return globals_
Get enriched globals
18,298
def handle_exc(self):
    """Return a formatted HTML link for the current exception (for wdb.js)."""
    exc_info = sys.exc_info()
    exc_type, exc_value = exc_info[:2]
    # Cache the full exc_info so the client can later inspect it by id.
    self.db.obj_cache[id(exc_info)] = exc_info
    return '<a href="%d" class="inspect">%s: %s</a>' % (
        id(exc_info), escape(exc_type.__name__), escape(repr(exc_value)))
Return a formated exception traceback for wdb . js use
18,299
def fail(self, cmd, title=None, message=None):
    """Send a captured failure back to the client as an Echo message."""
    if message is None:
        # No explicit message: format the in-flight exception instead.
        body = self.handle_exc()
    else:
        body = escape(message)
    heading = escape(title or '%s failed' % cmd)
    self.db.send('Echo|%s' % dump({'for': heading, 'val': body}))
Send back captured exceptions