idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
240,000
def magicrun(
    text,
    shell,
    prompt_template="default",
    aliases=None,
    envvars=None,
    extra_commands=None,
    speed=1,
    test_mode=False,
    commentecho=False,
):
    """Echo *text* as keyboard characters are pressed, wait for a RETURN
    keypress, then run *text* in a shell context.

    Returns the "goto regular type" flag from ``magictype``; when that
    flag is truthy the command is NOT executed.
    NOTE(review): ``commentecho`` is accepted but never used here —
    confirm whether callers rely on it.
    """
    # magictype does the per-character echo and waits for RETURN.
    goto_regulartype = magictype(text, prompt_template, speed)
    if goto_regulartype:
        return goto_regulartype
    run_command(
        text,
        shell,
        aliases=aliases,
        envvars=envvars,
        extra_commands=extra_commands,
        test_mode=test_mode,
    )
    return goto_regulartype
Echo out each character in text as keyboard characters are pressed, wait for a RETURN keypress, then run the text in a shell context.
130
28
240,001
def run_commands(self):
    """Automatically type and execute all queued commands.

    Mimics an interactive console: uses ``sys.ps1``/``sys.ps2`` prompts,
    replays each command through ``magictype``, then pushes it to the
    console; exits the process on Ctrl-C.
    """
    more = 0
    prompt = sys.ps1
    for command in self.commands:
        try:
            # Continuation lines (``more`` truthy) use the secondary prompt.
            prompt = sys.ps2 if more else sys.ps1
            try:
                magictype(command, prompt_template=prompt, speed=self.speed)
            except EOFError:
                self.write("\n")
                break
            else:
                if command.strip() == "exit()":
                    return
                more = self.push(command)
        except KeyboardInterrupt:
            self.write("\nKeyboardInterrupt\n")
            self.resetbuffer()
            more = 0
            sys.exit(1)
    # Show a final prompt and wait for RETURN before finishing.
    echo_prompt(prompt)
    wait_for(RETURNS)
Automatically type and execute all commands .
146
8
240,002
def interact(self, banner=None):
    """Run an interactive session.

    Ensures ``sys.ps1``/``sys.ps2`` exist (installing the standard
    defaults when missing), prints the banner, then replays the queued
    commands.
    """
    try:
        sys.ps1
    except AttributeError:
        # Standard primary prompt is ">>> " with a trailing space —
        # previously set to ">>>", inconsistent with sys.ps2 below.
        sys.ps1 = ">>> "
    try:
        sys.ps2
    except AttributeError:
        sys.ps2 = "... "
    cprt = (
        'Type "help", "copyright", "credits" or "license" for '
        "more information."
    )
    if banner is None:
        self.write(
            "Python %s on %s\n%s\n" % (sys.version, sys.platform, cprt)
        )
    else:
        self.write("%s\n" % str(banner))
    self.run_commands()
Run an interactive session .
139
5
240,003
def start_ipython_player(commands, speed=1):
    """Launch a new magic IPython shell that replays *commands*.

    The commands and typing speed are stashed on the application class
    before the singleton instance is launched.
    """
    app = PlayerTerminalIPythonApp
    app.commands = commands
    app.speed = speed
    app.launch_instance()
Starts a new magic IPython shell .
47
9
240,004
def on_feed_key(self, key_press):
    """Handle the magictyping when a key is pressed.

    Every real keystroke advances through the scripted command by
    ``self.speed`` characters; Escape/Ctrl-C aborts, Backspace moves the
    cursor back, and Enter (only once the command is fully typed)
    advances to the next command.
    """
    if key_press.key in {Keys.Escape, Keys.ControlC}:
        echo(carriage_return=True)
        raise Abort()
    if key_press.key == Keys.Backspace:
        if self.current_command_pos > 0:
            self.current_command_pos -= 1
        return key_press
    ret = None
    # CPR (cursor position report) responses are terminal chatter, not typing.
    if key_press.key != Keys.CPRResponse:
        if self.current_command_pos < len(self.current_command):
            # Still typing: emit the next scripted character(s).
            current_key = self.current_command_key
            ret = KeyPress(current_key)
            # Advance by speed, clamped to the remaining characters.
            increment = min([self.speed,
                             len(self.current_command) - self.current_command_pos])
            self.current_command_pos += increment
        else:
            # Command is finished, wait for Enter
            if key_press.key != Keys.Enter:
                return None
            self.current_command_index += 1
            self.current_command_pos = 0
            ret = key_press
    return ret
Handles the magictyping when a key is pressed
218
12
240,005
def init_shell(self):
    """Initialize the InteractiveShell instance with the player's
    commands, speed, and configuration directories."""
    self.shell = PlayerTerminalInteractiveShell.instance(
        commands=self.commands,
        speed=self.speed,
        parent=self,
        display_banner=False,
        profile_dir=self.profile_dir,
        ipython_dir=self.ipython_dir,
        user_ns=self.user_ns,
    )
    # Register ourselves so config changes propagate to this app too.
    self.shell.configurables.append(self)
initialize the InteractiveShell instance
89
6
240,006
def raw_mode():
    """Enables terminal raw mode during the context.

    Generator body for a context manager: yields once with the
    controlling terminal in raw mode, then restores the saved settings.
    """
    if WIN:
        # No implementation for windows yet.
        yield  # needed for the empty context manager to work
    else:
        # imports are placed here because this will fail under Windows
        import tty
        import termios

        if not isatty(sys.stdin):
            # stdin is redirected; fall back to the controlling terminal.
            f = open("/dev/tty")
            fd = f.fileno()
        else:
            fd = sys.stdin.fileno()
            f = None
        # Stays None when the terminal state cannot be read; previously
        # that case raised NameError in the finally block below.
        old_settings = None
        try:
            old_settings = termios.tcgetattr(fd)
            tty.setraw(fd)
        except termios.error:
            pass
        try:
            yield
        finally:
            # this block sets the terminal to sane mode again,
            # also in case an exception occured in the context manager
            try:
                if old_settings is not None:
                    termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
                # sys.stdout.flush()  # not needed I think.
                if f is not None:
                    f.close()
            except termios.error:
                pass
Enables terminal raw mode during the context .
210
9
240,007
def int_to_string(number, alphabet, padding=None):
    """Convert a non-negative integer to a string using *alphabet*.

    The output has the most significant digit first.  When *padding* is
    given, the result is left-padded with the zero symbol
    (``alphabet[0]``) up to that length.

    Robustness fix: a *number* of 0 previously produced an empty string
    unless padding was supplied; it now yields the zero symbol.
    """
    output = ""
    alpha_len = len(alphabet)
    while number:
        number, digit = divmod(number, alpha_len)
        output += alphabet[digit]
    if not output:
        # number == 0: represent it explicitly instead of "".
        output = alphabet[0]
    if padding:
        remainder = max(padding - len(output), 0)
        output = output + alphabet[0] * remainder
    return output[::-1]
Convert a number to a string using the given alphabet . The output has the most significant digit first .
83
21
240,008
def string_to_int(string, alphabet):
    """Convert *string* back into an integer using *alphabet*.

    The input is assumed to have the most significant digit first.
    Characters missing from the alphabet raise ValueError (via
    ``index``).
    """
    base = len(alphabet)
    total = 0
    for symbol in string:
        total = total * base + alphabet.index(symbol)
    return total
Convert a string to a number using the given alphabet . The input is assumed to have the most significant digit first .
44
24
240,009
def decode(self, string, legacy=False):
    """Decode a string according to the current alphabet into a UUID.

    Raises ValueError when encountering illegal characters or a
    too-long string.  ``legacy=True`` reverses the input first (old
    least-significant-first encoding order).
    """
    ordered = string[::-1] if legacy else string
    as_int = string_to_int(ordered, self._alphabet)
    return _uu.UUID(int=as_int)
Decode a string according to the current alphabet into a UUID Raises ValueError when encountering illegal characters or a too - long string .
48
28
240,010
def set_alphabet(self, alphabet):
    """Set the alphabet to be used for new UUIDs.

    Duplicate symbols are removed and the remainder sorted so the same
    input always produces the same alphabet.  Raises ValueError when
    fewer than two unique symbols remain.
    """
    # Turn the alphabet into a set and sort it to prevent duplicates
    # and ensure reproducibility.
    new_alphabet = list(sorted(set(alphabet)))
    if len(new_alphabet) > 1:
        self._alphabet = new_alphabet
        self._alpha_len = len(self._alphabet)
    else:
        # Message grammar fixed ("symbols" -> "symbol").
        raise ValueError("Alphabet with more than "
                         "one unique symbol required.")
Set the alphabet to be used for new UUIDs .
103
12
240,011
def encoded_length(self, num_bytes=16):
    """Return the string length of the shortened UUID, i.e. how many
    alphabet symbols are needed to encode *num_bytes* bytes."""
    symbols_per_byte = math.log(256) / math.log(self._alpha_len)
    return int(math.ceil(symbols_per_byte * num_bytes))
Returns the string length of the shortened UUID .
49
10
240,012
def asm_module(exprs, dst_reg, sym_to_reg, triple_or_target=None):
    """Generate an LLVM module for a list of expressions.

    Builds a naked ``__arybo`` function that loads each symbol from its
    register, lowers *exprs* to IR, and stores the result into
    *dst_reg*.  Raises RuntimeError when llvmlite is unavailable.
    NOTE(review): ``target`` is computed but never used here — confirm
    whether ``llvm_get_target`` is called only for validation.
    """
    if not llvmlite_available:
        raise RuntimeError("llvmlite module unavailable! can't assemble...")
    target = llvm_get_target(triple_or_target)
    M = ll.Module()
    fntype = ll.FunctionType(ll.VoidType(), [])
    func = ll.Function(M, fntype, name='__arybo')
    # naked: no prologue/epilogue, so the body is pure assembly output.
    func.attributes.add("naked")
    func.attributes.add("nounwind")
    BB = func.append_basic_block()
    IRB = ll.IRBuilder()
    IRB.position_at_end(BB)
    # Map each symbol to a load from its (name, width) register pair.
    sym_to_value = {sym: IRB.load_reg(IntType(reg[1]), reg[0], reg[0])
                    for sym, reg in six.iteritems(sym_to_reg)}
    ret = to_llvm_ir(exprs, sym_to_value, IRB)
    IRB.store_reg(ret, IntType(dst_reg[1]), dst_reg[0])
    # See https://llvm.org/bugs/show_bug.cgi?id=15806
    IRB.unreachable()
    return M
Generate an LLVM module for a list of expressions
299
11
240,013
def asm_binary(exprs, dst_reg, sym_to_reg, triple_or_target=None):
    """Compile and assemble an expression for a given architecture.

    Returns the raw bytes of the .text section holding the assembled
    ``__arybo`` function.  Raises RuntimeError when llvmlite is missing
    or no text section can be found in the emitted object.
    """
    if not llvmlite_available:
        raise RuntimeError("llvmlite module unavailable! can't assemble...")
    target = llvm_get_target(triple_or_target)
    M = asm_module(exprs, dst_reg, sym_to_reg, target)
    # Use LLVM to compile the '__arybo' function. As the function is naked and
    # is the only, we just got to dump the .text section to get the binary
    # assembly.
    # No need for keystone or whatever hype stuff. llvmlite does the job.
    M = llvm.parse_assembly(str(M))
    M.verify()
    target_machine = target.create_target_machine()
    obj_bin = target_machine.emit_object(M)
    obj = llvm.ObjectFileRef.from_data(obj_bin)
    for s in obj.sections():
        if s.is_text():
            return s.data()
    raise RuntimeError("unable to get the assembled binary!")
Compile and assemble an expression for a given architecture .
252
11
240,014
def expr_contains(e, o):
    """Return True if expression *o* occurs anywhere inside *e*."""
    pending = [e]
    while pending:
        node = pending.pop()
        if o == node:
            return True
        if node.has_args():
            pending.extend(node.args())
    return False
Returns true if o is in e
51
7
240,015
def zext(self, n):
    """Zero-extend the variable to *n* bits.

    *n* must be strictly larger than the current number of bits,
    otherwise a ValueError is raised.
    """
    if n <= self.nbits:
        raise ValueError("n must be > %d bits" % self.nbits)
    mba_ret = self.__new_mba(n)
    ret = mba_ret.from_cst(0)
    # Copy the existing bits; the new upper bits stay zero.
    for i in range(self.nbits):
        ret.vec[i] = self.vec[i]
    return mba_ret.from_vec(ret)
Zero - extend the variable to n bits . n must be strictly larger than the actual number of bits or a ValueError is thrown
101
29
240,016
def sext(self, n):
    """Sign-extend the variable to *n* bits.

    *n* must be strictly larger than the current number of bits,
    otherwise a ValueError is raised.
    """
    if n <= self.nbits:
        raise ValueError("n must be > %d bits" % self.nbits)
    mba_ret = self.__new_mba(n)
    ret = mba_ret.from_cst(0)
    for i in range(self.nbits):
        ret.vec[i] = self.vec[i]
    # Replicate the sign bit into every new upper bit.
    last_bit = self.vec[self.nbits - 1]
    for i in range(self.nbits, n):
        ret.vec[i] = last_bit
    return mba_ret.from_vec(ret)
Sign - extend the variable to n bits . n must be strictly larger than the actual number of bits or a ValueError is thrown
139
29
240,017
def evaluate(self, values):
    """Evaluate the expression with *values*.

    Returns a plain integer when fully resolved, otherwise a new
    symbolic wrapper around the partially-evaluated vector.
    """
    result = self.mba.evaluate(self.vec, values)
    if not isinstance(result, six.integer_types):
        result = self.from_vec(self.mba, result)
    return result
Evaluates the expression to an integer
52
8
240,018
def vectorial_decomp(self, symbols):
    """Compute the vectorial decomposition of the expression according
    to the given symbols.

    *symbols* may be a list of variable wrappers (each with ``.vec``);
    they are flattened into a single Vector first.  A TypeError during
    flattening means *symbols* is already in the expected form, so it
    is passed through unchanged.
    """
    try:
        symbols = [s.vec for s in symbols]
        N = sum(map(lambda s: len(s), symbols))
        symbols_ = Vector(N)
        i = 0
        for v in symbols:
            for s in v:
                symbols_[i] = s
                i += 1
        symbols = symbols_
    except TypeError:
        pass
    return self.mba.vectorial_decomp(symbols, self.vec)
Compute the vectorial decomposition of the expression according to the given symbols .
99
16
240,019
def var(self, name):
    """Return an n-bit named symbolic variable called *name*."""
    variable = self.from_vec(self.var_symbols(name))
    variable.name = name
    return variable
Get an n - bit named symbolic variable
34
8
240,020
def permut2expr(self, P):
    """Convert a substitution table *P* into an arybo application.

    Returns the resulting expression together with the fresh input
    variable ``X`` it is expressed in.  Raises ValueError when *P* has
    more entries than 2**nbits.
    """
    if len(P) > (1 << self.nbits):
        raise ValueError("P must not contain more than %d elements" % (1 << self.nbits))
    X = self.var('X')
    ret = super(MBA, self).permut2expr(P, X.vec)
    return self.from_vec(ret), X
Convert a substitution table into an arybo application
92
11
240,021
def response_hook(self, r, **kwargs):
    """The actual hook handler.

    Retries the request with NTLM authentication on 401 (server auth)
    or 407 (proxy auth) responses when the challenge header names a
    supported auth type; any other response is returned unchanged.
    """
    if r.status_code == 401:
        # Handle server auth.
        www_authenticate = r.headers.get('www-authenticate', '').lower()
        auth_type = _auth_type_from_header(www_authenticate)
        if auth_type is not None:
            return self.retry_using_http_NTLM_auth(
                'www-authenticate',
                'Authorization',
                r,
                auth_type,
                kwargs)
    elif r.status_code == 407:
        # If we didn't have server auth, do proxy auth.
        proxy_authenticate = r.headers.get('proxy-authenticate', '').lower()
        auth_type = _auth_type_from_header(proxy_authenticate)
        if auth_type is not None:
            return self.retry_using_http_NTLM_auth(
                'proxy-authenticate',
                'Proxy-authorization',
                r,
                auth_type,
                kwargs)
    return r
The actual hook handler .
231
5
240,022
def dummy(DF, cols=None):
    """Dummy code select columns of a DataFrame.

    Returns the one-hot (dummy) encoding of *cols* (all columns when
    None), concatenated side by side under a column-name key level.
    """
    selected = DF.columns if cols is None else cols
    dummies = (get_dummies(DF[col]) for col in selected)
    # Bug fix: keys must match the columns actually encoded; previously
    # DF.columns was always used, which mismatched when *cols* was given.
    return concat(dummies, axis=1, keys=selected)
Dummy code select columns of a DataFrame .
59
10
240,023
def cos_r(self, N=None):  # percent=0.9
    """Return the squared cosines for each row.

    Lazily (re)computes the row factor scores ``F`` when missing or
    truncated, then divides the squared scores by each row's squared
    norm (cached on ``self.dr``).
    """
    if not hasattr(self, 'F') or self.F.shape[1] < self.rank:
        self.fs_r(N=self.rank)  # generate F
    self.dr = norm(self.F, axis=1) ** 2
    # cheaper than diag(self.F.dot(self.F.T))?
    return apply_along_axis(lambda _: _ / self.dr, 0, self.F[:, :N] ** 2)
Return the squared cosines for each row .
123
9
240,024
def cos_c(self, N=None):  # percent=0.9,
    """Return the squared cosines for each column.

    Lazily (re)computes the column factor scores ``G`` when missing or
    truncated, then divides the squared scores by each column's squared
    norm (cached on ``self.dc``).
    """
    if not hasattr(self, 'G') or self.G.shape[1] < self.rank:
        self.fs_c(N=self.rank)  # generate
    self.dc = norm(self.G, axis=1) ** 2
    # cheaper than diag(self.G.dot(self.G.T))?
    return apply_along_axis(lambda _: _ / self.dc, 0, self.G[:, :N] ** 2)
Return the squared cosines for each column .
123
9
240,025
def cont_r(self, percent=0.9, N=None):
    """Return the contribution of each row.

    NOTE(review): *percent* is accepted but unused here — confirm intent.
    """
    if not hasattr(self, 'F'):
        self.fs_r(N=self.rank)  # generate F
    # Weight squared scores by row masses, then normalise by eigenvalues.
    return apply_along_axis(
        lambda _: _ / self.L[:N], 1,
        apply_along_axis(lambda _: _ * self.r, 0, self.F[:, :N] ** 2))
Return the contribution of each row .
97
7
240,026
def cont_c(self, percent=0.9, N=None):  # bug? check axis number 0 vs 1 here
    """Return the contribution of each column.

    NOTE(review): *percent* is accepted but unused here — confirm intent.
    """
    if not hasattr(self, 'G'):
        self.fs_c(N=self.rank)  # generate G
    # Weight squared scores by column masses, then normalise by eigenvalues.
    return apply_along_axis(
        lambda _: _ / self.L[:N], 1,
        apply_along_axis(lambda _: _ * self.c, 0, self.G[:, :N] ** 2))
Return the contribution of each column .
107
7
240,027
def fs_r_sup(self, DF, N=None):
    """Find the supplementary row factor scores.

    Raises ValueError when *N* is given but not a positive integer.
    """
    if not hasattr(self, 'G'):
        self.fs_c(N=self.rank)  # generate G
    if N and (not isinstance(N, int) or N <= 0):
        raise ValueError("ncols should be a positive integer.")
    s = -sqrt(self.E) if self.cor else self.s
    N = min(N, self.rank) if N else self.rank
    S_inv = diagsvd(-1 / s[:N], len(self.G.T), N)
    # S = diagsvd(s[:N], len(self.tau), N)
    # Row-normalise the supplementary data to profiles, then project.
    return _mul(DF.div(DF.sum(axis=1), axis=0), self.G, S_inv)[:, :N]
Find the supplementary row factor scores .
196
7
240,028
def fs_c_sup(self, DF, N=None):
    """Find the supplementary column factor scores.

    Raises ValueError when *N* is given but not a positive integer.
    """
    if not hasattr(self, 'F'):
        self.fs_r(N=self.rank)  # generate F
    if N and (not isinstance(N, int) or N <= 0):
        raise ValueError("ncols should be a positive integer.")
    s = -sqrt(self.E) if self.cor else self.s
    N = min(N, self.rank) if N else self.rank
    S_inv = diagsvd(-1 / s[:N], len(self.F.T), N)
    # S = diagsvd(s[:N], len(self.tau), N)
    # Column-normalise the supplementary data to profiles, then project.
    return _mul((DF / DF.sum()).T, self.F, S_inv)[:, :N]
Find the supplementary column factor scores .
190
7
240,029
def data_recognise(self, data=None):
    """Return a unicode string naming the data type of *data*
    (``self.data`` is used when *data* is None/empty)."""
    data = data or self.data
    data_lower = data.lower()
    # Ordered prefix -> type table; first match wins.
    prefix_types = (
        (u"http://", u'url'),
        (u"https://", u'url'),
        (u"mailto:", u'email'),
        (u"matmsg:to:", u'emailmessage'),
        (u"tel:", u'telephone'),
        (u"smsto:", u'sms'),
        (u"mmsto:", u'mms'),
        (u"geo:", u'geo'),
        (u"mebkm:title:", u'bookmark'),
        (u"mecard:", u'phonebook'),
    )
    for prefix, data_type in prefix_types:
        if data_lower.startswith(prefix):
            return data_type
    return u'text'
Returns a unicode string indicating the data type of the data parameter
260
14
240,030
def data_to_string(self):
    """Return a UTF-8 encoded byte string with the QR Code's data.

    Text data is prefixed with the UTF-8 BOM (see FIX-ME below).
    """
    # FIX-ME: if we don't add the BOM_UTF8 char, QtQR doesn't decode
    # correctly; but if we add it, mobile apps don't.-
    # Apparently is a zbar bug.
    if self.data_type == 'text':
        return BOM_UTF8 + self.__class__.data_encode[self.data_type](self.data).encode('utf-8')
    else:
        return self.__class__.data_encode[self.data_type](self.data).encode('utf-8')
Returns a UTF8 string with the QR Code's data
139
11
240,031
def split_six(series=None):
    """Given a Pandas Series, return a domain of values from zero to the
    90% quantile, rounded to the nearest order-of-magnitude integer.
    For example 2100 is rounded to 2000, 2790 to 3000."""
    if pd is None:
        raise ImportError('The Pandas package is required'
                          ' for this functionality')
    if np is None:
        raise ImportError('The NumPy package is required'
                          ' for this functionality')

    def round_to_magnitude(value):
        # Round to the nearest multiple of the value's order of magnitude.
        if value > 0:
            magnitude = pow(10, math.floor(math.log10(value)))
            return round(value / magnitude) * magnitude
        return 0

    quantiles = [0, 50, 75, 85, 90]
    # Some weirdness in series quantiles a la 0.13.
    values = series.values
    return [round_to_magnitude(np.percentile(values, q)) for q in quantiles]
Given a Pandas Series get a domain of values from zero to the 90% quantile rounded to the nearest order - of - magnitude integer . For example 2100 is rounded to 2000 2790 to 3000 .
152
41
240,032
def to_linear(self, index=None):
    """Transform the StepColormap into a LinearColormap.

    When *index* is None an interpolated index is derived from the step
    boundaries.  NOTE(review): the ``n - 1.`` denominator assumes at
    least three boundary values — confirm behaviour for shorter maps.
    """
    if index is None:
        n = len(self.index) - 1
        # Blend each boundary pair with weights sliding from 0 to 1.
        index = [self.index[i] * (1. - i / (n - 1.)) +
                 self.index[i + 1] * i / (n - 1.) for i in range(n)]
    colors = [self.rgba_floats_tuple(x) for x in index]
    return LinearColormap(colors, index=index,
                          vmin=self.vmin, vmax=self.vmax)
Transforms the StepColormap into a LinearColormap .
125
14
240,033
def add_to(self, parent, name=None, index=None):
    """Attach this element to *parent* as a child and return the element
    itself, so calls can be chained (fluent API)."""
    element = self
    parent.add_child(element, name=name, index=index)
    return element
Add element to a parent .
36
6
240,034
def to_json(self, depth=-1, **kwargs):
    """Return a JSON representation of the object.

    Extra keyword arguments are forwarded to ``json.dumps``.
    """
    as_dict = self.to_dict(depth=depth, ordered=True)
    return json.dumps(as_dict, **kwargs)
Returns a JSON representation of the object .
45
8
240,035
def save(self, outfile, close_file=True, **kwargs):
    """Save the rendered Element into a file.

    *outfile* may be a path (str/bytes — opened in binary write mode)
    or an already-open file-like object; pass ``close_file=False`` to
    keep an externally-supplied handle open.
    NOTE(review): if render/write raises, the handle opened here is not
    closed — consider a with-statement.
    """
    if isinstance(outfile, text_type) or isinstance(outfile, binary_type):
        fid = open(outfile, 'wb')
    else:
        fid = outfile
    root = self.get_root()
    html = root.render(**kwargs)
    fid.write(html.encode('utf8'))
    if close_file:
        fid.close()
Saves an Element into a file .
106
8
240,036
def get_code(self):
    """Return the linked resource's content, fetching it lazily.

    The first call downloads ``self.url`` and caches the body on
    ``self.code``; later calls return the cached value.
    """
    cached = self.code
    if cached is None:
        cached = urlopen(self.url).read()
        self.code = cached
    return cached
Opens the link and returns the response's content.
34
11
240,037
def _repr_html_(self, **kwargs):
    """Display the Figure in a Jupyter notebook.

    Renders the figure, embeds it as a base64 ``data:`` URI, and wraps
    it in an ``<iframe>``; when no explicit height is set, a
    ratio-preserving padding wrapper is used instead.
    """
    html = self.render(**kwargs)
    html = "data:text/html;charset=utf-8;base64," + base64.b64encode(html.encode('utf8')).decode('utf8')  # noqa

    if self.height is None:
        # No fixed height: keep the aspect ratio via padding-bottom.
        iframe = (
            '<div style="width:{width};">'
            '<div style="position:relative;width:100%;height:0;padding-bottom:{ratio};">'  # noqa
            '<iframe src="{html}" style="position:absolute;width:100%;height:100%;left:0;top:0;'  # noqa
            'border:none !important;" '
            'allowfullscreen webkitallowfullscreen mozallowfullscreen>'
            '</iframe>'
            '</div></div>').format
        iframe = iframe(html=html, width=self.width, ratio=self.ratio)
    else:
        iframe = ('<iframe src="{html}" width="{width}" height="{height}"'
                  'style="border:none !important;" '
                  '"allowfullscreen" "webkitallowfullscreen" "mozallowfullscreen">'  # noqa
                  '</iframe>').format
        iframe = iframe(html=html, width=self.width, height=self.height)
    return iframe
Displays the Figure in a Jupyter notebook .
316
12
240,038
def add_subplot(self, x, y, n, margin=0.05):
    """Create a Div child subplot in a matplotlib ``figure.add_subplot``
    style.

    *x*/*y* give the grid's rows/columns, *n* is the 1-based cell index
    in row-major order, and *margin* is the fractional padding applied
    on every side of the cell.
    """
    width = 1. / y
    height = 1. / x
    # Top-left corner of cell n (row-major, 1-based).
    left = ((n - 1) % y) * width
    top = ((n - 1) // y) * height

    # Inset the cell by the margin on all sides.
    left = left + width * margin
    top = top + height * margin
    width = width * (1 - 2. * margin)
    height = height * (1 - 2. * margin)

    div = Div(position='absolute',
              width='{}%'.format(100. * width),
              height='{}%'.format(100. * height),
              left='{}%'.format(100. * left),
              top='{}%'.format(100. * top),
              )
    self.add_child(div)
    return div
Creates a div child subplot in a matplotlib . figure . add_subplot style .
182
21
240,039
def _elapsed ( self ) : self . last_time = time . time ( ) return self . last_time - self . start
Returns elapsed time at update .
29
6
240,040
def _calc_eta(self):
    """Estimate the remaining time and store it on ``self.eta``.

    Returns None (leaving ``self.eta`` untouched) when no iterations
    have completed yet or the elapsed time is too small to give a
    meaningful rate.
    """
    elapsed = self._elapsed()
    if self.cnt == 0 or elapsed < 0.001:
        return None
    rate = float(self.cnt) / elapsed
    self.eta = (float(self.max_iter) - float(self.cnt)) / rate
Calculates estimated time left until completion .
71
9
240,041
def _print_title ( self ) : if self . title : self . _stream_out ( '{}\n' . format ( self . title ) ) self . _stream_flush ( )
Prints tracking title at initialization .
42
7
240,042
def _cache_eta(self):
    """Append the formatted estimated-time-left to the cached output line."""
    self._calc_eta()
    self._cached_output += ' | ETA: ' + self._get_time(self.eta)
Prints the estimated time left .
45
7
240,043
def _adjust_width ( self ) : if self . bar_width > self . max_iter : self . bar_width = int ( self . max_iter )
Shrinks bar if number of iterations is less than the bar width
36
14
240,044
def _print(self, force_flush=False):
    """Print the formatted percentage (and tracked time) to the stream.

    Updates are throttled either by ``update_interval`` seconds or by a
    change in the reported percentage; ``force_flush`` bypasses the
    percentage check (but not the interval check).
    """
    self._stream_flush()
    next_perc = self._calc_percent()
    if self.update_interval:
        do_update = time.time() - self.last_time >= self.update_interval
    elif force_flush:
        do_update = True
    else:
        do_update = next_perc > self.last_progress

    if do_update and self.active:
        self.last_progress = next_perc
        self._cache_percent_indicator(self.last_progress)
        if self.track:
            self._cached_output += ' Time elapsed: ' + self._get_time(self._elapsed())
            self._cache_eta()
        if self.item_id:
            self._cache_item_id()
        # \r rewrites the same terminal line in place.
        self._stream_out('\r%s' % self._cached_output)
        self._stream_flush()
        self._cached_output = ''
Prints formatted percentage and tracked time to the screen .
225
11
240,045
def next(self):
    """Return the next line in the file, updating the offset.

    When the current handle is exhausted and the underlying file was
    rotated/renamed, switches back to the live file before giving up
    and persisting the final offset.
    """
    try:
        line = self._get_next_line()
    except StopIteration:
        # we've reached the end of the file; if we're processing the
        # rotated log file or the file has been renamed, we can continue with the actual file; otherwise
        # update the offset file
        if self._is_new_file():
            self._rotated_logfile = None
            self._fh.close()
            self._offset = 0
            # open up current logfile and continue
            try:
                line = self._get_next_line()
            except StopIteration:
                # oops, empty file
                self._update_offset_file()
                raise
        else:
            self._update_offset_file()
            raise

    if self.paranoid:
        self._update_offset_file()
    elif self.every_n and self.every_n <= self._since_update:
        self._update_offset_file()

    return line
Return the next line in the file updating the offset .
207
11
240,046
def read(self):
    """Read all unread lines and return them joined as a single string,
    or None when there is nothing new."""
    lines = self.readlines()
    if not lines:
        return None
    try:
        return ''.join(lines)
    except TypeError:
        # Mixed bytes/str lines: coerce each line to text first.
        return ''.join(force_text(line) for line in lines)
Read in all unread lines and return them as a single string .
50
14
240,047
def _filehandle(self):
    """Return a filehandle to the file being tailed, positioned at the
    current offset (reopening the file when needed)."""
    if not self._fh or self._is_closed():
        filename = self._rotated_logfile or self.filename
        if filename.endswith('.gz'):
            self._fh = gzip.open(filename, 'r')
        else:
            # Line-buffered text handle.
            self._fh = open(filename, "r", 1)
        if self.read_from_end and not exists(self._offset_file):
            # No saved offset yet and we were asked to start at the end.
            self._fh.seek(0, os.SEEK_END)
        else:
            self._fh.seek(self._offset)
    return self._fh
Return a filehandle to the file being tailed with the position set to the current offset .
144
19
240,048
def _update_offset_file ( self ) : if self . on_update : self . on_update ( ) offset = self . _filehandle ( ) . tell ( ) inode = stat ( self . filename ) . st_ino fh = open ( self . _offset_file , "w" ) fh . write ( "%s\n%s\n" % ( inode , offset ) ) fh . close ( ) self . _since_update = 0
Update the offset file with the current inode and offset .
103
12
240,049
def _determine_rotated_logfile(self):
    """We suspect the logfile has been rotated; try to guess the
    rotated filename and return it, or None when no rotation is
    detected."""
    rotated_filename = self._check_rotated_filename_candidates()
    if rotated_filename and exists(rotated_filename):
        # Inode match: the candidate really is our old file.
        if stat(rotated_filename).st_ino == self._offset_file_inode:
            return rotated_filename

        # if the inode hasn't changed, then the file shrank; this is expected with copytruncate,
        # otherwise print a warning
        if stat(self.filename).st_ino == self._offset_file_inode:
            if self.copytruncate:
                return rotated_filename
            else:
                sys.stderr.write(
                    "[pygtail] [WARN] file size of %s shrank, and copytruncate support is "
                    "disabled (expected at least %d bytes, was %d bytes).\n" %
                    (self.filename, self._offset, stat(self.filename).st_size))

    return None
We suspect the logfile has been rotated so try to guess what the rotated filename is and return it .
211
21
240,050
def _check_rotated_filename_candidates(self):
    """Check for various rotated logfile filename patterns and return
    the first match we find (most recent file for glob patterns), or
    None when nothing matches."""
    # savelog(8)
    candidate = "%s.0" % self.filename
    if (exists(candidate) and exists("%s.1.gz" % self.filename) and
            (stat(candidate).st_mtime > stat("%s.1.gz" % self.filename).st_mtime)):
        return candidate

    # logrotate(8)
    # with delaycompress
    candidate = "%s.1" % self.filename
    if exists(candidate):
        return candidate

    # without delaycompress
    candidate = "%s.1.gz" % self.filename
    if exists(candidate):
        return candidate

    rotated_filename_patterns = [
        # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + with `delaycompress`
        "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]",
        # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + without `delaycompress`
        "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz",
        # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + with `delaycompress`
        "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]",
        # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + without `delaycompress`
        "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz",
        # for TimedRotatingFileHandler
        "%s.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]",
    ]
    if self.log_patterns:
        rotated_filename_patterns.extend(self.log_patterns)

    # break into directory and filename components to support cases where the
    # the file is prepended as part of rotation
    file_dir, rel_filename = os.path.split(self.filename)
    for rotated_filename_pattern in rotated_filename_patterns:
        candidates = glob.glob(os.path.join(file_dir, rotated_filename_pattern % rel_filename))
        if candidates:
            candidates.sort()
            return candidates[-1]  # return most recent

    # no match
    return None
Check for various rotated logfile filename patterns and return the first match we find .
674
16
240,051
def create_s3_session():
    """Create a requests session that automatically retries on 5xx
    errors (3 attempts with exponential backoff) for https endpoints."""
    retry_policy = Retry(total=3,
                         backoff_factor=.5,
                         status_forcelist=[500, 502, 503, 504])
    session = requests.Session()
    session.mount('https://', HTTPAdapter(max_retries=retry_policy))
    return session
Creates a session with automatic retries on 5xx errors .
76
13
240,052
def load_module(self, fullname):
    """Return an empty placeholder module registered under *fullname*.

    NOTE(review): relies on the deprecated ``imp`` module (removed in
    Python 3.12) — confirm against the project's supported versions.
    """
    mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
    mod.__file__ = self._path
    mod.__loader__ = self
    # Mark it as a package with no submodule search path.
    mod.__path__ = []
    mod.__package__ = fullname
    return mod
Returns an empty module .
68
5
240,053
def load_module(self, fullname):
    """Return an object that lazily looks up tables and groups.

    Reuses an existing entry in ``sys.modules`` when one is present.
    """
    mod = sys.modules.get(fullname)
    if mod is not None:
        return mod

    # We're creating an object rather than a module. It's a hack, but it's approved by Guido:
    # https://mail.python.org/pipermail/python-ideas/2012-May/014969.html
    mod = _from_core_node(self._store, self._root)
    sys.modules[fullname] = mod
    return mod
Returns an object that lazily looks up tables and groups .
111
12
240,054
def find_module(self, fullname, path=None):
    """Look up the loader for the module path *fullname*.

    Returns a PackageLoader for full team/user/package paths, a
    FakeLoader for partial paths that exist on disk, and None otherwise.
    """
    if not fullname.startswith(self._module_name + '.'):
        # Not a quilt submodule.
        return None

    submodule = fullname[len(self._module_name) + 1:]
    parts = submodule.split('.')

    # Pop the team prefix if this is a team import.
    if self._teams:
        team = parts.pop(0)
    else:
        team = None

    # Handle full paths first.
    if len(parts) == 2:
        store, pkg = PackageStore.find_package(team, parts[0], parts[1])
        if pkg is not None:
            return PackageLoader(store, pkg)
        else:
            return None

    # Return fake loaders for partial paths.
    # NOTE(review): for len(parts) > 2 neither branch below assigns
    # ``path``, so the parameter value (often None) reaches isdir —
    # confirm that case cannot occur.
    for store_dir in PackageStore.find_store_dirs():
        store = PackageStore(store_dir)

        if len(parts) == 0:
            assert self._teams
            path = store.team_path(team)
        elif len(parts) == 1:
            path = store.user_path(team, parts[0])

        if os.path.isdir(path):
            return FakeLoader(path)

    # Nothing is found.
    return None
Looks up the table based on the module path .
271
10
240,055
def _have_pyspark():
    """Check (once) whether we're running with PySpark available.

    The result is memoized on the function attribute ``flag``.
    """
    if _have_pyspark.flag is None:
        try:
            # Only probe for pyspark when Spark is the configured parquet lib.
            if PackageStore.get_parquet_lib() is ParquetLib.SPARK:
                import pyspark  # pylint:disable=W0612
                _have_pyspark.flag = True
            else:
                _have_pyspark.flag = False
        except ImportError:
            _have_pyspark.flag = False
    return _have_pyspark.flag
Check if we re running Pyspark
103
8
240,056
def _path_hash(path, transform, kwargs):
    """Generate a hash of source file path + transform + args."""
    # Deterministic encoding: kwargs sorted by key, each rendered with
    # its repr and type so different value types hash differently.
    encoded_args = ["%s:%r:%s" % (name, val, type(val))
                    for name, val in sorted(iteritems(kwargs))]
    source_info = "{path}:{transform}:{{{kwargs}}}".format(
        path=os.path.abspath(path),
        transform=transform,
        kwargs=",".join(encoded_args),
    )
    return digest_string(source_info)
Generate a hash of source file path + transform + args
110
12
240,057
def _gen_glob_data(dir, pattern, child_table):
    """Yield ``(node_name, node_table)`` pairs by globbing *dir* for
    *pattern*.

    Directories are skipped; each matched file gets a copy of
    *child_table* (or a fresh dict) with its relative path stored under
    the reserved 'file' key.  Prints a warning when nothing matches.
    """
    dir = pathlib.Path(dir)
    matched = False
    used_names = set()  # Used by to_nodename to prevent duplicate names
    # sorted so that renames (if any) are consistently ordered
    for filepath in sorted(dir.glob(pattern)):
        if filepath.is_dir():
            continue
        else:
            matched = True
            # create node info
            node_table = {} if child_table is None else child_table.copy()
            filepath = filepath.relative_to(dir)
            node_table[RESERVED['file']] = str(filepath)
            node_name = to_nodename(filepath.stem, invalid=used_names)
            used_names.add(node_name)
            print("Matched with {!r}: {!r} from {!r}".format(pattern, node_name, str(filepath)))
            yield node_name, node_table

    if not matched:
        print("Warning: {!r} matched no files.".format(pattern))
        return
Generates node data by globbing a directory for a pattern
246
12
240,058
def _remove_keywords(d):
    """Return a copy of *d* with all RESERVED keywords filtered out."""
    filtered = {}
    for key, value in iteritems(d):
        if key not in RESERVED:
            filtered[key] = value
    return filtered
Copy the dict, filtering out reserved keywords.
34
7
240,059
def build_package(team, username, package, subpath, yaml_path,
                  checks_path=None, dry_run=False, env='default'):
    """Build a package from a given YAML file and install it locally.

    Loads *yaml_path*, resolves where the checks configuration lives
    (explicit *checks_path*, the 'checks.yml' default, or none), and
    delegates to ``build_package_from_contents``.
    """
    def find(key, value):
        """
        find matching nodes recursively;
        only descend iterables that aren't strings
        """
        if isinstance(value, Iterable) and not isinstance(value, string_types):
            for k, v in iteritems(value):
                if k == key:
                    yield v
                elif isinstance(v, dict):
                    for result in find(key, v):
                        yield result
                elif isinstance(v, list):
                    for item in v:
                        for result in find(key, item):
                            yield result

    build_data = load_yaml(yaml_path)
    # default to 'checks.yml' if build.yml contents: contains checks, but
    # there's no inlined checks: defined by build.yml
    if (checks_path is None and list(find('checks', build_data['contents'])) and
            'checks' not in build_data):
        checks_path = 'checks.yml'
        checks_contents = load_yaml(checks_path, optional=True)
    elif checks_path is not None:
        checks_contents = load_yaml(checks_path)
    else:
        checks_contents = None
    build_package_from_contents(team, username, package, subpath,
                                os.path.dirname(yaml_path), build_data,
                                checks_contents=checks_contents,
                                dry_run=dry_run, env=env)
Builds a package from a given Yaml file and installs it locally .
353
15
240,060
def send_comment_email(email, package_owner, package_name, commenter):
    """Notify *email* (the package owner) that *commenter* left a new
    comment on ``package_owner/package_name``."""
    link = '{CATALOG_URL}/package/{owner}/{pkg}/comments'.format(
        CATALOG_URL=CATALOG_URL, owner=package_owner, pkg=package_name)
    subject = "New comment on {package_owner}/{package_name}".format(
        package_owner=package_owner, package_name=package_name)
    template_args = dict(commenter=commenter, link=link)
    html = render_template('comment_email.html', **template_args)
    body = render_template('comment_email.txt', **template_args)
    send_email(recipients=[email], sender=DEFAULT_SENDER,
               subject=subject, html=html, body=body)
Send email to owner of package regarding new comment
183
9
240,061
def hash_contents(contents):
    """Create a SHA-256 hash of key names and hashes in a package tree.

    Walks the GroupNode tree deterministically (children sorted by key),
    length-prefixing every string and collection so the encoding is
    unambiguous.
    """
    assert isinstance(contents, GroupNode)

    result = hashlib.sha256()

    def _hash_int(value):
        # Fixed-width big-endian count/length prefix.
        result.update(struct.pack(">L", value))

    def _hash_str(string):
        assert isinstance(string, string_types)
        _hash_int(len(string))
        result.update(string.encode())

    def _hash_object(obj):
        _hash_str(obj.json_type)
        if isinstance(obj, (TableNode, FileNode)):
            hashes = obj.hashes
            _hash_int(len(hashes))
            for hval in hashes:
                _hash_str(hval)
        elif isinstance(obj, GroupNode):
            children = obj.children
            _hash_int(len(children))
            for key, child in sorted(iteritems(children)):
                _hash_str(key)
                _hash_object(child)
        else:
            assert False, "Unexpected object: %r" % obj

        # Backward compatibility: only hash metadata_hash if it's present.
        if obj.metadata_hash is not None:
            _hash_str(obj.metadata_hash)

    _hash_object(contents)

    return result.hexdigest()
Creates a hash of key names and hashes in a package dictionary .
276
14
240,062
def find_object_hashes ( root , meta_only = False ) : stack = [ root ] while stack : obj = stack . pop ( ) if not meta_only and isinstance ( obj , ( TableNode , FileNode ) ) : for objhash in obj . hashes : yield objhash stack . extend ( itervalues ( obj . get_children ( ) ) ) if obj . metadata_hash is not None : yield obj . metadata_hash
Iterator that returns hashes of all of the file and table nodes .
97
13
240,063
def _send_event_task ( args ) : endpoint = args [ 'endpoint' ] json_message = args [ 'json_message' ] _consumer_impl . send ( endpoint , json_message )
Actually sends the MixPanel event . Runs in a uwsgi worker process .
45
16
240,064
def send ( self , endpoint , json_message ) : _send_event_task . spool ( endpoint = endpoint , json_message = json_message )
Queues the message to be sent .
34
8
240,065
def main ( args = None ) : parser = argument_parser ( ) args = parser . parse_args ( args ) # If 'func' isn't present, something is misconfigured above or no (positional) arg was given. if not hasattr ( args , 'func' ) : args = parser . parse_args ( [ 'help' ] ) # show help # Convert argparse.Namespace into dict and clean it up. # We can then pass it directly to the helper function. kwargs = vars ( args ) # handle the '--dev' option if kwargs . pop ( 'dev' ) or os . environ . get ( 'QUILT_DEV_MODE' , '' ) . strip ( ) . lower ( ) == 'true' : # Enables CLI ctrl-c tracebacks, and whatever anyone else uses it for quilt . _DEV_MODE = True else : # Disables CLI ctrl-c tracebacks, etc. quilt . _DEV_MODE = False func = kwargs . pop ( 'func' ) try : func ( * * kwargs ) return 0 except QuiltException as ex : print ( ex . message , file = sys . stderr ) return 1 except requests . exceptions . ConnectionError as ex : print ( "Failed to connect: %s" % ex , file = sys . stderr ) return 1
Build and run parser
298
4
240,066
def is_identifier ( string ) : matched = PYTHON_IDENTIFIER_RE . match ( string ) return bool ( matched ) and not keyword . iskeyword ( string )
Check if string could be a valid python identifier
41
9
240,067
def fs_link ( path , linkpath , linktype = 'soft' ) : global WIN_SOFTLINK global WIN_HARDLINK WIN_NO_ERROR = 22 assert linktype in ( 'soft' , 'hard' ) path , linkpath = pathlib . Path ( path ) , pathlib . Path ( linkpath ) # Checks if not path . exists ( ) : # particularly important on Windows to prevent false success raise QuiltException ( "Path to link to does not exist: {}" . format ( path ) ) if linkpath . exists ( ) : raise QuiltException ( "Link path already exists: {}" . format ( linkpath ) ) # Windows if os . name == 'nt' : # clear out any pre-existing, un-checked errors ctypes . WinError ( ) # Check Windows version (reasonably) supports symlinks if not sys . getwindowsversion ( ) [ 0 ] >= 6 : raise QuiltException ( "Unsupported operation: This version of Windows does not support linking." ) # Acquire the windows CreateXLinkW() function if linktype == 'soft' : if WIN_SOFTLINK is None : WIN_SOFTLINK = ctypes . windll . kernel32 . CreateSymbolicLinkW WIN_SOFTLINK . restype = ctypes . c_bool create_link = lambda l , p : WIN_SOFTLINK ( str ( l ) , str ( p ) , p . is_dir ( ) ) elif linktype == 'hard' : if WIN_HARDLINK is None : WIN_HARDLINK = ctypes . windll . kernel32 . CreateHardLinkW WIN_HARDLINK . restype = ctypes . c_bool create_link = WIN_HARDLINK # Call and check results create_link ( linkpath , path ) # Check WinError, because the return value for CreateSymbolicLinkW's type is suspect due to a # (possible) bug: https://stackoverflow.com/questions/33010440/createsymboliclink-on-windows-10 # We have user results with similar effects (success reported, but not actual) error = ctypes . WinError ( ) if error . winerror : raise QuiltException ( "Linking failed: " + str ( error ) , original_error = error ) # Handle the case wehere linking failed and windows gave no error: if not linkpath . exists ( ) and linkpath . is_symlink ( ) : raise QuiltException ( "Linking failed: Expected symlink at: {}" . 
format ( linkpath ) ) # Linux, OSX else : try : if linktype == 'soft' : linkpath . symlink_to ( path ) elif linktype == 'hard' : os . link ( str ( path ) , str ( linkpath ) ) except OSError as error : raise QuiltException ( "Linking failed: " + str ( error ) , original_error = error )
Create a hard or soft link of path at linkpath
652
11
240,068
def read ( self , size = - 1 ) : buf = self . _fd . read ( size ) self . _progress_cb ( len ( buf ) ) return buf
Read bytes and update the progress bar .
36
8
240,069
def create_dirs ( self ) : if not os . path . isdir ( self . _path ) : os . makedirs ( self . _path ) for dir_name in [ self . OBJ_DIR , self . TMP_OBJ_DIR , self . PKG_DIR , self . CACHE_DIR ] : path = os . path . join ( self . _path , dir_name ) if not os . path . isdir ( path ) : os . mkdir ( path ) if not os . path . exists ( self . _version_path ( ) ) : self . _write_format_version ( )
Creates the store directory and its subdirectories .
138
11
240,070
def find_store_dirs ( cls ) : store_dirs = [ default_store_location ( ) ] extra_dirs_str = os . getenv ( 'QUILT_PACKAGE_DIRS' ) if extra_dirs_str : store_dirs . extend ( extra_dirs_str . split ( ':' ) ) return store_dirs
Returns the primary package directory and any additional ones from QUILT_PACKAGE_DIRS .
84
21
240,071
def find_package ( cls , team , user , package , pkghash = None , store_dir = None ) : cls . check_name ( team , user , package ) dirs = cls . find_store_dirs ( ) for store_dir in dirs : store = PackageStore ( store_dir ) pkg = store . get_package ( team , user , package , pkghash = pkghash ) if pkg is not None : return store , pkg return None , None
Finds an existing package in one of the package directories .
113
12
240,072
def get_package ( self , team , user , package , pkghash = None ) : self . check_name ( team , user , package ) path = self . package_path ( team , user , package ) if not os . path . isdir ( path ) : return None if pkghash is None : latest_tag = os . path . join ( path , self . TAGS_DIR , self . LATEST ) if not os . path . exists ( latest_tag ) : return None with open ( latest_tag , 'r' ) as tagfile : pkghash = tagfile . read ( ) assert pkghash is not None contents_path = os . path . join ( path , self . CONTENTS_DIR , pkghash ) if not os . path . isfile ( contents_path ) : return None with open ( contents_path , 'r' ) as contents_file : try : return json . load ( contents_file , object_hook = decode_node ) except AssertionError as err : if str ( err ) . startswith ( "Bad package format" ) : name = "{}{}/{}, {}" . format ( team + ':' if team else '' , user , package , pkghash ) raise StoreException ( "Error in {}: {}" . format ( name , str ( err ) ) ) else : raise
Gets a package from this store .
298
8
240,073
def install_package ( self , team , user , package , contents ) : self . check_name ( team , user , package ) assert contents is not None self . create_dirs ( ) path = self . package_path ( team , user , package ) # Delete any existing data. try : os . remove ( path ) except OSError : pass
Creates a new package in the default package store and allocates a per - user directory if needed .
76
21
240,074
def create_package_node ( self , team , user , package , dry_run = False ) : contents = RootNode ( dict ( ) ) if dry_run : return contents self . check_name ( team , user , package ) assert contents is not None self . create_dirs ( ) # Delete any existing data. path = self . package_path ( team , user , package ) try : os . remove ( path ) except OSError : pass return contents
Creates a new package and initializes its contents . See install_package .
100
16
240,075
def iterpackages ( self ) : pkgdir = os . path . join ( self . _path , self . PKG_DIR ) if not os . path . isdir ( pkgdir ) : return for team in sub_dirs ( pkgdir ) : for user in sub_dirs ( self . team_path ( team ) ) : for pkg in sub_dirs ( self . user_path ( team , user ) ) : pkgpath = self . package_path ( team , user , pkg ) for hsh in sub_files ( os . path . join ( pkgpath , PackageStore . CONTENTS_DIR ) ) : yield self . get_package ( team , user , pkg , pkghash = hsh )
Return an iterator over all the packages in the PackageStore .
163
12
240,076
def ls_packages ( self ) : packages = [ ] pkgdir = os . path . join ( self . _path , self . PKG_DIR ) if not os . path . isdir ( pkgdir ) : return [ ] for team in sub_dirs ( pkgdir ) : for user in sub_dirs ( self . team_path ( team ) ) : for pkg in sub_dirs ( self . user_path ( team , user ) ) : pkgpath = self . package_path ( team , user , pkg ) pkgmap = { h : [ ] for h in sub_files ( os . path . join ( pkgpath , PackageStore . CONTENTS_DIR ) ) } for tag in sub_files ( os . path . join ( pkgpath , PackageStore . TAGS_DIR ) ) : with open ( os . path . join ( pkgpath , PackageStore . TAGS_DIR , tag ) , 'r' ) as tagfile : pkghash = tagfile . read ( ) pkgmap [ pkghash ] . append ( tag ) for pkghash , tags in pkgmap . items ( ) : # add teams here if any other than DEFAULT_TEAM should be hidden. team_token = '' if team in ( DEFAULT_TEAM , ) else team + ':' fullpkg = "{team}{owner}/{pkg}" . format ( team = team_token , owner = user , pkg = pkg ) # Add an empty string tag for untagged hashes displaytags = tags if tags else [ "" ] # Display a separate full line per tag like Docker for tag in displaytags : packages . append ( ( fullpkg , str ( tag ) , pkghash ) ) return packages
List packages in this store .
382
6
240,077
def team_path ( self , team = None ) : if team is None : team = DEFAULT_TEAM return os . path . join ( self . _path , self . PKG_DIR , team )
Returns the path to directory with the team s users package repositories .
45
13
240,078
def user_path ( self , team , user ) : return os . path . join ( self . team_path ( team ) , user )
Returns the path to directory with the user s package repositories .
30
12
240,079
def package_path ( self , team , user , package ) : return os . path . join ( self . user_path ( team , user ) , package )
Returns the path to a package repository .
34
8
240,080
def object_path ( self , objhash ) : return os . path . join ( self . _path , self . OBJ_DIR , objhash )
Returns the path to an object file based on its hash .
33
12
240,081
def prune ( self , objs = None ) : if objs is None : objdir = os . path . join ( self . _path , self . OBJ_DIR ) objs = os . listdir ( objdir ) remove_objs = set ( objs ) for pkg in self . iterpackages ( ) : remove_objs . difference_update ( find_object_hashes ( pkg ) ) for obj in remove_objs : path = self . object_path ( obj ) if os . path . exists ( path ) : os . chmod ( path , S_IWUSR ) os . remove ( path ) return remove_objs
Clean up objects not referenced by any packages . Try to prune all objects by default .
144
18
240,082
def save_dataframe ( self , dataframe ) : storepath = self . temporary_object_path ( str ( uuid . uuid4 ( ) ) ) # switch parquet lib parqlib = self . get_parquet_lib ( ) if isinstance ( dataframe , pd . DataFrame ) : #parqlib is ParquetLib.ARROW: # other parquet libs are deprecated, remove? import pyarrow as pa from pyarrow import parquet table = pa . Table . from_pandas ( dataframe ) parquet . write_table ( table , storepath ) elif parqlib is ParquetLib . SPARK : from pyspark import sql as sparksql assert isinstance ( dataframe , sparksql . DataFrame ) dataframe . write . parquet ( storepath ) else : assert False , "Unimplemented ParquetLib %s" % parqlib # Move serialized DataFrame to object store if os . path . isdir ( storepath ) : # Pyspark hashes = [ ] files = [ ofile for ofile in os . listdir ( storepath ) if ofile . endswith ( ".parquet" ) ] for obj in files : path = os . path . join ( storepath , obj ) objhash = digest_file ( path ) self . _move_to_store ( path , objhash ) hashes . append ( objhash ) rmtree ( storepath ) else : filehash = digest_file ( storepath ) self . _move_to_store ( storepath , filehash ) hashes = [ filehash ] return hashes
Save a DataFrame to the store .
345
8
240,083
def load_numpy ( self , hash_list ) : assert len ( hash_list ) == 1 self . _check_hashes ( hash_list ) with open ( self . object_path ( hash_list [ 0 ] ) , 'rb' ) as fd : return np . load ( fd , allow_pickle = False )
Loads a numpy array .
74
7
240,084
def get_file ( self , hash_list ) : assert len ( hash_list ) == 1 self . _check_hashes ( hash_list ) return self . object_path ( hash_list [ 0 ] )
Returns the path of the file - but verifies that the hash is actually present .
47
17
240,085
def save_metadata ( self , metadata ) : if metadata in ( None , { } ) : return None if SYSTEM_METADATA in metadata : raise StoreException ( "Not allowed to store %r in metadata" % SYSTEM_METADATA ) path = self . temporary_object_path ( str ( uuid . uuid4 ( ) ) ) with open ( path , 'w' ) as fd : try : # IMPORTANT: JSON format affects the hash of the package. # In particular, it cannot contain line breaks because of Windows (LF vs CRLF). # To be safe, we use the most compact encoding. json . dump ( metadata , fd , sort_keys = True , separators = ( ',' , ':' ) ) except ( TypeError , ValueError ) : raise StoreException ( "Metadata is not serializable" ) metahash = digest_file ( path ) self . _move_to_store ( path , metahash ) return metahash
Save metadata to the store .
212
6
240,086
def save_package_contents ( self , root , team , owner , pkgname ) : assert isinstance ( root , RootNode ) instance_hash = hash_contents ( root ) pkg_path = self . package_path ( team , owner , pkgname ) if not os . path . isdir ( pkg_path ) : os . makedirs ( pkg_path ) os . mkdir ( os . path . join ( pkg_path , self . CONTENTS_DIR ) ) os . mkdir ( os . path . join ( pkg_path , self . TAGS_DIR ) ) os . mkdir ( os . path . join ( pkg_path , self . VERSIONS_DIR ) ) dest = os . path . join ( pkg_path , self . CONTENTS_DIR , instance_hash ) with open ( dest , 'w' ) as contents_file : json . dump ( root , contents_file , default = encode_node , indent = 2 , sort_keys = True ) tag_dir = os . path . join ( pkg_path , self . TAGS_DIR ) if not os . path . isdir ( tag_dir ) : os . mkdir ( tag_dir ) latest_tag = os . path . join ( pkg_path , self . TAGS_DIR , self . LATEST ) with open ( latest_tag , 'w' ) as tagfile : tagfile . write ( "{hsh}" . format ( hsh = instance_hash ) )
Saves the in - memory contents to a file in the local package repository .
329
16
240,087
def _move_to_store ( self , srcpath , objhash ) : destpath = self . object_path ( objhash ) if os . path . exists ( destpath ) : # Windows: delete any existing object at the destination. os . chmod ( destpath , S_IWUSR ) os . remove ( destpath ) os . chmod ( srcpath , S_IRUSR | S_IRGRP | S_IROTH ) # Make read-only move ( srcpath , destpath )
Make the object read - only and move it to the store .
111
13
240,088
def add_to_package_numpy ( self , root , ndarray , node_path , target , source_path , transform , custom_meta ) : filehash = self . save_numpy ( ndarray ) metahash = self . save_metadata ( custom_meta ) self . _add_to_package_contents ( root , node_path , [ filehash ] , target , source_path , transform , metahash )
Save a Numpy array to the store .
98
9
240,089
def add_to_package_package_tree ( self , root , node_path , pkgnode ) : if node_path : ptr = root for node in node_path [ : - 1 ] : ptr = ptr . children . setdefault ( node , GroupNode ( dict ( ) ) ) ptr . children [ node_path [ - 1 ] ] = pkgnode else : if root . children : raise PackageException ( "Attempting to overwrite root node of a non-empty package." ) root . children = pkgnode . children . copy ( )
Adds a package or sub - package tree from an existing package to this package s contents .
121
18
240,090
def _install_interrupt_handler ( ) : # These would clutter the quilt.x namespace, so they're imported here instead. import os import sys import signal import pkg_resources from . tools import const # Check to see what entry points / scripts are configred to run quilt from the CLI # By doing this, we have these benefits: # * Avoid closing someone's Jupyter/iPython/bPython session when they hit ctrl-c # * Avoid calling exit() when being used as an external lib # * Provide exceptions when running in Jupyter/iPython/bPython # * Provide exceptions when running in unexpected circumstances quilt = pkg_resources . get_distribution ( 'quilt' ) executable = os . path . basename ( sys . argv [ 0 ] ) entry_points = quilt . get_entry_map ( ) . get ( 'console_scripts' , [ ] ) # When python is run with '-c', this was executed via 'python -c "<some python code>"' if executable == '-c' : # This is awkward and somewhat hackish, but we have to ensure that this is *us* # executing via 'python -c' if len ( sys . argv ) > 1 and sys . argv [ 1 ] == 'quilt testing' : # it's us. Let's pretend '-c' is an entry point. entry_points [ '-c' ] = 'blah' sys . argv . pop ( 1 ) if executable not in entry_points : return # We're running as a console script. # If not in dev mode, use SystemExit instead of raising KeyboardInterrupt def handle_interrupt ( signum , stack ) : # Check for dev mode if _DEV_MODE is None : # Args and environment have not been parsed, and no _DEV_MODE state has been set. dev_mode = True if len ( sys . argv ) > 1 and sys . argv [ 1 ] == '--dev' else False dev_mode = True if os . environ . get ( 'QUILT_DEV_MODE' , '' ) . strip ( ) . lower ( ) == 'true' else dev_mode else : # Use forced dev-mode if _DEV_MODE is set dev_mode = _DEV_MODE # In order to display the full traceback, we lose control of the exit code here. 
# Dev mode ctrl-c exit just produces the generic exit error code 1 if dev_mode : raise KeyboardInterrupt ( ) # Normal exit # avoid annoying prompt displacement when hitting ctrl-c print ( ) exit ( const . EXIT_KB_INTERRUPT ) return signal . signal ( signal . SIGINT , handle_interrupt )
Suppress KeyboardInterrupt traceback display in specific situations
589
11
240,091
def _data_keys ( self ) : return [ name for name , child in iteritems ( self . _children ) if not isinstance ( child , GroupNode ) ]
every child key referencing a dataframe
36
7
240,092
def _group_keys ( self ) : return [ name for name , child in iteritems ( self . _children ) if isinstance ( child , GroupNode ) ]
every child key referencing a group that is not a dataframe
35
12
240,093
def _data ( self , asa = None ) : hash_list = [ ] stack = [ self ] alldfs = True store = None while stack : node = stack . pop ( ) if isinstance ( node , GroupNode ) : stack . extend ( child for _ , child in sorted ( node . _items ( ) , reverse = True ) ) else : if node . _target ( ) != TargetType . PANDAS : alldfs = False if node . _store is None or node . _hashes is None : msg = "Can only merge built dataframes. Build this package and try again." raise NotImplementedError ( msg ) node_store = node . _store if store is None : store = node_store if node_store != store : raise NotImplementedError ( "Can only merge dataframes from the same store" ) hash_list += node . _hashes if asa is None : if not hash_list : return None if not alldfs : raise ValueError ( "Group contains non-dataframe nodes" ) return store . load_dataframe ( hash_list ) else : if hash_list : assert store is not None return asa ( self , [ store . object_path ( obj ) for obj in hash_list ] ) else : return asa ( self , [ ] )
Merges all child dataframes . Only works for dataframes stored on disk - not in memory .
284
20
240,094
def _set ( self , path , value , build_dir = '' ) : assert isinstance ( path , list ) and len ( path ) > 0 if isinstance ( value , pd . DataFrame ) : metadata = { SYSTEM_METADATA : { 'target' : TargetType . PANDAS . value } } elif isinstance ( value , np . ndarray ) : metadata = { SYSTEM_METADATA : { 'target' : TargetType . NUMPY . value } } elif isinstance ( value , string_types + ( bytes , ) ) : # bytes -> string for consistency when retrieving metadata value = value . decode ( ) if isinstance ( value , bytes ) else value if os . path . isabs ( value ) : raise ValueError ( "Invalid path: expected a relative path, but received {!r}" . format ( value ) ) # Security: filepath does not and should not retain the build_dir's location! metadata = { SYSTEM_METADATA : { 'filepath' : value , 'transform' : 'id' } } if build_dir : value = os . path . join ( build_dir , value ) else : accepted_types = tuple ( set ( ( pd . DataFrame , np . ndarray , bytes ) + string_types ) ) raise TypeError ( "Bad value type: Expected instance of any type {!r}, but received type {!r}" . format ( accepted_types , type ( value ) ) , repr ( value ) [ 0 : 100 ] ) for key in path : if not is_nodename ( key ) : raise ValueError ( "Invalid name for node: {}" . format ( key ) ) node = self for key in path [ : - 1 ] : child = node . _get ( key ) if not isinstance ( child , GroupNode ) : child = GroupNode ( { } ) node [ key ] = child node = child key = path [ - 1 ] node [ key ] = DataNode ( None , None , value , metadata )
Create and set a node by path
436
7
240,095
def handle_api_exception ( error ) : _mp_track ( type = "exception" , status_code = error . status_code , message = error . message , ) response = jsonify ( dict ( message = error . message ) ) response . status_code = error . status_code return response
Converts an API exception into an error response .
67
10
240,096
def api ( require_login = True , schema = None , enabled = True , require_admin = False , require_anonymous = False ) : if require_admin : require_login = True if schema is not None : Draft4Validator . check_schema ( schema ) validator = Draft4Validator ( schema ) else : validator = None assert not ( require_login and require_anonymous ) , ( "Can't both require login and require anonymous access." ) def innerdec ( f ) : @ wraps ( f ) def wrapper ( * args , * * kwargs ) : g . auth = Auth ( user = None , email = None , is_logged_in = False , is_admin = False , is_active = True ) user_agent_str = request . headers . get ( 'user-agent' , '' ) g . user_agent = httpagentparser . detect ( user_agent_str , fill_none = True ) if not enabled : raise ApiException ( requests . codes . bad_request , "This endpoint is not enabled." ) if validator is not None : try : validator . validate ( request . get_json ( cache = True ) ) except ValidationError as ex : raise ApiException ( requests . codes . bad_request , ex . message ) auth = request . headers . get ( AUTHORIZATION_HEADER ) g . auth_header = auth if auth is None : if not require_anonymous : if require_login or not ALLOW_ANONYMOUS_ACCESS : raise ApiException ( requests . codes . unauthorized , "Not logged in" ) else : # try to validate new auth token = auth # for compatibility with old clients if token . startswith ( "Bearer " ) : token = token [ 7 : ] try : user = verify_token_string ( token ) except AuthException : raise ApiException ( requests . codes . unauthorized , "Token invalid." ) g . user = user g . auth = Auth ( user = user . name , email = user . email , is_logged_in = True , is_admin = user . is_admin , is_active = user . is_active ) g . auth_token = token if not g . auth . is_active : raise ApiException ( requests . codes . forbidden , "Account is inactive. Must have an active account." ) if require_admin and not g . auth . is_admin : raise ApiException ( requests . codes . 
forbidden , "Must be authenticated as an admin to use this endpoint." ) return f ( * args , * * kwargs ) return wrapper return innerdec
Decorator for API requests . Handles auth and adds the username as the first argument .
563
19
240,097
def _private_packages_allowed ( ) : if not HAVE_PAYMENTS or TEAM_ID : return True customer = _get_or_create_customer ( ) plan = _get_customer_plan ( customer ) return plan != PaymentPlan . FREE
Checks if the current user is allowed to create private packages .
56
13
240,098
def _create_auth ( team , timeout = None ) : url = get_registry_url ( team ) contents = _load_auth ( ) auth = contents . get ( url ) if auth is not None : # If the access token expires within a minute, update it. if auth [ 'expires_at' ] < time . time ( ) + 60 : try : auth = _update_auth ( team , auth [ 'refresh_token' ] , timeout ) except CommandException as ex : raise CommandException ( "Failed to update the access token (%s). Run `quilt login%s` again." % ( ex , ' ' + team if team else '' ) ) contents [ url ] = auth _save_auth ( contents ) return auth
Reads the credentials updates the access token if necessary and returns it .
161
14
240,099
def _create_session ( team , auth ) : session = requests . Session ( ) session . hooks . update ( dict ( response = partial ( _handle_response , team ) ) ) session . headers . update ( { "Content-Type" : "application/json" , "Accept" : "application/json" , "User-Agent" : "quilt-cli/%s (%s %s) %s/%s" % ( VERSION , platform . system ( ) , platform . release ( ) , platform . python_implementation ( ) , platform . python_version ( ) ) } ) if auth is not None : session . headers [ "Authorization" ] = "Bearer %s" % auth [ 'access_token' ] return session
Creates a session object to be used for push install etc .
162
13