idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
33,100
def get_file_flags(flags):
    """Render flag names plus the dictionary-size parameter as one string."""
    dict_idx = (flags & rf.RAR_FILE_DICTMASK) >> 5
    names = render_flags(flags & ~rf.RAR_FILE_DICTMASK, file_bits)
    return "%s,%s" % (names, file_parms[dict_idx])
Show flag names and handle dict size .
33,101
def show_item_v3 ( h ) : st = rar3_type ( h . type ) xprint ( "%s: hdrlen=%d datlen=%d" , st , h . header_size , h . add_size ) if h . type in ( rf . RAR_BLOCK_FILE , rf . RAR_BLOCK_SUB ) : if h . host_os == rf . RAR_OS_UNIX : s_mode = "0%o" % h . mode else : s_mode = "0x%x" % h . mode xprint ( " flags=0x%04x:%s" , h ...
Show any RAR3 record .
33,102
def check_crc(f, inf, desc):
    """Compare the stream's computed digest against the expected value.

    Prints a diagnostic when they differ; silently returns when no
    expected value is known.
    """
    expected = inf._md_expect
    if expected is None:
        return
    actual = f._md_context.digest()
    if actual != expected:
        print('crc error - %s - exp=%r got=%r' % (desc, expected, actual))
Compare result crc to expected value .
33,103
def load_vint(buf, pos):
    """Load a variable-size integer, returning (value, new_pos).

    At most 11 continuation bytes are examined; raises BadRarFile if no
    terminating byte (high bit clear) is found within that window.
    """
    value = 0
    shift = 0
    end = min(pos + 11, len(buf))
    while pos < end:
        cur = _byte_code(buf[pos])
        value += (cur & 0x7F) << shift
        shift += 7
        pos += 1
        if cur < 0x80:
            return value, pos
    raise BadRarFile('cannot load vint')
Load variable - size int .
33,104
def load_byte(buf, pos):
    """Load a single byte, returning (value, new_pos)."""
    end = pos + 1
    if len(buf) < end:
        raise BadRarFile('cannot load byte')
    return S_BYTE.unpack_from(buf, pos)[0], end
Load single byte
33,105
def load_le32(buf, pos):
    """Load a little-endian 32-bit integer, returning (value, new_pos)."""
    end = pos + 4
    if len(buf) < end:
        raise BadRarFile('cannot load le32')
    return S_LONG.unpack_from(buf, pos)[0], end
Load little - endian 32 - bit integer
33,106
def load_bytes(buf, num, pos):
    """Load *num* raw bytes, returning (bytes, new_pos)."""
    end = pos + num
    if len(buf) < end:
        raise BadRarFile('cannot load bytes')
    return buf[pos:end], end
Load sequence of bytes
33,107
def load_vstr(buf, pos):
    """Load a byte string whose length is given by a vint prefix."""
    nbytes, data_pos = load_vint(buf, pos)
    return load_bytes(buf, nbytes, data_pos)
Load bytes prefixed by vint length
33,108
def load_dostime(buf, pos):
    """Load an LE32 DOS timestamp as a datetime, returning (dt, new_pos)."""
    stamp, pos = load_le32(buf, pos)
    return to_datetime(parse_dos_time(stamp)), pos
Load LE32 dos timestamp
33,109
def load_unixtime(buf, pos):
    """Load an LE32 unix timestamp as a UTC-aware datetime."""
    secs, pos = load_le32(buf, pos)
    return datetime.fromtimestamp(secs, UTC), pos
Load LE32 unix timestamp
33,110
def load_windowstime(buf, pos):
    """Load an LE64 Windows FILETIME as a UTC-aware datetime.

    FILETIME counts 100-nanosecond ticks since 1601-01-01, so the value
    is shifted by the 1601->1970 epoch difference before conversion.
    """
    win_to_unix_secs = 11644473600
    lo, pos = load_le32(buf, pos)
    hi, pos = load_le32(buf, pos)
    secs, ticks = divmod((hi << 32) | lo, 10000000)
    stamp = datetime.fromtimestamp(secs - win_to_unix_secs, UTC)
    return stamp.replace(microsecond=ticks // 10), pos
Load LE64 windows timestamp
33,111
def is_filelike(obj):
    """Decide whether *obj* is a file object (True) or a filename (False).

    Raises ValueError when it is neither a string nor a usable stream.
    """
    if isinstance(obj, (bytes, unicode)):
        return False
    if not all(hasattr(obj, name) for name in ('read', 'tell', 'seek')):
        raise ValueError("Invalid object passed as file")
    return True
Filename or file object?
33,112
def rar3_s2k ( psw , salt ) : if not isinstance ( psw , unicode ) : psw = psw . decode ( 'utf8' ) seed = bytearray ( psw . encode ( 'utf-16le' ) + salt ) h = Rar3Sha1 ( rarbug = True ) iv = EMPTY for i in range ( 16 ) : for j in range ( 0x4000 ) : cnt = S_LONG . pack ( i * 0x4000 + j ) h . update ( seed ) h . update ( ...
String - to - key hash for RAR3 .
33,113
def rar3_decompress ( vers , meth , data , declen = 0 , flags = 0 , crc = 0 , psw = None , salt = None ) : if meth == RAR_M0 and ( flags & RAR_FILE_PASSWORD ) == 0 : return data flags = flags & ( RAR_FILE_PASSWORD | RAR_FILE_SALT | RAR_FILE_DICTMASK ) flags |= RAR_LONG_BLOCK fname = b'data' date = 0 mode = 0x20 fhdr = ...
Decompress blob of compressed data .
33,114
def to_datetime ( t ) : if t is None : return None year , mon , day , h , m , s = t try : return datetime ( year , mon , day , h , m , s ) except ValueError : pass mday = ( 0 , 31 , 29 , 31 , 30 , 31 , 30 , 31 , 31 , 30 , 31 , 30 , 31 ) if mon < 1 : mon = 1 if mon > 12 : mon = 12 if day < 1 : day = 1 if day > mday [ mo...
Convert 6 - part time tuple into datetime object .
33,115
def parse_dos_time(stamp):
    """Unpack a 32-bit DOS timestamp into a (y, m, d, H, M, S) tuple.

    Bit layout (LSB first): sec/2 in 5 bits, minute 6, hour 5, day 5,
    month 4, year-since-1980 7.
    """
    fields = []
    for width in (5, 6, 5, 5, 4):
        fields.append(stamp & ((1 << width) - 1))
        stamp >>= width
    half_sec, minute, hour, day, month = fields
    year = (stamp & 0x7F) + 1980
    return (year, month, day, hour, minute, half_sec * 2)
Parse standard 32 - bit DOS timestamp .
33,116
def custom_popen ( cmd ) : creationflags = 0 if sys . platform == 'win32' : creationflags = 0x08000000 try : p = Popen ( cmd , bufsize = 0 , stdout = PIPE , stdin = PIPE , stderr = STDOUT , creationflags = creationflags ) except OSError as ex : if ex . errno == errno . ENOENT : raise RarCannotExec ( "Unrar not installe...
Disconnect cmd from parent fds; read only from stdout.
33,117
def custom_check(cmd, ignore_retcode=False):
    """Run *cmd*, collect its stdout, and raise on a bad exit status."""
    proc = custom_popen(cmd)
    out, _ = proc.communicate()
    if proc.returncode and not ignore_retcode:
        raise RarExecError("Check-run failed")
    return out
Run command, collect output, and raise an error if needed.
33,118
def check_returncode ( p , out ) : code = p . returncode if code == 0 : return errmap = [ None , RarWarning , RarFatalError , RarCRCError , RarLockedArchiveError , RarWriteError , RarOpenError , RarUserError , RarMemoryError , RarCreateError , RarNoFilesError , RarWrongPassword ] if UNRAR_TOOL == ALT_TOOL : errmap = [ ...
Raise exception according to unrar exit code .
33,119
def membuf_tempfile(memfile):
    """Spool an in-memory file object out to a real temp file on disk.

    Returns the temp file name; on any failure the temp file is removed
    and the exception re-raised.
    """
    memfile.seek(0, 0)
    fd, tmpname = mkstemp(suffix='.rar')
    tmpf = os.fdopen(fd, "wb")
    try:
        while True:
            chunk = memfile.read(BSIZE)
            if not chunk:
                break
            tmpf.write(chunk)
        tmpf.close()
    except:
        tmpf.close()
        os.unlink(tmpname)
        raise
    return tmpname
Write in - memory file object to real file .
33,120
def isdir(self):
    """Return True if this entry is a directory."""
    if self.type != RAR_BLOCK_FILE:
        return False
    return (self.flags & RAR_FILE_DIRECTORY) == RAR_FILE_DIRECTORY
Returns True if entry is a directory .
33,121
def setpassword(self, password):
    """Set the extraction password, re-parsing if headers are encrypted."""
    self._password = password
    parser = self._file_parser
    # an encrypted-header archive must be re-parsed with the new password
    if parser and parser.has_header_encryption():
        self._file_parser = parser = None
    if parser:
        parser.setpassword(self._password)
    else:
        self._parse()
Sets the password to use when extracting .
33,122
def read(self, fname, psw=None):
    """Return the full uncompressed data for one archive entry."""
    with self.open(fname, 'r', psw) as src:
        return src.read()
Return uncompressed data for archive entry .
33,123
def extract(self, member, path=None, pwd=None):
    """Extract a single member (RarInfo or filename)."""
    fname = member.filename if isinstance(member, RarInfo) else member
    self._extract([fname], path, pwd)
Extract single file into current directory .
33,124
def extractall(self, path=None, members=None, pwd=None):
    """Extract all (or the given) members into *path*."""
    fnlist = []
    if members is not None:
        for m in members:
            fnlist.append(m.filename if isinstance(m, RarInfo) else m)
    self._extract(fnlist, path, pwd)
Extract all files into current directory .
33,125
def has_header_encryption(self):
    """Return True if the archive headers are encrypted."""
    if self._hdrenc_main:
        return True
    main = self._main
    return bool(main and (main.flags & RAR_MAIN_PASSWORD))
Returns True if headers are encrypted
33,126
def getinfo ( self , member ) : if isinstance ( member , RarInfo ) : fname = member . filename else : fname = member if PATH_SEP == '/' : fname2 = fname . replace ( "\\" , "/" ) else : fname2 = fname . replace ( "/" , "\\" ) try : return self . _info_map [ fname ] except KeyError : try : return self . _info_map [ fname...
Return RarInfo for filename
33,127
def parse(self):
    """Process the file, guaranteeing the descriptor is closed afterwards."""
    self._fd = None
    try:
        self._parse_real()
    finally:
        fd = self._fd
        if fd:
            fd.close()
            self._fd = None
Process file .
33,128
def open ( self , inf , psw ) : if inf . file_redir : if inf . file_redir [ 0 ] in ( RAR5_XREDIR_FILE_COPY , RAR5_XREDIR_HARD_LINK ) : inf = self . getinfo ( inf . file_redir [ 2 ] ) if not inf : raise BadRarFile ( 'cannot find copied file' ) if inf . flags & RAR_FILE_SPLIT_BEFORE : raise NeedFirstVolume ( "Partial fil...
Return stream object for file data .
33,129
def enc_byte(self):
    """Return the next encoded byte; on exhaustion flag failure, return 0."""
    try:
        value = self.encdata[self.encpos]
    except IndexError:
        self.failed = 1
        return 0
    self.encpos += 1
    return value
Copy encoded byte .
33,130
def std_byte(self):
    """Copy one byte from the 8-bit name; flag failure past the end."""
    name, idx = self.std_name, self.pos
    try:
        return name[idx]
    except IndexError:
        self.failed = 1
        return ord('?')
Copy byte from 8 - bit representation .
33,131
def put(self, lo, hi):
    """Append one 16-bit value (low byte, high byte) to the output buffer."""
    self.buf.extend((lo, hi))
    self.pos += 1
Copy 16 - bit value to result .
33,132
def decode ( self ) : hi = self . enc_byte ( ) flagbits = 0 while self . encpos < len ( self . encdata ) : if flagbits == 0 : flags = self . enc_byte ( ) flagbits = 8 flagbits -= 2 t = ( flags >> flagbits ) & 3 if t == 0 : self . put ( self . enc_byte ( ) , 0 ) elif t == 1 : self . put ( self . enc_byte ( ) , hi ) elif...
Decompress compressed UTF16 value .
33,133
def read ( self , cnt = None ) : if cnt is None or cnt < 0 : cnt = self . _remain elif cnt > self . _remain : cnt = self . _remain if cnt == 0 : return EMPTY data = self . _read ( cnt ) if data : self . _md_context . update ( data ) self . _remain -= len ( data ) if len ( data ) != cnt : raise BadRarFile ( "Failed the ...
Read all or specified amount of data from archive entry .
33,134
def _check ( self ) : final = self . _md_context . digest ( ) exp = self . _inf . _md_expect if exp is None : return if final is None : return if self . _returncode : check_returncode ( self , '' ) if self . _remain != 0 : raise BadRarFile ( "Failed the read enough data" ) if final != exp : raise BadRarFile ( "Corrupt ...
Check final CRC .
33,135
def seek ( self , ofs , whence = 0 ) : self . _md_context = NoHashContext ( ) fsize = self . _inf . file_size cur_ofs = self . tell ( ) if whence == 0 : new_ofs = ofs elif whence == 1 : new_ofs = cur_ofs + ofs elif whence == 2 : new_ofs = fsize + ofs else : raise ValueError ( 'Invalid value for whence' ) if new_ofs < 0...
Seek in data .
33,136
def _skip ( self , cnt ) : while cnt > 0 : if cnt > 8192 : buf = self . read ( 8192 ) else : buf = self . read ( cnt ) if not buf : break cnt -= len ( buf )
Read and discard data
33,137
def _read ( self , cnt ) : data = self . _fd . read ( cnt ) if len ( data ) == cnt or not data : return data buf = [ data ] cnt -= len ( data ) while cnt > 0 : data = self . _fd . read ( cnt ) if not data : break cnt -= len ( data ) buf . append ( data ) return EMPTY . join ( buf )
Read from pipe .
33,138
def _skip ( self , cnt ) : while cnt > 0 : if self . _cur_avail == 0 : if not self . _open_next ( ) : break if cnt > self . _cur_avail : cnt -= self . _cur_avail self . _remain -= self . _cur_avail self . _cur_avail = 0 else : self . _fd . seek ( cnt , 1 ) self . _cur_avail -= cnt self . _remain -= cnt cnt = 0
RAR seek: skip through rar volumes to reach the correct position.
33,139
def _read ( self , cnt ) : buf = [ ] while cnt > 0 : if self . _cur_avail == 0 : if not self . _open_next ( ) : break if cnt > self . _cur_avail : data = self . _fd . read ( self . _cur_avail ) else : data = self . _fd . read ( cnt ) if not data : break cnt -= len ( data ) self . _cur_avail -= len ( data ) buf . append...
Read from potentially multi - volume archive .
33,140
def readinto ( self , buf ) : got = 0 vbuf = memoryview ( buf ) while got < len ( buf ) : if self . _cur_avail == 0 : if not self . _open_next ( ) : break cnt = len ( buf ) - got if cnt > self . _cur_avail : cnt = self . _cur_avail res = self . _fd . readinto ( vbuf [ got : got + cnt ] ) if not res : break self . _md_c...
Zero - copy read directly into buffer .
33,141
def read ( self , cnt = None ) : if cnt > 8 * 1024 : raise BadRarFile ( 'Bad count to header decrypt - wrong password?' ) if cnt <= len ( self . buf ) : res = self . buf [ : cnt ] self . buf = self . buf [ cnt : ] return res res = self . buf self . buf = EMPTY cnt -= len ( res ) blklen = 16 while cnt > 0 : enc = self ....
Read and decrypt .
33,142
def update ( self , data ) : view = memoryview ( data ) bs = self . block_size if self . _buf : need = bs - len ( self . _buf ) if len ( view ) < need : self . _buf += view . tobytes ( ) return self . _add_block ( self . _buf + view [ : need ] . tobytes ( ) ) view = view [ need : ] while len ( view ) >= bs : self . _ad...
Hash data .
33,143
def digest(self):
    """Return the final digest, computing and caching it on first call."""
    if self._digest is not None:
        return self._digest
    if self._buf:
        self._add_block(self._buf)
        self._buf = EMPTY
    root = self._blake2s(0, 1, True)
    for leaf in self._thread:
        root.update(leaf.digest())
    self._digest = root.digest()
    return self._digest
Return final digest value .
33,144
def update(self, data):
    """Feed more data into the hash, replaying RAR's SHA1 bug if enabled."""
    self._md.update(data)
    offset = self._nbytes & 63
    self._nbytes += len(data)
    if not (self._rarbug and len(data) > 64):
        return
    bs = self.block_size
    pos = bs - offset
    while pos + bs <= len(data):
        self._corrupt(data, pos)
        pos += bs
Process more data .
33,145
def _corrupt ( self , data , dpos ) : ws = list ( self . _BLK_BE . unpack_from ( data , dpos ) ) for t in range ( 16 , 80 ) : tmp = ws [ ( t - 3 ) & 15 ] ^ ws [ ( t - 8 ) & 15 ] ^ ws [ ( t - 14 ) & 15 ] ^ ws [ ( t - 16 ) & 15 ] ws [ t & 15 ] = ( ( tmp << 1 ) | ( tmp >> ( 32 - 1 ) ) ) & 0xFFFFFFFF self . _BLK_LE . pack_...
Corruption from SHA1 core .
33,146
def get_tags_recommendation ( request ) : query = request . GET . get ( 'query' ) limit = settings . TAGGIT_SELECTIZE [ 'RECOMMENDATION_LIMIT' ] try : cls = import_string ( settings . TAGGIT_SELECTIZE_THROUGH ) except AttributeError : cls = Tag if query : tags = cls . objects . filter ( name__icontains = query ) . valu...
Taggit autocomplete ajax view. Response objects are filtered based on the query param. Tags are limited to 10 by default; use the TAGGIT_SELECTIZE RECOMMENDATION_LIMIT setting to change the limit.
33,147
def parse_tags ( tagstring ) : if not tagstring : return [ ] tagstring = force_text ( tagstring ) words = [ ] buffer = [ ] to_be_split = [ ] i = iter ( tagstring ) try : while True : c = six . next ( i ) if c == '"' : if buffer : to_be_split . append ( '' . join ( buffer ) ) buffer = [ ] c = six . next ( i ) while c !=...
Parses tag input with multiple word input being activated and delineated by commas and double quotes . Quotes take precedence so they may contain commas .
33,148
def join_tags(tags):
    """Build an editable, sorted string from Tag instances.

    Names containing the delimiter or a space are wrapped in double
    quotes so the string round-trips through the tag parser unchanged.
    """
    delimiter = settings.TAGGIT_SELECTIZE['DELIMITER']
    names = []
    for tag in tags:
        name = tag.name
        if delimiter in name or ' ' in name:
            names.append('"%s"' % name)
        else:
            names.append(name)
    return delimiter.join(sorted(names))
Given list of Tag instances creates a string representation of the list suitable for editing by the user such that submitting the given string representation back without changing it will give the same list of tags .
33,149
def get_logger ( name , namespace = '{{project.package}}' , log_level = DEFAULT_LOG_LEVEL , log_dir = DEFAULT_LOG_DIR ) : log_level = ( os . getenv ( '{}_LOG_LEVEL' . format ( namespace . upper ( ) ) ) or os . getenv ( 'LOG_LEVEL' , log_level ) ) log_dir = ( os . getenv ( '{}_LOG_DIR' . format ( namespace . upper ( ) )...
Build a logger that outputs to a file and to the console
33,150
def get ( cls , key , section = None , ** kwargs ) : section = section or cls . _default_sect if section not in cls . _conf : cls . _load ( section = section ) value = cls . _conf [ section ] . get ( key ) if not value and section != cls . _default_sect : value = cls . _conf [ cls . _default_sect ] . get ( key ) if cls...
Retrieves a config value from dict. If not found, throws an InvalidScanbooconfigException.
33,151
def keys(cls, section=None):
    """Return all config keys in *section*, loading it on first access."""
    sect = section if section else cls._default_sect
    if sect not in cls._conf:
        cls._load(section=sect)
    return cls._conf[sect].keys()
Get a list with all config keys
33,152
def get(self):
    """Fetch a credential by file path; abort with 400 when undecryptable."""
    args = self.get_parser.parse_args()
    cred = self.manager.get_credential(args)
    if cred is not None:
        return cred
    return abort(http_client.BAD_REQUEST,
                 message='Unable to decrypt credential value.')
Get a credential by file path
33,153
def put(self):
    """Update a credential by file path from the request's JSON body."""
    payload = utils.uni_to_str(json.loads(request.get_data()))
    return self.manager.update_credential(payload)
Update a credential by file path
33,154
def parse_response ( self , response ) : status = response . status_code if response . ok : data = response . json ( ) return HttpResponse ( ok = response . ok , status = status , errors = None , data = data ) else : try : errors = response . json ( ) except ValueError : errors = response . content return HttpResponse ...
Parse the response and build a scanboo_common . http_client . HttpResponse object . For successful responses convert the json data into a dict .
33,155
def get_all(self, path, data=None, limit=100):
    """Encapsulate GET-all requests as a lazy, paginated result set."""
    if not data:
        data = {}
    return ListResultSet(path=path, data=data, limit=limit)
Encapsulates GET all requests
33,156
def get(self, path, data=None):
    """Encapsulate GET requests."""
    params = data or {}
    resp = requests.get(self.url(path), params=params,
                        headers=self.request_header())
    return self.parse_response(resp)
Encapsulates GET requests
33,157
def post(self, path, data=None):
    """Encapsulate POST requests with a JSON-encoded body."""
    body = to_json(data or {})
    resp = requests.post(self.url(path), data=body,
                         headers=self.request_header())
    return self.parse_response(resp)
Encapsulates POST requests
33,158
def download_file ( cls , url , local_file_name = None , force = False , chunk_size = 1024 ) : local_file_name = local_file_name if local_file_name else url . split ( '/' ) [ - 1 ] filepath = os . path . join ( cls . data_path , local_file_name ) if not os . path . exists ( filepath ) or force : try : headers = request...
Download file from a given url
33,159
def chunks(lst, size):
    """Yield successive *size*-sized chunks from *lst*.

    Fix: the original used ``xrange``, which does not exist on Python 3;
    ``range`` behaves identically for this iteration on both versions.
    """
    for i in range(0, len(lst), size):
        yield lst[i:i + size]
Yield successive n - sized chunks from lst .
33,160
def _to_json_default ( obj ) : if isinstance ( obj , datetime . datetime ) : return obj . isoformat ( ) if isinstance ( obj , uuid . UUID ) : return str ( obj ) if hasattr ( obj , 'item' ) : return obj . item ( ) try : return obj . id except Exception : raise TypeError ( '{obj} is not JSON serializable' . format ( obj ...
Helper to convert non default objects to json .
33,161
def _from_json_object_hook ( obj ) : for key , value in obj . items ( ) : if isinstance ( value , str ) : dt_result = datetime_regex . match ( value ) if dt_result : year , month , day , hour , minute , second = map ( lambda x : int ( x ) , dt_result . groups ( ) ) obj [ key ] = datetime . datetime ( year , month , day...
Converts a json string where datetime and UUID objects were converted into strings using the _to_json_default into a python object .
33,162
def check_path(path, create=False):
    """Check that *path* exists, optionally creating it as a directory.

    Returns True when the path exists (or was just created), else False.
    """
    if os.path.exists(path):
        return True
    if not create:
        return False
    os.makedirs(path)
    return os.path.exists(path)
Check for a path on filesystem
33,163
def url_encode(url):
    """Percent-escape special characters, preserving URL syntax chars."""
    if isinstance(url, text_type):
        url = url.encode('utf8')
    return quote(url, ':/%?&=')
Convert special characters using %xx escape .
33,164
def get(self, id):
    """Get a run by id; abort with 404 when it does not exist."""
    run = self.backend_store.get_run(id)
    if not run:
        return abort(http_client.NOT_FOUND,
                     message="Run {} doesn't exist".format(id))
    return run_model.format_response(run)
Get run by id
33,165
def delete ( self , id ) : run = self . backend_store . get_run ( id ) if not run : return abort ( http_client . NOT_FOUND , message = "Run {} doesn't exist" . format ( id ) ) if not self . manager . delete_run ( run ) : return abort ( http_client . BAD_REQUEST , message = "Failed to find the task queue " "manager of r...
Delete run by id
33,166
def get(self):
    """Return the formatted list of all ansible runs."""
    LOG.info('Returning all ansible runs')
    return [run_model.format_response(run)
            for run in self.backend_store.list_runs()]
Get run list
33,167
def post ( self ) : run_payload = utils . uni_to_str ( json . loads ( request . get_data ( ) ) ) run_payload [ 'id' ] = str ( uuid . uuid4 ( ) ) LOG . info ( 'Triggering new ansible run %s' , run_payload [ 'id' ] ) run = self . manager . create_run ( run_payload ) return run_model . format_response ( run ) , http_clien...
Trigger a new run
33,168
def get_spark_session ( enable_hive = False , app_name = 'marvin-engine' , configs = [ ] ) : import findspark findspark . init ( ) from pyspark . sql import SparkSession spark = SparkSession . builder spark = spark . appName ( app_name ) spark = spark . enableHiveSupport ( ) if enable_hive else spark for config in conf...
Return a Spark Session object
33,169
def chunks(seq, size):
    """Lazily yield successive *size*-length slices of *seq*."""
    starts = range(0, len(seq), size)
    return (seq[start:start + size] for start in starts)
simple two - line alternative to ubelt . chunks
33,170
def _trychar ( char , fallback , asciimode = None ) : if asciimode is True : return fallback if hasattr ( sys . stdout , 'encoding' ) and sys . stdout . encoding : try : char . encode ( sys . stdout . encoding ) except Exception : pass else : return char return fallback
Logic from IPython timeit to handle terminals that cant show mu
33,171
def tic(self):
    """Start the timer; returns self so calls can be chained."""
    if self.verbose:
        self.flush()
        msg = '\ntic(%r)' % self.label
        if self.newline:
            msg += '\n'
        self.write(msg)
        self.flush()
    self.tstart = self._time()
    return self
starts the timer
33,172
def toc(self):
    """Stop the timer; return seconds elapsed since tic()."""
    delta = self._time() - self.tstart
    if self.verbose:
        self.write('...toc(%r)=%.4fs\n' % (self.label, delta))
        self.flush()
    return delta
stops the timer
33,173
def reset(self, label=None):
    """Clear all measurements so the object can be reused; returns self."""
    if label:
        self.label = label
    self.times = []
    self.n_loops = None
    self.total_time = None
    return self
clears all measurements allowing the object to be reused
33,174
def call(self, func, *args, **kwargs):
    """Condensed syntax: time one func(*args, **kwargs) call per trial."""
    for t in self:
        with t:
            func(*args, **kwargs)
    return self
Alternative way to time a simple function call using condensed syntax .
33,175
def mean(self):
    """Mean of the best (minimum of each *bestof* group) trial times."""
    best = [min(group) for group in chunks(self.times, self.bestof)]
    return sum(best) / len(best)
The mean of the best results of each trial .
33,176
def std(self):
    """Population standard deviation of the best result of each trial."""
    import math
    best = [min(group) for group in chunks(self.times, self.bestof)]
    mu = sum(best) / len(best)
    var = sum((t - mu) ** 2 for t in best) / len(best)
    return math.sqrt(var)
The standard deviation of the best results of each trial .
33,177
def report ( self , verbose = 1 ) : lines = [ ] if verbose >= 2 : lines . append ( self . _status_line ( tense = 'past' ) ) if verbose >= 3 : unit , mag = _choose_unit ( self . total_time , self . unit , self . _asciimode ) lines . append ( ' body took: {total:.{pr}{t}} {unit}' . format ( total = self . total_time /...
Creates a human readable report
33,178
def parse_description():
    """Return the text of README.rst next to this file, or '' if absent."""
    from os.path import dirname, join, exists
    readme_fpath = join(dirname(__file__), 'README.rst')
    if not exists(readme_fpath):
        return ''
    with open(readme_fpath, 'r') as f:
        return f.read().strip()
Parse the description in the README file
33,179
def parse_requirements ( fname = 'requirements.txt' ) : from os . path import dirname , join , exists import re require_fpath = join ( dirname ( __file__ ) , fname ) if exists ( require_fpath ) : with open ( require_fpath , 'r' ) as f : packages = [ ] for line in f . readlines ( ) : line = line . strip ( ) if line and ...
Parse the package dependencies listed in a requirements file but strips specific versioning information .
33,180
def benchmark(repeat=10):
    """Benchmark cyordereddict against collections.OrderedDict.

    Prints an rst table when tabulate is available, raw results otherwise.
    """
    columns = ['Test', 'Code', 'Ratio (stdlib / cython)']
    results = _calculate_benchmarks(repeat)
    try:
        from tabulate import tabulate
        print(tabulate(results, columns, 'rst'))
    except ImportError:
        print(columns)
        print(results)
Benchmark cyordereddict . OrderedDict against collections . OrderedDict
33,181
def _get_update_fields ( model , uniques , to_update ) : fields = { field . attname : field for field in model . _meta . fields } if to_update is None : to_update = [ field . attname for field in model . _meta . fields ] to_update = [ attname for attname in to_update if ( attname not in uniques and not getattr ( fields...
Get the fields to be updated in an upsert .
33,182
def _fill_auto_fields(model, values):
    """Stamp auto_now / auto_now_add fields on each model object.

    Needed for upserts, where Django's ORM does not fill these itself.
    """
    auto_names = [
        f.attname for f in model._meta.fields
        if getattr(f, 'auto_now', False) or getattr(f, 'auto_now_add', False)
    ]
    now = timezone.now()
    for obj in values:
        for name in auto_names:
            setattr(obj, name, now)
    return values
Given a list of models fill in auto_now and auto_now_add fields for upserts . Since django manager utils passes Django s ORM these values have to be automatically constructed
33,183
def _sort_by_unique_fields ( model , model_objs , unique_fields ) : unique_fields = [ field for field in model . _meta . fields if field . attname in unique_fields ] def sort_key ( model_obj ) : return tuple ( field . get_db_prep_save ( getattr ( model_obj , field . attname ) , connection ) for field in unique_fields )...
Sort a list of models by their unique fields .
33,184
def _fetch ( queryset , model_objs , unique_fields , update_fields , returning , sync , ignore_duplicate_updates = True , return_untouched = False ) : model = queryset . model if ( return_untouched or sync ) and returning is not True : returning = set ( returning ) if returning else set ( ) returning . add ( model . _m...
Perfom the upsert and do an optional sync operation
33,185
def upsert ( queryset , model_objs , unique_fields , update_fields = None , returning = False , sync = False , ignore_duplicate_updates = True , return_untouched = False ) : queryset = queryset if isinstance ( queryset , models . QuerySet ) else queryset . objects . all ( ) model = queryset . model _fill_auto_fields ( ...
Perform a bulk upsert on a table optionally syncing the results .
33,186
def _get_upserts_distinct ( queryset , model_objs_updated , model_objs_created , unique_fields ) : created_models = [ ] if model_objs_created : created_models . extend ( queryset . extra ( where = [ '({unique_fields_sql}) in %s' . format ( unique_fields_sql = ', ' . join ( unique_fields ) ) ] , params = [ tuple ( [ tup...
Given a list of model objects that were updated and model objects that were created fetch the pks of the newly created models and return the two lists in a tuple
33,187
def _get_model_objs_to_update_and_create ( model_objs , unique_fields , update_fields , extant_model_objs ) : model_objs_to_update , model_objs_to_create = list ( ) , list ( ) for model_obj in model_objs : extant_model_obj = extant_model_objs . get ( tuple ( getattr ( model_obj , field ) for field in unique_fields ) , ...
Used by bulk_upsert to gather lists of models that should be updated and created .
33,188
def _get_prepped_model_field(model_obj, field):
    """Return a model object's field value prepared for the database."""
    fobj = model_obj._meta.get_field(field)
    raw = getattr(model_obj, fobj.attname)
    return fobj.get_db_prep_save(raw, connection)
Gets the value of a field of a model obj that is prepared for the db .
33,189
def get_or_none(queryset, **query_params):
    """Return the matching object, or None when it does not exist."""
    try:
        return queryset.get(**query_params)
    except queryset.model.DoesNotExist:
        return None
Get an object or return None if it doesn't exist.
33,190
def bulk_update ( manager , model_objs , fields_to_update ) : value_fields = [ manager . model . _meta . pk . attname ] + fields_to_update row_values = [ [ _get_prepped_model_field ( model_obj , field_name ) for field_name in value_fields ] for model_obj in model_objs ] if len ( row_values ) == 0 or len ( fields_to_upd...
Bulk updates a list of model objects that are already saved .
33,191
def upsert ( manager , defaults = None , updates = None , ** kwargs ) : defaults = defaults or { } defaults . update ( updates or { } ) obj , created = manager . get_or_create ( defaults = defaults , ** kwargs ) if updates is not None and not created and any ( getattr ( obj , k ) != updates [ k ] for k in updates ) : f...
Performs an update on an object or an insert if the object does not exist .
33,192
def bulk_create(self, *args, **kwargs):
    """bulk_create that emits post_bulk_operation when finished."""
    result = super(ManagerUtilsQuerySet, self).bulk_create(*args, **kwargs)
    post_bulk_operation.send(sender=self.model, model=self.model)
    return result
Overrides Django s bulk_create function to emit a post_bulk_operation signal when bulk_create is finished .
33,193
def update(self, **kwargs):
    """update() that emits post_bulk_operation when it completes."""
    result = super(ManagerUtilsQuerySet, self).update(**kwargs)
    post_bulk_operation.send(sender=self.model, model=self.model)
    return result
Overrides Django s update method to emit a post_bulk_operation signal when it completes .
33,194
def process_request ( self , request ) : token = request . GET . get ( TOKEN_NAME ) user = None if token is None else authenticate ( url_auth_token = token ) if hasattr ( request , 'session' ) and user is not None : login ( request , user ) if ( hasattr ( request , 'user' ) and request . method == 'GET' and not self . ...
Log user in if request contains a valid login token .
33,195
def get_redirect(request):
    """Redirect to the same URL with the auth token parameter stripped."""
    params = request.GET.copy()
    params.pop(TOKEN_NAME)
    url = request.path
    if params:
        url = '%s?%s' % (url, urlencode(params))
    return redirect(url)
Create a HTTP redirect response that removes the token from the URL .
33,196
def sign(self, data):
    """Create a URL-safe signed token from raw bytes."""
    encoded = signing.b64_encode(data).decode()
    return self.signer.sign(encoded)
Create an URL - safe signed token from data .
33,197
def unsign(self, token):
    """Extract and decode the data carried by a signed token.

    Enforces max_age when one is configured.
    """
    if self.max_age is None:
        payload = self.signer.unsign(token)
    else:
        payload = self.signer.unsign(token, max_age=self.max_age)
    return signing.b64_decode(payload.encode())
Extract the data from a signed token .
33,198
def get_revocation_key(self, user):
    """Build the value whose change revokes previously issued tokens."""
    parts = []
    if self.invalidate_on_password_change:
        parts.append(user.password)
    if self.one_time:
        parts.append(str(user.last_login))
    return ''.join(parts)
When the value returned by this method changes, previously issued tokens are revoked.
33,199
def create_token(self, user):
    """Sign a token binding the user's pk to their revocation key hash."""
    key_hash = crypto.pbkdf2(
        self.get_revocation_key(user),
        self.salt,
        self.iterations,
        digest=self.digest,
    )
    return self.sign(self.packer.pack_pk(user.pk) + key_hash)
Create a signed token from a user .