idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
19,300
def _multicomplex2(f, fx, x, h):
    """Calculate Hessian with Bicomplex-step derivative approximation.

    f : callable evaluated at bicomplex perturbations of x.
    fx : f(x); not referenced here, kept for a uniform stencil signature.
    x : point of evaluation; h : per-coordinate step sizes.
    Returns an (n, n) symmetric Hessian approximation.
    """
    n = len(x)
    ee = np.diag(h)
    # hess initially holds the step products h[i]*h[j] used as divisors.
    hess = np.outer(h, h)
    cmplx_wrap = Bicomplex.__array_wrap__
    for i in range(n):
        for j in range(i, n):
            # Perturb coordinate i along the first imaginary unit and
            # coordinate j along the second; imag12 extracts the mixed
            # second derivative d2f/(dx_i dx_j).
            zph = Bicomplex(x + 1j * ee[i, :], ee[j, :])
            hess[i, j] = cmplx_wrap(f(zph)).imag12 / hess[j, i]
            hess[j, i] = hess[i, j]  # enforce symmetry
    return hess
Calculate Hessian with Bicomplex - step derivative approximation
19,301
def _central_even ( f , fx , x , h ) : n = len ( x ) ee = np . diag ( h ) dtype = np . result_type ( fx ) hess = np . empty ( ( n , n ) , dtype = dtype ) np . outer ( h , h , out = hess ) for i in range ( n ) : hess [ i , i ] = ( f ( x + 2 * ee [ i , : ] ) - 2 * fx + f ( x - 2 * ee [ i , : ] ) ) / ( 4. * hess [ i , i ] ) for j in range ( i + 1 , n ) : hess [ i , j ] = ( f ( x + ee [ i , : ] + ee [ j , : ] ) - f ( x + ee [ i , : ] - ee [ j , : ] ) - f ( x - ee [ i , : ] + ee [ j , : ] ) + f ( x - ee [ i , : ] - ee [ j , : ] ) ) / ( 4. * hess [ j , i ] ) hess [ j , i ] = hess [ i , j ] return hess
Equation 9.
19,302
def _central2 ( f , fx , x , h ) : n = len ( x ) ee = np . diag ( h ) dtype = np . result_type ( fx ) g = np . empty ( n , dtype = dtype ) gg = np . empty ( n , dtype = dtype ) for i in range ( n ) : g [ i ] = f ( x + ee [ i ] ) gg [ i ] = f ( x - ee [ i ] ) hess = np . empty ( ( n , n ) , dtype = dtype ) np . outer ( h , h , out = hess ) for i in range ( n ) : for j in range ( i , n ) : hess [ i , j ] = ( f ( x + ee [ i , : ] + ee [ j , : ] ) + f ( x - ee [ i , : ] - ee [ j , : ] ) - g [ i ] - g [ j ] + fx - gg [ i ] - gg [ j ] + fx ) / ( 2 * hess [ j , i ] ) hess [ j , i ] = hess [ i , j ] return hess
Equation 8.
19,303
def directionaldiff(f, x0, vec, **options):
    """Return directional derivative of a function of n variables at x0
    along the direction vec.

    Extra options are forwarded to Derivative.
    Raises ValueError when vec and x0 differ in size.
    """
    x0 = np.asarray(x0)
    vec = np.asarray(vec)
    if x0.size != vec.size:
        raise ValueError('vec and x0 must be the same shapes')
    # Normalize the direction, then evaluate d/dt f(x0 + t*vec) at t=0.
    vec = np.reshape(vec / np.linalg.norm(vec.ravel()), x0.shape)
    return Derivative(lambda t: f(x0 + t * vec), **options)(0)
Return directional derivative of a function of n variables
19,304
def valarray(shape, value=np.nan, typecode=None):
    """Return an array of the given shape filled with `value`.

    The result dtype is the promotion of `typecode` (default bool) with
    the type of `value`, matching the historical `ones * value` behavior.

    Note: default changed from the `np.NaN` alias (removed in NumPy 2.0)
    to the canonical `np.nan`; the value is identical.
    """
    if typecode is None:
        typecode = bool
    # Multiply rather than np.full so that dtype promotion with `value`
    # is preserved (np.full would cast `value` down to `typecode`).
    out = np.ones(shape, dtype=typecode) * value
    if not isinstance(out, np.ndarray):
        out = np.asarray(out)
    return out
Return an array of all value .
19,305
def nominal_step(x=None):
    """Return the nominal step size for the point x.

    With no argument the nominal step is 1.0; otherwise it grows with
    log1p(|x|), never falling below 1.0.
    """
    if x is None:
        return 1.0
    return np.maximum(np.log1p(np.abs(x)), 1.0)
Return nominal step
19,306
def rule(self):
    """Return finite differencing rule.

    The rule is a weight vector; convolved with the sequence of function
    differences it yields the derivative estimate.  Computed rules are
    cached in the module-level FD_RULES dict keyed by
    (step_ratio, parity, num_terms).
    """
    step_ratio = self.step_ratio
    method = self.method
    # Multicomplex methods and 0th-order derivatives need no weighting.
    if method in ('multicomplex',) or self.n == 0:
        return np.ones((1,))
    order, method_order = self.n - 1, self._method_order
    parity = self._parity(method, order, method_order)
    step = self._richardson_step()
    num_terms, ix = (order + method_order) // step, order // step
    fd_rules = FD_RULES.get((step_ratio, parity, num_terms))
    if fd_rules is None:
        # Build the finite-difference matrix and pseudo-invert it; row ix
        # of the inverse is the weight vector for this derivative order.
        fd_mat = self._fd_matrix(step_ratio, parity, num_terms)
        fd_rules = linalg.pinv(fd_mat)
        FD_RULES[(step_ratio, parity, num_terms)] = fd_rules
    if self._flip_fd_rule:
        return -fd_rules[ix]
    return fd_rules[ix]
Return finite differencing rule .
19,307
def apply(self, f_del, h):
    """Apply finite difference rule along the first axis.

    f_del : function differences per step; h : the step sizes.
    Returns (derivative estimates, matching steps), truncated to the
    entries fully covered by the convolution window.
    """
    fd_rule = self.rule
    ne = h.shape[0]
    nr = fd_rule.size - 1
    # Need more steps than rule taps for at least one valid output.
    _assert(nr < ne, 'num_steps ({0:d}) must be larger than ' '({1:d}) n + order - 1 = {2:d} + {3:d} -1' ' ({4:s})'.format(ne, nr + 1, self.n, self.order, self.method))
    f_diff = convolve(f_del, fd_rule[::-1], axis=0, origin=nr // 2)
    der_init = f_diff / (h ** self.n)
    ne = max(ne - nr, 1)
    return der_init[:ne], h[:ne]
Apply finite difference rule along the first axis .
19,308
def fd_weights_all(x, x0=0, n=1):
    """Return finite difference weights for derivatives of all orders
    0..n at x0, given grid points x.

    Returns an (n+1, len(x)) array: row k holds the weights for the
    k-th derivative.
    """
    num_points = len(x)
    _assert(n < num_points, 'len(x) must be larger than n')
    weight_matrix = np.zeros((num_points, n + 1))
    _fd_weights_all(weight_matrix, x, x0, n)
    return weight_matrix.T
Return finite difference weights for derivatives of all orders up to n .
19,309
def fd_derivative(fx, x, n=1, m=2):
    """Return the n'th derivative at all grid points using finite differences.

    fx : function values at the grid points x; m controls the stencil
    half-width.  One-sided stencils of size 2*mm+2 are used near the
    boundaries, centered stencils of size 2*mm+1 in the interior.
    """
    num_x = len(x)
    _assert(n < num_x, 'len(x) must be larger than n')
    _assert(num_x == len(fx), 'len(x) must be equal len(fx)')
    du = np.zeros_like(fx)
    mm = n // 2 + m
    size = 2 * mm + 2
    # One-sided estimates for the first and last mm points.
    for i in range(mm):
        du[i] = np.dot(fd_weights(x[:size], x0=x[i], n=n), fx[:size])
        du[-i - 1] = np.dot(fd_weights(x[-size:], x0=x[-i - 1], n=n), fx[-size:])
    # Centered estimates for the interior points.
    for i in range(mm, num_x - mm):
        du[i] = np.dot(fd_weights(x[i - mm:i + mm + 1], x0=x[i], n=n), fx[i - mm:i + mm + 1])
    return du
Return the n th derivative for all points using Finite Difference method .
19,310
def _poor_convergence ( z , r , f , bn , mvec ) : check_points = ( - 0.4 + 0.3j , 0.7 + 0.2j , 0.02 - 0.06j ) diffs = [ ] ftests = [ ] for check_point in check_points : rtest = r * check_point ztest = z + rtest ftest = f ( ztest ) comp = np . sum ( bn * np . power ( check_point , mvec ) ) ftests . append ( ftest ) diffs . append ( comp - ftest ) max_abs_error = np . max ( np . abs ( diffs ) ) max_f_value = np . max ( np . abs ( ftests ) ) return max_abs_error > 1e-3 * max_f_value
Test for poor convergence based on three function evaluations .
19,311
def _num_taylor_coefficients(n):
    """Return number of Taylor coefficients to compute for n derivatives.

    The count is a power of two (8, 16, 32, ...) chosen from n after an
    empirical correction, suitable for FFT evaluation.
    """
    _assert(n < 193, 'Number of derivatives too large. Must be less than 193')
    corrections = np.array([0, 0, 1, 3, 4, 7])
    adjusted = n - corrections[_get_logn(n)]
    return 2 ** (_get_logn(adjusted) + 3)
Return number of taylor coefficients
19,312
def richardson(vals, k, c=None):
    """Richardson extrapolation with parameter estimation.

    Extrapolates from vals[k-1] and vals[k]; when c is None it is
    estimated via richardson_parameter.
    """
    if c is None:
        c = richardson_parameter(vals, k)
    previous, current = vals[k - 1], vals[k]
    return current - (current - previous) / c
Richardson extrapolation with parameter estimation
19,313
def taylor(fun, z0=0, n=1, r=0.0061, num_extrap=3, step_ratio=1.6, **kwds):
    """Return Taylor coefficients of complex analytic function using FFT.

    Thin wrapper: builds a Taylor object with the given sampling radius r,
    extrapolation count and step ratio, and evaluates it at z0.
    """
    return Taylor(fun, n=n, r=r, num_extrap=num_extrap, step_ratio=step_ratio, **kwds)(z0)
Return Taylor coefficients of complex analytic function using FFT
19,314
def derivative(fun, z0, n=1, **kwds):
    """Calculate n-th derivative of complex analytic function using FFT.

    Converts Taylor coefficients to derivatives by multiplying with k!.
    With full_output the error estimate is scaled accordingly.
    """
    result = taylor(fun, z0, n=n, **kwds)
    # Number of coefficients actually computed (a power of two >= n).
    m = _num_taylor_coefficients(n)
    fact = factorial(np.arange(m))
    if kwds.get('full_output'):
        coefs, info_ = result
        info = _INFO(info_.error_estimate * fact, *info_[1:])
        return coefs * fact, info
    return result * fact
Calculate n - th derivative of complex analytic function using FFT
19,315
def default_char(self):
    """An empty character with default foreground and background colors.

    Honors DECSCNM (reverse video) if currently set.
    """
    is_reverse_video = mo.DECSCNM in self.mode
    return Char(data=" ", fg="default", bg="default",
                reverse=is_reverse_video)
An empty character with default foreground and background colors .
19,316
def reset(self):
    """Reset the terminal to its initial state."""
    self.dirty.update(range(self.lines))
    self.buffer.clear()
    self.margins = None
    # Auto-wrap (DECAWM) and visible cursor (DECTCEM) are on by default.
    self.mode = set([mo.DECAWM, mo.DECTCEM])
    self.title = ""
    self.icon_name = ""
    self.charset = 0
    self.g0_charset = cs.LAT1_MAP
    self.g1_charset = cs.VT100_MAP
    # Tab stops every 8 columns, as on hardware terminals.
    self.tabstops = set(range(8, self.columns, 8))
    self.cursor = Cursor(0, 0)
    self.cursor_position()
    self.saved_columns = None
Reset the terminal to its initial state .
19,317
def resize(self, lines=None, columns=None):
    """Resize the screen to the given size.

    When shrinking vertically, excess lines are removed via
    delete_lines; when shrinking horizontally, cells beyond the new
    width are dropped from every buffered line.  Growing needs no work
    because the buffer is sparse.
    """
    lines = lines or self.lines
    columns = columns or self.columns
    if lines == self.lines and columns == self.columns:
        return  # nothing to do
    self.dirty.update(range(lines))
    if lines < self.lines:
        self.save_cursor()
        self.cursor_position(0, 0)
        self.delete_lines(self.lines - lines)
        self.restore_cursor()
    if columns < self.columns:
        for line in self.buffer.values():
            for x in range(columns, self.columns):
                line.pop(x, None)
    self.lines, self.columns = lines, columns
    self.set_margins()
Resize the screen to the given size .
19,318
def set_margins(self, top=None, bottom=None):
    """Select top and bottom margins for the scrolling region.

    Margins are given 1-based and stored 0-based.  Calling with top in
    (None, 0) and bottom None resets the scrolling region.
    """
    if (top is None or top == 0) and bottom is None:
        # DECSTBM with no arguments resets the scrolling region.
        self.margins = None
        return
    margins = self.margins or Margins(0, self.lines - 1)
    if top is None:
        top = margins.top
    else:
        # Convert to 0-based and clamp to the screen.
        top = max(0, min(top - 1, self.lines - 1))
    if bottom is None:
        bottom = margins.bottom
    else:
        bottom = max(0, min(bottom - 1, self.lines - 1))
    # A scrolling region must span at least two lines.
    if bottom - top >= 1:
        self.margins = Margins(top, bottom)
        self.cursor_position()
Select top and bottom margins for the scrolling region .
19,319
def define_charset(self, code, mode):
    """Define G0 or G1 charset.

    Unknown codes and modes are silently ignored.
    """
    if code not in cs.MAPS:
        return
    if mode == "(":
        self.g0_charset = cs.MAPS[code]
    elif mode == ")":
        self.g1_charset = cs.MAPS[code]
Define G0 or G1 charset .
19,320
def index(self):
    """Move the cursor down one line in the same column.

    If the cursor is at the bottom margin, scroll the margin region up
    by one line instead.
    """
    top, bottom = self.margins or Margins(0, self.lines - 1)
    if self.cursor.y == bottom:
        self.dirty.update(range(self.lines))
        # Shift every line in the scroll region up by one ...
        for y in range(top, bottom):
            self.buffer[y] = self.buffer[y + 1]
        # ... and drop the old bottom line (a fresh one appears lazily).
        self.buffer.pop(bottom, None)
    else:
        self.cursor_down()
Move the cursor down one line in the same column . If the cursor is at the last line create a new line at the bottom .
19,321
def reverse_index(self):
    """Move the cursor up one line in the same column.

    If the cursor is at the top margin, scroll the margin region down
    by one line instead.
    """
    top, bottom = self.margins or Margins(0, self.lines - 1)
    if self.cursor.y == top:
        self.dirty.update(range(self.lines))
        # Shift every line in the scroll region down by one ...
        for y in range(bottom, top, -1):
            self.buffer[y] = self.buffer[y - 1]
        # ... and drop the old top line (a fresh one appears lazily).
        self.buffer.pop(top, None)
    else:
        self.cursor_up()
Move the cursor up one line in the same column . If the cursor is at the first line create a new line at the top .
19,322
def tab(self):
    """Move to the next tab stop, or the last column when no stop
    remains to the right of the cursor."""
    remaining = [stop for stop in sorted(self.tabstops)
                 if self.cursor.x < stop]
    self.cursor.x = remaining[0] if remaining else self.columns - 1
Move to the next tab stop, or to the end of the screen if there are none left.
19,323
def save_cursor(self):
    """Push the current cursor position onto the stack.

    Also records the charset state and the DECOM/DECAWM mode flags so
    a later restore can reinstate them.
    """
    self.savepoints.append(Savepoint(copy.copy(self.cursor), self.g0_charset, self.g1_charset, self.charset, mo.DECOM in self.mode, mo.DECAWM in self.mode))
Push the current cursor position onto the stack .
19,324
def erase_in_line(self, how=0, private=False):
    """Erase a line in a specific way.

    how=0: from the cursor to the end of line; how=1: from the start of
    the line through the cursor; how=2: the whole line.  `private` is
    accepted for protocol compatibility and ignored.
    """
    self.dirty.add(self.cursor.y)
    if how == 0:
        span = range(self.cursor.x, self.columns)
    elif how == 1:
        span = range(self.cursor.x + 1)
    elif how == 2:
        span = range(self.columns)
    row = self.buffer[self.cursor.y]
    for column in span:
        row[column] = self.cursor.attrs
Erase a line in a specific way .
19,325
def erase_in_display(self, how=0, *args, **kwargs):
    """Erases display in a specific way.

    how=0: from below the cursor line to the end; how=1: from the top to
    the cursor line (exclusive); how=2 or 3: the whole display.  The
    cursor's own line is then partially erased for modes 0 and 1.
    """
    if how == 0:
        interval = range(self.cursor.y + 1, self.lines)
    elif how == 1:
        interval = range(self.cursor.y)
    elif how == 2 or how == 3:
        interval = range(self.lines)
    self.dirty.update(interval)
    for y in interval:
        line = self.buffer[y]
        # Only overwrite cells that exist; untouched cells stay default.
        for x in line:
            line[x] = self.cursor.attrs
    if how == 0 or how == 1:
        self.erase_in_line(how)
Erases display in a specific way .
19,326
def clear_tab_stop(self, how=0):
    """Clear a horizontal tab stop.

    how=0 clears the stop at the cursor column; how=3 clears them all.
    """
    if how == 3:
        self.tabstops = set()
    elif how == 0:
        self.tabstops.discard(self.cursor.x)
Clear a horizontal tab stop .
19,327
def ensure_hbounds(self):
    """Ensure the cursor is within horizontal screen bounds."""
    clamped_low = max(0, self.cursor.x)
    self.cursor.x = min(clamped_low, self.columns - 1)
Ensure the cursor is within horizontal screen bounds .
19,328
def ensure_vbounds(self, use_margins=None):
    """Ensure the cursor is within vertical screen bounds.

    When use_margins is truthy (or DECOM origin mode is active) and
    margins are set, the cursor is clamped to the margin region instead
    of the full screen.
    """
    if self.margins is not None and (use_margins or mo.DECOM in self.mode):
        top, bottom = self.margins
    else:
        top, bottom = 0, self.lines - 1
    self.cursor.y = max(top, min(self.cursor.y, bottom))
Ensure the cursor is within vertical screen bounds .
19,329
def cursor_position(self, line=None, column=None):
    """Set the cursor to a specific line and column (1-based).

    In DECOM origin mode the line is relative to the top margin and
    requests landing outside the margins are ignored.
    """
    x = (column or 1) - 1
    y = (line or 1) - 1
    if self.margins is not None and mo.DECOM in self.mode:
        y += self.margins.top
        # Origin mode: silently drop out-of-margin positioning.
        if not self.margins.top <= y <= self.margins.bottom:
            return
    self.cursor.x = x
    self.cursor.y = y
    self.ensure_hbounds()
    self.ensure_vbounds()
Set the cursor to a specific line and column .
19,330
def cursor_to_line(self, line=None):
    """Move cursor to a specific line (1-based) in the current column.

    In DECOM origin mode the line is relative to the top margin.
    """
    target = (line or 1) - 1
    if mo.DECOM in self.mode:
        target += self.margins.top
    self.cursor.y = target
    self.ensure_vbounds()
Move cursor to a specific line in the current column .
19,331
def alignment_display(self):
    """Fill the screen with uppercase "E" characters (DECALN), used for
    screen focus and alignment."""
    self.dirty.update(range(self.lines))
    for row in range(self.lines):
        line = self.buffer[row]
        for col in range(self.columns):
            line[col] = line[col]._replace(data="E")
Fills screen with uppercase E s for screen focus and alignment .
19,332
def select_graphic_rendition(self, *attrs):
    """Set display attributes (SGR).

    Bug fix: the background branch tested membership in ``g.BG`` but
    indexed ``g.BG_ANSI`` — the test and the lookup must use the same
    mapping or a mismatched key raises KeyError.
    """
    replace = {}
    if not attrs or attrs == (0, ):
        # SGR 0 (or no parameters): reset everything to defaults.
        self.cursor.attrs = self.default_char
        return
    else:
        # Pop from the end, so reverse to process in given order.
        attrs = list(reversed(attrs))
    while attrs:
        attr = attrs.pop()
        if attr == 0:
            replace.update(self.default_char._asdict())
        elif attr in g.FG_ANSI:
            replace["fg"] = g.FG_ANSI[attr]
        elif attr in g.BG_ANSI:  # was `attr in g.BG`: wrong mapping
            replace["bg"] = g.BG_ANSI[attr]
        elif attr in g.TEXT:
            # Text attributes encode on/off as a "+"/"-" prefix.
            attr = g.TEXT[attr]
            replace[attr[1:]] = attr.startswith("+")
        elif attr in g.FG_AIXTERM:
            replace.update(fg=g.FG_AIXTERM[attr], bold=True)
        elif attr in g.BG_AIXTERM:
            replace.update(bg=g.BG_AIXTERM[attr], bold=True)
        elif attr in (g.FG_256, g.BG_256):
            key = "fg" if attr == g.FG_256 else "bg"
            try:
                n = attrs.pop()
                if n == 5:
                    # 256-color palette index.
                    m = attrs.pop()
                    replace[key] = g.FG_BG_256[m]
                elif n == 2:
                    # 24-bit truecolor as a hex triplet.
                    replace[key] = "{0:02x}{1:02x}{2:02x}".format(attrs.pop(), attrs.pop(), attrs.pop())
            except IndexError:
                pass  # truncated sequence: ignore the incomplete color
    self.cursor.attrs = self.cursor.attrs._replace(**replace)
Set display attributes .
19,333
def report_device_attributes(self, mode=0, **kwargs):
    """Report terminal identity (DA).

    Only the primary (non-private) request with mode 0 is answered.
    """
    if mode != 0 or kwargs.get("private"):
        return
    self.write_process_input(ctrl.CSI + "?6c")
Report terminal identity .
19,334
def report_device_status(self, mode):
    """Report terminal status (DSR, mode 5) or cursor position (CPR, mode 6)."""
    if mode == 5:
        self.write_process_input(ctrl.CSI + "0n")
    elif mode == 6:
        col = self.cursor.x + 1
        row = self.cursor.y + 1
        if mo.DECOM in self.mode:
            # In origin mode the row is reported relative to the top margin.
            row -= self.margins.top
        self.write_process_input(ctrl.CSI + "{0};{1}R".format(row, col))
Report terminal status or cursor position .
19,335
def before_event(self, event):
    """Ensure the screen is at the bottom of the history buffer before
    handling any event other than paging."""
    if event in ("prev_page", "next_page"):
        return
    while self.history.position < self.history.size:
        self.next_page()
Ensure a screen is at the bottom of the history buffer .
19,336
def erase_in_display(self, how=0, *args, **kwargs):
    """Overloaded to reset history state on "erase saved lines" (how=3)."""
    super(HistoryScreen, self).erase_in_display(how, *args, **kwargs)
    if how == 3:
        self._reset_history()
Overloaded to reset history state .
19,337
def index(self):
    """Overloaded to update top history with the removed lines."""
    top, bottom = self.margins or Margins(0, self.lines - 1)
    # When about to scroll, save the line leaving the screen at the top.
    if self.cursor.y == bottom:
        self.history.top.append(self.buffer[top])
    super(HistoryScreen, self).index()
Overloaded to update top history with the removed lines .
19,338
def prev_page(self):
    """Move the screen page up through the history buffer.

    Page size is defined by history.ratio, so for instance ratio=.5
    means half the screen is restored from history on page switch.
    """
    if self.history.position > self.lines and self.history.top:
        mid = min(len(self.history.top), int(math.ceil(self.lines * self.history.ratio)))
        # Push the lines leaving the bottom of the screen onto bottom history.
        self.history.bottom.extendleft(self.buffer[y] for y in range(self.lines - 1, self.lines - mid - 1, -1))
        self.history = self.history._replace(position=self.history.position - mid)
        # Shift the remaining screen content down ...
        for y in range(self.lines - 1, mid - 1, -1):
            self.buffer[y] = self.buffer[y - mid]
        # ... and restore `mid` lines from top history.
        for y in range(mid - 1, -1, -1):
            self.buffer[y] = self.history.top.pop()
        self.dirty = set(range(self.lines))
Move the screen page up through the history buffer . Page size is defined by history . ratio so for instance ratio = . 5 means that half the screen is restored from history on page switch .
19,339
def next_page(self):
    """Move the screen page down through the history buffer."""
    if self.history.position < self.history.size and self.history.bottom:
        mid = min(len(self.history.bottom), int(math.ceil(self.lines * self.history.ratio)))
        # Push the lines leaving the top of the screen onto top history.
        self.history.top.extend(self.buffer[y] for y in range(mid))
        self.history = self.history._replace(position=self.history.position + mid)
        # Shift the remaining screen content up ...
        for y in range(self.lines - mid):
            self.buffer[y] = self.buffer[y + mid]
        # ... and restore `mid` lines from bottom history.
        for y in range(self.lines - mid, self.lines):
            self.buffer[y] = self.history.bottom.popleft()
        self.dirty = set(range(self.lines))
Move the screen page down through the history buffer .
19,340
def attach(self, screen):
    """Adds a given screen to the listener queue.

    Only one listener is supported; attaching replaces any existing one
    (with a DeprecationWarning).  In strict mode the screen must provide
    every event handler this stream can dispatch.
    """
    if self.listener is not None:
        warnings.warn("As of version 0.6.0 the listener queue is " "restricted to a single element. Existing " "listener {0} will be replaced.".format(self.listener), DeprecationWarning)
    if self.strict:
        for event in self.events:
            if not hasattr(screen, event):
                raise TypeError("{0} is missing {1}".format(screen, event))
    self.listener = screen
    # A new listener invalidates the parser coroutine; rebuild it.
    self._parser = None
    self._initialize_parser()
Adds a given screen to the listener queue .
19,341
def feed(self, data):
    """Consume some data and advances the state as necessary.

    Plain text is matched in bulk and sent straight to the listener's
    draw(); other bytes are fed one at a time to the parser coroutine,
    whose return value signals whether plain-text mode may resume.
    """
    # Cache bound methods/attributes as locals for the hot loop.
    send = self._send_to_parser
    draw = self.listener.draw
    match_text = self._text_pattern.match
    taking_plain_text = self._taking_plain_text
    length = len(data)
    offset = 0
    while offset < length:
        if taking_plain_text:
            match = match_text(data, offset)
            if match:
                start, offset = match.span()
                draw(data[start:offset])
            else:
                taking_plain_text = False
        else:
            taking_plain_text = send(data[offset:offset + 1])
            offset += 1
    self._taking_plain_text = taking_plain_text
Consume some data and advances the state as necessary .
19,342
async def on_shutdown(app):
    """Closes all WS connections on shutdown.

    Sets the module-level is_shutting_down flag, then cancels and awaits
    every registered websocket task.
    """
    global is_shutting_down
    is_shutting_down = True
    for task in app["websockets"]:
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            pass  # cancellation is the expected outcome
Closes all WS connections on shutdown .
19,343
def rmsd(V, W):
    """Calculate root-mean-square deviation from two sets of vectors V and W.

    V and W are N x D sequences of paired points.
    """
    num_points = len(V)
    dim = len(V[0])
    total = sum(
        (v[axis] - w[axis]) ** 2.0
        for v, w in zip(V, W)
        for axis in range(dim)
    )
    return np.sqrt(total / num_points)
Calculate Root - mean - square deviation from two sets of vectors V and W .
19,344
def kabsch_rmsd(P, Q, translate=False):
    """Rotate matrix P onto Q using the Kabsch algorithm and calculate
    the RMSD.

    With translate=True both sets are first centered on their centroids.
    """
    if translate:
        P = P - centroid(P)
        Q = Q - centroid(Q)
    aligned = kabsch_rotate(P, Q)
    return rmsd(aligned, Q)
Rotate matrix P unto Q using Kabsch algorithm and calculate the RMSD .
19,345
def kabsch_rotate(P, Q):
    """Rotate matrix P onto matrix Q using the Kabsch algorithm."""
    rotation = kabsch(P, Q)
    return np.dot(P, rotation)
Rotate matrix P unto matrix Q using Kabsch algorithm .
19,346
def kabsch(P, Q):
    """Compute the optimal rotation matrix mapping P onto Q with the
    Kabsch algorithm.

    P and Q are N x D point sets, assumed centered on their centroids.
    Returns the D x D rotation matrix U.
    """
    covariance = np.dot(P.T, Q)
    V, S, W = np.linalg.svd(covariance)
    # A negative determinant product means the best orthogonal map is a
    # reflection; flip the last singular direction to get a pure rotation.
    if np.linalg.det(V) * np.linalg.det(W) < 0.0:
        S[-1] = -S[-1]
        V[:, -1] = -V[:, -1]
    return np.dot(V, W)
Using the Kabsch algorithm with two sets of paired point P and Q centered around the centroid . Each vector set is represented as an NxD matrix where D is the the dimension of the space .
19,347
def quaternion_rotate(X, Y):
    """Calculate the rotation aligning X to Y via the quaternion method.

    Returns a 3x3 rotation matrix (via quaternion_transform).
    """
    N = X.shape[0]
    W = np.asarray([makeW(*Y[k]) for k in range(N)])
    Q = np.asarray([makeQ(*X[k]) for k in range(N)])
    Qt_dot_W = np.asarray([np.dot(Q[k].T, W[k]) for k in range(N)])
    # NOTE(review): W_minus_Q is computed but never used below.
    W_minus_Q = np.asarray([W[k] - Q[k] for k in range(N)])
    A = np.sum(Qt_dot_W, axis=0)
    # The eigenvector for the largest eigenvalue encodes the rotation.
    eigen = np.linalg.eigh(A)
    r = eigen[1][:, eigen[0].argmax()]
    rot = quaternion_transform(r)
    return rot
Calculate the rotation
19,348
def reorder_distance(p_atoms, q_atoms, p_coord, q_coord):
    """Re-order the atoms of Q to match P, per element, by ranking each
    atom's distance from the origin.

    Returns an integer index array `view` such that q_coord[view]
    pairs with p_coord.
    """
    view_reorder = np.zeros(q_atoms.shape, dtype=int)
    for element in np.unique(p_atoms):
        (p_idx,) = np.where(p_atoms == element)
        (q_idx,) = np.where(q_atoms == element)
        p_rank = np.argsort(np.linalg.norm(p_coord[p_idx], axis=1))
        q_rank = np.argsort(np.linalg.norm(q_coord[q_idx], axis=1))
        # Map each P atom (original order) to the Q atom of matching rank.
        matched = q_rank[np.argsort(p_rank)]
        view_reorder[p_idx] = q_idx[matched]
    return view_reorder
Re - orders the input atom list and xyz coordinates by atom type and then by distance of each atom from the centroid .
19,349
def hungarian(A, B):
    """Hungarian reordering: match rows of B to rows of A by minimizing
    total Euclidean distance (linear sum assignment).

    Returns the column indices permuting B onto A.
    """
    cost_matrix = cdist(A, B, 'euclidean')
    _, column_indices = linear_sum_assignment(cost_matrix)
    return column_indices
Hungarian reordering .
19,350
def brute_permutation(A, B):
    """Re-order the rows of B by brute force: try every permutation and
    keep the one minimizing the Kabsch RMSD against A.

    Returns the best permutation as an index sequence.
    """
    rmsd_min = np.inf
    view_min = None
    num_atoms = A.shape[0]
    initial_order = list(range(num_atoms))
    for reorder_indices in generate_permutations(initial_order, num_atoms):
        coords_ordered = B[reorder_indices]
        rmsd_temp = kabsch_rmsd(A, coords_ordered)
        if rmsd_temp < rmsd_min:
            rmsd_min = rmsd_temp
            # Deep-copy: the generator may reuse/mutate its buffer.
            view_min = copy.deepcopy(reorder_indices)
    return view_min
Re - orders the input atom list and xyz coordinates using the brute force method of permuting all rows of the input coordinates
19,351
def check_reflections(p_atoms, q_atoms, p_coord, q_coord, reorder_method=reorder_hungarian, rotation_method=kabsch_rmsd, keep_stereo=False):
    """Minimize RMSD using reflection planes for molecule P and Q.

    Tries every axis swap/reflection combination of Q (optionally only
    stereochemistry-preserving ones), reorders its atoms, and keeps the
    combination with the lowest RMSD.

    Returns (min_rmsd, min_swap, min_reflection, min_review).
    """
    min_rmsd = np.inf
    min_swap = None
    min_reflection = None
    min_review = None
    tmp_review = None
    # Parities of the axis swaps / reflections; a product of -1 means the
    # combination inverts stereochemistry.
    swap_mask = [1, -1, -1, 1, -1, 1]
    reflection_mask = [1, -1, -1, -1, 1, 1, 1, -1]
    for swap, i in zip(AXIS_SWAPS, swap_mask):
        for reflection, j in zip(AXIS_REFLECTIONS, reflection_mask):
            if keep_stereo and i * j == -1:
                continue  # skip enantiomer-producing combinations
            tmp_atoms = copy.copy(q_atoms)
            tmp_coord = copy.deepcopy(q_coord)
            tmp_coord = tmp_coord[:, swap]
            tmp_coord = np.dot(tmp_coord, np.diag(reflection))
            tmp_coord -= centroid(tmp_coord)
            if reorder_method is not None:
                tmp_review = reorder_method(p_atoms, tmp_atoms, p_coord, tmp_coord)
                tmp_coord = tmp_coord[tmp_review]
                tmp_atoms = tmp_atoms[tmp_review]
            if rotation_method is None:
                this_rmsd = rmsd(p_coord, tmp_coord)
            else:
                this_rmsd = rotation_method(p_coord, tmp_coord)
            if this_rmsd < min_rmsd:
                min_rmsd = this_rmsd
                min_swap = swap
                min_reflection = reflection
                min_review = tmp_review
    # Sanity check: the winning reordering must align the atom labels.
    if not (p_atoms == q_atoms[min_review]).all():
        print("error: Not aligned")
        quit()
    return min_rmsd, min_swap, min_reflection, min_review
Minimize RMSD using reflection planes for molecule P and Q
19,352
def print_coordinates(atoms, V, title=""):
    """Print coordinates V with corresponding atoms to stdout in XYZ format."""
    print(set_coordinates(atoms, V, title=title))
    return
Print coordinates V with corresponding atoms to stdout in XYZ format .
19,353
def get_coordinates_pdb(filename):
    """Get coordinates from the first chain in a PDB file.

    Returns (atoms, V): element symbols and an N x 3 coordinate array.
    Exits with an error message on unparsable lines.

    Fixes: the file handle is now closed via a context manager, `== None`
    replaced with `is None`, and bare `except:` narrowed to Exception so
    SystemExit/KeyboardInterrupt are no longer swallowed.
    """
    x_column = None
    V = list()
    atoms = list()
    with open(filename, 'r') as f:
        lines = f.readlines()
    for line in lines:
        # Only the first chain: stop at the first terminator record.
        if line.startswith("TER") or line.startswith("END"):
            break
        if not line.startswith("ATOM"):
            continue
        tokens = line.split()
        # Infer the element from the atom-name column, e.g. "CA" -> "C",
        # "1HG" -> "H".
        try:
            atom = tokens[2][0]
            if atom in ("H", "C", "N", "O", "S", "P"):
                atoms.append(atom)
            else:
                atom = tokens[2][1]
                if atom == "H":
                    atoms.append(atom)
                else:
                    raise Exception
        except Exception:
            exit("error: Parsing atomtype for the following line: \n{0:s}".format(line))
        if x_column is None:
            # Locate the first run of three float-looking tokens (x, y, z).
            try:
                for i, x in enumerate(tokens):
                    if "." in x and "." in tokens[i + 1] and "." in tokens[i + 2]:
                        x_column = i
                        break
            except IndexError:
                exit("error: Parsing coordinates for the following line: \n{0:s}".format(line))
        try:
            V.append(np.asarray(tokens[x_column:x_column + 3], dtype=float))
        except Exception:
            # Fall back to the fixed-width PDB coordinate columns.
            try:
                x = line[30:38]
                y = line[38:46]
                z = line[46:54]
                V.append(np.asarray([x, y, z], dtype=float))
            except Exception:
                exit("error: Parsing input for the following line: \n{0:s}".format(line))
    V = np.asarray(V)
    atoms = np.asarray(atoms)
    assert V.shape[0] == atoms.size
    return atoms, V
Get coordinates from the first chain in a pdb file and return a vectorset with all the coordinates .
19,354
def get_coordinates_xyz(filename):
    """Get coordinates from `filename` in XYZ format.

    Returns (atoms, V): element symbols (upper-cased) and an N x 3
    coordinate array.  Exits with a message on malformed input.

    Fix: the file is now opened with a context manager so the handle is
    closed even when parsing exits early.
    """
    V = list()
    atoms = list()
    with open(filename, 'r') as f:
        try:
            n_atoms = int(f.readline())
        except ValueError:
            exit("error: Could not obtain the number of atoms in the .xyz file.")
        f.readline()  # skip the comment/title line
        for lines_read, line in enumerate(f):
            if lines_read == n_atoms:
                break
            # Element symbol: first alphabetic run on the line.
            atom = re.findall(r'[a-zA-Z]+', line)[0]
            atom = atom.upper()
            # Coordinates: decimal numbers with optional exponent.
            numbers = re.findall(r'[-]?\d+\.\d*(?:[Ee][-\+]\d+)?', line)
            numbers = [float(number) for number in numbers]
            if len(numbers) >= 3:
                V.append(np.array(numbers)[:3])
                atoms.append(atom)
            else:
                exit("Reading the .xyz file failed in line {0}. Please check the format.".format(lines_read + 2))
    atoms = np.array(atoms)
    V = np.array(V)
    return atoms, V
Get coordinates from filename and return a vectorset with all the coordinates in XYZ format .
19,355
def GetZipInfo(self):
    """Retrieves the ZIP info object.

    Returns:
      zipfile.ZipInfo: the cached info for this path spec's location, or
          None for the root location or when the member is not present.

    Raises:
      PathSpecError: if the path specification is missing a location or
          the location is invalid.
    """
    if not self._zip_info:
        location = getattr(self.path_spec, 'location', None)
        if location is None:
            raise errors.PathSpecError('Path specification missing location.')
        if not location.startswith(self._file_system.LOCATION_ROOT):
            raise errors.PathSpecError('Invalid location in path specification.')
        if len(location) == 1:
            # The root location has no corresponding archive member.
            return None
        zip_file = self._file_system.GetZipFile()
        try:
            self._zip_info = zip_file.getinfo(location[1:])
        except KeyError:
            pass  # member absent: leave _zip_info unset and return it as-is
    return self._zip_info
Retrieves the ZIP info object .
19,356
def _NormalizedVolumeIdentifiers(self, volume_system, volume_identifiers, prefix='v'):
    """Normalizes volume identifiers.

    Args:
      volume_system: volume system providing GetVolumeByIdentifier.
      volume_identifiers (list[int|str]): identifiers, as integers or
          strings with or without the prefix.
      prefix (Optional[str]): volume identifier prefix.

    Returns:
      list[str]: volume identifiers in "<prefix><number>" form.

    Raises:
      ScannerError: if no volume exists for an identifier.
    """
    normalized_volume_identifiers = []
    for volume_identifier in volume_identifiers:
        if isinstance(volume_identifier, int):
            volume_identifier = '{0:s}{1:d}'.format(prefix, volume_identifier)
        elif not volume_identifier.startswith(prefix):
            # Numeric strings get the prefix; other strings pass through.
            try:
                volume_identifier = int(volume_identifier, 10)
                volume_identifier = '{0:s}{1:d}'.format(prefix, volume_identifier)
            except (TypeError, ValueError):
                pass
        try:
            volume = volume_system.GetVolumeByIdentifier(volume_identifier)
        except KeyError:
            volume = None
        if not volume:
            raise errors.ScannerError('Volume missing for identifier: {0:s}.'.format(volume_identifier))
        normalized_volume_identifiers.append(volume_identifier)
    return normalized_volume_identifiers
Normalizes volume identifiers .
19,357
def _ScanVolume(self, scan_context, scan_node, base_path_specs):
    """Scans a volume scan node for volume and file systems.

    Recurses into sub nodes, unlocking encrypted volumes on the way and
    appending resulting file system path specifications to
    base_path_specs.

    Raises:
      ScannerError: if the scan node is invalid or missing.
    """
    if not scan_node or not scan_node.path_spec:
        raise errors.ScannerError('Invalid or missing scan node.')
    if scan_context.IsLockedScanNode(scan_node.path_spec):
        # The source scanner found a locked volume; try to unlock it.
        self._ScanEncryptedVolume(scan_context, scan_node)
        if scan_context.IsLockedScanNode(scan_node.path_spec):
            return  # still locked: skip this volume
    if scan_node.IsVolumeSystemRoot():
        self._ScanVolumeSystemRoot(scan_context, scan_node, base_path_specs)
    elif scan_node.IsFileSystem():
        self._ScanFileSystem(scan_node, base_path_specs)
    elif scan_node.type_indicator == definitions.TYPE_INDICATOR_VSHADOW:
        # "Current volume" of a VSS: access it via a TSK path spec.
        path_spec = path_spec_factory.Factory.NewPathSpec(definitions.TYPE_INDICATOR_TSK, location='/', parent=scan_node.path_spec)
        base_path_specs.append(path_spec)
    else:
        for sub_scan_node in scan_node.sub_nodes:
            self._ScanVolume(scan_context, sub_scan_node, base_path_specs)
Scans a volume scan node for volume and file systems .
19,358
def _ScanVolumeSystemRoot(self, scan_context, scan_node, base_path_specs):
    """Scans a volume system root scan node for volume and file systems.

    Supports APFS containers and VSS; each contained volume's sub node is
    scanned in turn.

    Raises:
      ScannerError: if the scan node is invalid, the volume system type
          is unsupported, or a sub node is missing.
    """
    if not scan_node or not scan_node.path_spec:
        raise errors.ScannerError('Invalid scan node.')
    if scan_node.type_indicator == definitions.TYPE_INDICATOR_APFS_CONTAINER:
        volume_identifiers = self._GetAPFSVolumeIdentifiers(scan_node)
    elif scan_node.type_indicator == definitions.TYPE_INDICATOR_VSHADOW:
        volume_identifiers = self._GetVSSStoreIdentifiers(scan_node)
        # Process VSS stores newest first.
        volume_identifiers.reverse()
    else:
        raise errors.ScannerError('Unsupported volume system type: {0:s}.'.format(scan_node.type_indicator))
    for volume_identifier in volume_identifiers:
        location = '/{0:s}'.format(volume_identifier)
        sub_scan_node = scan_node.GetSubNodeByLocation(location)
        if not sub_scan_node:
            raise errors.ScannerError('Scan node missing for volume identifier: {0:s}.'.format(volume_identifier))
        self._ScanVolume(scan_context, sub_scan_node, base_path_specs)
Scans a volume system root scan node for volume and file systems .
19,359
def GetBasePathSpecs(self, source_path):
    """Determines the base path specifications.

    Args:
      source_path (str): path to a device, file or directory.

    Returns:
      list[PathSpec]: base path specifications of the detected file
          systems.

    Raises:
      ScannerError: if the source path is invalid, does not exist, or
          cannot be scanned.
    """
    if not source_path:
        raise errors.ScannerError('Invalid source path.')
    # Windows device paths ("\\\\.\\...") are not visible to os.path.exists.
    if (not source_path.startswith('\\\\.\\') and not os.path.exists(source_path)):
        raise errors.ScannerError('No such device, file or directory: {0:s}.'.format(source_path))
    scan_context = source_scanner.SourceScannerContext()
    scan_context.OpenSourcePath(source_path)
    try:
        self._source_scanner.Scan(scan_context)
    except (ValueError, errors.BackEndError) as exception:
        raise errors.ScannerError('Unable to scan source with error: {0!s}'.format(exception))
    self._source_path = source_path
    self._source_type = scan_context.source_type
    if self._source_type not in [definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE, definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE]:
        # Plain files and directories: the root node is the answer.
        scan_node = scan_context.GetRootScanNode()
        return [scan_node.path_spec]
    # Descend through single-child wrapper nodes (e.g. raw image layers).
    scan_node = scan_context.GetRootScanNode()
    while len(scan_node.sub_nodes) == 1:
        scan_node = scan_node.sub_nodes[0]
    base_path_specs = []
    if scan_node.type_indicator != definitions.TYPE_INDICATOR_TSK_PARTITION:
        self._ScanVolume(scan_context, scan_node, base_path_specs)
    else:
        # Partitioned media: scan each selected partition separately.
        partition_identifiers = self._GetTSKPartitionIdentifiers(scan_node)
        for partition_identifier in partition_identifiers:
            location = '/{0:s}'.format(partition_identifier)
            sub_scan_node = scan_node.GetSubNodeByLocation(location)
            self._ScanVolume(scan_context, sub_scan_node, base_path_specs)
    return base_path_specs
Determines the base path specifications .
19,360
def _ScanFileSystemForWindowsDirectory ( self , path_resolver ) : result = False for windows_path in self . _WINDOWS_DIRECTORIES : windows_path_spec = path_resolver . ResolvePath ( windows_path ) result = windows_path_spec is not None if result : self . _windows_directory = windows_path break return result
Scans a file system for a known Windows directory .
19,361
def OpenFile(self, windows_path):
    """Opens the file specified by the Windows path.

    Returns the file object, or None when the path does not resolve.
    """
    resolved_spec = self._path_resolver.ResolvePath(windows_path)
    if resolved_spec is None:
        return None
    return self._file_system.GetFileObjectByPathSpec(resolved_spec)
Opens the file specified by the Windows path.
19,362
def ScanForWindowsVolume(self, source_path):
    """Scans for a Windows volume.

    Opens the first detected file system, sets up a Windows path
    resolver rooted at it, locates the Windows directory and exports
    the SystemRoot/WinDir environment variables.

    Returns:
      bool: True if a Windows volume (with a Windows directory) was found.
    """
    windows_path_specs = self.GetBasePathSpecs(source_path)
    if (not windows_path_specs or self._source_type == definitions.SOURCE_TYPE_FILE):
        return False
    file_system_path_spec = windows_path_specs[0]
    self._file_system = resolver.Resolver.OpenFileSystem(file_system_path_spec)
    if file_system_path_spec.type_indicator == definitions.TYPE_INDICATOR_OS:
        mount_point = file_system_path_spec
    else:
        mount_point = file_system_path_spec.parent
    self._path_resolver = windows_path_resolver.WindowsPathResolver(self._file_system, mount_point)
    # Locate the Windows directory unless one was configured already.
    if not self._windows_directory:
        self._ScanFileSystemForWindowsDirectory(self._path_resolver)
    if not self._windows_directory:
        return False
    self._path_resolver.SetEnvironmentVariable('SystemRoot', self._windows_directory)
    self._path_resolver.SetEnvironmentVariable('WinDir', self._windows_directory)
    return True
Scans for a Windows volume .
19,363
def _FlushCache(cls, format_categories):
    """Flushes the cached scanner objects for the given format categories.

    Args:
      format_categories: collection of format category constants whose
          cached remainder list, scanner and specification store are reset.
    """
    # Each category caches three class attributes sharing a common prefix.
    category_to_prefix = (
        (definitions.FORMAT_CATEGORY_ARCHIVE, 'archive'),
        (definitions.FORMAT_CATEGORY_COMPRESSED_STREAM, 'compressed_stream'),
        (definitions.FORMAT_CATEGORY_FILE_SYSTEM, 'file_system'),
        (definitions.FORMAT_CATEGORY_STORAGE_MEDIA_IMAGE,
         'storage_media_image'),
        (definitions.FORMAT_CATEGORY_VOLUME_SYSTEM, 'volume_system'))
    for category, prefix in category_to_prefix:
        if category in format_categories:
            setattr(cls, '_{0:s}_remainder_list'.format(prefix), None)
            setattr(cls, '_{0:s}_scanner'.format(prefix), None)
            setattr(cls, '_{0:s}_store'.format(prefix), None)
Flushes the cached objects for the specified format categories .
19,364
def _GetSignatureScanner(cls, specification_store):
    """Creates a signature scanner from a specification store.

    Args:
      specification_store: store containing the format specifications.

    Returns:
      The initialized pysigscan scanner.
    """
    scanner_object = pysigscan.scanner()
    scanner_object.set_scan_buffer_size(cls._SCAN_BUFFER_SIZE)
    for format_specification in specification_store.specifications:
        for signature in format_specification.signatures:
            offset = signature.offset
            # A missing offset means the pattern may occur anywhere; a
            # negative offset is relative to the end of the data.
            if offset is None:
                flags = pysigscan.signature_flags.NO_OFFSET
            elif offset < 0:
                offset = -offset
                flags = pysigscan.signature_flags.RELATIVE_FROM_END
            else:
                flags = pysigscan.signature_flags.RELATIVE_FROM_START
            scanner_object.add_signature(
                signature.identifier, offset, signature.pattern, flags)
    return scanner_object
Initializes a signature scanner based on a specification store .
19,365
def _GetSpecificationStore(cls, format_category):
    """Builds the specification store for a format category.

    Args:
      format_category: format category constant to collect helpers for.

    Returns:
      tuple: the specification store of all signature-based specifications
          and the list of enabled helpers without a format specification.
    """
    store = specification.FormatSpecificationStore()
    non_signature_helpers = []
    for helper in cls._analyzer_helpers.values():
        if not helper.IsEnabled():
            continue
        if format_category not in helper.format_categories:
            continue
        format_specification = helper.GetFormatSpecification()
        if format_specification is None:
            # Helpers without a signature must analyze the data directly.
            non_signature_helpers.append(helper)
        else:
            store.AddSpecification(format_specification)
    return store, non_signature_helpers
Retrieves the specification store for specified format category .
19,366
def _GetTypeIndicators(
        cls, signature_scanner, specification_store, remainder_list,
        path_spec, resolver_context=None):
    """Determines the supported format type indicators of a file.

    Args:
      signature_scanner: pysigscan scanner for signature-based detection.
      specification_store: store mapping signatures back to specifications.
      remainder_list: helpers without a signature that analyze the file
          object directly.
      path_spec: path specification of the file to type.
      resolver_context: optional resolver context.

    Returns:
      list: type indicators of the formats detected in the file.
    """
    type_indicator_list = []
    file_object = resolver.Resolver.OpenFileObject(
        path_spec, resolver_context=resolver_context)
    scan_state = pysigscan.scan_state()
    try:
        signature_scanner.scan_file_object(scan_state, file_object)
        for scan_result in iter(scan_state.scan_results):
            format_specification = (
                specification_store.GetSpecificationBySignature(
                    scan_result.identifier))
            # Multiple signatures can map to the same specification;
            # deduplicate the indicators.
            if format_specification.identifier not in type_indicator_list:
                type_indicator_list.append(format_specification.identifier)
        # Helpers without signatures inspect the file object themselves.
        for analyzer_helper in remainder_list:
            result = analyzer_helper.AnalyzeFileObject(file_object)
            if result is not None:
                type_indicator_list.append(result)
    finally:
        # Always release the file object, even if scanning failed.
        file_object.close()
    return type_indicator_list
Determines if a file contains supported format types.
19,367
def DeregisterHelper(cls, analyzer_helper):
    """Deregisters a format analyzer helper.

    Args:
      analyzer_helper: helper whose type indicator is to be removed.

    Raises:
      KeyError: if the helper's type indicator is not registered.
    """
    type_indicator = analyzer_helper.type_indicator
    if type_indicator not in cls._analyzer_helpers:
        raise KeyError(
            'Analyzer helper object not set for type indicator: {0:s}.'.format(
                type_indicator))
    # Flush caches based on the registered helper, not the argument.
    registered_helper = cls._analyzer_helpers[type_indicator]
    cls._FlushCache(registered_helper.format_categories)
    del cls._analyzer_helpers[type_indicator]
Deregisters a format analyzer helper .
19,368
def GetArchiveTypeIndicators(cls, path_spec, resolver_context=None):
    """Determines the supported archive type indicators of a file.

    Args:
      path_spec: path specification of the file.
      resolver_context: optional resolver context.

    Returns:
      list: archive type indicators detected in the file.
    """
    # Lazily (re)build the class-level caches.
    if cls._archive_remainder_list is None or cls._archive_store is None:
        store, remainder = cls._GetSpecificationStore(
            definitions.FORMAT_CATEGORY_ARCHIVE)
        cls._archive_remainder_list = remainder
        cls._archive_store = store
    if cls._archive_scanner is None:
        cls._archive_scanner = cls._GetSignatureScanner(cls._archive_store)
    return cls._GetTypeIndicators(
        cls._archive_scanner, cls._archive_store,
        cls._archive_remainder_list, path_spec,
        resolver_context=resolver_context)
Determines if a file contains supported archive types.
19,369
def GetCompressedStreamTypeIndicators(cls, path_spec, resolver_context=None):
    """Determines the supported compressed stream type indicators of a file.

    Args:
      path_spec: path specification of the file.
      resolver_context: optional resolver context.

    Returns:
      list: compressed stream type indicators detected in the file.
    """
    # Lazily (re)build the class-level caches.
    if (cls._compressed_stream_remainder_list is None or
            cls._compressed_stream_store is None):
        store, remainder = cls._GetSpecificationStore(
            definitions.FORMAT_CATEGORY_COMPRESSED_STREAM)
        cls._compressed_stream_remainder_list = remainder
        cls._compressed_stream_store = store
    if cls._compressed_stream_scanner is None:
        cls._compressed_stream_scanner = cls._GetSignatureScanner(
            cls._compressed_stream_store)
    return cls._GetTypeIndicators(
        cls._compressed_stream_scanner, cls._compressed_stream_store,
        cls._compressed_stream_remainder_list, path_spec,
        resolver_context=resolver_context)
Determines if a file contains supported compressed stream types.
19,370
def GetFileSystemTypeIndicators(cls, path_spec, resolver_context=None):
    """Determines the supported file system type indicators of a file.

    Args:
      path_spec: path specification of the file.
      resolver_context: optional resolver context.

    Returns:
      list: file system type indicators detected in the file.
    """
    # Lazily (re)build the class-level caches.
    if (cls._file_system_remainder_list is None or
            cls._file_system_store is None):
        store, remainder = cls._GetSpecificationStore(
            definitions.FORMAT_CATEGORY_FILE_SYSTEM)
        cls._file_system_remainder_list = remainder
        cls._file_system_store = store
    if cls._file_system_scanner is None:
        cls._file_system_scanner = cls._GetSignatureScanner(
            cls._file_system_store)
    return cls._GetTypeIndicators(
        cls._file_system_scanner, cls._file_system_store,
        cls._file_system_remainder_list, path_spec,
        resolver_context=resolver_context)
Determines if a file contains supported file system types.
19,371
def GetStorageMediaImageTypeIndicators(cls, path_spec, resolver_context=None):
    """Determines the supported storage media image type indicators of a file.

    Args:
      path_spec: path specification of the file.
      resolver_context: optional resolver context.

    Returns:
      list: storage media image type indicators detected in the file.
    """
    # Lazily (re)build the class-level caches.
    if (cls._storage_media_image_remainder_list is None or
            cls._storage_media_image_store is None):
        store, remainder = cls._GetSpecificationStore(
            definitions.FORMAT_CATEGORY_STORAGE_MEDIA_IMAGE)
        cls._storage_media_image_remainder_list = remainder
        cls._storage_media_image_store = store
    if cls._storage_media_image_scanner is None:
        cls._storage_media_image_scanner = cls._GetSignatureScanner(
            cls._storage_media_image_store)
    return cls._GetTypeIndicators(
        cls._storage_media_image_scanner, cls._storage_media_image_store,
        cls._storage_media_image_remainder_list, path_spec,
        resolver_context=resolver_context)
Determines if a file contains supported storage media image types.
19,372
def GetVolumeSystemTypeIndicators(cls, path_spec, resolver_context=None):
    """Determines the supported volume system type indicators of a file.

    Args:
      path_spec: path specification of the file.
      resolver_context: optional resolver context.

    Returns:
      list: volume system type indicators detected in the file.
    """
    # Lazily (re)build the class-level caches.
    if (cls._volume_system_remainder_list is None or
            cls._volume_system_store is None):
        store, remainder = cls._GetSpecificationStore(
            definitions.FORMAT_CATEGORY_VOLUME_SYSTEM)
        cls._volume_system_remainder_list = remainder
        cls._volume_system_store = store
    if cls._volume_system_scanner is None:
        cls._volume_system_scanner = cls._GetSignatureScanner(
            cls._volume_system_store)
    return cls._GetTypeIndicators(
        cls._volume_system_scanner, cls._volume_system_store,
        cls._volume_system_remainder_list, path_spec,
        resolver_context=resolver_context)
Determines if a file contains supported volume system types.
19,373
def GetFsType(self):
    """Retrieves (and caches) the TSK file system type.

    Returns:
      The pytsk3 file system type; TSK_FS_TYPE_UNSUPP when it cannot be
      determined.
    """
    if self._tsk_fs_type is not None:
        return self._tsk_fs_type
    fs_type = pytsk3.TSK_FS_TYPE_UNSUPP
    # Guard against a missing file system object or one without info.
    if self._tsk_file_system and hasattr(self._tsk_file_system, 'info'):
        fs_type = getattr(
            self._tsk_file_system.info, 'ftype', pytsk3.TSK_FS_TYPE_UNSUPP)
    self._tsk_fs_type = fs_type
    return self._tsk_fs_type
Retrieves the file system type .
19,374
def GetTSKFileByPathSpec(self, path_spec):
    """Retrieves the SleuthKit file object for a path specification.

    The inode takes precedence over the location when both are present.

    Args:
      path_spec: path specification with an inode and/or location.

    Returns:
      The pytsk3 file object.

    Raises:
      PathSpecError: if the path specification has neither inode nor
          location.
    """
    inode_number = getattr(path_spec, 'inode', None)
    if inode_number is not None:
        return self._tsk_file_system.open_meta(inode=inode_number)
    location = getattr(path_spec, 'location', None)
    if location is not None:
        return self._tsk_file_system.open(location)
    raise errors.PathSpecError('Path specification missing inode and location.')
Retrieves the SleuthKit file object for a path specification .
19,375
def IsHFS(self):
    """Determines whether the file system is HFS, HFS+ or HFSX.

    Returns:
      bool: True if the file system type is an HFS variant.
    """
    return self.GetFsType() in (
        pytsk3.TSK_FS_TYPE_HFS, pytsk3.TSK_FS_TYPE_HFS_DETECT)
Determines if the file system is HFS HFS + or HFSX .
19,376
def IsNTFS(self):
    """Determines whether the file system is NTFS.

    Returns:
      bool: True if the file system type is NTFS.
    """
    return self.GetFsType() in (
        pytsk3.TSK_FS_TYPE_NTFS, pytsk3.TSK_FS_TYPE_NTFS_DETECT)
Determines if the file system is NTFS .
19,377
def Close(self):
    """Closes the database and removes the temporary on-disk copy."""
    if not self._connection:
        return
    self._cursor = None
    self._connection.close()
    self._connection = None
    # Best-effort removal of the temporary database copy.
    try:
        os.remove(self._temp_file_path)
    except (IOError, OSError):
        pass
    self._temp_file_path = ''
Closes the database file object .
19,378
def HasColumn(self, table_name, column_name):
    """Determines whether a table has a specific column.

    Column names are cached per table after the first lookup; the
    comparison is case-insensitive.

    Args:
      table_name (str): name of the table.
      column_name (str): name of the column.

    Returns:
      bool: True if the column exists in the table.

    Raises:
      IOError: if the database is not opened.
    """
    if not self._connection:
        raise IOError('Not opened.')
    if not column_name:
        return False
    table_key = table_name.lower()
    cached_names = self._column_names_per_table.get(table_key, None)
    if cached_names is None:
        cached_names = []
        self._cursor.execute(self._HAS_COLUMN_QUERY.format(table_key))
        for table_info_row in self._cursor.fetchall():
            # Column 1 of the table-info rows holds the column name.
            name_value = table_info_row[1]
            if not name_value:
                continue
            if isinstance(name_value, bytes):
                name_value = name_value.decode('utf-8')
            cached_names.append(name_value.lower())
        self._column_names_per_table[table_key] = cached_names
    return column_name.lower() in cached_names
Determines if a specific column exists .
19,379
def Open(self, file_object):
    """Opens the database file object.

    The database is copied to a temporary file on disk because sqlite3
    cannot read directly from a file-like object.

    Args:
      file_object: file-like object containing the SQLite database.

    Raises:
      IOError: if the file does not start with the SQLite signature.
      ValueError: if the file-like object is missing.
    """
    if not file_object:
        raise ValueError('Missing file-like object.')
    file_object.seek(0, os.SEEK_SET)
    data = file_object.read(len(self._HEADER_SIGNATURE))
    if data != self._HEADER_SIGNATURE:
        # NOTE(review): this closes the caller-supplied file object on a
        # signature mismatch but not on success — confirm intended.
        file_object.close()
        raise IOError('Unsupported SQLite database signature.')
    # delete=False keeps the file after closing so sqlite3 can open it by
    # path; it is removed later (see Close).
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        self._temp_file_path = temp_file.name
        # The signature bytes already read are the first chunk written.
        while data:
            temp_file.write(data)
            data = file_object.read(self._COPY_BUFFER_SIZE)
    self._connection = sqlite3.connect(self._temp_file_path)
    # Return raw bytes for TEXT columns instead of decoding to str.
    self._connection.text_factory = bytes
    self._cursor = self._connection.cursor()
Opens the database file object .
19,380
def Query(self, query, parameters=None):
    """Executes a query against the database and returns all rows.

    Args:
      query (str): SQL query to execute.
      parameters: optional parameters bound to the query placeholders.

    Returns:
      list: rows returned by the query.
    """
    if not parameters:
        self._cursor.execute(query)
    else:
        self._cursor.execute(query, parameters)
    return self._cursor.fetchall()
Queries the database file .
19,381
def _GetFileSystemCacheIdentifier ( self , path_spec ) : string_parts = [ ] string_parts . append ( getattr ( path_spec . parent , 'comparable' , '' ) ) string_parts . append ( 'type: {0:s}' . format ( path_spec . type_indicator ) ) return '' . join ( string_parts )
Determines the file system cache identifier for the path specification .
19,382
def CacheFileObject(self, path_spec, file_object):
    """Caches a file-like object under its path specification."""
    cache_key = path_spec.comparable
    self._file_object_cache.CacheObject(cache_key, file_object)
Caches a file - like object based on a path specification .
19,383
def CacheFileSystem(self, path_spec, file_system):
    """Caches a file system object under its derived cache identifier."""
    cache_key = self._GetFileSystemCacheIdentifier(path_spec)
    self._file_system_cache.CacheObject(cache_key, file_system)
Caches a file system object based on a path specification .
19,384
def ForceRemoveFileObject(self, path_spec):
    """Forces removal of a cached file-like object.

    Args:
      path_spec: path specification of the cached object.

    Returns:
      bool: True if an object was cached for the path specification.
    """
    cached = self._file_object_cache.GetCacheValue(path_spec.comparable)
    if not cached:
        return False
    # Close repeatedly until every outstanding reference is released.
    while not cached.IsDereferenced():
        cached.vfs_object.close()
    return True
Forces the removal of a file - like object based on a path specification .
19,385
def GetFileObjectReferenceCount(self, path_spec):
    """Retrieves the reference count of a cached file-like object.

    Returns:
      int: reference count, or None when the object is not cached.
    """
    cached = self._file_object_cache.GetCacheValue(path_spec.comparable)
    return cached.reference_count if cached else None
Retrieves the reference count of a cached file - like object .
19,386
def GetFileSystem(self, path_spec):
    """Retrieves a cached file system object for a path specification."""
    cache_key = self._GetFileSystemCacheIdentifier(path_spec)
    return self._file_system_cache.GetObject(cache_key)
Retrieves a file system object defined by path specification .
19,387
def GetFileSystemReferenceCount(self, path_spec):
    """Retrieves the reference count of a cached file system object.

    Returns:
      int: reference count, or None when the file system is not cached.
    """
    cache_key = self._GetFileSystemCacheIdentifier(path_spec)
    cached = self._file_system_cache.GetCacheValue(cache_key)
    return cached.reference_count if cached else None
Retrieves the reference count of a cached file system object .
19,388
def GrabFileSystem(self, path_spec):
    """Grabs (claims a reference to) a cached file system object."""
    cache_key = self._GetFileSystemCacheIdentifier(path_spec)
    self._file_system_cache.GrabObject(cache_key)
Grabs a cached file system object defined by path specification .
19,389
def ReleaseFileObject(self, file_object):
    """Releases a cached file-like object.

    Args:
      file_object: the cached file-like object.

    Returns:
      bool: True if the object is fully dereferenced and was removed from
          the cache.

    Raises:
      RuntimeError: if the object is not cached or the cache value is
          invalid.
    """
    cache_key, cached = self._file_object_cache.GetCacheValueByObject(
        file_object)
    if not cache_key:
        raise RuntimeError('Object not cached.')
    if not cached:
        raise RuntimeError('Invalid cache value.')
    self._file_object_cache.ReleaseObject(cache_key)
    if not cached.IsDereferenced():
        return False
    self._file_object_cache.RemoveObject(cache_key)
    return True
Releases a cached file - like object .
19,390
def ReleaseFileSystem(self, file_system):
    """Releases a cached file system object.

    Args:
      file_system: the cached file system object.

    Returns:
      bool: True if the file system is fully dereferenced and was removed
          from the cache.

    Raises:
      RuntimeError: if the file system is not cached or the cache value
          is invalid.
    """
    cache_key, cached = self._file_system_cache.GetCacheValueByObject(
        file_system)
    if not cache_key:
        raise RuntimeError('Object not cached.')
    if not cached:
        raise RuntimeError('Invalid cache value.')
    self._file_system_cache.ReleaseObject(cache_key)
    if not cached.IsDereferenced():
        return False
    self._file_system_cache.RemoveObject(cache_key)
    return True
Releases a cached file system object .
19,391
def _GetDecodedStreamSize(self):
    """Determines the decoded stream size.

    The size is only known after decoding, so the entire encoded stream
    is read and decoded from the start.

    Returns:
      int: decoded stream size.
    """
    self._file_object.seek(0, os.SEEK_SET)
    # Restart the decoder so no state from a previous read carries over.
    self._decoder = self._GetDecoder()
    self._decoded_data = b''
    encoded_data_offset = 0
    encoded_data_size = self._file_object.get_size()
    decoded_stream_size = 0
    while encoded_data_offset < encoded_data_size:
        read_count = self._ReadEncodedData(self._ENCODED_DATA_BUFFER_SIZE)
        if read_count == 0:
            break
        encoded_data_offset += read_count
        # _ReadEncodedData sets _decoded_data_size for the chunk just read.
        decoded_stream_size += self._decoded_data_size
    return decoded_stream_size
Retrieves the decoded stream size .
19,392
def _AlignDecodedDataOffset(self, decoded_data_offset):
    """Aligns the encoded file with the decoded data offset.

    Decoding is restarted from the beginning of the encoded stream and
    chunks are decoded until the chunk containing the requested decoded
    offset is reached.

    Args:
      decoded_data_offset (int): offset into the decoded stream.
    """
    self._file_object.seek(0, os.SEEK_SET)
    # Restart the decoder so no state from a previous read carries over.
    self._decoder = self._GetDecoder()
    self._decoded_data = b''
    encoded_data_offset = 0
    encoded_data_size = self._file_object.get_size()
    while encoded_data_offset < encoded_data_size:
        read_count = self._ReadEncodedData(self._ENCODED_DATA_BUFFER_SIZE)
        if read_count == 0:
            break
        encoded_data_offset += read_count
        if decoded_data_offset < self._decoded_data_size:
            # The target offset lies within the chunk just decoded.
            self._decoded_data_offset = decoded_data_offset
            break
        # Skip past this chunk and keep the remaining relative offset.
        decoded_data_offset -= self._decoded_data_size
Aligns the encoded file with the decoded data offset .
19,393
def _ReadEncodedData ( self , read_size ) : encoded_data = self . _file_object . read ( read_size ) read_count = len ( encoded_data ) self . _encoded_data = b'' . join ( [ self . _encoded_data , encoded_data ] ) self . _decoded_data , self . _encoded_data = ( self . _decoder . Decode ( self . _encoded_data ) ) self . _decoded_data_size = len ( self . _decoded_data ) return read_count
Reads encoded data from the file - like object .
19,394
def SetDecodedStreamSize(self, decoded_stream_size):
    """Sets the decoded stream size.

    Args:
      decoded_stream_size (int): size of the decoded stream.

    Raises:
      IOError: if the file-like object is already open.
      ValueError: if the decoded stream size is negative.
    """
    if self._is_open:
        raise IOError('Already open.')
    if decoded_stream_size < 0:
        raise ValueError((
            'Invalid decoded stream size: {0:d} value out of '
            'bounds.').format(decoded_stream_size))
    self._decoded_stream_size = decoded_stream_size
Sets the decoded stream size .
19,395
def _AddAttribute ( self , attribute ) : if attribute . identifier in self . _attributes : raise KeyError ( ( 'Volume attribute object already set for volume attribute ' 'identifier: {0:s}.' ) . format ( attribute . identifier ) ) self . _attributes [ attribute . identifier ] = attribute
Adds an attribute .
19,396
def GetAttribute(self, identifier):
    """Retrieves an attribute by identifier, parsing lazily on first use.

    Returns:
      The attribute, or None when the identifier is not known.
    """
    if not self._is_parsed:
        self._Parse()
        self._is_parsed = True
    return self._attributes.get(identifier, None)
Retrieves a specific attribute .
19,397
def _AddVolume ( self , volume ) : if volume . identifier in self . _volumes : raise KeyError ( 'Volume object already set for volume identifier: {0:s}' . format ( volume . identifier ) ) self . _volumes [ volume . identifier ] = volume self . _volume_identifiers . append ( volume . identifier )
Adds a volume .
19,398
def GetSectionByIndex(self, section_index):
    """Retrieves a section by index, parsing lazily on first use.

    Returns:
      The section, or None when the index is out of bounds. Negative
      indices are rejected rather than interpreted Python-style.
    """
    if not self._is_parsed:
        self._Parse()
        self._is_parsed = True
    if 0 <= section_index < len(self._sections):
        return self._sections[section_index]
    return None
Retrieves a specific section based on the index .
19,399
def GetVolumeByIdentifier(self, volume_identifier):
    """Retrieves a specific volume based on the identifier.

    Args:
      volume_identifier (str): identifier of the volume.

    Returns:
      The volume matching the identifier.

    Raises:
      KeyError: if no volume is set for the identifier.
    """
    # Volumes are parsed lazily on first access.
    if not self._is_parsed:
        self._Parse()
        self._is_parsed = True
    return self._volumes[volume_identifier]
Retrieves a specific volume based on the identifier .