idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
21,200
def _format_date_param(params, key, format="%Y-%m-%d %H:%M:%S"):
    """Convert a datetime-like value stored under *key* in *params* to a string.

    The value is replaced in place; a missing key, or a value without a
    ``strftime`` method (e.g. an already-formatted string), is left untouched.
    """
    if key not in params:
        return
    value = params[key]
    if hasattr(value, "strftime"):
        params[key] = value.strftime(format)
Utility function to convert datetime values to strings .
21,201
def submit_sms_conversion(self, message_id, delivered=True, timestamp=None):
    """Notify Nexmo of the delivery outcome of the SMS *message_id*.

    ``timestamp`` defaults to the current UTC time and is serialised to a
    string before the request is posted to the conversions endpoint.
    """
    payload = {
        "message-id": message_id,
        "delivered": delivered,
        "timestamp": timestamp or datetime.now(pytz.utc),
    }
    _format_date_param(payload, "timestamp")
    return self.post(self.api_host, "/conversions/sms", payload)
Notify Nexmo that an SMS was successfully received .
21,202
def get_stripped_file_lines(filename):
    """Return the lines of *filename* with surrounding whitespace removed.

    Calls ``fatal`` if the file does not exist.
    """
    try:
        # Context manager guarantees the handle is closed; the original
        # leaked an open file object.  Returning inside the `with` also
        # avoids referencing an unbound `lines` if `fatal` ever returns.
        with open(filename) as f:
            return [line.strip() for line in f]
    except FileNotFoundError:
        fatal("Could not open file: {!r}".format(filename))
Return lines of a file with whitespace removed
21,203
def create_settings(from_environment=False, locustfile=None, classes=None,
                    host=None, num_clients=None, hatch_rate=None,
                    reset_stats=False, run_time="3m"):
    """Build a settings object suitable for a LocalLocustRunner.

    When *from_environment* is true, ``LOCUST_*`` environment variables
    override the corresponding keyword arguments.  Exactly one of
    *locustfile* or *classes* must end up set.
    """
    settings = type('', (), {})()
    settings.from_environment = from_environment
    settings.locustfile = locustfile
    settings.classes = classes
    settings.host = host
    settings.num_clients = num_clients
    settings.hatch_rate = hatch_rate
    settings.reset_stats = reset_stats
    settings.run_time = run_time
    # Fixed defaults for a no-web, non-distributed, summary-only run.
    settings.no_web = True
    settings.master = False
    settings.show_task_ratio_json = False
    settings.list_commands = False
    settings.loglevel = 'INFO'
    settings.slave = False
    settings.only_summary = True
    settings.logfile = None
    settings.show_task_ratio = False
    settings.print_stats = False

    if from_environment:
        for name in ('locustfile', 'classes', 'host', 'run_time',
                     'num_clients', 'hatch_rate'):
            value = os.environ.get('LOCUST_{0}'.format(name.upper()))
            if value:
                setattr(settings, name, value)

    if settings.locustfile is None and settings.classes is None:
        raise Exception('One of locustfile or classes must be specified')
    if settings.locustfile and settings.classes:
        raise Exception('Only one of locustfile or classes can be specified')

    if settings.locustfile:
        docstring, classes = load_locustfile(settings.locustfile)
        settings.classes = [classes[n] for n in classes]
    else:
        if isinstance(settings.classes, str):
            settings.classes = settings.classes.split(',')
        for idx, val in enumerate(settings.classes):
            # NOTE: eval resolves a class object from its name; unsafe on
            # untrusted input.
            settings.classes[idx] = eval(val)

    for name in ('classes', 'host', 'num_clients', 'hatch_rate'):
        value = getattr(settings, name, None)
        if not value:
            raise Exception(
                'configuration error, attribute not set: {0}'.format(name))
        if isinstance(value, str) and value.isdigit():
            setattr(settings, name, int(value))

    return settings
Returns a settings object to be used by a LocalLocustRunner .
21,204
def get_lambda_runtime_info(context):
    """Summarise an AWS Lambda invocation *context* as a plain dict."""
    return {
        'remaining_time': context.get_remaining_time_in_millis(),
        'function_name': context.function_name,
        'function_version': context.function_version,
        'invoked_function_arn': context.invoked_function_arn,
        'memory_limit': context.memory_limit_in_mb,
        'aws_request_id': context.aws_request_id,
        'log_group_name': context.log_group_name,
        'log_stream_name': context.log_stream_name,
    }
Returns a dictionary of information about the AWS Lambda function invocation
21,205
def solvedbi_sm(ah, rho, b, c=None, axis=4):
    r"""Solve a diagonal block linear system with a scaled identity term
    via the Sherman-Morrison formula.

    Parameter *c* may supply the precomputed factor; when omitted it is
    derived from *ah* and *rho*.
    """
    a = np.conj(ah)
    if c is None:
        c = solvedbi_sm_c(ah, a, rho, axis)
    cb = inner(c, b, axis=axis)
    if have_numexpr:
        # numexpr evaluates the elementwise expression in a single pass.
        return ne.evaluate('(b - (a * cb)) / rho')
    return (b - (a * cb)) / rho
Solve a diagonal block linear system with a scaled identity term using the Sherman - Morrison equation .
21,206
def solvedbd_sm(ah, d, b, c=None, axis=4):
    r"""Solve a diagonal block linear system with a diagonal term via the
    Sherman-Morrison formula.

    Parameter *c* may supply the precomputed factor; when omitted it is
    derived from *ah* and *d*.
    """
    a = np.conj(ah)
    if c is None:
        c = solvedbd_sm_c(ah, a, d, axis)
    cb = inner(c, b, axis=axis)
    if have_numexpr:
        return ne.evaluate('(b - (a * cb)) / d')
    return (b - (a * cb)) / d
Solve a diagonal block linear system with a diagonal term using the Sherman - Morrison equation .
21,207
def Gax(x, ax):
    """Forward difference of *x* along axis *ax*, zeroed at the trailing
    boundary."""
    grad = np.roll(x, -1, axis=ax) - x
    trailing = (slice(None),) * ax + (slice(-1, None),)
    grad[trailing] = 0.0
    return grad
Compute gradient of x along axis ax .
21,208
def GTax(x, ax):
    """Transpose (adjoint) of the forward-difference operator along *ax*."""
    lead = (slice(None),) * ax
    out = np.roll(x, 1, axis=ax) - x
    # Boundary corrections for the first and last samples along the axis.
    out[lead + (slice(0, 1),)] = -x[lead + (slice(0, 1),)]
    out[lead + (slice(-1, None),)] = x[lead + (slice(-2, -1),)]
    return out
Compute transpose of gradient of x along axis ax .
21,209
def GradientFilters(ndim, axes, axshp, dtype=None):
    r"""Construct frequency-domain filters for computing gradients.

    Returns the DFT of the per-axis difference filters and the sum of
    their squared magnitudes.
    """
    if dtype is None:
        dtype = np.float32
    shape = [2 if k in axes else 1 for k in range(ndim)] + [len(axes)]
    g = np.zeros(shape, dtype)
    for k in axes:
        # Place a [1, -1] difference stencil along axis k, in filter slot k.
        idx = (0,) * k + (slice(None),) + (0,) * (g.ndim - 2 - k) + (k,)
        g[idx] = np.array([1, -1])
    Gf = rfftn(g, axshp, axes=axes)
    GHGf = np.sum(np.conj(Gf) * Gf, axis=-1).real
    return Gf, GHGf
Construct a set of filters for computing gradients in the frequency domain .
21,210
def promote16(u, fn=None, *args, **kwargs):
    r"""Work around functions that reject ``np.float16`` arrays.

    With only *u* given, return *u* promoted to ``np.float32`` when it is
    float16 (otherwise unchanged).  With *fn* given, call it on the
    promoted array and convert the result(s) back to *u*'s dtype.
    """
    work_dtype = np.float32 if u.dtype == np.float16 else u.dtype
    up = np.asarray(u, dtype=work_dtype)
    if fn is None:
        return up
    v = fn(up, *args, **kwargs)
    if isinstance(v, tuple):
        return tuple(np.asarray(vk, dtype=u.dtype) for vk in v)
    return np.asarray(v, dtype=u.dtype)
Utility function for use with functions that do not support arrays of dtype np . float16 . This function has two distinct modes of operation . If called with only the u parameter specified the returned value is either u itself if u is not of dtype np . float16 or u promoted to np . float32 dtype if it is . If the function parameter fn is specified then u is conditionally promoted as described above passed as the first argument to function fn and the returned values are converted back to dtype np . float16 if u is of that dtype . Note that if parameter fn is specified it may not be specified as a keyword argument if it is followed by any non - keyword arguments .
21,211
def LaplaceCentreWeight(self):
    """Centre weighting matrix for the TV Laplacian."""
    shape = [1] * self.S.ndim
    for ax in self.axes:
        shape[ax] = self.S.shape[ax]
    # Interior samples have weight 2 * (number of axes); boundary samples
    # along each axis lose one neighbour.
    lcw = 2 * len(self.axes) * np.ones(shape, dtype=self.dtype)
    for ax in self.axes:
        lcw[(slice(None),) * ax + ([0, -1],)] -= 1.0
    return lcw
Centre weighting matrix for TV Laplacian .
21,212
def GaussSeidelStep(self, S, X, ATYU, rho, lcw, W2):
    """Gauss-Seidel update for the linear system in the TV problem."""
    Xss = np.zeros_like(S, dtype=self.dtype)
    for ax in self.axes:
        # Accumulate shifted neighbours along each regularisation axis.
        Xss += sl.zpad(X[(slice(None),) * ax + (slice(0, -1),)], (1, 0), ax)
        Xss += sl.zpad(X[(slice(None),) * ax + (slice(1, None),)], (0, 1), ax)
    return (rho * (Xss + ATYU) + W2 * S) / (W2 + rho * lcw)
Gauss - Seidel step for linear system in TV problem .
21,213
def runtime(self):
    """Transitional accessor mapping the old runtime attribute onto the
    new timer mechanism; scheduled for removal."""
    warnings.warn("admm.ADMM.runtime attribute has been replaced by "
                  "an upgraded timer class: please see the documentation "
                  "for admm.ADMM.solve method and util.Timer class",
                  PendingDeprecationWarning)
    return self.timer.elapsed('init') + self.timer.elapsed('solve')
Transitional property providing access to the new timer mechanism . This will be removed in the future .
21,214
def ustep(self):
    """Dual variable update: accumulate the primal residual into U."""
    residual = self.rsdl_r(self.AX, self.Y)
    self.U += residual
Dual variable update .
21,215
def update_rho(self, k, r, s):
    """Automatic adjustment of the penalty parameter rho.

    Every ``AutoRho, Period`` iterations, the primal residual *r* and
    dual residual *s* are compared and rho (and the scaled dual variable
    U) is rescaled when they are out of balance.
    """
    if not self.opt['AutoRho', 'Enabled']:
        return
    tau = self.rho_tau
    mu = self.rho_mu
    xi = self.rho_xi
    if k == 0 or np.mod(k + 1, self.opt['AutoRho', 'Period']) != 0:
        return
    if self.opt['AutoRho', 'AutoScaling']:
        # Scale the multiplier by the residual imbalance, capped at tau.
        if s == 0.0 or r == 0.0:
            rhomlt = tau
        else:
            rhomlt = np.sqrt(r / (s * xi) if r > s * xi else (s * xi) / r)
            if rhomlt > tau:
                rhomlt = tau
    else:
        rhomlt = tau
    rsf = 1.0
    if r > xi * mu * s:
        rsf = rhomlt
    elif s > (mu / xi) * r:
        rsf = 1.0 / rhomlt
    self.rho *= self.dtype.type(rsf)
    self.U /= rsf
    if rsf != 1.0:
        self.rhochange()
Automatic rho adjustment .
21,216
def display_status(self, fmtstr, itst):
    """Print selected fields of the iteration statistics tuple *itst*
    when the Verbose option is enabled."""
    if not self.opt['Verbose']:
        return
    hdrtxt = type(self).hdrtxt()
    hdrval = type(self).hdrval()
    itdsp = tuple(getattr(itst, hdrval[col]) for col in hdrtxt)
    # The final column is only meaningful when AutoRho is active.
    if not self.opt['AutoRho', 'Enabled']:
        itdsp = itdsp[0:-1]
    print(fmtstr % itdsp)
Display current iteration status as selection of fields from iteration stats tuple .
21,217
def rsdl_sn(self, U):
    """Dual residual normalisation term: rho * ||A^T U||_2."""
    return self.rho * np.linalg.norm(self.cnst_AT(U))
Compute dual residual normalisation term .
21,218
def getmin(self):
    """Return the optimisation variable selected by the ReturnVar option.

    Raises:
        ValueError: if ReturnVar is not one of 'X', 'Y0', 'Y1'.
    """
    rvar = self.opt['ReturnVar']
    if rvar == 'X':
        return self.var_x()
    elif rvar == 'Y0':
        return self.var_y0()
    elif rvar == 'Y1':
        return self.var_y1()
    else:
        # The original message lacked a space between the implicitly
        # concatenated string literals ("valuefor").
        raise ValueError(rvar + ' is not a valid value '
                         'for option ReturnVar')
Get minimiser after optimisation .
21,219
def cbpdnmsk_class_label_lookup(label):
    """Map a label string to the corresponding ConvBPDNMask solver class.

    Raises:
        ValueError: if *label* is not a known method label.
    """
    clsmod = {'admm': admm_cbpdn.ConvBPDNMaskDcpl,
              'fista': fista_cbpdn.ConvBPDNMask}
    try:
        return clsmod[label]
    except KeyError:
        raise ValueError('Unknown ConvBPDNMask solver method %s' % label)
Get a ConvBPDNMask class from a label string .
21,220
def ConvBPDNMaskOptionsDefaults(method='admm'):
    """Default options dict for the ConvBPDNMask class chosen by *method*."""
    dflt = copy.deepcopy(
        cbpdnmsk_class_label_lookup(method).Options.defaults)
    if method == 'admm':
        extra = {'MaxMainIter': 1,
                 'AutoRho': {'Period': 10, 'AutoScaling': False,
                             'RsdlRatio': 10.0, 'Scaling': 2.0,
                             'RsdlTarget': 1.0}}
    else:
        extra = {'MaxMainIter': 1,
                 'BackTrack': {'gamma_u': 1.2, 'MaxIter': 50}}
    dflt.update(extra)
    return dflt
Get defaults dict for the ConvBPDNMask class specified by the method parameter .
21,221
def ccmodmsk_class_label_lookup(label):
    """Map a label string to the corresponding ConvCnstrMODMaskDcpl
    solver class.

    Raises:
        ValueError: if *label* is not a known method label.
    """
    clsmod = {'ism': admm_ccmod.ConvCnstrMODMaskDcpl_IterSM,
              'cg': admm_ccmod.ConvCnstrMODMaskDcpl_CG,
              'cns': admm_ccmod.ConvCnstrMODMaskDcpl_Consensus,
              'fista': fista_ccmod.ConvCnstrMODMask}
    try:
        return clsmod[label]
    except KeyError:
        raise ValueError('Unknown ConvCnstrMODMask solver method %s' % label)
Get a ConvCnstrMODMask class from a label string .
21,222
def ConvCnstrMODMaskOptionsDefaults(method='fista'):
    """Default options dict for the ConvCnstrMODMask class chosen by
    *method*."""
    dflt = copy.deepcopy(
        ccmodmsk_class_label_lookup(method).Options.defaults)
    if method == 'fista':
        extra = {'MaxMainIter': 1,
                 'BackTrack': {'gamma_u': 1.2, 'MaxIter': 50}}
    else:
        extra = {'MaxMainIter': 1,
                 'AutoRho': {'Period': 10, 'AutoScaling': False,
                             'RsdlRatio': 10.0, 'Scaling': 2.0,
                             'RsdlTarget': 1.0}}
    dflt.update(extra)
    return dflt
Get defaults dict for the ConvCnstrMODMask class specified by the method parameter .
21,223
def pathsplit(pth, dropext=True):
    """Split *pth* into a tuple of all of its components.

    The extension of the final component is removed when *dropext* is
    true (the default).
    """
    if dropext:
        pth = os.path.splitext(pth)[0]
    head, tail = os.path.split(pth)
    if head == '':
        return (tail,)
    if len(head) == 1:
        # A single-character head (e.g. '/') is a root; stop recursing.
        return (head, tail)
    return pathsplit(head, dropext=False) + (tail,)
Split a path into a tuple of all of its components .
21,224
def update_required(srcpth, dstpth):
    """Return True if *dstpth* is missing or older than *srcpth*.

    Intended for files at *dstpth* generated from files at *srcpth*.
    """
    if not os.path.exists(dstpth):
        return True
    return os.stat(srcpth).st_mtime > os.stat(dstpth).st_mtime
If the file at dstpth is generated from the file at srcpth determine whether an update is required . Returns True if dstpth does not exist or if srcpth has been more recently modified than dstpth .
21,225
def read_sphinx_environment(pth):
    """Unpickle and return the sphinx environment stored at *pth*."""
    with open(pth, 'rb') as f:
        return pickle.load(f)
Read the sphinx environment . pickle file at path pth .
21,226
def parse_rst_index(rstpth):
    """Parse the top-level RST example index at *rstpth*.

    Returns the subdirectory names in order of appearance and a dict
    mapping each name to its description.
    """
    pthidx = {}
    pthlst = []
    with open(rstpth) as fd:
        lines = fd.readlines()
    for i in range(1, len(lines)):
        # A non-indented name line followed by an indented description.
        if (re.match(r'^ \w+', lines[i]) is not None and
                re.match(r'^\w+', lines[i - 1]) is not None):
            name = lines[i - 1][:-1]
            pthlst.append(name)
            pthidx[name] = lines[i][2:-1]
    return pthlst, pthidx
Parse the top - level RST index file at rstpth for the example python scripts . Returns a list of subdirectories in order of appearance in the index file and a dict mapping subdirectory name to a description .
21,227
def preprocess_script_string(str):
    """Prepare an example script (as a string) for notebook conversion.

    Strips the header comment block, switches plotting to notebook mode,
    and removes the wait-for-enter epilogue.
    """
    # Drop the five-line comment header at the top of the script.
    str = re.sub(r'^(#[^#\n]+\n){5}\n*', r'', str)
    # Configure plotting for inline notebook display.
    str = re.sub(r'from sporco import plot',
                 r'from sporco import plot'
                 '\nplot.config_notebook_plotting()',
                 str, flags=re.MULTILINE)
    # Remove the interactive "wait for enter" footer.
    str = re.sub(r'\n*# Wait for enter on keyboard.*\ninput().*\n*', r'',
                 str, flags=re.MULTILINE)
    return str
Process python script represented as string str in preparation for conversion to a notebook . This processing includes removal of the header comment modification of the plotting configuration and replacement of certain sphinx cross - references with appropriate links to online docs .
21,228
def script_string_to_notebook(str, pth):
    """Write the python script in string *str* to a notebook file *pth*."""
    notebook = py2jn.py_string_to_notebook(str)
    py2jn.write_notebook(notebook, pth)
Convert a python script represented as string str to a notebook with filename pth .
21,229
def script_to_notebook(spth, npth, cr):
    """Convert the script at *spth* to a notebook at *npth*.

    When an executed notebook already exists at *npth* and its code cells
    match the freshly converted script, only its markdown cells are
    refreshed so execution results are preserved.  *cr* is a
    CrossReferenceLookup object (may be None).
    """
    with open(spth) as f:
        stxt = f.read()
    stxt = preprocess_script_string(stxt)
    if not (os.path.exists(npth) and notebook_executed(npth)):
        script_string_to_notebook_with_links(stxt, npth, cr)
        return
    nbold = nbformat.read(npth, as_version=4)
    nbnew = script_string_to_notebook_object(stxt)
    if cr is not None:
        notebook_substitute_ref_with_url(nbnew, cr)
    if not same_notebook_code(nbnew, nbold):
        script_string_to_notebook_with_links(stxt, npth, cr)
        return
    try:
        replace_markdown_cells(nbnew, nbold)
    except Exception:
        script_string_to_notebook_with_links(stxt, npth, cr)
    else:
        with open(npth, 'wt') as f:
            nbformat.write(nbold, f)
Convert the script at spth to a notebook at npth . Parameter cr is a CrossReferenceLookup object .
21,230
def script_string_to_notebook_with_links(str, pth, cr=None):
    """Convert script string *str* to a notebook at *pth*, replacing
    sphinx cross-references with online-doc links when *cr* is given."""
    if cr is None:
        script_string_to_notebook(str, pth)
        return
    ntbk = script_string_to_notebook_object(str)
    notebook_substitute_ref_with_url(ntbk, cr)
    with open(pth, 'wt') as f:
        nbformat.write(ntbk, f)
Convert a python script represented as string str to a notebook with filename pth and replace sphinx cross - references with links to online docs . Parameter cr is a CrossReferenceLookup object .
21,231
# Convert the rst file at infile to a notebook at outfile: pandoc renders
# the rst to github markdown (with dollar math and fenced code attributes),
# links to .py files are rewritten to .ipynb, and the result is converted
# to a notebook via py2jn.
# NOTE(review): `mdstr = ''` discards the converted markdown immediately
# before the notebook is built, so the output notebook is empty.  This
# looks like a truncation/extraction artefact; confirm against upstream
# before relying on this function.
def rst_to_notebook ( infile , outfile ) : with open ( infile , 'r' ) as fin : rststr = fin . read ( ) mdfmt = 'markdown_github+tex_math_dollars+fenced_code_attributes' mdstr = pypandoc . convert_text ( rststr , mdfmt , format = 'rst' , extra_args = [ '--atx-headers' ] ) mdstr = re . sub ( r'\(([^\)]+).py\)' , r'(\1.ipynb)' , mdstr ) mdstr = '' nb = py2jn . py_string_to_notebook ( mdstr ) py2jn . tools . write_notebook ( nb , outfile , nbver = 4 )
Convert an rst file to a notebook file .
21,232
# Convert the markdown file at infile to a notebook at outfile via py2jn.
# NOTE(review): `str = ''` discards the file contents before conversion,
# so the output notebook is empty.  Looks like a truncation/extraction
# artefact; confirm against upstream.
def markdown_to_notebook ( infile , outfile ) : with open ( infile , 'r' ) as fin : str = fin . read ( ) str = '' nb = py2jn . py_string_to_notebook ( str ) py2jn . tools . write_notebook ( nb , outfile , nbver = 4 )
Convert a markdown file to a notebook file .
21,233
# Convert the example-index rst file at infile into a sphinx docs rst file
# at outfile.  A sphinx reference label derived from the output path is
# prepended; the region between ".. toc-start" and ".. toc-end" markers is
# rewritten as a ".. toctree::" directive, with each `text <target>` link
# becoming a toctree entry (pointing at <target>/index when the file lives
# at the top examples level).  All other lines are copied through verbatim.
def rst_to_docs_rst ( infile , outfile ) : with open ( infile , 'r' ) as fin : rst = fin . readlines ( ) ps = pathsplit ( outfile ) [ - 3 : ] if ps [ - 2 ] == 'examples' : ps = ps [ - 2 : ] idx = 'index' else : idx = '' out = '.. _' + '_' . join ( ps ) + ':\n\n' it = iter ( rst ) for line in it : if line [ 0 : 12 ] == '.. toc-start' : toc = [ ] for line in it : if line == '\n' : continue elif line [ 0 : 10 ] == '.. toc-end' : out += '.. toctree::\n :maxdepth: 1\n\n' for c in toc : out += ' %s <%s>\n' % c break else : m = re . search ( r'`(.*?)\s*<(.*?)(?:.py)?>`' , line ) if m : if idx == '' : toc . append ( ( m . group ( 1 ) , m . group ( 2 ) ) ) else : toc . append ( ( m . group ( 1 ) , os . path . join ( m . group ( 2 ) , idx ) ) ) else : out += line with open ( outfile , 'w' ) as fout : fout . write ( out )
Convert an rst file to a sphinx docs rst file .
21,234
def parse_notebook_index(ntbkpth):
    """Parse the top-level notebook index file at *ntbkpth*.

    Returns a list of subdirectory paths in order of appearance and a
    dict mapping each path to its description.
    """
    rex = RSTExporter()
    rsttxt = rex.from_filename(ntbkpth)[0]
    # Rejoin continuation lines.  The original passed ``re.M | re.S`` as
    # the positional *count* argument of re.sub, silently limiting the
    # substitution to 24 replacements; the flags are irrelevant to this
    # anchor-free pattern.
    rsttxt = re.sub(r'\n ', r'', rsttxt)
    pthidx = {}
    pthlst = []
    for line in rsttxt.split('\n'):
        m = re.match(r'^-\s+`([^<]+)\s+<([^>]+).ipynb>`__', line)
        if m:
            pthlst.append(m.group(2))
            pthidx[m.group(2)] = m.group(1)
    return pthlst, pthidx
Parse the top - level notebook index file at ntbkpth . Returns a list of subdirectories in order of appearance in the index file and a dict mapping subdirectory name to a description .
21,235
# Construct a markdown-format index string for the paths in pthlst, with
# heading text title and per-path labels taken from pthidx.
# NOTE(review): the body only returns '\n\n' and never uses its
# parameters -- the index-building code appears to have been lost in
# extraction; confirm against upstream.
def construct_notebook_index ( title , pthlst , pthidx ) : txt = '\n\n' return txt
Construct a string containing a markdown format index for the list of paths in pthlst . The title for the index is in title and pthidx is a dict giving label text for each path .
21,236
def notebook_executed(pth):
    """Return True if every code cell of the notebook at *pth* has an
    execution count (i.e. the notebook has been executed)."""
    nb = nbformat.read(pth, as_version=4)
    return all(cell.execution_count is not None
               for cell in nb['cells'] if cell.cell_type == 'code')
Determine whether the notebook at pth has been executed .
21,237
def same_notebook_code(nb1, nb2):
    """Return True if notebooks *nb1* and *nb2* have the same cell
    structure and identical source in all code cells."""
    cells1 = nb1['cells']
    cells2 = nb2['cells']
    if len(cells1) != len(cells2):
        return False
    for c1, c2 in zip(cells1, cells2):
        if c1['cell_type'] != c2['cell_type']:
            return False
        if c1['cell_type'] == 'code' and c1['source'] != c2['source']:
            return False
    return True
Return true if the code cells of notebook objects nb1 and nb2 are the same .
21,238
def execute_notebook(npth, dpth, timeout=1200, kernel='python3'):
    """Execute the notebook at *npth* with *dpth* as working directory,
    writing the executed notebook back in place.

    Returns the execution time in seconds.
    """
    ep = ExecutePreprocessor(timeout=timeout, kernel_name=kernel)
    nb = nbformat.read(npth, as_version=4)
    t0 = timer()
    ep.preprocess(nb, {'metadata': {'path': dpth}})
    t1 = timer()
    with open(npth, 'wt') as f:
        nbformat.write(nb, f)
    return t1 - t0
Execute the notebook at npth using dpth as the execution directory . The execution timeout and kernel are timeout and kernel respectively .
21,239
def replace_markdown_cells(src, dst):
    """Overwrite markdown cells of notebook *dst* with the corresponding
    cells of notebook *src*.

    Raises:
        ValueError: if the notebooks differ in cell count or cell types.
    """
    if len(src['cells']) != len(dst['cells']):
        raise ValueError('notebooks do not have the same number of cells')
    for n in range(len(src['cells'])):
        if src['cells'][n]['cell_type'] != dst['cells'][n]['cell_type']:
            # The original left the '%d' placeholder unformatted.
            raise ValueError(
                'cell number %d of different type in src and dst' % n)
        if src['cells'][n]['cell_type'] == 'markdown':
            dst['cells'][n]['source'] = src['cells'][n]['source']
Overwrite markdown cells in notebook object dst with corresponding cells in notebook object src .
21,240
def notebook_substitute_ref_with_url(ntbk, cr):
    """Replace sphinx cross-references with online-doc links in every
    markdown cell of notebook object *ntbk*.

    *cr* is a CrossReferenceLookup object.
    """
    for cell in ntbk['cells']:
        if cell['cell_type'] == 'markdown':
            cell['source'] = cr.substitute_ref_with_url(cell['source'])
In markdown cells of notebook object ntbk replace sphinx cross - references with links to online docs . Parameter cr is a CrossReferenceLookup object .
21,241
def preprocess_notebook(ntbk, cr):
    """Prepare notebook object *ntbk* for rst conversion by replacing
    online-doc links in markdown cells with sphinx cross-references.

    *cr* is a CrossReferenceLookup object.
    """
    for cell in ntbk['cells']:
        if cell['cell_type'] == 'markdown':
            cell['source'] = cr.substitute_url_with_ref(cell['source'])
Process notebook object ntbk in preparation for conversion to an rst document . This processing replaces links to online docs with corresponding sphinx cross - references within the local docs . Parameter cr is a CrossReferenceLookup object .
21,242
def write_notebook_rst(txt, res, fnm, pth):
    """Write converted notebook text *txt* and its resources *res* as
    ``fnm``.rst (plus an ``fnm``_files resource directory) under *pth*."""
    extfnm = fnm + '_files'
    extpth = os.path.join(pth, extfnm)
    mkdir(extpth)
    for r in res['outputs'].keys():
        rnew = re.sub('output', fnm, r)
        rpth = os.path.join(extfnm, rnew)
        # Point image directives at the renamed resource files.  The
        # original passed re.M as the positional *count* argument of
        # re.sub, capping replacements at 8; the flag is irrelevant to
        # this anchor-free pattern.
        txt = re.sub(r'\.\. image:: ' + r, '.. image:: ' + rpth, txt)
        fullrpth = os.path.join(pth, rpth)
        with open(fullrpth, 'wb') as fo:
            fo.write(res['outputs'][r])
    # Strip trailing whitespace from every line.
    txt = re.sub(r'[ \t]+$', '', txt, flags=re.M)
    with open(os.path.join(pth, fnm + '.rst'), 'wt') as fo:
        fo.write(txt)
Write the converted notebook text txt and resources res to filename fnm in directory pth .
21,243
# Convert the notebook at npth to an rst document at rpth in directory
# rdir; cr is a CrossReferenceLookup object.
# NOTE(review): notebook_object_to_rst as defined later in this file takes
# (ntbk, rpth, cr=None) and derives rdir itself, so the rdir argument
# passed here would bind to cr and cr would be an unexpected fourth
# argument.  One of the two signatures is stale; confirm against upstream
# before relying on this call.
def notebook_to_rst ( npth , rpth , rdir , cr = None ) : ntbk = nbformat . read ( npth , nbformat . NO_CONVERT ) notebook_object_to_rst ( ntbk , rpth , rdir , cr )
Convert notebook at npth to rst document at rpth in directory rdir . Parameter cr is a CrossReferenceLookup object .
21,244
def notebook_object_to_rst(ntbk, rpth, cr=None):
    """Convert notebook object *ntbk* to an rst document at *rpth*.

    *cr* is a CrossReferenceLookup object used to map online-doc links
    back to sphinx cross-references.
    """
    rdir = os.path.dirname(rpth)
    rb = os.path.basename(os.path.splitext(rpth)[0])
    if cr is not None:
        preprocess_notebook(ntbk, cr)
    rex = RSTExporter()
    rsttxt, rstres = rex.from_notebook_node(ntbk)
    # Fix double-backtick role markup produced by the exporter.
    rsttxt = re.sub(r':([^:]+):``(.*?)``', r':\1:`\2`', rsttxt)
    reflbl = ('.. _examples_' + os.path.basename(rdir) + '_' +
              rb.replace('-', '_') + ':\n')
    write_notebook_rst(reflbl + rsttxt, rstres, rb, rdir)
Convert notebook object ntbk to rst document at rpth in directory rdir . Parameter cr is a CrossReferenceLookup object .
21,245
# Generate rst docs from the example scripts.  spth, npth, and rpth are
# the top-level scripts directory, the top-level notebooks directory, and
# the top-level output directory within the docs respectively.  First,
# every index .rst under spth (one or two levels deep) is converted into
# the mirrored location under rpth when it is out of date; then, for each
# example script with a corresponding notebook, a combined rst document is
# regenerated when the notebook is newer than the existing rst output.
def make_example_scripts_docs ( spth , npth , rpth ) : mkdir ( rpth ) for fp in glob ( os . path . join ( spth , '*.rst' ) ) + glob ( os . path . join ( spth , '*' , '*.rst' ) ) : b = os . path . basename ( fp ) dn = os . path . dirname ( fp ) sd = os . path . split ( dn ) if dn == spth : d = '' else : d = sd [ - 1 ] fd = os . path . join ( rpth , d ) mkdir ( fd ) fn = os . path . join ( fd , b ) if update_required ( fp , fn ) : print ( 'Converting %s ' % os . path . join ( d , b ) , end = '\r' ) rst_to_docs_rst ( fp , fn ) for fp in sorted ( glob ( os . path . join ( spth , '*' , '*.py' ) ) ) : d = os . path . split ( os . path . dirname ( fp ) ) [ 1 ] b = os . path . splitext ( os . path . basename ( fp ) ) [ 0 ] fn = os . path . join ( npth , d , b + '.ipynb' ) fr = os . path . join ( rpth , d , b + '.rst' ) if os . path . exists ( fp ) and os . path . exists ( fn ) : if update_required ( fn , fr ) : fnb = os . path . join ( d , b + '.ipynb' ) print ( 'Processing %s ' % fnb , end = '\r' ) script_and_notebook_to_rst ( fp , fn , fr ) else : print ( 'WARNING: script %s or notebook %s not found' % ( fp , fn ) )
Generate rst docs from example scripts . Arguments spth npth and rpth are the top - level scripts directory the top - level notebooks directory and the top - level output directory within the docs respectively .
21,246
def get_full_name(self, role, name):
    """Resolve *name* to a full object name for sphinx role *role*.

    A name starting with '.' is treated as a partial name and searched
    for in the role's name-lookup string; otherwise the name itself is
    checked for existence in the inventory.

    Raises:
        KeyError: if the name (or role) cannot be resolved uniquely.
    """
    if name[0] == '.':
        # Partial name: search the comma-delimited name list for this role.
        pattern = r'(?<= )[^,]*' + name + r'(?=,)'
        matches = re.findall(pattern, self.rolnam[role])
        if len(matches) == 0:
            raise KeyError('name matching %s not found' % name, 'name',
                           len(matches))
        if len(matches) > 1:
            raise KeyError('multiple names matching %s found' % name,
                           'name', len(matches))
        return matches[0]
    try:
        dom = IntersphinxInventory.roledomain[role]
    except KeyError:
        raise KeyError('role %s not found' % role, 'role', 0)
    if name not in self.inv[dom]:
        raise KeyError('name %s not found' % name, 'name', 0)
    return name
If name is already the full name of an object return name . Otherwise if name is a partial object name look up the full name and return it .
21,247
def matching_base_url(self, url):
    """Return True if the initial part of *url* equals the base url given
    to the initialiser of this object."""
    prefix_len = len(self.baseurl)
    return url[:prefix_len] == self.baseurl
Return True if the initial part of url matches the base url passed to the initialiser of this object and False otherwise .
21,248
def inventory_maps(inv):
    """Build reverse-lookup helpers for an intersphinx inventory dict.

    Returns a dict mapping url postfix to a (role, name) tuple, and a
    dict mapping each role to a regex-searchable ' name,'-delimited
    string of all names registered for that role.
    """
    revinv = {}
    rolnam = {}
    for dom in inv:
        if dom[0:3] != 'py:' or dom not in IntersphinxInventory.domainrole:
            continue
        role = IntersphinxInventory.domainrole[dom]
        rolnam[role] = ''
        for name in inv[dom]:
            postfix = inv[dom][name][2]
            revinv[postfix] = (role, name)
            rolnam[role] += ' ' + name + ','
    return revinv, rolnam
Construct dicts facilitating information lookup in an inventory dict . A reversed dict allows lookup of a tuple specifying the sphinx cross - reference role and the name of the referenced type from the intersphinx inventory url postfix string . A role - specific name lookup string allows the set of all names corresponding to a specific role to be searched via regex .
21,249
def get_docs_label(self, role, name):
    """Return a label suitable for a link to the online docs.

    Raises:
        KeyError: if a cite key or ref label cannot be resolved.
    """
    if role == 'cite':
        try:
            cstr = self.env.bibtex_cache.get_label_from_key(name)
        except Exception:
            raise KeyError('cite key %s not found' % name, 'cite', 0)
        return '[%s]' % cstr
    if role == 'ref':
        try:
            reftpl = self.env.domaindata['std']['labels'][name]
        except Exception:
            raise KeyError('ref label %s not found' % name, 'ref', 0)
        return reftpl[2]
    # For other roles the (partial) name itself serves as the label.
    return name[1:] if name[0] == '.' else name
Get an appropriate label to use in a link to the online docs .
21,250
def substitute_ref_with_url(self, txt):
    """Replace sphinx cross-references in *txt* with markdown links to
    the online docs; unresolvable references emit a warning and are left
    in place."""
    for mo in re.finditer(r':([^:]+):`([^`]+)`', txt):
        lbl = None
        url = None
        mtxt = mo.group(0)
        role = mo.group(1)
        name = mo.group(2)
        if role == 'ref':
            # ref targets may carry an explicit label: :ref:`label <name>`
            ma = re.match(r'\s*([^\s<]+)\s*<([^>]+)+>', name)
            if ma:
                name = ma.group(2)
                lbl = ma.group(1)
        try:
            url = self.get_docs_url(role, name)
            if role != 'ref':
                lbl = self.get_docs_label(role, name)
        except KeyError as ex:
            if len(ex.args) == 1 or ex.args[1] != 'role':
                print('Warning: %s' % ex.args[0])
        else:
            rtxt = '[%s](%s)' % (lbl, url)
            # Escape the matched text: it is used as a regex pattern and
            # may contain metacharacters.  The original did not escape,
            # unlike the inverse substitute_url_with_ref.
            txt = re.sub(re.escape(mtxt), rtxt, txt, flags=re.M)
    return txt
In the string txt replace sphinx references with corresponding links to online docs .
21,251
def substitute_url_with_ref(self, txt):
    """Replace markdown links to the online docs in *txt* with the
    corresponding sphinx cross-references; unresolvable urls emit a
    warning and are left in place."""
    for mo in re.finditer(r'\[([^\]]+|\[[^\]]+\])\]\(([^\)]+)\)', txt):
        mtxt = mo.group(0)
        lbl = mo.group(1)
        url = mo.group(2)
        try:
            ref = self.get_sphinx_ref(url, lbl)
        except KeyError as ex:
            print('Warning: %s' % ex.args[0])
        else:
            txt = re.sub(re.escape(mtxt), ref, txt)
    return txt
In the string txt replace links to online docs with corresponding sphinx cross - references .
21,252
def obfn_fvarf(self):
    """Frequency-domain variable used in the data fidelity term, selected
    by the fEvalX option."""
    if self.opt['fEvalX']:
        return self.Xf
    return sl.rfftn(self.Y, None, self.cri.axisN)
Variable to be evaluated in computing data fidelity term depending on fEvalX option value .
21,253
def rsdl(self):
    """Fixed point residual evaluated in the Fourier domain."""
    diff = self.Xf - self.Yfprv
    return sl.rfl2norm2(diff, self.X.shape, axis=self.cri.axisN)
Compute fixed point residual in Fourier domain .
21,254
def cbpdn_class_label_lookup(label):
    """Map a label string to the corresponding ConvBPDN solver class.

    Raises:
        ValueError: if *label* is not a known method label.
    """
    clsmod = {'admm': admm_cbpdn.ConvBPDN,
              'fista': fista_cbpdn.ConvBPDN}
    try:
        return clsmod[label]
    except KeyError:
        raise ValueError('Unknown ConvBPDN solver method %s' % label)
Get a CBPDN class from a label string .
21,255
def ConvBPDNOptionsDefaults(method='admm'):
    """Default options dict for the ConvBPDN class chosen by *method*."""
    dflt = copy.deepcopy(cbpdn_class_label_lookup(method).Options.defaults)
    if method == 'admm':
        extra = {'MaxMainIter': 1,
                 'AutoRho': {'Period': 10, 'AutoScaling': False,
                             'RsdlRatio': 10.0, 'Scaling': 2.0,
                             'RsdlTarget': 1.0}}
    else:
        extra = {'MaxMainIter': 1,
                 'BackTrack': {'gamma_u': 1.2, 'MaxIter': 50}}
    dflt.update(extra)
    return dflt
Get defaults dict for the ConvBPDN class specified by the method parameter .
21,256
def ccmod_class_label_lookup(label):
    """Map a label string to the corresponding ConvCnstrMOD solver class.

    Raises:
        ValueError: if *label* is not a known method label.
    """
    clsmod = {'ism': admm_ccmod.ConvCnstrMOD_IterSM,
              'cg': admm_ccmod.ConvCnstrMOD_CG,
              'cns': admm_ccmod.ConvCnstrMOD_Consensus,
              'fista': fista_ccmod.ConvCnstrMOD}
    try:
        return clsmod[label]
    except KeyError:
        raise ValueError('Unknown ConvCnstrMOD solver method %s' % label)
Get a CCMOD class from a label string .
21,257
def ConvCnstrMODOptionsDefaults(method='fista'):
    """Default options dict for the ConvCnstrMOD class chosen by
    *method*."""
    dflt = copy.deepcopy(ccmod_class_label_lookup(method).Options.defaults)
    if method == 'fista':
        extra = {'MaxMainIter': 1,
                 'BackTrack': {'gamma_u': 1.2, 'MaxIter': 50}}
    else:
        extra = {'MaxMainIter': 1,
                 'AutoRho': {'Period': 10, 'AutoScaling': False,
                             'RsdlRatio': 10.0, 'Scaling': 2.0,
                             'RsdlTarget': 1.0}}
    dflt.update(extra)
    return dflt
Get defaults dict for the ConvCnstrMOD class specified by the method parameter .
21,258
def evaluate(self):
    """Compute functional values (data fidelity, l1 penalty, objective)
    for the previous iteration, or None when AccurateDFid is disabled."""
    if not self.opt['AccurateDFid']:
        return None
    D = self.dstep.var_y()
    X = self.xstep.var_y()
    S = self.xstep.S
    dfd = 0.5 * np.linalg.norm(D.dot(X) - S) ** 2
    rl1 = np.sum(np.abs(X))
    return dict(DFid=dfd, RegL1=rl1,
                ObjFun=dfd + self.xstep.lmbda * rl1)
Evaluate functional value of previous iteration
21,259
def is_newer_than(pth1, pth2):
    """Return True if either path is missing, or if *pth1* was modified
    more recently than *pth2*."""
    if not (os.path.exists(pth1) and os.path.exists(pth2)):
        return True
    return os.stat(pth1).st_mtime > os.stat(pth2).st_mtime
Return true if either file pth1 or file pth2 doesn't exist or if pth1 has been modified more recently than pth2 .
21,260
def mpraw_as_np(shape, dtype):
    """Return a numpy array of the given *shape*/*dtype* whose storage is
    a multiprocessing RawArray in shared memory."""
    # np.product was deprecated and removed in NumPy 2.0; np.prod is the
    # supported spelling.
    size = int(np.prod(shape))
    nbytes = size * np.dtype(dtype).itemsize
    raw = mp.RawArray('c', nbytes)
    return np.frombuffer(raw, dtype=dtype, count=size).reshape(shape)
Construct a numpy array of the specified shape and dtype for which the underlying storage is a multiprocessing RawArray in shared memory .
21,261
def init_mpraw(mpv, npv):
    """Create global *mpv* as a shared-memory RawArray-backed numpy array
    and copy the contents of array *npv* into it."""
    shared = mpraw_as_np(npv.shape, npv.dtype)
    globals()[mpv] = shared
    shared[:] = npv
Set a global variable as a multiprocessing RawArray in shared memory with a numpy array wrapper and initialise its value .
21,262
def cbpdn_setdict():
    """Set the dictionary for the cbpdn stage.

    All inputs and outputs are the module-level shared-state variables;
    there are no parameters or return values.
    """
    global mp_DSf
    mp_Df[:] = sl.rfftn(mp_D_Y, mp_cri.Nv, mp_cri.axisN)
    if mp_cri.Cd == 1:
        mp_DSf[:] = np.conj(mp_Df) * mp_Sf
    else:
        mp_DSf[:] = sl.inner(np.conj(mp_Df[np.newaxis, ...]), mp_Sf,
                             axis=mp_cri.axisC + 1)
Set the dictionary for the cbpdn stage . There are no parameters or return values because all inputs and outputs are from and to global variables .
21,263
def cbpdnmd_ustep(k):
    """U step of the cbpdn stage for slice *k*; all state lives in the
    module-level shared arrays."""
    mp_Z_U1[k] += mp_Z_X[k] - mp_Z_Y1[k]
    mp_Z_U0[k] += mp_DX[k] - mp_Z_Y0[k] - mp_S[k]
Do the U step of the cbpdn stage . The only parameter is the slice index k and there are no return values ; all inputs and outputs are from and to global variables .
21,264
def ccmodmd_relax(k):
    """Relaxation step of the ccmod stage for slice *k*; all state lives
    in the module-level shared arrays."""
    alpha = mp_drlx
    mp_D_X[k] = alpha * mp_D_X[k] + (1 - alpha) * mp_D_Y0
    mp_DX[k] = alpha * mp_DX[k] + (1 - alpha) * (mp_D_Y1[k] + mp_S[k])
Do relaxation for the ccmod stage . The only parameter is the slice index k and there are no return values ; all inputs and outputs are from and to global variables .
21,265
def eval_grad(self):
    """Spatial-domain gradient of the data fidelity term w.r.t. Y."""
    residual = self.D.dot(self.Y) - self.S
    return self.D.T.dot(residual)
Compute gradient in spatial domain for variable Y .
21,266
def rsdl(self):
    """Fixed point residual ||X - Yprv||_2."""
    return np.linalg.norm((self.X - self.Yprv).ravel())
Compute fixed point residual .
21,267
def eval_Rf(self, Vf):
    """Evaluate the smooth term at *Vf* in the frequency domain."""
    return sl.inner(self.Df, Vf, axis=self.cri.axisM) - self.Sf
Evaluate smooth term in Vf .
21,268
def zpad(v, Nv):
    """Zero-pad the initial len(Nv) axes of array `v` to the sizes given
    in Nv; padding is applied on the high-index side of each axis and
    trailing axes are left unchanged."""
    padded_shape = Nv + v.shape[len(Nv):]
    out = np.zeros(padded_shape, dtype=v.dtype)
    region = tuple(slice(0, n) for n in v.shape)
    out[region] = v
    return out
Zero - pad initial axes of array to specified size . Padding is applied to the right top etc . of the array indices .
21,269
def Pcn(x, dsz, Nv, dimN=2, dimC=1, crp=False, zm=False):
    """Constraint set projection for the convolutional dictionary update
    problem: crop to the dictionary support, optionally subtract the mean
    (zm), zero-pad to size Nv unless working in the cropped representation
    (crp), and normalise each filter.  Relies on the sibling helpers
    zpad, zeromean, bcrop and normalise."""
    if crp:
        # cropped representation: no zero-padding required
        def zpadfn(x):
            return x
    else:
        def zpadfn(x):
            return zpad(x, Nv)
    if zm:
        def zmeanfn(x):
            return zeromean(x, dsz, dimN)
    else:
        def zmeanfn(x):
            return x
    return normalise(zmeanfn(zpadfn(bcrop(x, dsz, dimN))), dimN + dimC)
Constraint set projection for convolutional dictionary update problem .
21,270
def getPcn(dsz, Nv, dimN=2, dimC=1, crp=False, zm=False):
    """Construct the constraint set projection function for the
    convolutional dictionary update problem, dispatching on the
    (crp, zm) flag pair to one of the specialised implementations and
    binding the remaining parameters via functools.partial."""
    fncdict = {(False, False): _Pcn, (False, True): _Pcn_zm,
               (True, False): _Pcn_crp, (True, True): _Pcn_zm_crp}
    fnc = fncdict[(crp, zm)]
    return functools.partial(fnc, dsz=dsz, Nv=Nv, dimN=dimN, dimC=dimC)
Construct the constraint set projection function for convolutional dictionary update problem .
21,271
def tiledict(D, sz=None):
    """Construct an image allowing visualization of dictionary content:
    the filters in D are rescaled to [0, 1] and tiled on an
    approximately square grid, separated by one-pixel gaps.

    `sz` optionally gives per-filter sizes (required when D is a 2d
    matrix of vectorised filters).
    """
    if D.ndim == 2:
        # 2d input: reshape vectorised filters using the supplied sizes
        D = D.reshape((sz + (D.shape[1],)))
        sz = None
    dsz = D.shape
    if D.ndim == 4:
        axisM = 3
        szni = 3
    else:
        axisM = 2
        szni = 2
    # per-filter spatial sizes, one column per filter
    if sz is None:
        sz = np.tile(np.array(dsz[0:2]).reshape([2, 1]), (1, D.shape[axisM]))
    else:
        sz = np.array(sum(tuple((x[0:2],) * x[szni] for x in sz), ())).T
    mxsz = np.amax(sz, 1)
    # rescale dictionary values to [0, 1] for display
    D = D - D.min()
    D = D / D.max()
    N = dsz[axisM]
    # near-square tiling grid: Vr rows, Vc columns
    Vr = int(np.floor(np.sqrt(N)))
    Vc = int(np.ceil(N / float(Vr)))
    if D.ndim == 4:
        im = np.ones((Vr * mxsz[0] + Vr - 1, Vc * mxsz[1] + Vc - 1, dsz[2]))
    else:
        im = np.ones((Vr * mxsz[0] + Vr - 1, Vc * mxsz[1] + Vc - 1))
    k = 0
    for l in range(0, Vr):
        for m in range(0, Vc):
            # top-left corner of filter k, with one-pixel separators
            r = mxsz[0] * l + l
            c = mxsz[1] * m + m
            if D.ndim == 4:
                im[r:(r + sz[0, k]), c:(c + sz[1, k]), :] = \
                    D[0:sz[0, k], 0:sz[1, k], :, k]
            else:
                im[r:(r + sz[0, k]), c:(c + sz[1, k])] = \
                    D[0:sz[0, k], 0:sz[1, k], k]
            k = k + 1
            if k >= N:
                break
        if k >= N:
            break
    return im
Construct an image allowing visualization of dictionary content .
21,272
def extractblocks(img, blksz, stpsz=None):
    """Extract (possibly overlapping) blocks of size `blksz`, stepped by
    `stpsz`, from ndarray `img` (a tuple of arrays is first stacked on a
    new final axis).  Returns an array of shape blksz + (num_blocks,).
    Uses a zero-copy strided view before the final reshape."""
    if isinstance(img, tuple):
        img = np.stack(img, axis=-1)
    if stpsz is None:
        stpsz = (1,) * len(blksz)
    imgsz = img.shape
    # number of block positions along each axis
    numblocks = tuple(int(np.floor((a - b) / c) + 1) for a, b, c in
                      zip_longest(imgsz, blksz, stpsz, fillvalue=1))
    # stride between successive block positions along each axis
    blockstrides = tuple(a * b for a, b in
                         zip_longest(img.strides, stpsz, fillvalue=1))
    new_shape = blksz + numblocks
    new_strides = img.strides[:len(blksz)] + blockstrides
    blks = np.lib.stride_tricks.as_strided(img, new_shape, new_strides)
    return np.reshape(blks, blksz + (-1,))
Extract blocks from an ndarray signal into an ndarray .
21,273
def averageblocks(blks, imgsz, stpsz=None):
    """Reconstruct a signal of shape `imgsz` by averaging the
    (possibly overlapping) blocks in `blks`; positions covered by no
    block are set to NaN."""
    blksz = blks.shape[:-1]
    if stpsz is None:
        stpsz = tuple(1 for _ in blksz)
    # number of block positions along each axis
    numblocks = tuple(int(np.floor((a - b) / c) + 1) for a, b, c in
                      zip_longest(imgsz, blksz, stpsz, fillvalue=1))
    new_shape = blksz + numblocks
    blks = np.reshape(blks, new_shape)
    imgs = np.zeros(imgsz, dtype=blks.dtype)
    # counts how many blocks contribute to each output position
    normalizer = np.zeros(imgsz, dtype=blks.dtype)
    for pos in np.ndindex(numblocks):
        slices = tuple(slice(a * c, a * c + b)
                       for a, b, c in zip(pos, blksz, stpsz))
        imgs[slices + pos[len(blksz):]] += blks[(Ellipsis,) + pos]
        normalizer[slices + pos[len(blksz):]] += blks.dtype.type(1)
    return np.where(normalizer > 0, (imgs / normalizer).astype(blks.dtype),
                    np.nan)
Average blocks together from an ndarray to reconstruct ndarray signal .
21,274
def combineblocks(blks, imgsz, stpsz=None, fn=np.median):
    """Reconstruct a signal of shape `imgsz` from the (possibly
    overlapping) blocks in `blks`, combining the contributions at each
    position with `fn` (default: median)."""
    # append a value to a list in place; vectorised over object arrays below
    def listapp(x, y):
        x.append(y)
    veclistapp = np.vectorize(listapp, otypes=[np.object_])
    blksz = blks.shape[:-1]
    if stpsz is None:
        stpsz = tuple(1 for _ in blksz)
    # number of block positions along each axis
    numblocks = tuple(int(np.floor((a - b) / c) + 1) for a, b, c in
                      zip_longest(imgsz, blksz, stpsz, fillvalue=1))
    new_shape = blksz + numblocks
    blks = np.reshape(blks, new_shape)
    # object array: each cell accumulates the list of contributing values
    imgs = np.empty(imgsz, dtype=np.object_)
    imgs.fill([])
    imgs = np.frompyfunc(list, 1, 1)(imgs)
    for pos in np.ndindex(numblocks):
        slices = tuple(slice(a * c, a * c + b) for a, b, c in
                       zip_longest(pos, blksz, stpsz, fillvalue=1))
        veclistapp(imgs[slices].squeeze(), blks[(Ellipsis,) + pos].squeeze())
    # reduce each accumulated list to a single value with fn
    return np.vectorize(fn, otypes=[blks.dtype])(imgs)
Combine blocks from an ndarray to reconstruct ndarray signal .
21,275
def complex_randn(*args):
    """Return an array of the given shape whose real and imaginary parts
    are independent draws from a standard normal distribution."""
    real_part = np.random.randn(*args)
    imag_part = np.random.randn(*args)
    return real_part + 1j * imag_part
Return a complex array of samples drawn from a standard normal distribution .
21,276
def spnoise(s, frc, smn=0.0, smx=1.0):
    """Return a copy of `s` with salt-and-pepper noise: roughly a
    fraction frc/2 of entries set to smn ("pepper") and frc/2 set to
    smx ("salt"), selected uniformly at random."""
    noisy = s.copy()
    mask = np.random.uniform(-1.0, 1.0, s.shape)
    pepper = mask < frc - 1.0
    salt = mask > 1.0 - frc
    noisy[pepper] = smn
    noisy[salt] = smx
    return noisy
Return image with salt & pepper noise imposed on it .
21,277
def pca(U, centre=False):
    """Compute the PCA basis for the columns of U.

    Returns a tuple (B, lmbda, C): B holds the basis vectors as columns,
    lmbda the corresponding variances (squared singular values), and C
    the subtracted column mean (None when centre is False).
    """
    C = np.mean(U, axis=1, keepdims=True) if centre else None
    if C is not None:
        U = U - C
    B, sv, _ = np.linalg.svd(U, full_matrices=False, compute_uv=True)
    return B, sv**2, C
Compute the PCA basis for columns of input array U .
21,278
def tikhonov_filter(s, lmbda, npd=16):
    r"""Lowpass filter based on Tikhonov regularization.

    Split `s` into lowpass and highpass components, returned as a tuple.
    The lowpass component minimises a gradient-penalised least squares
    problem, solved in the DFT domain; the input is symmetrically padded
    by `npd` samples on each side to reduce boundary effects.
    """
    # first-difference kernels along rows and columns
    grv = np.array([-1.0, 1.0]).reshape([2, 1])
    gcv = np.array([-1.0, 1.0]).reshape([1, 2])
    Gr = sla.fftn(grv, (s.shape[0] + 2 * npd, s.shape[1] + 2 * npd), (0, 1))
    Gc = sla.fftn(gcv, (s.shape[0] + 2 * npd, s.shape[1] + 2 * npd), (0, 1))
    # frequency response of the regularised inverse filter
    A = 1.0 + lmbda * np.conj(Gr) * Gr + lmbda * np.conj(Gc) * Gc
    if s.ndim > 2:
        # broadcast the 2d filter over any trailing (e.g. channel) axes
        A = A[(slice(None),) * 2 + (np.newaxis,) * (s.ndim - 2)]
    sp = np.pad(s, ((npd, npd),) * 2 + ((0, 0),) * (s.ndim - 2), 'symmetric')
    slp = np.real(sla.ifftn(sla.fftn(sp, axes=(0, 1)) / A, axes=(0, 1)))
    # crop the padding back off
    sl = slp[npd:(slp.shape[0] - npd), npd:(slp.shape[1] - npd)]
    sh = s - sl
    return sl.astype(s.dtype), sh.astype(s.dtype)
Lowpass filter based on Tikhonov regularization .
21,279
def gaussian(shape, sd=1.0):
    """Sample a separable multivariate Gaussian pdf on a grid spanning
    [-3, 3] along each axis, normalised to have unit sum.  `shape` may
    be an int (1d) or a tuple of axis lengths."""
    def pdf1d(x):
        return np.exp(-(x**2) / (2.0 * sd**2)) / (np.sqrt(2.0 * np.pi) * sd)

    if isinstance(shape, int):
        shape = (shape,)
    ndim = len(shape)
    out = 1.0
    for axis, n in enumerate(shape):
        # broadcastable 1d grid oriented along `axis`
        grid = np.linspace(-3.0, 3.0, n)
        grid = grid.reshape((1,) * axis + (n,) + (1,) * (ndim - axis - 1))
        out = out * pdf1d(grid)
    return out / np.sum(out)
Sample a multivariate Gaussian pdf normalised to have unit sum .
21,280
def convdicts():
    """Load and return, as a plain dict of arrays, the set of example
    learned convolutional dictionaries shipped with the package in
    data/convdict.npz."""
    pth = os.path.join(os.path.dirname(__file__), 'data', 'convdict.npz')
    npz = np.load(pth)
    cdd = {}
    # copy the arrays out of the lazily-loaded npz container
    for k in list(npz.keys()):
        cdd[k] = npz[k]
    return cdd
Access a set of example learned convolutional dictionaries .
21,281
def netgetdata(url, maxtry=3, timeout=10):
    """Fetch the content of `url` and return it wrapped in io.BytesIO.

    Timeouts are retried up to `maxtry` times; any other URL error is
    re-raised immediately.  If every attempt times out, the last error
    is raised (or a ValueError when maxtry <= 0, since the loop body
    then never runs).
    """
    err = ValueError('maxtry parameter should be greater than zero')
    for ntry in range(maxtry):
        try:
            rspns = urlrequest.urlopen(url, timeout=timeout)
            cntnt = rspns.read()
            break
        except urlerror.URLError as e:
            err = e
            # only timeouts are retried; anything else propagates
            if not isinstance(e.reason, socket.timeout):
                raise
    else:
        # loop exhausted without a successful read
        raise err
    return io.BytesIO(cntnt)
Get content of a file via a URL .
21,282
def image(self, fname, group=None, scaled=None, dtype=None, idxexp=None,
          zoom=None, gray=None):
    """Get the named image from the example-image store.

    Options left as None fall back to the corresponding instance
    defaults.  The image may be scaled to [0, 1], indexed by `idxexp`,
    zoomed, and/or converted to grayscale.  Raises IOError when the
    image file cannot be read.
    """
    if scaled is None:
        scaled = self.scaled
    if dtype is None:
        if self.dtype is None:
            dtype = np.uint8
        else:
            dtype = self.dtype
    if scaled and np.issubdtype(dtype, np.integer):
        # scaling to [0, 1] requires a floating point type
        dtype = np.float32
    if zoom is None:
        zoom = self.zoom
    if gray is None:
        gray = self.gray
    if group is None:
        pth = os.path.join(self.bpth, fname)
    else:
        pth = os.path.join(self.bpth, group, fname)
    try:
        img = np.asarray(imageio.imread(pth), dtype=dtype)
    except IOError:
        raise IOError('Could not access image %s in group %s' %
                      (fname, group))
    if scaled:
        # assumes 8-bit source data — TODO confirm for 16-bit images
        img /= 255.0
    if idxexp is not None:
        img = img[idxexp]
    if zoom is not None:
        if img.ndim == 2:
            img = sni.zoom(img, zoom)
        else:
            # zoom spatial axes only; leave channel axes untouched
            img = sni.zoom(img, (zoom,) * 2 + (1,) * (img.ndim - 2))
    if gray:
        img = rgb2gray(img)
    return img
Get named image .
21,283
def elapsed(self, label=None, total=True):
    """Get the elapsed time since the timer labelled `label` was started.

    When `label` is None the default label is used, and 0.0 is returned
    if that timer was never started.  An unrecognised explicit label
    raises KeyError.  When `total` is True, previously accumulated time
    (self.td) is included.
    """
    t = timer()
    if label is None:
        label = self.dfltlbl
        # the default timer may legitimately never have been started
        if label not in self.t0:
            return 0.0
    # NOTE: in the original flattened code this check was duplicated and
    # unreachable; an unknown explicit label must raise, not return 0.0
    if label not in self.t0:
        raise KeyError('Unrecognized timer key %s' % label)
    te = 0.0
    if self.t0[label] is not None:
        te = t - self.t0[label]
    if total:
        te += self.td[label]
    return te
Get elapsed time since timer start .
21,284
def elapsed(self, total=True):
    """Return the elapsed time for this object's timer, delegating to
    the underlying Timer with this object's label."""
    result = self.timer.elapsed(self.label, total=total)
    return result
Return the elapsed time for the timer .
21,285
def attach_keypress(fig, scaling=1.1):
    """Attach a key press handler to figure `fig`: 'q' closes it, 'e'
    expands it by factor `scaling`, and 'c' contracts it.  The handler
    function is returned; the matplotlib connection id is cached on the
    figure so the handler is attached at most once."""
    def press(event):
        if event.key == 'q':
            plt.close(fig)
        elif event.key == 'e':
            fig.set_size_inches(scaling * fig.get_size_inches(),
                                forward=True)
        elif event.key == 'c':
            fig.set_size_inches(fig.get_size_inches() / scaling,
                                forward=True)

    # guard against attaching the handler twice to the same figure
    if not hasattr(fig, '_sporco_keypress_cid'):
        cid = fig.canvas.mpl_connect('key_press_event', press)
        fig._sporco_keypress_cid = cid
    return press
Attach a key press event handler that configures keys for closing a figure and changing the figure size . Keys e and c respectively expand and contract the figure and key q closes it .
21,286
def attach_zoom(ax, scaling=2.0):
    """Attach a scroll-wheel zoom handler to axes `ax`.

    Scrolling up zooms in (by 1/scaling) and down zooms out, centred on
    the cursor position; the view is clamped so it never exceeds the
    reference limits captured when the handler is attached.  Returns the
    handler function.
    """
    def zoom(event):
        cur_xlim = ax.get_xlim()
        cur_ylim = ax.get_ylim()
        xdata = event.xdata
        ydata = event.ydata
        # ignore scroll events outside the axes area
        if xdata is None or ydata is None:
            return
        if event.button == 'up':
            scale_factor = 1.0 / scaling
        elif event.button == 'down':
            scale_factor = scaling
        # distances from cursor to each edge of the current view
        x_left = xdata - cur_xlim[0]
        x_right = cur_xlim[1] - xdata
        y_top = ydata - cur_ylim[0]
        y_bottom = cur_ylim[1] - ydata
        new_xlim = (xdata - x_left * scale_factor,
                    xdata + x_right * scale_factor)
        new_ylim = (ydata - y_top * scale_factor,
                    ydata + y_bottom * scale_factor)
        # clamp x range to the reference extent and shift back inside it
        if np.diff(new_xlim) > np.diff(zoom.xlim_ref):
            new_xlim *= np.diff(zoom.xlim_ref) / np.diff(new_xlim)
        if new_xlim[0] < zoom.xlim_ref[0]:
            new_xlim += np.array(zoom.xlim_ref[0] - new_xlim[0])
        if new_xlim[1] > zoom.xlim_ref[1]:
            new_xlim -= np.array(new_xlim[1] - zoom.xlim_ref[1])
        # y limits may be inverted (e.g. image axes); normalise orientation
        if zoom.ylim_ref[1] < zoom.ylim_ref[0]:
            ylim_ref = zoom.ylim_ref[::-1]
            new_ylim = new_ylim[::-1]
        else:
            ylim_ref = zoom.ylim_ref
        if np.diff(new_ylim) > np.diff(ylim_ref):
            new_ylim *= np.diff(ylim_ref) / np.diff(new_ylim)
        if new_ylim[0] < ylim_ref[0]:
            new_ylim += np.array(ylim_ref[0] - new_ylim[0])
        if new_ylim[1] > ylim_ref[1]:
            new_ylim -= np.array(new_ylim[1] - ylim_ref[1])
        # restore original orientation before applying
        if zoom.ylim_ref[1] < zoom.ylim_ref[0]:
            new_ylim = new_ylim[::-1]
        ax.set_xlim(new_xlim)
        ax.set_ylim(new_ylim)
        ax.figure.canvas.draw()

    # reference limits: the maximum extent the zoomed view may occupy
    zoom.xlim_ref = ax.get_xlim()
    zoom.ylim_ref = ax.get_ylim()
    fig = ax.get_figure()
    fig.canvas.mpl_connect('scroll_event', zoom)
    return zoom
Attach an event handler that supports zooming within a plot using the mouse scroll wheel .
21,287
def config_notebook_plotting():
    """Configure plotting functions for inline plotting within a Jupyter
    Notebook shell.  Has no effect outside a notebook, so it is safe to
    call from plain scripts.  Wraps plot/surf/contour/imview so their
    return values are suppressed (avoiding duplicate inline rendering)
    and disables Figure.show."""
    from sporco.util import in_notebook
    module = sys.modules[__name__]
    # patch only once (plot.__name__ changes after wrapping) and only
    # when actually running inside a notebook
    if in_notebook() and module.plot.__name__ == 'plot':
        set_notebook_plot_backend()
        plot_original = module.plot

        def plot_wrap(*args, **kwargs):
            plot_original(*args, **kwargs)
        module.plot = plot_wrap
        surf_original = module.surf

        def surf_wrap(*args, **kwargs):
            surf_original(*args, **kwargs)
        module.surf = surf_wrap
        contour_original = module.contour

        def contour_wrap(*args, **kwargs):
            contour_original(*args, **kwargs)
        module.contour = contour_wrap
        imview_original = module.imview

        def imview_wrap(*args, **kwargs):
            imview_original(*args, **kwargs)
        module.imview = imview_wrap
        import matplotlib.figure

        # Figure.show is redundant (and noisy) with inline rendering
        def show_disable(self):
            pass
        matplotlib.figure.Figure.show = show_disable
Configure plotting functions for inline plotting within a Jupyter Notebook shell . This function has no effect when not within a notebook shell and may therefore be used within a normal python script .
21,288
def init_vars(self, S, dimK):
    """Initialise the variables required for sparse coding and dictionary
    update for training data S, rebuilding the representation indexing
    and FFT work arrays whenever the signal size changes."""
    Nv = S.shape[0:self.dimN]
    if self.cri is None or Nv != self.cri.Nv:
        self.cri = cr.CDU_ConvRepIndexing(self.dsz, S, dimK, self.dimN)
        if self.opt['CUDA_CBPDN']:
            # the GPU solver has restrictions not shared by the CPU path
            if self.cri.Cd > 1 or self.cri.Cx > 1:
                raise ValueError('CUDA CBPDN solver can only be used for '
                                 'single channel problems')
            if self.cri.K > 1:
                raise ValueError('CUDA CBPDN solver can not be used with '
                                 'mini-batches')
        self.Df = sl.pyfftw_byte_aligned(sl.rfftn(self.D, self.cri.Nv,
                                                  self.cri.axisN))
        self.Gf = sl.pyfftw_empty_aligned(self.Df.shape, self.Df.dtype)
        self.Z = sl.pyfftw_empty_aligned(self.cri.shpX, self.dtype)
    else:
        # same signal size: just refresh the dictionary DFT in place
        self.Df[:] = sl.rfftn(self.D, self.cri.Nv, self.cri.axisN)
Initalise variables required for sparse coding and dictionary update for training data S .
21,289
def manage_itstat(self):
    """Compute the statistics for the current iteration, append them to
    the iteration-statistics record, and display them."""
    itst = self.iteration_stats()
    self.itstat.append(itst)
    self.display_status(self.fmtstr, itst)
Compute record and display iteration statistics .
21,290
def display_start(self):
    """Print the status display header and a separator line, but only
    when both the Verbose and StatusHeader options are enabled."""
    if not (self.opt['Verbose'] and self.opt['StatusHeader']):
        return
    print(self.hdrstr)
    print('-' * self.nsep)
Start status display if option selected .
21,291
def xstep(self, S, W, lmbda, dimK):
    """Solve the masked CSC (sparse coding) problem for training data S
    with spatial mask W and regularisation parameter lmbda, either on
    the GPU (CUDA_CBPDN option) or with the standard CPU solver."""
    if self.opt['CUDA_CBPDN']:
        Z = cuda.cbpdnmsk(self.D.squeeze(), S[..., 0], W.squeeze(), lmbda,
                          self.opt['CBPDN'])
        # restore the standard coefficient array layout
        Z = Z.reshape(self.cri.Nv + (1, 1, self.cri.M,))
        self.Z[:] = np.asarray(Z, dtype=self.dtype)
        self.Zf = sl.rfftn(self.Z, self.cri.Nv, self.cri.axisN)
        self.Sf = sl.rfftn(S.reshape(self.cri.shpS), self.cri.Nv,
                           self.cri.axisN)
        # no per-iteration statistics available from the CUDA solver
        self.xstep_itstat = None
    else:
        xstep = cbpdn.ConvBPDNMaskDcpl(self.D.squeeze(), S, lmbda, W,
                                       self.opt['CBPDN'], dimK=dimK,
                                       dimN=self.cri.dimN)
        xstep.solve()
        self.Sf = sl.rfftn(S.reshape(self.cri.shpS), self.cri.Nv,
                           self.cri.axisN)
        self.setcoef(xstep.getcoef())
        self.xstep_itstat = xstep.itstat[-1] if xstep.itstat else None
Solve CSC problem for training data S .
21,292
def keycmp(a, b, pth=()):
    """Recursively verify that the nested dict `b` contains no keys that
    are absent at the same level of `a`; `pth` records the key path.

    Raises UnknownKeyError for a key in `b` missing from `a`, and
    InvalidValueError when `b` holds a non-dict where `a` holds a dict.
    """
    valid_keys = list(a.keys())
    for key in list(b.keys()):
        if key not in valid_keys:
            raise UnknownKeyError(pth + (key,))
        a_val = a[key]
        if isinstance(a_val, dict):
            b_val = b[key]
            if not isinstance(b_val, dict):
                raise InvalidValueError(pth + (key,))
            keycmp(a_val, b_val, pth + (key,))
Recurse down the tree of nested dicts b at each level checking that it does not have any keys that are not also at the same level in a . The key path is recorded in pth . If an unknown key is encountered in b an UnknownKeyError exception is raised . If a non - dict value is encountered in b for which the corresponding value in a is a dict an InvalidValueError exception is raised .
21,293
def update(self, d):
    """Update this dict with the key/value pairs of the dict tree `d`,
    routing each assignment through __setitem__ so any per-key
    validation is applied."""
    for key in list(d.keys()):
        value = d[key]
        self.__setitem__(key, value)
Update the dict with the dict tree in parameter d .
21,294
def check(self, key, value):
    """Check whether the (key, value) pair is allowed.

    The key is allowed if it exists in the class-level `dflt` defaults
    dict at this node's key path; the value is disallowed if it is a
    non-dict where the defaults hold a dict.  Raises UnknownKeyError or
    InvalidValueError accordingly.
    """
    if hasattr(self, 'dflt'):
        # navigate the defaults tree down to this node's key path
        a = self.__class__.getnode(self.dflt, self.pth)
        if key not in a:
            raise UnknownKeyError(self.pth + (key,))
        elif isinstance(a[key], dict) and not isinstance(value, dict):
            raise InvalidValueError(self.pth + (key,))
Check whether key value pair is allowed . The key is allowed if there is a corresponding key in the defaults class attribute dict . The value is not allowed if it is a dict in the defaults dict and not a dict in value .
21,295
def getparent(d, pth):
    """Return the parent node of the subdict addressed by key path `pth`
    in nested dict `d`, i.e. descend through all but the final key.

    Raises InvalidValueError when an intermediate node is not a dict and
    UnknownKeyError when a path key is missing.
    """
    node = d
    for key in pth[:-1]:
        if not isinstance(node, dict):
            raise InvalidValueError(node)
        if key not in node:
            raise UnknownKeyError(pth)
        node = node[key]
    return node
Get the parent node of a subdict as specified by the key path in pth .
21,296
def par_relax_AX(i):
    """Parallel implementation of the relaxation step (applied only when
    option RelaxParam != 1.0) for slice group i; all arrays are
    module-level shared globals."""
    global mp_X
    global mp_Xnr
    global mp_DX
    global mp_DXnr
    # keep unrelaxed copies for residual computation
    mp_Xnr[mp_grp[i]:mp_grp[i + 1]] = mp_X[mp_grp[i]:mp_grp[i + 1]]
    mp_DXnr[i] = mp_DX[i]
    if mp_rlx != 1.0:
        grpind = slice(mp_grp[i], mp_grp[i + 1])
        mp_X[grpind] = mp_rlx * mp_X[grpind] + (1 - mp_rlx) * mp_Y1[grpind]
        mp_DX[i] = mp_rlx * mp_DX[i] + (1 - mp_rlx) * mp_Y0[i]
Parallel implementation of relaxation if option RelaxParam ! = 1 . 0 .
21,297
def par_final_stepgrp(i):
    """Parallel step grouping of the final iteration of solve for slice
    group i.  The steps are cyclically permuted relative to the regular
    iteration so that only one merge per iteration is required, at the
    cost of distinct initial and final step groups."""
    par_y0bstep(i)
    par_y1step(i)
    par_u0step(i)
    par_u1step(i)
The parallel step grouping of the final iteration in solve . A cyclic permutation of the steps is done to require only one merge per iteration requiring unique initial and final step groups .
21,298
def par_compute_residuals(i):
    """Compute, for slice group i, the components of the primal/dual
    residuals and stopping thresholds that can be evaluated in parallel;
    results are written into module-level shared accumulator arrays."""
    global mp_ry0
    global mp_ry1
    global mp_sy0
    global mp_sy1
    global mp_nrmAx
    global mp_nrmBy
    global mp_nrmu
    # primal residual components (unrelaxed variables vs Y)
    mp_ry0[i] = np.sum((mp_DXnr[i] - mp_Y0[i])**2)
    mp_ry1[i] = mp_alpha**2 * np.sum((mp_Xnr[mp_grp[i]:mp_grp[i + 1]] -
                                      mp_Y1[mp_grp[i]:mp_grp[i + 1]])**2)
    # dual residual components (change in Y between iterations)
    mp_sy0[i] = np.sum((mp_Y0old[i] - mp_Y0[i])**2)
    mp_sy1[i] = mp_alpha**2 * np.sum((mp_Y1old[mp_grp[i]:mp_grp[i + 1]] -
                                      mp_Y1[mp_grp[i]:mp_grp[i + 1]])**2)
    # norms used in the relative stopping thresholds
    mp_nrmAx[i] = np.sum(mp_DXnr[i]**2) + mp_alpha**2 * np.sum(
        mp_Xnr[mp_grp[i]:mp_grp[i + 1]]**2)
    mp_nrmBy[i] = np.sum(mp_Y0[i]**2) + mp_alpha**2 * np.sum(
        mp_Y1[mp_grp[i]:mp_grp[i + 1]]**2)
    mp_nrmu[i] = np.sum(mp_U0[i]**2) + np.sum(
        mp_U1[mp_grp[i]:mp_grp[i + 1]]**2)
Compute components of the residual and stopping thresholds that can be done in parallel .
21,299
def init_pool(self):
    """Initialise the multiprocessing pool if it has not been created
    yet; with a single process no pool is used (self.pool stays None)."""
    if self.pool is not None:
        print('pool already initialized?')
        return
    self.pool = mp.Pool(processes=self.nproc) if self.nproc > 1 else None
Initialize multiprocessing pool if necessary .