idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
12,500
def main ( ) : # Set up curses. stdscr = curses . initscr ( ) curses . start_color ( ) curses . init_pair ( 1 , foreground_colour , background_colour ) curses . noecho ( ) stdscr . keypad ( True ) stdscr . nodelay ( True ) try : curses . curs_set ( False ) except : pass try : # Run main event loop until quit. while True : try : update_gunicorns ( ) handle_keypress ( stdscr ) display_output ( stdscr ) curses . napms ( int ( screen_delay * 1000 ) ) except KeyboardInterrupt : break finally : # Tear down curses. curses . nocbreak ( ) stdscr . keypad ( False ) curses . echo ( ) curses . endwin ( )
Main entry point for gunicorn_console .
176
10
12,501
def _get_variant_silent ( parser , variant ) : prev_log = config . LOG_NOT_FOUND config . LOG_NOT_FOUND = False results = parser . get_variant_genotypes ( variant ) config . LOG_NOT_FOUND = prev_log return results
Gets a variant from the parser while disabling logging .
65
11
12,502
def _attrs_ ( mcs , cls , attr_name : str ) -> Tuple [ Any , ... ] : return tuple ( map ( lambda x : getattr ( x , attr_name ) , list ( cls ) ) )
Returns a tuple containing just the value of the given attr_name of all the elements from the cls .
54
23
12,503
def _from_attr_ ( mcs , cls , attr_name : str , attr_value : Any ) -> TypeVar : return next ( iter ( filter ( lambda x : getattr ( x , attr_name ) == attr_value , list ( cls ) ) ) , None )
Returns the enumeration item regarding to the attribute name and value or None if not found for the given cls
67
22
12,504
def describe ( cls ) -> None : max_lengths = [ ] for attr_name in cls . attr_names ( ) : attr_func = "%ss" % attr_name attr_list = list ( map ( str , getattr ( cls , attr_func ) ( ) ) ) + [ attr_name ] max_lengths . append ( max ( list ( map ( len , attr_list ) ) ) ) row_format = "{:>%d} | {:>%d} | {:>%d}" % tuple ( max_lengths ) headers = [ attr_name . capitalize ( ) for attr_name in cls . attr_names ( ) ] header_line = row_format . format ( * headers ) output = "Class: %s\n" % cls . __name__ output += header_line + "\n" output += "-" * ( len ( header_line ) ) + "\n" for item in cls : format_list = [ str ( getattr ( item , attr_name ) ) for attr_name in cls . attr_names ( ) ] output += row_format . format ( * format_list ) + "\n" print ( output )
Prints in the console a table showing all the attributes for all the definitions inside the class
275
18
12,505
def read_iter ( use_fpi ) : filename_rhosuffix = 'exe/inv.lastmod_rho' filename = 'exe/inv.lastmod' # filename HAS to exist. Otherwise the inversion was not finished if ( not os . path . isfile ( filename ) ) : print ( 'Inversion was not finished! No last iteration found.' ) if ( use_fpi is True ) : if ( os . path . isfile ( filename_rhosuffix ) ) : filename = filename_rhosuffix linestring = open ( filename , 'r' ) . readline ( ) . strip ( ) linestring = linestring . replace ( '\n' , '' ) linestring = linestring . replace ( '../' , '' ) return linestring
Return the path to the final . mag file either for the complex or the fpi inversion .
175
20
12,506
def list_datafiles ( ) : is_cplx , is_fpi = td_type ( ) # get the highest iteration it_rho = read_iter ( is_fpi ) it_phase = read_iter ( False ) # list the files files = [ 'inv/coverage.mag' ] dtype = [ 'cov' ] files . append ( it_rho ) dtype . append ( 'mag' ) if is_cplx : files . append ( it_rho . replace ( 'mag' , 'pha' ) ) dtype . append ( 'pha' ) if is_fpi : files . append ( it_phase . replace ( 'mag' , 'pha' ) ) dtype . append ( 'pha_fpi' ) return files , dtype
Get the type of the tomodir and the highest iteration to list all files which will be plotted .
174
21
12,507
def read_datafiles ( files , dtype , column ) : pha = [ ] pha_fpi = [ ] for filename , filetype in zip ( files , dtype ) : if filetype == 'cov' : cov = load_cov ( filename ) elif filetype == 'mag' : mag = load_rho ( filename , column ) elif filetype == 'pha' : pha = load_rho ( filename , 2 ) elif filetype == 'pha_fpi' : pha_fpi = load_rho ( filename , 2 ) return cov , mag , pha , pha_fpi
Load the datafiles and return cov mag phase and fpi phase values .
140
15
12,508
def load_cov ( name ) : content = np . genfromtxt ( name , skip_header = 1 , skip_footer = 1 , usecols = ( [ 2 ] ) ) return content
Load a datafile with coverage file structure .
44
9
12,509
def load_rho ( name , column ) : try : content = np . loadtxt ( name , skiprows = 1 , usecols = ( [ column ] ) ) except : raise ValueError ( 'Given column to open does not exist.' ) return content
Load a datafile with rho structure like mag and phase
55
12
12,510
def calc_complex ( mag , pha ) : complx = [ 10 ** m * math . e ** ( 1j * p / 1e3 ) for m , p in zip ( mag , pha ) ] real = [ math . log10 ( ( 1 / c ) . real ) for c in complx ] imag = [ ] for c in complx : if ( ( 1 / c ) . imag ) == 0 : imag . append ( math . nan ) else : i = math . log10 ( abs ( ( 1 / c ) . imag ) ) imag . append ( i ) return real , imag
Calculate real and imaginary part of the complex conductivity from magnitude and phase in log10 .
129
20
12,511
def plot_ratio ( cid , ax , plotman , title , alpha , vmin , vmax , xmin , xmax , zmin , zmax , xunit , cbtiks , elecs ) : # handle options cblabel = 'anisotropy ratio' zlabel = 'z [' + xunit + ']' xlabel = 'x [' + xunit + ']' # cm = 'brg' cm = 'RdYlGn' xmin , xmax , zmin , zmax , vmin , vmax = check_minmax ( plotman , cid , xmin , xmax , zmin , zmax , vmin , vmax , ) # plot fig , ax , cnorm , cmap , cb , scalarMap = plotman . plot_elements_to_ax ( cid = cid , ax = ax , xmin = xmin , xmax = xmax , zmin = zmin , zmax = zmax , cblabel = cblabel , cbnrticks = cbtiks , title = title , zlabel = zlabel , xlabel = xlabel , plot_colorbar = True , cmap_name = cm , no_elecs = elecs , cbmin = vmin , cbmax = vmax , ) return fig , ax , cnorm , cmap , cb
Plot ratio of two conductivity directions .
300
8
12,512
def check_minmax ( plotman , cid , xmin , xmax , zmin , zmax , vmin , vmax ) : if xmin is None : xmin = plotman . grid . grid [ 'x' ] . min ( ) if xmax is None : xmax = plotman . grid . grid [ 'x' ] . max ( ) if zmin is None : zmin = plotman . grid . grid [ 'z' ] . min ( ) if zmax is None : zmax = plotman . grid . grid [ 'z' ] . max ( ) if isinstance ( cid , int ) : subdata = plotman . parman . parsets [ cid ] else : subdata = cid if vmin is None : vmin = subdata . min ( ) if vmax is None : vmax = subdata . max ( ) return xmin , xmax , zmin , zmax , vmin , vmax
Get min and max values for axes and colorbar if not given
207
13
12,513
def citation_director ( * * kwargs ) : qualifier = kwargs . get ( 'qualifier' , '' ) content = kwargs . get ( 'content' , '' ) if qualifier == 'publicationTitle' : return CitationJournalTitle ( content = content ) elif qualifier == 'volume' : return CitationVolume ( content = content ) elif qualifier == 'issue' : return CitationIssue ( content = content ) elif qualifier == 'pageStart' : return CitationFirstpage ( content = content ) elif qualifier == 'pageEnd' : return CitationLastpage ( content = content ) else : return None
Direct the citation elements based on their qualifier .
131
9
12,514
def identifier_director ( * * kwargs ) : qualifier = kwargs . get ( 'qualifier' , '' ) content = kwargs . get ( 'content' , '' ) if qualifier == 'ISBN' : return CitationISBN ( content = content ) elif qualifier == 'ISSN' : return CitationISSN ( content = content ) elif qualifier == 'DOI' : return CitationDOI ( content = content ) elif qualifier == 'REP-NO' : return CitationTechnicalReportNumber ( content = content ) else : return None
Direct the identifier elements based on their qualifier .
118
9
12,515
def get_author ( self , * * kwargs ) : qualifier = kwargs . get ( 'qualifier' , '' ) children = kwargs . get ( 'children' , [ ] ) creator_type_per = False author_name = None # Find the creator type in children. for child in children : if child . tag == 'type' and child . content == 'per' : creator_type_per = True # Get the author name. elif child . tag == 'name' : author_name = child . content if qualifier == 'aut' and creator_type_per and author_name : return author_name return None
Determine the authors from the creator field .
139
10
12,516
def get_publisher_name ( self , * * kwargs ) : children = kwargs . get ( 'children' , [ ] ) # Find the creator type in children. for child in children : if child . tag == 'name' : return child . content return None
Get the publisher name .
60
5
12,517
def get_publication_date ( self , * * kwargs ) : date_string = kwargs . get ( 'content' , '' ) date_match = CREATION_DATE_REGEX . match ( date_string ) month_match = CREATION_MONTH_REGEX . match ( date_string ) year_match = CREATION_YEAR_REGEX . match ( date_string ) # Check if a date match exists. if date_match : ( year , month , day ) = date_match . groups ( '' ) # Create the date. try : creation_date = datetime . date ( int ( year ) , int ( month ) , int ( day ) ) except ValueError : return None else : return '%s/%s/%s' % ( format_date_string ( creation_date . month ) , format_date_string ( creation_date . day ) , creation_date . year , ) elif month_match : ( year , month ) = month_match . groups ( '' ) # Create the date. try : creation_date = datetime . date ( int ( year ) , int ( month ) , 1 ) except ValueError : return None else : return '%s/%s' % ( format_date_string ( creation_date . month ) , creation_date . year , ) elif year_match : year = year_match . groups ( '' ) [ 0 ] return year else : return None
Determine the creation date for the publication date .
315
11
12,518
def get_online_date ( self , * * kwargs ) : qualifier = kwargs . get ( 'qualifier' , '' ) content = kwargs . get ( 'content' , '' ) # Handle meta-creation-date element. if qualifier == 'metadataCreationDate' : date_match = META_CREATION_DATE_REGEX . match ( content ) ( year , month , day ) = date_match . groups ( '' ) # Create the date. creation_date = datetime . date ( int ( year ) , int ( month ) , int ( day ) ) return '%s/%s/%s' % ( format_date_string ( creation_date . month ) , format_date_string ( creation_date . day ) , creation_date . year , ) return None
Get the online date from the meta creation date .
177
10
12,519
def get_institution ( self , * * kwargs ) : qualifier = kwargs . get ( 'qualifier' , '' ) content = kwargs . get ( 'content' , '' ) if qualifier == 'grantor' : return content return None
Get the dissertation institution .
57
5
12,520
def model_results ( self ) -> str : with open ( os . path . join ( self . directory , "model.results" ) ) as f : return f . read ( )
Reads the model . results file
39
7
12,521
def header ( self ) -> str : return "/" . join ( ( self . pipeline , self . phase , self . data ) )
A header created by joining the pipeline phase and data names
28
11
12,522
def optimizer ( self ) -> non_linear . NonLinearOptimizer : if self . __optimizer is None : with open ( os . path . join ( self . directory , ".optimizer.pickle" ) , "r+b" ) as f : self . __optimizer = pickle . loads ( f . read ( ) ) return self . __optimizer
The optimizer object that was used in this phase
81
10
12,523
def phases_with ( self , * * kwargs ) -> [ PhaseOutput ] : return [ phase for phase in self . phases if all ( [ getattr ( phase , key ) == value for key , value in kwargs . items ( ) ] ) ]
Filters phases . If no arguments are passed all phases are returned . Arguments must be key value pairs with phase data or pipeline as the key .
56
30
12,524
def optimizers_with ( self , * * kwargs ) -> [ non_linear . NonLinearOptimizer ] : return [ phase . optimizer for phase in self . phases_with ( * * kwargs ) ]
Load a list of optimizers for phases in the directory with zero or more filters applied .
50
18
12,525
def model_results ( self , * * kwargs ) -> str : return "\n\n" . join ( "{}\n\n{}" . format ( phase . header , phase . model_results ) for phase in self . phases_with ( * * kwargs ) )
Collates model results from all phases in the directory or some subset if filters are applied .
61
18
12,526
def branches ( config , searchstring = "" ) : repo = config . repo branches_ = list ( find ( repo , searchstring ) ) if branches_ : merged = get_merged_branches ( repo ) info_out ( "Found existing branches..." ) print_list ( branches_ , merged ) if len ( branches_ ) == 1 and searchstring : # If the found branch is the current one, error active_branch = repo . active_branch if active_branch == branches_ [ 0 ] : error_out ( "You're already on '{}'" . format ( branches_ [ 0 ] . name ) ) branch_name = branches_ [ 0 ] . name if len ( branch_name ) > 50 : branch_name = branch_name [ : 47 ] + "…" check_it_out = ( input ( "Check out '{}'? [Y/n] " . format ( branch_name ) ) . lower ( ) . strip ( ) != "n" ) if check_it_out : branches_ [ 0 ] . checkout ( ) elif searchstring : error_out ( "Found no branches matching '{}'." . format ( searchstring ) ) else : error_out ( "Found no branches." )
List all branches . And if exactly 1 found offer to check it out .
266
15
12,527
def decodebytes ( input ) : py_version = sys . version_info [ 0 ] if py_version >= 3 : return _decodebytes_py3 ( input ) return _decodebytes_py2 ( input )
Decode base64 string to byte array .
48
9
12,528
def capture ( self , commit = "" ) : self . _validateProvider ( self . _provider ) # get client for repository # TODO(jchaloup): read config file to switch between local and remove clients # TODO(jchaloup): remote client can cover gofed infratructure or any remove source for repository info client = RepositoryClientBuilder ( ) . buildWithRemoteClient ( self . _provider ) if self . _provider [ "provider" ] == "github" : self . _signature = ProjectGithubRepositoryCapturer ( self . _provider , client ) . capture ( commit ) . signature ( ) elif self . _provider [ "provider" ] == "bitbucket" : self . _signature = ProjectBitbucketRepositoryCapturer ( self . _provider , client ) . capture ( commit ) . signature ( ) else : raise KeyError ( "Provider '%s' not recognized" % self . _provider [ "provider" ] ) return self
Capture the current state of a project based on its provider
220
11
12,529
def found_duplicates ( counts ) : _logger . warning ( "Duplicated markers found" ) for marker , count in counts : _logger . warning ( " - {}: {:,d} times" . format ( marker , count ) ) _logger . warning ( "Appending ':dupX' to the duplicated markers according " "to their location in the file." )
Log that duplicates were found .
86
7
12,530
def patch_model_schemas ( mapping ) : from mbdata . models import Base for table in Base . metadata . sorted_tables : if table . schema is None : continue table . schema = mapping . get ( table . schema , table . schema )
Update mbdata . models to use different schema names
56
11
12,531
def detectRamPorts ( stm : IfContainer , current_en : RtlSignalBase ) : if stm . ifFalse or stm . elIfs : return for _stm in stm . ifTrue : if isinstance ( _stm , IfContainer ) : yield from detectRamPorts ( _stm , _stm . cond & current_en ) elif isinstance ( _stm , Assignment ) : if isinstance ( _stm . dst . _dtype , HArray ) : assert len ( _stm . indexes ) == 1 , "one address per RAM port" w_addr = _stm . indexes [ 0 ] mem = _stm . dst yield ( RAM_WRITE , mem , w_addr , current_en , _stm . src ) elif _stm . src . hidden and len ( _stm . src . drivers ) == 1 : op = _stm . src . drivers [ 0 ] mem = op . operands [ 0 ] if isinstance ( mem . _dtype , HArray ) and op . operator == AllOps . INDEX : r_addr = op . operands [ 1 ] if _stm . indexes : raise NotImplementedError ( ) yield ( RAM_READ , mem , r_addr , current_en , _stm . dst )
Detect RAM ports in If statement
289
6
12,532
def addInputPort ( self , node , name , i : Union [ Value , RtlSignalBase ] , side = PortSide . WEST ) : root = self . node port = node . addPort ( name , PortType . INPUT , side ) netCtxs = self . netCtxs if isinstance ( i , LPort ) : root . addEdge ( i , port ) elif isConst ( i ) : i = i . staticEval ( ) c , wasThereBefore = self . netCtxs . getDefault ( i ) if not wasThereBefore : v = ValueAsLNode ( root , i ) . east [ 0 ] c . addDriver ( v ) c . addEndpoint ( port ) elif i . hidden : # later connect driver of this signal to output port ctx , wasThereBefore = netCtxs . getDefault ( i ) if not wasThereBefore : self . lazyLoadNet ( i ) ctx . addEndpoint ( port ) else : portCtx = self . portCtx rootCtx , _ = self . rootNetCtxs . getDefault ( i ) if self . isVirtual : # later connect signal in root to input port or input port of # wrap node rootCtx . addEndpoint ( port ) else : # spot input port on this wrap node if required isNewlySpotted = ( i , PortType . INPUT ) not in portCtx . data src = portCtx . register ( i , PortType . INPUT ) # connect input port on wrap node with specified output port ctx , _ = netCtxs . getDefault ( i ) ctx . addDriver ( src ) ctx . addEndpoint ( port ) if isNewlySpotted : # get input port from parent view _port = portCtx . getOutside ( i , PortType . INPUT ) rootCtx . addEndpoint ( _port )
Add and connect input port on subnode
411
8
12,533
def addOutputPort ( self , node : LNode , name : str , out : Optional [ Union [ RtlSignalBase , LPort ] ] , side = PortSide . EAST ) : oPort = node . addPort ( name , PortType . OUTPUT , side ) if out is not None : if isinstance ( out , LPort ) : self . node . addEdge ( oPort , out ) elif out . hidden : raise ValueError ( "Hidden signals should not be connected to outside" , name ) elif self . isVirtual : # This node is inlined inside of parent. # Mark that this output of subnode should be connected # to output of parent node. ctx , _ = self . netCtxs . getDefault ( out ) ctx . addDriver ( oPort ) else : # connect my signal to my output port _out = self . portCtx . getInside ( out , PortType . OUTPUT ) self . node . addEdge ( oPort , _out , originObj = out ) # mark connection of output port to parent net ooPort = self . portCtx . getOutside ( out , PortType . OUTPUT ) ctx , _ = self . rootNetCtxs . getDefault ( out ) ctx . addDriver ( ooPort ) return oPort
Add and connect output port on subnode
281
8
12,534
def renderContent ( self ) : stm = self . stm portCtx = self . portCtx # for each inputs and outputs render expression trees # walk statements and render muxs and memories for o in stm . _outputs : if not self . isVirtual : portCtx . register ( o , PortType . OUTPUT ) canHaveRamPorts = isinstance ( stm , IfContainer ) and arr_any ( chain ( stm . _inputs , stm . _outputs ) , lambda s : isinstance ( s . _dtype , HArray ) ) # render RAM ports consumedOutputs = set ( ) if canHaveRamPorts : for pType , memSig , addrSig , enSig , io in detectRamPorts ( stm , stm . cond ) : if pType == RAM_READ : self . createRamReadNode ( memSig , enSig , addrSig , io , True ) consumedOutputs . add ( io ) elif pType == RAM_WRITE : self . createRamWriteNode ( memSig , enSig , addrSig , io , True ) consumedOutputs . add ( memSig ) else : raise TypeError ( ) for o in stm . _outputs : if o not in consumedOutputs : self . renderForSignal ( stm , o , True ) if not self . isVirtual : self . netCtxs . applyConnections ( self . node )
Walk from outputs to inputs for each public signal register port of wrap node if required lazy load all operator and statement nodes for signals
319
25
12,535
def generate ( self , project ) : for assignment in self . s2n_mapping : if assignment [ "ipprefix" ] == project : self . _name = assignment [ "package" ] return self # # github.com -> github # code.google.com/p/ -> googlecode # golang.org/x/ -> golangorg # gopkg.in/check.v1 -> gopkg-check # camlistore.org # name = project if name . startswith ( "github.com" ) : name = re . sub ( r"^github\.com" , "github" , name ) if name . startswith ( "gopkg.in" ) : name = re . sub ( r"gopkg\.in" , "gopkg" , name ) # any version marks? name = re . sub ( r"\.v\d" , "" , name ) name = re . sub ( r"/v\d/" , "/" , name ) if name . startswith ( "code.google.com/p" ) : name = re . sub ( r"^code\.google\.com/p" , "googlecode" , name ) if name . startswith ( "golang.org/x" ) : name = re . sub ( r"^golang\.org/x" , "golangorg" , name ) if name . startswith ( "google.golang.org" ) : name = re . sub ( r"^google\.golang\.org" , "googlegolangorg" , name ) if name . startswith ( "bitbucket.org" ) : name = re . sub ( r"^bitbucket\.org" , "bitbucket" , name ) if name . startswith ( "k8s.io" ) : name = re . sub ( r"^k8s\.io" , "k8s" , name ) if name . endswith ( ".org" ) : name = re . sub ( r"\.org$" , "" , name ) name = name . replace ( "/" , "-" ) self . _name = "golang-%s" % name return self
Package name construction is based on provider not on prefix . Prefix does not have to equal provider_prefix .
491
22
12,536
def hash_host ( hostname , salt = None ) : if salt is None : salt = os . urandom ( sha1 ( ) . digest_size ) else : if salt . startswith ( '|1|' ) : salt = salt . split ( '|' ) [ 2 ] salt = decodebytes ( b ( salt ) ) assert len ( salt ) == sha1 ( ) . digest_size hmac = HMAC ( salt , b ( hostname ) , sha1 ) . digest ( ) hostkey = '|1|%s|%s' % ( u ( encodebytes ( salt ) ) , u ( encodebytes ( hmac ) ) ) return hostkey . replace ( '\n' , '' )
Return a hashed form of the hostname as used by OpenSSH when storing hashed hostnames in the known_hosts file .
158
29
12,537
def _read_elem_nodes ( self , fid ) : nodes = { } # # prepare nodes # nodes_sorted = np.zeros((number_of_nodes, 3), dtype=float) # nodes = np.zeros((number_of_nodes, 3), dtype=float) # read in nodes nodes_raw = np . empty ( ( self . header [ 'nr_nodes' ] , 3 ) , dtype = float ) for nr in range ( 0 , self . header [ 'nr_nodes' ] ) : node_line = fid . readline ( ) . lstrip ( ) nodes_raw [ nr , : ] = np . fromstring ( node_line , dtype = float , sep = ' ' ) # round node coordinates to 5th decimal point. Sometimes this is # important when we deal with mal-formatted node data nodes_raw [ : , 1 : 3 ] = np . round ( nodes_raw [ : , 1 : 3 ] , 5 ) # check for CutMcK # The check is based on the first node, but if one node was renumbered, # so were all the others. if ( nodes_raw [ : , 0 ] != list ( range ( 1 , nodes_raw . shape [ 0 ] ) ) ) : self . header [ 'cutmck' ] = True print ( 'This grid was sorted using CutMcK. The nodes were resorted!' ) else : self . header [ 'cutmck' ] = False # Rearrange nodes when CutMcK was used. if ( self . header [ 'cutmck' ] ) : nodes_cutmck = np . empty_like ( nodes_raw ) nodes_cutmck_index = np . zeros ( nodes_raw . shape [ 0 ] , dtype = int ) for node in range ( 0 , self . header [ 'nr_nodes' ] ) : new_index = np . where ( nodes_raw [ : , 0 ] . astype ( int ) == ( node + 1 ) ) nodes_cutmck [ new_index [ 0 ] , 1 : 3 ] = nodes_raw [ node , 1 : 3 ] nodes_cutmck [ new_index [ 0 ] , 0 ] = new_index [ 0 ] nodes_cutmck_index [ node ] = new_index [ 0 ] # sort them nodes_sorted = nodes_cutmck [ nodes_cutmck_index , : ] nodes [ 'presort' ] = nodes_cutmck nodes [ 'cutmck_index' ] = nodes_cutmck_index nodes [ 'rev_cutmck_index' ] = np . argsort ( nodes_cutmck_index ) else : nodes_sorted = nodes_raw nodes [ 'presort' ] = nodes_raw # prepare node dict nodes [ 'raw' ] = nodes_raw nodes [ 'sorted' ] = nodes_sorted self . nodes = nodes self . nr_of_nodes = nodes [ 'raw' ] . shape [ 0 ]
Read the nodes from an opened elem . dat file . Correct for CutMcK transformations .
663
19
12,538
def calculate_dimensions ( self ) : x_coordinates = np . sort ( self . grid [ 'x' ] [ : , 0 ] ) # first x node self . nr_nodes_z = np . where ( x_coordinates == x_coordinates [ 0 ] ) [ 0 ] . size self . nr_elements_x = self . elements . shape [ 0 ] / ( self . nr_nodes_z - 1 ) self . nr_nodes_x = self . nr_elements_x + 1 self . nr_elements_z = self . nr_nodes_z - 1
For a regular grid calculate the element and node dimensions
142
10
12,539
def _read_elem_neighbors ( self , fid ) : # get number of boundary elements # types 11 and 12 are boundary elements sizes = sum ( [ len ( self . element_data [ key ] ) for key in ( 11 , 12 ) if self . element_data . get ( key , None ) is not None ] ) self . neighbors = [ ] try : for i in range ( 0 , sizes ) : self . neighbors . append ( int ( fid . readline ( ) . strip ( ) ) ) except Exception as e : raise Exception ( 'Not enough neighbors in file' )
Read the boundary - element - neighbors from the end of the file
126
13
12,540
def load_grid ( self , elem_file , elec_file ) : self . load_elem_file ( elem_file ) self . load_elec_file ( elec_file )
Load elem . dat and elec . dat
46
10
12,541
def get_element_centroids ( self ) : centroids = np . vstack ( ( np . mean ( self . grid [ 'x' ] , axis = 1 ) , np . mean ( self . grid [ 'z' ] , axis = 1 ) ) ) . T return centroids
return the central points of all elements
65
7
12,542
def get_internal_angles ( self ) : angles = [ ] for elx , elz in zip ( self . grid [ 'x' ] , self . grid [ 'z' ] ) : el_angles = [ ] xy = np . vstack ( ( elx , elz ) ) for i in range ( 0 , elx . size ) : i1 = ( i - 1 ) % elx . size i2 = ( i + 1 ) % elx . size a = ( xy [ : , i ] - xy [ : , i1 ] ) b = ( xy [ : , i2 ] - xy [ : , i ] ) # note that nodes are ordered counter-clockwise! angle = np . pi - np . arctan2 ( a [ 0 ] * b [ 1 ] - a [ 1 ] * b [ 0 ] , a [ 0 ] * b [ 0 ] + a [ 1 ] * b [ 1 ] ) el_angles . append ( angle * 180 / np . pi ) angles . append ( el_angles ) return np . array ( angles )
Compute all internal angles of the grid
236
8
12,543
def Wm ( self ) : centroids = self . get_element_centroids ( ) Wm = scipy . sparse . csr_matrix ( ( self . nr_of_elements , self . nr_of_elements ) ) # Wm = np.zeros((self.nr_of_elements, self.nr_of_elements)) for i , nb in enumerate ( self . element_neighbors ) : for j , edges in zip ( nb , self . element_neighbors_edges [ i ] ) : # side length edge_coords = self . nodes [ 'presort' ] [ edges ] [ : , 1 : ] edge_length = np . linalg . norm ( edge_coords [ 1 , : ] - edge_coords [ 0 , : ] ) distance = np . linalg . norm ( centroids [ i ] - centroids [ j ] ) # main diagonal Wm [ i , i ] += edge_length / distance # side diagonals Wm [ i , j ] -= edge_length / distance return Wm
Return the smoothing regularization matrix Wm of the grid
248
12
12,544
def create_tomodir ( self , directory ) : pwd = os . getcwd ( ) if not os . path . isdir ( directory ) : os . makedirs ( directory ) os . chdir ( directory ) directories = ( 'config' , 'exe' , 'grid' , 'mod' , 'mod/pot' , 'mod/sens' , 'rho' , ) for directory in directories : if not os . path . isdir ( directory ) : os . makedirs ( directory ) os . chdir ( pwd )
Create a tomodir subdirectory structure in the given directory
120
12
12,545
def load_rho_file ( self , filename ) : pids = self . parman . load_from_rho_file ( filename ) self . register_magnitude_model ( pids [ 0 ] ) self . register_phase_model ( pids [ 1 ] ) return pids
Load a forward model from a rho . dat file
65
11
12,546
def save_to_tomodir ( self , directory ) : self . create_tomodir ( directory ) self . grid . save_elem_file ( directory + os . sep + 'grid/elem.dat' ) self . grid . save_elec_file ( directory + os . sep + 'grid/elec.dat' ) # modeling if self . configs . configs is not None : self . configs . write_crmod_config ( directory + os . sep + 'config/config.dat' ) if self . assignments [ 'forward_model' ] is not None : self . parman . save_to_rho_file ( directory + os . sep + 'rho/rho.dat' , self . assignments [ 'forward_model' ] [ 0 ] , self . assignments [ 'forward_model' ] [ 1 ] , ) self . crmod_cfg . write_to_file ( directory + os . sep + 'exe/crmod.cfg' ) if self . assignments [ 'measurements' ] is not None : self . configs . write_crmod_volt ( directory + os . sep + 'mod/volt.dat' , self . assignments [ 'measurements' ] ) if self . assignments [ 'sensitivities' ] is not None : self . _save_sensitivities ( directory + os . sep + 'mod/sens' , ) if self . assignments [ 'potentials' ] is not None : self . _save_potentials ( directory + os . sep + 'mod/pot' , ) # inversion self . crtomo_cfg . write_to_file ( directory + os . sep + 'exe/crtomo.cfg' ) if self . noise_model is not None : self . noise_model . write_crt_noisemod ( directory + os . sep + 'exe/crt.noisemod' ) if not os . path . isdir ( directory + os . sep + 'inv' ) : os . makedirs ( directory + os . sep + 'inv' )
Save the tomodir instance to a directory structure .
458
11
12,547
def _save_sensitivities ( self , directory ) : print ( 'saving sensitivities' ) digits = int ( np . ceil ( np . log10 ( self . configs . configs . shape [ 0 ] ) ) ) for i in range ( 0 , self . configs . configs . shape [ 0 ] ) : sens_data , meta_data = self . get_sensitivity ( i ) filename_raw = 'sens{0:0' + '{0}' . format ( digits ) + '}.dat' filename = directory + os . sep + filename_raw . format ( i + 1 ) grid_xz = self . grid . get_element_centroids ( ) all_data = np . vstack ( ( grid_xz [ : , 0 ] , grid_xz [ : , 0 ] , sens_data [ 0 ] , sens_data [ 1 ] , ) ) . T with open ( filename , 'wb' ) as fid : fid . write ( bytes ( '{0} {1}\n' . format ( meta_data [ 0 ] , meta_data [ 1 ] ) , 'utf-8' ) ) np . savetxt ( fid , all_data )
save sensitivities to a directory
264
6
12,548
def _save_potentials ( self , directory ) : print ( 'saving potentials' ) digits = int ( np . ceil ( np . log10 ( self . configs . configs . shape [ 0 ] ) ) ) for i in range ( 0 , self . configs . configs . shape [ 0 ] ) : pot_data = self . get_potential ( i ) filename_raw = 'pot{0:0' + '{0}' . format ( digits ) + '}.dat' filename = directory + os . sep + filename_raw . format ( i + 1 ) nodes = self . grid . nodes [ 'sorted' ] [ : , 1 : 3 ] all_data = np . hstack ( ( nodes , pot_data [ 0 ] [ : , np . newaxis ] , pot_data [ 1 ] [ : , np . newaxis ] , ) ) with open ( filename , 'wb' ) as fid : np . savetxt ( fid , all_data )
save potentials to a directory
215
6
12,549
def clear_measurements ( self ) : mid_list = self . assignments . get ( 'measurements' , None ) if mid_list is not None : for mid in mid_list : self . configs . delete_measurements ( mid = mid ) self . assignments [ 'measurements' ] = None
Forget any previous measurements
70
5
12,550
def measurements ( self ) : # check if we have measurements mid = self . assignments . get ( 'measurements' , None ) if mid is None : return_value = self . model ( voltages = True , sensitivities = False , potentials = False , ) if return_value is None : print ( 'cannot model' ) return # retrieve measurements cids = self . assignments [ 'measurements' ] measurements = np . vstack ( ( self . configs . measurements [ cids [ 0 ] ] , self . configs . measurements [ cids [ 1 ] ] , ) ) . T return measurements
Return the measurements associated with this instance .
131
8
12,551
def _read_sensitivities ( self , sens_dir ) : if self . assignments [ 'sensitivities' ] is not None : print ( 'Sensitivities already imported. Will not overwrite!' ) return else : self . assignments [ 'sensitivities' ] = { } sens_files = sorted ( glob ( sens_dir + os . sep + 'sens*.dat' ) ) for nr , filename in enumerate ( sens_files ) : with open ( filename , 'r' ) as fid : metadata = np . fromstring ( fid . readline ( ) . strip ( ) , sep = ' ' , count = 2 ) meta_re = metadata [ 0 ] meta_im = metadata [ 1 ] sens_data = np . loadtxt ( fid ) cids = self . parman . add_data ( sens_data [ : , 2 : 4 ] , [ meta_re , meta_im ] , ) # store cids for later retrieval self . assignments [ 'sensitivities' ] [ nr ] = cids
import sensitivities from a directory
227
6
12,552
def _read_potentials ( self , pot_dir ) : if self . assignments [ 'potentials' ] is not None : print ( 'Potentials already imported. Will not overwrite!' ) return else : self . assignments [ 'potentials' ] = { } pot_files = sorted ( glob ( pot_dir + os . sep + 'pot*.dat' ) ) for nr , filename in enumerate ( pot_files ) : with open ( filename , 'r' ) as fid : pot_data = np . loadtxt ( fid ) nids = self . nodeman . add_data ( pot_data [ : , 2 : 4 ] , ) # store cids for later retrieval self . assignments [ 'potentials' ] [ nr ] = nids
import potentials from a directory
163
6
12,553
def get_potential ( self , config_nr ) : if self . assignments [ 'potentials' ] is None : self . _check_state ( ) if self . can_model : self . model ( potentials = True ) nids = self . assignments [ 'potentials' ] [ config_nr ] pot_data = [ self . nodeman . nodevals [ nid ] for nid in nids ] return pot_data
Return potential data for a given measurement configuration .
94
9
12,554
def get_sensitivity ( self , config_nr ) : if self . assignments [ 'sensitivities' ] is None : self . _check_state ( ) if self . can_model : self . model ( sensitivities = True ) cids = self . assignments [ 'sensitivities' ] [ config_nr ] sens_data = [ self . parman . parsets [ cid ] for cid in cids ] meta_data = [ self . parman . metadata [ cid ] for cid in cids ] return sens_data , meta_data
return a sensitivity as well as corresponding metadata for a given measurement configuration . Indices start at zero .
124
20
12,555
def read_voltages ( self , voltage_file ) : measurements_raw = np . loadtxt ( voltage_file , skiprows = 1 , ) measurements = np . atleast_2d ( measurements_raw ) # extract measurement configurations A = ( measurements [ : , 0 ] / 1e4 ) . astype ( int ) B = ( measurements [ : , 0 ] % 1e4 ) . astype ( int ) M = ( measurements [ : , 1 ] / 1e4 ) . astype ( int ) N = ( measurements [ : , 1 ] % 1e4 ) . astype ( int ) ABMN = np . vstack ( ( A , B , M , N ) ) . T if self . configs . configs is None : self . configs . configs = ABMN else : # configurations don't match if not np . all ( ABMN == self . configs . configs ) : for nr , ( old_config , new_config ) in enumerate ( zip ( self . configs . configs , ABMN ) ) : if np . all ( old_config == new_config ) : continue # check polarity current_electrodes_are_equal = np . all ( old_config [ 0 : 2 ] == new_config [ 0 : 2 ] ) voltage_electrodes_are_switched = np . all ( old_config [ 2 : 4 ] == new_config [ 4 : 1 : - 1 ] ) if ( current_electrodes_are_equal and voltage_electrodes_are_switched ) : if len ( self . configs . measurements . keys ( ) ) > 0 : raise Exception ( 'need to switch electrode polarity, but ' + 'there are already measurements stored for ' + 'the old configuration!' ) else : # switch M/N in configurations self . configs . configs [ nr , : ] = new_config else : raise Exception ( 'There was an error matching configurations of ' + 'voltages with configurations already imported' ) # add measurements to the config instance mid_mag = self . configs . add_measurements ( measurements [ : , 2 ] ) mid_pha = self . configs . add_measurements ( measurements [ : , 3 ] ) self . assignments [ 'measurements' ] = [ mid_mag , mid_pha ]
import voltages from a volt . dat file
507
9
12,556
def model ( self , voltages = True , sensitivities = False , potentials = False , output_directory = None , silent = False , ) : self . _check_state ( ) if self . can_model : if output_directory is not None : if not os . path . isdir ( output_directory ) : os . makedirs ( output_directory ) tempdir = output_directory self . _model ( voltages , sensitivities , potentials , tempdir ) else : raise IOError ( 'output directory already exists: {0}' . format ( output_directory ) ) else : with tempfile . TemporaryDirectory ( dir = self . tempdir ) as tempdir : self . _model ( voltages , sensitivities , potentials , tempdir , silent = silent ) return 1 else : print ( 'Sorry, not all required information to model are present' ) print ( 'Check:' ) print ( '1) configurations present: self.configs.configs' ) print ( '2) is a model present' ) return None
Forward model the tomodir and read in the results
222
11
12,557
def _invert ( self , tempdir , catch_output = True , * * kwargs ) : nr_cores = kwargs . get ( 'cores' , 2 ) print ( 'attempting inversion in directory: {0}' . format ( tempdir ) ) pwd = os . getcwd ( ) os . chdir ( tempdir ) self . save_to_tomodir ( '.' ) os . chdir ( 'exe' ) binary = CRBin . get ( 'CRTomo' ) print ( 'Using binary: {0}' . format ( binary ) ) print ( 'calling CRTomo' ) # store env variable env_omp = os . environ . get ( 'OMP_NUM_THREADS' , '' ) os . environ [ 'OMP_NUM_THREADS' ] = '{0}' . format ( nr_cores ) if catch_output : subprocess . check_output ( binary , shell = True , stderr = subprocess . STDOUT , ) else : subprocess . call ( binary , shell = True , ) # reset environment variable os . environ [ 'OMP_NUM_THREADS' ] = env_omp print ( 'finished' ) os . chdir ( pwd ) self . read_inversion_results ( tempdir )
Internal function than runs an inversion using CRTomo .
295
12
12,558
def invert ( self , output_directory = None , catch_output = True , * * kwargs ) : self . _check_state ( ) if self . can_invert : if output_directory is not None : if not os . path . isdir ( output_directory ) : os . makedirs ( output_directory ) tempdir = output_directory self . _invert ( tempdir , catch_output , * * kwargs ) else : raise IOError ( 'output directory already exists: {0}' . format ( output_directory ) ) else : with tempfile . TemporaryDirectory ( dir = self . tempdir ) as tempdir : self . _invert ( tempdir , catch_output , * * kwargs ) return 0 else : print ( 'Sorry, no measurements present, cannot model yet' ) return 1
Invert this instance and import the result files
181
9
12,559
def read_inversion_results ( self , tomodir ) : self . _read_inversion_results ( tomodir ) self . _read_inv_ctr ( tomodir ) self . _read_resm_m ( tomodir ) self . _read_eps_ctr ( tomodir )
Import inversion results from a tomodir into this instance
69
12
12,560
def plot_eps_data_hist ( self , dfs ) : # check if this is a DC inversion if 'datum' in dfs [ 0 ] : dc_inv = True else : dc_inv = False nr_y = len ( dfs ) size_y = 5 / 2.54 * nr_y if dc_inv : nr_x = 1 else : nr_x = 3 size_x = 15 / 2.54 fig , axes = plt . subplots ( nr_y , nr_x , figsize = ( size_x , size_y ) ) axes = np . atleast_2d ( axes ) # plot initial data errors df = dfs [ 0 ] if dc_inv : ax = axes [ 0 , 0 ] ax . hist ( df [ 'datum' ] / df [ 'eps_r' ] , 100 , ) ax . set_xlabel ( r'$-log(|R|) / \epsilon_r$' ) ax . set_ylabel ( r'count' ) else : # complex inversion ax = axes [ 0 , 0 ] ax . hist ( df [ '-log(|R|)' ] / df [ 'eps' ] , 100 , ) ax . set_xlabel ( r'$-log(|R|)$' ) ax . set_ylabel ( r'count' ) ax = axes [ 0 , 1 ] ax . hist ( df [ '-log(|R|)' ] / df [ 'eps_r' ] , 100 , ) ax . set_xlabel ( r'$-log(|R|) / \epsilon_r$' ) ax . set_ylabel ( r'count' ) ax = axes [ 0 , 2 ] phase_data = df [ '-Phase(rad)' ] / df [ 'eps_p' ] if not np . all ( np . isinf ( phase_data ) | np . isnan ( phase_data ) ) : ax . hist ( phase_data , 100 , ) ax . set_xlabel ( r'$-\phi[rad] / \epsilon_p$' ) ax . set_ylabel ( r'count' ) # iterations for it , df in enumerate ( dfs [ 1 : ] ) : ax = axes [ 1 + it , 0 ] ax . hist ( df [ 'psi' ] , 100 ) rms = np . sqrt ( 1 / df [ 'psi' ] . shape [ 0 ] * np . sum ( df [ 'psi' ] ** 2 ) ) ax . axvline ( rms , color = 'k' , linestyle = 'dashed' ) ax . set_title ( 'iteration: {0}' . format ( it ) ) ax . set_xlabel ( 'psi' ) ax . set_ylabel ( r'count' ) ax = axes [ 1 + it , 1 ] Rdat = df [ 'Re(d)' ] Rmod = df [ 'Re(f(m))' ] ax . scatter ( Rdat , Rmod , ) ax . set_xlabel ( r'$log(R_{data}~[\Omega])$' ) ax . set_ylabel ( r'$log(R_{mod}~[\Omega])$' ) ax = axes [ 1 + it , 2 ] phidat = df [ 'Im(d)' ] phimod = df [ 'Im(f(m))' ] ax . scatter ( phidat , phimod , ) ax . set_xlabel ( r'$\phi_{data}~[mrad]$' ) ax . set_ylabel ( r'$\phi_{mod}~[mrad]$' ) fig . tight_layout ( ) fig . savefig ( 'eps_plot_hist.png' , dpi = 300 )
Plot histograms of data residuals and data error weighting
854
12
12,561
def _read_eps_ctr ( tomodir ) : epsctr_file = tomodir + os . sep + 'inv' + os . sep + 'eps.ctr' if not os . path . isfile ( epsctr_file ) : print ( 'eps.ctr not found: {0}' . format ( epsctr_file ) ) print ( os . getcwd ( ) ) return 1 with open ( epsctr_file , 'r' ) as fid : lines = fid . readlines ( ) group = itertools . groupby ( lines , lambda x : x == '\n' ) dfs = [ ] # group for x in group : # print(x) if not x [ 0 ] : data = [ y for y in x [ 1 ] ] if data [ 0 ] . startswith ( 'IT' ) or data [ 0 ] . startswith ( 'PIT' ) : del ( data [ 0 ] ) data [ 0 ] = data [ 0 ] . replace ( '-Phase (rad)' , '-Phase(rad)' ) tfile = StringIO ( '' . join ( data ) ) df = pd . read_csv ( tfile , delim_whitespace = True , na_values = [ 'Infinity' ] , ) dfs . append ( df ) return dfs
Parse a CRTomo eps . ctr file .
290
13
12,562
def _read_resm_m ( self , tomodir ) : resm_file = tomodir + os . sep + 'inv' + os . sep + 'res_m.diag' if not os . path . isfile ( resm_file ) : print ( 'res_m.diag not found: {0}' . format ( resm_file ) ) print ( os . getcwd ( ) ) return 1 # read header with open ( resm_file , 'rb' ) as fid : first_line = fid . readline ( ) . strip ( ) header_raw = np . fromstring ( first_line , count = 4 , sep = ' ' ) header_raw # nr_cells = int(header_raw[0]) # lam = float(header_raw[1]) subdata = np . genfromtxt ( fid ) print ( subdata . shape ) pid = self . parman . add_data ( subdata [ : , 0 ] ) self . assignments [ 'resm' ] = pid
Read in the resolution matrix of an inversion
227
9
12,563
def register_forward_model ( self , pid_mag , pid_pha ) : self . register_magnitude_model ( pid_mag ) self . register_phase_model ( pid_pha )
Register parameter sets as the forward models for magnitude and phase
44
11
12,564
def register_magnitude_model ( self , pid ) : if self . assignments [ 'forward_model' ] is None : self . assignments [ 'forward_model' ] = [ None , None ] self . assignments [ 'forward_model' ] [ 0 ] = pid
Set a given parameter model to the forward magnitude model
59
10
12,565
def register_phase_model ( self , pid ) : if self . assignments [ 'forward_model' ] is None : self . assignments [ 'forward_model' ] = [ None , None ] self . assignments [ 'forward_model' ] [ 1 ] = pid
Set a given parameter model to the forward phase model
57
10
12,566
def add_homogeneous_model ( self , magnitude , phase = 0 ) : if self . assignments [ 'forward_model' ] is not None : print ( 'model already set, will overwrite' ) # generate distributions magnitude_model = np . ones ( self . grid . nr_of_elements ) * magnitude phase_model = np . ones ( self . grid . nr_of_elements ) * phase pid_mag = self . parman . add_data ( magnitude_model ) pid_pha = self . parman . add_data ( phase_model ) self . assignments [ 'forward_model' ] = [ pid_mag , pid_pha ] return pid_mag , pid_pha
Add a homogeneous resistivity model to the tomodir . This is useful for synthetic measurements .
152
20
12,567
def show_parset ( self , pid ) : fig , ax = plt . subplots ( ) self . plot . plot_elements_to_ax ( pid , ax = ax ) return fig , ax
Plot a given parameter set
47
5
12,568
def cythonize ( * args , * * kwargs ) : global cythonize from Cython . Build import cythonize return cythonize ( * args , * * kwargs )
dirty hack only import cythonize at the time you use it .
46
15
12,569
def response ( self , url ) : resp = requests . get ( url ) . content return self . parseresponse ( resp )
Grab an API response .
26
5
12,570
def errorhandle ( self , resp ) : if self . format == 'json' : parsed = xmltodict . parse ( resp ) errors = parsed [ self . RESPONSE_TOKEN ] [ self . ERROR_TOKEN ] # Create list of errors if more than one error response is given if type ( errors ) is list and len ( errors ) > 1 : messages = ", " . join ( [ " " . join ( [ "{}: {}" . format ( k , v ) for k , v in e . items ( ) ] ) for e in errors ] ) else : overlimit = any ( 'transaction limit' in msg . lower ( ) for msg in errors . values ( ) ) if overlimit : raise APILimitExceeded ( "This API key has used up its daily quota of calls." ) else : messages = " " . join ( [ "{}: {}" . format ( k , v ) for k , v in errors . items ( ) ] ) elif self . format == 'xml' : import xml . etree . ElementTree as ET errors = ET . fromstring ( resp ) . findall ( self . ERROR_TOKEN ) messages = ", " . join ( err . find ( 'msg' ) . text for err in errors ) else : raise ValueError ( "Invalid API response format specified: {}." % self . format ) raise BustimeError ( "API returned: {}" . format ( messages ) )
Parse API error responses and raise appropriate exceptions .
308
10
12,571
def parseresponse ( self , resp ) : # Support Python 3's bytes type from socket repsonses if sys . version_info . major > 2 : resp = resp . decode ( 'utf-8' ) if self . RESPONSE_TOKEN not in resp : raise BustimeError ( "The Bustime API returned an invalid response: {}" . format ( resp ) ) elif self . ERROR_TOKEN in resp : return self . errorhandle ( resp ) else : if self . format == 'json' : return xmltodict . parse ( resp ) [ self . RESPONSE_TOKEN ] elif self . format == 'xml' : return resp
Parse an API response .
143
6
12,572
def get_stack_refs ( refs : list ) : # copy pasted from Senza refs = list ( refs ) refs . reverse ( ) stack_refs = [ ] last_stack = None while refs : ref = refs . pop ( ) if last_stack is not None and re . compile ( r'v[0-9][a-zA-Z0-9-]*$' ) . match ( ref ) : stack_refs . append ( StackReference ( last_stack , ref ) ) else : try : with open ( ref ) as fd : data = yaml . safe_load ( fd ) ref = data [ 'SenzaInfo' ] [ 'StackName' ] except ( OSError , IOError ) : # It's still possible that the ref is a regex pass if refs : version = refs . pop ( ) else : version = None stack_refs . append ( StackReference ( ref , version ) ) last_stack = ref return stack_refs
Returns a list of stack references with name and version .
223
11
12,573
def instance_for_arguments ( self , arguments ) : model_instance = ModelInstance ( ) for prior_model_tuple in self . prior_model_tuples : setattr ( model_instance , prior_model_tuple . name , prior_model_tuple . prior_model . instance_for_arguments ( arguments ) ) return model_instance
Creates a ModelInstance which has an attribute and class instance corresponding to every PriorModel attributed to this instance .
79
22
12,574
def mapper_from_partial_prior_arguments ( self , arguments ) : original_prior_dict = { prior : prior for prior in self . priors } return self . mapper_from_prior_arguments ( { * * original_prior_dict , * * arguments } )
Creates a new model mapper from a dictionary mapping_matrix existing priors to new priors keeping existing priors where no mapping is provided .
67
31
12,575
def mapper_from_prior_arguments ( self , arguments ) : mapper = copy . deepcopy ( self ) for prior_model_tuple in self . prior_model_tuples : setattr ( mapper , prior_model_tuple . name , prior_model_tuple . prior_model . gaussian_prior_model_for_arguments ( arguments ) ) return mapper
Creates a new model mapper from a dictionary mapping_matrix existing priors to new priors .
89
22
12,576
def mapper_from_gaussian_tuples ( self , tuples , a = None , r = None ) : prior_tuples = self . prior_tuples_ordered_by_id prior_class_dict = self . prior_class_dict arguments = { } for i , prior_tuple in enumerate ( prior_tuples ) : prior = prior_tuple . prior cls = prior_class_dict [ prior ] mean = tuples [ i ] [ 0 ] if a is not None and r is not None : raise exc . PriorException ( "Width of new priors cannot be both relative and absolute." ) if a is not None : width_type = "a" value = a elif r is not None : width_type = "r" value = r else : width_type , value = conf . instance . prior_width . get_for_nearest_ancestor ( cls , prior_tuple . name ) if width_type == "r" : width = value * mean elif width_type == "a" : width = value else : raise exc . PriorException ( "Prior widths must be relative 'r' or absolute 'a' e.g. a, 1.0" ) if isinstance ( prior , GaussianPrior ) : limits = ( prior . lower_limit , prior . upper_limit ) else : limits = conf . instance . prior_limit . get_for_nearest_ancestor ( cls , prior_tuple . name ) arguments [ prior ] = GaussianPrior ( mean , max ( tuples [ i ] [ 1 ] , width ) , * limits ) return self . mapper_from_prior_arguments ( arguments )
Creates a new model mapper from a list of floats describing the mean values of gaussian priors . The widths \ of the new priors are taken from the width_config . The new gaussian priors must be provided in the same \ order as the priors associated with model .
371
61
12,577
def info ( self ) : info = [ ] for prior_model_name , prior_model in self . prior_model_tuples : info . append ( prior_model . name + '\n' ) info . extend ( [ f"{prior_model_name}_{item}" for item in prior_model . info ] ) return '\n' . join ( info )
Use the priors that make up the model_mapper to generate information on each parameter of the overall model .
83
23
12,578
def push ( config , force = False ) : repo = config . repo active_branch = repo . active_branch if active_branch . name == "master" : error_out ( "Can't commit when on the master branch. " "You really ought to do work in branches." ) state = read ( config . configfile ) if not state . get ( "FORK_NAME" ) : info_out ( "Can't help you push the commit. Please run: gg config --help" ) return 0 try : repo . remotes [ state [ "FORK_NAME" ] ] except IndexError : error_out ( "There is no remote called '{}'" . format ( state [ "FORK_NAME" ] ) ) destination = repo . remotes [ state [ "FORK_NAME" ] ] if force : pushed , = destination . push ( force = True ) info_out ( pushed . summary ) else : pushed , = destination . push ( ) # Was it rejected? if ( pushed . flags & git . remote . PushInfo . REJECTED or pushed . flags & git . remote . PushInfo . REMOTE_REJECTED ) : error_out ( 'The push was rejected ("{}")' . format ( pushed . summary ) , False ) try_force_push = input ( "Try to force push? [Y/n] " ) . lower ( ) . strip ( ) if try_force_push not in ( "no" , "n" ) : pushed , = destination . push ( force = True ) info_out ( pushed . summary ) else : return 0
Create push the current branch .
345
6
12,579
def chkpath ( path ) : if os . path . exists ( path ) : return path else : msg = "{0} does not exist." . format ( path ) raise argparse . ArgumentTypeError ( msg )
Checks if a path exists .
46
7
12,580
def readin_volt ( filename ) : with open ( filename , 'r' ) as fid : content = np . loadtxt ( fid , skiprows = 1 , usecols = [ 0 , 1 , 2 ] ) volt = content [ : , 2 ] elecs = content [ : , 0 : 2 ] return elecs , volt
Read in measurement data from a volt . dat file and return electrodes and measured resistance .
71
17
12,581
def save_volt ( elecs , volt , filename ) : # bring data in shape content = np . column_stack ( ( elecs , volt , np . zeros ( len ( volt ) ) ) ) # save datapoints with open ( filename , 'w' ) as fid : fid . write ( '{0}\n' . format ( content . shape [ 0 ] ) ) with open ( filename , 'ab' ) as fid : np . savetxt ( fid , np . array ( content ) , fmt = '%i %i %f %f' )
Save the values in volt - format .
122
8
12,582
def main ( ) : options = handle_options ( ) # read in observed and synthetic data elecs , d_obs = readin_volt ( options . d_obs ) elecs , d_est = readin_volt ( options . d_est ) elecs , d_estTC = readin_volt ( options . d_estTC ) # calculate corrected data volt_corr = calc_correction ( d_obs , d_est , d_estTC , ) # save data save_volt ( elecs , volt_corr , options . output , )
Function to remove temperature effect from field data
123
8
12,583
def recursive_update ( default , custom ) : if not isinstance ( default , dict ) or not isinstance ( custom , dict ) : raise TypeError ( 'Params of recursive_update should be dicts' ) for key in custom : if isinstance ( custom [ key ] , dict ) and isinstance ( default . get ( key ) , dict ) : default [ key ] = recursive_update ( default [ key ] , custom [ key ] ) else : default [ key ] = custom [ key ] return default
Return a dict merged from default and custom
108
8
12,584
def cleanup ( config , searchstring , force = False ) : repo = config . repo branches_ = list ( find ( repo , searchstring ) ) if not branches_ : error_out ( "No branches found" ) elif len ( branches_ ) > 1 : error_out ( "More than one branch found.{}" . format ( "\n\t" . join ( [ "" ] + [ x . name for x in branches_ ] ) ) ) assert len ( branches_ ) == 1 branch_name = branches_ [ 0 ] . name active_branch = repo . active_branch if branch_name == active_branch . name : error_out ( "Can't clean up the current active branch." ) # branch_name = active_branch.name upstream_remote = None fork_remote = None state = read ( config . configfile ) origin_name = state . get ( "ORIGIN_NAME" , "origin" ) for remote in repo . remotes : if remote . name == origin_name : # remote.pull() upstream_remote = remote break if not upstream_remote : error_out ( "No remote called {!r} found" . format ( origin_name ) ) # Check out master repo . heads . master . checkout ( ) upstream_remote . pull ( repo . heads . master ) # Is this one of the merged branches?! # XXX I don't know how to do this "nativly" with GitPython. merged_branches = [ x . strip ( ) for x in repo . git . branch ( "--merged" ) . splitlines ( ) if x . strip ( ) and not x . strip ( ) . startswith ( "*" ) ] was_merged = branch_name in merged_branches certain = was_merged or force if not certain : # Need to ask the user. # XXX This is where we could get smart and compare this branch # with the master. certain = ( input ( "Are you certain {} is actually merged? [Y/n] " . format ( branch_name ) ) . lower ( ) . strip ( ) != "n" ) if not certain : return 1 if was_merged : repo . git . branch ( "-d" , branch_name ) else : repo . git . branch ( "-D" , branch_name ) fork_remote = None state = read ( config . configfile ) for remote in repo . remotes : if remote . name == state . get ( "FORK_NAME" ) : fork_remote = remote break if fork_remote : fork_remote . push ( ":" + branch_name ) info_out ( "Remote branch on fork deleted too." )
Deletes a found branch locally and remotely .
576
9
12,585
def calc_correction ( temp , mag , add = False , T_std = 10 , m = 0.021 ) : if mag . shape [ 1 ] == 3 : if add : data_x = ( m * ( T_std - 25 ) + 1 ) / ( m * ( temp - 25 ) + 1 ) * mag [ : , 0 ] data_y = ( m * ( T_std - 25 ) + 1 ) / ( m * ( temp - 25 ) + 1 ) * mag [ : , 1 ] data_z = ( m * ( T_std - 25 ) + 1 ) / ( m * ( temp - 25 ) + 1 ) * mag [ : , 2 ] return np . column_stack ( ( data_x , data_y , data_z ) ) else : data_x = ( m * ( temp - 25 ) + 1 ) / ( m * ( T_std - 25 ) + 1 ) * mag [ : , 0 ] data_y = ( m * ( temp - 25 ) + 1 ) / ( m * ( T_std - 25 ) + 1 ) * mag [ : , 1 ] data_z = ( m * ( temp - 25 ) + 1 ) / ( m * ( T_std - 25 ) + 1 ) * mag [ : , 2 ] return np . column_stack ( ( data_x , data_y , data_z ) ) else : if add : data_i = ( m * ( T_std - 25 ) + 1 ) / ( m * ( temp - 25 ) + 1 ) * mag return data_i else : data_std = ( m * ( temp - 25 ) + 1 ) / ( m * ( T_std - 25 ) + 1 ) * mag return data_std
Function to add or substract the temperature effect to given data . The function can be called in python scripts . For application via command line in a file system use the script td_correct_temperature . py . The data is taken and given in Ohmm .
377
53
12,586
def save_mag_to_file ( mag , filename , rhofile ) : if rhofile : # bring data in shape null = np . zeros ( len ( mag ) ) if mag . shape [ 1 ] == 3 : null = np . column_stack ( ( null , null , null , null ) ) result = np . column_stack ( ( mag , null ) ) # save datapoints with open ( filename , 'w' ) as fid : fid . write ( '{0}\n' . format ( mag . shape [ 0 ] ) ) with open ( filename , 'ab' ) as fid : np . savetxt ( fid , np . array ( result ) , fmt = '%f' ) else : # bring data in shape with open ( 'inv/rho00.mag' , 'r' ) as fid : coor = np . loadtxt ( fid , skiprows = 1 , usecols = [ 0 , 1 ] ) # calculated back to log if mag . shape [ 1 ] == 3 : logx = [ math . log ( d , 10 ) for d in mag [ : , 0 ] ] logy = [ math . log ( d , 10 ) for d in mag [ : , 1 ] ] logz = [ math . log ( d , 10 ) for d in mag [ : , 2 ] ] mag_log = np . column_stack ( ( logx , logy , logz ) ) else : mag_log = [ math . log ( d , 10 ) for d in mag ] content = np . column_stack ( ( coor [ : , 0 ] , coor [ : , 1 ] , mag_log ) ) # save datapoints with open ( filename , 'w' ) as fid : fid . write ( '{0}\n' . format ( content . shape [ 0 ] ) ) with open ( filename , 'ab' ) as fid : np . savetxt ( fid , np . array ( content ) , fmt = '%f' )
Save the values in rho - or mag - format .
430
12
12,587
def main ( ) : options = handle_options ( ) # read in temperature and resistivity data tempdata = readin_temp ( options . temp_file ) magdata = readin_rho ( options . filename , options . rhofile , aniso = options . aniso ) # calculate corrected data mag_corr = calc_correction ( temp = tempdata , mag = magdata , add = options . add , T_std = options . T_std , m = options . m , ) # save data save_mag_to_file ( mag_corr , options . output , options . rhofile )
Function to add or substract the temperature effect to data in a tomodir
134
16
12,588
def singular ( plural ) : if plural . endswith ( 'ies' ) : return plural [ : - 3 ] + 'y' if plural . endswith ( 's' ) : return plural [ : - 1 ] raise ValueError ( 'unknown plural form %r' % ( plural , ) )
Take a plural English word and turn it into singular
65
10
12,589
def plot_single_configuration ( self , config_nr , sens_file ) : indices = elem . load_column_file_to_elements_advanced ( sens_file , [ 2 , 3 ] , False , False ) elem . plt_opt . title = '' elem . plt_opt . reverse = True elem . plt_opt . cbmin = - 1 elem . plt_opt . cbmax = 1 elem . plt_opt . cblabel = r'fill' elem . plt_opt . xlabel = 'x (m)' elem . plt_opt . ylabel = 'z (m)' fig = plt . figure ( figsize = ( 5 , 7 ) ) ax = fig . add_subplot ( 111 ) ax , pm , cb = elem . plot_element_data_to_ax ( indices [ 0 ] , ax , scale = 'asinh' , no_cb = False , ) ax . scatter ( self . sens_centers [ config_nr , 0 ] , self . sens_centers [ config_nr , 1 ] , marker = '*' , s = 50 , color = 'w' , edgecolors = 'w' , ) self . color_electrodes ( config_nr , ax ) # Output sensf = sens_file . split ( 'sens' ) [ - 1 ] sensf = sensf . split ( '.' ) [ 0 ] out = 'sens_center_' + sensf + '.png' fig . savefig ( out , bbox_inches = 'tight' , dpi = 300 ) fig . clf ( ) plt . close ( fig )
plot sensitivity distribution with center of mass for a single configuration . The electrodes used are colored .
372
18
12,590
def plot_sens_center ( self , frequency = 2 ) : try : colors = np . loadtxt ( self . volt_file , skiprows = 1 ) except IOError : print ( 'IOError opening {0}' . format ( volt_file ) ) exit ( ) # check for 1-dimensionality if ( len ( colors . shape ) > 1 ) : print ( 'Artificial or Multi frequency data' ) colors = colors [ : , frequency ] . flatten ( ) colors = colors [ ~ np . isnan ( colors ) ] elem . load_elem_file ( self . elem_file ) elem . load_elec_file ( self . elec_file ) nr_elements = len ( elem . element_type_list [ 0 ] ) elem . element_data = np . zeros ( ( nr_elements , 1 ) ) * np . nan elem . plt_opt . title = ' ' elem . plt_opt . reverse = True elem . plt_opt . cbmin = - 1 elem . plt_opt . cbmax = 1 elem . plt_opt . cblabel = self . cblabel elem . plt_opt . xlabel = 'x (m)' elem . plt_opt . ylabel = 'z (m)' fig = plt . figure ( figsize = ( 5 , 7 ) ) ax = fig . add_subplot ( 111 ) ax , pm , cb = elem . plot_element_data_to_ax ( 0 , ax , scale = 'linear' , no_cb = True ) ax . scatter ( self . sens_centers [ : , 0 ] , self . sens_centers [ : , 1 ] , c = colors , s = 100 , edgecolors = 'none' ) cb_pos = mpl_get_cb_bound_next_to_plot ( ax ) ax1 = fig . add_axes ( cb_pos , frame_on = True ) cmap = mpl . cm . jet_r norm = mpl . colors . Normalize ( vmin = np . nanmin ( colors ) , vmax = np . nanmax ( colors ) ) mpl . colorbar . ColorbarBase ( ax1 , cmap = cmap , norm = norm , orientation = 'vertical' ) fig . savefig ( self . output_file , bbox_inches = 'tight' , dpi = 300 )
plot sensitivity center distribution for all configurations in config . dat . The centers of mass are colored by the data given in volt_file .
543
27
12,591
def color_electrodes ( self , config_nr , ax ) : electrodes = np . loadtxt ( options . config_file , skiprows = 1 ) electrodes = self . configs [ ~ np . isnan ( self . configs ) . any ( 1 ) ] electrodes = electrodes . astype ( int ) conf = [ ] for dim in range ( 0 , electrodes . shape [ 1 ] ) : c = electrodes [ config_nr , dim ] # c = c.partition('0') a = np . round ( c / 10000 ) - 1 b = np . mod ( c , 10000 ) - 1 conf . append ( a ) conf . append ( b ) Ex , Ez = elem . get_electrodes ( ) color = [ '#ffed00' , '#ffed00' , '#ff0000' , '#ff0000' ] ax . scatter ( Ex [ conf ] , Ez [ conf ] , c = color , marker = 's' , s = 60 , clip_on = False , edgecolors = 'k' )
Color the electrodes used in specific configuration . Voltage electrodes are yellow Current electrodes are red ?!
227
18
12,592
def compute_sens ( self , elem_file , elec_file , configs ) : CRMod_config = CRMod . config ( ) # activate 2D mode and set sink nr if self . options . sink is not None : print ( '2D mode with sink {0}' . format ( self . options . sink ) ) CRMod_config [ '2D' ] = 0 CRMod_config [ 'fictitious_sink' ] = 'T' CRMod_config [ 'sink_node' ] = self . options . sink CRMod_config [ 'write_sens' ] = 'T' CRMod_instance = CRMod . CRMod ( CRMod_config ) CRMod_instance . elemfile = elem_file CRMod_instance . elecfile = elec_file CRMod_instance . configdata = configs resistivity = 100 # get number of elements fid = open ( elem_file , 'r' ) fid . readline ( ) elements = int ( fid . readline ( ) . strip ( ) . split ( ) [ 1 ] ) fid . close ( ) # create rho.dat file rhodata = '{0}\n' . format ( elements ) for i in range ( 0 , elements ) : rhodata += '{0} 0\n' . format ( resistivity ) CRMod_instance . rhodata = rhodata CRMod_instance . run_in_tempdir ( ) volt_file = CRMod_instance . volt_file sens_files = CRMod_instance . sens_files return sens_files , volt_file , CRMod_instance . temp_dir
Compute the sensitivities for the given input data . A CRMod instance is called to create the sensitivity files .
362
23
12,593
def compute_center_of_mass ( self , filename ) : sens = np . loadtxt ( filename , skiprows = 1 ) X = sens [ : , 0 ] Z = sens [ : , 1 ] # C = (np.abs(sens[:,2]))# ./ np.max(np.abs(sens[:,2])) C = sens [ : , 2 ] x_center = 0 z_center = 0 sens_sum = 0 for i in range ( 0 , C . shape [ 0 ] ) : # unweighted if ( self . weight == 0 ) : weight = ( C [ i ] ) # abs if ( self . weight == 1 ) : weight = np . abs ( C [ i ] ) # log10 if ( self . weight == 2 ) : weight = np . log10 ( np . abs ( C [ i ] ) ) # sqrt if ( self . weight == 3 ) : weight = np . sqrt ( np . abs ( C [ i ] ) ) x_center += ( X [ i ] * weight ) z_center += ( Z [ i ] * weight ) sens_sum += weight x_center /= sens_sum z_center /= sens_sum return ( x_center , z_center )
Center of mass is computed using the sensitivity data output from CRMod Data weights can be applied using command line options
270
22
12,594
def handle_cmd_options ( ) : parser = OptionParser ( ) parser . add_option ( "-s" , "--silent" , action = "store_true" , dest = "silent" , help = "print any warnings" , default = False ) ( options , args ) = parser . parse_args ( ) return options , args
Get the options from the command line .
75
8
12,595
def move ( fname , folder , options ) : if os . path . isfile ( fname ) : shutil . move ( fname , folder ) else : if options . silent is False : print ( '{0} missing' . format ( fname ) )
Move file to dir if existing
57
6
12,596
def print_tb ( tb , limit = None , file = None ) : if file is None : file = sys . stderr if limit is None : if hasattr ( sys , 'tracebacklimit' ) : limit = sys . tracebacklimit file . write ( '\n' . join ( format_tb ( tb , limit ) ) + '\n' )
Print up to limit stack trace entries from the traceback tb .
83
14
12,597
def print_exception ( etype , value , tb , limit = None , file = None , chain = True ) : import traceback if file is None : file = sys . stderr if tb : file . write ( 'Traceback (most recent call last):\n' ) print_tb ( tb , limit , file ) lines = traceback . format_exception_only ( etype , value ) for line in lines : file . write ( line )
Print exception up to limit stack trace entries from tb to file .
104
14
12,598
def construct ( self , data ) : occurrences = { } main_occurrences = { } # occurrences of devel packages for pkg in data [ "data" ] [ "dependencies" ] : package = pkg [ "package" ] for item in pkg [ "dependencies" ] : dep = item [ "name" ] if package != "." : deps = map ( lambda l : "%s/%s" % ( package , l ) , item [ "location" ] ) else : deps = item [ "location" ] if dep not in occurrences : occurrences [ dep ] = deps else : occurrences [ dep ] = occurrences [ dep ] + deps self . occurrences = occurrences # occurrences of main packages for main in data [ "data" ] [ "main" ] : filename = main [ "filename" ] for dep in main [ "dependencies" ] : if dep not in main_occurrences : main_occurrences [ dep ] = [ filename ] else : main_occurrences [ dep ] . append ( filename ) self . main_occurrences = main_occurrences # test directories self . test_directories = sorted ( map ( lambda l : l [ "test" ] , data [ "data" ] [ "tests" ] ) ) # provided devel packages self . provided_packages = sorted ( data [ "data" ] [ "packages" ] ) # imported paths in devel packages imported_packages = [ ] imported_native_packages = [ ] for path in occurrences : try : self . ipparser . parse ( path ) except ValueError : continue if self . ipparser . isNative ( ) : imported_native_packages . append ( path ) else : imported_packages . append ( path ) self . imported_packages = sorted ( imported_packages ) self . imported_native_packages = sorted ( imported_native_packages ) # main packages self . main_packages = map ( lambda l : l [ "filename" ] , data [ "data" ] [ "main" ] )
Construct info about a project from artefact
436
8
12,599
def join_import_from ( self , import_spec ) : if not self . isroot and not self . ispkg : parent = self . name . rpartition ( '.' ) [ 0 ] else : parent = self . name return join_import_from ( import_spec , parent )
Joins a relative import like from . foo import bar with this module as its parent module . If the module is not a root module or package root it will be joined with the package root .
63
39