idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
12,300
def get_by_range(model_cls, *args, **kwargs):
    """Get an ordered list of models for the specified time range.

    The timestamp on the earliest returned model will likely occur
    before ``start_timestamp`` — this ensures the models cover the
    entire requested range.
    """
    start_timestamp = kwargs.get('start_timestamp')
    end_timestamp = kwargs.get('end_timestamp')
    bounded = start_timestamp is not None and end_timestamp is not None
    if bounded and start_timestamp > end_timestamp:
        raise InvalidTimestampRange
    models = model_cls.read_time_range(
        *args, end_timestamp=end_timestamp).order_by(model_cls.time_order)
    # start time -> loop through until we find one set before or on start
    if start_timestamp is not None:
        cutoff = 0
        for cutoff, model in enumerate(models, start=1):
            if model.timestamp <= start_timestamp:
                break
        models = models[:cutoff]
    return models
Get ordered list of models for the specified time range . The timestamp on the earliest model will likely occur before start_timestamp . This is to ensure that we return the models for the entire range .
200
40
12,301
def read_time_range(cls, *args, **kwargs):
    """Get all models set within a given time range.

    Uses the descending time index: ``time_order`` stores negated
    timestamps, hence the sign flips in the comparisons below.
    """
    start = kwargs.get('start_timestamp')
    end = kwargs.get('end_timestamp')
    criteria = list(args)
    if start is not None:
        criteria.append(cls.time_order <= -start)
    if end is not None:
        criteria.append(cls.time_order >= -end)
    return cls.read(*criteria)
Get all timezones set within a given time . Uses time_dsc_index
105
18
12,302
def add_data(self, data, metadata=None):
    """Add one or more parameter sets to this manager.

    Returns a single id for one set, or a list of ids for several.
    """
    subdata = np.atleast_2d(data)
    nr_elements = self.grid.nr_of_elements
    # we try to accommodate transposed input (one set per column)
    if subdata.shape[1] != nr_elements:
        if subdata.shape[0] != nr_elements:
            raise Exception(
                'Number of values does not match the number of ' +
                'elements in the grid')
        subdata = subdata.T
    # now make sure that metadata can be zipped with the subdata
    nr_sets = subdata.shape[0]
    if metadata is None:
        metadata = [None] * nr_sets
    elif nr_sets > 1:
        if not isinstance(metadata, (list, tuple)) or len(metadata) != nr_sets:
            raise Exception('metadata does not fit the provided data')
    else:
        metadata = [metadata, ]
    return_ids = []
    for dataset, meta in zip(subdata, metadata):
        cid = self._get_next_index()
        self.parsets[cid] = dataset
        self.metadata[cid] = meta
        return_ids.append(cid)
    return return_ids[0] if len(return_ids) == 1 else return_ids
Add data to the parameter set
294
6
12,303
def load_model_from_file(self, filename):
    """Load one parameter set from a file containing one value per line.

    Returns the parameter-set id assigned by :meth:`add_data`.
    """
    assert os.path.isfile(filename)
    values = np.loadtxt(filename).squeeze()
    # only a single 1D parameter set is accepted here
    assert len(values.shape) == 1
    return self.add_data(values)
Load one parameter set from a file which contains one value per line
58
13
12,304
def load_from_sens_file(self, filename):
    """Load real and imaginary sensitivities from a CRMod sens.dat file.

    Returns the pair of parameter-set ids (real, imaginary).
    """
    sens_data = np.loadtxt(filename, skiprows=1)
    # columns 2/3 hold the real/imaginary parts
    pid_re = self.add_data(sens_data[:, 2])
    pid_im = self.add_data(sens_data[:, 3])
    return pid_re, pid_im
Load real and imaginary parts from a sens . dat file generated by CRMod
81
15
12,305
def save_to_rho_file(self, filename, cid_mag, cid_pha=None):
    """Save one or two parameter sets in the rho.dat forward model format.

    When no phase id is given, zeros are written for the phase column.
    """
    mag_data = self.parsets[cid_mag]
    if cid_pha is None:
        pha_data = np.zeros(mag_data.shape)
    else:
        pha_data = self.parsets[cid_pha]
    with open(filename, 'wb') as fid:
        header = '{0}\n'.format(self.grid.nr_of_elements)
        fid.write(bytes(header, 'utf-8', ))
        np.savetxt(fid, np.vstack((mag_data, pha_data, )).T, fmt='%f %f')
Save one or two parameter sets in the rho . dat forward model format
166
15
12,306
def _clean_pid ( self , pid ) : if isinstance ( pid , ( list , tuple ) ) : if len ( pid ) == 1 : return pid [ 0 ] else : return pid return pid
if pid is a number don't do anything . If pid is a list with one entry strip the list and return the number . If pid contains more than one entry do nothing .
43
36
12,307
def modify_area(self, pid, xmin, xmax, zmin, zmax, value):
    """Assign *value* to all parameters of dataset *pid* that fall
    inside the rectangle spanned by (xmin, xmax) x (zmin, zmax).
    """
    corners = (
        (xmin, zmax),
        (xmax, zmax),
        (xmax, zmin),
        (xmin, zmin),
    )
    self.modify_polygon(pid, shapgeo.Polygon(corners), value)
Modify the given dataset in the rectangular area given by the parameters and assign all parameters inside this area the given value .
87
24
12,308
def extract_points(self, pid, points):
    """Extract values at certain points from parameter set *pid*.

    Cells are selected by nearest-neighbour interpolation of the cell
    centroids towards the requested points.
    """
    centroids = self.grid.get_element_centroids()
    interpolator = spi.NearestNDInterpolator(centroids, self.parsets[pid])
    return interpolator(points)
Extract values at certain points in the grid from a given parameter set . Cells are selected by interpolating the centroids of the cells towards the line using a nearest scheme .
64
36
12,309
def extract_along_line(self, pid, xy0, xy1, N=10):
    """Extract parameter values along the line from *xy0* to *xy1*.

    Returns an array with one row (x, y, value) per each of the N
    equally spaced points.
    """
    assert N >= 2
    p0 = np.array(xy0).squeeze()
    p1 = np.array(xy1).squeeze()
    assert p0.size == 2
    assert p1.size == 2
    # compute the sampling points along the line
    xs = np.linspace(p0[0], p1[0], N)
    ys = np.linspace(p0[1], p1[1], N)
    points = list(zip(xs, ys))
    values = self.extract_points(pid, points)
    return np.hstack((points, values[:, np.newaxis]))
Extract parameter values along a given line .
182
9
12,310
def extract_polygon_area(self, pid, polygon_points):
    """Extract all data points whose element centroid lies within the
    given polygon.

    Returns (indices, values).
    """
    polygon = shapgeo.Polygon(polygon_points)
    centroids = self.grid.get_element_centroids()
    in_poly = [nr for nr, point in enumerate(centroids)
               if shapgeo.Point(point).within(polygon)]
    return np.array(in_poly), self.parsets[pid][in_poly]
Extract all data points whose element centroid lies within the given polygon .
120
16
12,311
def rotate_point(xorigin, yorigin, x, y, angle):
    """Rotate the point (x, y) around (xorigin, yorigin) by *angle*
    (radians, counter-clockwise).

    Returns the rotated coordinates relative to the origin point.
    """
    dx = x - xorigin
    dy = y - yorigin
    # BUG FIX: the original computed roty with (x - yorigin) as the
    # first term; a rotation must use the x-offset (x - xorigin).
    rotx = dx * np.cos(angle) - dy * np.sin(angle)
    roty = dx * np.sin(angle) + dy * np.cos(angle)
    return rotx, roty
Rotate the given point by angle
84
7
12,312
def get_R_mod(options, rho0):
    """Compute synthetic measurements over a homogeneous half-space of
    resistivity *rho0* and return the magnitudes only."""
    tomodir = tdManager.tdMan(
        elem_file=options.elem_file,
        elec_file=options.elec_file,
        config_file=options.config_file,
    )
    # set the homogeneous model
    tomodir.add_homogeneous_model(magnitude=rho0)
    # only interested in magnitudes (first column)
    return tomodir.measurements()[:, 0]
Compute synthetic measurements over a homogeneous half - space
101
11
12,313
def make_and_return_path_from_path_and_folder_names(path, folder_names):
    """For a given path, create a directory structure composed of the
    given folders and return the path to the inner-most folder.

    The returned path keeps the original '/'-joined string form,
    including the trailing slash.
    """
    for folder_name in folder_names:
        path += folder_name + '/'
        # exist_ok replaces the original try/except FileExistsError
        os.makedirs(path, exist_ok=True)
    return path
For a given path create a directory structure composed of a set of folders and return the path to the inner - most folder .
62
26
12,314
def register_host(self, bm_instance):
    """Register an existing nova VM as a baremetal host behind a BMC.

    Creates a dedicated BMC subnet and port, configures the matching
    NIC and policy routing on the host, and installs a systemd unit
    running openstackbmc for the instance. Returns the BMC IP.
    """
    net_index = self._bmc_range_start + self._nic_cpt
    bmc_ip = '10.130.%d.100' % net_index
    bmc_net = '10.130.%d.0' % net_index
    bmc_gw = '10.130.%d.1' % net_index
    device = 'eth%d' % (2 + self._nic_cpt)
    body_create_subnet = {
        'subnets': [{
            'name': 'bmc_' + device,
            'cidr': bmc_net + '/24',
            'ip_version': 4,
            'network_id': self._bmc_net['id']}]}
    subnet_id = self.neutron.create_subnet(
        body=body_create_subnet)['subnets'][0]['id']
    self.attach_subnet_to_router(subnet_id)
    self.os_instance.interface_attach(None, self._bmc_net['id'], bmc_ip)
    # NOTE(review): the multi-line file contents below were
    # reconstructed from a whitespace-mangled source — confirm the
    # exact line layout against the original repository.
    content = """
DEVICE="{device}"
BOOTPROTO=static
IPADDR={bmc_ip}
NETMASK=255.255.255.0
ONBOOT=yes
"""
    self.create_file(
        '/etc/sysconfig/network-scripts/ifcfg-%s' % device,
        content=content.format(device=device, bmc_ip=bmc_ip, bmc_gw=bmc_gw))
    content = """
192.0.2.0/24 via {bmc_gw}
"""
    self.create_file(
        '/etc/sysconfig/network-scripts/route-%s' % device,
        content=content.format(bmc_gw=bmc_gw))
    self.run('ifup %s' % device)
    # Ensure the outgoing traffic go through the correct NIC to avoid
    # spoofing protection
    # TODO(Gonéri): This should be persistant.
    self.run('ip rule add from %s table %d' % (bmc_ip, self._nic_cpt + 2))
    self.run('ip route add default via %s dev %s table %d' % (
        bmc_gw, device, self._nic_cpt + 2))
    content = """
[Unit]
Description=openstack-bmc {bm_instance} Service

[Service]
ExecStart=/usr/local/bin/openstackbmc --os-user {os_username} --os-password {os_password} --os-project-id {os_project_id} --os-auth-url {os_auth_url} --instance {bm_instance} --address {bmc_ip}
User=root
StandardOutput=kmsg+console
StandardError=inherit
Restart=always

[Install]
WantedBy=multi-user.target
"""
    unit = 'openstack-bmc-%d.service' % self._nic_cpt
    self.create_file(
        '/usr/lib/systemd/system/%s' % unit,
        content.format(
            os_username=self.os_username,
            os_password=protect_password(self.os_password),
            os_project_id=self.os_project_id,
            os_auth_url=self.os_auth_url,
            bm_instance=bm_instance,
            bmc_ip=bmc_ip))
    self.run('systemctl enable %s' % unit)
    self.run('systemctl start %s' % unit)
    self._nic_cpt += 1
    return bmc_ip
Register an existing nova VM .
863
7
12,315
def Godeps(self):
    """Return the snapshot in Godeps.json form.

    One entry per package, sorted by import path.
    """
    # NOTE: the original shadowed the builtin ``dict`` with a list;
    # renamed and turned into a comprehension.
    return [
        {"ImportPath": str(package),
         "Rev": str(self._packages[package])}
        for package in sorted(self._packages.keys())
    ]
Return the snapshot in Godeps . json form
59
9
12,316
def GLOGFILE(self):
    """Return the snapshot in GLOGFILE form.

    One "package revision" pair per line, sorted by package.
    """
    return "\n".join(
        "%s %s" % (str(pkg), str(self._packages[pkg]))
        for pkg in sorted(self._packages.keys())
    )
Return the snapshot in GLOGFILE form
64
8
12,317
def Glide(self):
    """Return the snapshot in glide.lock form."""
    snapshot = {
        "hash": "???",
        "updated": str(datetime.datetime.now(tz=pytz.utc).isoformat()),
        "imports": [],
    }
    decomposer = ImportPathsDecomposerBuilder().buildLocalDecomposer()
    decomposer.decompose(self._packages.keys())
    classes = decomposer.classes()
    for ipp in classes:
        dep = {
            "name": ipp,
            "version": str(self._packages[classes[ipp][0]]),
        }
        if len(classes[ipp]) > 1 or classes[ipp][0] != ipp:
            # BUG FIX: ``map`` is lazy on Python 3; materialize the
            # subpackage names so yaml serializes a list rather than a
            # map object.
            dep["subpackages"] = [p[len(ipp) + 1:] for p in classes[ipp]]
        snapshot["imports"].append(dep)
    return yaml.dump(snapshot, default_flow_style=False)
Return the snapshot in glide . lock form
226
8
12,318
def render(self,
           trajectories: Tuple[NonFluents, Fluents, Fluents, Fluents, np.array],
           batch: Optional[int] = None) -> None:
    """Render the simulated state-action trajectories for the
    Navigation domain (first batch entry only)."""
    non_fluents, initial_state, states, actions, interms, rewards = trajectories
    non_fluents = dict(non_fluents)
    states = {name: fluent[0] for name, fluent in states}
    actions = {name: fluent[0] for name, fluent in actions}
    rewards = rewards[0]
    idx = self._compiler.rddl.domain.state_fluent_ordering.index('location/1')
    start = initial_state[idx][0]
    g = non_fluents['GOAL/1']
    path = states['location/1']
    deltas = actions['move/1']
    centers = non_fluents['DECELERATION_ZONE_CENTER/2']
    decays = non_fluents['DECELERATION_ZONE_DECAY/1']
    zones = [(x, y, d) for (x, y), d in zip(centers, decays)]
    self._ax1 = plt.gca()
    self._render_state_space()
    self._render_start_and_goal_positions(start, g)
    self._render_deceleration_zones(zones)
    self._render_state_action_trajectory(start, path, deltas)
    plt.title('Navigation', fontweight='bold')
    plt.legend(loc='lower right')
    plt.show()
Render the simulated state - action trajectories for Navigation domain .
379
12
12,319
def persistent_timer(func):
    """Time the execution of *func*, persisting the start time to disk
    so timing continues if the process is stopped and restarted."""
    @functools.wraps(func)
    def timed_function(optimizer_instance, *args, **kwargs):
        start_time_path = "{}/.start_time".format(
            optimizer_instance.phase_output_path)
        try:
            # resume the clock recorded by a previous run
            with open(start_time_path) as f:
                start = float(f.read())
        except FileNotFoundError:
            start = time.time()
            with open(start_time_path, "w+") as f:
                f.write(str(start))
        result = func(optimizer_instance, *args, **kwargs)
        execution_time = str(dt.timedelta(seconds=time.time() - start))
        logger.info("{} took {} to run".format(
            optimizer_instance.phase_name, execution_time))
        with open("{}/execution_time".format(
                optimizer_instance.phase_output_path), "w+") as f:
            f.write(execution_time)
        return result
    return timed_function
Times the execution of a function . If the process is stopped and restarted then timing is continued using saved files .
244
23
12,320
def backup_path(self) -> str:
    """The path to the backed up optimizer folder."""
    return "{}/{}/{}{}/optimizer_backup".format(
        conf.instance.output_path,
        self.phase_path,
        self.phase_name,
        self.phase_tag,
    )
The path to the backed up optimizer folder .
56
10
12,321
def backup(self):
    """Copy files from the sym-linked optimizer folder to the backup
    folder in the workspace, replacing any previous backup."""
    try:
        shutil.rmtree(self.backup_path)
    except FileNotFoundError:
        pass
    try:
        shutil.copytree(self.opt_path, self.backup_path)
    except shutil.Error as e:
        # best-effort copy: log (e.g. broken symlinks) and continue
        logger.exception(e)
Copy files from the sym - linked optimizer folder to the backup folder in the workspace .
62
18
12,322
def restore(self):
    """Copy files from the backup folder to the sym-linked optimizer
    folder. No-op when no backup exists."""
    if not os.path.exists(self.backup_path):
        return
    for filename in glob.glob(self.backup_path + "/*"):
        shutil.copy(filename, self.path)
Copy files from the backup folder to the sym - linked optimizer folder .
49
15
12,323
def config(self, attribute_name, attribute_type=str):
    """Get a config field from this optimizer's section in
    non_linear.ini by key and value type."""
    section = self.__class__.__name__
    return self.named_config.get(section, attribute_name, attribute_type)
Get a config field from this optimizer s section in non_linear . ini by a key and value type .
43
24
12,324
def weighted_sample_instance_from_weighted_samples(self, index):
    """Set up a model instance of a weighted sample, together with its
    weight and likelihood."""
    model, weight, likelihood = \
        self.weighted_sample_model_from_weighted_samples(index)
    # cache the raw model vector for later inspection
    self._weighted_sample_model = model
    instance = self.variable.instance_from_physical_vector(model)
    return instance, weight, likelihood
Setup a model instance of a weighted sample including its weight and likelihood .
74
14
12,325
def weighted_sample_model_from_weighted_samples(self, index):
    """Return (model, weight, likelihood) of the weighted sample at
    *index*. Likelihood is recovered from stored -2*log(L) values."""
    model = list(self.pdf.samples[index])
    weight = self.pdf.weights[index]
    likelihood = -0.5 * self.pdf.loglikes[index]
    return model, weight, likelihood
From a weighted sample return the model weight and likelihood .
57
12
12,326
def compare_digest(a, b):
    """Compare two hash digests, dispatching to the implementation that
    matches the running major Python version."""
    if sys.version_info[0] >= 3:
        return _compare_digest_py3(a, b)
    return _compare_digest_py2(a, b)
Compare 2 hash digest .
60
5
12,327
def _render_trajectories(self,
                         trajectories: Tuple[NonFluents, Fluents, Fluents, Fluents, np.array]) -> None:
    """Print the first batch of simulated trajectories (verbose mode
    only)."""
    if not self._verbose:
        return
    non_fluents, initial_state, states, actions, interms, rewards = trajectories
    shape = states[0][1].shape
    batch_size, horizon = shape[0], shape[1]
    # keep only the first batch entry of every fluent
    states = [(name, values[0]) for name, values in states]
    interms = [(name, values[0]) for name, values in interms]
    actions = [(name, values[0]) for name, values in actions]
    rewards = np.reshape(rewards, [batch_size, horizon])[0]
    self._render_batch(non_fluents, states, actions, interms, rewards)
Prints the first batch of simulated trajectories .
201
10
12,328
def _render_batch(self,
                  non_fluents: NonFluents,
                  states: Fluents,
                  actions: Fluents,
                  interms: Fluents,
                  rewards: np.array,
                  horizon: Optional[int] = None) -> None:
    """Print non_fluents, states, actions, interms and rewards for the
    given horizon (defaults to the trajectory length)."""
    if horizon is None:
        horizon = len(states[0][1])
    self._render_round_init(horizon, non_fluents)
    for t in range(horizon):
        self._render_timestep(
            t,
            [(name, values[t]) for name, values in states],
            [(name, values[t]) for name, values in actions],
            [(name, values[t]) for name, values in interms],
            rewards[t])
    self._render_round_end(rewards)
Prints non_fluents states actions interms and rewards for given horizon .
196
16
12,329
def _render_timestep(self,
                     t: int,
                     s: Fluents,
                     a: Fluents,
                     f: Fluents,
                     r: np.float32) -> None:
    """Print fluents and reward for the given timestep *t*."""
    print("============================")
    print("TIME = {}".format(t))
    print("============================")
    rddl = self._compiler.rddl
    self._render_fluent_timestep('action', a, rddl.action_fluent_variables)
    self._render_fluent_timestep('interms', f, rddl.interm_fluent_variables)
    self._render_fluent_timestep('states', s, rddl.state_fluent_variables)
    self._render_reward(r)
Prints fluents and rewards for the given timestep t .
213
14
12,330
def _render_fluent_timestep ( self , fluent_type : str , fluents : Sequence [ Tuple [ str , np . array ] ] , fluent_variables : Sequence [ Tuple [ str , List [ str ] ] ] ) -> None : for fluent_pair , variable_list in zip ( fluents , fluent_variables ) : name , fluent = fluent_pair _ , variables = variable_list print ( name ) fluent = fluent . flatten ( ) for variable , value in zip ( variables , fluent ) : print ( '- {}: {} = {}' . format ( fluent_type , variable , value ) ) print ( )
Prints fluents of given fluent_type as list of instantiated variables with corresponding values .
140
19
12,331
def _render_reward ( self , r : np . float32 ) -> None : print ( "reward = {:.4f}" . format ( float ( r ) ) ) print ( )
Prints reward r .
42
5
12,332
def _render_round_init(self, horizon: int, non_fluents: NonFluents) -> None:
    """Print round init information: the horizon and the non-fluents."""
    print('*********************************************************')
    print('>>> ROUND INIT, horizon = {}'.format(horizon))
    print('*********************************************************')
    self._render_fluent_timestep(
        'non-fluents', non_fluents,
        self._compiler.rddl.non_fluent_variables)
Prints round init information about horizon and non_fluents .
115
13
12,333
def _render_round_end ( self , rewards : np . array ) -> None : print ( "*********************************************************" ) print ( ">>> ROUND END" ) print ( "*********************************************************" ) total_reward = np . sum ( rewards ) print ( "==> Objective value = {}" . format ( total_reward ) ) print ( "==> rewards = {}" . format ( list ( rewards ) ) ) print ( )
Prints round end information about rewards .
97
8
12,334
def _truncate_to_field ( model , field_name , value ) : field = model . _meta . get_field ( field_name ) # pylint: disable=protected-access if len ( value ) > field . max_length : midpoint = field . max_length // 2 len_after_midpoint = field . max_length - midpoint first = value [ : midpoint ] sep = '...' last = value [ len ( value ) - len_after_midpoint + len ( sep ) : ] value = sep . join ( [ first , last ] ) return value
Shorten data to fit in the specified model field .
130
11
12,335
def on_failure(self, exc, task_id, args, kwargs, einfo):
    """If the task fails, persist a record of it — at most one
    unresolved record per task_id."""
    unresolved = FailedTask.objects.filter(
        task_id=task_id, datetime_resolved=None)
    if not unresolved.exists():
        FailedTask.objects.create(
            task_name=_truncate_to_field(FailedTask, 'task_name', self.name),
            task_id=task_id,  # Fixed length UUID: No need to truncate
            args=args,
            kwargs=kwargs,
            exc=_truncate_to_field(FailedTask, 'exc', repr(exc)),
        )
    super(PersistOnFailureTask, self).on_failure(
        exc, task_id, args, kwargs, einfo)
If the task fails persist a record of the task .
175
11
12,336
def render(self,
           trajectories: Tuple[NonFluents, Fluents, Fluents, Fluents, np.array],
           batch: Optional[int] = None) -> None:
    """Render the simulated trajectories for the given batch.

    Abstract — subclasses must implement.
    """
    raise NotImplementedError
Renders the simulated trajectories for the given batch .
48
11
12,337
def distribution(self, limit=1024):
    """Build the distribution of distinct values of this column.

    Returns a DataFrame with columns value/cnt/r/fraction, indexed by
    rank ``r`` (descending count order).
    """
    res = self._qexec(
        "%s, count(*) as __cnt" % self.name(),
        group="%s" % self.name(),
        order="__cnt DESC LIMIT %d" % limit)
    total = float(self._table.size())
    rows = [list(r) + [rank, r[1] / total] for rank, r in enumerate(res)]
    self._distribution = pd.DataFrame(
        rows, columns=["value", "cnt", "r", "fraction"])
    self._distribution.index = self._distribution.r
    return self._distribution
Build the distribution of distinct values
171
6
12,338
def parse(self, name):
    """Parse a distribution string and store its signature.

    Raises ValueError for unrecognized distribution names.
    """
    name = name.strip()
    match = self._parseFedora(name)
    if not match:
        raise ValueError("Distribution name '%s' not recognized" % name)
    self._signature = DistributionNameSignature("Fedora", match.group(1))
    return self
Parse distribution string
72
4
12,339
def get_token(url: str, scopes: str, credentials_dir: str) -> dict:
    """Get access token info for the 'lizzy' token via the tokens
    library."""
    tokens.configure(url=url, dir=credentials_dir)
    tokens.manage('lizzy', [scopes])
    tokens.start()
    return tokens.get('lizzy')
Get access token info .
66
5
12,340
def config(config, fork_name="", origin_name=""):
    """Set various configuration options; with no options given, show
    the current fork-name."""
    state = read(config.configfile)
    any_set = False
    for value, key, label in (
            (fork_name, "FORK_NAME", "fork-name"),
            (origin_name, "ORIGIN_NAME", "origin-name")):
        if value:
            update(config.configfile, {key: value})
            success_out("{} set to: {}".format(label, value))
            any_set = True
    if not any_set:
        info_out("Fork-name: {}".format(state["FORK_NAME"]))
Setting various configuration options
167
4
12,341
def set_area_to_sip_signature(self, xmin, xmax, zmin, zmax, spectrum):
    """Parameterize the eit instance by applying one SIP spectrum to
    the given rectangular area for every frequency."""
    assert isinstance(spectrum, (sip_response, sip_response2))
    assert np.all(self.frequencies == spectrum.frequencies)
    for frequency, rmag, rpha in zip(
            self.frequencies, spectrum.rmag, spectrum.rpha):
        td = self.tds[frequency]
        pidm, pidp = td.a['forward_model']
        td.parman.modify_area(pidm, xmin, xmax, zmin, zmax, rmag)
        td.parman.modify_area(pidp, xmin, xmax, zmin, zmax, rpha)
Parameterize the eit instance by supplying one SIP spectrum and the area to apply to .
165
19
12,342
def add_homogeneous_model(self, magnitude, phase=0, frequency=None):
    """Add homogeneous models to one or all tomodirs and register them
    as forward models."""
    if frequency is None:
        targets = self.frequencies
    else:
        assert isinstance(frequency, Number)
        targets = [frequency, ]
    for freq in targets:
        pidm, pidp = self.tds[freq].add_homogeneous_model(magnitude, phase)
        self.a['forward_rmag'][freq] = pidm
        self.a['forward_rpha'][freq] = pidp
Add homogeneous models to one or all tomodirs . Register those as forward models
115
17
12,343
def apply_crtomo_cfg(self):
    """Propagate the global crtomo_cfg (as a copy) to the tomodirs of
    all frequencies."""
    for _, td in sorted(self.tds.items()):
        td.crtomo_cfg = self.crtomo_cfg.copy()
Set the global crtomo_cfg for all frequencies
52
11
12,344
def apply_noise_models(self):
    """Propagate the global noise_model to the tomodirs of all
    frequencies."""
    for _, td in sorted(self.tds.items()):
        td.noise_model = self.noise_model
Set the global noise_model for all frequencies
43
9
12,345
def load_inversion_results(self, sipdir):
    """Given an sEIT inversion directory, load inversion results and
    store the corresponding parameter ids in ``self.a``.

    NOTE(review): membership is checked on ``self.assigments`` (sic)
    while assignment goes to ``self.a`` — presumably aliases; confirm
    against the class definition.
    """
    # load frequencies and initialize tomodir objects for all frequencies
    frequency_file = sipdir + os.sep + 'frequencies.dat'
    self._init_frequencies(np.loadtxt(frequency_file))
    # cycle through all tomodirs on disc and load the data
    for nr, (frequency_key, item) in enumerate(sorted(self.tds.items())):
        for label in ('rmag', 'rpha', 'cre', 'cim'):
            if label not in self.assigments:
                self.a[label] = {}
        tdir = sipdir + os.sep + 'invmod' + os.sep + '{:02}_{:.6f}'.format(
            nr, frequency_key) + os.sep
        # magnitudes: last iteration's .mag file, third column
        rmag_file = sorted(glob(tdir + 'inv/*.mag'))[-1]
        rmag_data = np.loadtxt(rmag_file, skiprows=1)[:, 2]
        self.a['rmag'][frequency_key] = item.parman.add_data(rmag_data)
        # phases: last iteration's .pha file, third column
        rpha_file = sorted(glob(tdir + 'inv/*.pha'))[-1]
        rpha_data = np.loadtxt(rpha_file, skiprows=1)[:, 2]
        self.a['rpha'][frequency_key] = item.parman.add_data(rpha_data)
        # complex conductivity: real and imaginary columns of .sig file
        sigma_file = sorted(glob(tdir + 'inv/*.sig'))[-1]
        sigma_data = np.loadtxt(sigma_file, skiprows=1)
        self.a['cre'][frequency_key] = item.parman.add_data(sigma_data[:, 0])
        self.a['cim'][frequency_key] = item.parman.add_data(sigma_data[:, 1])
Given an sEIT inversion directory load inversion results and store the corresponding parameter ids in self . assignments
497
23
12,346
def plot_forward_models(self, maglim=None, phalim=None, **kwargs):
    """Create plots of the forward models (magnitude and phase).

    Returns a dict with 'rmag'/'rpha' entries, each holding the figure
    and axes objects.
    """
    return_dict = {}
    N = len(self.frequencies)
    nrx = min(N, 4)
    nrz = int(np.ceil(N / nrx))
    for index, key, limits in zip(
            (0, 1), ('rmag', 'rpha'), (maglim, phalim)):
        if limits is None:
            cbmin, cbmax = None, None
        else:
            cbmin, cbmax = limits[0], limits[1]
        fig, axes = plt.subplots(
            nrz, nrx,
            figsize=(16 / 2.54, nrz * 3 / 2.54),
            sharex=True,
            sharey=True,
        )
        # hide all panels, then re-enable one per frequency
        for ax in axes.flat:
            ax.set_visible(False)
        for ax, frequency in zip(axes.flat, self.frequencies):
            ax.set_visible(True)
            td = self.tds[frequency]
            pids = td.a['forward_model']
            td.plot.plot_elements_to_ax(
                pids[index],
                ax=ax,
                plot_colorbar=True,
                cbposition='horizontal',
                cbmin=cbmin,
                cbmax=cbmax,
                **kwargs)
        # only the outer panels keep their axis labels
        for ax in axes[0:-1, :].flat:
            ax.set_xlabel('')
        for ax in axes[:, 1:].flat:
            ax.set_ylabel('')
        fig.tight_layout()
        return_dict[key] = {'fig': fig, 'axes': axes, }
    return return_dict
Create plots of the forward models
388
6
12,347
def add_to_configs(self, configs):
    """Add the given configurations to all tomodirs."""
    for _, td in self.tds.items():
        td.configs.add_to_configs(configs)
Add configurations to all tomodirs
43
7
12,348
def model(self, **kwargs):
    """Run the forward modeling for all frequencies."""
    for td in self.tds.values():
        td.model(**kwargs)
Run the forward modeling for all frequencies .
36
8
12,349
def measurements(self):
    """Return modeled measurements stacked over sorted frequencies."""
    return np.array([
        self.tds[key].measurements()
        for key in sorted(self.tds.keys())
    ])
Return modeled measurements
46
3
12,350
def get_measurement_responses(self):
    """Return a dict mapping configuration tuples to sip_response
    objects for the modeled SIP spectra."""
    # take configurations from the first tomodir
    first_key = sorted(self.tds.keys())[0]
    configs = self.tds[first_key].configs.configs
    measurements = self.measurements()
    responses = {}
    for config, sip_measurement in zip(
            configs, np.rollaxis(measurements, 1)):
        responses[tuple(config)] = sip_response(
            frequencies=self.frequencies,
            rmag=sip_measurement[:, 0],
            rpha=sip_measurement[:, 1])
    return responses
Return a dictionary of sip_responses for the modeled SIP spectra
137
15
12,351
def create_database(name, number=1, force_clear=False):
    """Command to create a database.

    Currently only echoes the received arguments (debug stub).
    """
    # BUG FIX: converted Python 2 print statements (a SyntaxError on
    # Python 3) to print() calls.
    print('Got:')
    print('name', name, type(name))
    print('number', number, type(number))
    print('force_clear', force_clear, type(force_clear))
Command to create a database
61
5
12,352
def _get_long_path_name(path):
    """Return the long path name for a Windows path, i.e. the properly
    cased path of an existing file or directory.

    Falls back to the input path on failure.
    """
    # Thanks to http://stackoverflow.com/a/3694799/791713
    buf = ctypes.create_unicode_buffer(len(path) + 1)
    GetLongPathNameW = ctypes.windll.kernel32.GetLongPathNameW
    res = GetLongPathNameW(path, buf, len(path) + 1)
    # 0 => API failure; > 260 => result exceeded the buffer (MAX_PATH)
    if res == 0 or res > 260:
        return path
    return buf.value
Returns the long path name for a Windows path i . e . the properly cased path of an existing file or directory .
108
25
12,353
def get_dependency_walker():
    """Locate depends.exe on the system PATH or in the temp directory,
    downloading and extracting it if necessary.

    Note that the downloaded files are not deleted afterwards.
    """
    for dirname in os.getenv('PATH', '').split(os.pathsep):
        filename = os.path.join(dirname, 'depends.exe')
        if os.path.isfile(filename):
            logger.info('Dependency Walker found at "{}"'.format(filename))
            return filename
    temp_exe = os.path.join(tempfile.gettempdir(), 'depends.exe')
    temp_dll = os.path.join(tempfile.gettempdir(), 'depends.dll')
    if os.path.isfile(temp_exe):
        logger.info('Dependency Walker found at "{}"'.format(temp_exe))
        return temp_exe
    logger.info('Dependency Walker not found. Downloading ...')
    with urlopen('http://dependencywalker.com/depends22_x64.zip') as fp:
        data = fp.read()
    logger.info('Extracting Dependency Walker to "{}"'.format(temp_exe))
    with zipfile.ZipFile(io.BytesIO(data)) as archive:
        for member, target in (('depends.exe', temp_exe),
                               ('depends.dll', temp_dll)):
            with archive.open(member) as src:
                with open(target, 'wb') as dst:
                    shutil.copyfileobj(src, dst)
    return temp_exe
Checks if depends . exe is in the system PATH . If not it will be downloaded and extracted to a temporary directory . Note that the file will not be deleted afterwards .
358
36
12,354
def prepare(self, setup_func):
    """Decorator factory: run *setup_func* (a zero-argument function)
    to set up an environment before the wrapped command executes.

    Raises ValueError if setup_func declares any arguments.
    """
    assert inspect.isfunction(setup_func)
    # BUG FIX: inspect.getargspec was removed in Python 3.11; use
    # getfullargspec, which has the same .args attribute.
    argsspec = inspect.getfullargspec(setup_func)
    if argsspec.args:
        raise ValueError("prepare function shouldn't have any arguments")

    def decorator(command_func):
        @functools.wraps(command_func)
        def wrapper(*args, **kwgs):
            # Run setup_func before command_func
            setup_func()
            return command_func(*args, **kwgs)
        return wrapper
    return decorator
This decorator wraps a function which sets up an environment before running a command
122
14
12,355
def addPort(n: LNode, intf: Interface):
    """Add a LayoutExternalPort for interface *intf* to node *n* and
    return it."""
    direction = PortTypeFromDir(intf._direction)
    ext_p = LayoutExternalPort(
        n, name=intf._name, direction=direction, node2lnode=n._node2lnode)
    ext_p.originObj = originObjOfPort(intf)
    n.children.append(ext_p)
    addPortToLNode(ext_p, intf, reverseDirection=True)
    return ext_p
Add LayoutExternalPort for interface
111
6
12,356
def drawtree(self):
    """Redraw the tree: walk all nodes, propagate view state to each
    child, draw one line per visible node, then refresh and rebuild the
    header/footer for the currently selected node."""
    self.win.erase()
    self.line = 0
    for child, depth in self.traverse():
        # keep every node's view state in sync with the root
        child.curline = self.curline
        child.picked = self.picked
        child.expanded = self.expanded
        child.sized = self.sized
        if depth == 0:
            continue  # the root node itself is not drawn
        if self.line == self.curline:
            self.color.curline(child.name, child.picked)
            # remember the highlighted node for the header/footer below
            children = child.children
            name = child.name
        else:
            self.color.default(child.name, child.picked)
        if child.name in self.sized and not self.sized[child.name]:
            self.sized[child.name] = " [" + du(child.name) + "]"
        child.drawline(depth, self.line, self.win)
        self.line += 1
    self.win.refresh()
    # NOTE(review): name/children are only bound when a drawn line
    # matched curline — presumably guaranteed by the caller; confirm.
    self.mkheader(name)
    self.mkfooter(name, children)
Loop over the object process path attribute sets and drawlines based on their current contents .
201
17
12,357
def import_config(config_path):
    """Import a Config from a given path relative to the current
    directory.

    Raises ConfigBuilderError when the file is missing or does not
    define a module-level ``config`` Config instance.
    """
    if not os.path.isfile(config_path):
        raise ConfigBuilderError('Could not find config file: ' + config_path)
    # BUG FIX: loader.load_module() was deprecated and removed in
    # Python 3.12; use the spec/exec_module API instead.
    spec = importlib.util.spec_from_file_location(config_path, config_path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    if not hasattr(module, 'config') or not isinstance(module.config, Config):
        raise ConfigBuilderError(
            'Could not load config file "{}": config files must contain '
            'a variable called "config" that is '
            'assigned to a Config object.'.format(config_path))
    return module.config
Import a Config from a given path relative to the current directory .
142
13
12,358
def grid(fitness_function, no_dimensions, step_size):
    """Grid search using a fitness function over a given number of
    dimensions with the given step size, within [0, 1].

    Returns the argument tuple with the highest fitness.
    """
    best_fitness = float("-inf")
    best_arguments = None
    for arguments in make_lists(no_dimensions, step_size):
        candidate = tuple(arguments)
        fitness = fitness_function(candidate)
        if fitness > best_fitness:
            best_fitness, best_arguments = fitness, candidate
    return best_arguments
Grid search using a fitness function over a given number of dimensions and a given step size between inclusive limits of 0 and 1 .
90
25
12,359
def make_lists(no_dimensions, step_size, centre_steps=True):
    """Recursively create a list of lists of floats covering every
    combination across *no_dimensions* of points with integer step size
    between 0 and 1.

    With centre_steps each coordinate is shifted by half a step.
    """
    if no_dimensions == 0:
        return [[]]
    tails = make_lists(no_dimensions - 1, step_size,
                       centre_steps=centre_steps)
    offset = 0.5 * step_size if centre_steps else 0
    steps = range(0, int(1 / step_size))
    return [[step_size * value + offset] + tail
            for value in steps
            for tail in tails]
Create a list of lists of floats covering every combination across no_dimensions of points of integer step size between 0 and 1 inclusive .
117
27
12,360
def portCnt ( port ) : if port . children : return sum ( map ( lambda p : portCnt ( p ) , port . children ) ) else : return 1
recursively count number of ports without children
37
9
12,361
def copyPort ( port , targetLNode , reverseDir , topPortName = None ) : newP = _copyPort ( port , targetLNode , reverseDir ) if topPortName is not None : newP . name = topPortName return newP
Create identical port on targetNode
55
6
12,362
def walkSignalPorts ( rootPort : LPort ) : if rootPort . children : for ch in rootPort . children : yield from walkSignalPorts ( ch ) else : yield rootPort
recursively walk ports without any children
43
8
12,363
def agent_error ( e : requests . HTTPError , fatal = True ) : try : data = e . response . json ( ) details = data [ 'detail' ] # type: str except JSONDecodeError : details = e . response . text or str ( e . response ) lines = ( '[AGENT] {}' . format ( line ) for line in details . splitlines ( ) ) msg = '\n' + '\n' . join ( lines ) if fatal : fatal_error ( msg ) else : error ( msg )
Prints an agent error and exits if fatal
115
7
12,364
def parse_stack_refs ( stack_references : List [ str ] ) -> List [ str ] : stack_names = [ ] references = list ( stack_references ) references . reverse ( ) while references : current = references . pop ( ) # current that might be a file file_path = os . path . abspath ( current ) if os . path . exists ( file_path ) and os . path . isfile ( file_path ) : try : with open ( file_path ) as fd : data = yaml . safe_load ( fd ) current = data [ 'SenzaInfo' ] [ 'StackName' ] except ( KeyError , TypeError , YAMLError ) : raise click . UsageError ( 'Invalid senza definition {}' . format ( current ) ) stack_names . append ( current ) return stack_names
Check if items included in stack_references are Senza definition file paths or stack name references . If a Senza definition file path is found substitute the definition file path with the stack name in the same position in the list .
185
43
12,365
def list_stacks ( stack_ref : List [ str ] , all : bool , remote : str , region : str , watch : int , output : str ) : lizzy = setup_lizzy_client ( remote ) stack_references = parse_stack_refs ( stack_ref ) while True : rows = [ ] for stack in lizzy . get_stacks ( stack_references , region = region ) : creation_time = dateutil . parser . parse ( stack [ 'creation_time' ] ) rows . append ( { 'stack_name' : stack [ 'stack_name' ] , 'version' : stack [ 'version' ] , 'status' : stack [ 'status' ] , 'creation_time' : creation_time . timestamp ( ) , 'description' : stack [ 'description' ] } ) rows . sort ( key = lambda x : ( x [ 'stack_name' ] , x [ 'version' ] ) ) with OutputFormat ( output ) : print_table ( 'stack_name version status creation_time description' . split ( ) , rows , styles = STYLES , titles = TITLES ) if watch : # pragma: no cover time . sleep ( watch ) click . clear ( ) else : break
List Lizzy stacks
270
4
12,366
def traffic ( stack_name : str , stack_version : Optional [ str ] , percentage : Optional [ int ] , region : Optional [ str ] , remote : Optional [ str ] , output : Optional [ str ] ) : lizzy = setup_lizzy_client ( remote ) if percentage is None : stack_reference = [ stack_name ] with Action ( 'Requesting traffic info..' ) : stack_weights = [ ] for stack in lizzy . get_stacks ( stack_reference , region = region ) : if stack [ 'status' ] in [ 'CREATE_COMPLETE' , 'UPDATE_COMPLETE' ] : stack_id = '{stack_name}-{version}' . format_map ( stack ) traffic = lizzy . get_traffic ( stack_id , region = region ) stack_weights . append ( { 'stack_name' : stack_name , 'version' : stack [ 'version' ] , 'identifier' : stack_id , 'weight%' : traffic [ 'weight' ] } ) cols = 'stack_name version identifier weight%' . split ( ) with OutputFormat ( output ) : print_table ( cols , sorted ( stack_weights , key = lambda x : x [ 'identifier' ] ) ) else : with Action ( 'Requesting traffic change..' ) : stack_id = '{stack_name}-{stack_version}' . format_map ( locals ( ) ) lizzy . traffic ( stack_id , percentage , region = region )
Manage stack traffic
331
4
12,367
def scale ( stack_name : str , stack_version : Optional [ str ] , new_scale : int , region : Optional [ str ] , remote : Optional [ str ] ) : lizzy = setup_lizzy_client ( remote ) with Action ( 'Requesting rescale..' ) : stack_id = '{stack_name}-{stack_version}' . format_map ( locals ( ) ) lizzy . scale ( stack_id , new_scale , region = region )
Rescale a stack
106
5
12,368
def delete ( stack_ref : List [ str ] , region : str , dry_run : bool , force : bool , remote : str ) : lizzy = setup_lizzy_client ( remote ) stack_refs = get_stack_refs ( stack_ref ) all_with_version = all ( stack . version is not None for stack in stack_refs ) # this is misleading but it's the current behaviour of senza # TODO Lizzy list (stack_refs) to see if it actually matches more than one stack # to match senza behaviour if ( not all_with_version and not dry_run and not force ) : fatal_error ( 'Error: {} matching stacks found. ' . format ( len ( stack_refs ) ) + 'Please use the "--force" flag if you really want to delete multiple stacks.' ) # TODO pass force option to agent output = '' for stack in stack_refs : if stack . version is not None : stack_id = '{stack.name}-{stack.version}' . format ( stack = stack ) else : stack_id = stack . name with Action ( "Requesting stack '{stack_id}' deletion.." , stack_id = stack_id ) : output = lizzy . delete ( stack_id , region = region , dry_run = dry_run ) print ( output )
Delete Cloud Formation stacks
298
4
12,369
def pydict2xml ( filename , metadata_dict , * * kwargs ) : try : f = open ( filename , 'w' ) f . write ( pydict2xmlstring ( metadata_dict , * * kwargs ) . encode ( 'utf-8' ) ) f . close ( ) except : raise MetadataGeneratorException ( 'Failed to create an XML file. Filename: %s' % ( filename ) )
Create an XML file .
97
5
12,370
def pydict2xmlstring ( metadata_dict , * * kwargs ) : ordering = kwargs . get ( 'ordering' , UNTL_XML_ORDER ) root_label = kwargs . get ( 'root_label' , 'metadata' ) root_namespace = kwargs . get ( 'root_namespace' , None ) elements_namespace = kwargs . get ( 'elements_namespace' , None ) namespace_map = kwargs . get ( 'namespace_map' , None ) root_attributes = kwargs . get ( 'root_attributes' , None ) # Set any root namespace and namespace map. if root_namespace and namespace_map : root = Element ( root_namespace + root_label , nsmap = namespace_map ) elif namespace_map : root = Element ( root_label , nsmap = namespace_map ) else : root = Element ( root_label ) # Set any root element attributes. if root_attributes : for key , value in root_attributes . items ( ) : root . attrib [ key ] = value # Create an XML structure from field list. for metadata_key in ordering : if metadata_key in metadata_dict : for element in metadata_dict [ metadata_key ] : if 'content' in element and 'qualifier' in element : create_dict_subelement ( root , metadata_key , element [ 'content' ] , attribs = { 'qualifier' : element [ 'qualifier' ] } , namespace = elements_namespace , ) elif 'content' in element and 'role' in element : create_dict_subelement ( root , metadata_key , element [ 'content' ] , attribs = { 'role' : element [ 'role' ] } , namespace = elements_namespace , ) elif 'content' in element and 'scheme' in element : create_dict_subelement ( root , metadata_key , element [ 'content' ] , attribs = { 'scheme' : element [ 'scheme' ] } , namespace = elements_namespace , ) elif 'content' in element : create_dict_subelement ( root , metadata_key , element [ 'content' ] , namespace = elements_namespace , ) # Create the XML tree. return '<?xml version="1.0" encoding="UTF-8"?>\n' + tostring ( root , pretty_print = True )
Create an XML string from a metadata dictionary .
545
9
12,371
def create_dict_subelement ( root , subelement , content , * * kwargs ) : attribs = kwargs . get ( 'attribs' , None ) namespace = kwargs . get ( 'namespace' , None ) key = subelement # Add subelement's namespace and attributes. if namespace and attribs : subelement = SubElement ( root , namespace + subelement , attribs ) elif namespace : subelement = SubElement ( root , namespace + subelement ) elif attribs : subelement = SubElement ( root , subelement , attribs ) # Otherwise, create SubElement without any extra data. else : subelement = SubElement ( root , subelement ) if not isinstance ( content , dict ) : subelement . text = content # Do special case ordering for degree children on etd_ms. elif key == 'degree' : for degree_order_key in DEGREE_ORDER : for descriptor , value in content . items ( ) : if descriptor == degree_order_key : sub_descriptors = SubElement ( subelement , descriptor ) sub_descriptors . text = value else : for descriptor , value in content . items ( ) : sub_descriptors = SubElement ( subelement , descriptor ) sub_descriptors . text = value
Create a XML subelement from a Python dictionary .
286
10
12,372
def highwiredict2xmlstring ( highwire_elements , ordering = HIGHWIRE_ORDER ) : # Sort the elements by the ordering list. highwire_elements . sort ( key = lambda obj : ordering . index ( obj . name ) ) root = Element ( 'metadata' ) for element in highwire_elements : attribs = { 'name' : element . name , 'content' : element . content } SubElement ( root , 'meta' , attribs ) # Create the XML tree. return '<?xml version="1.0" encoding="UTF-8"?>\n' + tostring ( root , pretty_print = True )
Create an XML string from the list of highwire elements .
144
11
12,373
def get ( binary_name ) : if binary_name not in binaries : raise Exception ( 'binary_name: {0} not found' . format ( binary_name ) ) system = platform . system ( ) binary_list = binaries [ binary_name ] [ system ] # check list for a valid entry for filename in binary_list : valid_file = shutil . which ( filename ) if valid_file : return os . path . abspath ( valid_file )
return a valid path to the given binary . Return an error if no existing binary can be found .
100
20
12,374
def get_upgrade_lock ( dbname , connect_str , timeout = LOCK_TIMEOUT ) : # # Open connection and try to get the lock # engine = sqlalchemy . create_engine ( connect_str ) cursor = engine . execute ( "SELECT GET_LOCK('upgrade_{}', {})" . format ( dbname , timeout ) ) lock = cursor . scalar ( ) cursor . close ( ) # # Keep trying until you get it. # while not lock : logger . info ( 'Cannot acquire {} upgrade lock. Sleeping {} seconds.' . format ( dbname , timeout ) ) time . sleep ( timeout ) cursor = engine . execute ( "SELECT GET_LOCK('upgrade_{}', {})" . format ( dbname , timeout ) ) lock = cursor . scalar ( ) cursor . close ( ) logger . info ( 'Acquired {} upgrade lock' . format ( dbname ) ) yield lock # # Release the lock and close the connection. # cursor = engine . execute ( "SELECT RELEASE_LOCK('upgrade_{}')" . format ( dbname ) ) cursor . close ( ) engine . dispose ( ) logger . info ( 'Released {} upgrade lock' . format ( dbname ) )
Wait until you can get the lock then yield it and eventually release it .
260
15
12,375
def upgrade ( dbname , connect_str , alembic_conf ) : # # The db has to exist before we can get the lock. On the off-chance that another process creates the db between # checking if it exists and running the create, ignore the exception. # if not sqlalchemy_utils . database_exists ( connect_str ) : logger . info ( 'Creating {}' . format ( dbname ) ) try : sqlalchemy_utils . create_database ( connect_str ) except sqlalchemy . exc . ProgrammingError as exc : if not sqlalchemy_utils . database_exists ( connect_str ) : logger . error ( 'Could not create {}' . format ( dbname ) ) raise exc with get_upgrade_lock ( dbname , connect_str ) : alembic_config = alembic . config . Config ( alembic_conf , attributes = { 'configure_logger' : False } ) logger . info ( 'Upgrading {} to head' . format ( dbname ) ) alembic . command . upgrade ( alembic_config , 'head' )
Get the database's upgrade lock and run alembic .
242
12
12,376
def write_to_file ( self , filename ) : fid = open ( filename , 'w' ) for key in self . key_order : if ( key == - 1 ) : fid . write ( '\n' ) else : fid . write ( '{0}\n' . format ( self [ key ] ) ) fid . close ( )
Write the configuration to a file . Use the correct order of values .
74
14
12,377
def parse ( self , importpath ) : # reset default values self . native = False self . _prefix = "" self . _package = "" url = re . sub ( r'http://' , '' , importpath ) url = re . sub ( r'https://' , '' , url ) # is import path native package? if url . split ( '/' ) [ 0 ] in self . native_packages [ "packages" ] : self . native = True return self for regex in self . known_ipprefixes : match = re . search ( regex , url ) if match : self . _prefix = match . group ( 1 ) if match . group ( 3 ) : self . _package = match . group ( 3 ) return self raise ValueError ( "Import path prefix for '%s' not recognized" % importpath )
Parse import path . Determine if the path is native or starts with known prefix .
177
18
12,378
def sub_retab ( match ) : before = match . group ( 1 ) tabs = len ( match . group ( 2 ) ) return before + ( ' ' * ( TAB_SIZE * tabs - len ( before ) % TAB_SIZE ) )
Remove all tabs and convert them into spaces .
54
10
12,379
def handle_whitespace ( text ) : text = re_retab . sub ( sub_retab , text ) text = re_whitespace . sub ( '' , text ) . strip ( ) return text
Handles whitespace cleanup .
46
7
12,380
def get_variables ( text ) : variables = { var : value for var , value in re_vars . findall ( text ) } text = re_vars . sub ( '' , text ) return text , variables
Extracts variables that can be used in templating engines .
48
14
12,381
def get_references ( text ) : references = { } for ref_id , link , _ , title in re_references . findall ( text ) : ref_id = re . sub ( r'<(.*?)>' , r'\1' , ref_id ) . lower ( ) . strip ( ) references [ ref_id ] = ( link , title ) text = re_references . sub ( '' , text ) return text , references
Retrieves all link references within the text .
99
10
12,382
def get_footnote_backreferences ( text , markdown_obj ) : footnotes = OrderedDict ( ) for footnote_id , footnote in re_footnote_backreferences . findall ( text ) : footnote_id = re . sub ( r'<(.*?)>' , r'\1' , footnote_id ) . lower ( ) . strip ( ) footnote = re . sub ( r'^[ ]{0,4}' , '' , footnote , flags = re . M ) footnotes [ footnote_id ] = footnote text = re_footnote_backreferences . sub ( '' , text ) return text , footnotes
Retrieves all footnote backreferences within the text .
143
12
12,383
def hash_blocks ( text , hashes ) : def sub ( match ) : block = match . group ( 1 ) hashed = hash_text ( block , 'block' ) hashes [ hashed ] = block return '\n\n' + hashed + '\n\n' return re_block . sub ( sub , text )
Hashes HTML block tags .
72
6
12,384
def hash_lists ( text , hashes , markdown_obj ) : for style , marker in ( ( 'u' , '[+*-]' ) , ( 'o' , r'\d+\.' ) ) : list_re = re . compile ( re_list % ( marker , marker ) , re . S | re . X ) # import pdb # pdb.set_trace() for match in list_re . finditer ( text ) : if not match : continue lst = match . group ( 1 ) items = re . split ( r'(?:\n|\A) {0,3}%s ' % marker , lst ) [ 1 : ] whole_list = '' for item in items : item = re . sub ( r'^ {1,4}' , '' , item , flags = re . M ) item = markdown_obj . convert ( item ) par_match = re . match ( '<p>(.*?)</p>' , item , flags = re . S ) if par_match and par_match . group ( 0 ) == item . strip ( ) : item = par_match . group ( 1 ) whole_list += '<li>{}</li>\n' . format ( item ) whole_list = '<{0}l>\n{1}\n</{0}l>' . format ( style , re . sub ( '^' , ' ' , whole_list . strip ( ) , flags = re . M ) ) hashed = hash_text ( whole_list , 'list' ) hashes [ hashed ] = whole_list start = text . index ( match . group ( 0 ) ) end = start + len ( match . group ( 0 ) ) text = text [ : start ] + '\n\n' + hashed + '\n\n' + text [ end : ] return text
Hashes ordered and unordered lists .
409
8
12,385
def hash_blockquotes ( text , hashes , markdown_obj ) : def sub ( match ) : block = match . group ( 1 ) . strip ( ) block = re . sub ( r'(?:(?<=\n)|(?<=\A))> ?' , '' , block ) block = markdown_obj . convert ( block ) block = '<blockquote>{}</blockquote>' . format ( block ) hashed = hash_text ( block , 'blockquote' ) hashes [ hashed ] = block return '\n\n' + hashed + '\n\n' return re_blockquote . sub ( sub , text )
Hashes block quotes .
147
5
12,386
def hash_codes ( text , hashes ) : def sub ( match ) : code = '<code>{}</code>' . format ( escape ( match . group ( 2 ) ) ) hashed = hash_text ( code , 'code' ) hashes [ hashed ] = code return hashed return re_code . sub ( sub , text )
Hashes inline code tags .
75
6
12,387
def hash_tags ( text , hashes ) : def sub ( match ) : hashed = hash_text ( match . group ( 0 ) , 'tag' ) hashes [ hashed ] = match . group ( 0 ) return hashed return re_tag . sub ( sub , text )
Hashes any non - block tags .
60
8
12,388
def unhash ( text , hashes ) : def retrieve_match ( match ) : return hashes [ match . group ( 0 ) ] while re_hash . search ( text ) : text = re_hash . sub ( retrieve_match , text ) text = re_pre_tag . sub ( lambda m : re . sub ( '^' + m . group ( 1 ) , '' , m . group ( 0 ) , flags = re . M ) , text ) return text
Unhashes all hashed entities in the hashes dictionary .
99
13
12,389
def paragraph_sub ( match ) : text = re . sub ( r' \n' , r'\n<br/>\n' , match . group ( 0 ) . strip ( ) ) return '<p>{}</p>' . format ( text )
Captures paragraphs .
58
4
12,390
def truncateGraph ( graph , root_nodes ) : subgraph = Graph ( ) for node in root_nodes : subgraph = GraphUtils . joinGraphs ( subgraph , GraphUtils . getReacheableSubgraph ( graph , node ) ) return subgraph
Create a set of all nodes containing the root_nodes and all nodes reachable from them
60
22
12,391
def filterGraph ( graph , node_fnc ) : nodes = filter ( lambda l : node_fnc ( l ) , graph . nodes ( ) ) edges = { } gedges = graph . edges ( ) for u in gedges : if u not in nodes : continue for v in gedges [ u ] : if v not in nodes : continue try : edges [ u ] . append ( v ) except KeyError : edges [ u ] = [ v ] return Graph ( nodes , edges )
Remove all nodes for which node_fnc does not hold
107
12
12,392
def listdir ( self , path ) : for f in os . listdir ( path ) : if not f . startswith ( '.' ) : yield f
Return a list of all non dotfiles in a given directory .
34
13
12,393
def getchildren ( self ) : try : if self . hidden : return [ os . path . join ( self . name , child ) for child in sorted ( self . listdir ( self . name ) ) ] else : return [ os . path . join ( self . name , child ) for child in sorted ( os . listdir ( self . name ) ) ] except OSError : return None
Create list of absolute paths to be used to instantiate path objects for traversal based on whether or not hidden attribute is set .
84
26
12,394
def getpaths ( self ) : self . children = self . getchildren ( ) if self . children is None : return if self . paths is None : self . paths = [ Paths ( self . screen , os . path . join ( self . name , child ) , self . hidden , self . picked , self . expanded , self . sized ) for child in self . children ] return self . paths
If we have children use a list comprehension to instantiate new paths objects to traverse .
85
17
12,395
def traverse ( self ) : yield self , 0 if self . name in self . expanded : for path in self . getpaths ( ) : for child , depth in path . traverse ( ) : yield child , depth + 1
Recursive generator that lazily unfolds the filesystem .
47
10
12,396
def line_line_intersect ( x , y ) : A = x [ 0 ] * y [ 1 ] - y [ 0 ] * x [ 1 ] B = x [ 2 ] * y [ 3 ] - y [ 2 ] * x [ 4 ] C = ( x [ 0 ] - x [ 1 ] ) * ( y [ 2 ] - y [ 3 ] ) - ( y [ 0 ] - y [ 1 ] ) * ( x [ 2 ] - x [ 3 ] ) Ix = ( A * ( x [ 2 ] - x [ 3 ] ) - ( x [ 0 ] - x [ 1 ] ) * B ) / C Iy = ( A * ( y [ 2 ] - y [ 3 ] ) - ( y [ 0 ] - y [ 1 ] ) * B ) / C return Ix , Iy
Compute the intersection point of two lines
178
8
12,397
def pkg_data_filename ( resource_name , filename = None ) : resource_filename = pkg_resources . resource_filename ( tripleohelper . __name__ , resource_name ) if filename is not None : resource_filename = os . path . join ( resource_filename , filename ) return resource_filename
Returns the path of a file installed along with the package
69
10
12,398
def merge ( config ) : repo = config . repo active_branch = repo . active_branch if active_branch . name == "master" : error_out ( "You're already on the master branch." ) if repo . is_dirty ( ) : error_out ( 'Repo is "dirty". ({})' . format ( ", " . join ( [ repr ( x . b_path ) for x in repo . index . diff ( None ) ] ) ) ) branch_name = active_branch . name state = read ( config . configfile ) origin_name = state . get ( "ORIGIN_NAME" , "origin" ) upstream_remote = None for remote in repo . remotes : if remote . name == origin_name : upstream_remote = remote break if not upstream_remote : error_out ( "No remote called {!r} found" . format ( origin_name ) ) repo . heads . master . checkout ( ) upstream_remote . pull ( repo . heads . master ) repo . git . merge ( branch_name ) repo . git . branch ( "-d" , branch_name ) success_out ( "Branch {!r} deleted." . format ( branch_name ) ) info_out ( "NOW, you might want to run:\n" ) info_out ( "git push origin master\n\n" ) push_for_you = input ( "Run that push? [Y/n] " ) . lower ( ) . strip ( ) != "n" if push_for_you : upstream_remote . push ( "master" ) success_out ( "Current master pushed to {}" . format ( upstream_remote . name ) )
Merge the current branch into master .
365
8
12,399
def chord_task ( * args , * * kwargs ) : given_backend = kwargs . get ( u'backend' , None ) if not isinstance ( given_backend , ChordableDjangoBackend ) : kwargs [ u'backend' ] = ChordableDjangoBackend ( kwargs . get ( 'app' , current_app ) ) return task ( * args , * * kwargs )
Override of the default task decorator to specify use of this backend .
99
16