idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
4,000
def forkexec_pty ( argv , env = None , size = None ) : child_pid , child_fd = pty . fork ( ) if child_pid == 0 : os . closerange ( 3 , MAXFD ) environ = os . environ . copy ( ) if env is not None : environ . update ( env ) os . execve ( argv [ 0 ] , argv , environ ) if size is None : try : size = get_terminal_size ( 1 ...
Fork a child process attached to a pty .
4,001
def get_ancestor_processes():
    """Return the executables of all ancestor processes.

    Results are cached in the module-level _ANCESTOR_PROCESSES list, so the
    process tree is only walked once. Requires psutil; without it (or on a
    second call) the cached list is returned unchanged.
    """
    if _ANCESTOR_PROCESSES or psutil is None:
        return _ANCESTOR_PROCESSES
    current = psutil.Process(os.getpid())
    while current.parent() is not None:
        try:
            _ANCESTOR_PROCESSES.append(current.parent().exe())
            current = current.parent()
        except psutil.Error:
            # A parent may have exited or be inaccessible; stop walking.
            break
    return _ANCESTOR_PROCESSES
Get a list of the executables of all ancestor processes .
4,002
def get_default_shell ( environ = None , fallback = _UNSPECIFIED ) : if environ is None : environ = os . environ if "PIAS_OPT_SHELL" in environ : return environ [ "PIAS_OPT_SHELL" ] shells = [ ] for filename in ( environ . get ( "SHELL" ) , "bash" , "sh" ) : if filename is not None : filepath = find_executable ( filena...
Get the user's default shell program.
4,003
def get_default_terminal ( environ = None , fallback = _UNSPECIFIED ) : if environ is None : environ = os . environ if "PIAS_OPT_TERMINAL" in environ : return environ [ "PIAS_OPT_TERMINAL" ] terminals = [ ] colorterm = environ . get ( "COLORTERM" ) for filename in ( colorterm , "gnome-terminal" , "konsole" , "xterm" ) ...
Get the user's default terminal program.
4,004
def get_pias_script ( environ = None ) : if os . path . basename ( sys . argv [ 0 ] ) == "pias" : return sys . argv [ 0 ] filepath = find_executable ( "pias" , environ ) if filepath is not None : return filepath filepath = os . path . join ( os . path . dirname ( __file__ ) , "__main__.py" ) if os . path . exists ( fil...
Get the path to the playitagainsam command - line script .
4,005
def airwires ( board , showgui = 0 ) : 'search for airwires in eagle board' board = Path ( board ) . expand ( ) . abspath ( ) file_out = tempfile . NamedTemporaryFile ( suffix = '.txt' , delete = 0 ) file_out . close ( ) ulp = ulp_templ . replace ( 'FILE_NAME' , file_out . name ) file_ulp = tempfile . NamedTemporaryFil...
search for airwires in eagle board
4,006
def _initialize ( self , con ) : if self . initialized : return SQLite3Database ( ) . _initialize ( con ) cur = con . execute ( 'SELECT COUNT(*) FROM sqlite_master WHERE name="reaction"' ) if cur . fetchone ( ) [ 0 ] == 0 : for init_command in init_commands : con . execute ( init_command ) con . commit ( ) self . initi...
Set up tables in SQL
4,007
def write_publication ( self , values ) : con = self . connection or self . _connect ( ) self . _initialize ( con ) cur = con . cursor ( ) values = ( values [ 'pub_id' ] , values [ 'title' ] , json . dumps ( values [ 'authors' ] ) , values [ 'journal' ] , values [ 'volume' ] , values [ 'number' ] , values [ 'pages' ] ,...
Write publication info to db
4,008
def write ( self , values , data = None ) : con = self . connection or self . _connect ( ) self . _initialize ( con ) cur = con . cursor ( ) pub_id = values [ 'pub_id' ] ase_ids = values [ 'ase_ids' ] energy_corrections = values [ 'energy_corrections' ] if ase_ids is not None : check_ase_ids ( values , ase_ids ) else :...
Write reaction info to db file
4,009
def update ( self , id , values , key_names = 'all' ) : con = self . connection or self . _connect ( ) self . _initialize ( con ) cur = con . cursor ( ) pub_id = values [ 'pub_id' ] ase_ids = values [ 'ase_ids' ] energy_corrections = values [ 'energy_corrections' ] if ase_ids is not None : check_ase_ids ( values , ase_...
Update reaction info for a selected row
4,010
def get_last_id(self, cur, table='reaction'):
    """Return the id of the last row written to *table*.

    Looks up the AUTOINCREMENT counter in sqlite_sequence; that table only
    gets an entry for *table* once a row has been inserted, so 0 is
    returned for an empty table.

    Parameters
    ----------
    cur : sqlite3 cursor used to run the lookup.
    table : name of the AUTOINCREMENT table to inspect.
    """
    # The table name is stored as a value in sqlite_sequence's 'name'
    # column, so it can be bound as a parameter instead of being
    # string-formatted into the SQL (avoids injection / quoting issues).
    cur.execute("SELECT seq FROM sqlite_sequence WHERE name=?", (table,))
    result = cur.fetchone()
    # Avoid shadowing the builtin id(); 0 means no rows written yet.
    return result[0] if result is not None else 0
Get the id of the last written row in table
4,011
def read_wv_master_from_array ( master_table , lines = 'brightest' , debugplot = 0 ) : if lines not in [ 'brightest' , 'all' ] : raise ValueError ( 'Unexpected lines=' + str ( lines ) ) if master_table . ndim == 1 : wv_master = master_table else : wv_master_all = master_table [ : , 0 ] if master_table . shape [ 1 ] == ...
read arc line wavelengths from numpy array
4,012
def read_wv_master_file ( wv_master_file , lines = 'brightest' , debugplot = 0 ) : if lines not in [ 'brightest' , 'all' ] : raise ValueError ( 'Unexpected lines=' + str ( lines ) ) master_table = np . genfromtxt ( wv_master_file ) wv_master = read_wv_master_from_array ( master_table , lines ) if abs ( debugplot ) >= 1...
read arc line wavelengths from external file .
4,013
def wvcal_spectrum ( sp , fxpeaks , poly_degree_wfit , wv_master , wv_ini_search = None , wv_end_search = None , wvmin_useful = None , wvmax_useful = None , geometry = None , debugplot = 0 ) : if len ( fxpeaks ) <= poly_degree_wfit : print ( ">>> Warning: not enough lines to fit spectrum" ) return None naxis1 = sp . sh...
Execute wavelength calibration of a spectrum using fixed line peaks .
4,014
def get_thumbnail(file_, name):
    """get_thumbnail variant that resolves *name* through the aliases
    declared in settings.OPTIONS_DICT before delegating."""
    opt = copy(settings.OPTIONS_DICT[name])
    geometry = opt.pop('geometry')
    return original_get_thumbnail(file_, geometry, **opt)
get_thumbnail version that uses aliasses defined in THUMBNAIL_OPTIONS_DICT
4,015
def bitset ( name , members , base = bases . BitSet , list = False , tuple = False ) : if not name : raise ValueError ( 'empty bitset name: %r' % name ) if not hasattr ( members , '__getitem__' ) or not hasattr ( members , '__len__' ) : raise ValueError ( 'non-sequence bitset members: %r' % members ) if not len ( membe...
Return a new bitset class with given name and members .
4,016
def _extrapolation ( self , extrapolate ) : modes = [ 'extrapolate' , 'raise' , 'const' , 'border' ] if extrapolate not in modes : msg = 'invalid extrapolation mode {}' . format ( extrapolate ) raise ValueError ( msg ) if extrapolate == 'raise' : self . bounds_error = True self . extrapolate = False else : self . extra...
Check permitted values of extrapolation.
4,017
def _create_h ( x ) : h = np . zeros_like ( x ) h [ : - 1 ] = x [ 1 : ] - x [ : - 1 ] h [ - 1 ] = h [ - 2 ] return h
increase between samples
4,018
def _eval ( self , v , in_bounds , der ) : result = np . zeros_like ( v , dtype = 'float' ) x_indices = np . searchsorted ( self . _x , v , side = 'rigth' ) ids = x_indices [ in_bounds ] - 1 u = v [ in_bounds ] - self . _x [ ids ] result [ in_bounds ] = self . _poly_eval ( u , ids , der ) return result
Eval polynomial inside bounds .
4,019
def _extrapolate ( self , result , v , below_bounds , above_bounds , der ) : if self . extrapolate_mode == 'const' : fill_b = fill_a = self . fill_value elif self . extrapolate_mode == 'border' : fill_b = self . _poly_eval ( 0 , 0 , der ) fill_a = self . _poly_eval ( 0 , - 1 , der ) elif self . extrapolate_mode == 'ext...
Extrapolate result based on extrapolation mode .
4,020
def _check_bounds ( self , v ) : below_bounds = v < self . _x [ 0 ] above_bounds = v > self . _x [ - 1 ] if self . bounds_error and below_bounds . any ( ) : raise ValueError ( "A value in x_new is below the interpolation " "range." ) if self . bounds_error and above_bounds . any ( ) : raise ValueError ( "A value in x_n...
Check which values are out of bounds .
4,021
def filtmask ( sp , fmin = 0.02 , fmax = 0.15 , debugplot = 0 ) : xf = np . fft . fftfreq ( sp . size ) yf = np . fft . fft ( sp ) if abs ( debugplot ) in ( 21 , 22 ) : iok = np . where ( xf > 0 ) ximplotxy ( xf [ iok ] , yf [ iok ] . real , plottype = 'loglog' , xlabel = 'frequency' , ylabel = 'power' , title = 'befor...
Filter spectrum in Fourier space and apply cosine bell .
4,022
def cosinebell(n, fraction):
    """Return a cosine bell spanning n pixels.

    A fraction of pixels at each end is smoothly tapered from 0 up to 1
    with a raised-cosine profile; the middle stays at 1.
    """
    mask = np.ones(n)
    nmasked = int(fraction * n)
    for j in range(nmasked):
        level = 0.5 * (1 - np.cos(np.pi * float(j) / float(nmasked)))
        # Mirror the taper onto both ends of the mask.
        mask[j] = level
        mask[n - j - 1] = level
    return mask
Return a cosine bell spanning n pixels masking a fraction of pixels
4,023
def convolve_comb_lines ( lines_wave , lines_flux , sigma , crpix1 , crval1 , cdelt1 , naxis1 ) : xwave = crval1 + ( np . arange ( naxis1 ) + 1 - crpix1 ) * cdelt1 spectrum = np . zeros ( naxis1 ) for wave , flux in zip ( lines_wave , lines_flux ) : sp_tmp = gauss_box_model ( x = xwave , amplitude = flux , mean = wave ...
Convolve a set of lines of known wavelengths and flux .
4,024
def _split_refextract_authors_str ( authors_str ) : author_seq = ( x . strip ( ) for x in RE_SPLIT_AUTH . split ( authors_str ) if x ) res = [ ] current = '' for author in author_seq : if not isinstance ( author , six . text_type ) : author = six . text_type ( author . decode ( 'utf8' , 'ignore' ) ) author = re . sub (...
Extract author names out of refextract authors output .
4,025
def _set_publication_info_field ( self , field_name , value ) : self . _ensure_reference_field ( 'publication_info' , { } ) self . obj [ 'reference' ] [ 'publication_info' ] [ field_name ] = value
Put a value in the publication info of the reference .
4,026
def set_pubnote ( self , pubnote ) : if 'publication_info' in self . obj . get ( 'reference' , { } ) : self . add_misc ( u'Additional pubnote: {}' . format ( pubnote ) ) return if self . RE_VALID_PUBNOTE . match ( pubnote ) : pubnote = split_pubnote ( pubnote ) pubnote = convert_old_publication_info_to_new ( [ pubnote ...
Parse pubnote and populate correct fields .
4,027
def _add_uid ( self , uid , skip_handle = False ) : uid = uid or '' if is_arxiv ( uid ) : self . _ensure_reference_field ( 'arxiv_eprint' , normalize_arxiv ( uid ) ) elif idutils . is_doi ( uid ) : self . _ensure_reference_field ( 'dois' , [ ] ) self . obj [ 'reference' ] [ 'dois' ] . append ( idutils . normalize_doi (...
Add unique identifier in correct field .
4,028
def set_page_artid ( self , page_start = None , page_end = None , artid = None ) : if page_end and not page_start : raise ValueError ( 'End_page provided without start_page' ) self . _ensure_reference_field ( 'publication_info' , { } ) publication_info = self . obj [ 'reference' ] [ 'publication_info' ] if page_start :...
Add artid start end pages to publication info of a reference .
4,029
def separate_globs(globs):
    """Split glob patterns into (exclude, include) lists.

    A leading '!' marks a pattern as an exclusion; the marker itself is
    stripped from the returned pattern.
    """
    exclude = [p[1:] for p in globs if p.startswith("!")]
    include = [p for p in globs if not p.startswith("!")]
    return (exclude, include)
Separate include and exclude globs .
4,030
def parse_glob(path, included):
    """Expand *path* as a recursive glob and return the new absolute paths.

    Matches already present in *included* are skipped; *included* is
    extended in place with the paths that are returned.
    """
    absolutes = (os.path.abspath(m) for m in glob.glob(path, recursive=True))
    fresh = [p for p in absolutes if p not in included]
    included += fresh
    return fresh
Parse a glob .
4,031
def find_base(path):
    """Find the base directory of a glob pattern.

    The non-wildcard prefix is taken from the module-level _pattern regex;
    when it does not match, the current directory is used. A prefix ending
    in a path separator is already a directory, otherwise its parent is
    returned.
    """
    matched = _pattern.match(path)
    base = matched.group(0) if matched else "./"
    absolute = os.path.abspath(base)
    if base.endswith('/') or base.endswith('\\'):
        return absolute
    return os.path.dirname(absolute)
Find the base of a glob .
4,032
def fun_wv(xchannel, crpix1, crval1, cdelt1):
    """Compute wavelengths from channel numbers with a linear dispersion:
    wavelength = crval1 + (xchannel - crpix1) * cdelt1."""
    return crval1 + (xchannel - crpix1) * cdelt1
Compute wavelengths from channels .
4,033
def update_poly_wlcalib ( coeff_ini , coeff_residuals , naxis1_ini , debugplot ) : coeff = [ ] for fdum in coeff_ini : coeff . append ( fdum ) poly_ini = np . polynomial . Polynomial ( coeff ) poldeg_wlcalib = len ( coeff ) - 1 if len ( coeff_residuals ) == 0 : return poly_ini . coef else : if np . count_nonzero ( poly...
Update wavelength calibration polynomial using the residuals fit .
4,034
def generate_docs ( klass ) : import numina . types . datatype attrh = ( 'Attributes\n' '----------\n' ) doc = getattr ( klass , '__doc__' , None ) if doc is None or doc == '' : doc = "%s documentation." % klass . __name__ if len ( klass . stored ( ) ) : doc = doc + '\n\n' + attrh skeys = sorted ( klass . stored ( ) . ...
Add documentation to generated classes
4,035
def reinverted(n, r):
    """Return the integer with the bits of n reversed and inverted,
    assuming a bit length of r."""
    out = 0
    mask = 1 << (r - 1)
    while n:
        if not n & 1:
            out |= mask
        mask >>= 1
        n >>= 1
    # Positions not consumed above correspond to implicit leading zeros
    # of n, which invert to a run of ones at the low end of the result.
    if mask:
        out |= (mask << 1) - 1
    return out
Integer with reversed and inverted bits of n assuming bit length r .
4,036
def rank(items, sequence=string.ascii_lowercase):
    """Rank *items* from *sequence* in colexicographic order.

    Each member of sequence contributes bit 2**position when present in
    items, producing an integer bitmask rank.
    """
    members = set(items)
    result = 0
    for position, symbol in enumerate(sequence):
        if symbol in members:
            result |= 1 << position
    return result
Rank items from sequence in colexicographical order .
4,037
def unrank(n, sequence=string.ascii_lowercase):
    """Unrank *n* from *sequence* in colexicographic order, returning the
    members whose bit positions are set in n (via the module-level
    indexes helper)."""
    return [sequence[i] for i in indexes(n)]
Unrank n from sequence in colexicographical order .
4,038
def background_estimator ( bdata ) : crowded = False std = numpy . std ( bdata ) std0 = std mean = bdata . mean ( ) while True : prep = len ( bdata ) numpy . clip ( bdata , mean - 3 * std , mean + 3 * std , out = bdata ) if prep == len ( bdata ) : if std < 0.8 * std0 : crowded = True break std = numpy . std ( bdata ) m...
Estimate the background in a 2D array
4,039
def create_background_map ( data , bsx , bsy ) : sx , sy = data . shape mx = sx // bsx my = sy // bsy comp = [ ] rms = [ ] sp = numpy . split ( data , numpy . arange ( bsx , sx , bsx ) , axis = 0 ) for s in sp : rp = numpy . split ( s , numpy . arange ( bsy , sy , bsy ) , axis = 1 ) for r in rp : b , r = background_est...
Create a background map with a given mesh size
4,040
def _read_columns_file ( f ) : try : columns = json . loads ( open ( f , 'r' ) . read ( ) , object_pairs_hook = collections . OrderedDict ) except Exception as err : raise InvalidColumnsFileError ( "There was an error while reading {0}: {1}" . format ( f , err ) ) if '__options' in columns : del columns [ '__options' ]...
Return the list of column queries read from the given JSON file .
4,041
def _table_to_csv(table_):
    """Return *table_* serialized to a CSV string via _write_csv."""
    buffer_ = cStringIO.StringIO()
    try:
        _write_csv(buffer_, table_)
        return buffer_.getvalue()
    finally:
        # Always release the in-memory buffer, even if writing fails.
        buffer_.close()
Return the given table converted to a CSV string .
4,042
def table ( dicts , columns , csv = False , pretty = False ) : if isinstance ( columns , basestring ) : columns = _read_columns_file ( columns ) for column in columns . values ( ) : if "pattern" in column : assert "pattern_path" not in column , ( 'A column must have either a "pattern" or a "pattern_path"' "but not both...
Query a list of dicts with a list of queries and return a table .
4,043
def query ( pattern_path , dict_ , max_length = None , strip = False , case_sensitive = False , unique = False , deduplicate = False , string_transformations = None , hyperlink = False , return_multiple_columns = False ) : if string_transformations is None : string_transformations = [ ] if max_length : string_transform...
Query the given dict with the given pattern path and return the result .
4,044
def get_reactions ( columns = 'all' , n_results = 20 , write_db = False , ** kwargs ) : if write_db or columns == 'all' : columns = all_columns [ 'reactions' ] queries = { } for key , value in kwargs . items ( ) : key = map_column_names ( key ) if key == 'distinct' : if value in [ True , 'True' , 'true' ] : queries . u...
Get reactions from server
4,045
def create_recipe_file_logger(logger, logfile, logformat):
    """Build a file handler that redirects recipe log records to *logfile*.

    The handler truncates the file, accepts DEBUG and above, and formats
    records with *logformat*.

    NOTE(review): the *logger* argument is accepted but never used here —
    confirm callers attach the returned handler themselves.
    """
    handler = logging.FileHandler(logfile, mode='w')
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter(logformat))
    return handler
Redirect Recipe log messages to a file .
4,046
def run_recipe ( recipe , task , rinput , workenv , logger_control ) : recipe_logger = logging . getLogger ( logger_control [ 'default' ] ) if logger_control [ 'enabled' ] : logfile = os . path . join ( workenv . resultsdir , logger_control [ 'logfile' ] ) logformat = logger_control [ 'format' ] _logger . debug ( 'crea...
Recipe execution mode of numina .
4,047
def run_recipe_timed ( task , recipe , rinput ) : _logger . info ( 'running recipe' ) now1 = datetime . datetime . now ( ) task . state = 1 task . time_start = now1 result = recipe ( rinput ) _logger . info ( 'result: %r' , result ) task . result = result now2 = datetime . datetime . now ( ) task . state = 2 task . tim...
Run the recipe and count the time it takes .
4,048
def stop_db_session(exc=None):
    """Stop the active db_session, if any.

    When *exc* is given, its exception info is forwarded to the session's
    __exit__ so the session can roll back appropriately.
    """
    if not has_db_session():
        return
    exc_type = tb = None
    if exc:
        exc_type, exc, tb = get_exc_info(exc)
    db_session.__exit__(exc_type, exc, tb)
Stops the last db_session
4,049
def get_path(dest, file, cwd=None):
    """Resolve the writing path of *file* under destination *dest*.

    *dest* may be a callable taking the file, or a directory path
    (absolute, or relative to *cwd* / the file's own cwd). The file's
    path relative to its base is preserved under the destination.
    """
    if callable(dest):
        return dest(file)
    base = cwd if cwd else file.cwd
    target = dest if os.path.isabs(dest) else os.path.join(base, dest)
    relative = os.path.relpath(file.path, file.base)
    return os.path.join(target, relative)
Get the writing path of a file .
4,050
def write_file(path, contents):
    """Write *contents* to a local file, creating parent directories.

    Fix: when *path* has no directory component, os.path.dirname returns
    '' and os.makedirs('') raises — only create directories when there is
    something to create.
    """
    directory = os.path.dirname(path)
    if directory:
        os.makedirs(directory, exist_ok=True)
    with open(path, "w") as file:
        file.write(contents)
Write contents to a local file .
4,051
def get_pub_id ( title , authors , year ) : "construct publication id" if len ( title . split ( ' ' ) ) > 1 and title . split ( ' ' ) [ 0 ] . lower ( ) in [ 'the' , 'a' ] : _first_word = title . split ( ' ' ) [ 1 ] . split ( '_' ) [ 0 ] else : _first_word = title . split ( ' ' ) [ 0 ] . split ( '_' ) [ 0 ] pub_id = aut...
construct publication id
4,052
def extract_atoms ( molecule ) : if molecule == '' : return molecule try : return float ( molecule ) except BaseException : pass atoms = '' if not molecule [ 0 ] . isalpha ( ) : i = 0 while not molecule [ i ] . isalpha ( ) : i += 1 prefactor = float ( molecule [ : i ] ) if prefactor < 0 : prefactor = abs ( prefactor ) ...
Return a string with all atoms in molecule
4,053
def check_reaction ( reactants , products ) : reactant_list = [ reactant . split ( '@' ) [ 0 ] . strip ( 'star' ) . strip ( 'gas' ) for reactant in reactants ] product_list = [ product . split ( '@' ) [ 0 ] . strip ( 'star' ) . strip ( 'gas' ) for product in products ] reactant_atoms = [ extract_atoms ( reactant ) for ...
Check the stoichiometry and format of the chemical reaction used for the folder structure: list of reactants -> list of products.
4,054
def check_section(node, section, keys=None):
    """Validate that every name in *keys* is present in *node*.

    Raises ValueError naming the first missing key and the section it
    belongs to; a falsy *keys* means nothing to check.
    """
    for key in keys or ():
        if key not in node:
            raise ValueError('Missing key %r inside %r node' % (key, section))
Validate keys in a section
4,055
def drp_load(package, resource, confclass=None):
    """Load the DRPs from a resource file shipped inside *package*."""
    raw = pkgutil.get_data(package, resource)
    return drp_load_data(package, raw, confclass=confclass)
Load the DRPS from a resource file .
4,056
def drp_load_data(package, data, confclass=None):
    """Build an instrument DRP from raw YAML *data*.

    When the DRP declares no version ('undefined'), fall back to the
    owning package's __version__ attribute.
    """
    instrument = load_instrument(package, yaml.safe_load(data),
                                 confclass=confclass)
    if instrument.version == 'undefined':
        owner = importlib.import_module(package)
        instrument.version = getattr(owner, '__version__', 'undefined')
    return instrument
Load the DRPS from data .
4,057
def load_modes ( node ) : if isinstance ( node , list ) : values = [ load_mode ( child ) for child in node ] keys = [ mode . key for mode in values ] return dict ( zip ( keys , values ) ) elif isinstance ( node , dict ) : values = { key : load_mode ( child ) for key , child in node } return values else : raise NotImple...
Load all observing modes
4,058
def load_mode(node):
    """Build an ObservingMode from a mapping *node* and attach its
    validator, builder and tagger (in that order)."""
    mode = ObservingMode()
    mode.__dict__.update(node)
    for attach in (load_mode_validator, load_mode_builder, load_mode_tagger):
        attach(mode, node)
    return mode
Load one observing mode
4,059
def load_mode_tagger ( obs_mode , node ) : ntagger = node . get ( 'tagger' ) if ntagger is None : pass elif isinstance ( ntagger , list ) : def full_tagger ( obsres ) : return get_tags_from_full_ob ( obsres , reqtags = ntagger ) obs_mode . tagger = full_tagger elif isinstance ( ntagger , six . string_types ) : obs_mode...
Load observing mode OB tagger
4,060
def load_mode_builder ( obs_mode , node ) : nval1 = node . get ( 'builder' ) if nval1 is not None : if isinstance ( nval1 , str ) : newmethod = import_object ( nval1 ) obs_mode . build_ob = newmethod . __get__ ( obs_mode ) else : raise TypeError ( 'builder must be None or a string' ) else : nval2 = node . get ( 'builde...
Load observing mode OB builder
4,061
def load_mode_validator(obs_mode, node):
    """Attach the observing-mode validator named in *node*, if any.

    A string value is resolved with import_object; None leaves the mode
    untouched; anything else is a TypeError.
    """
    spec = node.get('validator')
    if spec is not None:
        if not isinstance(spec, str):
            raise TypeError('validator must be None or a string')
        obs_mode.validator = import_object(spec)
    return obs_mode
Load observing mode validator
4,062
def frommembers(cls, members):
    """Series from an iterable of member iterables."""
    return cls.frombitsets(cls.BitSet.frommembers(m) for m in members)
Series from iterable of member iterables .
4,063
def frombools(cls, bools):
    """Series from an iterable of boolean-evaluable iterables."""
    return cls.frombitsets(cls.BitSet.frombools(b) for b in bools)
Series from iterable of boolean evaluable iterables .
4,064
def frombits(cls, bits):
    """Series from binary string arguments."""
    return cls.frombitsets(cls.BitSet.frombits(b) for b in bits)
Series from binary string arguments .
4,065
def fromints(cls, ints):
    """Series from integer rank arguments."""
    return cls.frombitsets(cls.BitSet.fromint(i) for i in ints)
Series from integer rank arguments .
4,066
def index_sets(self, as_set=False):
    """Return the series as a list of per-member index collections.

    Each element holds the set bit positions of one member bitset, as a
    frozenset when *as_set* is true, otherwise as a tuple.
    """
    wrap = frozenset if as_set else tuple
    return [wrap(member.iter_set()) for member in self]
Return the series as list of index set tuples .
4,067
def append_arrays ( many , single ) : assert np . ndim ( single ) == 1 diff = single . shape [ 0 ] - many . shape [ 0 ] if diff < 0 : single = np . pad ( single , ( 0 , - diff ) , 'constant' , constant_values = np . nan ) elif diff > 0 : many = np . pad ( many , ( ( 0 , diff ) , ) , 'constant' , constant_values = np . ...
Append an array to another padding with NaNs for constant length .
4,068
def _get_locations ( self , calc ) : return ( self . _location_in ( calc . profile ) , self . _location_out ( calc . profile ) )
Locate locations within the profile .
4,069
def find_pix_borders ( sp , sought_value ) : if sp . ndim != 1 : raise ValueError ( 'Unexpected number of dimensions:' , sp . ndim ) naxis1 = len ( sp ) jborder_min = - 1 jborder_max = naxis1 if not np . alltrue ( sp == sought_value ) : while True : jborder_min += 1 if sp [ jborder_min ] != sought_value : break while T...
Find useful region of a given spectrum
4,070
def fix_pix_borders ( image2d , nreplace , sought_value , replacement_value ) : naxis2 , naxis1 = image2d . shape for i in range ( naxis2 ) : jborder_min , jborder_max = find_pix_borders ( image2d [ i , : ] , sought_value = sought_value ) if jborder_min != - 1 : j1 = jborder_min j2 = min ( j1 + nreplace , naxis1 ) imag...
Replace a few pixels at the borders of each spectrum .
4,071
def define_mask_borders ( image2d , sought_value , nadditional = 0 ) : naxis2 , naxis1 = image2d . shape mask2d = np . zeros ( ( naxis2 , naxis1 ) , dtype = bool ) borders = [ ] for i in range ( naxis2 ) : jborder_min , jborder_max = find_pix_borders ( image2d [ i , : ] , sought_value = sought_value ) borders . append ...
Generate mask avoiding undesired values at the borders .
4,072
def update_features(self, poly):
    """Assign each feature's wavelength by evaluating *poly* at its xpos."""
    for item in self.features:
        item.wavelength = poly(item.xpos)
Evaluate wavelength at xpos using the provided polynomial .
4,073
def dataframe_from_list(values):
    """Build a DataFrame object from *values*.

    A string is treated as a filename, an HDUList as an in-memory frame;
    anything else yields None.
    """
    if isinstance(values, six.string_types):
        return DataFrame(filename=values)
    if isinstance(values, fits.HDUList):
        return DataFrame(frame=values)
    return None
Build a DataFrame object from a list .
4,074
def obsres_from_dict ( values ) : obsres = ObservationResult ( ) ikey = 'frames' if 'images' in values : ikey = 'images' obsres . id = values . get ( 'id' , 1 ) obsres . mode = values [ 'mode' ] obsres . instrument = values [ 'instrument' ] obsres . configuration = values . get ( 'configuration' , 'default' ) obsres . ...
Build a ObservationResult object from a dictionary .
4,075
def get_sample_frame(self):
    """Open and return the first available image of the observation result.

    Frames take precedence over stored results; None when neither holds
    anything.
    """
    for source in (self.frames, self.results.values()):
        for image in source:
            return image.open()
    return None
Return first available image in observation result
4,076
def fowler_array ( fowlerdata , ti = 0.0 , ts = 0.0 , gain = 1.0 , ron = 1.0 , badpixels = None , dtype = 'float64' , saturation = 65631 , blank = 0 , normalize = False ) : import numina . array . _nirproc as _nirproc if gain <= 0 : raise ValueError ( "invalid parameter, gain <= 0.0" ) if ron <= 0 : raise ValueError ( ...
Loop over the first axis applying Fowler processing .
4,077
def ramp_array ( rampdata , ti , gain = 1.0 , ron = 1.0 , badpixels = None , dtype = 'float64' , saturation = 65631 , blank = 0 , nsig = None , normalize = False ) : import numina . array . _nirproc as _nirproc if ti <= 0 : raise ValueError ( "invalid parameter, ti <= 0.0" ) if gain <= 0 : raise ValueError ( "invalid p...
Loop over the first axis applying ramp processing .
4,078
def accept_freeware_license():
    """Drive Eagle's freeware-license dialog with xdotool key presses.

    Version 6.6.x needs three TABs to reach the accept control, other
    observed versions need two; two SPACE presses then confirm.
    """
    tabs = 3 if version().startswith('6.6.') else 2
    for _ in range(tabs):
        EasyProcess('xdotool key KP_Tab').call()
        time.sleep(0.5)
    EasyProcess('xdotool key KP_Space').call()
    time.sleep(0.5)
    EasyProcess('xdotool key KP_Space').call()
Different Eagle versions need a different TAB count: 6.5 -> 2, 6.6 -> 3, 7.4 -> 2.
4,079
def fully_qualified_name ( obj , sep = '.' ) : if inspect . isclass ( obj ) : return obj . __module__ + sep + obj . __name__ else : return obj . __module__ + sep + obj . __class__ . __name__
Return fully qualified name from object
4,080
def fun_residuals(params, xnor, ynor, w, bbox, k, ext):
    """Compute the fit residual of an LSQ spline whose interior knots are
    the current parameter values."""
    knots = [p.value for p in params.values()]
    spline = LSQUnivariateSpline(x=xnor, y=ynor, t=knots, w=w, bbox=bbox,
                                 k=k, ext=ext, check_finite=False)
    return spline.get_residual()
Compute fit residuals
4,081
def parse_as_yaml(strdict):
    """Parse a dictionary of strings as if YAML reads it.

    Items are rendered into a YAML flow mapping ('{k: v, ...}') and parsed
    back, so values get YAML's scalar typing (ints, bools, ...).

    Fix: yaml.load() without an explicit Loader is unsafe on untrusted
    input and is rejected by modern PyYAML (>= 6); yaml.safe_load parses
    the same plain mapping.
    """
    interm = ""
    for key, val in strdict.items():
        interm = "%s: %s, %s" % (key, val, interm)
    return yaml.safe_load('{%s}' % interm)
Parse a dictionary of strings as if yaml reads it
4,082
def folder2db ( folder_name , debug , energy_limit , skip_folders , goto_reaction ) : folder_name = folder_name . rstrip ( '/' ) skip = [ ] for s in skip_folders . split ( ', ' ) : for sk in s . split ( ',' ) : skip . append ( sk ) pub_id = _folder2db . main ( folder_name , debug , energy_limit , skip , goto_reaction )...
Read folder and collect data in local sqlite3 database
4,083
def db2server(dbfile, block_size, dbuser, dbpassword):
    """Transfer everything in the local database to the Catalysis Hub
    server: reactions, ASE structures, publications and reaction systems."""
    _db2server.main(
        dbfile,
        write_reaction=True,
        write_ase=True,
        write_publication=True,
        write_reaction_system=True,
        block_size=block_size,
        start_block=0,
        user=dbuser,
        password=dbpassword,
    )
Transfer data from local database to Catalysis Hub server
4,084
def reactions ( columns , n_results , write_db , queries ) : if not isinstance ( queries , dict ) : query_dict = { } for q in queries : key , value = q . split ( '=' ) if key == 'distinct' : if value in [ 'True' , 'true' ] : query_dict . update ( { key : True } ) continue try : value = int ( value ) query_dict . update...
Search for reactions
4,085
def publications ( columns , n_results , queries ) : if not isinstance ( queries , dict ) : query_dict = { } for q in queries : key , value = q . split ( '=' ) if key == 'distinct' : if value in [ 'True' , 'true' ] : query_dict . update ( { key : True } ) continue try : value = int ( value ) query_dict . update ( { key...
Search for publications
4,086
def make_folders ( template , custom_base ) : def dict_representer ( dumper , data ) : return dumper . represent_dict ( data . items ( ) ) Dumper . add_representer ( collections . OrderedDict , dict_representer ) if custom_base is None : custom_base = os . path . abspath ( os . path . curdir ) template = custom_base + ...
Create a basic folder tree for dumping DFT calculcations for reaction energies .
4,087
def organize ( ** kwargs ) : if len ( kwargs [ 'adsorbates' ] ) == 0 : print ( ) print ( " Enter adsorbates like so --adsorbates CO,O,CO2" ) print ( " [Comma-separated list without spaces.]" ) kwargs [ 'adsorbates' ] = list ( map ( lambda x : ( '' . join ( sorted ( string2symbols ( x ) ) ) ) , kwargs [ ...
Read reactions from non - organized folder
4,088
def initCurses ( self ) : curses . noecho ( ) curses . cbreak ( ) curses . curs_set ( 0 ) curses . start_color ( ) curses . use_default_colors ( ) curses . init_pair ( 1 , curses . COLOR_WHITE , - 1 ) curses . init_pair ( 2 , curses . COLOR_YELLOW , - 1 ) curses . init_pair ( 3 , curses . COLOR_MAGENTA , - 1 ) curses ....
Set up screen properties
4,089
def patchCurses(self):
    """Work around Python 3.4.0's curses bug where addch swaps x and y.

    On exactly 3.4.0 the wrapper flips the arguments back; everywhere
    else the window's addch is used directly.
    """
    if sys.version_info[:3] == (3, 4, 0):
        self.addchar = lambda y, x, *args: self.win.addch(x, y, *args)
    else:
        self.addchar = self.win.addch
Fix curses addch function for python 3 . 4 . 0
4,090
def splash ( self ) : dirname = os . path . split ( os . path . abspath ( __file__ ) ) [ 0 ] try : splash = open ( os . path . join ( dirname , "splash" ) , "r" ) . readlines ( ) except IOError : return width = len ( max ( splash , key = len ) ) y = int ( self . y_grid / 2 ) - len ( splash ) x = int ( self . x_grid / 2...
Draw splash screen
4,091
def drawHUD ( self ) : self . win . move ( self . height - 2 , self . x_pad ) self . win . clrtoeol ( ) self . win . box ( ) self . addstr ( 2 , self . x_pad + 1 , "Population: %i" % len ( self . grid ) ) self . addstr ( 3 , self . x_pad + 1 , "Generation: %s" % self . current_gen ) self . addstr ( 3 , self . x_grid - ...
Draw information on population size and current generation
4,092
def drawGrid ( self ) : for cell in self . grid : y , x = cell y += self . y_pad x += self . x_pad if self . traditional : sprite = '.' color = curses . color_pair ( 4 ) else : sprite = self . char [ self . grid [ cell ] - 1 ] color = curses . color_pair ( self . grid [ cell ] ) self . addchar ( y , x , sprite , color ...
Redraw the grid with the new generation
4,093
def nextGen ( self ) : self . current_gen += 1 self . change_gen [ self . current_gen % 3 ] = copy . copy ( self . grid ) grid_cp = copy . copy ( self . grid ) for cell in self . grid : y , x = cell y1 = ( y - 1 ) % self . y_grid y2 = ( y + 1 ) % self . y_grid x1 = ( x - 1 ) % self . x_grid x2 = ( x + 1 ) % self . x_gr...
Decide the fate of the cells
4,094
def countNeighbours ( self , cell ) : count = 0 y , x = cell y = y % self . y_grid x = x % self . x_grid y1 = ( y - 1 ) % self . y_grid y2 = ( y + 1 ) % self . y_grid x1 = ( x - 1 ) % self . x_grid x2 = ( x + 1 ) % self . x_grid cell = y , x for neighbour in product ( [ y1 , y , y2 ] , [ x1 , x , x2 ] ) : if neighbour ...
Return the number of active neighbours within one position of cell
4,095
def initGrid ( self ) : blinker = [ ( 4 , 4 ) , ( 4 , 5 ) , ( 4 , 6 ) ] toad = [ ( 9 , 5 ) , ( 9 , 6 ) , ( 9 , 7 ) , ( 10 , 4 ) , ( 10 , 5 ) , ( 10 , 6 ) ] glider = [ ( 4 , 11 ) , ( 5 , 12 ) , ( 6 , 10 ) , ( 6 , 11 ) , ( 6 , 12 ) ] r_pentomino = [ ( 10 , 60 ) , ( 9 , 61 ) , ( 10 , 61 ) , ( 11 , 61 ) , ( 9 , 62 ) ] self...
Initialise the game grid
4,096
def restart(self):
    """Restart the game from a new generation 0: rebuild the grid, clear
    the window and reset the generation counter."""
    self.initGrid()
    self.win.clear()
    self.current_gen = 1
    # NOTE(review): bare attribute access — looks like a missing call
    # (self.start()); kept as-is to preserve behavior. Confirm whether
    # `start` is a property.
    self.start
Restart the game from a new generation 0
4,097
def end ( self ) : if self . loop : self . restart return self . addstr ( 2 , self . x_grid / 2 - 4 , "GAMEOVER" , 7 ) if self . hud : self . addstr ( 2 , self . x_pad + 13 , len ( self . grid ) , 5 ) self . addstr ( 3 , self . x_pad + 13 , self . current_gen , 5 ) if self . test : exit ( ) while self . state == 'stopp...
Game Finished - Restart or Quit
4,098
def mean(arrays, masks=None, dtype=None, out=None, zeros=None, scales=None,
         weights=None):
    """Combine *arrays* using the mean, honouring masks, offsets, scales
    and weights via generic_combine."""
    method = intl_combine.mean_method()
    return generic_combine(method, arrays, masks=masks, dtype=dtype, out=out,
                           zeros=zeros, scales=scales, weights=weights)
Combine arrays using the mean with masks and offsets .
4,099
def median(arrays, masks=None, dtype=None, out=None, zeros=None, scales=None,
           weights=None):
    """Combine *arrays* using the median, honouring masks, offsets, scales
    and weights via generic_combine."""
    method = intl_combine.median_method()
    return generic_combine(method, arrays, masks=masks, dtype=dtype, out=out,
                           zeros=zeros, scales=scales, weights=weights)
Combine arrays using the median with masks .