idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
16,500
def overwriteComponent ( self , comp , row , col ) : self . _segments [ row ] [ col ] = comp # in case of samplerate change, just always update self . updateCalibration ( )
Overwrites the component at the specficied location with a provided one .
46
17
16,501
def removeLastRow ( self ) : lastrow = self . _segments . pop ( len ( self . _segments ) - 1 ) if len ( lastrow ) > 0 : raise Exception ( "Attempt to remove non-empty stimulus track" )
Removes the last track
53
5
16,502
def removeComponent ( self , row , col ) : self . _segments [ row ] . pop ( col ) # If this row is now empty we should remove it? if self . columnCountForRow ( - 1 ) == 0 : self . removeRow ( len ( self . _segments ) - 1 ) # in case of samplerate change, just always update self . updateCalibration ( )
Removes the component at the given location
86
8
16,503
def indexByComponent ( self , component ) : for row , rowcontents in enumerate ( self . _segments ) : if component in rowcontents : column = rowcontents . index ( component ) return ( row , column )
Returns a location for the given component or None if it is not in the model
50
16
16,504
def traceCount ( self ) : nsegs = sum ( [ len ( track ) for track in self . _segments ] ) if nsegs == 0 : return 0 ntraces = 1 for irow in range ( self . _autoParams . nrows ( ) ) : ntraces = ntraces * self . _autoParams . numSteps ( irow ) return ntraces
The number of unique stimului for this stimulus object
87
10
16,505
def contains ( self , stimtype ) : for track in self . _segments : for component in track : if component . __class__ . __name__ == stimtype : return True return False
Returns whether the specified stimlus type is a component in this stimulus
41
13
16,506
def purgeAutoSelected ( self ) : params = self . _autoParams . allData ( ) for p in params : comps_to_remove = [ ] for comp in p [ 'selection' ] : if self . indexByComponent ( comp ) is None : comps_to_remove . append ( comp ) for orphaned in comps_to_remove : p [ 'selection' ] . remove ( orphaned )
Clears out orphaned auto parameters
92
7
16,507
def expandFunction ( self , func , args = [ ] ) : # initilize array to hold all varied parameters params = self . _autoParams . allData ( ) steps = self . autoParamRanges ( ) ntraces = 1 for p in steps : ntraces = ntraces * len ( p ) varylist = [ [ None for x in range ( len ( params ) ) ] for y in range ( ntraces ) ] x = 1 for iset , step_set in enumerate ( steps ) : for itrace in range ( ntraces ) : idx = ( itrace / x ) % len ( step_set ) varylist [ itrace ] [ iset ] = step_set [ idx ] x = x * len ( step_set ) # now create the stimuli according to steps # go through list of modifing parameters, update this stimulus, # and then save current state to list stim_list = [ ] for itrace in range ( ntraces ) : for ip , param in enumerate ( params ) : for component in param [ 'selection' ] : # print 'setting component {} parameter {} to {}'.format(component.name, param['parameter'], varylist[itrace][ip]) # so I encountered a bug when the parameters were dragged the # pickling/unpickling seems to either make a copy or somehow # otherwise loose connection to the original components # make sure to be setting the components that are in this model. index = self . indexByComponent ( component ) component = self . component ( * index ) component . set ( param [ 'parameter' ] , varylist [ itrace ] [ ip ] ) # copy of current stim state, or go ahead and turn it into a signal? # so then would I want to formulate some doc here as well? stim_list . append ( func ( * args ) ) # now reset the components to start value for ip , param in enumerate ( params ) : for component in param [ 'selection' ] : component . set ( param [ 'parameter' ] , varylist [ 0 ] [ ip ] ) return stim_list
applies the given function to each of this stimulus s memerships when autoparamters are applied
454
20
16,508
def setReorderFunc ( self , func , name = None ) : self . reorder = func self . reorderName = name
Sets the function that reorders the expanded signals of this stimulus
29
13
16,509
def expandedStim ( self ) : logger = logging . getLogger ( 'main' ) logger . debug ( "Generating Expanded Stimulus" ) # 3 loops now -- could be done in one... signals = self . expandFunction ( self . signal ) docs = self . expandFunction ( self . componentDoc ) overloads = [ ] for s , d in zip ( signals , docs ) : d [ 'overloaded_attenuation' ] = s [ 2 ] overloads . append ( s [ 2 ] ) # remove the undesired attenuation argument signals = [ sig [ 0 : 2 ] for sig in signals ] if self . reorder : order = self . reorder ( docs ) signals = [ signals [ i ] for i in order ] docs = [ docs [ i ] for i in order ] return signals , docs , overloads
Apply the autoparameters to this stimulus and return a list of the resulting stimuli a complimentary list of doc dictionaries and a complimentary list of undesired attenuations .
177
33
16,510
def loadFromTemplate ( template , stim = None ) : if stim is None : stim = StimulusModel ( ) stim . setRepCount ( template [ 'reps' ] ) stim . setUserTag ( template . get ( 'user_tag' , '' ) ) # don't set calibration details - this should be the same application wide component_classes = get_stimuli_models ( ) for comp_doc in template [ 'components' ] : comp = get_component ( comp_doc [ 'stim_type' ] , component_classes ) comp . loadState ( comp_doc ) # ignore extra dict entries stim . insertComponent ( comp , * comp_doc [ 'index' ] ) # revert from location based selection to component list autoparams = template [ 'autoparameters' ] for p in autoparams : selection = p [ 'selection' ] component_selection = [ ] for index in selection : component = stim . component ( * index ) component_selection . append ( component ) p [ 'selection' ] = component_selection stim . autoParams ( ) . setParameterList ( autoparams ) stim . setReorderFunc ( order_function ( template [ 'reorder' ] ) , template [ 'reorder' ] ) stim . setStimType ( template [ 'testtype' ] ) return stim
Loads the stimlus to the state provided by a template
285
12
16,511
def duration ( self ) : durs = [ ] for track in self . _segments : durs . append ( sum ( [ comp . duration ( ) for comp in track ] ) ) return max ( durs )
The duration of this stimulus
46
5
16,512
def componentDoc ( self , starttime = True ) : samplerate = self . samplerate ( ) doc_list = [ ] for row , track in enumerate ( self . _segments ) : start_time = 0 for col , component in enumerate ( track ) : info = component . stateDict ( ) info [ 'stim_type' ] = component . name if starttime : info [ 'start_s' ] = start_time info [ 'index' ] = ( row , col ) start_time += info [ 'duration' ] doc_list . append ( info ) return { 'samplerate_da' : samplerate , 'components' : doc_list }
The documentation for the components as a dict
150
8
16,513
def warning ( self ) : signals , docs , overs = self . expandedStim ( ) if np . any ( np . array ( overs ) > 0 ) : msg = 'Stimuli in this test are over the maximum allowable \ voltage output. They will be rescaled with a maximum \ undesired attenuation of {:.2f}dB.' . format ( np . amax ( overs ) ) return msg return 0
Checks Stimulus for any warning conditions
88
8
16,514
def verifyExpanded ( self , samplerate ) : results = self . expandFunction ( self . verifyComponents , args = ( samplerate , ) ) msg = [ x for x in results if x ] if len ( msg ) > 0 : return msg [ 0 ] else : return 0
Checks the expanded parameters for invalidating conditions
62
9
16,515
def verifyComponents ( self , samplerate ) : # flatten list of components components = [ comp for track in self . _segments for comp in track ] for comp in components : msg = comp . verify ( samplerate = samplerate ) if msg : return msg return 0
Checks the current components for invalidating conditions
61
9
16,516
def verify ( self , windowSize = None ) : if self . samplerate ( ) is None : return "Multiple recording files with conflicting samplerates" msg = self . _autoParams . verify ( ) if msg : return msg if self . traceCount ( ) == 0 : return "Test is empty" if windowSize is not None : durations = self . expandFunction ( self . duration ) # print 'windowSize', windowSize, 'self', durations[0], durations[-1] # ranges are linear, so we only need to test first and last if durations [ 0 ] > windowSize or durations [ - 1 ] > windowSize : return "Stimulus duration exceeds window duration" msg = self . verifyExpanded ( self . samplerate ( ) ) if msg : return msg if self . caldb is None or self . calv is None : return "Test reference voltage not set" if None in self . voltage_limits : return "Device voltage limits not set" return 0
Checks the stimulus including expanded parameters for invalidating conditions
216
11
16,517
def get_ao_chans ( dev ) : buf = create_string_buffer ( 256 ) buflen = c_uint32 ( sizeof ( buf ) ) DAQmxGetDevAOPhysicalChans ( dev . encode ( ) , buf , buflen ) pybuf = buf . value chans = pybuf . decode ( u'utf-8' ) . split ( u"," ) return chans
Discover and return a list of the names of all analog output channels for the given device
88
17
16,518
def get_devices ( ) : buf = create_string_buffer ( 512 ) buflen = c_uint32 ( sizeof ( buf ) ) DAQmxGetSysDevNames ( buf , buflen ) pybuf = buf . value devices = pybuf . decode ( u'utf-8' ) . split ( u"," ) return devices
Discover and return a list of the names of all NI devices on this system
73
15
16,519
def write ( self , output ) : w = c_int32 ( ) # print "output max", max(abs(output)) self . WriteAnalogF64 ( self . bufsize , 0 , 10.0 , DAQmx_Val_GroupByChannel , output , w , None )
Writes the data to be output to the device buffer output will be looped when the data runs out
63
21
16,520
def setXlimits ( self , lims ) : self . responseSignalPlot . setXlim ( lims ) self . stimSignalPlot . setXlim ( lims )
Sets the X axis limits of the signal plots
39
10
16,521
def from_chars ( cls , chars = '' , optimal = 3 ) : if not chars : chars = '' . join ( ALNUM ) sets = most_even_chunk ( chars , optimal ) return cls ( sets )
Construct a Pat object from the specified string and optimal position count .
50
13
16,522
def create ( self , count ) : space , self . space = tee ( self . space ) limit = reduce ( mul , map ( len , self . sets ) ) * self . position logging . debug ( 'limit: %s' , limit ) if limit >= count : return '' . join ( islice ( space , count ) ) else : raise IndexError ( '{count} Overflows {sets}!' . format ( count = count , sets = self . sets ) )
Create a pattern of the specified length .
101
8
16,523
def locate ( self , pattern , big_endian = False ) : space , self . space = tee ( self . space ) if pattern . startswith ( '0x' ) : target = unhexlify ( pattern [ 2 : ] . encode ( 'utf-8' ) ) . decode ( 'utf-8' ) if not big_endian : target = target [ : : - 1 ] else : target = pattern for index , one in enumerate ( window ( space , self . position ) ) : if '' . join ( one ) == target [ : self . position ] : return index raise KeyError ( '{target} Not Found In {sets}!' . format ( target = pattern , sets = self . sets ) )
Locate the pattern .
157
5
16,524
def preserve_namespace ( newns = None ) : ns = cmds . namespaceInfo ( an = True ) try : cmds . namespace ( set = newns ) yield finally : cmds . namespace ( set = ns )
Contextmanager that will restore the current namespace
48
8
16,525
def preserve_selection ( ) : sl = cmds . ls ( sl = True ) try : yield finally : cmds . select ( sl , replace = True )
Contextmanager that will restore the current selection
34
8
16,526
def locknode ( node , lock = True ) : oldstatus = cmds . lockNode ( node , q = 1 ) cmds . lockNode ( node , lock = lock ) try : yield finally : if isinstance ( node , basestring ) : if cmds . objExists ( node ) : cmds . lockNode ( node , lock = oldstatus [ 0 ] ) else : for n , l in zip ( node , oldstatus ) : if cmds . objExists ( n ) : cmds . lockNode ( n , lock = l )
Contextmanager that will lock or unlock the given node and afterwards restore the original status
119
16
16,527
def get_top_namespace ( node ) : name = node . rsplit ( "|" , 1 ) [ - 1 ] # get the node name, in case we get a dagpath name = name . lstrip ( ":" ) # strip the root namespace if ":" not in name : # if there is no namespace return root return ":" else : # get the top namespace return name . partition ( ":" ) [ 0 ]
Return the top namespace of the given node
92
8
16,528
def disconnect_node ( node , src = True , dst = True ) : if dst : destconns = cmds . listConnections ( node , connections = True , plugs = True , source = False ) or [ ] for i in range ( 0 , len ( destconns ) , 2 ) : source , dest = destconns [ i ] , destconns [ i + 1 ] cmds . disconnectAttr ( source , dest ) if src : srcconns = cmds . listConnections ( node , connections = True , plugs = True , destination = False ) or [ ] for i in range ( 0 , len ( srcconns ) , 2 ) : source , dest = srcconns [ i + 1 ] , srcconns [ i ] cmds . disconnectAttr ( source , dest )
Disconnect all connections from node
176
6
16,529
def fuzzy_match ( self , other ) : magic , fuzzy = False , False try : magic = self . alias == other . magic except AttributeError : pass if '.' in self . alias : major = self . alias . split ( '.' ) [ 0 ] fuzzy = major == other . alias return magic or fuzzy
Given another token see if either the major alias identifier matches the other alias or if magic matches the alias .
67
21
16,530
def eval ( self ) : if self . and_or == 'or' : return [ Input ( self . alias , file , self . cwd , 'and' ) for file in self . files ] return ' ' . join ( self . files )
Evaluates the given input and returns a string containing the actual filenames represented . If the input token represents multiple independent files then eval will return a list of all the input files needed otherwise it returns the filenames in a string .
53
49
16,531
def files ( self ) : res = None if not res : res = glob . glob ( self . path ) if not res and self . is_glob : res = glob . glob ( self . magic_path ) if not res : res = glob . glob ( self . alias ) if not res : raise ValueError ( 'No files match. %s' % self ) return res
Returns a list of all the files that match the given input token .
81
14
16,532
def from_string ( string , _or = '' ) : if _or : and_or = 'or' else : and_or = '' return Input ( string , and_or = and_or )
Parse a given string and turn it into an input token .
44
13
16,533
def eval ( self ) : if self . magic : return self . magic if not self . filename : return file_pattern . format ( self . alias , self . ext ) return self . path
Returns a filename to be used for script output .
40
10
16,534
def _clean ( self , magic ) : if magic . lower ( ) == 'o' : self . magic = '' elif magic [ : 2 ] . lower ( ) == 'o:' : self . magic = magic [ 2 : ] elif magic [ : 2 ] . lower ( ) == 'o.' : self . ext = magic [ 1 : ]
Given a magic string remove the output tag designator .
75
11
16,535
def get_candidate_election ( self , election ) : return CandidateElection . objects . get ( candidate = self , election = election )
Get a CandidateElection .
30
6
16,536
def get_election_votes ( self , election ) : candidate_election = CandidateElection . objects . get ( candidate = self , election = election ) return candidate_election . votes . all ( )
Get all votes for this candidate in an election .
42
10
16,537
def get_election_electoral_votes ( self , election ) : candidate_election = CandidateElection . objects . get ( candidate = self , election = election ) return candidate_election . electoral_votes . all ( )
Get all electoral votes for this candidate in an election .
47
11
16,538
def get_election_delegates ( self , election ) : candidate_election = CandidateElection . objects . get ( candidate = self , election = election ) return candidate_election . delegates . all ( )
Get all pledged delegates for this candidate in an election .
43
11
16,539
def load_config ( self , config_file = None ) : if config_file is None : config_file = [ '/etc/ellis.conf' , '/etc/ellis/ellis.conf' , os . path . join ( os . path . dirname ( __file__ ) , 'ellis.conf' ) , ] self . config . read ( config_file , encoding = 'utf-8' ) return self
If config_file is not None tries to load Ellis configuration from the given location . If for some reason the file can t be read Ellis will not start .
94
32
16,540
def load_rules ( self ) : for rule_name in self . config . sections ( ) : limit = 1 try : limit = self . config . getint ( rule_name , 'limit' ) except ValueError : warnings . warn ( "Rule '{0}': invalid value for 'limit' option. " "Limit must be an integer > 0. " "Going on with the default value of 1." . format ( rule_name ) ) except configparser . NoOptionError : warnings . warn ( "Rule '{0}': no value specified for 'limit' " "option. Going on with the default value of 1." . format ( rule_name ) ) try : filter_str = self . config . get ( rule_name , 'filter' ) action_str = self . config . get ( rule_name , 'action' ) except configparser . NoOptionError as e : warnings . warn ( "Ignoring '{0}' rule: {1}." . format ( rule_name , e ) ) else : try : rule = Rule ( rule_name , filter_str , limit , action_str ) except ValueError as e : warnings . warn ( "Ignoring '{0}' rule: {1}." . format ( rule_name , e ) ) else : self . rules . append ( rule ) if not self . rules : raise NoRuleError ( ) return self
Loads the Rules from the config file .
298
9
16,541
def load_units ( self ) : # Of course, we only consider valid Rules. for rule in self . rules : try : systemd_unit = self . config . get ( rule . name , 'systemd_unit' ) except configparser . NoOptionError : warnings . warn ( "Rule '{0}' doesn't have a `systemd_unit` " "option set.\nThe filters will be checked " "against all journald entries, which will " "probably result in poor performance." . format ( rule . name ) ) # At this point, we can clear `self.units` because in any # case, we will need to process every journald entries # for THIS Rule. self . units . clear ( ) # And we can also stop looping through rules. break else : # Append ".service" if not present. # Note that we don't check if the service actually exists. # FIXME ? if not systemd_unit . endswith ( ".service" ) : systemd_unit += ".service" self . units . add ( systemd_unit ) return self
Build a set of systemd units that Ellis will watch .
232
11
16,542
def find_commons ( lists ) : others = lists [ 1 : ] return [ val for val in lists [ 0 ] if is_in_all ( val , others ) ]
Finds common values
38
4
16,543
def send_query ( self , query ) : if self . __switched_on : return self . __solr_server_connector . send_query ( query ) else : msg = 'Not sending query' LOGGER . debug ( msg ) raise esgfpid . exceptions . SolrSwitchedOff ( msg )
This method is called by the tasks . It is redirected to the submodule .
69
16
16,544
def strainer ( sequencepath ) : metadata_list = list ( ) assert os . path . isdir ( sequencepath ) , 'Cannot locate sequence path as specified: {}' . format ( sequencepath ) # Get the sequences in the sequences folder into a list. Note that they must have a file extension that # begins with .fa strains = sorted ( glob . glob ( os . path . join ( sequencepath , '*.fa*' ) ) ) # Populate the metadata object. This object will be populated to mirror the objects created in the # genome assembly pipeline. This way this script will be able to be used as a stand-alone, or as part # of a pipeline assert strains , 'Could not find any files with an extension starting with "fa" in {}. Please check ' 'to ensure that your sequence path is correct' . format ( sequencepath ) for sample in strains : # Create the object metadata = MetadataObject ( ) # Set the base file name of the sequence. Just remove the file extension filename = os . path . splitext ( os . path . split ( sample ) [ 1 ] ) [ 0 ] # Set the .name attribute to be the file name metadata . name = filename # Create the .general attribute metadata . general = GenObject ( ) metadata . commands = GenObject ( ) metadata . general . outputdirectory = os . path . join ( sequencepath , filename ) # Set the .general.bestassembly file to be the name and path of the sequence file metadata . general . bestassemblyfile = os . path . join ( metadata . general . outputdirectory , '{sn}.fasta' . format ( sn = filename ) ) make_path ( metadata . general . outputdirectory ) # Create a symlink to the directory relative_symlink ( sample , metadata . general . outputdirectory ) metadata . general . logout = os . path . join ( metadata . general . outputdirectory , 'out' ) metadata . general . logerr = os . path . join ( metadata . general . outputdirectory , 'err' ) # Append the metadata for each sample to the list of samples metadata_list . append ( metadata ) return strains , metadata_list
Locate all the FASTA files in the supplied sequence path . Create basic metadata objects for each sample
458
21
16,545
def render ( self , model , color , num_turtles ) : self . program . bind ( ) glBindVertexArray ( self . vao ) self . model_buffer . load ( model . data , model . byte_size ) self . color_buffer . load ( color . data , color . byte_size ) glDrawArraysInstanced ( GL_TRIANGLES , 0 , len ( self . geometry . edges ) // 7 , # 7 = 4 for vertex, 3 for edge num_turtles ) glBindVertexArray ( 0 ) self . program . unbind ( )
Renders all turtles of a given shape
127
8
16,546
def renew_connection ( password ) : with Controller . from_port ( port = 9051 ) as controller : controller . authenticate ( password = password ) controller . signal ( Signal . NEWNYM )
Renews TOR session
43
4
16,547
def parse_url ( url ) : parsed = url if not url . startswith ( "http://" ) and not url . startswith ( "https://" ) : # if url is like www.yahoo.com parsed = "http://" + parsed elif url . startswith ( "https://" ) : parsed = parsed [ 8 : ] parsed = "http://" + parsed index_hash = parsed . rfind ( "#" ) # remove trailing # index_slash = parsed . rfind ( "/" ) if index_hash > index_slash : parsed = parsed [ 0 : index_hash ] return parsed
Parses correctly url
136
5
16,548
def get_links ( self , recall , timeout ) : for _ in range ( recall ) : try : # setting timeout soup = BeautifulSoup ( self . source ) # parse source out_links = [ ] for tag in soup . findAll ( [ "a" , "link" ] , href = True ) : tag [ "href" ] = urljoin ( self . url , tag [ "href" ] ) out_links . append ( tag [ "href" ] ) return sorted ( out_links ) # sort array except : time . sleep ( timeout )
Gets links in page
119
5
16,549
def open_in_browser ( self , n_times ) : for _ in range ( n_times ) : webbrowser . open ( self . url )
Opens page in browser
33
5
16,550
def download_url ( self , local_file ) : downloader = urllib . request . URLopener ( ) downloader . retrieve ( self . url , local_file )
Downloads url to local file
39
6
16,551
def download_to_file ( self , local_file , headers = None , cookies = None , chunk_size = 1024 ) : if not headers : headers = HEADERS if not cookies : cookies = { } req = requests . get ( self . url , headers = headers , cookies = cookies , stream = True ) with open ( local_file , "wb" ) as local_download : for chunk in req . iter_content ( chunk_size ) : if chunk : local_download . write ( chunk )
Downloads link to local file
107
6
16,552
def getLogicalLines ( fp , allowQP = True , findBegin = False ) : if not allowQP : val = fp . read ( - 1 ) #Shouldn't need this anymore... """ if len(val) > 0: if not findBegin: val = val.decode('utf-8') else: for encoding in 'utf-8', 'utf-16-LE', 'utf-16-BE', 'iso-8859-1': try: val = val.decode(encoding) if begin_re.search(val) is not None: break except UnicodeDecodeError: pass else: raise ParseError('Could not find BEGIN when trying to determine encoding') """ # strip off any UTF8 BOMs which Python's UTF8 decoder leaves #val = val.lstrip( unicode( codecs.BOM_UTF8, "utf8" ) ) lineNumber = 1 for match in logical_lines_re . finditer ( val ) : line , n = wrap_re . subn ( '' , match . group ( ) ) if line != '' : yield line , lineNumber lineNumber += n else : quotedPrintable = False newbuffer = six . StringIO logicalLine = newbuffer ( ) lineNumber = 0 lineStartNumber = 0 while True : line = fp . readline ( ) if line == '' : break else : line = line . rstrip ( CRLF ) lineNumber += 1 if line . rstrip ( ) == '' : if logicalLine . tell ( ) > 0 : yield logicalLine . getvalue ( ) , lineStartNumber lineStartNumber = lineNumber logicalLine = newbuffer ( ) quotedPrintable = False continue if quotedPrintable and allowQP : logicalLine . write ( '\n' ) logicalLine . write ( line ) quotedPrintable = False elif line [ 0 ] in SPACEORTAB : logicalLine . write ( line [ 1 : ] ) elif logicalLine . tell ( ) > 0 : yield logicalLine . getvalue ( ) , lineStartNumber lineStartNumber = lineNumber logicalLine = newbuffer ( ) logicalLine . write ( line ) else : logicalLine = newbuffer ( ) logicalLine . write ( line ) # vCard 2.1 allows parameters to be encoded without a parameter name. # False positives are unlikely, but possible. val = logicalLine . getvalue ( ) if val [ - 1 ] == '=' and val . lower ( ) . find ( 'quoted-printable' ) >= 0 : quotedPrintable = True if logicalLine . tell ( ) > 0 : yield logicalLine . getvalue ( ) , lineStartNumber
Iterate through a stream yielding one logical line at a time .
569
13
16,553
def newFromBehavior ( name , id = None ) : name = name . upper ( ) behavior = getBehavior ( name , id ) if behavior is None : raise VObjectError ( "No behavior found named %s" % name ) if behavior . isComponent : obj = Component ( name ) else : obj = ContentLine ( name , [ ] , '' ) obj . behavior = behavior obj . isNative = False return obj
Given a name return a behaviored ContentLine or Component .
90
12
16,554
def transformFromNative ( self ) : if self . isNative and self . behavior and self . behavior . hasNative : try : return self . behavior . transformFromNative ( self ) except Exception as e : # wrap errors in transformation in a NativeError lineNumber = getattr ( self , 'lineNumber' , None ) if isinstance ( e , NativeError ) : if lineNumber is not None : e . lineNumber = lineNumber raise else : msg = "In transformFromNative, unhandled exception on line %s %s: %s" msg = msg % ( lineNumber , sys . exc_info ( ) [ 0 ] , sys . exc_info ( ) [ 1 ] ) raise NativeError ( msg , lineNumber ) else : return self
Return self transformed into a ContentLine or Component if needed .
158
12
16,555
def _wrap_callback_parse_time_info ( subscription , on_data , message ) : if message . type == message . REPLY : time_response = web_pb2 . TimeSubscriptionResponse ( ) time_response . ParseFromString ( message . reply . data ) time = parse_isostring ( time_response . timeInfo . currentTimeUTC ) #pylint: disable=protected-access subscription . _process ( time ) if on_data : on_data ( time ) elif message . type == message . DATA : if message . data . type == yamcs_pb2 . TIME_INFO : time_message = getattr ( message . data , 'timeInfo' ) time = parse_isostring ( time_message . currentTimeUTC ) #pylint: disable=protected-access subscription . _process ( time ) if on_data : on_data ( time )
Wraps a user callback to parse TimeInfo from a WebSocket data message
196
15
16,556
def _wrap_callback_parse_event ( on_data , message ) : if message . type == message . DATA : if message . data . type == yamcs_pb2 . EVENT : event = Event ( getattr ( message . data , 'event' ) ) #pylint: disable=protected-access on_data ( event )
Wraps a user callback to parse Events from a WebSocket data message
74
14
16,557
def _wrap_callback_parse_link_event ( subscription , on_data , message ) : if message . type == message . DATA : if message . data . type == yamcs_pb2 . LINK_EVENT : link_message = getattr ( message . data , 'linkEvent' ) link_event = LinkEvent ( link_message ) #pylint: disable=protected-access subscription . _process ( link_event ) if on_data : on_data ( link_event )
Wraps a user callback to parse LinkEvents from a WebSocket data message
108
15
16,558
def get_time ( self , instance ) : url = '/instances/{}' . format ( instance ) response = self . get_proto ( url ) message = yamcsManagement_pb2 . YamcsInstance ( ) message . ParseFromString ( response . content ) if message . HasField ( 'missionTime' ) : return parse_isostring ( message . missionTime ) return None
Return the current mission time for the specified instance .
87
10
16,559
def get_server_info ( self ) : response = self . get_proto ( path = '' ) message = rest_pb2 . GetApiOverviewResponse ( ) message . ParseFromString ( response . content ) return ServerInfo ( message )
Return general server info .
54
5
16,560
def get_auth_info ( self ) : try : response = self . session . get ( self . auth_root , headers = { 'Accept' : 'application/protobuf' } ) message = web_pb2 . AuthInfo ( ) message . ParseFromString ( response . content ) return AuthInfo ( message ) except requests . exceptions . ConnectionError : raise ConnectionFailure ( 'Connection to {} refused' . format ( self . address ) )
Returns general authentication information . This operation does not require authenticating and is useful to test if a server requires authentication or not .
96
25
16,561
def get_user_info ( self ) : response = self . get_proto ( path = '/user' ) message = yamcsManagement_pb2 . UserInfo ( ) message . ParseFromString ( response . content ) return UserInfo ( message )
Get information on the authenticated user .
56
7
16,562
def create_instance ( self , name , template , args = None , labels = None ) : req = rest_pb2 . CreateInstanceRequest ( ) req . name = name req . template = template if args : for k in args : req . templateArgs [ k ] = args [ k ] if labels : for k in labels : req . labels [ k ] = labels [ k ] url = '/instances' self . post_proto ( url , data = req . SerializeToString ( ) )
Create a new instance based on an existing template . This method blocks until the instance is fully started .
107
20
16,563
def list_instance_templates ( self ) : response = self . get_proto ( path = '/instance-templates' ) message = rest_pb2 . ListInstanceTemplatesResponse ( ) message . ParseFromString ( response . content ) templates = getattr ( message , 'template' ) return iter ( [ InstanceTemplate ( template ) for template in templates ] )
List the available instance templates .
81
6
16,564
def list_services ( self , instance ) : # Server does not do pagination on listings of this resource. # Return an iterator anyway for similarity with other API methods url = '/services/{}' . format ( instance ) response = self . get_proto ( path = url ) message = rest_pb2 . ListServiceInfoResponse ( ) message . ParseFromString ( response . content ) services = getattr ( message , 'service' ) return iter ( [ Service ( service ) for service in services ] )
List the services for an instance .
109
7
16,565
def stop_service ( self , instance , service ) : req = rest_pb2 . EditServiceRequest ( ) req . state = 'stopped' url = '/services/{}/{}' . format ( instance , service ) self . patch_proto ( url , data = req . SerializeToString ( ) )
Stops a single service .
70
6
16,566
def list_processors ( self , instance = None ) : # Server does not do pagination on listings of this resource. # Return an iterator anyway for similarity with other API methods url = '/processors' if instance : url += '/' + instance response = self . get_proto ( path = url ) message = rest_pb2 . ListProcessorsResponse ( ) message . ParseFromString ( response . content ) processors = getattr ( message , 'processor' ) return iter ( [ Processor ( processor ) for processor in processors ] )
Lists the processors .
114
5
16,567
def list_clients(self, instance=None):
    """List connected clients, optionally restricted to a single instance.

    The server returns the full listing at once; an iterator is returned
    anyway for consistency with paginated API methods.

    :rtype: iterator of Client
    """
    path = '/instances/{}/clients'.format(instance) if instance else '/clients'
    response = self.get_proto(path=path)
    message = rest_pb2.ListClientsResponse()
    message.ParseFromString(response.content)
    return iter(Client(c) for c in getattr(message, 'client'))
Lists the clients .
125
5
16,568
def list_instances(self):
    """List the instances known to the server.

    The server returns the full listing at once; an iterator is returned
    anyway for consistency with paginated API methods.

    :rtype: iterator of Instance
    """
    response = self.get_proto(path='/instances')
    message = rest_pb2.ListInstancesResponse()
    message.ParseFromString(response.content)
    return iter(Instance(i) for i in getattr(message, 'instance'))
Lists the instances .
99
5
16,569
def start_instance(self, instance):
    """Start a single instance."""
    self.patch_proto('/instances/{}'.format(instance),
                     params={'state': 'running'})
Starts a single instance .
48
6
16,570
def stop_instance(self, instance):
    """Stop a single instance."""
    self.patch_proto('/instances/{}'.format(instance),
                     params={'state': 'stopped'})
Stops a single instance .
49
6
16,571
def restart_instance(self, instance):
    """Restart a single instance."""
    self.patch_proto('/instances/{}'.format(instance),
                     params={'state': 'restarted'})
Restarts a single instance .
49
6
16,572
def list_data_links(self, instance):
    """List the data links visible to this client.

    The server returns the full listing at once; an iterator is returned
    anyway for consistency with paginated API methods.

    :rtype: iterator of Link
    """
    response = self.get_proto(path='/links/' + instance)
    message = rest_pb2.ListLinkInfoResponse()
    message.ParseFromString(response.content)
    return iter(Link(link) for link in getattr(message, 'link'))
Lists the data links visible to this client .
103
10
16,573
def send_event(self, instance, message, event_type=None, time=None,
               severity='info', source=None, sequence_number=None):
    """Post a new event to an instance's archive.

    :param str instance: target instance
    :param str message: event message text
    :param str event_type: optional event type
    :param time: optional event time (converted with to_isostring)
    :param str severity: severity level, defaults to 'info'
    :param str source: optional event source
    :param int sequence_number: optional sequence number (0 is valid,
        so it is compared against None explicitly)
    """
    req = rest_pb2.CreateEventRequest()
    req.message = message
    req.severity = severity
    if event_type:
        req.type = event_type
    if time:
        req.time = to_isostring(time)
    if source:
        req.source = source
    if sequence_number is not None:
        req.sequence_number = sequence_number
    self.post_proto('/archive/{}/events'.format(instance),
                    data=req.SerializeToString())
Post a new event .
147
5
16,574
def get_data_link(self, instance, link):
    """Fetch a single data link by name.

    :rtype: Link
    """
    response = self.get_proto('/links/{}/{}'.format(instance, link))
    info = yamcsManagement_pb2.LinkInfo()
    info.ParseFromString(response.content)
    return Link(info)
Gets a single data link .
70
7
16,575
def enable_data_link(self, instance, link):
    """Enable a data link."""
    req = rest_pb2.EditLinkRequest()
    req.state = 'enabled'
    self.patch_proto('/links/{}/{}'.format(instance, link),
                     data=req.SerializeToString())
Enables a data link .
71
6
16,576
def create_data_link_subscription(self, instance, on_data=None, timeout=60):
    """Subscribe to data link updates of an instance.

    Blocks until the server confirms (or rejects) the subscription.

    :param on_data: optional callback invoked for each link event
    :param float timeout: seconds to wait for the subscription reply
    :rtype: DataLinkSubscription
    """
    manager = WebSocketSubscriptionManager(self, resource='links')
    # The subscription doubles as a future for the server's reply.
    subscription = DataLinkSubscription(manager)
    callback = functools.partial(
        _wrap_callback_parse_link_event, subscription, on_data)
    manager.open(callback, instance)
    subscription.reply(timeout=timeout)  # raises on subscription failure
    return subscription
Create a new subscription for receiving data link updates of an instance .
112
13
16,577
def create_time_subscription(self, instance, on_data=None, timeout=60):
    """Subscribe to time updates of an instance (emitted at 1Hz).

    Blocks until the server confirms (or rejects) the subscription.

    :param on_data: optional callback invoked for each time update
    :param float timeout: seconds to wait for the subscription reply
    :rtype: TimeSubscription
    """
    manager = WebSocketSubscriptionManager(self, resource='time')
    # The subscription doubles as a future for the server's reply.
    subscription = TimeSubscription(manager)
    callback = functools.partial(
        _wrap_callback_parse_time_info, subscription, on_data)
    manager.open(callback, instance)
    subscription.reply(timeout=timeout)  # raises on subscription failure
    return subscription
Create a new subscription for receiving time updates of an instance . Time updates are emitted at 1Hz .
109
20
16,578
def create_event_subscription(self, instance, on_data, timeout=60):
    """Subscribe to events of an instance.

    Blocks until the server confirms (or rejects) the subscription.

    :param on_data: callback invoked for each event
    :param float timeout: seconds to wait for the subscription reply
    :rtype: WebSocketSubscriptionFuture
    """
    manager = WebSocketSubscriptionManager(self, resource='events')
    # Represent the subscription as a future for the server's reply.
    subscription = WebSocketSubscriptionFuture(manager)
    callback = functools.partial(_wrap_callback_parse_event, on_data)
    manager.open(callback, instance)
    subscription.reply(timeout=timeout)  # raises on subscription failure
    return subscription
Create a new subscription for receiving events of an instance .
105
11
16,579
def remove_group(self, group):
    """Remove the specified group, its entries, and all sub-groups.

    :param group: the Group to remove; must be bound to this database.
    :raises TypeError: if group is not a Group instance.
    :raises ValueError: if group is not part of this database.
    """
    if not isinstance(group, Group):
        raise TypeError("group must be Group")
    if group not in self.groups:
        raise ValueError("Group doesn't exist / is not bound to this database.")
    # Snapshot the count before removal: remove_entry() mutates
    # group.entries, so we delete index 0 a fixed number of times instead
    # of iterating the shrinking list.
    num_entries = len(group.entries)
    for i in xrange(num_entries):
        self.remove_entry(group.entries[0])
    # Recurse down to remove sub-groups; same fixed-count/index-0 trick
    # avoids iterating group.children while it is being mutated.
    num_children = len(group.children)
    for i in xrange(num_children):
        # NOTE(review): may need to operate on a copy to avoid concurrent
        # modification if a parent in the recursive stack is iterating.
        self.remove_group(group.children[0])
    # Finally detach the group from its parent and from the flat list.
    # NOTE(review): possible concurrent-modification hazard if the parent
    # in the recursive stack is iterating its children — confirm.
    group.parent.children.remove(group)
    self.groups.remove(group)
Remove the specified group .
206
5
16,580
def create_entry(self, group, **kwargs):
    """Create a new Entry bound to the given group.

    :param group: the Group that should hold the entry; must belong to
        this database.
    :raises ValueError: if group is not part of this database.
    :returns: the newly created Entry.
    """
    if group not in self.groups:
        raise ValueError("Group doesn't exist / is not bound to this database.")
    timestamp = util.now()
    entry = Entry(uuid=binascii.hexlify(get_random_bytes(16)),
                  group_id=group.id,
                  created=timestamp,
                  modified=timestamp,
                  accessed=timestamp,
                  **kwargs)
    # Register the entry both database-wide and on its group.
    self.entries.append(entry)
    group.entries.append(entry)
    return entry
Create a new Entry object . The group which should hold the entry is needed .
129
16
16,581
def _bind_model(self):
    """Bind groups and entries into the correct parent/child hierarchy and
    attach references to this database object to every group.

    Group levels encode tree depth (root has level -1):

        [R]
        | A (1)
        +-| B (2)
        | | C (2)
        | D (1)
        +-| E (2)
          | F (2)
          +-| G (3)
            | H (3)
            | I (3)

    :raises ValueError: if the first group does not have level 0.
    :raises NotImplementedError: if an entry references no known group.
    """
    if self.groups[0].level != 0:
        self.log.info("Got invalid first group: {0}".format(self.groups[0]))
        raise ValueError("Invalid group tree: first group must have level of 0 (got {0})".format(self.groups[0].level))

    class Stack(list):
        """ A class to make parsing code slightly more semantic. """
        def push(self, el):
            self.append(el)

    # Walk the flat group list, maintaining a stack of ancestors.
    # Invariant: current_parent is always parent_stack[-1].
    parent_stack = Stack([self.root])
    current_parent = self.root
    prev_group = None
    for g in self.groups:
        g.db = self  # bind database to group objects
        if prev_group is not None:  # first iteration is exceptional
            if g.level > prev_group.level:
                # Dropping down a level; the previous group is the parent.
                current_parent = prev_group
                parent_stack.push(current_parent)
            elif g.level < prev_group.level:
                # Pop ancestors until the top has a level below ours.
                while g.level <= current_parent.level:
                    current_parent = parent_stack.pop()
                parent_stack.push(current_parent)  # restore invariant
        # Bi-directional child-parent binding.
        g.parent = current_parent
        current_parent.children.append(g)
        prev_group = g
    # Bind group objects to entries by matching group ids.
    for entry in self.entries:
        for group in self.groups:
            if entry.group_id == group.id:
                group.entries.append(entry)
                entry.group = group
                break
        else:
            # KeePassX adds these to the first group (i.e. root.children[0])
            raise NotImplementedError("Orphaned entries not (yet) supported.")
This method binds the various model objects together in the correct hierarchy and adds references to this database object in the groups .
569
25
16,582
def filepath(self, value):
    """Set the current filepath.

    For writable databases this migrates the lock: any lock held on the
    previous path is released, and a new lock is acquired on the new path.
    """
    # Readonly databases (or a no-op assignment) never touch locks.
    if self.readonly or self._filepath == value:
        self._filepath = value
        return
    if self._locked:
        self.log.debug("Releasing previously-held lock file: {0}".format(self.lockfile))
        # Release the lock on the previous filepath.
        self.release_lock()
    self._filepath = value
    if self._filepath is not None:
        self.acquire_lock()
Property for setting the current filepath ; automatically takes out a lock on the new file if the database is not readonly .
106
19
16,583
def close(self):
    """Close the database, releasing the lock for writable databases."""
    super(LockingDatabase, self).close()
    if self.readonly:
        return
    self.release_lock()
Closes the database releasing lock .
32
7
16,584
def get_dates_file(path):
    """Parse a dates file.

    Each line looks like ``<date-string> <probability>``; returns a list
    of ``(parsed_date, probability)`` tuples.
    """
    with open(path) as handle:
        raw_lines = handle.readlines()
    parsed = []
    for line in raw_lines:
        fields = line.split(" ")
        parsed.append((convert_time_string(fields[0]), float(fields[1])))
    return parsed
parse a dates file of dates and the probability of choosing each
72
9
16,585
def get_dates_link(url):
    """Download a dates file from the given URL and parse it.

    Uses a temporary file ("temp.txt") in the working directory, which is
    removed afterwards.
    """
    temp_name = "temp.txt"
    urllib.request.urlretrieve(url, temp_name)
    parsed = get_dates_file(temp_name)
    os.remove(temp_name)
    return parsed
download the dates file from the internet and parse it as a dates file
54
14
16,586
def get_request_mock():
    """Build a mock WSGI GET request for rendering templates in as
    faithful an environment as possible.

    The environ keys follow PEP 333; request middleware is applied so the
    mock behaves like a real processed request.
    """
    basehandler = BaseHandler()
    basehandler.load_middleware()
    # http://www.python.org/dev/peps/pep-0333/
    request = WSGIRequest({
        'HTTP_COOKIE': '',
        'PATH_INFO': '/',
        'QUERY_STRING': '',
        'REMOTE_ADDR': '127.0.0.1',
        'REQUEST_METHOD': 'GET',
        'SERVER_NAME': 'page-request-mock',
        'SCRIPT_NAME': '',
        'SERVER_PORT': '80',
        'SERVER_PROTOCOL': 'HTTP/1.1',
        'HTTP_HOST': 'page-request-host',
        'CONTENT_TYPE': 'text/html; charset=utf-8',
        'wsgi.version': (1, 0),
        'wsgi.url_scheme': 'http',
        'wsgi.multiprocess': True,
        'wsgi.multithread': False,
        'wsgi.run_once': False,
        'wsgi.input': StringIO()})
    # Apply request middleware
    for middleware_method in basehandler._request_middleware:
        # LocaleMiddleware should never be applied a second time because
        # it would break the current real request's language
        if 'LocaleMiddleware' not in str(middleware_method.__class__):
            middleware_method(request)
    return request
Build a request mock up that is used in to render the templates in the most fidel environement as possible .
346
24
16,587
def pages_view(view):
    """Decorator that makes sure the wrapped view receives the essential
    pages context variables.

    If the page context is already present (``current_page`` or
    ``pages_navigation`` in kwargs) the view is called untouched;
    otherwise the context is fetched via ``basic_cms.views.details`` and
    merged into kwargs (or stored under ``extra_context_var`` if given).
    """
    def decorated(request, *args, **kwargs):
        # Fast path: the page context is already there.
        if (kwargs.get('current_page', False)
                or kwargs.get('pages_navigation', False)):
            return view(request, *args, **kwargs)
        path = kwargs.pop('path', None)
        lang = kwargs.pop('lang', None)
        if path:
            # Imported lazily to avoid a circular import at module load.
            from basic_cms.views import details
            context = details(request, path=path, lang=lang,
                              only_context=True, delegation=False)
            extra_context_var = kwargs.pop('extra_context_var', None)
            if extra_context_var:
                kwargs.update({extra_context_var: context})
            else:
                kwargs.update(context)
        return view(request, *args, **kwargs)
    return decorated
Make sure the decorated view gets the essential pages variables .
228
11
16,588
def true_neg_rate(self):
    """Calculate the true negative rate: TN / (TN + FP)."""
    # assumes matrix[1] is the actual-negative row with [FP, TN] — TODO confirm
    true_neg = self.matrix[1][1]
    false_pos = self.matrix[1][0]
    return divide(1.0 * true_neg, true_neg + false_pos)
Calculates true negative rate
55
6
16,589
def f1_score(self):
    """Calculate the F1 score (harmonic mean of precision and recall)."""
    precision_value = self.precision()
    recall_value = self.recall()
    return divide(2.0, 1.0 / precision_value + 1.0 / recall_value)
Calculates F1 score
46
6
16,590
def from_columns(columns):
    """Build a Matrix from raw columns, copying each column."""
    data = [list(column) for column in columns]
    return Matrix(data)
Parses raw columns
38
5
16,591
def get_version_details(path):
    """Parse a dunder-style version file into a plain dict.

    Each line must look like ``__key__ = 'value'``; the result maps
    ``key`` to ``value`` with double underscores and single quotes
    stripped.
    """
    details = {}
    with open(path, "r") as reader:
        for line in reader.readlines():
            parts = line.split(" = ")
            key = parts[0].replace("__", "")
            details[key] = parts[1].strip().replace("'", "")
    return details
Parses version file
84
5
16,592
def _get_record(self, name):
    """Return the id of an existing A record, or None if it does not exist.

    :raises RuntimeError: if the search fails or the JSON is malformed.
    """
    request = self._session.get(self._baseurl,
                                params={'name': name, 'type': 'A'})
    if not request.ok:
        raise RuntimeError('Failed to search record: %s - %s'
                           % (self._format_hostname(name), request.json()))
    records = request.json()
    if not records:
        return None
    record = records[0]
    if 'record' not in record or 'id' not in record['record']:
        raise RuntimeError('Invalid record JSON format: %s - %s'
                           % (self._format_hostname(name), request.json()))
    return int(record['record']['id'])
Returns the id of a record if it exists .
180
10
16,593
def _create_record(self, name, address, ttl):
    """Create a new A record and return its JSON 'record' payload.

    :raises RuntimeError: if the creation fails or the JSON is malformed.
    """
    payload = {'record': {'name': name,
                          'record_type': 'A',
                          'content': address,
                          'ttl': ttl}}
    request = self._session.post(self._baseurl,
                                 data=json.dumps(payload),
                                 headers={'Content-Type': 'application/json'})
    if not request.ok:
        raise RuntimeError('Failed to create new record: %s - %s'
                           % (self._format_hostname(name), request.json()))
    record = request.json()
    if 'record' not in record or 'id' not in record['record']:
        raise RuntimeError('Invalid record JSON format: %s - %s'
                           % (self._format_hostname(name), request.json()))
    return record['record']
Creates a new record .
212
6
16,594
def _update_record(self, record_id, name, address, ttl):
    """Update an existing record and return its JSON 'record' payload.

    :raises RuntimeError: if the update fails or the JSON is malformed.
    """
    payload = {'record': {'name': name,
                          'content': address,
                          'ttl': ttl}}
    request = self._session.put(self._baseurl + '/%d' % record_id,
                                data=json.dumps(payload),
                                headers={'Content-Type': 'application/json'})
    if not request.ok:
        raise RuntimeError('Failed to update record: %s - %s'
                           % (self._format_hostname(name), request.json()))
    record = request.json()
    if 'record' not in record or 'id' not in record['record']:
        raise RuntimeError('Invalid record JSON format: %s - %s'
                           % (self._format_hostname(name), request.json()))
    return record['record']
Updates an existing record .
214
6
16,595
def update_record(self, name, address, ttl=60):
    """Update a record, creating it first if it does not exist yet."""
    existing_id = self._get_record(name)
    if existing_id is not None:
        return self._update_record(existing_id, name, address, ttl)
    return self._create_record(name, address, ttl)
Updates a record creating it if not exists .
71
10
16,596
def wheelEvent ( self , ev , axis = None ) : state = None # ctrl reverses mouse operation axis if ev . modifiers ( ) == QtCore . Qt . ControlModifier : state = self . mouseEnabled ( ) self . setMouseEnabled ( not state [ 0 ] , not state [ 1 ] ) if self . _zeroWheel : ev . pos = lambda : self . mapViewToScene ( QtCore . QPoint ( 0 , 0 ) ) super ( SpikeyViewBox , self ) . wheelEvent ( ev , axis ) if state is not None : self . setMouseEnabled ( * state )
Reacts to mouse wheel movement custom behaviour switches zoom axis when ctrl is pressed and sets the locus of zoom if zeroWheel is set .
129
29
16,597
def copy(self):
    """Return a QMenu clone of this menu (adapted from pyqtgraph's
    ViewBoxMenu), preserving submenus, actions and title.
    """
    clone = QtGui.QMenu()
    for item in self.subMenus():
        if isinstance(item, QtGui.QMenu):
            clone.addMenu(item)
        else:
            clone.addAction(item)
    clone.setTitle(self.title())
    return clone
Adds menus to itself required by ViewBox
80
8
16,598
def select_k_best(self, k):
    """Select the k best features of the training set using the
    chi-squared score and return the transformed feature matrix.
    """
    selector = SelectKBest(chi2, k=k)
    return selector.fit_transform(self.x_train, self.y_train)
Selects k best features in dataset
48
7
16,599
def main(self):
    """Run the pipeline methods in the required order."""
    logging.info('Aligning reads with bowtie2 for Qualimap')
    # Each step depends on the output of the previous one.
    for step in (self.bowtie, self.indexing, self.pilon,
                 self.filter, self.clear):
        step()
Run the methods in the correct order
52
7