idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
16,100
def _project_dict(self, **kwargs: Dict[str, Any]) -> Dict[str, Hist]:
    """Driver function for projecting and storing a dictionary of observables.

    Projects every observable in ``self.observable_to_project_from`` and stores
    the result in ``self.output_observable`` keyed by ``self.output_key_name``.

    Args:
        kwargs: Additional arguments passed through to the projection hooks.
    Returns:
        The (mutated) ``self.output_observable`` dictionary.
    """
    # Setup function arguments with values which don't change per loop.
    get_hist_args = copy.deepcopy(kwargs)
    projection_name_args = copy.deepcopy(kwargs)
    for key, input_observable in self.observable_to_project_from.items():
        output_hist, projection_name, projection_name_args, = self._project_observable(
            input_key=key,
            input_observable=input_observable,
            get_hist_args=get_hist_args,
            projection_name_args=projection_name_args,
            **kwargs,
        )
        # Store the output observable
        # NOTE(review): output_hist_args aliases projection_name_args, so the
        # update() below also mutates the dict passed into the next loop
        # iteration — presumably intentional (args accumulate), but confirm.
        output_hist_args = projection_name_args
        output_hist_args.update({  # type: ignore
            "output_hist": output_hist,
            "projection_name": projection_name
        })
        output_key_name = self.output_key_name(**output_hist_args)  # type: ignore
        self.output_observable[output_key_name] = self.output_hist(**output_hist_args)  # type: ignore
    return self.output_observable
Driver function for projecting and storing a dictionary of observables .
288
12
16,101
def cleanup_cuts(self, hist: Hist, cut_axes: Iterable[HistAxisRange]) -> None:
    """Cleanup applied cuts by resetting each axis to its full range.

    Args:
        hist: Histogram whose axes were restricted during projection.
        cut_axes: Axis-range objects whose axes should be restored.
    """
    for axis in cut_axes:
        # Look the TAxis up once instead of twice per iteration.
        hist_axis = axis.axis(hist)
        # According to the function TAxis::SetRange(first, last), the widest
        # possible range is (1, Nbins). Anything beyond that will be reset to
        # (1, Nbins).
        hist_axis.SetRange(1, hist_axis.GetNbins())
Cleanup applied cuts by resetting the axis to the full range .
105
14
16,102
def projection_name(self, **kwargs: Dict[str, Any]) -> str:
    """Build this projector's name from its configured format string."""
    name_format = self.projection_name_format
    return name_format.format(**kwargs)
Define the projection name for this projector .
41
9
16,103
def get_hist(self, observable: Any, **kwargs: Dict[str, Any]) -> Any:
    """Extract the histogram stored in an object.

    Default implementation: the observable itself is the histogram.
    """
    return observable
Get the histogram that may be stored in some object .
30
12
16,104
def output_key_name(self, input_key: str, output_hist: Hist, projection_name: str, **kwargs) -> str:
    """Return the key under which the output object should be stored.

    Default implementation: use the projection name directly.
    """
    return projection_name
Returns the key under which the output object should be stored .
40
12
16,105
def output_hist(self, output_hist: Hist, input_observable: Any, **kwargs: Dict[str, Any]) -> Union[Hist, Any]:
    """Return an output object that stores ``output_hist``.

    Default implementation: the histogram itself is the output object.
    """
    return output_hist
Return an output object . It should store the output_hist .
47
13
16,106
def run():
    """Move the repos from one GitHub team to another.

    Adds each repo named on the command line to the new team, then removes
    it from the old team, collecting the names that succeeded.
    """
    args = parse_args()
    codetools.setup_logging(args.debug)
    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)
    org = g.get_organization(args.org)
    # only iterate over all teams once
    try:
        teams = list(org.get_teams())
    except github.RateLimitExceededException:
        raise
    except github.GithubException as e:
        msg = 'error getting teams'
        raise pygithub.CaughtOrganizationError(org, e, msg) from None
    old_team = find_team(teams, args.oldteam)
    new_team = find_team(teams, args.newteam)
    move_me = args.repos
    debug(len(move_me), 'repos to be moved')
    added = []
    removed = []
    for name in move_me:
        try:
            r = org.get_repo(name)
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = "error getting repo by name: {r}".format(r=name)
            raise pygithub.CaughtOrganizationError(org, e, msg) from None
        # Add team to the repo
        debug("Adding {repo} to '{team}' ...".format(
            repo=r.full_name,
            team=args.newteam
        ))
        if not args.dry_run:
            try:
                new_team.add_to_repos(r)
                # BUG FIX: was `added += r.full_name`, which extends the
                # list character-by-character; append the whole name.
                added.append(r.full_name)
                debug(' ok')
            except github.RateLimitExceededException:
                raise
            except github.GithubException:
                debug(' FAILED')
        # BUG FIX: was `old_team.name in 'Owners'`, a substring test that
        # also matched e.g. 'Owner' and '' — compare for equality.
        if old_team.name == 'Owners':
            warn("Removing repo {repo} from team 'Owners' is not allowed".format(
                repo=r.full_name
            ))
        debug("Removing {repo} from '{team}' ...".format(
            repo=r.full_name,
            team=args.oldteam
        ))
        if not args.dry_run:
            try:
                old_team.remove_from_repos(r)
                # BUG FIX: append, not string concatenation into the list.
                removed.append(r.full_name)
                debug(' ok')
            except github.RateLimitExceededException:
                raise
            except github.GithubException:
                debug(' FAILED')
    info('Added:', added)
    info('Removed:', removed)
Move the repos
528
4
16,107
def poll_values():
    """Shows how to poll values from the subscription."""
    subscription = processor.create_parameter_subscription(
        ['/YSS/SIMULATOR/BatteryVoltage1'])
    # Poll the cached value twice, five seconds apart.
    for _ in range(2):
        sleep(5)
        print('Latest value:')
        print(subscription.get_value('/YSS/SIMULATOR/BatteryVoltage1'))
Shows how to poll values from the subscription .
106
10
16,108
def receive_callbacks():
    """Shows how to receive callbacks on value updates."""
    def _print_update(data):
        # One ParameterData message may carry several parameter values.
        for parameter in data.parameters:
            print(parameter)

    processor.create_parameter_subscription('/YSS/SIMULATOR/BatteryVoltage1',
                                            on_data=_print_update)
    sleep(5)
Shows how to receive callbacks on value updates .
64
11
16,109
def manage_subscription():
    """Shows how to interact with a parameter subscription."""
    subscription = processor.create_parameter_subscription(
        ['/YSS/SIMULATOR/BatteryVoltage1'])
    sleep(5)

    print('Adding extra items to the existing subscription...')
    extra_parameters = [
        '/YSS/SIMULATOR/Alpha',
        '/YSS/SIMULATOR/BatteryVoltage2',
        'MDB:OPS Name/SIMULATOR_PrimBusVoltage1',
    ]
    subscription.add(extra_parameters)
    sleep(5)

    print('Shrinking subscription...')
    subscription.remove('/YSS/SIMULATOR/Alpha')

    print('Cancelling the subscription...')
    subscription.cancel()

    print('Last values from cache:')
    for parameter in ('/YSS/SIMULATOR/BatteryVoltage1',
                      '/YSS/SIMULATOR/BatteryVoltage2',
                      '/YSS/SIMULATOR/Alpha',
                      'MDB:OPS Name/SIMULATOR_PrimBusVoltage1'):
        print(subscription.get_value(parameter))
Shows how to interact with a parameter subscription .
256
10
16,110
def setPixelScale(self, pxms):
    """Sets the zoom scale.

    Halves the incoming pixels-per-ms value, adjusts the grid spacing so it
    stays within the configured pixel bounds, and repaints the viewport.
    Returns the (possibly adjusted) grid spacing in ms.
    """
    scale = float(pxms) / 2
    self.pixelsPerms = scale
    grid_pixels = scale * self.gridms
    if grid_pixels < GRID_PIXEL_MIN:
        self.gridms = self.gridms * 2
    elif grid_pixels > GRID_PIXEL_MAX:
        self.gridms = self.gridms / 2
    self._viewIsDirty = True
    self.viewport().update()
    return self.gridms
Sets the zoom scale
118
5
16,111
def indexXY(self, index):
    """Returns the top-left (x, y) coordinates of the item at *index*."""
    top_left = self.visualRect(index)
    return top_left.x(), top_left.y()
Returns the top left coordinates of the item for the given index
30
12
16,112
def mouseDoubleClickEvent(self, event):
    """Launches an editor for the component under the cursor (build mode only)."""
    if self.mode != BuildMode:
        return
    if event.button() == QtCore.Qt.LeftButton:
        self.edit(self.indexAt(event.pos()))
Launches an editor for the component if the mouse cursor is over an item
53
15
16,113
def mousePressEvent(self, event):
    """In Auto-parameter selection mode, a press over an item toggles its
    selection and emits componentSelected; in build mode, defers to the
    default view behavior.
    """
    if self.mode == BuildMode:
        super(StimulusView, self).mousePressEvent(event)
    else:
        # select and de-select components
        index = self.indexAt(event.pos())
        if index.isValid():
            # Toggle membership rather than replacing the selection.
            self.selectionModel().select(index, QtGui.QItemSelectionModel.Toggle)
            comp = self.model().data(index, AbstractDragView.DragRole)
            self.componentSelected.emit(comp)
            self.hintRequested.emit('Click components to toggle more members of auto-parameter\n\n-or-\n\nEdit fields of auto-parameter (parameter type should be selected first)')
In Auto-parameter selection mode, a mouse press over an item emits componentSelected.
163
15
16,114
def visualRegionForSelection(self, selection):
    """Gets the combined screen region of all components in *selection*."""
    combined = QtGui.QRegion()
    for idx in selection.indexes():
        # _rects is indexed [row][column] with the cached item rectangles.
        combined = combined.united(self._rects[idx.row()][idx.column()])
    return combined
Gets the region of all of the components in selection
59
11
16,115
def sizeHint(self, option, index):
    """Size based on the component's duration and a fixed height of 50 px."""
    # calculate size by data component
    component = index.internalPointer()
    # BUG FIX: was `self.component.duration()`; the component comes from the
    # index (local variable above), not from an attribute of the delegate.
    width = component.duration() * self.pixelsPerms * 1000
    return QtCore.QSize(width, 50)
Size based on component duration and a fixed height
55
9
16,116
def get_namespace(taskfileinfo):
    """Return a suitable namespace name for the given taskfileinfo.

    The namespace is the element name suffixed with "_1".
    """
    element_name = taskfileinfo.task.element.name
    return element_name + "_1"
Return a suitable name for a namespace for the taskfileinfo
31
12
16,117
def get_groupname(taskfileinfo):
    """Return a suitable group-node name for the given taskfileinfo.

    The group name is the element name suffixed with "_grp".
    """
    element_name = taskfileinfo.task.element.name
    return element_name + "_grp"
Return a suitable name for a groupname for the given taskfileinfo .
32
15
16,118
def group_content(content, namespace, grpname, grpnodetype):
    """Group *content* under a new node of type *grpnodetype* named *grpname*,
    created inside *namespace*. Returns the new group node.
    """
    with common.preserve_namespace(namespace):
        # Create the group node first, then parent the content beneath it.
        group_node = cmds.createNode(grpnodetype, name=grpname)
        cmds.group(content, uag=group_node)
        return group_node
Group the given content in the given namespace under a node of type grpnodetype with the name grpname
79
24
16,119
def getLabelByName(self, name):
    """Gets a label widget by its component name (case-insensitive).

    Returns None when no label is registered under *name*.
    """
    return self.stimLabels.get(name.lower())
Gets a label widget by it component name
40
9
16,120
def read_byte(self):
    """Read one byte of cooked data (coroutine).

    Pops the first byte off the cooked queue when available; otherwise
    processes (and, unless at EOF, refills) the raw queue and retries
    recursively. Returns b'' when nothing could be read.
    """
    buf = b''
    if len(self.cookedq) > 0:
        # Fast path: consume one byte from the cooked queue.
        buf = bytes([self.cookedq[0]])
        self.cookedq = self.cookedq[1:]
    else:
        yield from self.process_rawq()
        if not self.eof:
            yield from self.fill_rawq()
            yield from self.process_rawq()
            # There now should be data so lets read again
            buf = yield from self.read_byte()
    return buf
Read one byte of cooked data
112
6
16,121
def read_line(self):
    """Read data until a newline is found (coroutine).

    Returns the line without its trailing newline, or b'' when EOF was
    reached before a full line was read.
    """
    line = b''
    while not (self.eof or line.endswith(b'\n')):
        line += yield from self.read_byte()
    if self.eof:
        return b''
    # Strip the newline terminator before returning.
    return line.replace(b'\n', b'')
Read data until \ n is found
77
7
16,122
def getTzid(tzid, smart=True):
    """Return the registered tzinfo for *tzid*, or None.

    With smart=True, unknown ids are looked up via pytz (when installed)
    and registered for future calls.
    """
    tz = __tzidMap.get(toUnicode(tzid), None)
    if smart and tzid and not tz:
        try:
            from pytz import timezone, UnknownTimeZoneError
        except ImportError:
            # pytz not installed; fall through with tz unresolved.
            return tz
        try:
            tz = timezone(tzid)
            registerTzid(toUnicode(tzid), tz)
        except UnknownTimeZoneError:
            pass
    return tz
Return the tzinfo registered for the given tzid if it exists, or None.
106
11
16,123
def dateTimeToString(dateTime, convertToUTC=False):
    """Ignore tzinfo unless convertToUTC. Output an iCalendar date-time string.

    Produces YYYYMMDDTHHMMSS, with a trailing 'Z' when the value is in UTC.
    """
    if dateTime.tzinfo and convertToUTC:
        dateTime = dateTime.astimezone(utc)
    # Use standard zero-padded format specs instead of the hand-rolled
    # numToDigits() helper; datetime fields are always ints.
    datestr = "{:04d}{:02d}{:02d}T{:02d}{:02d}{:02d}".format(
        dateTime.year,
        dateTime.month,
        dateTime.day,
        dateTime.hour,
        dateTime.minute,
        dateTime.second,
    )
    if tzinfo_eq(dateTime.tzinfo, utc):
        datestr += "Z"
    return datestr
Ignore tzinfo unless convertToUTC . Output string .
165
13
16,124
def transformToNative(obj):
    """Turn obj.value into a datetime.

    Marks the object native, parses its value via parseDtstart, and records
    floating-time / original-TZID bookkeeping in obj.params.
    """
    if obj.isNative:
        return obj
    obj.isNative = True
    if obj.value == '':
        return obj
    obj.value = obj.value  # NOTE(review): no-op self-assignment; likely vestigial
    #we're cheating a little here, parseDtstart allows DATE
    obj.value = parseDtstart(obj)
    if obj.value.tzinfo is None:
        # No timezone: flag the value as floating time.
        obj.params['X-VOBJ-FLOATINGTIME-ALLOWED'] = ['TRUE']
    if obj.params.get('TZID'):
        # Keep a copy of the original TZID around
        obj.params['X-VOBJ-ORIGINAL-TZID'] = [obj.params['TZID']]
        del obj.params['TZID']
    return obj
Turn obj . value into a datetime .
174
9
16,125
def transformFromNative(cls, obj):
    """Replace the datetime in obj.value with an ISO 8601 string.

    Serializes the datetime (optionally forced to UTC per cls.forceUTC) and
    restores any TZID parameter that was stashed during transformToNative.
    """
    # print('transforming from native')
    if obj.isNative:
        obj.isNative = False
        tzid = TimezoneComponent.registerTzinfo(obj.value.tzinfo)
        obj.value = dateTimeToString(obj.value, cls.forceUTC)
        if not cls.forceUTC and tzid is not None:
            obj.tzid_param = tzid
    if obj.params.get('X-VOBJ-ORIGINAL-TZID'):
        # Restore the TZID saved by transformToNative, unless one was set above.
        if not hasattr(obj, 'tzid_param'):
            obj.tzid_param = obj.x_vobj_original_tzid_param
        del obj.params['X-VOBJ-ORIGINAL-TZID']
    return obj
Replace the datetime in obj . value with an ISO 8601 string .
180
16
16,126
def get_currencies_info() -> Element:
    """Get META information about currencies from the CBRF API."""
    response = requests.get(const.CBRF_API_URLS['info'])
    return XML(response.text)
Get META information about currencies
40
6
16,127
def get_daily_rates(date_req: datetime.datetime = None, lang: str = 'rus') -> Element:
    """Get currency rates for the given day (defaults to the current day).

    Raises ValueError when *lang* is not 'rus' or 'eng'.
    """
    if lang not in ('rus', 'eng'):
        raise ValueError('"lang" must be string. "rus" or "eng"')
    base_url = const.CBRF_API_URLS['daily_eng' if lang == 'eng' else 'daily_rus']
    url = base_url
    if date_req:
        url += 'date_req=' + utils.date_to_str(date_req)
    response = requests.get(url=url)
    return XML(response.text)
Getting currency for current day .
157
6
16,128
def mangleIR(data, ignore_errors=False):
    """Mangle a raw Kira data packet into long/short ('L'/'S') shorthand.

    Returns None for unparseable packets when ignore_errors is true;
    otherwise the original exception propagates.
    """
    try:
        # Packet mangling algorithm inspired by Rex Becket's kirarx vera plugin
        # Determine a median value for the timing packets and categorize each
        # timing as longer or shorter than that. This will always work for signals
        # that use pulse width modulation (since varying by long-short is basically
        # the definition of what PWM is). By lucky coincidence this also works with
        # the RC-5/RC-6 encodings used by Phillips (manchester encoding)
        # because time variations of opposite-phase/same-phase are either N or 2*N
        if isinstance(data, bytes):
            data = data.decode('ascii')
        data = data.strip()
        times = [int(x, 16) for x in data.split()[2:]]
        minTime = min(times[2:-1])
        maxTime = max(times[2:-1])
        margin = (maxTime - minTime) / 2 + minTime
        return ''.join('S' if x < margin else 'L' for x in times)
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. Probably a mangled packet.
        if not ignore_errors:
            raise
Mangle a raw Kira data packet into shorthand
265
9
16,129
def mangleNec(code, freq=40):
    """Convert a NEC IR code (hex string) to shorthand notation.

    # base time is 550 microseconds (unit of burst time)
    # lead in pattern:    214d 10b3
    # "1" burst pattern:  0226 06AD
    # "0" burst pattern:  0226 0258
    # lead out pattern:   0226 2000
    There's large disagreement between devices as to a common preamble or the
    "long" off period for a binary 1, so we can't construct a transmittable
    code without more information — but it's good enough for a shorthand
    representation for use with recv.
    """
    timings = []
    for octet in binascii.unhexlify(code.replace(" ", "")):
        # BUG FIX (py2 -> py3): iterating bytes yields ints on Python 3, so
        # calling ord() on each element raised TypeError. Accept both.
        value = octet if isinstance(octet, int) else ord(octet)
        # Emit bits LSB-first, mapping 1 -> long burst, 0 -> short burst.
        for bit in reversed(format(value, '08b')):
            timings.append("0226 06AD" if bit == '1' else "0226 0258")
    return mangleIR("K %0X22 214d 10b3 " % freq + " ".join(timings) + " 0226 2000")
Convert NEC code to shorthand notation
258
7
16,130
def get_model_queries(self, query_obj, model_queries_config):
    """Apply each configured (search-term, attribute) filter to *query_obj*.

    Entries with a None search term are skipped. Returns the refined query.
    """
    for search4, model_attrib in model_queries_config:
        if search4 is None:
            continue
        query_obj = self._model_query(query_obj, search4, model_attrib)
    return query_obj
Use this if you are searching for a field in the same model.
72
13
16,131
def _login(session):
    """Login to UPS.

    Fetches the login page to obtain a CSRF token, posts the credentials,
    raises UPSError on failure, and persists the session cookies.
    """
    resp = session.get(LOGIN_URL, params=_get_params(session.auth.locale))
    parsed = BeautifulSoup(resp.text, HTML_PARSER)
    # The CSRF token is embedded in a hidden form field.
    csrf = parsed.find(CSRF_FIND_TAG, CSRF_FIND_ATTR).get(VALUE_ATTR)
    resp = session.post(LOGIN_URL, {
        'userID': session.auth.username,
        'password': session.auth.password,
        'loginAction': 'X',
        'CSRFToken': csrf,
        'loc': session.auth.locale
    })
    if resp.status_code == 403:
        raise UPSError('login failure')
    # Even a 200 response may carry an inline error message.
    parsed = BeautifulSoup(resp.text, HTML_PARSER)
    error = parsed.find(ERROR_FIND_TAG, ERROR_FIND_ATTR)
    if error and error.string:
        raise UPSError(error.string.strip())
    _save_cookies(session.cookies, session.auth.cookie_path)
Login to UPS .
236
4
16,132
def get_packages(session):
    """Get deliveries in progress and completed.

    Scrapes the deliveries page for the service token/tid, queries the
    shipments service, and returns a list of package dicts. Raises UPSError
    when the tokens cannot be found or the JSON payload fails to parse.
    """
    resp = session.get(DELIVERIES_URL, params=_get_params(session.auth.locale))
    parsed = BeautifulSoup(resp.text, HTML_PARSER)
    token_elem = parsed.find(TOKEN_FIND_TAG, TOKEN_FIND_ATTR)
    tid_elem = parsed.find(TID_FIND_TAG, TID_FIND_ATTR)
    if not token_elem or not tid_elem:
        raise UPSError('failed to find token or tid')
    token = token_elem.get(VALUE_ATTR)
    tid = tid_elem.get(VALUE_ATTR)
    resp = session.post(SERVICE_URL, {
        'token': token,
        'uid': session.auth.username,
        'callType': 'allShipments',
        'tid': tid,
        'loc': session.auth.locale
    })
    try:
        packages = []
        # The service prefixes its JSON payload with junk of fixed size.
        data = json.loads(resp.text[UPS_JSON_PREAMBLE_SIZE:])
        shipments = data['shipmentContainer']['inboundShipments'] + data['shipmentContainer']['historyShipments']
        for shipment in shipments:
            # Keys are UPS's abbreviated field names
            # (presumably: sfc/sfs/sfcn = sender city/state/country; verify).
            from_location = '{}, {}, {}'.format(shipment['sfc'], shipment['sfs'], shipment['sfcn'])
            estimated_date = _parsed_date(shipment['sddfd'])
            actual_date = _parsed_date(shipment['dd'])
            packages.append({
                'tracking_number': shipment['tn'],
                'status': shipment['sts'],
                'from': shipment['sfn'],
                'from_location': from_location,
                'estimated_delivery_date': estimated_date,
                'estimated_delivery_timeframe': shipment['sdtfd'],
                'delivery_date': actual_date
            })
        return packages
    except JSONDecodeError:
        raise UPSError('failed to parse json')
Get deliveries in progress and completed .
473
7
16,133
def get_session(username, password, locale=DEFAULT_LOCALE, cookie_path=COOKIE_PATH):
    """Get a UPS HTTP session, reusing saved cookies when available."""
    class UPSAuth(AuthBase):  # pylint: disable=too-few-public-methods
        """UPS authorization storage."""

        def __init__(self, username, password, locale, cookie_path):
            """Init."""
            self.username = username
            self.password = password
            self.locale = locale
            self.cookie_path = cookie_path

        def __call__(self, r):
            """Call is no-op."""
            return r

    session = requests.session()
    session.auth = UPSAuth(username, password, locale, cookie_path)
    if not os.path.exists(cookie_path):
        # No saved cookies yet: perform a fresh login (which saves them).
        _login(session)
    else:
        session.cookies = _load_cookies(cookie_path)
    return session
Get UPS HTTP session .
183
5
16,134
def hide(self, event):
    """Toggles the visibility of the content widget."""
    if not self.content.isHidden():
        # Collapse: hide content and pin the frame to a 30 px strip.
        self.content.hide()
        self.hideBtn.setIcon(self.showIcon)
        self.setFixedHeight(30)
    else:
        # Expand: show content and lift the height cap
        # (16777215 == QWIDGETSIZE_MAX).
        self.content.show()
        self.hideBtn.setIcon(self.hideIcon)
        self.setMaximumHeight(16777215)
Toggles the visibility of the content widget.
81
10
16,135
def _next_rotation_id ( rotated_files ) : if not rotated_files : return 0 else : highest_rotated_file = max ( rotated_files , key = lambda x : x [ 1 ] ) return highest_rotated_file [ 1 ] + 1
Given the hanoi_rotator generated files in the output directory returns the rotation_id that will be given to the current file . If there are no existing rotated files return 0 .
59
38
16,136
def _locate_files_to_delete ( algorithm , rotated_files , next_rotation_id ) : rotation_slot = algorithm . id_to_slot ( next_rotation_id ) for a_path , a_rotation_id in rotated_files : if rotation_slot == algorithm . id_to_slot ( a_rotation_id ) : yield a_path
Looks for hanoi_rotator generated files that occupy the same slot that will be given to rotation_id .
85
24
16,137
def rotate(algorithm, path, ext="", destination_dir=None, verbose=False):
    """Programmatic access to the archive rotator."""
    _move_files(algorithm, Paths(path, ext, destination_dir), verbose)
Programmatic access to the archive rotator
48
8
16,138
def update_or_create_candidate(self, candidate, aggregable=True, uncontested=False):
    """Create or update the CandidateElection tying *candidate* to this election."""
    defaults = {"aggregable": aggregable, "uncontested": uncontested}
    candidate_election, _ = CandidateElection.objects.update_or_create(
        candidate=candidate,
        election=self,
        defaults=defaults,
    )
    return candidate_election
Create or update a CandidateElection.
78
6
16,139
def delete_candidate(self, candidate):
    """Delete the CandidateElection tying *candidate* to this election."""
    matches = CandidateElection.objects.filter(candidate=candidate, election=self)
    matches.delete()
Delete a CandidateElection .
31
6
16,140
def get_candidates(self):
    """Return the candidates of all CandidateElections for this election."""
    return [
        ce.candidate
        for ce in CandidateElection.objects.filter(election=self)
    ]
Get all CandidateElections for this election .
39
9
16,141
def get_candidates_by_party(self):
    """Return this election's candidates keyed by their party slug."""
    by_party = {}
    for ce in CandidateElection.objects.filter(election=self):
        by_party[ce.candidate.party.slug] = ce.candidate
    return by_party
Get CandidateElections serialized into an object with party - slug keys .
51
15
16,142
def get_candidate_election(self, candidate):
    """Get the CandidateElection for *candidate* in this election."""
    return CandidateElection.objects.get(election=self, candidate=candidate)
Get CandidateElection for a Candidate in this election .
30
11
16,143
def get_candidate_votes(self, candidate):
    """Get all votes attached to the CandidateElection for *candidate*."""
    ce = CandidateElection.objects.get(candidate=candidate, election=self)
    return ce.votes.all()
Get all votes attached to a CandidateElection for a Candidate in this election .
43
16
16,144
def get_votes(self):
    """Get all votes for this election, across every CandidateElection.

    Returns None when the election has no CandidateElections.
    """
    votes = None
    for ce in CandidateElection.objects.filter(election=self):
        # BUG FIX: `None | queryset` raised TypeError on the first iteration;
        # seed the union with the first queryset instead.
        votes = ce.votes.all() if votes is None else votes | ce.votes.all()
    return votes
Get all votes for this election .
49
7
16,145
def get_candidate_electoral_votes(self, candidate):
    """Get all electoral votes for *candidate* in this election."""
    ce = CandidateElection.objects.get(candidate=candidate, election=self)
    return ce.electoral_votes.all()
Get all electoral votes for a candidate in this election .
48
11
16,146
def get_electoral_votes(self):
    """Get all electoral votes for all candidates in this election.

    Returns None when the election has no CandidateElections.
    """
    electoral_votes = None
    for ce in CandidateElection.objects.filter(election=self):
        # BUG FIX: `None | queryset` raised TypeError on the first iteration;
        # seed the union with the first queryset instead.
        if electoral_votes is None:
            electoral_votes = ce.electoral_votes.all()
        else:
            electoral_votes = electoral_votes | ce.electoral_votes.all()
    return electoral_votes
Get all electoral votes for all candidates in this election .
62
11
16,147
def get_candidate_delegates(self, candidate):
    """Get all pledged delegates for *candidate* in this election."""
    ce = CandidateElection.objects.get(candidate=candidate, election=self)
    return ce.delegates.all()
Get all pledged delegates for a candidate in this election .
44
11
16,148
def get_delegates(self):
    """Get all pledged delegates for any candidate in this election.

    Returns None when the election has no CandidateElections.
    """
    delegates = None
    for ce in CandidateElection.objects.filter(election=self):
        # BUG FIX: `None | queryset` raised TypeError on the first iteration;
        # seed the union with the first queryset instead.
        delegates = ce.delegates.all() if delegates is None else delegates | ce.delegates.all()
    return delegates
Get all pledged delegates for any candidate in this election .
50
11
16,149
def list_packet_names(self):
    """Returns the existing packet names."""
    # Server does not do pagination on listings of this resource.
    # Return an iterator anyway for similarity with other API methods
    url = '/archive/{}/packet-names'.format(self._instance)
    message = archive_pb2.GetPacketNamesResponse()
    message.ParseFromString(self._client.get_proto(path=url).content)
    return iter(message.name)
Returns the existing packet names .
113
6
16,150
def list_processed_parameter_groups(self):
    """Returns the existing parameter groups."""
    # Server does not do pagination on listings of this resource.
    # Return an iterator anyway for similarity with other API methods
    url = '/archive/{}/parameter-groups'.format(self._instance)
    message = archive_pb2.ParameterGroupInfo()
    message.ParseFromString(self._client.get_proto(path=url).content)
    return iter(message.group)
Returns the existing parameter groups .
115
6
16,151
def list_processed_parameter_group_histogram(self, group=None, start=None, stop=None, merge_time=20):
    """Reads index records for processed parameter groups between *start*
    and *stop*, merging entries closer than *merge_time* seconds.
    """
    query = {}
    if group is not None:
        query['group'] = group
    if start is not None:
        query['start'] = to_isostring(start)
    if stop is not None:
        query['stop'] = to_isostring(stop)
    if merge_time is not None:
        # The API expects milliseconds.
        query['mergeTime'] = int(merge_time * 1000)
    return pagination.Iterator(
        client=self._client,
        path='/archive/{}/parameter-index'.format(self._instance),
        params=query,
        response_class=archive_pb2.IndexResponse,
        items_key='group',
        item_mapper=IndexGroup,
    )
Reads index records related to processed parameter groups between the specified start and stop time .
189
17
16,152
def list_event_sources(self):
    """Returns the existing event sources."""
    # Server does not do pagination on listings of this resource.
    # Return an iterator anyway for similarity with other API methods
    url = '/archive/{}/events/sources'.format(self._instance)
    message = archive_pb2.EventSourceInfo()
    message.ParseFromString(self._client.get_proto(path=url).content)
    return iter(message.source)
Returns the existing event sources .
111
6
16,153
def list_completeness_index(self, start=None, stop=None):
    """Reads completeness index records between *start* and *stop*."""
    query = {}
    for key, value in (('start', start), ('stop', stop)):
        if value is not None:
            query[key] = to_isostring(value)
    return pagination.Iterator(
        client=self._client,
        path='/archive/{}/completeness-index'.format(self._instance),
        params=query,
        response_class=archive_pb2.IndexResponse,
        items_key='group',
        item_mapper=IndexGroup,
    )
Reads completeness index records between the specified start and stop time .
136
14
16,154
def list_packets(self, name=None, start=None, stop=None, page_size=500, descending=False):
    """Reads packet information between *start* and *stop*, paginated."""
    query = {'order': 'desc' if descending else 'asc'}
    if name is not None:
        query['name'] = name
    if page_size is not None:
        query['limit'] = page_size
    if start is not None:
        query['start'] = to_isostring(start)
    if stop is not None:
        query['stop'] = to_isostring(stop)
    return pagination.Iterator(
        client=self._client,
        path='/archive/{}/packets'.format(self._instance),
        params=query,
        response_class=rest_pb2.ListPacketsResponse,
        items_key='packet',
        item_mapper=Packet,
    )
Reads packet information between the specified start and stop time .
193
12
16,155
def list_events(self, source=None, severity=None, text_filter=None, start=None, stop=None, page_size=500, descending=False):
    """Reads events between *start* and *stop*, with optional filters."""
    query = {'order': 'desc' if descending else 'asc'}
    if source is not None:
        query['source'] = source
    if page_size is not None:
        query['limit'] = page_size
    if severity is not None:
        query['severity'] = severity
    if start is not None:
        query['start'] = to_isostring(start)
    if stop is not None:
        query['stop'] = to_isostring(stop)
    if text_filter is not None:
        query['q'] = text_filter
    return pagination.Iterator(
        client=self._client,
        path='/archive/{}/events'.format(self._instance),
        params=query,
        response_class=rest_pb2.ListEventsResponse,
        items_key='event',
        item_mapper=Event,
    )
Reads events between the specified start and stop time .
231
11
16,156
def sample_parameter_values(self, parameter, start=None, stop=None, sample_count=500, parameter_cache='realtime', source='ParameterArchive'):
    """Returns downsampled parameter values.

    Defaults to the last hour; explicit *start*/*stop* override that window.
    """
    now = datetime.utcnow()
    query = {
        'count': sample_count,
        'source': source,
        # Default window: the past hour, overridden below if requested.
        'start': to_isostring(now - timedelta(hours=1)),
        'stop': to_isostring(now),
    }
    if start is not None:
        query['start'] = to_isostring(start)
    if stop is not None:
        query['stop'] = to_isostring(stop)
    if parameter_cache:
        query['processor'] = parameter_cache
    else:
        query['norealtime'] = True
    url = '/archive/{}/parameters{}/samples'.format(self._instance, parameter)
    response = self._client.get_proto(path=url, params=query)
    message = pvalue_pb2.TimeSeries()
    message.ParseFromString(response.content)
    return [Sample(s) for s in message.sample]
Returns parameter samples .
266
4
16,157
def list_parameter_ranges(self, parameter, start=None, stop=None, min_gap=None, max_gap=None, parameter_cache='realtime'):
    """Returns parameter ranges between *start* and *stop*."""
    query = {}
    if start is not None:
        query['start'] = to_isostring(start)
    if stop is not None:
        query['stop'] = to_isostring(stop)
    # Gap parameters are expressed in seconds; the API wants milliseconds.
    if min_gap is not None:
        query['minGap'] = int(min_gap * 1000)
    if max_gap is not None:
        query['maxGap'] = int(max_gap * 1000)
    if parameter_cache:
        query['processor'] = parameter_cache
    else:
        query['norealtime'] = True
    url = '/archive/{}/parameters{}/ranges'.format(self._instance, parameter)
    response = self._client.get_proto(path=url, params=query)
    message = pvalue_pb2.Ranges()
    message.ParseFromString(response.content)
    return [ParameterRange(r) for r in message.range]
Returns parameter ranges between the specified start and stop time .
259
11
16,158
def list_parameter_values(self, parameter, start=None, stop=None, page_size=500, descending=False, parameter_cache='realtime', source='ParameterArchive'):
    """Reads parameter values between *start* and *stop*, paginated."""
    query = {
        'source': source,
        'order': 'desc' if descending else 'asc',
    }
    if page_size is not None:
        query['limit'] = page_size
    if start is not None:
        query['start'] = to_isostring(start)
    if stop is not None:
        query['stop'] = to_isostring(stop)
    if parameter_cache:
        query['processor'] = parameter_cache
    else:
        query['norealtime'] = True
    return pagination.Iterator(
        client=self._client,
        path='/archive/{}/parameters{}'.format(self._instance, parameter),
        params=query,
        response_class=rest_pb2.ListParameterValuesResponse,
        items_key='parameter',
        item_mapper=ParameterValue,
    )
Reads parameter values between the specified start and stop time .
234
12
16,159
def list_command_history(self, command=None, start=None, stop=None, page_size=500, descending=False):
    """Reads command history entries between *start* and *stop*, paginated."""
    query = {'order': 'desc' if descending else 'asc'}
    if page_size is not None:
        query['limit'] = page_size
    if start is not None:
        query['start'] = to_isostring(start)
    if stop is not None:
        query['stop'] = to_isostring(stop)
    # A specific command narrows the endpoint; otherwise list everything.
    if command:
        url = '/archive/{}/commands{}'.format(self._instance, command)
    else:
        url = '/archive/{}/commands'.format(self._instance)
    return pagination.Iterator(
        client=self._client,
        path=url,
        params=query,
        response_class=rest_pb2.ListCommandsResponse,
        items_key='entry',
        item_mapper=CommandHistory,
    )
Reads command history entries between the specified start and stop time .
210
13
16,160
def list_tables(self):
    """Returns the existing tables."""
    # Server does not do pagination on listings of this resource.
    # Return an iterator anyway for similarity with other API methods
    url = '/archive/{}/tables'.format(self._instance)
    message = rest_pb2.ListTablesResponse()
    message.ParseFromString(self._client.get_proto(path=url).content)
    return iter(Table(table) for table in message.table)
Returns the existing tables .
117
5
16,161
def get_table(self, table):
    """Gets a single table by name."""
    url = '/archive/{}/tables/{}'.format(self._instance, table)
    message = archive_pb2.TableInfo()
    message.ParseFromString(self._client.get_proto(path=url).content)
    return Table(message)
Gets a single table .
77
6
16,162
def list_streams(self):
    """Returns the existing streams."""
    # Server does not do pagination on listings of this resource.
    # Return an iterator anyway for similarity with other API methods
    url = '/archive/{}/streams'.format(self._instance)
    message = rest_pb2.ListStreamsResponse()
    message.ParseFromString(self._client.get_proto(path=url).content)
    return iter(Stream(stream) for stream in message.stream)
Returns the existing streams .
117
5
16,163
def get_stream(self, stream):
    """Gets a single stream by name."""
    url = '/archive/{}/streams/{}'.format(self._instance, stream)
    message = archive_pb2.StreamInfo()
    message.ParseFromString(self._client.get_proto(path=url).content)
    return Stream(message)
Gets a single stream .
77
6
16,164
def create_stream_subscription(self, stream, on_data, timeout=60):
    """Open a websocket subscription on a stream.

    :param stream: name of the stream to subscribe to.
    :param on_data: callback invoked for each parsed stream-data message.
    :param timeout: seconds to wait for the server's subscription reply.
    :return: a ``WebSocketSubscriptionFuture`` representing the live subscription.
    :raises: whatever ``subscription.reply`` raises if the server rejects
        the subscription or the timeout elapses.
    """
    options = rest_pb2.StreamSubscribeRequest()
    options.stream = stream
    manager = WebSocketSubscriptionManager(self._client, resource='stream', options=options)
    # Represent subscription as a future
    subscription = WebSocketSubscriptionFuture(manager)
    # Bind the future so the wrapper can parse raw frames before invoking on_data.
    wrapped_callback = functools.partial(_wrap_callback_parse_stream_data, subscription, on_data)
    manager.open(wrapped_callback, instance=self._instance)
    # Wait until a reply or exception is received
    subscription.reply(timeout=timeout)
    return subscription
Create a new stream subscription .
138
6
16,165
def execute_sql(self, statement):
    """Execute a single SQL statement against the archive.

    Returns the server's result message, or None when the response
    carries no result.
    """
    request = archive_pb2.ExecuteSqlRequest()
    request.statement = statement
    url = '/archive/{}/sql'.format(self._instance)
    response = self._client.post_proto(path=url, data=request.SerializeToString())
    reply = archive_pb2.ExecuteSqlResponse()
    reply.ParseFromString(response.content)
    return reply.result if reply.HasField('result') else None
Executes a single SQL statement .
116
7
16,166
def copy_file(source, destination, unique=False, sort=False, case_sensitive=True, create_path=False):
    """Copy a file from *source* to *destination* by delegating to ``_File.copy``.

    All options are passed through positionally; see ``_File.copy`` for
    their exact semantics (not visible from here).
    """
    _File.copy(source, destination, unique, sort, case_sensitive, create_path)
Python utility to copy a file
51
5
16,167
def get_details(self):
    """Collect the song's basic tags as whitespace-stripped strings.

    Returns a dict with keys ``title``, ``artist``, ``album`` and ``year``.
    """
    raw = {
        "title": self.get_title(),
        "artist": self.get_artist(),
        "album": self.get_album(),
        "year": self.get_year(),
    }
    return {key: str(value).strip() for key, value in raw.items()}
Finds song's details
98
4
16,168
def _set_attr(self, attribute):
    """Add *attribute* (a tag frame) to the song's tag set, then persist the song."""
    self.tags.add(attribute)
    # Save immediately so each attribute change is written through.
    self.song.save()
Sets attribute of song
26
5
16,169
def set_title(self, name):
    """Set the song's title via a TIT2 frame (UTF-8 text, encoding 3)."""
    frame = TIT2(encoding=3, text=name.decode('utf-8'))
    self._set_attr(frame)
Sets song's title
38
5
16,170
def set_artist(self, artist):
    """Set the song's artist via a TPE1 frame (UTF-8 text, encoding 3)."""
    frame = TPE1(encoding=3, text=artist.decode('utf-8'))
    self._set_attr(frame)
Sets song's artist
39
5
16,171
def set_album(self, album):
    """Set the song's album via a TALB frame (UTF-8 text, encoding 3)."""
    frame = TALB(encoding=3, text=album.decode('utf-8'))
    self._set_attr(frame)
Sets song's album
39
5
16,172
def set_nr_track(self, nr_track):
    """Set the song's track number via a TRCK frame."""
    frame = TRCK(encoding=3, text=str(nr_track))
    self._set_attr(frame)
Sets song's track number
41
6
16,173
def set_year(self, year):
    """Set the song's year via a TDRC frame."""
    frame = TDRC(encoding=3, text=str(year))
    self._set_attr(frame)
Sets song's year
32
5
16,174
def set_genre(self, genre):
    """Set the song's genre via a TCON frame."""
    frame = TCON(encoding=3, text=str(genre))
    self._set_attr(frame)
Sets song's genre
32
5
16,175
def updateTraceCount(self):
    """Refresh the trace-count label from the current track model."""
    count = self.ui.trackview.model().traceCount()
    self.ui.ntracesLbl.setNum(count)
Updates the trace count label with the data from the model
40
12
16,176
def preview(self):
    """Assemble the current stimulus components and show a spectrogram preview.

    If the model fails verification, a warning dialog is shown and no
    preview window is opened.
    """
    msg = self.ui.trackview.model().verify()
    if msg:
        # NOTE(review): the dialog's return value is never used.
        answer = QtGui.QMessageBox.warning(self, "Bummer", 'Problem: {}.'.format(msg))
        return
    stim_signal, atten, ovld = self.ui.trackview.model().signal()
    fig = SpecWidget()
    fig.setWindowModality(2)  # application modal
    fig.updateData(stim_signal, self.ui.trackview.model().samplerate())
    fig.setTitle('Stimulus Preview')
    fig.show()
    # Keep a reference so the window isn't garbage-collected while shown.
    self.previewFig = fig
Assemble the current components in the QStimulusModel and generate a spectrogram plot in a separate window
152
22
16,177
def assertpathsandfiles(self):
    """Validate the MiSeq path/folder arguments and locate the SampleSheet.csv.

    Side effects: sets ``self.miseqfolder``, ``self.miseqfoldername`` and
    ``self.samplesheet``.

    NOTE(review): validation is done with ``assert``, which is stripped
    under ``python -O``; the path concatenations below also assume
    ``self.miseqpath`` ends with a trailing slash -- confirm with callers.
    """
    # Assertion to ensure that the MiSeq path exists
    assert os.path.isdir(self.miseqpath), u'MiSeqPath is not a valid directory {0!r:s}'.format(self.miseqpath)
    # If the miseq folder name is not provided, the default of the most recent run will be used
    if not self.miseqfolder:
        # Get a list of folders
        miseqfolders = glob('{}*/'.format(self.miseqpath))
        # "Most recent" here means lexicographically last folder name.
        self.miseqfolder = sorted(miseqfolders)[-1]
        # Create :miseqfoldername to store the name of this folder by splitting the path and taking the second
        # last piece (it's not the last piece because the folder has a trailing slash)
        self.miseqfoldername = self.miseqfolder.split("/")[-2]
    # Otherwise add the folder to the miseq path to yield the destination folder
    else:
        # Set the folder name before adding the path to the miseq path
        self.miseqfoldername = self.miseqfolder
        self.miseqfolder = self.miseqpath + self.miseqfolder + "/"
    # Assert to ensure that the folder exists
    assert os.path.isdir(self.miseqfolder), u'MiSeqFolder is not a valid directory {0!r:s}'.format(self.miseqfolder)
    # Pull the data from the SampleSheet.csv
    if self.customsamplesheet:
        self.samplesheet = self.customsamplesheet
        assert os.path.isfile(self.customsamplesheet), u'Could not find CustomSampleSheet as entered: {0!r:s}'.format(self.customsamplesheet)
    # Otherwise use the SampleSheet.csv located in :self.miseqfolder
    else:
        self.samplesheet = self.miseqfolder + "SampleSheet.csv"
Assertions to make sure that arguments are at least mostly valid
447
13
16,178
def numberofsamples(self):
    """Count the samples listed in the sample sheet, then start the fastq linker.

    ``self.samplecount`` is set to the number of lines after the
    ``Sample_ID`` header row (last line number minus the header's line
    number).

    NOTE(review): the file is opened in binary ('rb') yet compared against
    the str "Sample_ID" -- this only matches on Python 2; under Python 3
    the membership test would need bytes. Confirm the target interpreter.
    """
    # Initialise variables to store line data
    idline = 0
    linenumber = 0
    # Parse the sample sheet to find the number of samples
    with open(self.samplesheet, "rb") as ssheet:
        # Use enumerate to iterate through the lines in the sample sheet to retrieve the line number and the data
        for linenumber, entry in enumerate(ssheet):
            # Once Sample_ID is encountered
            if "Sample_ID" in entry:
                # Set the id line as the current line number
                idline = linenumber
    # :samplecount is the last line number in the file minus the line number of Sample_ID
    self.samplecount = linenumber - idline
    printtime('There are {} samples in this run. '
              'Running off-hours module with the following parameters:\n'
              'MiSeqPath: {},\n'
              'MiSeqFolder: {},\n'
              'SampleSheet: {}'.format(self.samplecount, self.miseqpath, self.miseqfolder,
                                       self.samplesheet), self.start)
    # Run the fastqmover module now that the number of sequences is known
    self.fastqlinker()
Count the number of samples in the samplesheet
260
9
16,179
def print_packet_range():
    """Print the first and last archived packets and the timespan between them."""
    oldest = next(iter(archive.list_packets()))
    newest = next(iter(archive.list_packets(descending=True)))
    print('First packet:', oldest)
    print('Last packet:', newest)
    span = newest.generation_time - oldest.generation_time
    print('Timespan:', span)
Print the range of archived packets .
103
7
16,180
def iterate_specific_packet_range():
    """Count and report the packets archived during the last hour."""
    now = datetime.utcnow()
    window_start = now - timedelta(hours=1)
    total = sum(1 for _ in archive.list_packets(start=window_start, stop=now))
    print('Found', total, 'packets in range')
Count the number of packets in a specific range .
80
10
16,181
def iterate_specific_event_range():
    """Count and report the events archived during the last hour."""
    now = datetime.utcnow()
    window_start = now - timedelta(hours=1)
    total = sum(1 for _ in archive.list_events(start=window_start, stop=now))
    print('Found', total, 'events in range')
Count the number of events in a specific range .
76
10
16,182
def print_last_values():
    """Print the 10 most recent values of BatteryVoltage1."""
    recent = archive.list_parameter_values('/YSS/SIMULATOR/BatteryVoltage1',
                                           descending=True)
    for pval in islice(recent, 10):
        print(pval)
Print the last 10 values .
61
6
16,183
def iterate_specific_parameter_range():
    """Count and report BatteryVoltage1 values archived during the last hour."""
    now = datetime.utcnow()
    window_start = now - timedelta(hours=1)
    values = archive.list_parameter_values('/YSS/SIMULATOR/BatteryVoltage1',
                                           start=window_start, stop=now)
    total = sum(1 for _ in values)
    print('Found', total, 'parameter values in range')
Count the number of parameter values in a specific range .
99
11
16,184
def print_last_commands():
    """Print the 10 most recent command history entries."""
    history = archive.list_command_history(descending=True)
    for entry in islice(history, 10):
        print(entry)
Print the last 10 commands .
44
6
16,185
def transmogrify(l):
    """Fit a flat list into a nested one-key-per-level dict ("tree").

    Example: ``transmogrify(['a', 'b'])`` -> ``{'a': {'b': {}}}``.

    Fix: the original indexed ``l[0]`` before the loop and therefore
    raised IndexError on an empty list (its initial entry was also
    immediately overwritten by the first loop iteration). An empty list
    now yields ``{}``; non-empty inputs produce the same tree as before.

    :param l: flat sequence of keys, outermost first.
    :return: nested dict, one key per level, innermost value ``{}``.
    """
    root = {}
    node = root
    for key in l:
        # Descend one level, creating the child dict on first sight.
        node = node.setdefault(key, {})
    return root
Fit a flat list into a treeable object .
43
10
16,186
def tree(node, formatter=None, prefix=None, postfix=None, _depth=1):
    """Yield the lines of a box-drawing tree rendering of nested dict *node*.

    :param node: nested dict to render (as built by e.g. ``transmogrify``).
    :param formatter: optional callable mapping a key to its display text.
    :param prefix: optional callable mapping a key to text placed before it.
    :param postfix: optional callable mapping a key to text placed after it.
    :param _depth: internal recursion depth counter; not otherwise used.

    NOTE(review): ``iteritems`` and the UTF-8 byte-escaped joint glyphs
    below are Python 2 idioms -- this function presumably targets Py2;
    confirm before porting.
    """
    current = 0
    length = len(node.keys())
    # UTF-8 byte sequences for the box-drawing joints "├──" and "└──".
    tee_joint = '\xe2\x94\x9c\xe2\x94\x80\xe2\x94\x80'
    elbow_joint = '\xe2\x94\x94\xe2\x94\x80\xe2\x94\x80'
    for key, value in node.iteritems():
        current += 1
        k = formatter(key) if formatter else key
        pre = prefix(key) if prefix else ''
        post = postfix(key) if postfix else ''
        # The last sibling gets an elbow; all others get a tee.
        space = elbow_joint if current == length else tee_joint
        yield ' {space} {prefix}{key}{postfix}'.format(space=space, key=k, prefix=pre, postfix=post)
        if value:
            for e in tree(value, formatter=formatter, prefix=prefix, postfix=postfix, _depth=_depth + 1):
                # Continue the vertical rail unless this subtree hangs off the last sibling.
                yield (' |  ' if current != length else '   ') + e
Print a tree .
255
4
16,187
def assemble_chain(leaf, store):
    """Assemble the trust chain for *leaf*, returned root-first.

    Walks issuer links through *store* until a self-signed certificate is
    reached. If an issuer is missing from *store*, a placeholder
    certificate carrying the missing subject is appended instead.
    """
    by_cn = {cert.get_subject().CN: cert for cert in store}
    chain = [leaf]
    current = leaf
    try:
        # Self-signed (subject == issuer) terminates the walk.
        while current.get_issuer().CN != current.get_subject().CN:
            current = by_cn[current.get_issuer().CN]
            chain.append(current)
    except KeyError:
        # Issuer not in the store: append a stub bearing its subject.
        placeholder = crypto.X509()
        patch_certificate(placeholder)
        placeholder.set_subject(current.get_issuer())
        chain.append(placeholder)
    chain.reverse()
    return chain
Assemble the trust chain .
153
6
16,188
def _get_api_content(self):
    """Call the GitHub API (with token auth when configured) and cache the parsed JSON."""
    if GITHUB_TOKEN is not None:
        self.add_params_to_url({"access_token": GITHUB_TOKEN})
    response = requests.get(self.api_url)
    self.api_content = json.loads(response.text)
Updates class api content by calling Github api and storing result
81
12
16,189
def get_trending_daily(lang=""):
    """Scrape GitHub's daily trending page, optionally filtered by language.

    Returns a list of ``GithubUserRepository`` (owner, name) entries.
    """
    url = "https://github.com/trending/"
    url += str(lang).lower().replace(" ", "") + "?since=daily"
    request = urllib.request.Request(url)
    page = urllib.request.urlopen(request).read().decode("utf-8")
    soup = BeautifulSoup(page, "lxml")  # html parser
    entries = soup.find("ol", {"class": "repo-list"}).find_all("li")
    repos = []
    for entry in entries:
        # Each entry's first div holds an "<owner> / <name>" anchor.
        details = entry.find_all("div")[0].a.text.split("/")
        repos.append(GithubUserRepository(details[0].strip(), details[1].strip()))
    return repos
Fetches repos in Trending Daily Github section
254
11
16,190
def _get_repos(url):
    """Page through *url* collecting every repository until an empty page is returned."""
    repos = []
    page = 1
    while True:
        driver = GithubRawApi(url, url_params={"page": page},
                              get_api_content_now=True)  # driver to parse API content
        for raw in driver.api_content:  # list of raw repository
            repos.append(GithubUserRepository(raw["owner"]["login"], raw["name"]))
        # An empty page means there is nothing left to fetch.
        if not driver.api_content:
            break
        page += 1
    return repos
Gets repos in url
171
6
16,191
def _dict_of_vars_to_vcf_file(variants, outfile):
    """Write a nested variants dict as a bare-bones VCF file.

    *variants* has the shape ``{ref_name: {pos: {ref_string: set(alts)}}}``
    (as produced by ``vcf_file_read.vcf_file_to_dict_of_vars``). Positions
    are 0-based in the dict and written 1-based, as VCF requires.
    """
    header_lines = [
        '##fileformat=VCFv4.2',
        '##source=cluster_vcf_records, version ' + cluster_vcf_records_version,
        '##fileDate=' + str(datetime.date.today()),
        '\t'.join(['#CHROM', 'POS', 'ID', 'REF', 'ALT', 'QUAL', 'FILTER', 'INFO']),
    ]
    with open(outfile, 'w') as f:
        print(*header_lines, sep='\n', file=f)
        for ref_name in sorted(variants):
            positions = variants[ref_name]
            for pos in sorted(positions):
                for ref_string in sorted(positions[pos]):
                    alts = ','.join(sorted(positions[pos][ref_string]))
                    fields = [ref_name, pos + 1, '.', ref_string, alts,
                              '.', 'PASS', 'SVTYPE=MERGED']
                    print(*fields, sep='\t', file=f)
Input is dict made by vcf_file_read . vcf_file_to_dict_of_vars or vcf_file_read . vcf_file_to_dict_of_vars . Output is bare - bones VCF file ( columns empty wherever possible
279
60
16,192
def collect_commands(package_name=None, in_place=False, level=1):
    """Collect commands from a package and its subpackages.

    :param package_name: dotted package to scan; when None, the package
        containing the *caller's* module is scanned (determined via the
        call stack).
    :param in_place: when True, also inject the found commands into the
        caller's globals.
    :param level: stack depth of the caller relative to this function --
        only relevant when wrapping this function; the default assumes a
        direct call.
    :return: OrderedDict of commands sorted by name.
    """
    commands = {}
    # Grab the caller's frame so its globals can serve as default package
    # and as the in_place injection target.
    frame = inspect.stack()[level][0]
    f_globals = frame.f_globals
    if package_name is None:
        # Collect from package containing module of call site
        package_name = f_globals['__name__'].rsplit('.', 1)[0]
        package_paths = [os.path.dirname(f_globals['__file__'])]
    else:
        # Collect from named package
        package = importlib.import_module(package_name)
        package_name = package.__name__
        package_paths = package.__path__
    for package_path in package_paths:
        package_path = pathlib.Path(package_path)
        for file in package_path.rglob('*.py'):
            rel_path = str(file.relative_to(package_path))
            # Strip the '.py' suffix before converting the path to a module name.
            rel_path = rel_path[:-3]
            module_name = rel_path.replace(os.sep, '.')
            module_name = '.'.join((package_name, module_name))
            module = importlib.import_module(module_name)
            module_commands = get_commands_in_namespace(module)
            commands.update(module_commands)
    commands = OrderedDict((name, commands[name]) for name in sorted(commands))
    if in_place:
        f_globals.update(commands)
    return commands
Collect commands from package and its subpackages .
347
9
16,193
def get_commands_in_namespace(namespace=None, level=1):
    """Collect ``Command`` instances from a namespace, sorted by name.

    :param namespace: a mapping or a module; when None, the caller's
        globals (located via the call stack at *level*) are scanned.
    :param level: stack depth of the caller relative to this function.
    :return: OrderedDict of commands keyed and ordered by name.
    """
    from ..command import Command  # noqa: Avoid circular import
    if namespace is None:
        caller = inspect.stack()[level][0]
        namespace = caller.f_globals
    elif inspect.ismodule(namespace):
        namespace = vars(namespace)
    found = {name: obj for name, obj in namespace.items() if isinstance(obj, Command)}
    return OrderedDict((name, found[name]) for name in sorted(found))
Get commands in namespace .
129
5
16,194
def selectedIndexes(self):
    """Return the list of QModelIndex for the currently selected components.

    Components that no longer exist in the model are pruned from
    ``self._selectedComponents`` as a side effect.

    Fix: the original removed elements from ``self._selectedComponents``
    while iterating over that same list, which skips the element following
    each removal -- consecutive stale components were only partially
    pruned. Iterating over a snapshot removes every stale component.
    """
    model = self.model()
    indexes = []
    # Iterate a copy so removal from the live list is safe.
    for comp in list(self._selectedComponents):
        index = model.indexByComponent(comp)
        if index is None:
            # must have been removed from model, discard
            self._selectedComponents.remove(comp)
        else:
            indexes.append(index)
    return indexes
Returns a list of QModelIndex currently in the model
74
11
16,195
def selection(self):
    """Return the current selection as a QItemSelection object."""
    item_selection = QtGui.QItemSelection()
    for index in self.selectedIndexes():
        # Single-index ranges: select from the index to itself.
        item_selection.select(index, index)
    return item_selection
Returns items in selection as a QItemSelection object
42
11
16,196
def selectionComponents(self):
    """Return the selected components that are still present in the model."""
    model = self.model()
    return [comp for comp in self._selectedComponents
            if model.indexByComponent(comp) is not None]
Returns the names of the component types in this selection
56
10
16,197
def expose_content(self):
    """Return all current content of this page joined into one string.

    Iterates every language and every placeholder content type of the
    page's template, joining the non-empty pieces with CRLF.
    """
    placeholders = get_placeholders(self.get_template())
    content_types = [p.name for p in placeholders]
    pieces = []
    for lang in self.get_languages():
        for ctype in content_types:
            content = self.get_content(lang, ctype, False)
            if content:
                pieces.append(content)
    return u"\r\n".join(pieces)
Return all the current content of this page into a string .
98
12
16,198
def read_http_header(sock):
    """Read an HTTP header from *sock*; return (header, leftover_data).

    Receives until the blank-line terminator '\\r\\n\\r\\n' appears, then
    returns the header text (terminator excluded) and any bytes already
    read past it.

    Fix: the original looped forever when the peer closed the connection
    before the terminator arrived -- recv() returning empty just appended
    empty strings. A closed connection now raises EOFError.

    Note: ``bufsize`` is a module-level constant defined elsewhere in the
    file.
    """
    terminator = '\r\n\r\n'
    chunks = []
    while True:
        chunk = sock.recv(bufsize).decode('utf-8')
        if not chunk:
            raise EOFError('connection closed before end of HTTP header')
        chunks.append(chunk)
        data = ''.join(chunks)
        end = data.find(terminator)
        if end != -1:
            return data[:end], data[end + len(terminator):]
Read HTTP header from socket return header and rest of data .
100
12
16,199
def connect(url):
    """Connect to a UNIX ('ipc://path') or TCP ('tcp://host:port') socket.

    :param url: socket URL; scheme selects the socket family.
    :return: ``(sock, hostname)`` -- the connected socket and the local
        hostname ('localhost' for UNIX sockets).
    :raises ValueError: for any other URL scheme.

    Fixes:
    - the TCP port is now converted to int -- ``connect()`` rejects a
      string port (the original passed the rsplit result unconverted);
    - the local hostname is obtained via ``gethostname()`` -- ``socket``
      is the *class* in this module (it is called as ``socket()``), and
      the class has no ``gethostname`` attribute.
    """
    from socket import gethostname  # local import: module-level imports bind only the class
    parsed = urlparse(url)
    if parsed.scheme == 'tcp':
        sock = socket()
        host, _, port = parsed.netloc.rpartition(':')
        address = (host, int(port))
        hostname = gethostname()
    elif parsed.scheme == 'ipc':
        sock = socket(AF_UNIX)
        address = parsed.path
        hostname = 'localhost'
    else:
        raise ValueError('unknown socket type: %s' % parsed.scheme)
    sock.connect(address)
    return sock, hostname
Connect to UNIX or TCP socket .
120
8