idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
8,300
def _title_similarity_score ( full_text , title ) : sentences = sentence_tokenizer ( full_text ) norm = _normalize ( [ title ] + sentences ) similarity_matrix = pairwise_kernels ( norm , metric = 'cosine' ) return sorted ( zip ( similarity_matrix [ 0 , 1 : ] , range ( len ( similarity_matrix ) ) , sentences ) , key = l...
Similarity scores for sentences with title in descending order
8,301
def _aggregrate_scores ( its , tss , num_sentences ) : final = [ ] for i , el in enumerate ( its ) : for j , le in enumerate ( tss ) : if el [ 2 ] == le [ 2 ] : assert el [ 1 ] == le [ 1 ] final . append ( ( el [ 1 ] , i + j , el [ 2 ] ) ) _final = sorted ( final , key = lambda tup : tup [ 1 ] ) [ : num_sentences ] ret...
rerank the two vectors by min aggregrate rank reorder
8,302
def _eval_meta_as_summary ( meta ) : if meta == '' : return False if len ( meta ) > 500 : return False if 'login' in meta . lower ( ) : return False return True
some crude heuristics for now most are implemented on bot - side with domain whitelists
8,303
def get_subscriptions ( self ) : subscriptions = [ ] for uri in self . get_http_client ( ) . get ( "subscriptions" ) . get ( 'subscriptions' ) : subscriptions . append ( self . get_subscription ( uri . split ( "/" ) [ - 1 ] ) ) return subscriptions
Return a list of subscriptions currently active for this WVA device
8,304
def get_event_stream ( self ) : if self . _event_stream is None : self . _event_stream = WVAEventStream ( self . _http_client ) return self . _event_stream
Get the event stream associated with this WVA
8,305
def _populateHistogram ( self ) : try : buildHistogram . populate1DHist ( self . _data , self . histogram , self . minValue , self . maxValue , self . binWidth ) except : if ( ( self . _data . max ( ) - self . _data . min ( ) ) < self . binWidth ) : raise ValueError ( "In histogram1d class, the binWidth is " "greater t...
Call the C - code that actually populates the histogram
8,306
def getCenters ( self ) : return np . arange ( self . histogram . size ) * self . binWidth + self . minValue
Returns histogram s centers .
8,307
def book_reservation ( self , sessionid , roomid , start , end ) : duration = int ( ( end - start ) . seconds / 60 ) format = "%Y-%m-%dT%H:%M:%S-{}" . format ( self . get_dst_gmt_timezone ( ) ) booking_url = "{}/reserve/{}/{}/?d={}" . format ( BASE_URL , roomid , start . strftime ( format ) , duration ) resp = requests...
Book a reservation given the session id the room id as an integer and the start and end time as datetimes .
8,308
def delete_booking ( self , sessionid , booking_id ) : url = "{}{}{}/" . format ( BASE_URL , "/delete/" , booking_id ) cookies = dict ( sessionid = sessionid ) try : resp = requests . get ( url , cookies = cookies , headers = { 'Referer' : '{}{}' . format ( BASE_URL , "/reservations/" ) } ) except resp . exceptions . H...
Deletes a Wharton GSR Booking for a given booking and session id .
8,309
def get_wharton_gsrs ( self , sessionid , date = None ) : if date : date += " {}" . format ( self . get_dst_gmt_timezone ( ) ) else : date = datetime . datetime . utcnow ( ) . strftime ( "%Y-%m-%d %H:%S" ) resp = requests . get ( 'https://apps.wharton.upenn.edu/gsr/api/app/grid_view/' , params = { 'search_time' : date ...
Make a request to retrieve Wharton GSR listings .
8,310
def switch_format ( self , gsr ) : if "error" in gsr : return gsr categories = { "cid" : 1 , "name" : "Huntsman Hall" , "rooms" : [ ] } for time in gsr [ "times" ] : for entry in time : entry [ "name" ] = entry [ "room_number" ] del entry [ "room_number" ] start_time_str = entry [ "start_time" ] end_time = datetime . d...
Convert the Wharton GSR format into the studyspaces API format .
8,311
def get_wharton_gsrs_formatted ( self , sessionid , date = None ) : gsrs = self . get_wharton_gsrs ( sessionid , date ) return self . switch_format ( gsrs )
Return the wharton GSR listing formatted in studyspaces format .
8,312
def get_options ( ) : options = collections . defaultdict ( list ) for opt_class in config_factory . get_options ( ) : if not issubclass ( opt_class , config_base . Options ) : continue config_options = opt_class ( None ) options [ config_options . group_name ] . extend ( config_options . list ( ) ) return [ ( key , va...
Collect all the options info from the other modules .
8,313
def check_is_working ( self ) : try : r = requests . post ( "http://{}/" . format ( LAUNDRY_DOMAIN ) , timeout = 60 , data = { "locationid" : "5faec7e9-a4aa-47c2-a514-950c03fac460" , "email" : "pennappslabs@gmail.com" , "washers" : 0 , "dryers" : 0 , "locationalert" : "OK" } ) r . raise_for_status ( ) return "The trans...
Returns True if the wash alert web interface seems to be working properly or False otherwise .
8,314
def machine_usage ( self , hall_no ) : try : num = int ( hall_no ) except ValueError : raise ValueError ( "Room Number must be integer" ) r = requests . get ( USAGE_BASE_URL + str ( num ) , timeout = 60 ) parsed = BeautifulSoup ( r . text , 'html5lib' ) usage_table = parsed . find_all ( 'table' , width = '504px' ) [ 0 ...
Returns the average usage of laundry machines every hour for a given hall .
8,315
def create_message ( from_addr , to_addr , subject , body , encoding = None ) : if encoding == "None" : encoding = None if not encoding : encoding = 'utf-8' msg = MIMEText ( body . encode ( encoding ) , 'plain' , encoding ) msg [ 'Subject' ] = Header ( subject . encode ( encoding ) , encoding ) msg [ 'From' ] = from_ad...
Create message object for sending email
8,316
def _obtain_token ( self ) : if self . expiration and self . expiration > datetime . datetime . now ( ) : return resp = requests . post ( "{}/1.1/oauth/token" . format ( API_URL ) , data = { "client_id" : self . client_id , "client_secret" : self . client_secret , "grant_type" : "client_credentials" } ) . json ( ) if "...
Obtain an auth token from client id and client secret .
8,317
def _request ( self , * args , ** kwargs ) : if not self . token : self . _obtain_token ( ) headers = { "Authorization" : "Bearer {}" . format ( self . token ) } if "headers" in kwargs : kwargs [ "headers" ] . update ( headers ) else : kwargs [ "headers" ] = headers args = list ( args ) if not args [ 1 ] . startswith (...
Make a signed request to the libcal API .
8,318
def get_rooms ( self , lid , start = None , end = None ) : range_str = "availability" if start : start_datetime = datetime . datetime . combine ( datetime . datetime . strptime ( start , "%Y-%m-%d" ) . date ( ) , datetime . datetime . min . time ( ) ) range_str += "=" + start if end and not start == end : range_str += ...
Returns a list of rooms and their availabilities grouped by category .
8,319
def book_room ( self , item , start , end , fname , lname , email , nickname , custom = { } , test = False ) : data = { "start" : start , "fname" : fname , "lname" : lname , "email" : email , "nickname" : nickname , "bookings" : [ { "id" : item , "to" : end } ] , "test" : test } data . update ( custom ) resp = self . _...
Books a room given the required information .
8,320
def cancel_room ( self , booking_id ) : resp = self . _request ( "POST" , "/1.1/space/cancel/{}" . format ( booking_id ) ) return resp . json ( )
Cancel a room given a booking id .
8,321
def get_reservations ( self , email , date , timeout = None ) : try : resp = self . _request ( "GET" , "/1.1/space/bookings?email={}&date={}&limit=100" . format ( email , date ) , timeout = timeout ) except resp . exceptions . HTTPError as error : raise APIError ( "Server Error: {}" . format ( error ) ) except requests...
Gets reservations for a given email .
8,322
def get_reservations_for_booking_ids ( self , booking_ids ) : try : resp = self . _request ( "GET" , "/1.1/space/booking/{}" . format ( booking_ids ) ) except resp . exceptions . HTTPError as error : raise APIError ( "Server Error: {}" . format ( error ) ) return resp . json ( )
Gets booking information for a given list of booking ids .
8,323
def get_room_info ( self , room_ids ) : try : resp = self . _request ( "GET" , "/1.1/space/item/{}" . format ( room_ids ) ) rooms = resp . json ( ) for room in rooms : if not room [ "image" ] . startswith ( "http" ) : room [ "image" ] = "https:" + room [ "image" ] if "description" in room : description = room [ "descri...
Gets room information for a given list of ids .
8,324
def reconstruct_ancestral_states ( tree , character , states , prediction_method = MPPA , model = F81 , params = None , avg_br_len = None , num_nodes = None , num_tips = None , force_joint = True ) : logging . getLogger ( 'pastml' ) . debug ( 'ACR settings for {}:\n\tMethod:\t{}{}.' . format ( character , prediction_me...
Reconstructs ancestral states for the given character on the given tree .
8,325
def acr ( tree , df , prediction_method = MPPA , model = F81 , column2parameters = None , force_joint = True ) : for c in df . columns : df [ c ] = df [ c ] . apply ( lambda _ : '' if pd . isna ( _ ) else _ . encode ( 'ASCII' , 'replace' ) . decode ( ) ) columns = preannotate_tree ( df , tree ) name_tree ( tree ) colla...
Reconstructs ancestral states for the given tree and all the characters specified as columns of the given annotation dataframe .
8,326
def compute_correction_factors ( data , true_conductivity , elem_file , elec_file ) : settings = { 'rho' : 100 , 'pha' : 0 , 'elem' : 'elem.dat' , 'elec' : 'elec.dat' , '2D' : True , 'sink_node' : 100 , } K = geometric_factors . compute_K_numerical ( data , settings = settings ) data = geometric_factors . apply_K ( dat...
Compute correction factors for 2D rhizotron geometries following Weigand and Kemna 2017 Biogeosciences
8,327
def rdf_suffix ( fmt : str ) -> str : for k , v in SUFFIX_FORMAT_MAP . items ( ) : if fmt == v : return k return 'rdf'
Map the RDF format to the approproate suffix
8,328
def export_bert ( data , electrodes , filename ) : if has_multiple_timesteps ( data ) : for i , timestep in enumerate ( split_timesteps ( data ) ) : export_bert ( timestep , electrodes , filename . replace ( "." , "_%.3d." % i ) ) f = open ( filename , 'w' ) f . write ( "%d\n" % len ( electrodes ) ) f . write ( "# " ) ...
Export to unified data format used in pyGIMLi & BERT .
8,329
def reset ( self , index = None ) : points_handler_count = len ( self . registration_view . points ) if index is None : indexes = range ( points_handler_count ) else : indexes = [ index ] indexes = [ i for i in indexes if i < points_handler_count ] for i in indexes : self . registration_view . points [ i ] . reset ( ) ...
Reset the points for the specified index position . If no index is specified reset points for all point handlers .
8,330
def _read_file ( filename ) : with open ( filename , 'r' ) as fid2 : abem_data_orig = fid2 . read ( ) fid = StringIO ( ) fid . write ( abem_data_orig ) fid . seek ( 0 ) fid . readline ( ) fid . readline ( ) file_type = int ( fid . readline ( ) . strip ( ) ) fid . seek ( 0 ) return file_type , fid
Read a res2dinv - file and return the header
8,331
def add_dat_file ( filename , settings , container = None , ** kwargs ) : importers = { 11 : _read_general_type , } file_type , content = _read_file ( filename ) if file_type not in importers : raise Exception ( 'type of RES2DINV data file not recognized: {0}' . format ( file_type ) ) header , data = importers [ file_t...
Read a RES2DINV - style file produced by the ABEM export program .
8,332
def console_input ( default , validation = None , allow_empty = False ) : value = raw_input ( "> " ) or default if value == "" and not allow_empty : print "Invalid: Empty value is not permitted." return console_input ( default , validation ) if validation : try : return validation ( value ) except ValidationError , e :...
Get user input value from stdin
8,333
def correct ( self , calib , temp , we_t , ae_t ) : if not A4TempComp . in_range ( temp ) : return None if self . __algorithm == 1 : return self . __eq1 ( temp , we_t , ae_t ) if self . __algorithm == 2 : return self . __eq2 ( temp , we_t , ae_t , calib . we_cal_mv , calib . ae_cal_mv ) if self . __algorithm == 3 : ret...
Compute weC from weT aeT
8,334
def cf_t ( self , temp ) : index = int ( ( temp - A4TempComp . __MIN_TEMP ) // A4TempComp . __INTERVAL ) if temp % A4TempComp . __INTERVAL == 0 : return self . __values [ index ] y1 = self . __values [ index ] y2 = self . __values [ index + 1 ] delta_y = y2 - y1 delta_x = float ( temp % A4TempComp . __INTERVAL ) / A4Te...
Compute the linear - interpolated temperature compensation factor .
8,335
def run_once ( function , state = { } , errors = { } ) : @ six . wraps ( function ) def _wrapper ( * args , ** kwargs ) : if function in errors : six . reraise ( * errors [ function ] ) try : return state [ function ] except KeyError : try : state [ function ] = result = function ( * args , ** kwargs ) return result ex...
A memoization decorator whose purpose is to cache calls .
8,336
def _session ( self ) : if self . _http_session is None : self . _http_session = requests . Session ( ) self . _http_session . headers . update ( self . _get_headers ( ) ) self . _http_session . verify = self . _verify_https_request ( ) if all ( self . _credentials ) : username , password = self . _credentials self . _...
The current session used by the client .
8,337
def get_resource ( self , path ) : response = self . _http_request ( path ) try : return response . json ( ) except ValueError : raise exception . ServiceException ( "Invalid service response." )
Getting the required information from the API .
8,338
def update_resource ( self , path , data , if_match = None ) : response = self . _http_request ( resource = path , method = "PUT" , body = data , if_match = if_match ) try : return response . json ( ) except ValueError : raise exception . ServiceException ( "Invalid service response." )
Update the required resource .
8,339
def summarize ( self ) : s = str ( self . allval ( ) ) return self . parse ( s [ : 2 ] + '' . join ( [ 'Z' ] * len ( s [ 2 : ] ) ) )
Convert all of the values to their max values . This form is used to represent the summary level
8,340
def _filter_schlumberger ( configs ) : configs_sorted = np . hstack ( ( np . sort ( configs [ : , 0 : 2 ] , axis = 1 ) , np . sort ( configs [ : , 2 : 4 ] , axis = 1 ) , ) ) . astype ( int ) MN = configs_sorted [ : , 2 : 4 ] . copy ( ) MN_unique = np . unique ( MN . view ( MN . dtype . descr * 2 ) ) MN_unique_reshape =...
Filter Schlumberger configurations
8,341
def _filter_dipole_dipole ( configs ) : dist_ab = np . abs ( configs [ : , 0 ] - configs [ : , 1 ] ) dist_mn = np . abs ( configs [ : , 2 ] - configs [ : , 3 ] ) distances_equal = ( dist_ab == dist_mn ) not_overlapping = ( ( ( configs [ : , 0 ] < configs [ : , 2 ] ) & ( configs [ : , 1 ] < configs [ : , 2 ] ) & ( confi...
Filter dipole - dipole configurations
8,342
def _sort_dd_skips ( configs , dd_indices_all ) : config_current_skips = np . abs ( configs [ : , 1 ] - configs [ : , 0 ] ) if np . all ( np . isnan ( config_current_skips ) ) : return { 0 : [ ] } available_skips_raw = np . unique ( config_current_skips ) available_skips = available_skips_raw [ ~ np . isnan ( available...
Given a set of dipole - dipole configurations sort them according to their current skip .
8,343
def filter ( configs , settings ) : if isinstance ( configs , pd . DataFrame ) : configs = configs [ [ 'a' , 'b' , 'm' , 'n' ] ] . values filter_funcs = { 'dd' : _filter_dipole_dipole , 'schlumberger' : _filter_schlumberger , } keys = [ 'dd' , 'schlumberger' , ] allowed_keys = settings . get ( 'only_types' , filter_fun...
Main entry function to filtering configuration types
8,344
def save_block_to_crt ( filename , group , norrec = 'all' , store_errors = False ) : if norrec != 'all' : group = group . query ( 'norrec == "{0}"' . format ( norrec ) ) with open ( filename , 'wb' ) as fid : fid . write ( bytes ( '{0}\n' . format ( len ( group ) ) , 'UTF-8' ) ) AB = group [ 'a' ] * 1e4 + group [ 'b' ]...
Save a dataset to a CRTomo - compatible . crt file
8,345
def get_label ( parameter , ptype , flavor = None , mpl = None ) : if flavor is not None : if flavor not in ( 'latex' , 'mathml' ) : raise Exception ( 'flavor not recognized: {}' . format ( flavor ) ) else : if mpl is None : raise Exception ( 'either the flavor or mpl must be provided' ) rendering = mpl . rcParams [ 't...
Return the label of a given SIP parameter
8,346
def _add_labels ( self , axes , dtype ) : for ax in axes [ 1 , : ] . flat : ax . set_xlabel ( 'frequency [Hz]' ) if dtype == 'rho' : axes [ 0 , 0 ] . set_ylabel ( r'$|\rho| [\Omega m]$' ) axes [ 0 , 1 ] . set_ylabel ( r'$-\phi [mrad]$' ) axes [ 1 , 0 ] . set_ylabel ( r"$\sigma' [S/m]$" ) axes [ 1 , 1 ] . set_ylabel ( r...
Given a 2x2 array of axes add x and y labels
8,347
def add ( self , response , label = None ) : if not isinstance ( response , sip_response . sip_response ) : raise Exception ( 'can only add sip_reponse.sip_response objects' ) self . objects . append ( response ) if label is None : self . labels . append ( 'na' ) else : self . labels . append ( label )
add one response object to the list
8,348
def split_data ( data , squeeze = False ) : vdata = np . atleast_2d ( data ) nr_freqs = int ( vdata . shape [ 1 ] / 2 ) part1 = vdata [ : , 0 : nr_freqs ] part2 = vdata [ : , nr_freqs : ] if ( squeeze ) : part1 = part1 . squeeze ( ) part2 = part2 . squeeze ( ) return part1 , part2
Split 1D or 2D into two parts using the last axis
8,349
def convert ( input_format , output_format , data , one_spectrum = False ) : if input_format == output_format : return data if input_format not in from_converters : raise KeyError ( 'Input format {0} not known!' . format ( input_format ) ) if output_format not in to_converters : raise KeyError ( 'Output format {0} not ...
Convert from the given format to the requested format
8,350
def search ( self , params , standardize = False ) : resp = self . _request ( ENDPOINTS [ 'SEARCH' ] , params ) if not standardize : return resp for res in resp [ 'result_data' ] : res = self . standardize ( res ) return resp
Get a list of person objects for the given search params .
8,351
def detail_search ( self , params , standardize = False ) : response = self . _request ( ENDPOINTS [ 'SEARCH' ] , params ) result_data = [ ] for person in response [ 'result_data' ] : try : detail = self . person_details ( person [ 'person_id' ] , standardize = standardize ) except ValueError : pass else : result_data ...
Get a detailed list of person objects for the given search params .
8,352
def person_details ( self , person_id , standardize = False ) : resp = self . _request ( path . join ( ENDPOINTS [ 'DETAILS' ] , person_id ) ) if standardize : resp [ 'result_data' ] = [ self . standardize ( res ) for res in resp [ 'result_data' ] ] return resp
Get a detailed person object
8,353
def plot_ps_extra ( dataobj , key , ** kwargs ) : if isinstance ( dataobj , pd . DataFrame ) : df_raw = dataobj else : df_raw = dataobj . data if kwargs . get ( 'subquery' , False ) : df = df_raw . query ( kwargs . get ( 'subquery' ) ) else : df = df_raw def fancyfy ( axes , N ) : for ax in axes [ 0 : - 1 , : ] . flat ...
Create grouped pseudoplots for one or more time steps
8,354
def twisted_absolute_path ( path , request ) : parsed = urlparse . urlparse ( request . uri ) if parsed . scheme != '' : path_parts = parsed . path . lstrip ( '/' ) . split ( '/' ) request . prepath = path_parts [ 0 : 1 ] request . postpath = path_parts [ 1 : ] path = request . prepath [ 0 ] return path , request
Hack to fix twisted not accepting absolute URIs
8,355
def _add_rhoa ( df , spacing ) : df [ 'k' ] = redaK . compute_K_analytical ( df , spacing = spacing ) df [ 'rho_a' ] = df [ 'r' ] * df [ 'k' ] if 'Zt' in df . columns : df [ 'rho_a_complex' ] = df [ 'Zt' ] * df [ 'k' ] return df
a simple wrapper to compute K factors and add rhoa
8,356
def simplify ( geoids ) : from collections import defaultdict aggregated = defaultdict ( set ) d = { } for g in geoids : if not bool ( g ) : continue av = g . allval ( ) d [ av ] = None aggregated [ av ] . add ( g ) compiled = set ( ) for k , v in aggregated . items ( ) : if len ( v ) >= 5 : compiled . add ( k ) compil...
Given a list of geoids reduce it to a simpler set . If there are five or more geoids at one summary level convert them to a single geoid at the higher level .
8,357
def isimplify ( geoids ) : s0 = list ( geoids ) for i in range ( 10 ) : s1 = simplify ( s0 ) if len ( s1 ) == len ( s0 ) : return s1 s0 = s1
Iteratively simplify until the set stops getting smaller .
8,358
def regenerate_thumbs ( self ) : Model = self . model instances = Model . objects . all ( ) num_instances = instances . count ( ) regen_tracker = { } counter = 1 for instance in instances : file = getattr ( instance , self . field ) if not file : print "(%d/%d) ID: %d -- Skipped -- No file" % ( counter , num_instances ...
Handle re - generating the thumbnails . All this involves is reading the original file then saving the same exact thing . Kind of annoying but it s simple .
8,359
def count_vowels ( text ) : count = 0 for i in text : if i . lower ( ) in config . AVRO_VOWELS : count += 1 return count
Count number of occurrences of vowels in a given string
8,360
def count_consonants ( text ) : count = 0 for i in text : if i . lower ( ) in config . AVRO_CONSONANTS : count += 1 return count
Count number of occurrences of consonants in a given string
8,361
def _pseudodepths_wenner ( configs , spacing = 1 , grid = None ) : if grid is None : xpositions = ( configs - 1 ) * spacing else : xpositions = grid . get_electrode_positions ( ) [ configs - 1 , 0 ] z = np . abs ( np . max ( xpositions , axis = 1 ) - np . min ( xpositions , axis = 1 ) ) * - 0.11 x = np . mean ( xpositi...
Given distances between electrodes compute Wenner pseudo depths for the provided configuration
8,362
def plot_pseudodepths ( configs , nr_electrodes , spacing = 1 , grid = None , ctypes = None , dd_merge = False , ** kwargs ) : pseudo_d_functions = { 'dd' : _pseudodepths_dd_simple , 'schlumberger' : _pseudodepths_schlumberger , 'wenner' : _pseudodepths_wenner , } titles = { 'dd' : 'dipole-dipole configurations' , 'sch...
Plot pseudodepths for the measurements . If grid is given then the actual electrode positions are used and the parameter spacing is ignored
8,363
def matplot ( x , y , z , ax = None , colorbar = True , ** kwargs ) : xmin = x . min ( ) xmax = x . max ( ) dx = np . abs ( x [ 0 , 1 ] - x [ 0 , 0 ] ) ymin = y . min ( ) ymax = y . max ( ) dy = np . abs ( y [ 1 , 0 ] - y [ 0 , 0 ] ) x2 , y2 = np . meshgrid ( np . arange ( xmin , xmax + 2 * dx , dx ) - dx / 2. , np . a...
Plot x y z as expected with correct axis labels .
8,364
def summary ( self ) : return "\n" . join ( [ "Transaction:" , " When: " + self . date . strftime ( "%a %d %b %Y" ) , " Description: " + self . desc . replace ( '\n' , ' ' ) , " For amount: {}" . format ( self . amount ) , " From: {}" . format ( ", " . join ( map ( lambda x : x . account , self . src...
Return a string summary of transaction
8,365
def check ( self ) : if not self . date : raise XnDataError ( "Missing date" ) if not self . desc : raise XnDataError ( "Missing description" ) if not self . dst : raise XnDataError ( "No destination accounts" ) if not self . src : raise XnDataError ( "No source accounts" ) if not self . amount : raise XnDataError ( "N...
Check this transaction for completeness
8,366
def balance ( self ) : self . check ( ) if not sum ( map ( lambda x : x . amount , self . src ) ) == - self . amount : raise XnBalanceError ( "Sum of source amounts " "not equal to transaction amount" ) if not sum ( map ( lambda x : x . amount , self . dst ) ) == self . amount : raise XnBalanceError ( "Sum of destinati...
Check this transaction for correctness
8,367
def match_rules ( self , rules ) : try : self . check ( ) return None except XnDataError : pass scores = { } for r in rules : outcomes = r . match ( self ) if not outcomes : continue for outcome in outcomes : if isinstance ( outcome , rule . SourceOutcome ) : key = 'src' elif isinstance ( outcome , rule . DestinationOu...
Process this transaction against the given ruleset
8,368
def complete ( self , uio , dropped = False ) : if self . dropped and not dropped : return for end in [ 'src' , 'dst' ] : if getattr ( self , end ) : continue uio . show ( '\nEnter ' + end + ' for transaction:' ) uio . show ( '' ) uio . show ( self . summary ( ) ) try : endpoints = [ ] remaining = self . amount while r...
Query for all missing information in the transaction
8,369
def process ( self , rules , uio , prevxn = None ) : self . apply_outcomes ( self . match_rules ( rules ) , uio , prevxn = prevxn )
Matches rules and applies outcomes
8,370
def plot_quadpole_evolution ( dataobj , quadpole , cols , threshold = 5 , rolling = False , ax = None ) : if isinstance ( dataobj , pd . DataFrame ) : df = dataobj else : df = dataobj . data subquery = df . query ( 'a == {0} and b == {1} and m == {2} and n == {3}' . format ( * quadpole ) ) if ax is not None : fig = ax ...
Visualize time - lapse evolution of a single quadropole .
8,371
def visitSenseFlags ( self , ctx : ShExDocParser . SenseFlagsContext ) : if '!' in ctx . getText ( ) : self . expression . negated = True if '^' in ctx . getText ( ) : self . expression . inverse = True
! ^ ? | ^ ! ?
8,372
def as_tuple ( self ) : if self . _tuple is None : year = 9999 if self . year : m = self . DIGITS . match ( self . year ) if m : year = int ( m . group ( 0 ) ) month = self . month_num or 99 day = self . day if self . day is not None else 99 self . _tuple = year , month , day return self . _tuple
Date as three - tuple of numbers
8,373
def _cmp_date ( self ) : dates = sorted ( val for val in self . kw . values ( ) if isinstance ( val , CalendarDate ) ) if dates : return dates [ 0 ] return CalendarDate ( )
Returns Calendar date used for comparison .
8,374
def better_sentences ( func ) : @ wraps ( func ) def wrapped ( * args ) : sentences = func ( * args ) new_sentences = [ ] for i , l in enumerate ( sentences ) : if '\n\n' in l : splits = l . split ( '\n\n' ) if len ( splits ) > 1 : for ind , spl in enumerate ( splits ) : if len ( spl ) < 20 : del splits [ ind ] new_sen...
takes care of some edge cases of sentence tokenization for cases when websites don t close sentences properly usually after blockquotes image captions or attributions
8,375
def __we_c ( cls , calib , tc , temp , we_v ) : offset_v = calib . pid_elc_mv / 1000.0 response_v = we_v - offset_v response_c = tc . correct ( temp , response_v ) if response_c is None : return None we_c = response_c + offset_v return we_c
Compute weC from sensor temperature compensation of weV
8,376
def __cnc ( cls , calib , we_c ) : if we_c is None : return None offset_v = calib . pid_elc_mv / 1000.0 response_c = we_c - offset_v cnc = response_c / calib . pid_sens_mv return cnc
Compute cnc from weC
8,377
def add_to_class ( self , model_class ) : model_class . _meta . add_field ( self ) setattr ( model_class , self . name , _FieldDescriptor ( self ) )
Replace the Field attribute with a named _FieldDescriptor .
8,378
def add_field ( self , field ) : self . remove_field ( field . name ) self . _fields [ field . name ] = field if field . default is not None : if six . callable ( field . default ) : self . _default_callables [ field . key ] = field . default else : self . _defaults [ field . key ] = field . default
Add the received field to the model .
8,379
def remove_field ( self , field_name ) : field = self . _fields . pop ( field_name , None ) if field is not None and field . default is not None : if six . callable ( field . default ) : self . _default_callables . pop ( field . key , None ) else : self . _defaults . pop ( field . key , None )
Remove the field with the received field name from model .
8,380
def get_defaults ( self ) : defaults = self . _defaults . copy ( ) for field_key , default in self . _default_callables . items ( ) : defaults [ field_key ] = default ( ) return defaults
Get a dictionary that contains all the available defaults .
8,381
def speak ( self , textstr , lang = 'en-US' , gender = 'female' , format = 'riff-16khz-16bit-mono-pcm' ) : concatkey = '%s-%s-%s-%s' % ( textstr , lang . lower ( ) , gender . lower ( ) , format ) key = self . tts_engine + '' + str ( hash ( concatkey ) ) self . filename = '%s-%s.mp3' % ( key , lang ) fileloc = self . di...
Run will call Microsoft Translate API and and produce audio
8,382
def call ( args ) : b = StringIO ( ) p = subprocess . Popen ( args , stdout = subprocess . PIPE , stderr = subprocess . STDOUT ) encoding = getattr ( sys . stdout , 'encoding' , None ) or 'utf-8' for stdout in iter ( p . stdout . readline , '' ) : if len ( stdout ) == 0 : break stdout = force_unicode ( stdout , encodin...
Call terminal command and return exit_code and stdout
8,383
def get_command_str ( args ) : single_quote = "'" double_quote = '"' for i , value in enumerate ( args ) : if " " in value and double_quote not in value : args [ i ] = '"%s"' % value elif " " in value and single_quote not in value : args [ i ] = "'%s'" % value return " " . join ( args )
Get terminal command string from list of command and arguments
8,384
def receive_data_chunk ( self , raw_data , start ) : self . file . write ( raw_data ) eventlet . sleep ( 0 )
Over - ridden method to circumvent the worker timeouts on large uploads .
8,385
def stoptimes ( self , start_date , end_date ) : params = { 'start' : self . format_date ( start_date ) , 'end' : self . format_date ( end_date ) } response = self . _request ( ENDPOINTS [ 'STOPTIMES' ] , params ) return response
Return all stop times in the date range
8,386
def setup_logger ( self ) : self . log_list = [ ] handler = ListHandler ( self . log_list ) formatter = logging . Formatter ( '%(asctime)s - %(name)s - %(levelname)s - %(message)s' ) handler . setFormatter ( formatter ) logger = logging . getLogger ( ) logger . addHandler ( handler ) logger . setLevel ( logging . INFO ...
Setup a logger
8,387
def match_to_dict ( match ) : balance , indent , account_fragment = match . group ( 1 , 2 , 3 ) return { 'balance' : decimal . Decimal ( balance ) , 'indent' : len ( indent ) , 'account_fragment' : account_fragment , 'parent' : None , 'children' : [ ] , }
Convert a match object into a dict .
8,388
def balance ( output ) : lines = map ( pattern . search , output . splitlines ( ) ) stack = [ ] top = [ ] for item in map ( match_to_dict , itertools . takewhile ( lambda x : x , lines ) ) : while stack and item [ 'indent' ] <= stack [ - 1 ] [ 'indent' ] : stack . pop ( ) if not stack : stack . append ( item ) top . ap...
Convert ledger balance output into an hierarchical data structure .
8,389
def is_punctuation ( text ) : return not ( text . lower ( ) in config . AVRO_VOWELS or text . lower ( ) in config . AVRO_CONSONANTS )
Check if given string is a punctuation
8,390
def is_exact ( needle , haystack , start , end , matchnot ) : return ( ( start >= 0 and end < len ( haystack ) and haystack [ start : end ] == needle ) ^ matchnot )
Check exact occurrence of needle in haystack
8,391
def fix_string_case ( text ) : fixed = [ ] for i in text : if is_case_sensitive ( i ) : fixed . append ( i ) else : fixed . append ( i . lower ( ) ) return '' . join ( fixed )
Converts case - insensitive characters to lower case
8,392
def _crmod_to_abmn ( self , configs ) : A = configs [ : , 0 ] % 1e4 B = np . floor ( configs [ : , 0 ] / 1e4 ) . astype ( int ) M = configs [ : , 1 ] % 1e4 N = np . floor ( configs [ : , 1 ] / 1e4 ) . astype ( int ) ABMN = np . hstack ( ( A [ : , np . newaxis ] , B [ : , np . newaxis ] , M [ : , np . newaxis ] , N [ : ...
convert crmod - style configurations to a Nx4 array
8,393
def load_crmod_config ( self , filename ) : with open ( filename , 'r' ) as fid : nr_of_configs = int ( fid . readline ( ) . strip ( ) ) configs = np . loadtxt ( fid ) print ( 'loaded configs:' , configs . shape ) if nr_of_configs != configs . shape [ 0 ] : raise Exception ( 'indicated number of measurements does not e...
Load a CRMod configuration file
8,394
def _get_crmod_abmn ( self ) : ABMN = np . vstack ( ( self . configs [ : , 0 ] * 1e4 + self . configs [ : , 1 ] , self . configs [ : , 2 ] * 1e4 + self . configs [ : , 3 ] , ) ) . T . astype ( int ) return ABMN
return a Nx2 array with the measurement configurations formatted CRTomo style
8,395
def write_crmod_volt ( self , filename , mid ) : ABMN = self . _get_crmod_abmn ( ) if isinstance ( mid , ( list , tuple ) ) : mag_data = self . measurements [ mid [ 0 ] ] pha_data = self . measurements [ mid [ 1 ] ] else : mag_data = self . measurements [ mid ] pha_data = np . zeros ( mag_data . shape ) all_data = np ....
Write the measurements to the output file in the volt . dat file format that can be read by CRTomo .
8,396
def write_crmod_config ( self , filename ) : ABMN = self . _get_crmod_abmn ( ) with open ( filename , 'wb' ) as fid : fid . write ( bytes ( '{0}\n' . format ( ABMN . shape [ 0 ] ) , 'utf-8' , ) ) np . savetxt ( fid , ABMN . astype ( int ) , fmt = '%i %i' )
Write the configurations to a configuration file in the CRMod format All configurations are merged into one previor to writing to file
8,397
def gen_dipole_dipole ( self , skipc , skipv = None , stepc = 1 , stepv = 1 , nr_voltage_dipoles = 10 , before_current = False , start_skip = 0 , N = None ) : if N is None and self . nr_electrodes is None : raise Exception ( 'You must provide the number of electrodes' ) elif N is None : N = self . nr_electrodes if skip...
Generate dipole - dipole configurations
8,398
def gen_gradient ( self , skip = 0 , step = 1 , vskip = 0 , vstep = 1 ) : N = self . nr_electrodes quadpoles = [ ] for a in range ( 1 , N - skip , step ) : b = a + skip + 1 for m in range ( a + 1 , b - vskip - 1 , vstep ) : n = m + vskip + 1 quadpoles . append ( ( a , b , m , n ) ) configs = np . array ( quadpoles ) if...
Generate gradient measurements
8,399
def remove_duplicates ( self , configs = None ) : if configs is None : c = self . configs else : c = configs struct = c . view ( c . dtype . descr * 4 ) configs_unique = np . unique ( struct ) . view ( c . dtype ) . reshape ( - 1 , 4 ) if configs is None : self . configs = configs_unique else : return configs_unique
remove duplicate entries from 4 - point configurations . If no configurations are provided then use self . configs . Unique configurations are only returned if configs is not None .