idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
14,900
def inflate_analysis_group(self, identifier, definition):
    """Inflate a whole analysis group from a configuration entry.

    ``identifier`` may be a plugin identifier (checker or provider) or a
    plain group name; ``definition`` is the mapping describing the group
    and is consumed (mutated via pop) by this method.
    """
    # Pull the optional sub-definitions out first so inflate_plugin below
    # only sees the plugin's own options.
    providers_definition = definition.pop('providers', None)
    checkers_definition = definition.pop('checkers', None)
    analysis_group = AnalysisGroup()
    try:
        first_plugin = self.inflate_plugin(identifier, definition)
        if isinstance(first_plugin, Checker):
            analysis_group.checkers.append(first_plugin)
            # A checker-rooted group needs explicit providers.
            if providers_definition is None:
                raise ValueError(
                    'when declaring an analysis group with a checker '
                    'identifier, you must also declare providers with '
                    'the "providers" key.')
            analysis_group.providers.extend(
                self.inflate_providers(providers_definition))
        elif isinstance(first_plugin, Provider):
            analysis_group.providers.append(first_plugin)
            # A provider-rooted group needs explicit checkers.
            if checkers_definition is None:
                raise ValueError(
                    'when declaring an analysis group with a provider '
                    'identifier, you must also declare checkers with '
                    'the "checkers" key.')
            analysis_group.checkers.extend(
                self.inflate_checkers(checkers_definition))
    except PluginNotFoundError as e:
        # Identifier is not a plugin: treat the entry as a named group.
        logger.warning(
            'Could not find any plugin identified by %s, '
            'considering entry as group name. Exception: %s.',
            identifier, e)
        analysis_group.name = definition.pop('name', identifier)
        analysis_group.description = definition.pop('description', None)
        # Named groups must declare providers and checkers together.
        if bool(providers_definition) != bool(checkers_definition):
            raise ValueError(
                'when declaring an analysis group with a name, you must '
                'either declare both "providers" and "checkers" or none.')
        if providers_definition and checkers_definition:
            analysis_group.providers.extend(
                self.inflate_providers(providers_definition))
            analysis_group.checkers.extend(
                self.inflate_checkers(checkers_definition))
    self.cleanup_definition(definition)
    # Remaining keys are treated as "nd" (name/definition) checkers.
    for nd_identifier, nd_definition in definition.items():
        analysis_group.checkers.append(
            self.inflate_nd_checker(nd_identifier, nd_definition))
    return analysis_group
Inflate a whole analysis group .
488
8
14,901
def print_plugins(self):
    """Print the available plugins."""
    term_width = console_width()
    separator = Style.BRIGHT + '=' * term_width + '\n'
    half = int(term_width / 2)

    def _show(title, plugins):
        # Centered section title, then each plugin in identifier order.
        print(separator + ' ' * half + title)
        for plugin_class in sorted(plugins.values(), key=lambda p: p.identifier):
            plugin_class().print()
            print()

    if self.available_providers:
        _show('PROVIDERS', self.available_providers)
    if self.available_checkers:
        _show('CHECKERS', self.available_checkers)
Print the available plugins .
158
5
14,902
def cmd ( send , msg , args ) : match = re . match ( r'--(.+?)\b' , msg ) randtype = 'hex' if match : if match . group ( 1 ) == 'int' : randtype = 'int' else : send ( "Invalid Flag." ) return if randtype == 'hex' : send ( hex ( getrandbits ( 50 ) ) ) else : maxlen = 1000000000 msg = msg . split ( ) if len ( msg ) == 2 : if msg [ 1 ] . isdigit ( ) : maxlen = int ( msg [ 1 ] ) else : send ( "Invalid Length" ) return send ( str ( randrange ( maxlen ) ) )
For when you don't have enough randomness in your life.
151
13
14,903
def split(X, Y, question):
    """Partition a dataset by whether each sample matches the question."""
    matched_x, matched_y = [], []
    rest_x, rest_y = [], []
    for features, label in zip(X, Y):
        bucket_x, bucket_y = (matched_x, matched_y) if question.match(features) else (rest_x, rest_y)
        bucket_x.append(features)
        bucket_y.append(label)
    return (np.array(matched_x), np.array(rest_x),
            np.array(matched_y), np.array(rest_y))
Partitions a dataset .
128
5
14,904
def build_tree(X, y, criterion, max_depth, current_depth=1):
    """Builds the decision tree recursively.

    Returns a Node for internal splits, or a Leaf when the depth limit
    is reached or no question yields any information gain.
    """
    # check for max_depth accomplished
    if max_depth >= 0 and current_depth >= max_depth:
        return Leaf(y)
    # check for 0 gain
    gain, question = find_best_question(X, y, criterion)
    if gain == 0:
        return Leaf(y)
    # split
    true_X, false_X, true_y, false_y = split(X, y, question)
    # Build the `true` branch of the tree recursively
    true_branch = build_tree(true_X, true_y, criterion, max_depth,
                             current_depth=current_depth + 1)
    # Build the `false` branch of the tree recursively
    false_branch = build_tree(false_X, false_y, criterion, max_depth,
                              current_depth=current_depth + 1)
    # returning the root of the tree/subtree
    return Node(question=question, true_branch=true_branch,
                false_branch=false_branch)
Builds the decision tree .
235
6
14,905
def print_tree(root, space=' '):
    """Prints the Decision Tree in a pretty way."""
    if isinstance(root, Leaf):
        # Leaves just show their majority-class prediction.
        print(space + "Prediction: " + str(root.most_frequent))
        return
    print(space + str(root.question))
    for label, branch in (("--> True:", root.true_branch),
                          ("--> False:", root.false_branch)):
        print(space + label)
        print_tree(branch, space + ' ')
Prints the Decision Tree in a pretty way .
104
10
14,906
def find_element(driver, elem_path, by=CSS, timeout=TIMEOUT, poll_frequency=0.5):
    """Find and return an element once located."""
    waiter = WebDriverWait(driver, timeout, poll_frequency)
    locator = (by, elem_path)
    return waiter.until(EC.presence_of_element_located(locator))
Find and return an element once located
70
7
14,907
def find_elements(driver, elem_path, by=CSS, timeout=TIMEOUT, poll_frequency=0.5):
    """Find and return all elements once located."""
    waiter = WebDriverWait(driver, timeout, poll_frequency)
    locator = (by, elem_path)
    return waiter.until(EC.presence_of_all_elements_located(locator))
Find and return all elements once located
74
7
14,908
def find_write(driver, elem_path, write_str, clear_first=True, send_enter=False,
               by=CSS, timeout=TIMEOUT, poll_frequency=0.5):
    """Find a writable element and write to it."""
    target = find_element(driver, elem_path=elem_path, by=by, timeout=timeout,
                          poll_frequency=poll_frequency)
    if clear_first:
        target.clear()
    target.send_keys(write_str)
    if send_enter:
        # Submit after typing when requested.
        target.send_keys(Keys.ENTER)
    return target
Find a writable element and write to it
122
9
14,909
def cmd(send, msg, args):
    """Intensifies text."""
    # Fall back to a generated word when no text was given.
    text = msg or gen_word()
    send(gen_intensify(text))
Intensifies text .
32
5
14,910
def fetch_states(self, elections):
    """Return the unique divisions for all elections on an election day.

    District-level elections are mapped to their parent division; the
    result is de-duplicated and sorted by division label.
    """
    states = {
        election.division.parent
        if election.division.level.name == DivisionLevel.DISTRICT
        else election.division
        for election in elections
    }
    # sorted() accepts any iterable, so no intermediate list is needed.
    return sorted(states, key=lambda division: division.label)
Returns the unique divisions for all elections on an election day .
73
12
14,911
def get_name(self, type_, id_):
    """Read a cached name if available.

    Returns the cached string, or None when no cache file exists.
    """
    cachefile = self.filename(type_, id_)
    try:
        with open(cachefile, 'r') as f:
            return f.read()
    except FileNotFoundError:
        # Cache miss: a missing file simply means "no cached name".
        # (FileNotFoundError replaces the old errno.ENOENT check.)
        return None
Read a cached name if available .
75
7
14,912
def put_name(self, type_, id_, name):
    """Write a cached name to disk, creating parent directories."""
    cachefile = self.filename(type_, id_)
    dirname = os.path.dirname(cachefile)
    # exist_ok avoids the racy try/except-EEXIST dance around makedirs.
    os.makedirs(dirname, exist_ok=True)
    with open(cachefile, 'w') as f:
        f.write(name)
Write a cached name to disk .
94
7
14,913
def get_or_load_name(self, type_, id_, method):
    """Read-through cache for a type of object's name.

    Twisted inlineCallbacks-style generator: yields a Deferred from
    *method* and returns via defer.returnValue().
    """
    name = self.get_name(type_, id_)
    if name is not None:
        # Cache hit.
        defer.returnValue(name)
    # Cache miss: load the object and cache its name for next time.
    instance = yield method(id_)
    if instance is None:
        defer.returnValue(None)
    self.put_name(type_, id_, instance.name)
    defer.returnValue(instance.name)
Read-through cache for a type of object's name.
92
12
14,914
def call(self, name, *args, **kwargs):
    """Add a new call to the list that we will submit to the server."""
    # Like txkoji.Connection, we always want the full request for tasks:
    if name in ('getTaskInfo', 'getTaskDescendants'):
        kwargs['request'] = True
    if kwargs:
        # Keyword args are marshalled as a trailing dict flagged
        # with __starstar.
        kwargs['__starstar'] = True
        args += (kwargs,)
    self.calls.append({'methodName': name, 'params': args})
Add a new call to the list that we will submit to the server .
113
15
14,915
def _multicall_callback(self, values, calls):
    """Fires when we get information back from the XML-RPC server."""
    iterator = KojiMultiCallIterator(values)
    # Attach the connection and the original calls so the iterator can
    # correlate results.
    iterator.connection = self.connection
    iterator.calls = calls
    return iterator
Fires when we get information back from the XML - RPC server .
39
14
14,916
def time_ago(dt):
    """Return a human-readable string for how long ago *dt* was."""
    delta = datetime.datetime.now() - dt
    return humanize.naturaltime(delta)
Return a human-readable string describing how long ago dt was.
32
5
14,917
def cmd(send, msg, args):
    """Insults a user."""
    # No target given: pick a random user from the channel.
    target = msg if msg else choice(get_users(args))
    send(gen_insult(target))
Insults a user .
40
5
14,918
def needed(name, required):
    """RETURN SUBSET IF name IN REQUIRED"""
    subset = []
    for field in required:
        if field and startswith_field(field, name):
            subset.append(relative_field(field, name))
        else:
            subset.append(None)
    return subset
RETURN SUBSET IF name IN REQUIRED
38
11
14,919
def _match_data_to_parameter(cls, data):
    """Find the appropriate parameter class for a parameter field.

    Returns the class whose IN value matches data["in"], or None.
    """
    in_value = data["in"]
    # NOTE: the original loop reused the name ``cls`` for the candidate
    # class, shadowing the method's own argument; use a distinct name.
    for candidate in [QueryParameter, HeaderParameter, FormDataParameter,
                      PathParameter, BodyParameter]:
        if in_value == candidate.IN:
            return candidate
    return None
find the appropriate parameter for a parameter field
64
8
14,920
def cmd(send, msg, args):
    """Runs eix with the given arguments."""
    if not msg:
        # No query: show a random line from the full package list.
        env = {'EIX_LIMIT': '0', 'HOME': os.environ['HOME']}
        listing = subprocess.run(['eix', '-c'], env=env,
                                 stdout=subprocess.PIPE,
                                 universal_newlines=True)
        if listing.returncode:
            send("eix what?")
            return
        send(choice(listing.stdout.splitlines()))
        return
    search = subprocess.run(['eix', '-c'] + msg.split(),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
    if search.returncode:
        send("%s isn't important enough for Gentoo." % msg)
    else:
        send(search.stdout.splitlines()[0].strip())
Runs eix with the given arguments .
209
9
14,921
def to_str(number):
    """Convert a task state ID number to a string."""
    # State constants are module-level ALL-CAPS alphabetic names.
    for name, value in globals().items():
        if value == number and name.isalpha() and name.isupper():
            return name
    return '(unknown state %d)' % number
Convert a task state ID number to a string .
55
11
14,922
def to_json(self, filename, indent=2):
    """Return a JSON string of all model inputs and outputs."""
    payload = self.to_dicts()
    if hasattr(filename, "write"):
        # Already an open file-like object.
        json.dump(payload, filename, indent=indent)
        return
    if isinstance(filename, string_types):
        with open(os.path.expanduser(filename), "w") as fh:
            json.dump(payload, fh, indent=indent)
        return
    raise ValueError("Unknown filename or file-object")
Return a JSON string of all model inputs and outputs .
106
11
14,923
def to_df(self, recommended_only=False, include_io=True):
    """Return a pandas DataFrame for each model and dataset."""
    od = BMDS._df_ordered_dict(include_io)
    # Use a plain loop instead of a list comprehension executed purely
    # for its side effects.
    for i, session in enumerate(self):
        session._add_to_to_ordered_dict(od, i, recommended_only)
    return pd.DataFrame(od)
Return a pandas DataFrame for each model and dataset .
78
12
14,924
def to_csv(self, filename, delimiter=",", recommended_only=False, include_io=True):
    """Return a CSV for each model and dataset."""
    frame = self.to_df(recommended_only, include_io)
    # Rows carry their own identifiers, so drop the index column.
    frame.to_csv(filename, index=False, sep=delimiter)
Return a CSV for each model and dataset .
61
9
14,925
def to_excel(self, filename, recommended_only=False, include_io=True):
    """Return an Excel file for each model and dataset."""
    frame = self.to_df(recommended_only, include_io)
    if isinstance(filename, string_types):
        # Expand "~" in user-supplied paths.
        filename = os.path.expanduser(filename)
    frame.to_excel(filename, index=False)
Return an Excel file for each model and dataset .
74
10
14,926
def to_docx(self, filename=None, input_dataset=True, summary_table=True,
            recommendation_details=True, recommended_model=True,
            all_models=False, ):
    """Write batch sessions to a Word file."""
    report = Reporter()
    for session in self:
        report.add_session(session, input_dataset, summary_table,
                           recommendation_details, recommended_model,
                           all_models, )
    # Persist only when a filename was supplied; always hand back the
    # reporter so callers can save later.
    if filename:
        report.save(filename)
    return report
Write batch sessions to a Word file .
98
8
14,927
def save_plots(self, directory, format="png", recommended_only=False):
    """Save images of dose-response curve-fits for each model."""
    for index, session in enumerate(self):
        # The session index doubles as the filename prefix.
        session.save_plots(directory, prefix=str(index), format=format,
                           recommended_only=recommended_only)
Save images of dose - response curve - fits for each model .
62
13
14,928
def load_messages(self, directory, catalogue):
    """Loads translations found in a directory into *catalogue*.

    Raises ValueError when *directory* is not a directory.
    """
    if not os.path.isdir(directory):
        raise ValueError("{0} is not a directory".format(directory))
    for format, loader in list(self.loaders.items()):
        # Files look like <domain>.<locale>.<format>.
        extension = "{0}.{1}".format(catalogue.locale, format)
        files = find_files(directory, "*.{0}".format(extension))
        for file in files:
            # Use basename instead of naive "/" splitting so paths are
            # handled correctly on every platform.
            domain = os.path.basename(file)[:-1 * len(extension) - 1]
            catalogue.add_catalogue(
                loader.load(file, catalogue.locale, domain))
Loads translation found in a directory .
146
8
14,929
def disable_backup(self):
    """Disables dumper backup."""
    # Snapshot the values so mutation during iteration cannot bite us.
    for dumper in tuple(self.dumpers.values()):
        dumper.set_backup(False)
Disables dumper backup .
35
6
14,930
def write_translations(self, catalogue, format, options=None):
    """Writes translations from the catalogue according to the selected format.

    Raises ValueError when no dumper is registered for *format*.
    """
    # A mutable default ({}) would be shared across calls; normalize here.
    if options is None:
        options = {}
    if format not in self.dumpers:
        raise ValueError('There is no dumper associated with format "{0}"'.format(format))
    dumper = self.dumpers[format]
    if "path" in options and not os.path.isdir(options['path']):
        os.mkdir(options['path'])
    dumper.dump(catalogue, options)
Writes translation from the catalogue according to the selected format .
104
12
14,931
def main():
    """Script entry point: find the best/worst ingredients for a group of
    dinner guests with specific likes and dislikes, then report which
    on-hand ingredients are among the best.

    Relies on module-level globals: data_files, dinner_guests and
    ingredients_on_hand.
    """
    s = rawdata.content.DataFiles()
    # Column of all known ingredients from the configured data file.
    all_ingredients = list(s.get_collist_by_name(data_files[1]['file'], data_files[1]['col'])[0])
    #find_best_ingredients(ingredients_on_hand, dinner_guests)
    best_ingred, worst_ingred = find_best_ingredients(all_ingredients, dinner_guests)
    print('best ingred = ', best_ingred)
    print('worst ingred = ', worst_ingred)
    # Highlight on-hand ingredients that made the "best" list.
    for have in ingredients_on_hand:
        if have in best_ingred:
            print('Use this = ', have)
script to find a list of recipes for a group of people with specific likes and dislikes . Output of script
162
22
14,932
def batch(iterable, n, fillvalue=None):
    """Batches the elements of given iterable into n-element tuples.

    Raises TypeError when n is not an integer and ValueError when it is
    not positive.
    """
    ensure_iterable(iterable)
    if not isinstance(n, Integral):
        raise TypeError("invalid number of elements in a batch")
    if not (n > 0):
        raise ValueError("number of elements in a batch must be positive")
    # since we must use ``izip_longest``
    # (``izip`` fails if ``n`` is greater than length of ``iterable``),
    # we will apply some 'trimming' to resulting tuples if necessary
    if fillvalue is None:
        # Use a unique sentinel so None values in the data survive.
        fillvalue = object()
        trimmer = lambda item: tuple(x for x in item if x is not fillvalue)
    else:
        # identity() presumably returns a pass-through callable — no
        # trimming when the caller supplied an explicit fillvalue.
        trimmer = identity()
    args = [iter(iterable)] * n
    zipped = izip_longest(*args, fillvalue=fillvalue)
    return imap(trimmer, zipped)
Batches the elements of given iterable .
200
9
14,933
def intertwine(*iterables):
    """Constructs an iterable which intertwines given iterables."""
    iterables = tuple(imap(ensure_iterable, iterables))
    # Unique sentinel marks the padded tail of shorter iterables.
    sentinel = object()
    return (element
            for group in izip_longest(*iterables, fillvalue=sentinel)
            for element in group
            if element is not sentinel)
Constructs an iterable which intertwines given iterables .
65
12
14,934
def iterate(iterator, n=None):
    """Efficiently advances the iterator N times; by default goes to its end."""
    ensure_iterable(iterator)
    if n is not None:
        # Consume exactly n items (stops early if exhausted).
        next(islice(iterator, n, n), None)
    else:
        # Feed the whole iterator into a zero-length deque to drain it.
        deque(iterator, maxlen=0)
Efficiently advances the iterator N times ; by default goes to its end .
50
16
14,935
def unique(iterable, key=None):
    """Removes duplicates from given iterable using given key as criterion."""
    ensure_iterable(iterable)
    if key is None:
        # Default criterion is the element's hash.
        key = hash
    else:
        key = ensure_callable(key)

    def generator():
        seen = set()
        for element in iterable:
            marker = key(element)
            if marker in seen:
                continue
            seen.add(marker)
            yield element

    return generator()
Removes duplicates from given iterable using given key as criterion .
77
14
14,936
def breadth_first(start, expand):
    """Performs a breadth-first search of a graph-like structure."""
    ensure_callable(expand)

    def traverse():
        pending = deque((start,))
        while pending:
            current = pending.popleft()
            yield current
            pending.extend(expand(current))

    return traverse()
Performs a breadth - first search of a graph - like structure .
58
14
14,937
def depth_first(start, descend):
    """Performs a depth-first search of a graph-like structure."""
    ensure_callable(descend)

    def traverse():
        pending = [start]
        while pending:
            current = pending.pop()
            yield current
            pending.extend(descend(current))

    return traverse()
Performs a depth - first search of a graph - like structure .
52
14
14,938
def regex(regex):
    """Return strategy that generates strings that match given regex."""
    # Accept both compiled patterns and raw strings.
    if not hasattr(regex, 'pattern'):
        regex = re.compile(regex)
    pattern = regex.pattern
    flags = regex.flags
    # Parse the pattern into sre opcodes and build a strategy from them;
    # the final .filter() guards against any over-generation.
    codes = sre.parse(pattern)
    return _strategy(codes, Context(flags=flags)).filter(regex.match)
Return strategy that generates strings that match given regex .
66
10
14,939
def strategy(self):
    """Returns resulting strategy that generates configured char set."""
    # Restrict to ASCII codepoints when not operating in unicode mode.
    max_codepoint = None if self._unicode else 127
    strategies = []
    if self._negate:
        # Negated set: generate characters OUTSIDE the configured
        # categories/whitelist; 'Cc' and 'Cs' are always excluded.
        if self._categories or self._whitelist_chars:
            strategies.append(hs.characters(
                blacklist_categories=self._categories | set(['Cc', 'Cs']),
                blacklist_characters=self._whitelist_chars,
                max_codepoint=max_codepoint,
            ))
        if self._blacklist_chars:
            strategies.append(hs.sampled_from(
                list(self._blacklist_chars - self._whitelist_chars)))
    else:
        # Positive set: characters from the configured categories minus
        # any explicitly blacklisted ones.
        if self._categories or self._blacklist_chars:
            strategies.append(hs.characters(
                whitelist_categories=self._categories,
                blacklist_characters=self._blacklist_chars,
                max_codepoint=max_codepoint,
            ))
        if self._whitelist_chars:
            strategies.append(hs.sampled_from(
                list(self._whitelist_chars - self._blacklist_chars)))
    # An empty configuration degenerates to the empty string.
    return hs.one_of(*strategies) if strategies else hs.just(u'')
Returns resulting strategy that generates configured char set
290
8
14,940
def add_category(self, category):
    """Add unicode category to set.

    Maps an sre character-class category (\\d, \\D, \\s, \\S, \\w, \\W)
    onto unicode categories plus explicit white/black-listed chars.
    """
    if category == sre.CATEGORY_DIGIT:
        self._categories |= UNICODE_DIGIT_CATEGORIES
    elif category == sre.CATEGORY_NOT_DIGIT:
        self._categories |= UNICODE_CATEGORIES - UNICODE_DIGIT_CATEGORIES
    elif category == sre.CATEGORY_SPACE:
        self._categories |= UNICODE_SPACE_CATEGORIES
        # Some whitespace chars are not covered by the categories alone.
        for c in (UNICODE_SPACE_CHARS if self._unicode else SPACE_CHARS):
            self._whitelist_chars.add(c)
    elif category == sre.CATEGORY_NOT_SPACE:
        self._categories |= UNICODE_CATEGORIES - UNICODE_SPACE_CATEGORIES
        for c in (UNICODE_SPACE_CHARS if self._unicode else SPACE_CHARS):
            self._blacklist_chars.add(c)
    elif category == sre.CATEGORY_WORD:
        self._categories |= UNICODE_WORD_CATEGORIES
        self._whitelist_chars.add(u'_')
        if HAS_WEIRD_WORD_CHARS and self._unicode:
            # Chars the re module treats as non-word on some builds.
            for c in UNICODE_WEIRD_NONWORD_CHARS:
                self._blacklist_chars.add(c)
    elif category == sre.CATEGORY_NOT_WORD:
        self._categories |= UNICODE_CATEGORIES - UNICODE_WORD_CATEGORIES
        self._blacklist_chars.add(u'_')
        if HAS_WEIRD_WORD_CHARS and self._unicode:
            for c in UNICODE_WEIRD_NONWORD_CHARS:
                self._whitelist_chars.add(c)
Add unicode category to set
464
6
14,941
def add_chars(self, chars):
    """Add given chars to char set."""
    for char in chars:
        if not self._ignorecase:
            self._whitelist_chars.add(char)
        else:
            # Case-insensitive patterns must accept either case.
            self._whitelist_chars.add(char.lower())
            self._whitelist_chars.add(char.upper())
Add given chars to char set
75
6
14,942
def str_traceback(error, tb):
    """Returns a string representation of the traceback."""
    if isinstance(tb, types.TracebackType):
        lines = traceback.format_exception(error.__class__, error, tb)
        return ''.join(lines)
    # Already rendered (e.g. a string) - pass it through unchanged.
    return tb
Returns a string representation of the traceback .
55
9
14,943
def filter_traceback(error, tb, ignore_pkg=CURRENT_PACKAGE):
    """Filter out all parent stack frames belonging to *ignore_pkg*,
    returning a formatted traceback string of the remaining user frames.
    """
    if not isinstance(tb, types.TracebackType):
        # Already rendered; nothing to filter.
        return tb

    def in_namespace(n):
        # True when package name n is ignore_pkg or one of its submodules.
        return n and (n.startswith(ignore_pkg + '.') or n == ignore_pkg)

    # Skip test runner traceback levels
    while tb and in_namespace(tb.tb_frame.f_globals['__package__']):
        tb = tb.tb_next
    starting_tb = tb
    limit = 0
    # Count consecutive frames outside ignore_pkg; limit caps formatting.
    while tb and not in_namespace(tb.tb_frame.f_globals['__package__']):
        tb = tb.tb_next
        limit += 1
    return ''.join(traceback.format_exception(error.__class__, error, starting_tb, limit))
Filtered out all parent stacktraces starting with the given stacktrace that has a given variable name in its globals .
206
25
14,944
def get_true_function(obj):
    """Return the underlying function and whether *obj* is method-like."""
    if not callable(obj):
        raise TypeError("%r is not callable." % (obj,))
    is_method = inspect.ismethod(obj)
    if is_method or inspect.isfunction(obj):
        return obj, is_method
    if hasattr(obj, 'im_func'):
        # Python 2 bound/unbound method object.
        return obj.im_func, True
    if inspect.isclass(obj):
        return getattr(obj, '__init__', None), True
    if isinstance(obj, object):
        if hasattr(obj, 'func'):
            # e.g. functools.partial - unwrap recursively.
            return get_true_function(obj.func)
        return obj.__call__, True
    raise TypeError("Unknown type of object: %r" % obj)
Returns the actual function and a boolean indicated if this is a method or not .
163
16
14,945
def assert_valid_path(self, path):
    """Ensure that *path* is a string naming an existing file."""
    if isinstance(path, str):
        if os.path.isfile(path):
            return
        raise NotFoundResourceException('File "{0}" does not exist'.format(path))
    raise NotFoundResourceException("Resource passed to load() method must be a file path")
Ensures that the path represents an existing file
75
10
14,946
def read_file(self, path):
    """Read a file into memory and return its contents decoded as UTF-8."""
    self.assert_valid_path(path)
    with open(path, 'rb') as handle:
        raw = handle.read()
    return raw.decode('UTF-8')
Reads a file into memory and returns its contents.
50
11
14,947
def flatten(self, messages, parent_key=''):
    """Flattens a nested dict of translations into dotted keys."""
    # collections.MutableMapping was removed in Python 3.10; the ABC
    # lives in collections.abc.
    from collections.abc import MutableMapping
    items = []
    sep = '.'
    for k, v in list(messages.items()):
        new_key = "{0}{1}{2}".format(parent_key, sep, k) if parent_key else k
        if isinstance(v, MutableMapping):
            # Recurse into nested mappings, prefixing with the new key.
            items.extend(list(self.flatten(v, new_key).items()))
        else:
            items.append((new_key, v))
    return dict(items)
Flattens a nested array of translations.
121
9
14,948
def parse(self, resource):
    """Loads given resource into a dict using polib."""
    try:
        import polib
    except ImportError as e:
        self.rethrow("You need to install polib to use PoFileLoader or MoFileLoader", ImportError)
    self.assert_valid_path(resource)
    messages = {}
    parsed = self._load_contents(polib, resource)
    for item in parsed:
        if item.msgid_plural:
            # Plural entry: msgstr_plural maps index -> translation.
            plurals = sorted(item.msgstr_plural.items())
            if item.msgid and len(plurals) > 1:
                # The singular msgid takes the first plural variant.
                messages[item.msgid] = plurals[0][1]
            plurals = [msgstr for idx, msgstr in plurals]
            # All plural forms are joined with "|" under msgid_plural.
            messages[item.msgid_plural] = "|".join(plurals)
        elif item.msgid:
            messages[item.msgid] = item.msgstr
    return messages
Loads given resource into a dict using polib
192
10
14,949
def arch(self):
    """Return an architecture for this task."""
    # Position of the arch within params depends on the task method.
    position_by_method = {
        'buildArch': 2,
        'createdistrepo': 2,
        'livecd': 2,
        'createrepo': 1,
        'runroot': 1,
        'createImage': 3,
    }
    index = position_by_method.get(self.method)
    if index is not None:
        return self.params[index]
    if self.method == 'indirectionimage':
        return self.params[0]['arch']
Return an architecture for this task .
102
7
14,950
def arches(self):
    """Return a list of architectures for this task."""
    if self.method == 'image':
        # Image tasks carry the arch list directly.
        return self.params[2]
    single = self.arch
    return [single] if single else []
Return a list of architectures for this task .
37
9
14,951
def duration(self):
    """Return a timedelta for this task, or None if it has not started."""
    if not self.started:
        return None
    # Unfinished tasks are measured up to "now".
    end = self.completed or datetime.utcnow()
    return end - self.started
Return a timedelta for this task .
42
8
14,952
def estimate_completion(self):
    """Estimate completion time for a task.

    Twisted inlineCallbacks-style generator; yields Deferreds and
    returns via defer.returnValue().
    """
    if self.completion_ts:
        # Task is already complete. Return the exact completion time:
        defer.returnValue(self.completed)
    # Get the timestamps from the descendent task that's doing the work:
    if self.method == 'build' or self.method == 'image':
        subtask_completion = yield self.estimate_descendents()
        defer.returnValue(subtask_completion)
    if self.state == task_states.FREE:
        # Not yet picked up by a builder: estimate queueing + running.
        est_completion = yield self._estimate_free()
        defer.returnValue(est_completion)
    # Running task: project from start time plus the average duration.
    avg_delta = yield self.estimate_duration()
    if avg_delta is None:
        defer.returnValue(None)
    est_completion = self.started + avg_delta
    defer.returnValue(est_completion)
Estimate completion time for a task .
185
8
14,953
def _estimate_free(self):
    """Estimate completion time for a free (not yet started) task.

    Twisted inlineCallbacks-style generator.
    """
    # Query the information we need for this task's channel and package.
    capacity_deferred = self.channel.total_capacity()
    open_tasks_deferred = self.channel.tasks(state=[task_states.OPEN])
    avg_delta_deferred = self.estimate_duration()
    deferreds = [capacity_deferred, open_tasks_deferred, avg_delta_deferred]
    results = yield defer.gatherResults(deferreds, consumeErrors=True)
    capacity, open_tasks, avg_delta = results
    # Ensure this task's channel has spare capacity for this task.
    open_weight = sum([task.weight for task in open_tasks])
    if open_weight >= capacity:
        # TODO: Evaluate all tasks in the channel and
        # determine when enough OPEN tasks will complete so that we can
        # get to OPEN.
        raise NotImplementedError('channel %d is at capacity' % self.channel_id)
    # A builder will pick up this task and start it within SLEEPTIME.
    # start_time is the maximum amount of time we expect to wait here.
    start_time = self.created + SLEEPTIME
    if avg_delta is None:
        defer.returnValue(None)
    est_completion = start_time + avg_delta
    defer.returnValue(est_completion)
Estimate completion time for a free task .
305
9
14,954
def package(self):
    """Find a package name from a build task's parameters."""
    if self.method == 'buildNotification':
        return self.params[1]['name']
    if self.method in ('createImage', 'image', 'livecd'):
        return self.params[0]
    if self.method == 'indirectionimage':
        return self.params[0]['name']
    # params[0] is the source URL for these tasks:
    source_methods = ('build', 'buildArch', 'buildContainer', 'buildMaven',
                      'buildSRPMFromSCM', 'maven')
    if self.method not in source_methods:
        return None
    # (I wish there was a better way to do this.)
    source = self.params[0]
    parsed = urlparse(source)
    if source.endswith('.src.rpm'):
        # build tasks can load an SRPM from a "cli-build" tmpdir:
        srpm = os.path.basename(source)
        # Note we're throwing away version and release here. They could be
        # useful eventually, maybe in a "Package" class.
        name, _version, _release = srpm.rsplit('-', 2)
        return name
    if parsed.scheme:
        # or an allowed SCM:
        name = os.path.basename(parsed.path)
        if name.endswith('.git'):
            name = name[:-4]
        if self.method == 'buildContainer':
            name += '-container'
        return name
    raise ValueError('could not parse source "%s"' % source)
Find a package name from a build task's parameters.
335
11
14,955
def params(self):
    """Return a list of parameters in this task's request."""
    if isinstance(self.request, list):
        # Already deserialized; strip the Munch wrappers.
        return unmunchify(self.request)
    parameters, _method = xmlrpc.loads(self.request)
    return parameters
Return a list of parameters in this task's request.
44
11
14,956
def cmd(send, _, args):
    """Pull changes."""
    try:
        confdir = args['handler'].confdir
        if exists(join(confdir, '.git')):
            send(do_pull(srcdir=confdir))
        else:
            send(do_pull(repo=args['config']['api']['githubrepo']))
    except subprocess.CalledProcessError as ex:
        # Surface each line of the command output in the error log.
        for line in ex.output.strip().splitlines():
            logging.error(line)
        raise ex
Pull changes .
117
3
14,957
def _start_date_of_year ( year : int ) -> datetime . date : jan_one = datetime . date ( year , 1 , 1 ) diff = 7 * ( jan_one . isoweekday ( ) > 3 ) - jan_one . isoweekday ( ) return jan_one + datetime . timedelta ( days = diff )
Return start date of the year using MMWR week rules
79
12
14,958
def date_to_epiweek(date=None) -> Epiweek:
    """Convert python date to Epiweek; defaults to today's date.

    NOTE: the original default ``datetime.date.today()`` was evaluated
    once at import time, so a long-running process would keep using a
    stale "today"; evaluate it per call instead.
    """
    if date is None:
        date = datetime.date.today()
    year = date.year
    # The date may fall in the epi-year before or after its calendar year.
    start_dates = list(map(_start_date_of_year, [year - 1, year, year + 1]))
    start_date = start_dates[1]
    if start_dates[1] > date:
        start_date = start_dates[0]
    elif date >= start_dates[2]:
        start_date = start_dates[2]
    return Epiweek(
        year=(start_date + datetime.timedelta(days=7)).year,
        week=((date - start_date).days // 7) + 1,
        day=(date.isoweekday() % 7) + 1)
Convert python date to Epiweek
172
8
14,959
def epiweeks_in_year(year: int) -> int:
    """Return number of epiweeks in a year (52 or 53)."""
    # The year has 53 weeks iff week 53 round-trips into the same year.
    candidate = Epiweek(year, 53)
    has_week_53 = date_to_epiweek(epiweek_to_date(candidate)).year == year
    return 53 if has_week_53 else 52
Return number of epiweeks in a year
55
10
14,960
def parser(self):
    """Returns the appropriate parser to use for adding arguments to your command."""
    if self._command_parser is None:
        # Build the sub-parser lazily, attaching the shared parent
        # parsers only when this command asks for them.
        parents = []
        if self.need_verbose:
            parents.append(_verbose_parser)
        if self.need_settings:
            parents.append(_settings_parser)
        self._command_parser = self._main_parser.add_parser(
            self.name,
            help=self.help,
            parents=parents,
            formatter_class=argparse.RawDescriptionHelpFormatter)
    return self._command_parser
Returns the appropriate parser to use for adding arguments to your command .
107
13
14,961
def cmd(send, _, args):
    """Returns a list of admins."""
    rows = args['db'].query(Permissions).order_by(Permissions.nick).all()
    # (V) marks registered admins, (U) unregistered ones.
    labels = ["%s (V)" % row.nick if row.registered else "%s (U)" % row.nick
              for row in rows]
    send(", ".join(labels), target=args['nick'])
Returns a list of admins .
102
6
14,962
def run(self):
    """Move both agents toward opposite grid edges for up to 3 steps,
    or until an event halts the simulation."""
    x, y = 1, 0  # set the direction
    steps_taken = 0
    while self.s.get_state() != 'Halted':
        self.s.command({'name': 'walk', 'type': 'move', 'direction': [x, y]}, self.a1)
        self.s.command({'name': 'walk', 'type': 'run', 'direction': [x, y + 1]}, self.a2)
        steps_taken += 1
        if steps_taken >= 3:
            break
    for agent in self.s.agents:
        print(agent.name, 'finished at position ', agent.coords['x'], agent.coords['y'])
This AI simple moves the characters towards the opposite edges of the grid for 3 steps or until event halts the simulation
172
23
14,963
def validate(schema_file, config_file, deprecation):
    """Validate a configuration file against a confirm schema."""
    result = validator_from_config_file(config_file, schema_file)
    result.validate(error_on_deprecated=deprecation)
    # Errors in red, warnings in yellow, both on stderr.
    for message in result.errors():
        click.secho('Error : %s' % message, err=True, fg='red')
    for message in result.warnings():
        click.secho('Warning : %s' % message, err=True, fg='yellow')
Validate a configuration file against a confirm schema .
118
10
14,964
def migrate(schema_file, config_file):
    """Migrates a configuration file using a confirm schema."""
    # Close file handles deterministically instead of leaking them.
    with open(schema_file, 'r') as schema_fp:
        schema = load_schema_file(schema_fp)
    with open(config_file, 'r') as config_fp:
        config = load_config_file(config_file, config_fp.read())
    config = append_existing_values(schema, config)
    migrated_config = generate_config_parser(config)
    migrated_config.write(sys.stdout)
Migrates a configuration file using a confirm schema .
94
11
14,965
def document(schema_file):
    """Generate reStructuredText documentation from a confirm schema."""
    # Close the file handle deterministically instead of leaking it.
    with open(schema_file, 'r') as schema_fp:
        schema = load_schema_file(schema_fp)
    documentation = generate_documentation(schema)
    sys.stdout.write(documentation)
Generate reStructuredText documentation from a confirm schema .
46
12
14,966
def generate(schema_file, all_options):
    """Generates a template configuration file from a confirm schema."""
    # Close the file handle deterministically instead of leaking it.
    with open(schema_file, 'r') as schema_fp:
        schema = load_schema_file(schema_fp)
    config_parser = generate_config_parser(schema, include_all=all_options)
    config_parser.write(sys.stdout)
Generates a template configuration file from a confirm schema .
63
11
14,967
def init(config_file):
    """Initialize a confirm schema from an existing configuration file."""
    # Close the file handle deterministically instead of leaking it.
    with open(config_file, 'r') as config_fp:
        schema = generate_schema_file(config_fp.read())
    sys.stdout.write(schema)
Initialize a confirm schema from an existing configuration file .
41
11
14,968
def _init_random_gaussians(self, X):
    """Initialize gaussians randomly."""
    n_samples = np.shape(X)[0]
    # Uniform priors over the k components.
    self.priors = np.ones(self.k) * (1 / self.k)
    for _ in range(self.k):
        # Mean is a random sample; covariance starts at the data's own.
        mean = X[np.random.choice(range(n_samples))]
        self.parameters.append({"mean": mean,
                                "cov": calculate_covariance_matrix(X)})
Initialize gaussians randomly.
116
5
14,969
def _get_likelihoods ( self , X ) : n_samples = np . shape ( X ) [ 0 ] likelihoods = np . zeros ( ( n_samples , self . k ) ) for i in range ( self . k ) : likelihoods [ : , i ] = self . multivariate_gaussian ( X , self . parameters [ i ] ) return likelihoods
Calculate the likelihood over all samples
84
8
14,970
def _expectation(self, X):
    """E-step: compute responsibilities and hard cluster assignments."""
    # Probabilities of X belonging to the different clusters, weighted by priors
    weighted = self._get_likelihoods(X) * self.priors
    normalizer = np.expand_dims(np.sum(weighted, axis=1), axis=1)
    # Responsibility is P(X|y)*P(y)/P(X)
    self.responsibility = weighted / normalizer
    # Hard-assign each sample to its most probable cluster
    self.sample_assignments = self.responsibility.argmax(axis=1)
    # Save the max responsibility per sample for the convergence check
    self.responsibilities.append(np.max(self.responsibility, axis=1))
Calculate the responsibility
164
5
14,971
def _maximization(self, X):
    """M-step: re-estimate each gaussian's mean/covariance and the priors."""
    for cluster_idx in range(self.k):
        resp = np.expand_dims(self.responsibility[:, cluster_idx], axis=1)
        new_mean = (resp * X).sum(axis=0) / resp.sum()
        centered = X - new_mean
        new_cov = centered.T.dot(centered * resp) / resp.sum()
        self.parameters[cluster_idx]["mean"] = new_mean
        self.parameters[cluster_idx]["cov"] = new_cov
    # Priors become the average responsibility mass per cluster
    n_samples = np.shape(X)[0]
    self.priors = self.responsibility.sum(axis=0) / n_samples
Update the parameters and priors
169
6
14,972
def _converged(self, X):
    """Converged when ||resp - last_resp|| <= tolerance (needs >= 2 iterations)."""
    if len(self.responsibilities) < 2:
        return False
    delta = np.linalg.norm(
        self.responsibilities[-1] - self.responsibilities[-2])
    return delta <= self.tolerance
Convergence if || likelihood - last_likelihood || < tolerance
55
14
14,973
def cluster(self, X):
    """Run GMM (EM) on X and return the per-sample cluster indices."""
    # Random initialization of the gaussians
    self._init_random_gaussians(X)
    # Alternate E- and M-steps until convergence or max iterations
    for _ in range(self.max_iterations):
        self._expectation(X)   # E-step
        self._maximization(X)  # M-step
        if self._converged(X):
            break
    # Final assignments under the converged parameters
    self._expectation(X)
    return self.sample_assignments
Run GMM and return the cluster indices
116
8
14,974
def cmd(send, msg, args):
    """Translate something (optionally from --lang to --to)."""
    parser = arguments.ArgParser(args['config'])
    parser.add_argument('--lang', '--from', default=None)
    parser.add_argument('--to', default='en')
    parser.add_argument('msg', nargs='+')
    try:
        cmdargs = parser.parse_args(msg)
    except arguments.ArgumentException as e:
        send(str(e))
        return
    text = ' '.join(cmdargs.msg)
    send(gen_translate(text, cmdargs.lang, cmdargs.to))
Translate something .
136
4
14,975
def cmd(send, msg, _):
    """Flips a coin a number of times."""
    coin = ['heads', 'tails']
    if not msg:
        send('The coin lands on... %s' % choice(coin))
        return
    if not msg.lstrip('-').isdigit():
        send("Not A Valid Positive Integer.")
        return
    flips = int(msg)
    if flips < 0:
        send("Negative Flipping requires the (optional) quantum coprocessor.")
        return
    # NOTE(review): randint gives a uniform head count, not the binomial
    # distribution of actually flipping `flips` coins — confirm intent.
    headflips = randint(0, flips)
    tailflips = flips - headflips
    send('The coins land on heads %g times and on tails %g times.' % (headflips, tailflips))
Flips a coin a number of times .
151
9
14,976
def is_admin(self, send, nick, required_role='admin'):
    """Check whether *nick* holds at least *required_role*.

    Roles are 'admin' and 'owner' (owner implies admin). Returns True/False;
    may trigger a nickserv re-verification and complain via *send*.
    """
    # If the required role is None, bypass checks.
    if not required_role:
        return True
    # Current roles are admin and owner, which is a superset of admin.
    with self.db.session_scope() as session:
        admin = session.query(orm.Permissions).filter(orm.Permissions.nick == nick).first()
        if admin is None:
            return False
        # owner implies admin, but not the other way around.
        if required_role == "owner" and admin.role != "owner":
            return False
        # no nickserv support, assume people are who they say they are.
        if not self.config['feature'].getboolean('nickserv'):
            return True
        if not admin.registered:
            self.update_authstatus(nick)
            # We don't necessarily want to complain in all cases.
            if send is not None:
                send("Unverified admin: %s" % nick, target=self.config['core']['channel'])
            return False
        if not self.features['account-notify']:
            # reverify every 5min if we don't have the notification feature.
            if datetime.now() - admin.time > timedelta(minutes=5):
                self.update_authstatus(nick)
        return True
Checks if a nick is an admin .
301
9
14,977
def get_admins(self):
    """Check nickserv verification for every admin in the permissions table."""
    # no nickserv support, assume people are who they say they are.
    if not self.config['feature'].getboolean('nickserv'):
        return
    with self.db.session_scope() as session:
        for admin in session.query(orm.Permissions).all():
            if not admin.registered:
                self.update_authstatus(admin.nick)
Check verification for all admins .
96
6
14,978
def abusecheck(self, send, nick, target, limit, cmd):
    """Rate-limit commands: record this invocation and, when *nick* has
    exceeded *limit* uses of *cmd* within 60 seconds, scold and ignore them.

    Returns True when the abuse threshold was hit, otherwise None.
    """
    # Record this invocation for (nick, cmd).
    self.abuselist.setdefault(nick, {}).setdefault(cmd, []).append(datetime.now())
    # 60 seconds - arbitrary cuttoff
    window = timedelta(seconds=60)
    recent = [stamp for stamp in self.abuselist[nick][cmd]
              if datetime.now() - stamp < window]
    if len(recent) > limit:
        msg = "%s: don't abuse scores!" if cmd == 'scores' else "%s: stop abusing the bot!"
        send(msg % nick, target=target)
        with self.db.session_scope() as session:
            send(misc.ignore(session, nick))
        return True
Rate - limits commands .
216
5
14,979
def do_log(self, target, nick, msg, msgtype):
    """Log a message to the database and optionally relay it to the control channel."""
    if not isinstance(msg, str):
        raise Exception("IRC doesn't like it when you send it a %s" % type(msg).__name__)
    target = target.lower()
    flags = 0
    # Properly handle /msg +#channel
    if target.startswith(('+', '@')):
        target = target[1:]
    with self.data_lock:
        if target in self.channels:
            if self.opers[target].get(nick, False):
                flags |= 1
            if self.voiced[target].get(nick, False):
                flags |= 2
        else:
            target = 'private'
    # FIXME: should we special-case this?
    # strip ctrl chars from !creffett
    msg = msg.replace('\x02\x038,4', '<rage>')
    self.db.log(nick, target, flags, msg, msgtype)
    if self.log_to_ctrlchan:
        ctrlchan = self.config['core']['ctrlchan']
        if target != ctrlchan:
            ctrlmsg = "%s:%s:%s:%s" % (target, msgtype, nick, msg)
            # If we call self.send, we'll get a infinite loop.
            self.connection.privmsg(ctrlchan, ctrlmsg.strip())
Handles logging .
315
4
14,980
def do_part(self, cmdargs, nick, target, msgtype, send, c):
    """Leave a channel at a user's request, refusing to leave the home or
    control channels."""
    channel = self.config['core']['channel']
    botnick = self.config['core']['nick']
    if not cmdargs:
        # don't leave the primary channel
        if target == channel:
            send("%s must have a home." % botnick)
            return
        cmdargs = target
    if not cmdargs.startswith(('#', '+', '@')):
        cmdargs = '#' + cmdargs
    # don't leave the primary channel
    if cmdargs == channel:
        send("%s must have a home." % botnick)
        return
    # don't leave the control channel
    if cmdargs == self.config['core']['ctrlchan']:
        send("%s must remain under control, or bad things will happen." % botnick)
        return
    self.send(cmdargs, nick, "Leaving at the request of %s" % nick, msgtype)
    c.part(cmdargs)
Leaves a channel .
228
5
14,981
def do_join(self, cmdargs, nick, msgtype, send, c):
    """Join a channel at a user's request (append 'force' to rejoin)."""
    if not cmdargs:
        send("Join what?")
        return
    if cmdargs == '0':
        send("I'm sorry, Dave. I'm afraid I can't do that.")
        return
    if not cmdargs.startswith(('#', '+', '@')):
        cmdargs = '#' + cmdargs
    cmd = cmdargs.split()
    # FIXME: use argparse
    forced = len(cmd) > 1 and cmd[1] == "force"
    if cmd[0] in self.channels and not forced:
        send("%s is already a member of %s" % (self.config['core']['nick'], cmd[0]))
        return
    c.join(cmd[0])
    self.send(cmd[0], nick, "Joined at the request of " + nick, msgtype)
Join a channel .
206
4
14,982
def do_mode(self, target, msg, nick, send):
    """Track voice/op state from mode changes, reop if needed, and enforce
    the guard system."""
    mode_changes = irc.modes.parse_channel_modes(msg)
    with self.data_lock:
        for change in mode_changes:
            if change[1] == 'v':
                self.voiced[target][change[2]] = True if change[0] == '+' else False
            if change[1] == 'o':
                self.opers[target][change[2]] = True if change[0] == '+' else False
    # reop
    # FIXME: handle -o+o msbobBot msbobBot
    if [x for x in mode_changes if self.check_mode(x)]:
        send("%s: :(" % nick, target=target)
        # Assume bot admins know what they're doing.
        if not self.is_admin(None, nick):
            send("OP %s" % target, target='ChanServ')
            send("UNBAN %s" % target, target='ChanServ')
    if len(self.guarded) > 0:
        # if user is guarded and quieted, devoiced, or deopped, fix that
        regex = r"(.*(-v|-o|\+q|\+b)[^ ]*) (%s)" % "|".join(self.guarded)
        match = re.search(regex, msg)
        if match and nick not in [match.group(3), self.connection.real_nickname]:
            modestring = "+voe-qb %s" % (" ".join([match.group(3)] * 5))
            self.connection.mode(target, modestring)
            send('Mode %s on %s by the guard system' % (modestring, target),
                 target=self.config['core']['ctrlchan'])
reop and handle guard violations .
413
7
14,983
def do_kick(self, send, target, nick, msg, slogan=True):
    """Kick a user with a (possibly slogan-generated) message, or complain
    when the bot lacks ops."""
    if not self.kick_enabled:
        return
    if target not in self.channels:
        send("%s: you're lucky, private message kicking hasn't been implemented yet." % nick)
        return
    with self.data_lock:
        ops = [k for k, v in self.opers[target].items() if v]
    botnick = self.config['core']['nick']
    if botnick not in ops:
        ops = ['someone'] if not ops else ops
        send(textutils.gen_creffett("%s: /op the bot" % random.choice(ops)), target=target)
    elif random.random() < 0.01 and msg == "shutting caps lock off":
        if nick in ops:
            send("%s: HUEHUEHUE GIBE CAPSLOCK PLS I REPORT U" % nick, target=target)
        else:
            self.connection.kick(target, nick, "HUEHUEHUE GIBE CAPSLOCK PLS I REPORT U")
    else:
        msg = textutils.gen_slogan(msg).upper() if slogan else msg
        if nick in ops:
            send("%s: %s" % (nick, msg), target=target)
        else:
            self.connection.kick(target, nick, msg)
Kick users .
306
3
14,984
def do_args(self, modargs, send, nick, target, source, name, msgtype):
    """Build the argument dict a module requested via its argument list.

    Raises on any argument name the bot does not know how to provide.
    """
    available = {
        'nick': nick,
        'handler': self,
        'db': None,
        'config': self.config,
        'source': source,
        'name': name,
        'type': msgtype,
        'botnick': self.connection.real_nickname,
        'target': target if target[0] == "#" else "private",
        'do_kick': lambda target, nick, msg: self.do_kick(send, target, nick, msg),
        'is_admin': lambda nick: self.is_admin(send, nick),
        'abuse': lambda nick, limit, cmd: self.abusecheck(send, nick, target, limit, cmd),
    }
    realargs = {}
    for arg in modargs:
        if arg not in available:
            raise Exception("Invalid Argument: %s" % arg)
        realargs[arg] = available[arg]
    return realargs
Handle the various args that modules need .
223
8
14,985
def do_welcome(self):
    """Do setup when connected to the server: join channels and defer
    admin verification."""
    core = self.config['core']
    self.rate_limited_send('join', core['channel'])
    self.rate_limited_send('join', core['ctrlchan'], self.config['auth']['ctrlkey'])
    # We use this to pick up info on admins who aren't currently in a channel.
    self.workers.defer(5, False, self.get_admins)
    extrachans = core['extrachans']
    if extrachans:
        for chan in (x.strip() for x in extrachans.split(',')):
            self.rate_limited_send('join', chan)
Do setup when connected to server .
176
7
14,986
def get_filtered_send(self, cmdargs, send, target):
    """Parse out any --filter argument.

    Returns (remaining_args, send) where send applies the filter chain,
    or (error_message, None) on a parse/filter error.
    """
    parser = arguments.ArgParser(self.config)
    parser.add_argument('--filter')
    try:
        filterargs, remainder = parser.parse_known_args(cmdargs)
    except arguments.ArgumentException as ex:
        return str(ex), None
    cmdargs = ' '.join(remainder)
    if filterargs.filter is None:
        return cmdargs, send
    filter_list, output = textutils.append_filters(filterargs.filter)
    if filter_list is None:
        return output, None

    # define a new send to handle filter chaining
    def filtersend(msg, mtype='privmsg', target=target, ignore_length=False):
        self.send(target, self.connection.real_nickname, msg, mtype,
                  ignore_length, filters=filter_list)

    return cmdargs, filtersend
Parse out any filters .
197
6
14,987
def handle_msg(self, c, e):
    """The heart and soul of IrcBot: dispatch a single IRC event."""
    if e.type not in ['authenticate', 'error', 'join', 'part', 'quit']:
        nick = e.source.nick
    else:
        nick = e.source
    if e.arguments is None:
        msg = ""
    else:
        msg = " ".join(e.arguments).strip()
    # Send the response to private messages to the sending nick.
    target = nick if e.type == 'privmsg' else e.target

    def send(msg, mtype='privmsg', target=target, ignore_length=False):
        self.send(target, self.connection.real_nickname, msg, mtype, ignore_length)

    if e.type in ['account', 'authenticate', 'bannedfromchan', 'cap', 'ctcpreply',
                  'error', 'featurelist', 'nosuchnick', 'nick', 'nicknameinuse',
                  'privnotice', 'welcome', 'whospcrpl']:
        self.handle_event(msg, send, c, e)
        return
    # ignore empty messages
    if not msg and e.type != 'join':
        return
    self.do_log(target, nick, msg, e.type)
    if e.type == 'mode':
        self.do_mode(target, msg, nick, send)
        return
    if e.type == 'join':
        self.handle_join(c, e, target, send)
        return
    if e.type == 'part':
        if nick == c.real_nickname:
            send("Parted channel %s" % target, target=self.config['core']['ctrlchan'])
        return
    if e.type == 'kick':
        self.handle_kick(c, e, target, send)
        return
    if e.target == self.config['core']['ctrlchan'] and self.is_admin(None, nick):
        control.handle_ctrlchan(self, msg, nick, send)
    if self.is_ignored(nick) and not self.is_admin(None, nick):
        return
    self.handle_hooks(send, nick, target, e, msg)
    # We only process hooks for notices, not commands.
    if e.type == 'pubnotice':
        return
    msg = misc.get_cmdchar(self.config, c, msg, e.type)
    cmd_name, cmdargs = self.get_cmd(msg)
    if registry.command_registry.is_registered(cmd_name):
        self.run_cmd(send, nick, target, cmd_name, cmdargs, e)
    # special commands
    elif cmd_name == 'reload':
        with self.db.session_scope() as session:
            if session.query(orm.Permissions).filter(orm.Permissions.nick == nick).count():
                send("Aye Aye Capt'n")
The Heart and Soul of IrcBot .
655
9
14,988
def process_tag(self, tag):
    """Dispatch a tag to the processor registered for its data type.

    Function tags are skipped; an unknown data type raises an Exception.
    """
    try:
        if self._is_function(tag):
            return
        self._tag_type_processor[tag.data_type](tag)
    except KeyError as ex:
        raise Exception('Tag type {0} not recognized for tag {1}'.format(tag.data_type, tag.name), ex)
Processes tag and detects which function to use
81
9
14,989
def process_boolean(self, tag):
    """Assign the current bit address to a boolean tag and advance the
    normal register to the next bit address."""
    register = self.normal_register
    tag.set_address(register.current_bit_address)
    register.move_to_next_bit_address()
Process Boolean type tags
46
4
14,990
def process_boolean_array(self, tag):
    """Reserve a block of bit addresses for a boolean-array tag and advance
    the normal register past the bytes the array occupies.

    Fix: ``/`` is float division in Python 3, so the original passed a
    float offset to ``move_to_next_address``; ``//`` restores the intended
    integer arithmetic.
    """
    array_size = tag.get_array_size()
    tag.set_address(self.normal_register.get_array(array_size))
    if self.is_sixteen_bit:
        # each boolean address needs 1/16 byte
        self.normal_register.move_to_next_address((array_size // 16) + 1)
        return
    # each boolean address needs 1/8 byte
    self.normal_register.move_to_next_address((array_size // 8) + 1)
Process Boolean array type tags
124
5
14,991
def process_byte(self, tag):
    """Assign the current address to a byte tag and advance the normal
    register by one byte."""
    register = self.normal_register
    tag.set_address(register.current_address)
    # each address needs 1 byte
    register.move_to_next_address(1)
Process byte type tags
48
4
14,992
def process_string(self, tag):
    """Assign the current string-register address to a string tag and advance.

    NOTE(review): both branches advance by exactly 1; confirm the
    sixteen-bit case was not meant to differ.
    """
    tag.set_address(self.string_register.current_address)
    if self.is_sixteen_bit:
        # each string address needs 1 byte = 1/2 an address
        self.string_register.move_to_next_address(1)
    else:
        # each string address needs 1 byte = 1 address
        self.string_register.move_to_next_address(1)
Process string type tags
92
4
14,993
def cmd(send, msg, args):
    """Microwave something at a given power level (1-10; >7 is admin-only).

    Fix: an input whose level group matched empty (e.g. " sandwich")
    crashed on ``int('')``; it now gets the 'Power level?' prompt.
    """
    nick = args['nick']
    channel = args['target'] if args['target'] != 'private' else args['config']['core']['channel']
    levels = {1: 'Whirr...',
              2: 'Vrrm...',
              3: 'Zzzzhhhh...',
              4: 'SHFRRRRM...',
              5: 'GEEEEZZSH...',
              6: 'PLAAAAIIID...',
              7: 'KKKRRRAAKKKAAKRAKKGGARGHGIZZZZ...',
              8: 'Nuke',
              9: 'nneeeaaaooowwwwww..... BOOOOOSH BLAM KABOOM',
              10: 'ssh root@remote.tjhsst.edu rm -rf ~%s'}
    if not msg:
        send('What to microwave?')
        return
    match = re.match('(-?[0-9]*) (.*)', msg)
    # Guard against an empty level group, which used to crash int().
    if not match or not match.group(1):
        send('Power level?')
        return
    level = int(match.group(1))
    target = match.group(2)
    if level > 10:
        send('Aborting to prevent extinction of human race.')
        return
    if level < 1:
        send('Anti-matter not yet implemented.')
        return
    if level > 7:
        if not args['is_admin'](nick):
            send("I'm sorry. Nukes are a admin-only feature")
            return
        # NOTE(review): this compares the whole msg, not the target,
        # against the bot nick — confirm intent.
        elif msg == args['botnick']:
            send("Sorry, Self-Nuking is disabled pending aquisition of a Lead-Lined Fridge.")
        else:
            with args['handler'].data_lock:
                if target not in args['handler'].channels[channel].users():
                    send("I'm sorry. Anonymous Nuking is not allowed")
                    return
    # Build up the microwave noises for each level up to 7.
    msg = levels[1]
    for i in range(2, level + 1):
        if i < 8:
            msg += ' ' + levels[i]
    send(msg)
    if level >= 8:
        do_nuke(args['handler'].connection, nick, target, channel)
    if level >= 9:
        send(levels[9])
    if level == 10:
        send(levels[10] % target)
    send('Ding, your %s is ready.' % target)
Microwaves something .
520
5
14,994
async def main():
    """Sample code: retrieve data from an OpenSenseMap station and print it."""
    async with aiohttp.ClientSession() as session:
        station = OpenSenseMap(SENSOR_ID, loop, session)
        # Print details about the given station
        await station.get_data()
        print("Name:", station.name)
        print("Description:", station.description)
        print("Coordinates:", station.coordinates)
        print("PM 2.5:", station.pm2_5)
        print("PM 10:", station.pm10)
        print("Temperature:", station.temperature)
Sample code to retrieve the data from an OpenSenseMap station .
121
13
14,995
def theme(self, text):
    """Theme style: bright theme-colored text, reset afterwards."""
    return ''.join((self.theme_color, self.BRIGHT, text, self.RESET))
Theme style .
26
3
14,996
def _label_desc(self, label, desc, label_color=''):
    """Generic styler: a bright, colored label followed by a plain description."""
    styled_label = self.BRIGHT + label_color + label + self.RESET
    return styled_label + desc
Generic styler for a line consisting of a label and description .
37
13
14,997
def error(self, cmd, desc=''):
    """Style for an error message: label in the error color."""
    return self._label_desc(cmd, desc, label_color=self.error_color)
Style for an error message .
30
6
14,998
def warn(self, cmd, desc=''):
    """Style for a warning message: label in the warn color."""
    return self._label_desc(cmd, desc, label_color=self.warn_color)
Style for warning message .
30
5
14,999
def success(self, cmd, desc=''):
    """Style for a success message: label in the success color."""
    return self._label_desc(cmd, desc, label_color=self.success_color)
Style for a success message .
30
6