idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
41,200
def bind(self, model, template="{}"):
    """Bind *model* to this reference.

    The model's ``id`` attribute and *template* are used to dynamically
    produce a URI when accessed.
    """
    self._uri_template = template
    self._bound_model = model
    self._set_uri_from_bound_model()
Bind the model to the reference. This uses the model's ``id`` attribute and the given template to dynamically produce a URI when accessed.
41,201
def index_bounds(x):
    """Return a ``(first, last)`` tuple of the items in *x*."""
    # pandas objects need positional access via .iloc
    if isinstance(x, (pd.DataFrame, pd.Series)):
        return x.iloc[0], x.iloc[-1]
    return x[0], x[-1]
Returns a tuple with the first and last item.
41,202
def dqdv_cycle(cycle, splitter=True, **kwargs):
    """Create dq/dv data from one capacity-voltage cycle.

    Args:
        cycle: DataFrame with "direction", "capacity" and "voltage" columns.
        splitter: insert a NaN between the two half-cycles (plot gap).
        **kwargs: forwarded to ``Converter``.

    Returns:
        tuple of (voltage, incremental_capacity) numpy arrays.
    """

    def _convert(half_cycle):
        # run the full Converter pipeline on one half-cycle
        converter = Converter(**kwargs)
        converter.set_data(half_cycle["capacity"], half_cycle["voltage"])
        converter.inspect_data()
        converter.pre_process_data()
        converter.increment_data()
        converter.post_process_data()
        return converter.voltage_processed, converter.incremental_capacity

    c_first = cycle.loc[cycle["direction"] == -1]
    c_last = cycle.loc[cycle["direction"] == 1]

    voltage_first, incremental_capacity_first = _convert(c_first)
    if splitter:
        # np.nan: np.NaN was removed in NumPy 2.0
        voltage_first = np.append(voltage_first, np.nan)
        incremental_capacity_first = np.append(incremental_capacity_first, np.nan)

    voltage_last, incremental_capacity_last = _convert(c_last)
    # reverse the second half so the combined curve is continuous
    voltage_last = voltage_last[::-1]
    incremental_capacity_last = incremental_capacity_last[::-1]

    voltage = np.concatenate((voltage_first, voltage_last))
    incremental_capacity = np.concatenate(
        (incremental_capacity_first, incremental_capacity_last)
    )
    return voltage, incremental_capacity
Convenience functions for creating dq - dv data from given capacity and voltage cycle .
41,203
def dqdv_cycles(cycles, **kwargs):
    """Create dq/dv data for every cycle in *cycles* as one DataFrame."""
    frames = []
    for number, group in cycles.groupby("cycle"):
        v, dq = dqdv_cycle(group, splitter=True, **kwargs)
        frame = pd.DataFrame({"voltage": v, "dq": dq})
        frame["cycle"] = number
        frames.append(frame[['cycle', 'voltage', 'dq']])
    return pd.concat(frames)
Convenience functions for creating dq - dv data from given capacity and voltage cycles .
41,204
def dqdv(voltage, capacity, voltage_resolution=None, capacity_resolution=None,
         voltage_fwhm=0.01, pre_smoothing=True, diff_smoothing=False,
         post_smoothing=True, post_normalization=True,
         interpolation_method=None, gaussian_order=None, gaussian_mode=None,
         gaussian_cval=None, gaussian_truncate=None, points_pr_split=None,
         savgol_filter_window_divisor_default=None,
         savgol_filter_window_order=None, max_points=None, **kwargs):
    """Create dq/dv data from the given capacity and voltage data.

    Configures a ``Converter`` (optional parameters only override the
    converter's defaults when explicitly given), runs its full pipeline
    and returns ``(voltage_processed, incremental_capacity)``.
    """
    converter = Converter(**kwargs)
    logging.debug("dqdv - starting")
    logging.debug("dqdv - created Converter obj")
    # mandatory switches
    converter.pre_smoothing = pre_smoothing
    converter.post_smoothing = post_smoothing
    converter.smoothing = diff_smoothing
    converter.normalize = post_normalization
    converter.voltage_fwhm = voltage_fwhm
    logging.debug(f"converter.pre_smoothing: {converter.pre_smoothing}")
    logging.debug(f"converter.post_smoothing: {converter.post_smoothing}")
    logging.debug(f"converter.smoothing: {converter.smoothing}")
    logging.debug(f"converter.normalise: {converter.normalize}")
    logging.debug(f"converter.voltage_fwhm: {converter.voltage_fwhm}")
    # optional overrides - only applied when explicitly given
    if voltage_resolution is not None:
        converter.voltage_resolution = voltage_resolution
    if capacity_resolution is not None:
        converter.capacity_resolution = capacity_resolution
    if savgol_filter_window_divisor_default is not None:
        converter.savgol_filter_window_divisor_default = savgol_filter_window_divisor_default
        logging.debug(f"converter.savgol_filter_window_divisor_default: "
                      f"{converter.savgol_filter_window_divisor_default}")
    if savgol_filter_window_order is not None:
        converter.savgol_filter_window_order = savgol_filter_window_order
        logging.debug(f"converter.savgol_filter_window_order: "
                      f"{converter.savgol_filter_window_order}")
    if gaussian_mode is not None:
        converter.gaussian_mode = gaussian_mode
    if gaussian_order is not None:
        converter.gaussian_order = gaussian_order
    if gaussian_truncate is not None:
        converter.gaussian_truncate = gaussian_truncate
    if gaussian_cval is not None:
        converter.gaussian_cval = gaussian_cval
    if interpolation_method is not None:
        converter.interpolation_method = interpolation_method
    if points_pr_split is not None:
        converter.points_pr_split = points_pr_split
    if max_points is not None:
        converter.max_points = max_points
    # run the conversion pipeline
    converter.set_data(capacity, voltage)
    converter.inspect_data()
    converter.pre_process_data()
    converter.increment_data()
    converter.post_process_data()
    return converter.voltage_processed, converter.incremental_capacity
Convenience functions for creating dq - dv data from given capacity and voltage data .
41,205
def _dqdv_combinded_frame(cell, **kwargs):
    """Return full-cycle dq/dv data for all cycles as one DataFrame."""
    cycles = cell.get_cap(
        method="forth-and-forth",
        categorical_column=True,
        label_cycle_number=True,
    )
    ica_df = dqdv_cycles(cycles, **kwargs)
    assert isinstance(ica_df, pd.DataFrame)
    return ica_df
Returns full cycle dqdv data for all cycles as one pd . DataFrame .
41,206
def _dqdv_split_frames(cell, tidy=False, **kwargs):
    """Return dq/dv data as (charge, discharge) DataFrames for all cycles.

    With ``tidy=True`` the frames are melted to long format.
    """

    def _one_direction(direction):
        # collect the capacity curves and turn them into ica frames
        curves, cycles, v_min, v_max = _collect_capacity_curves(
            cell, direction=direction
        )
        ica_dfs = _make_ica_charge_curves(curves, cycles, v_min, v_max, **kwargs)
        frame = pd.concat(ica_dfs, axis=1, keys=[k.name for k in ica_dfs])
        frame.columns.names = ["cycle", "value"]
        return frame

    ica_charge_df = _one_direction("charge")
    ica_discharge_df = _one_direction("discharge")
    if tidy:
        ica_charge_df = ica_charge_df.melt(
            "voltage", var_name="cycle", value_name="dq", col_level=0
        )
        ica_discharge_df = ica_discharge_df.melt(
            "voltage", var_name="cycle", value_name="dq", col_level=0
        )
    return ica_charge_df, ica_discharge_df
Returns dqdv data as pandas . DataFrames for all cycles .
41,207
def inspect_data(self, capacity=None, voltage=None, err_est=False, diff_est=False):
    """Check and inspect the capacity/voltage data.

    Populates bookkeeping attributes (lengths, bounds, number of points),
    optionally computes mean point-to-point differences and a piece-wise
    linear-regression error estimate, and records anomalies in
    ``self.errors``.

    Raises:
        NullData: if capacity or voltage is missing or has < 2 points.
    """
    logging.debug("inspecting the data")
    if capacity is None:
        capacity = self.capacity
    if voltage is None:
        voltage = self.voltage
    if capacity is None or voltage is None:
        raise NullData
    self.len_capacity = len(capacity)
    self.len_voltage = len(voltage)
    if self.len_capacity <= 1:
        raise NullData
    if self.len_voltage <= 1:
        raise NullData
    self.min_capacity, self.max_capacity = value_bounds(capacity)
    self.start_capacity, self.end_capacity = index_bounds(capacity)
    self.number_of_points = len(capacity)
    if diff_est:
        # mean point-to-point steps
        d_capacity = np.diff(capacity)
        d_voltage = np.diff(voltage)
        self.d_capacity_mean = np.mean(d_capacity)
        self.d_voltage_mean = np.mean(d_voltage)
    if err_est:
        # split into equal pieces and estimate the noise from the
        # std-error of a linear fit on each piece
        splits = int(self.number_of_points / self.points_pr_split)
        rest = self.number_of_points % self.points_pr_split
        if splits < self.minimum_splits:
            txt = "no point in splitting, too little data"
            logging.debug(txt)
            self.errors.append("splitting: to few points")
        else:
            if rest > 0:
                # drop the trailing remainder so np.split gets equal pieces
                _cap = capacity[:-rest]
                _vol = voltage[:-rest]
            else:
                _cap = capacity
                _vol = voltage
            c_pieces = np.split(_cap, splits)
            v_pieces = np.split(_vol, splits)
            std_err = []
            c_pieces_avg = []
            for c, v in zip(c_pieces, v_pieces):
                _slope, _intercept, _r_value, _p_value, _std_err = stats.linregress(c, v)
                std_err.append(_std_err)
                c_pieces_avg.append(np.mean(c))
            self.std_err_median = np.median(std_err)
            self.std_err_mean = np.mean(std_err)
    if not self.start_capacity == self.min_capacity:
        self.errors.append("capacity: start<>min")
    if not self.end_capacity == self.max_capacity:
        self.errors.append("capacity: end<>max")
    if self.normalizing_factor is None:
        self.normalizing_factor = self.end_capacity
    if self.normalizing_roof is not None:
        # scale the normalizing factor relative to the given roof
        self.normalizing_factor = self.normalizing_factor * self.end_capacity / self.normalizing_roof
check and inspect the data
41,208
def increment_data(self):
    """Perform the dq/dv transform.

    Interpolates capacity(voltage) onto an evenly spaced voltage grid,
    optionally smooths it (Savitzky-Golay), then differentiates to get
    the incremental capacity.

    Raises:
        NotImplementedError: if ``increment_method`` is "hist".
    """
    logging.debug("incrementing data")
    v1, v2 = value_bounds(self.voltage_preprocessed)
    if self.voltage_resolution is not None:
        # fix: wrap in int() - round(x, 0) returns a float, and
        # np.linspace requires an integer sample count
        len_voltage = int(round(abs(v2 - v1) / self.voltage_resolution, 0))
    else:
        len_voltage = len(self.voltage_preprocessed)

    logging.debug(" - interpolating capacity(voltage)")
    f = interp1d(
        self.voltage_preprocessed,
        self.capacity_preprocessed,
        kind=self.interpolation_method,
    )
    self.voltage_inverted = np.linspace(v1, v2, len_voltage)
    self.voltage_inverted_step = (v2 - v1) / (len_voltage - 1)
    self.capacity_inverted = f(self.voltage_inverted)

    if self.smoothing:
        logging.debug(" - smoothing (savgol filter window)")
        savgol_filter_window_divisor = np.amin(
            (self.savgol_filter_window_divisor_default, len_voltage / 5)
        )
        savgol_filter_window_length = int(
            len(self.voltage_inverted) / savgol_filter_window_divisor
        )
        if savgol_filter_window_length % 2 == 0:
            # savgol_filter requires an odd window length
            savgol_filter_window_length -= 1
        self.capacity_inverted = savgol_filter(
            self.capacity_inverted,
            np.amax([3, savgol_filter_window_length]),
            self.savgol_filter_window_order,
        )

    if self.increment_method == "diff":
        logging.debug(" - diff using DIFF")
        self.incremental_capacity = (
            np.ediff1d(self.capacity_inverted) / self.voltage_inverted_step
        )
        self._incremental_capacity = self.incremental_capacity
        self._voltage_processed = self.voltage_inverted[1:]
        # shift by half a step so each dq/dv point sits between grid points
        self.voltage_processed = (
            self.voltage_inverted[1:] - 0.5 * self.voltage_inverted_step
        )
    elif self.increment_method == "hist":
        logging.debug(" - diff using HIST")
        raise NotImplementedError
perform the dq - dv transform
41,209
def easybake(ruleset, in_html, out_html):
    """Bake *in_html* with the *ruleset* CSS and write the result to *out_html*.

    Adheres to the ``cnxeasybake.scripts.main.easybake`` interface;
    *in_html* and *out_html* are file-like objects.
    """
    tree = etree.parse(in_html)
    Oven(ruleset).bake(tree)
    out_html.write(etree.tostring(tree))
This adheres to the same interface as ``cnxeasybake.scripts.main.easybake``. ``ruleset`` is a string containing the ruleset CSS, while ``in_html`` and ``out_html`` are file-like objects with read and write ability, respectively.
41,210
def reconstitute(html):
    """Reconstruct the given file-like *html* object into models."""
    try:
        tree = etree.parse(html)
    except etree.XMLSyntaxError:
        # not well-formed XML: rewind and fall back to the HTML parser
        html.seek(0)
        tree = etree.HTML(html.read())
    return adapt_single_html(etree.tostring(tree, encoding='utf-8'))
Given a file - like object as html reconstruct it into models .
41,211
def collate(binder, ruleset=None, includes=None):
    """Collate the content of *binder* into a new set of models.

    Returns the collated binder, or *binder* unchanged when no *ruleset*
    is given.
    """
    # fix: bail out before rendering - the original built the (expensive)
    # single-HTML output and buffers even when there was no ruleset
    if ruleset is None:
        return binder
    html_formatter = SingleHTMLFormatter(binder, includes)
    raw_html = io.BytesIO(bytes(html_formatter))
    collated_html = io.BytesIO()
    easybake(ruleset, raw_html, collated_html)
    collated_html.seek(0)
    return reconstitute(collated_html)
Given a Binder as binder collate the content into a new set of models . Returns the collated binder .
41,212
def adapt_package(package):
    """Adapt an ``.epub.Package`` to a ``BinderItem`` tree.

    Cascades the adaptation downward to ``DocumentItem`` and
    ``ResourceItem``.
    """
    nav = package.navigation
    tree = parse_navigation_html_to_tree(etree.parse(nav.data), nav.name)
    return _node_to_model(tree, package)
Adapts . epub . Package to a BinderItem and cascades the adaptation downward to DocumentItem and ResourceItem . The results of this process provide the same interface as . models . Binder . models . Document and . models . Resource .
41,213
def adapt_item(item, package, filename=None):
    """Adapt an ``.epub.Item`` to a Document/DocumentPointer/Resource model."""
    if item.media_type != 'application/xhtml+xml':
        # anything that is not XHTML becomes a plain resource
        return Resource(item.name, item.data, item.media_type,
                        filename or item.name)
    try:
        html = etree.parse(item.data)
    except Exception as exc:
        logger.error("failed parsing {}".format(item.name))
        raise
    metadata = DocumentPointerMetadataParser(html, raise_value_error=False)()
    # rewind so downstream consumers can re-read the data
    item.data.seek(0)
    if metadata.get('is_document_pointer'):
        return DocumentPointerItem(item, package)
    return DocumentItem(item, package)
Adapts . epub . Item to a DocumentItem .
41,214
def _make_package(binder):
    """Make an ``.epub.Package`` from a Binder-ish instance."""
    package_id = binder.id
    if package_id is None:
        # fall back to a hash so the package still gets a name
        package_id = hash(binder)
    package_name = "{}.opf".format(package_id)
    extensions = get_model_extensions(binder)
    template_env = jinja2.Environment(trim_blocks=True, lstrip_blocks=True)
    items = []
    # navigation document (table of contents), flagged with the 'nav' property
    navigation_document = bytes(HTMLFormatter(binder, extensions))
    navigation_document_name = "{}{}".format(
        package_id,
        mimetypes.guess_extension('application/xhtml+xml', strict=False))
    item = Item(str(navigation_document_name),
                io.BytesIO(navigation_document),
                'application/xhtml+xml',
                is_navigation=True,
                properties=['nav'])
    items.append(item)
    resources = {}
    for model in flatten_model(binder):
        # collect every resource attached to any model in the tree
        for resource in getattr(model, 'resources', []):
            resources[resource.id] = resource
            with resource.open() as data:
                item = Item(resource.id, data, resource.media_type)
            items.append(item)
        if isinstance(model, (Binder, TranslucentBinder,)):
            continue
        if isinstance(model, DocumentPointer):
            content = bytes(HTMLFormatter(model))
            item = Item(''.join([model.ident_hash, extensions[model.id]]),
                        io.BytesIO(content), model.media_type)
            items.append(item)
            continue
        # rebind references so they point at the packaged resources
        for reference in model.references:
            if reference.remote_type == INLINE_REFERENCE_TYPE:
                # inline (data-URI) references become real resources
                resource = _make_resource_from_inline(reference)
                model.resources.append(resource)
                resources[resource.id] = resource
                with resource.open() as data:
                    item = Item(resource.id, data, resource.media_type)
                items.append(item)
                reference.bind(resource, '../resources/{}')
            elif reference.remote_type == INTERNAL_REFERENCE_TYPE:
                filename = os.path.basename(reference.uri)
                resource = resources.get(filename)
                if resource:
                    reference.bind(resource, '../resources/{}')
        complete_content = bytes(HTMLFormatter(model))
        item = Item(''.join([model.ident_hash, extensions[model.id]]),
                    io.BytesIO(complete_content), model.media_type)
        items.append(item)
    package = Package(package_name, items, binder.metadata)
    return package
Makes an . epub . Package from a Binder ish instance .
41,215
def _make_item(model):
    """Make an ``.epub.Item`` from a ``.models.Document`` or ``.models.Resource``."""
    return Item(model.id, model.content, model.media_type)
Makes an . epub . Item from a . models . Document or . models . Resource
41,216
def _node_to_model(tree_or_item, package, parent=None,
                   lucent_id=TRANSLUCENT_BINDER_ID):
    """Given a tree, recursively parse it into a set of models."""
    if 'contents' in tree_or_item:
        # binder-like node
        tree = tree_or_item
        metadata = package.metadata.copy()
        if tree['id'] == lucent_id:
            metadata['title'] = tree['title']
            binder = TranslucentBinder(metadata=metadata)
        else:
            try:
                package_item = package.grab_by_name(tree['id'])
                binder = BinderItem(package_item, package)
            except KeyError:
                # not found in the package: create a loose Binder from
                # the tree information alone
                metadata.update({'title': tree['title'],
                                 'cnx-archive-uri': tree['id'],
                                 'cnx-archive-shortid': tree['shortId']})
                binder = Binder(tree['id'], metadata=metadata)
        for item in tree['contents']:
            node = _node_to_model(item, package, parent=binder,
                                  lucent_id=lucent_id)
            if node.metadata['title'] != item['title']:
                # the tree overrides the node's own title
                binder.set_title_for_node(node, item['title'])
        result = binder
    else:
        # leaf (document) node
        item = tree_or_item
        package_item = package.grab_by_name(item['id'])
        result = adapt_item(package_item, package)
    if parent is not None:
        parent.append(result)
    return result
Given a tree parse to a set of models
41,217
def adapt_single_html(html):
    """Adapt a single-HTML document (from ``SingleHTMLFormatter``) to a Binder."""
    root = etree.fromstring(html)
    metadata = parse_metadata(root.xpath('//*[@data-type="metadata"]')[0])
    id_ = metadata['cnx-archive-uri'] or 'book'
    binder = Binder(id_, metadata=metadata)
    nav_tree = parse_navigation_html_to_tree(root, id_)
    body = root.xpath('//xhtml:body', namespaces=HTML_DOCUMENT_NAMESPACES)
    _adapt_single_html_tree(binder, body[0], nav_tree, top_metadata=metadata)
    return binder
Adapts a single html document generated by . formatters . SingleHTMLFormatter to a models . Binder
41,218
def get_best_fit_parameters_grouped(self):
    """Return the best-fit parameters as a dict of per-cycle lists."""
    fits = self.best_fit_parameters
    grouped = {'ocv': [fit['ocv'] for fit in fits]}
    for i in range(self.circuits):
        for prefix in ('t', 'w'):
            key = prefix + str(i)
            grouped[key] = [fit[key] for fit in fits]
    return grouped
Returns a dictionary of the best fit .
41,219
def get_best_fit_parameters_translated_grouped(self):
    """Return the best-fit parameters, translated to real units, as a dict of lists."""
    fits = self.best_fit_parameters_translated
    grouped = {
        'ocv': [fit['ocv'] for fit in fits],
        'ir': [fit['ir'] for fit in fits],
    }
    for i in range(self.circuits):
        for prefix in ('r', 'c'):
            key = prefix + str(i)
            grouped[key] = [fit[key] for fit in fits]
    return grouped
Returns the parameters as a dictionary of the real units for the best fit .
41,220
def plot_summary(self, cycles=None):
    """Convenience function for plotting a summary of the fit."""
    if cycles is None:
        cycles = [0]
    fig1 = plt.figure()
    ax1 = fig1.add_subplot(221)
    ax1.set_title('Fit')
    ax2 = fig1.add_subplot(222)
    ax2.set_title('OCV')
    ax3 = fig1.add_subplot(223)
    ax3.set_title('Tau')
    ax3.set_yscale("log")
    ax4 = fig1.add_subplot(224)
    ax4.set_title('Voltage Impact')
    # measured vs fitted voltage curves for the requested cycles
    fit_data = self.get_best_fit_data()
    for cycle in cycles:
        x = fit_data[cycle][0]
        ax1.plot(x, fit_data[cycle][1])
        ax1.plot(x, fit_data[cycle][2])
    # per-circuit parameters versus cycle number
    params = self.get_best_fit_parameters_grouped()
    cycle_numbers = self.get_fit_cycles()
    for i in range(self.circuits):
        ax3.plot(cycle_numbers, params['t' + str(i)])
        ax4.plot(cycle_numbers, params['w' + str(i)])
    ax2.plot(cycle_numbers, params['ocv'])
Convenience function for plotting the summary of the fit
41,221
def parse_resources(html):
    """Yield the resources listed in the html metadata section."""
    xpath = '//*[@data-type="resources"]//xhtml:li/xhtml:a'
    for anchor in html.xpath(xpath, namespaces=HTML_DOCUMENT_NAMESPACES):
        yield {
            'id': anchor.get('href'),
            'filename': anchor.text.strip(),
        }
Return a list of resource names found in the html metadata section .
41,222
def on_connect(self, client, userdata, flags, rc):
    """Callback for the CONNACK response from the broker: subscribe and refresh."""
    super(SerialDeviceManager, self).on_connect(client, userdata, flags, rc)
    if rc != 0:
        # connection was refused - do not subscribe
        return
    for topic in ('serial_device/+/connect',
                  'serial_device/+/send',
                  'serial_device/+/close',
                  'serial_device/refresh_comports'):
        self.mqtt_client.subscribe(topic)
    self.refresh_comports()
Callback for when the client receives a CONNACK response from the broker .
41,223
def on_message(self, client, userdata, msg):
    """Callback for a PUBLISH message received from the broker."""
    if msg.topic == 'serial_device/refresh_comports':
        self.refresh_comports()
        return
    match = CRE_MANAGER.match(msg.topic)
    if match is None:
        logger.debug('Topic NOT matched: `%s`', msg.topic)
        return
    logger.debug('Topic matched: `%s`', msg.topic)
    command = match.group('command')
    port = match.group('port')
    if command == 'send':
        self._serial_send(port, msg.payload)
    elif command == 'connect':
        try:
            request = json.loads(msg.payload)
        except ValueError as exception:
            # malformed JSON payload: log and drop the request
            logger.error('Error decoding "%s (%s)" request: %s',
                         command, port, exception)
            return
        self._serial_connect(port, request)
    elif command == 'close':
        self._serial_close(port)
Callback for when a PUBLISH message is received from the broker .
41,224
def _publish_status ( self , port ) : if port not in self . open_devices : status = { } else : device = self . open_devices [ port ] . serial properties = ( 'port' , 'baudrate' , 'bytesize' , 'parity' , 'stopbits' , 'timeout' , 'xonxoff' , 'rtscts' , 'dsrdtr' ) status = { k : getattr ( device , k ) for k in properties } status_json = json . dumps ( status ) self . mqtt_client . publish ( topic = 'serial_device/%s/status' % port , payload = status_json , retain = True )
Publish status for specified port .
41,225
def _serial_close(self, port):
    """Handle a close request for *port* and publish its new status."""
    if port not in self.open_devices:
        logger.debug('Device not connected to `%s`', port)
    else:
        try:
            self.open_devices[port].close()
        except Exception as exception:
            # closing failed: log and skip the status update
            logger.error('Error closing device `%s`: %s', port, exception)
            return
    self._publish_status(port)
    return
Handle close request .
41,226
def _serial_send(self, port, payload):
    """Send *payload* to the device connected on *port*."""
    if port not in self.open_devices:
        logger.error('Error sending data: `%s` not connected', port)
        self._publish_status(port)
        return
    try:
        self.open_devices[port].write(payload)
        logger.debug('Sent data to `%s`', port)
    except Exception as exception:
        logger.error('Error sending data to `%s`: %s', port, exception)
Send data to connected device .
41,227
def print_datetime_object(dt):
    """Print a date object in several representations."""
    print(dt)
    for label, value in (
        ('ctime :', dt.ctime()),
        ('tuple :', dt.timetuple()),
        ('ordinal:', dt.toordinal()),
        ('Year :', dt.year),
        ('Mon :', dt.month),
        ('Day :', dt.day),
    ):
        print(label, value)
prints a date - object
41,228
def check64bit(current_system="python"):
    """Check whether the interpreter ("python") or the OS ("os") is 64-bit.

    Args:
        current_system: "python" (default) or "os".

    Returns:
        bool: True when 64-bit.
    """
    if current_system == "python":
        # sys.maxsize is 2**31 - 1 on 32-bit builds
        return sys.maxsize > 2147483647
    elif current_system == "os":
        import platform
        pm = platform.machine()
        if pm != ".." and pm.endswith('64'):
            return True
        else:
            # Windows: a 32-bit process on a 64-bit OS sets PROCESSOR_ARCHITEW6432
            if 'PROCESSOR_ARCHITEW6432' in os.environ:
                return True
            try:
                # fix: a missing os.environ key raises KeyError, not
                # IndexError - the original except clause never matched
                return os.environ['PROCESSOR_ARCHITECTURE'].endswith('64')
            except KeyError:
                pass
            try:
                return '64' in platform.architecture()[0]
            except Exception:
                # e.g. restricted environments where architecture() fails
                return False
checks if you are on a 64 bit platform
41,229
def humanize_bytes(b, precision=1):
    """Return a humanized string representation of *b* bytes.

    Args:
        b: number of bytes.
        precision: number of decimals in the output.

    >>> humanize_bytes(1536)
    '1.5 kB'
    """
    abbrevs = (
        (1 << 50, 'PB'),
        (1 << 40, 'TB'),
        (1 << 30, 'GB'),
        (1 << 20, 'MB'),
        (1 << 10, 'kB'),
        (1, 'b'),
    )
    if b == 1:
        return '1 byte'
    for factor, suffix in abbrevs:
        if b >= factor:
            break
    # fix: true division - floor division (//) discarded the fractional
    # part, which made the requested precision meaningless
    return '%.*f %s' % (precision, b / factor, suffix)
Return a humanized string representation of a number of bytes, ``b``.
41,230
def xldate_as_datetime(xldate, datemode=0, option="to_datetime"):
    """Convert an xls date stamp to a more sensible format.

    Args:
        xldate: the xls date stamp.
        datemode: 0 (1900-based) or 1 (1904-based workbook).
        option: "to_datetime" (default), "to_string" or "to_float".
    """
    if option == "to_float":
        # seconds relative to the (offset) epoch
        return (xldate - 25589) * 86400.0
    try:
        result = datetime.datetime(1899, 12, 30) + datetime.timedelta(
            days=xldate + 1462 * datemode
        )
        if option == "to_string":
            result = result.strftime("%Y-%m-%d %H:%M:%S")
    except TypeError:
        logging.info(f'The date is not of correct type [{xldate}]')
        result = xldate
    return result
Converts a xls date stamp to a more sensible format .
41,231
def populate(self, filename):
    """Find the file stats for *filename* and populate the object with them.

    Does nothing when *filename* is not an existing regular file.
    """
    if not os.path.isfile(filename):
        return
    file_stats = os.stat(filename)
    self.name = os.path.abspath(filename)
    self.full_name = filename
    self.location = os.path.dirname(filename)
    self.size = file_stats.st_size
    self.last_modified = file_stats.st_mtime
    self.last_accessed = file_stats.st_atime
    self.last_info_changed = file_stats.st_ctime
Finds the file - stats and populates the class with stat values .
41,232
def get_raw(self):
    """Return ``[name, size, last_modified, location]`` for the file."""
    return [
        self.name,
        self.size,
        self.last_modified,
        self.location,
    ]
Get a list with information about the file .
41,233
def dfsummary_made(self):
    """Return True if the summary table exists and is non-empty."""
    try:
        return not self.dfsummary.empty
    except AttributeError:
        # no summary table at all
        return False
check if the summary table exists
41,234
def step_table_made(self):
    """Return True if the step table exists and is non-empty."""
    try:
        return not self.step_table.empty
    except AttributeError:
        # no step table at all
        return False
check if the step table exists
41,235
def _open_sheet(self, dtypes_dict=None):
    """Open the database sheet and return it as a DataFrame.

    Falls back to parsing without dtypes when pandas cannot coerce all
    columns with the given ones.
    """
    table_name = self.db_sheet_table
    header_row = self.db_header_row
    nrows = self.nrows
    if dtypes_dict is None:
        dtypes_dict = self.dtypes_dict
    rows_to_skip = self.skiprows
    logging.debug(f"Trying to open the file {self.db_file}")
    logging.debug(f"Number of rows: {nrows}")
    logging.debug(f"Skipping the following rows: {rows_to_skip}")
    logging.debug(f"Declaring the following dtyps: {dtypes_dict}")
    work_book = pd.ExcelFile(self.db_file)
    parse_kwargs = dict(header=header_row, skiprows=rows_to_skip, nrows=nrows)
    try:
        return work_book.parse(table_name, dtype=dtypes_dict, **parse_kwargs)
    except ValueError as e:
        # some columns could not be coerced; let pandas infer instead
        logging.debug("Could not parse all the columns (ValueError) "
                      "using given dtypes. Trying without dtypes.")
        logging.debug(str(e))
        return work_book.parse(table_name, **parse_kwargs)
Opens the database sheet and returns it as a DataFrame.
41,236
def _validate ( self ) : probably_good_to_go = True sheet = self . table identity = self . db_sheet_cols . id id_col = sheet . loc [ : , identity ] if any ( id_col . duplicated ( ) ) : warnings . warn ( "your database is corrupt: duplicates" " encountered in the srno-column" ) logger . debug ( "srno duplicates:\n" + str ( id_col . duplicated ( ) ) ) probably_good_to_go = False return probably_good_to_go
Checks that the db - file is ok
41,237
def select_serial_number_row(self, serial_number):
    """Select the row(s) matching the identification number *serial_number*."""
    sheet = self.table
    id_column = self.db_sheet_cols.id
    mask = sheet.loc[:, id_column] == serial_number
    return sheet.loc[mask, :]
Select row for identification number serial_number
41,238
def select_all(self, serial_numbers):
    """Select all rows whose identification number is in *serial_numbers*."""
    sheet = self.table
    mask = sheet.loc[:, self.db_sheet_cols.id].isin(serial_numbers)
    return sheet.loc[mask, :]
Select rows for identification for a list of serial_number .
41,239
def print_serial_number_info ( self , serial_number , print_to_screen = True ) : r = self . select_serial_number_row ( serial_number ) if r . empty : warnings . warn ( "missing serial number" ) return txt1 = 80 * "=" txt1 += "\n" txt1 += f" serial number {serial_number}\n" txt1 = 80 * "-" txt1 += "\n" txt2 = "" for label , value in zip ( r . columns , r . values [ 0 ] ) : if label in self . headers : txt1 += f"{label}: \t {value}\n" else : txt2 += f"({label}: \t {value})\n" if print_to_screen : print ( txt1 ) print ( 80 * "-" ) print ( txt2 ) print ( 80 * "=" ) return else : return txt1
Print information about the run .
41,240
def main(argv=None):
    """Parse a passed-in cooked single-HTML file.

    Optionally dumps the parsed model tree and/or writes out an epub,
    copying ``resources/`` entries from an input epub when given.

    Raises:
        ValueError: when -i is given but output goes to stdout.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('collated_html', type=argparse.FileType('r'),
                        help='Path to the collated html'
                             ' file (use - for stdin)')
    parser.add_argument('-d', '--dump-tree', action='store_true',
                        help='Print out parsed model tree.')
    parser.add_argument('-o', '--output', type=argparse.FileType('w+'),
                        help='Write out epub of parsed tree.')
    parser.add_argument('-i', '--input', type=argparse.FileType('r'),
                        help='Read and copy resources/ for output epub.')
    args = parser.parse_args(argv)
    if args.input and args.output == sys.stdout:
        # copying resources needs a seekable output file
        raise ValueError('Cannot output to stdout if reading resources')
    from cnxepub.collation import reconstitute
    binder = reconstitute(args.collated_html)
    if args.dump_tree:
        print(pformat(cnxepub.model_to_tree(binder)), file=sys.stdout)
    if args.output:
        cnxepub.adapters.make_epub(binder, args.output)
    if args.input:
        # append the resources/ entries from the input epub to the output
        args.output.seek(0)
        zout = ZipFile(args.output, 'a', ZIP_DEFLATED)
        zin = ZipFile(args.input, 'r')
        for res in zin.namelist():
            if res.startswith('resources'):
                zres = zin.open(res)
                zi = zin.getinfo(res)
                zout.writestr(zi, zres.read(), ZIP_DEFLATED)
        zout.close()
    return 0
Parse passed in cooked single HTML .
41,241
def success(channel, image, hex_str):
    """Create an embed UI containing a hex colour message."""
    colour = int(hex_str, 16)
    return ui_embed.UI(
        channel,
        "",
        "#{}".format(hex_str),
        modulename=modulename,
        colour=colour,
        thumbnail=image,
    )
Creates an embed UI containing a hex color message
41,242
def add_md(text, s, level=0):
    """Append *s* to the readme text at the given heading *level* (0 = plain)."""
    if level > 0:
        # separate the heading from any existing text, then add the marker
        if text != "":
            text += "\n"
        text += "#" * level + " "
    text += s + "\n"
    if level > 0:
        text += "\n"
    return text
Adds text to the readme at the given level
41,243
def add_ul(text, ul):
    """Append an unordered (bullet) list to the readme text."""
    items = "".join("- {}\n".format(li) for li in ul)
    return text + "\n" + items + "\n"
Adds an unordered list to the readme
41,244
def make_editions_dict(editions):
    """Flatten a reporter editions dict for use in the DictWriter.

    Produces ``edition<N>`` plus optional ``start_e<N>``/``end_e<N>`` ISO
    dates, numbered in iteration order starting at 1.
    """
    flat = {}
    # fix/generalize: enumerate instead of indexing a fixed ['1'..'6']
    # list, which raised IndexError for more than six editions
    for num, (name, dates) in enumerate(editions.items(), start=1):
        flat['edition%s' % num] = name
        if dates['start'] is not None:
            flat['start_e%s' % num] = dates['start'].isoformat()
        if dates['end'] is not None:
            flat['end_e%s' % num] = dates['end'].isoformat()
    return flat
Take a reporter editions dict and flatten it returning a dict for use in the DictWriter .
41,245
def modify_module(channel, module_name, module_state):
    """Create an embed UI containing the module-modified message."""
    state_text = "activated" if module_state else "deactivated"
    return ui_embed.UI(
        channel,
        "{} updated".format(module_name),
        "{} is now {}".format(module_name, state_text),
        modulename=modulename,
    )
Creates an embed UI containing the module modified message
41,246
def modify_prefix(channel, new_prefix):
    """Create an embed UI containing the prefix-modified message."""
    return ui_embed.UI(
        channel,
        "Prefix updated",
        "Modis prefix is now `{}`".format(new_prefix),
        modulename=modulename,
    )
Creates an embed UI containing the prefix modified message
41,247
async def update_server_data ( server ) : data = datatools . get_data ( ) send_welcome_message = False if server . id not in data [ "discord" ] [ "servers" ] : logger . debug ( "Adding new server to serverdata" ) data [ "discord" ] [ "servers" ] [ server . id ] = { "prefix" : "!" } if "mute_intro" not in data or not data [ "mute_intro" ] : send_welcome_message = True _dir = os . path . realpath ( os . path . join ( os . getcwd ( ) , os . path . dirname ( __file__ ) ) ) _dir_modules = "{}/../" . format ( _dir ) for module_name in os . listdir ( _dir_modules ) : if module_name . startswith ( "_" ) or module_name . startswith ( "!" ) : continue if not os . path . isfile ( "{}/{}/_data.py" . format ( _dir_modules , module_name ) ) : logger . warning ( "No _data.py file found for module {}" . format ( module_name ) ) continue try : import_name = ".discord_modis.modules.{}.{}" . format ( module_name , "_data" ) _data = importlib . import_module ( import_name , "modis" ) if _data . modulename not in data [ "discord" ] [ "servers" ] [ server . id ] : data [ "discord" ] [ "servers" ] [ server . id ] [ _data . modulename ] = _data . sd_structure datatools . write_data ( data ) except Exception as e : logger . error ( "Could not initialise module {}" . format ( module_name ) ) logger . exception ( e ) datatools . write_data ( data ) if send_welcome_message : default_channel = server . default_channel if not default_channel : for channel in server . channels : if channel . name == "general" : default_channel = channel break if not default_channel : for channel in server . channels : if "general" in channel . name : default_channel = channel break if not default_channel : for channel in server . channels : if channel . type == discord . ChannelType . text : default_channel = channel break if default_channel : hello_message = "Hello! I'm Modis.\n\n" + "The prefix is currently `!`, and can be changed at any time using `!prefix`\n\n" + "You can use `!help` to get help commands for all modules, " + "or {} me to get the server prefix and help commands." . format ( server . me . mention ) await client . send_message ( default_channel , hello_message )
Updates the server info for the given server
41,248
def remove_server_data ( server_id ) : logger . debug ( "Removing server from serverdata" ) data = datatools . get_data ( ) if server_id in data [ "discord" ] [ "servers" ] : data [ "discord" ] [ "servers" ] . pop ( server_id ) datatools . write_data ( data )
Remove a server from the server data
41,249
def check_all_servers ( ) : data = datatools . get_data ( ) for server_id in data [ "discord" ] [ "servers" ] : is_in_client = False for client_server in client . servers : if server_id == client_server . id : is_in_client = True break if not is_in_client : remove_server_data ( server_id )
Checks all servers, removing any that Modis isn't part of any more
41,250
def clear_modules ( self ) : for child in self . module_selection . winfo_children ( ) : child . destroy ( ) self . clear_ui ( ) tk . Label ( self . module_ui , text = "Start Modis and select a module" ) . grid ( column = 0 , row = 0 , padx = 0 , pady = 0 , sticky = "W E N S" ) if self . current_button is not None : self . current_button . config ( bg = "white" ) self . module_buttons = { } self . current_button = None
Clears all modules from the list
41,251
def add_module ( self , module_name , module_ui ) : m_button = tk . Label ( self . module_selection , text = module_name , bg = "white" , anchor = "w" ) m_button . grid ( column = 0 , row = len ( self . module_selection . winfo_children ( ) ) , padx = 0 , pady = 0 , sticky = "W E N S" ) self . module_buttons [ module_name ] = m_button m_button . bind ( "<Button-1>" , lambda e : self . module_selected ( module_name , module_ui ) )
Adds a module to the list
41,252
def module_selected ( self , module_name , module_ui ) : if self . current_button == self . module_buttons [ module_name ] : return self . module_buttons [ module_name ] . config ( bg = "#cacaca" ) if self . current_button is not None : self . current_button . config ( bg = "white" ) self . current_button = self . module_buttons [ module_name ] self . clear_ui ( ) try : module_ui_frame = ModuleUIBaseFrame ( self . module_ui , module_name , module_ui ) module_ui_frame . grid ( column = 0 , row = 0 , sticky = "W E N S" ) except Exception as e : logger . error ( "Could not load UI for {}" . format ( module_name ) ) logger . exception ( e ) tk . Label ( self . module_ui , text = "Could not load UI for {}" . format ( module_name ) ) . grid ( column = 0 , row = 0 , padx = 0 , pady = 0 , sticky = "W E N S" )
Called when a module is selected
41,253
def toggle ( self , discord_token , discord_client_id ) : if self . state == 'off' : self . start ( discord_token , discord_client_id ) elif self . state == 'on' : self . stop ( )
Toggles Modis on or off
41,254
def start ( self , discord_token , discord_client_id ) : self . button_toggle_text . set ( "Stop Modis" ) self . state = "on" self . status_bar . set_status ( 1 ) logger . info ( "----------------STARTING DISCORD MODIS----------------" ) self . module_frame . clear_modules ( ) from modis . discord_modis import main logger . debug ( "Creating event loop" ) loop = asyncio . new_event_loop ( ) asyncio . set_event_loop ( loop ) self . discord_thread = threading . Thread ( target = main . start , args = [ discord_token , discord_client_id , loop , self . on_ready ] ) logger . debug ( "Starting event loop" ) self . discord_thread . start ( ) database_dir = "{}/modules" . format ( os . path . dirname ( os . path . realpath ( __file__ ) ) ) for module_name in os . listdir ( database_dir ) : module_dir = "{}/{}" . format ( database_dir , module_name ) if os . path . isdir ( module_dir ) and not module_name . startswith ( "_" ) : module_event_handlers = os . listdir ( module_dir ) if "_ui.py" in module_event_handlers : import_name = ".discord_modis.modules.{}.{}" . format ( module_name , "_ui" ) logger . debug ( "Found module UI file {}" . format ( import_name [ 23 : ] ) ) self . module_frame . add_module ( module_name , importlib . import_module ( import_name , "modis" ) ) else : self . module_frame . add_module ( module_name , None )
Start Modis and log it into Discord .
41,255
def stop ( self ) : self . button_toggle_text . set ( "Start Modis" ) self . state = "off" logger . info ( "Stopping Discord Modis" ) from . _client import client asyncio . run_coroutine_threadsafe ( client . logout ( ) , client . loop ) self . status_bar . set_status ( 0 )
Stop Modis and log it out of Discord .
41,256
def key_changed ( self ) : if self . key_name . get ( ) and self . key_val . get ( ) : self . button_key_add . state ( [ "!disabled" ] ) else : self . button_key_add . state ( [ "disabled" ] )
Checks if the key name and value fields have been set and updates the add key button
41,257
def key_add ( self ) : from . main import add_api_key add_api_key ( self . key_name . get ( ) , self . key_val . get ( ) ) self . key_name . set ( "" ) self . key_val . set ( "" )
Adds the current API key to the bot's data
41,258
def set_status ( self , status ) : text = "" colour = "#FFFFFF" if status == 0 : text = "OFFLINE" colour = "#EF9A9A" elif status == 1 : text = "STARTING" colour = "#FFE082" elif status == 2 : text = "ONLINE" colour = "#A5D6A7" self . status . set ( text ) self . statusbar . config ( background = colour )
Updates the status text
41,259
def get_help_data ( filepath ) : try : with open ( filepath , 'r' ) as file : return _json . load ( file , object_pairs_hook = OrderedDict ) except Exception as e : logger . error ( "Could not load file {}" . format ( filepath ) ) logger . exception ( e ) return { }
Get the json data from a help file
41,260
def get_help_datapacks ( filepath , prefix = "!" ) : help_contents = get_help_data ( filepath ) datapacks = [ ] for d in help_contents : heading = d content = "" if "commands" in d . lower ( ) : for c in help_contents [ d ] : if "name" not in c : continue content += "- `" command = prefix + c [ "name" ] content += "{}" . format ( command ) if "params" in c : for param in c [ "params" ] : content += " [{}]" . format ( param ) content += "`: " if "description" in c : content += c [ "description" ] content += "\n" else : content += help_contents [ d ] datapacks . append ( ( heading , content , False ) ) return datapacks
Load help text from a file and give it as datapacks
41,261
def add_help_text ( parent , filepath , prefix = "!" ) : import tkinter as tk import tkinter . ttk as ttk help_contents = get_help_data ( filepath ) text = tk . Text ( parent , wrap = 'word' , font = ( "Helvetica" , 10 ) ) text . grid ( row = 0 , column = 0 , sticky = "W E N S" ) text . tag_config ( "heading" , font = ( "Helvetica" , 14 ) ) text . tag_config ( "command" , font = ( "Courier" , 10 ) ) text . tag_config ( "param" , font = ( "Courier" , 10 ) ) text . tag_config ( "description" ) scrollbar = ttk . Scrollbar ( parent , orient = "vertical" , command = text . yview ) scrollbar . grid ( column = 1 , row = 0 , sticky = "N S" ) text [ 'yscrollcommand' ] = scrollbar . set for d in help_contents : text . insert ( 'end' , d , "heading" ) text . insert ( 'end' , '\n' ) if "commands" in d . lower ( ) : for c in help_contents [ d ] : if "name" not in c : continue command = prefix + c [ "name" ] text . insert ( 'end' , command , ( "command" , "description" ) ) if "params" in c : for param in c [ "params" ] : text . insert ( 'end' , " [{}]" . format ( param ) , ( "param" , "description" ) ) text . insert ( 'end' , ": " ) if "description" in c : text . insert ( 'end' , c [ "description" ] , "description" ) text . insert ( 'end' , '\n' ) text . insert ( 'end' , '\n' ) else : text . insert ( 'end' , help_contents [ d ] , "description" ) text . insert ( 'end' , '\n\n' ) text . config ( state = tk . DISABLED )
Load help text from a file and adds it to the parent
41,262
def console ( discord_token , discord_client_id ) : state , response = datatools . get_compare_version ( ) logger . info ( "Starting Modis in console" ) logger . info ( response ) import threading import asyncio logger . debug ( "Loading packages" ) from modis . discord_modis import main as discord_modis_console from modis . reddit_modis import main as reddit_modis_console from modis . facebook_modis import main as facebook_modis_console logger . debug ( "Initiating threads" ) loop = asyncio . get_event_loop ( ) discord_thread = threading . Thread ( target = discord_modis_console . start , args = [ discord_token , discord_client_id , loop ] ) reddit_thread = threading . Thread ( target = reddit_modis_console . start , args = [ ] ) facebook_thread = threading . Thread ( target = facebook_modis_console . start , args = [ ] ) logger . debug ( "Starting threads" ) discord_thread . start ( ) reddit_thread . start ( ) facebook_thread . start ( ) logger . debug ( "Root startup completed" )
Start Modis in console format .
41,263
def gui ( discord_token , discord_client_id ) : logger . info ( "Starting Modis in GUI" ) import tkinter as tk logger . debug ( "Loading packages" ) from modis . discord_modis import gui as discord_modis_gui from modis . reddit_modis import gui as reddit_modis_gui from modis . facebook_modis import gui as facebook_modis_gui logger . debug ( "Initialising window" ) root = tk . Tk ( ) root . minsize ( width = 800 , height = 400 ) root . geometry ( "800x600" ) root . title ( "Modis Control Panel" ) root . iconbitmap ( r"{}/assets/modis.ico" . format ( file_dir ) ) discord = discord_modis_gui . Frame ( root , discord_token , discord_client_id ) discord . grid ( column = 0 , row = 0 , padx = 0 , pady = 0 , sticky = "W E N S" ) root . columnconfigure ( 0 , weight = 1 ) root . rowconfigure ( 0 , weight = 1 ) discord . columnconfigure ( 0 , weight = 1 ) discord . rowconfigure ( 0 , weight = 1 ) logger . debug ( "GUI initialised" ) root . mainloop ( )
Start Modis in gui format .
41,264
def write_data ( data ) : sorted_dict = sort_recursive ( data ) with open ( _datafile , 'w' ) as file : _json . dump ( sorted_dict , file , indent = 2 )
Write the data to the data . json file
41,265
def sort_recursive ( data ) : newdict = { } for i in data . items ( ) : if type ( i [ 1 ] ) is dict : newdict [ i [ 0 ] ] = sort_recursive ( i [ 1 ] ) else : newdict [ i [ 0 ] ] = i [ 1 ] return OrderedDict ( sorted ( newdict . items ( ) , key = lambda item : ( compare_type ( type ( item [ 1 ] ) ) , item [ 0 ] ) ) )
Recursively sorts all elements in a dictionary
41,266
def get_compare_version ( ) : state , latest_version = compare_latest_version ( ) if state < 0 : return - 1 , "A new version of Modis is available (v{})" . format ( latest_version ) elif state == 0 : return 0 , "You are running the latest version of Modis (v{})" . format ( version ) else : return 1 , "You are running a preview version of Modis (v{} pre-release)" . format ( version )
Get the version comparison info .
41,267
def success ( channel , stats , name , platform , dp ) : datapacks = [ ( "Platform" , platform , False ) ] for stat in stats : if stat [ 0 ] in ( "Duel 1v1" , "Doubles 2v2" , "Solo Standard 3v3" , "Standard 3v3" ) : stat_name = "__" + stat [ 0 ] + "__" stat_value = "**" + stat [ 1 ] + "**" else : stat_name = stat [ 0 ] stat_value = stat [ 1 ] if stat [ 2 ] : stat_value += " *(Top " + stat [ 2 ] + "%)*" datapacks . append ( ( stat_name , stat_value , True ) ) gui = ui_embed . UI ( channel , "Rocket League Stats: {}" . format ( name ) , "*Stats obtained from [Rocket League Tracker Network](https://rocketleague.tracker.network/)*" , modulename = modulename , colour = 0x0088FF , thumbnail = dp , datapacks = datapacks ) return gui
Creates an embed UI containing the Rocket League stats
41,268
def fail_steamid ( channel ) : gui = ui_embed . UI ( channel , "That SteamID doesn't exist." , "You can get your SteamID by going to your profile page and looking at the url, " "or you can set a custom ID by going to edit profile on your profile page." , modulename = modulename , colour = 0x0088FF ) return gui
Creates an embed UI for invalid SteamIDs
41,269
def fail_api ( channel ) : gui = ui_embed . UI ( channel , "Couldn't get stats off RLTrackerNetwork." , "Maybe the API changed, please tell Infraxion." , modulename = modulename , colour = 0x0088FF ) return gui
Creates an embed UI for when the API call didn t work
41,270
def success ( channel , post ) : datapacks = [ ( "Game" , post [ 0 ] , True ) , ( "Upvotes" , post [ 2 ] , True ) ] gui = ui_embed . UI ( channel , "Link" , post [ 1 ] , modulename = modulename , colour = 0xFF8800 , thumbnail = post [ 1 ] , datapacks = datapacks ) return gui
Creates an embed UI containing the Reddit posts
41,271
def no_results ( channel ) : gui = ui_embed . UI ( channel , "No results" , ":c" , modulename = modulename , colour = 0xFF8800 ) return gui
Creates an embed UI for when there were no results
41,272
def make_timebar ( progress = 0 , duration = 0 ) : duration_string = api_music . duration_to_string ( duration ) if duration <= 0 : return "---" time_counts = int ( round ( ( progress / duration ) * TIMEBAR_LENGTH ) ) if time_counts > TIMEBAR_LENGTH : time_counts = TIMEBAR_LENGTH if duration > 0 : bar = "│" + ( TIMEBAR_PCHAR * time_counts ) + ( TIMEBAR_ECHAR * ( TIMEBAR_LENGTH - time_counts ) ) + "│" time_bar = "{} {}" . format ( bar , duration_string ) else : time_bar = duration_string return time_bar
Makes a new time bar string
41,273
def build ( self ) : if self . colour : embed = discord . Embed ( title = self . title , type = 'rich' , description = self . description , colour = self . colour ) else : embed = discord . Embed ( title = self . title , type = 'rich' , description = self . description ) if self . thumbnail : embed . set_thumbnail ( url = self . thumbnail ) if self . image : embed . set_image ( url = self . image ) embed . set_author ( name = "Modis" , url = "https://musicbyango.com/modis/" , icon_url = "http://musicbyango.com/modis/dp/modis64t.png" ) for pack in self . datapacks : embed . add_field ( name = pack [ 0 ] , value = pack [ 1 ] , inline = pack [ 2 ] ) return embed
Builds Discord embed GUI
41,274
async def send ( self ) : await client . send_typing ( self . channel ) self . sent_embed = await client . send_message ( self . channel , embed = self . built_embed )
Send new GUI
41,275
def update_data ( self , index , data ) : datapack = self . built_embed . to_dict ( ) [ "fields" ] [ index ] self . built_embed . set_field_at ( index , name = datapack [ "name" ] , value = data , inline = datapack [ "inline" ] )
Updates a particular datapack's data
41,276
def suck_out_variations_only ( reporters ) : variations_out = { } for reporter_key , data_list in reporters . items ( ) : for data in data_list : for variation_key , variation_value in data [ "variations" ] . items ( ) : try : variations_list = variations_out [ variation_key ] if variation_value not in variations_list : variations_list . append ( variation_value ) except KeyError : variations_out [ variation_key ] = [ variation_value ] return variations_out
Builds a dictionary of variations to canonical reporters .
41,277
def suck_out_editions ( reporters ) : editions_out = { } for reporter_key , data_list in reporters . items ( ) : for data in data_list : for edition_key , edition_value in data [ "editions" ] . items ( ) : try : editions_out [ edition_key ] except KeyError : editions_out [ edition_key ] = reporter_key return editions_out
Builds a dictionary mapping edition keys to their root name .
41,278
def names_to_abbreviations ( reporters ) : names = { } for reporter_key , data_list in reporters . items ( ) : for data in data_list : abbrevs = data [ 'editions' ] . keys ( ) sort_func = lambda x : str ( data [ 'editions' ] [ x ] [ 'start' ] ) + x abbrevs = sorted ( abbrevs , key = sort_func ) names [ data [ 'name' ] ] = abbrevs sorted_names = OrderedDict ( sorted ( names . items ( ) , key = lambda t : t [ 0 ] ) ) return sorted_names
Build a dict mapping names to their variations
41,279
def runcoro ( async_function ) : future = _asyncio . run_coroutine_threadsafe ( async_function , client . loop ) result = future . result ( ) return result
Runs an asynchronous function without needing to use await - useful for lambda
41,280
async def warn_user ( channel , user ) : data = datatools . get_data ( ) server_id = channel . server . id if "warnings_max" not in data [ "discord" ] [ "servers" ] [ server_id ] [ _data . modulename ] : data [ "discord" ] [ "servers" ] [ server_id ] [ _data . modulename ] [ "warnings_max" ] = 3 if "warnings" not in data [ "discord" ] [ "servers" ] [ server_id ] [ _data . modulename ] : data [ "discord" ] [ "servers" ] [ server_id ] [ _data . modulename ] [ "warnings" ] = { } if user . id in data [ "discord" ] [ "servers" ] [ server_id ] [ _data . modulename ] [ "warnings" ] : data [ "discord" ] [ "servers" ] [ server_id ] [ _data . modulename ] [ "warnings" ] [ user . id ] += 1 else : data [ "discord" ] [ "servers" ] [ server_id ] [ _data . modulename ] [ "warnings" ] [ user . id ] = 1 datatools . write_data ( data ) warnings = data [ "discord" ] [ "servers" ] [ server_id ] [ _data . modulename ] [ "warnings" ] [ user . id ] max_warnings = data [ "discord" ] [ "servers" ] [ server_id ] [ _data . modulename ] [ "warnings_max" ] await client . send_typing ( channel ) embed = ui_embed . user_warning ( channel , user , warnings , max_warnings ) await embed . send ( ) if warnings >= max_warnings : await ban_user ( channel , user )
Gives a user a warning and bans them if they are over the maximum warnings
41,281
async def ban_user ( channel , user ) : data = datatools . get_data ( ) server_id = channel . server . id try : await client . ban ( user ) except discord . errors . Forbidden : await client . send_typing ( channel ) embed = ui_embed . error ( channel , "Ban Error" , "I do not have the permissions to ban that person." ) await embed . send ( ) return if "warnings" in data [ "discord" ] [ "servers" ] [ server_id ] [ _data . modulename ] : if user . id in data [ "discord" ] [ "servers" ] [ server_id ] [ _data . modulename ] [ "warnings" ] : data [ "discord" ] [ "servers" ] [ server_id ] [ _data . modulename ] [ "warnings" ] [ user . id ] = 0 datatools . write_data ( data ) await client . send_typing ( channel ) embed = ui_embed . user_ban ( channel , user ) await embed . send ( ) try : response = "You have been banned from the server '{}' " "contact the owners to resolve this issue." . format ( channel . server . name ) await client . send_message ( user , response ) except Exception as e : logger . exception ( e )
Bans a user from a server
41,282
def get_help_datapacks ( module_name , server_prefix ) : _dir = os . path . realpath ( os . path . join ( os . getcwd ( ) , os . path . dirname ( __file__ ) ) ) module_dir = "{}/../{}" . format ( _dir , module_name , "_help.json" ) if os . path . isdir ( module_dir ) : module_help_path = "{}/{}" . format ( module_dir , "_help.json" ) if os . path . isfile ( module_help_path ) : return helptools . get_help_datapacks ( module_help_path , server_prefix ) else : return [ ( "Help" , "{} does not have a help.json file" . format ( module_name ) , False ) ] else : return [ ( "Help" , "No module found called {}" . format ( module_name ) , False ) ]
Get the help datapacks for a module
41,283
def get_help_commands ( server_prefix ) : datapacks = [ ] _dir = os . path . realpath ( os . path . join ( os . getcwd ( ) , os . path . dirname ( __file__ ) ) ) for module_name in os . listdir ( "{}/../" . format ( _dir ) ) : if not module_name . startswith ( "_" ) and not module_name . startswith ( "!" ) : help_command = "`{}help {}`" . format ( server_prefix , module_name ) datapacks . append ( ( module_name , help_command , True ) ) return datapacks
Get the help commands for all modules
41,284
def clear_cache_root ( ) : logger . debug ( "Clearing root cache" ) if os . path . isdir ( _root_songcache_dir ) : for filename in os . listdir ( _root_songcache_dir ) : file_path = os . path . join ( _root_songcache_dir , filename ) try : if os . path . isfile ( file_path ) : os . unlink ( file_path ) elif os . path . isdir ( file_path ) : shutil . rmtree ( file_path ) except PermissionError : pass except Exception as e : logger . exception ( e ) logger . debug ( "Root cache cleared" )
Clears everything in the song cache
41,285
async def play ( self , author , text_channel , query , index = None , stop_current = False , shuffle = False ) : if self . state == 'off' : self . state = 'starting' self . prev_queue = [ ] await self . set_topic ( "" ) await self . msetup ( text_channel ) await self . enqueue ( query , index , stop_current , shuffle ) await self . vsetup ( author ) self . state = 'ready' if self . mready and self . vready else 'off' else : await self . enqueue ( query , index , stop_current , shuffle ) if self . state == 'ready' : if self . streamer is None : await self . vplay ( )
The play command
41,286
async def destroy ( self ) : self . logger . debug ( "destroy command" ) self . state = 'destroyed' await self . set_topic ( "" ) self . nowplayinglog . debug ( "---" ) self . nowplayingauthorlog . debug ( "---" ) self . nowplayingsourcelog . debug ( "---" ) self . timelog . debug ( _timebar . make_timebar ( ) ) self . prev_time = "---" self . statuslog . debug ( "Destroying" ) self . mready = False self . vready = False self . pause_time = None self . loop_type = 'off' if self . vclient : try : await self . vclient . disconnect ( ) except Exception as e : logger . error ( e ) pass if self . streamer : try : self . streamer . stop ( ) except : pass self . vclient = None self . vchannel = None self . streamer = None self . current_duration = 0 self . current_download_elapsed = 0 self . is_live = False self . queue = [ ] self . prev_queue = [ ] if self . embed : await self . embed . delete ( ) self . embed = None self . clear_cache ( )
Destroy the whole gui and music player
41,287
async def toggle ( self ) : self . logger . debug ( "toggle command" ) if not self . state == 'ready' : return if self . streamer is None : return try : if self . streamer . is_playing ( ) : await self . pause ( ) else : await self . resume ( ) except Exception as e : logger . error ( e ) pass
Toggles between pause and resume command
41,288
async def pause ( self ) : self . logger . debug ( "pause command" ) if not self . state == 'ready' : return if self . streamer is None : return try : if self . streamer . is_playing ( ) : self . streamer . pause ( ) self . pause_time = self . vclient . loop . time ( ) self . statuslog . info ( "Paused" ) except Exception as e : logger . error ( e ) pass
Pauses playback if playing
41,289
async def resume ( self ) : self . logger . debug ( "resume command" ) if not self . state == 'ready' : return if self . streamer is None : return try : if not self . streamer . is_playing ( ) : play_state = "Streaming" if self . is_live else "Playing" self . statuslog . info ( play_state ) self . streamer . resume ( ) if self . pause_time is not None : self . vclient_starttime += ( self . vclient . loop . time ( ) - self . pause_time ) self . pause_time = None except Exception as e : logger . error ( e ) pass
Resumes playback if paused
41,290
async def skip ( self , query = "1" ) : if not self . state == 'ready' : logger . debug ( "Trying to skip from wrong state '{}'" . format ( self . state ) ) return if query == "" : query = "1" elif query == "all" : query = str ( len ( self . queue ) + 1 ) try : num = int ( query ) except TypeError : self . statuslog . error ( "Skip argument must be a number" ) except ValueError : self . statuslog . error ( "Skip argument must be a number" ) else : self . statuslog . info ( "Skipping" ) for i in range ( num - 1 ) : if len ( self . queue ) > 0 : self . prev_queue . append ( self . queue . pop ( 0 ) ) try : self . streamer . stop ( ) except Exception as e : logger . exception ( e )
The skip command
41,291
async def remove ( self , index = "" ) : if not self . state == 'ready' : logger . debug ( "Trying to remove from wrong state '{}'" . format ( self . state ) ) return if index == "" : self . statuslog . error ( "Must provide index to remove" ) return elif index == "all" : self . queue = [ ] self . update_queue ( ) self . statuslog . info ( "Removed all songs" ) return indexes = index . split ( "-" ) self . logger . debug ( "Removing {}" . format ( indexes ) ) try : if len ( indexes ) == 0 : self . statuslog . error ( "Remove must specify an index or range" ) return elif len ( indexes ) == 1 : num_lower = int ( indexes [ 0 ] ) - 1 num_upper = num_lower + 1 elif len ( indexes ) == 2 : num_lower = int ( indexes [ 0 ] ) - 1 num_upper = int ( indexes [ 1 ] ) else : self . statuslog . error ( "Cannot have more than 2 indexes for remove range" ) return except TypeError : self . statuslog . error ( "Remove index must be a number" ) return except ValueError : self . statuslog . error ( "Remove index must be a number" ) return if num_lower < 0 or num_lower >= len ( self . queue ) or num_upper > len ( self . queue ) : if len ( self . queue ) == 0 : self . statuslog . warning ( "No songs in queue" ) elif len ( self . queue ) == 1 : self . statuslog . error ( "Remove index must be 1 (only 1 song in queue)" ) else : self . statuslog . error ( "Remove index must be between 1 and {}" . format ( len ( self . queue ) ) ) return if num_upper <= num_lower : self . statuslog . error ( "Second index in range must be greater than first" ) return lower_songname = self . queue [ num_lower ] [ 1 ] for num in range ( 0 , num_upper - num_lower ) : self . logger . debug ( "Removed {}" . format ( self . queue [ num_lower ] [ 1 ] ) ) self . queue . pop ( num_lower ) if len ( indexes ) == 1 : self . statuslog . info ( "Removed {}" . format ( lower_songname ) ) else : self . statuslog . info ( "Removed songs {}-{}" . format ( num_lower + 1 , num_upper ) ) self . update_queue ( )
The remove command
41,292
async def rewind ( self , query = "1" ) : if not self . state == 'ready' : logger . debug ( "Trying to rewind from wrong state '{}'" . format ( self . state ) ) return if query == "" : query = "1" try : num = int ( query ) except TypeError : self . statuslog . error ( "Rewind argument must be a number" ) except ValueError : self . statuslog . error ( "Rewind argument must be a number" ) else : if len ( self . prev_queue ) == 0 : self . statuslog . error ( "No songs to rewind" ) return if num < 0 : self . statuslog . error ( "Rewind must be postitive or 0" ) return elif num > len ( self . prev_queue ) : self . statuslog . warning ( "Rewinding to start" ) else : self . statuslog . info ( "Rewinding" ) for i in range ( num + 1 ) : if len ( self . prev_queue ) > 0 : self . queue . insert ( 0 , self . prev_queue . pop ( ) ) try : self . streamer . stop ( ) except Exception as e : logger . exception ( e )
The rewind command
41,293
async def shuffle ( self ) : self . logger . debug ( "shuffle command" ) if not self . state == 'ready' : return self . statuslog . debug ( "Shuffling" ) random . shuffle ( self . queue ) self . update_queue ( ) self . statuslog . debug ( "Shuffled" )
The shuffle command
41,294
async def set_loop ( self , loop_value ) : if loop_value not in [ 'on' , 'off' , 'shuffle' ] : self . statuslog . error ( "Loop value must be `off`, `on`, or `shuffle`" ) return self . loop_type = loop_value if self . loop_type == 'on' : self . statuslog . info ( "Looping on" ) elif self . loop_type == 'off' : self . statuslog . info ( "Looping off" ) elif self . loop_type == 'shuffle' : self . statuslog . info ( "Looping on and shuffling" )
Updates the loop value; can be 'off', 'on', or 'shuffle'
41,295
async def setvolume ( self , value ) : self . logger . debug ( "volume command" ) if self . state != 'ready' : return logger . debug ( "Volume command received" ) if value == '+' : if self . volume < 100 : self . statuslog . debug ( "Volume up" ) self . volume = ( 10 * ( self . volume // 10 ) ) + 10 self . volumelog . info ( str ( self . volume ) ) try : self . streamer . volume = self . volume / 100 except AttributeError : pass else : self . statuslog . warning ( "Already at maximum volume" ) elif value == '-' : if self . volume > 0 : self . statuslog . debug ( "Volume down" ) self . volume = ( 10 * ( ( self . volume + 9 ) // 10 ) ) - 10 self . volumelog . info ( str ( self . volume ) ) try : self . streamer . volume = self . volume / 100 except AttributeError : pass else : self . statuslog . warning ( "Already at minimum volume" ) else : try : value = int ( value ) except ValueError : self . statuslog . error ( "Volume argument must be +, -, or a %" ) else : if 0 <= value <= 200 : self . statuslog . debug ( "Setting volume" ) self . volume = value self . volumelog . info ( str ( self . volume ) ) try : self . streamer . volume = self . volume / 100 except AttributeError : pass else : self . statuslog . error ( "Volume must be between 0 and 200" ) self . write_volume ( )
The volume command
41,296
def write_volume ( self ) : data = datatools . get_data ( ) data [ "discord" ] [ "servers" ] [ self . server_id ] [ _data . modulename ] [ "volume" ] = self . volume datatools . write_data ( data )
Writes the current volume to the data . json
41,297
async def movehere ( self , channel ) : self . logger . debug ( "movehere command" ) await self . embed . delete ( ) self . embed . channel = channel await self . embed . send ( ) await self . add_reactions ( ) self . statuslog . info ( "Moved to front" )
Moves the embed message to a new channel ; can also be used to move the musicplayer to the front
41,298
async def vsetup ( self , author ) : if self . vready : logger . warning ( "Attempt to init voice when already initialised" ) return if self . state != 'starting' : logger . error ( "Attempt to init from wrong state ('{}'), must be 'starting'." . format ( self . state ) ) return self . logger . debug ( "Setting up voice" ) self . vchannel = author . voice . voice_channel if self . vchannel : self . statuslog . info ( "Connecting to voice" ) try : self . vclient = await client . join_voice_channel ( self . vchannel ) except discord . ClientException as e : logger . exception ( e ) self . statuslog . warning ( "I'm already connected to a voice channel." ) return except discord . opus . OpusNotLoaded as e : logger . exception ( e ) logger . error ( "Could not load Opus. This is an error with your FFmpeg setup." ) self . statuslog . error ( "Could not load Opus." ) return except discord . DiscordException as e : logger . exception ( e ) self . statuslog . error ( "I couldn't connect to the voice channel. Check my permissions." ) return except Exception as e : self . statuslog . error ( "Internal error connecting to voice, disconnecting." ) logger . error ( "Error connecting to voice {}" . format ( e ) ) return else : self . statuslog . error ( "You're not connected to a voice channel." ) return self . vready = True
Creates the voice client
41,299
async def msetup ( self , text_channel ) : if self . mready : logger . warning ( "Attempt to init music when already initialised" ) return if self . state != 'starting' : logger . error ( "Attempt to init from wrong state ('{}'), must be 'starting'." . format ( self . state ) ) return self . logger . debug ( "Setting up gui" ) self . mchannel = text_channel self . new_embed_ui ( ) await self . embed . send ( ) await self . embed . usend ( ) await self . add_reactions ( ) self . mready = True
Creates the gui