idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
11,800
def _unwrap_func ( cls , decorated_func ) : if click is not None : # Workaround for click.command() decorator not setting # __wrapped__ if isinstance ( decorated_func , click . Command ) : return cls . _unwrap_func ( decorated_func . callback ) if hasattr ( decorated_func , '__wrapped__' ) : # Recursion: unwrap more if needed return cls . _unwrap_func ( decorated_func . __wrapped__ ) else : # decorated_func isn't actually decorated, no more # unwrapping to do return decorated_func
This unwraps a decorated func returning the inner wrapped func .
134
13
11,801
def _register_dependent ( self , dependent , resource_name ) : if dependent not in self . dependents : self . dependents [ dependent ] = [ ] self . dependents [ dependent ] . insert ( 0 , resource_name )
Register a mapping of the dependent to resource name .
50
10
11,802
def register ( self , resource_name , dependent = None ) : if dependent is None : # Give a partial usable as a decorator return partial ( self . register , resource_name ) dependent = self . _unwrap_dependent ( dependent ) self . _register_dependent ( dependent , resource_name ) self . _register_resource_dependency ( resource_name , dependent ) # Return dependent to ease use as decorator return dependent
Register the given dependent as depending on the resource named by resource_name .
91
15
11,803
def verify_ocsp ( cls , certificate , issuer ) : return OCSPVerifier ( certificate , issuer , cls . get_ocsp_url ( ) , cls . get_ocsp_responder_certificate_path ( ) ) . verify ( )
Runs OCSP verification and returns error code - 0 means success
58
13
11,804
def requests_retry_session ( retries = 3 , backoff_factor = 0.3 , status_forcelist = ( 500 , 502 , 504 ) , session = None ) : session = session or requests . Session ( ) retry = Retry ( total = retries , read = retries , connect = retries , backoff_factor = backoff_factor , status_forcelist = status_forcelist , ) adapter = HTTPAdapter ( max_retries = retry ) session . mount ( 'http://' , adapter ) session . mount ( 'https://' , adapter ) return session
Create a requests session that handles errors by retrying .
128
11
11,805
def configure_technote ( meta_stream ) : _metadata = yaml . load ( meta_stream ) confs = _build_confs ( _metadata ) return confs
Builds a dict of Sphinx configuration variables given a central configuration for LSST Design Documents and a metadata YAML file .
38
26
11,806
def ocsp_responder_certificate_path ( ) : certificate_path = getattr ( settings , 'ESTEID_OCSP_RESPONDER_CERTIFICATE_PATH' , 'TEST_of_SK_OCSP_RESPONDER_2011.pem' ) if certificate_path in [ 'TEST_of_SK_OCSP_RESPONDER_2011.pem' , 'sk-ocsp-responder-certificates.pem' ] : return os . path . join ( os . path . dirname ( __file__ ) , 'certs' , certificate_path ) return certificate_path
Get ocsp responder certificate path
145
8
11,807
def new ( self , * * kwargs ) : app = self . app or current_app mailer = app . extensions [ 'marrowmailer' ] msg = mailer . new ( * * kwargs ) msg . __class__ = Message return msg
Return a new Message instance . The arguments are passed to the marrow . mailer . Message constructor .
57
20
11,808
def send ( self , msg ) : app = self . app or current_app mailer = app . extensions [ 'marrowmailer' ] mailer . start ( ) if not hasattr ( msg , '__iter__' ) : result = mailer . send ( msg ) else : result = map ( lambda message : mailer . send ( message ) , msg ) mailer . stop ( ) return result
Send the message . If message is an iterable then send all the messages .
87
16
11,809
def wdiff ( settings , wrap_with_html = False , fold_breaks = False , hard_breaks = False ) : diff = generate_wdiff ( settings . org_file , settings . new_file , fold_breaks ) if wrap_with_html : return wrap_content ( diff , settings , hard_breaks ) else : return diff
Returns the results of wdiff in a HTML compatible format .
74
12
11,810
def _load_config ( config_file ) : logger . debug ( 'Config file: {}' . format ( config_file ) ) parser = configparser . ConfigParser ( ) try : with config_file . open ( 'r' ) as f : parser . read_file ( f ) except FileNotFoundError as e : logger . warning ( 'Config file not found' ) parser = _use_default ( config_file ) except configparser . ParsingError as e : logger . warning ( 'Error in config file: {}' . format ( e ) ) parser = _use_default ( config_file ) finally : try : config = _load_options ( parser ) except ( configparser . NoOptionError ) : parser = _use_default ( config_file ) config = _load_options ( parser ) logger . debug ( 'Config loaded: {}' . format ( config_file ) ) return config
Load settings from config file and return them as a dict . If the config file is not found or if it is invalid create and use a default config file .
194
32
11,811
def _load_options ( parser ) : config = dict ( MESSAGE_DURATION = parser . getint ( 'gui' , 'message_duration' ) , GUI_WELCOME_LABLE = parser . get ( 'gui' , 'gui_welcome_label' ) , FULL_USER_NAMES = parser . getboolean ( 'gui' , 'full_user_names' ) , LARGE_FONT_SIZE = parser . getint ( 'gui' , 'large_font_size' ) , MEDIUM_FONT_SIZE = parser . getint ( 'gui' , 'medium_font_size' ) , SMALL_FONT_SIZE = parser . getint ( 'gui' , 'small_font_size' ) , TINY_FONT_SIZE = parser . getint ( 'gui' , 'tiny_font_size' ) , MAX_INPUT_LENGTH = parser . getint ( 'gui' , 'max_input_length' ) , ) return config
Load config options from parser and return them as a dict .
226
12
11,812
def _use_default ( config_file ) : default_config = OrderedDict ( ( ( 'gui' , OrderedDict ( ( ( 'message_duration' , 5 ) , ( 'gui_welcome_label' , 'Welcome to the STEM Learning Center!' ) , ( 'full_user_names' , True ) , ( 'large_font_size' , 30 ) , ( 'medium_font_size' , 18 ) , ( 'small_font_size' , 15 ) , ( 'tiny_font_size' , 10 ) , ( 'max_input_length' , 9 ) , ) ) , ) , ) ) parser = configparser . ConfigParser ( ) parser . read_dict ( default_config ) if config_file . exists ( ) : backup = config_file . with_suffix ( '.bak' ) os . rename ( str ( config_file ) , str ( backup ) ) logger . info ( '{} moved to {}.' . format ( config_file , backup ) ) with config_file . open ( 'w' ) as f : parser . write ( f ) logger . info ( 'Default config file created.' ) return parser
Write default values to a config file . If another config file already exists back it up before replacing it with the new file .
255
25
11,813
def add_atom ( self , key , atom ) : self . graph . add_node ( key , atom = atom )
Set an atom . Existing atom will be overwritten .
26
12
11,814
def atoms_iter ( self ) : for n , atom in self . graph . nodes . data ( "atom" ) : yield n , atom
Iterate over atoms .
30
5
11,815
def add_bond ( self , key1 , key2 , bond ) : self . graph . add_edge ( key1 , key2 , bond = bond )
Set a bond . Existing bond will be overwritten .
35
12
11,816
def bonds_iter ( self ) : for u , v , bond in self . graph . edges . data ( "bond" ) : yield u , v , bond
Iterate over bonds .
35
5
11,817
def neighbors ( self , key ) : return { n : attr [ "bond" ] for n , attr in self . graph [ key ] . items ( ) }
Return dict of neighbor atom index and connecting bond .
37
10
11,818
def neighbors_iter ( self ) : for n , adj in self . graph . adj . items ( ) : yield n , { n : attr [ "bond" ] for n , attr in adj . items ( ) }
Iterate over atoms and return its neighbors .
49
9
11,819
def clear ( self ) : # self.graph = nx.Graph() self . graph . clear ( ) self . data . clear ( ) self . descriptors . clear ( ) self . size2d = None self . rings = None self . scaffolds = None self . isolated = None
Empty the instance
61
3
11,820
def _convert ( self , pos ) : px = pos [ 0 ] + self . logical_size . width ( ) / 2 py = self . logical_size . height ( ) / 2 - pos [ 1 ] return px , py
For QPainter coordinate system reflect over X axis and translate from center to top - left
52
18
11,821
def run_sphinx ( root_dir ) : logger = logging . getLogger ( __name__ ) # This replicates what Sphinx's internal command line hander does in # https://github.com/sphinx-doc/sphinx/blob/master/sphinx/cmd/build.py # build_main() # configuration root_dir = os . path . abspath ( root_dir ) srcdir = root_dir # root directory of Sphinx content confdir = root_dir # directory where conf.py is located outdir = os . path . join ( root_dir , '_build' , 'html' ) doctreedir = os . path . join ( root_dir , '_build' , 'doctree' ) builder = 'html' confoverrides = { } status = sys . stdout # set to None for 'quiet' mode warning = sys . stderr error = sys . stderr freshenv = False # attempt to re-use existing build artificats warningiserror = False tags = [ ] verbosity = 0 jobs = 1 # number of processes force_all = True filenames = [ ] logger . debug ( 'Sphinx config: srcdir={0}' . format ( srcdir ) ) logger . debug ( 'Sphinx config: confdir={0}' . format ( confdir ) ) logger . debug ( 'Sphinx config: outdir={0}' . format ( outdir ) ) logger . debug ( 'Sphinx config: doctreedir={0}' . format ( doctreedir ) ) logger . debug ( 'Sphinx config: builder={0}' . format ( builder ) ) logger . debug ( 'Sphinx config: freshenv={0:b}' . format ( freshenv ) ) logger . debug ( 'Sphinx config: warningiserror={0:b}' . format ( warningiserror ) ) logger . debug ( 'Sphinx config: verbosity={0:d}' . format ( verbosity ) ) logger . debug ( 'Sphinx config: jobs={0:d}' . format ( jobs ) ) logger . debug ( 'Sphinx config: force_all={0:b}' . format ( force_all ) ) app = None try : with patch_docutils ( ) , docutils_namespace ( ) : app = Sphinx ( srcdir , confdir , outdir , doctreedir , builder , confoverrides , status , warning , freshenv , warningiserror , tags , verbosity , jobs ) app . build ( force_all , filenames ) return app . 
statuscode except ( Exception , KeyboardInterrupt ) as exc : args = MockSphinxNamespace ( verbosity = verbosity , traceback = True ) handle_exception ( app , args , exc , error ) return 1
Run the Sphinx build process .
625
7
11,822
def get_settings ( config_file ) : default_settings = { 'general' : { 'endpoint' : 'http://guacamole.antojitos.io/files/' , 'shortener' : 'http://t.antojitos.io/api/v1/urls' , } } settings = configparser . ConfigParser ( ) try : settings . read_dict ( default_settings ) except AttributeError : # using python 2.7 for section , options in default_settings . items ( ) : settings . add_section ( section ) for option , value in options . items ( ) : settings . set ( section , option , value ) if config_file is not None and os . path . exists ( config_file ) : settings . read ( config_file ) return settings if os . path . exists ( CONFIG_FILE ) : settings . read ( CONFIG_FILE ) return settings return settings
Search and load a configuration file .
199
7
11,823
def encrypt ( self , message , public_key ) : # Get the maximum message length based on the key max_str_len = rsa . common . byte_size ( public_key . n ) - 11 # If the message is longer than the key size, split it into a list to # be encrypted if len ( message ) > max_str_len : message = textwrap . wrap ( message , width = max_str_len ) else : message = [ message ] # Create a list for the encrypted message to send enc_msg = [ ] # If we have a long message, loop through and encrypt each part of the # string for line in message : # Encrypt the line in the message into a bytestring enc_line = rsa . encrypt ( line , public_key ) # Convert the encrypted bytestring into ASCII, so we can send it # over the network enc_line_converted = binascii . b2a_base64 ( enc_line ) enc_msg . append ( enc_line_converted ) # Serialize the encrypted message again with json enc_msg = json . dumps ( enc_msg ) # Return the list of encrypted strings return enc_msg
Encrypts a string using a given rsa . PublicKey object . If the message is larger than the key it will split it up into a list and encrypt each line in the list .
255
39
11,824
def decrypt ( self , message ) : # Unserialize the encrypted message message = json . loads ( message ) # Set up a list for the unencrypted lines of the message unencrypted_msg = [ ] for line in message : # Convert from ascii back to bytestring enc_line = binascii . a2b_base64 ( line ) # Decrypt the line using our private key unencrypted_line = rsa . decrypt ( enc_line , self . private_key ) unencrypted_msg . append ( unencrypted_line ) # Convert the message from a list back into a string unencrypted_msg = "" . join ( unencrypted_msg ) return unencrypted_msg
Decrypts a string using our own private key object .
148
12
11,825
def on_any_event ( self , event ) : for delegate in self . delegates : if hasattr ( delegate , "on_any_event" ) : delegate . on_any_event ( event )
On any event method
44
4
11,826
def on_created ( self , event ) : for delegate in self . delegates : if hasattr ( delegate , "on_created" ) : delegate . on_created ( event )
On created method
38
3
11,827
def on_deleted ( self , event ) : for delegate in self . delegates : if hasattr ( delegate , "on_deleted" ) : delegate . on_deleted ( event )
On deleted method
41
3
11,828
def on_modified ( self , event ) : for delegate in self . delegates : if hasattr ( delegate , "on_modified" ) : delegate . on_modified ( event )
On modified method
38
3
11,829
def on_moved ( self , event ) : for delegate in self . delegates : if hasattr ( delegate , "on_moved" ) : delegate . on_moved ( event )
On moved method
41
3
11,830
def on_created ( self , event ) : if self . trigger != "create" : return action_input = ActionInput ( event , "" , self . name ) flows . Global . MESSAGE_DISPATCHER . send_message ( action_input )
Fired when something s been created
57
7
11,831
def start ( cls ) : if cls . _thread is None : cls . _thread = threading . Thread ( target = cls . _run , name = "Heartbeat" ) cls . _thread . daemon = True cls . _thread . start ( )
Start background thread if not already started
60
7
11,832
def resolved_task ( cls , task ) : for t in cls . tasks : if t is task or t . execute is task : return t
Task instance representing task if any
32
6
11,833
def _run ( cls ) : if cls . _thread : with cls . _lock : # First run: execute each task once to get it started for task in cls . tasks : cls . _execute_task ( task ) cls . tasks . sort ( ) cls . _last_execution = time . time ( ) while cls . _thread : with cls . _lock : if cls . tasks : for task in cls . tasks : if task . next_execution - cls . _last_execution < 0.5 : cls . _execute_task ( task ) else : break cls . tasks . sort ( ) cls . _last_execution = time . time ( ) cls . _sleep_delay = cls . tasks [ 0 ] . next_execution - cls . _last_execution else : cls . _sleep_delay = 1 sleep_delay = max ( 0.1 , cls . _sleep_delay ) # Don't hold cls._lock while sleeping, sleep delay should be 1 second when no tasks are present time . sleep ( sleep_delay )
Background thread s main function execute registered tasks accordingly to their frequencies
246
12
11,834
def parity_plot ( X , Y , model , devmodel , axes_labels = None ) : model_outputs = Y . shape [ 1 ] with plt . style . context ( 'seaborn-whitegrid' ) : fig = plt . figure ( figsize = ( 2.5 * model_outputs , 2.5 ) , dpi = 300 ) for i in range ( model_outputs ) : ax = fig . add_subplot ( 1 , model_outputs , i + 1 ) minval = np . min ( [ np . exp ( model . predict ( X ) [ : , i ] ) , np . exp ( Y ) [ : , i ] ] ) maxval = np . max ( [ np . exp ( model . predict ( X ) [ : , i ] ) , np . exp ( Y ) [ : , i ] ] ) buffer = ( maxval - minval ) / 100 * 2 minval = minval - buffer maxval = maxval + buffer ax . plot ( [ minval , maxval ] , [ minval , maxval ] , linestyle = "-" , label = None , c = "black" , linewidth = 1 ) ax . plot ( np . exp ( Y ) [ : , i ] , np . exp ( model . predict ( X ) ) [ : , i ] , marker = "*" , linestyle = "" , alpha = 0.4 ) if axes_labels : ax . set_ylabel ( "Predicted {}" . format ( axes_labels [ '{}' . format ( i ) ] ) ) ax . set_xlabel ( "Actual {}" . format ( axes_labels [ '{}' . format ( i ) ] ) ) else : ax . set_ylabel ( "Predicted {}" . format ( devmodel . Data . columns [ - ( 6 - i ) ] . split ( "<" ) [ 0 ] ) , wrap = True , fontsize = 5 ) ax . set_xlabel ( "Actual {}" . format ( devmodel . Data . columns [ - ( 6 - i ) ] . split ( "<" ) [ 0 ] ) , wrap = True , fontsize = 5 ) plt . xlim ( minval , maxval ) plt . ylim ( minval , maxval ) ax . grid ( ) plt . tight_layout ( ) return plt
A standard method of creating parity plots between predicted and experimental values for trained models .
517
16
11,835
def expand_dates ( df , columns = [ ] ) : columns = df . columns . intersection ( columns ) df2 = df . reindex ( columns = set ( df . columns ) . difference ( columns ) ) for column in columns : df2 [ column + '_year' ] = df [ column ] . apply ( lambda x : x . year ) df2 [ column + '_month' ] = df [ column ] . apply ( lambda x : x . month ) df2 [ column + '_day' ] = df [ column ] . apply ( lambda x : x . day ) return df2
generate year month day features from specified date features
128
10
11,836
def binarize ( df , category_classes , all_classes = True , drop = True , astype = None , inplace = True , min_freq = None ) : if type ( category_classes ) is not dict : columns = set ( category_classes ) category_classes = { column : df [ column ] . unique ( ) for column in columns } else : columns = category_classes . keys ( ) df_new = df if inplace else df . drop ( columns , axis = 1 ) for category in columns : classes = category_classes [ category ] for i in range ( len ( classes ) - 1 if not all_classes else len ( classes ) ) : c = df [ category ] == classes [ i ] if not min_freq or c . sum ( ) >= min_freq : if astype is not None : c = c . astype ( astype ) df_new [ '%s_%s' % ( category , str ( classes [ i ] ) . replace ( ' ' , '_' ) ) ] = c if drop and inplace : df_new . drop ( columns , axis = 1 , inplace = True ) return df_new
Binarize specified categoricals . Works inplace!
253
12
11,837
def select_regexes ( strings , regexes ) : strings = set ( strings ) select = set ( ) if isinstance ( strings , collections . Iterable ) : for r in regexes : s = set ( filter ( re . compile ( '^' + r + '$' ) . search , strings ) ) strings -= s select |= s return select else : raise ValueError ( "exclude should be iterable" )
select subset of strings matching a regex treats strings as a set
92
12
11,838
def parse_delta ( s ) : if s == 'all' : return None else : ls = delta_regex . findall ( s ) if len ( ls ) == 1 : return relativedelta ( * * { delta_chars [ ls [ 0 ] [ 1 ] ] : int ( ls [ 0 ] [ 0 ] ) } ) else : raise ValueError ( 'Invalid delta string: %s' % s )
parse a string to a delta all is represented by None
91
11
11,839
def apply ( self , df ) : if hasattr ( self . definition , '__call__' ) : r = self . definition ( df ) elif self . definition in df . columns : r = df [ self . definition ] elif not isinstance ( self . definition , string_types ) : r = pd . Series ( self . definition , index = df . index ) else : raise ValueError ( "Invalid column definition: %s" % str ( self . definition ) ) return r . astype ( self . astype ) if self . astype else r
Takes a pd . DataFrame and returns the newly defined column i . e . a pd . Series that has the same index as df .
121
31
11,840
def ip_to_long ( ip ) : quad = ip . split ( '.' ) if len ( quad ) == 1 : quad = quad + [ 0 , 0 , 0 ] elif len ( quad ) < 4 : host = quad [ - 1 : ] quad = quad [ : - 1 ] + [ 0 , ] * ( 4 - len ( quad ) ) + host lip = 0 for q in quad : lip = ( lip << 8 ) | int ( q ) return lip
Convert ip address to a network byte order 32 - bit integer .
101
14
11,841
def lsst_doc_shortlink_role ( name , rawtext , text , lineno , inliner , options = None , content = None ) : options = options or { } content = content or [ ] node = nodes . reference ( text = '{0}-{1}' . format ( name . upper ( ) , text ) , refuri = 'https://ls.st/{0}-{1}' . format ( name , text ) , * * options ) return [ node ] , [ ]
Link to LSST documents given their handle using LSST s ls . st link shortener .
111
19
11,842
def lsst_doc_shortlink_titlecase_display_role ( name , rawtext , text , lineno , inliner , options = None , content = None ) : options = options or { } content = content or [ ] node = nodes . reference ( text = '{0}-{1}' . format ( name . title ( ) , text ) , refuri = 'https://ls.st/{0}-{1}' . format ( name , text ) , * * options ) return [ node ] , [ ]
Link to LSST documents given their handle using LSST s ls . st link shortener with the document handle displayed in title case .
116
27
11,843
def run ( ) : args = parse_args ( ) if args . verbose : log_level = logging . DEBUG else : log_level = logging . INFO logging . basicConfig ( level = log_level , format = '%(asctime)s %(levelname)s %(name)s: %(message)s' ) if not args . verbose : # Manage third-party loggers req_logger = logging . getLogger ( 'requests' ) req_logger . setLevel ( logging . WARNING ) logger = logging . getLogger ( __name__ ) logger . info ( 'refresh-lsst-bib version {}' . format ( __version__ ) ) error_count = process_bib_files ( args . dir ) sys . exit ( error_count )
Command line entrypoint for the refresh - lsst - bib program .
176
15
11,844
def run_build_cli ( ) : args = parse_args ( ) if args . verbose : log_level = logging . DEBUG else : log_level = logging . INFO logging . basicConfig ( level = log_level , format = '%(asctime)s %(levelname)s %(name)s: %(message)s' ) logger = logging . getLogger ( __name__ ) logger . info ( 'build-stack-docs version {0}' . format ( __version__ ) ) return_code = build_stack_docs ( args . root_project_dir ) if return_code == 0 : logger . info ( 'build-stack-docs succeeded' ) sys . exit ( 0 ) else : logger . error ( 'Sphinx errored: code {0:d}' . format ( return_code ) ) sys . exit ( 1 )
Command line entrypoint for the build - stack - docs program .
191
13
11,845
def parse_args ( ) : parser = argparse . ArgumentParser ( description = "Build a Sphinx documentation site for an EUPS stack, " "such as pipelines.lsst.io." , epilog = "Version {0}" . format ( __version__ ) ) parser . add_argument ( '-d' , '--dir' , dest = 'root_project_dir' , help = "Root Sphinx project directory" ) parser . add_argument ( '-v' , '--verbose' , dest = 'verbose' , action = 'store_true' , default = False , help = 'Enable Verbose output (debug level logging)' ) return parser . parse_args ( )
Create an argument parser for the build - stack - docs program .
152
13
11,846
def discover_setup_packages ( ) : logger = logging . getLogger ( __name__ ) # Not a PyPI dependency; assumed to be available in the build environment. import eups eups_client = eups . Eups ( ) products = eups_client . getSetupProducts ( ) packages = { } for package in products : name = package . name info = { 'dir' : package . dir , 'version' : package . version } packages [ name ] = info logger . debug ( 'Found setup package: {name} {version} {dir}' . format ( name = name , * * info ) ) return packages
Summarize packages currently set up by EUPS listing their set up directories and EUPS version names .
136
21
11,847
def find_table_file ( root_project_dir ) : ups_dir_path = os . path . join ( root_project_dir , 'ups' ) table_path = None for name in os . listdir ( ups_dir_path ) : if name . endswith ( '.table' ) : table_path = os . path . join ( ups_dir_path , name ) break if not os . path . exists ( table_path ) : raise RuntimeError ( 'Could not find the EUPS table file at {}' . format ( table_path ) ) return table_path
Find the EUPS table file for a project .
128
10
11,848
def list_packages_in_eups_table ( table_text ) : logger = logging . getLogger ( __name__ ) # This pattern matches required product names in EUPS table files. pattern = re . compile ( r'setupRequired\((?P<name>\w+)\)' ) listed_packages = [ m . group ( 'name' ) for m in pattern . finditer ( table_text ) ] logger . debug ( 'Packages listed in the table file: %r' , listed_packages ) return listed_packages
List the names of packages that are required by an EUPS table file .
117
15
11,849
def find_package_docs ( package_dir , skippedNames = None ) : logger = logging . getLogger ( __name__ ) if skippedNames is None : skippedNames = [ ] doc_dir = os . path . join ( package_dir , 'doc' ) modules_yaml_path = os . path . join ( doc_dir , 'manifest.yaml' ) if not os . path . exists ( modules_yaml_path ) : raise NoPackageDocs ( 'Manifest YAML not found: {0}' . format ( modules_yaml_path ) ) with open ( modules_yaml_path ) as f : manifest_data = yaml . safe_load ( f ) module_dirs = { } package_dirs = { } static_dirs = { } if 'modules' in manifest_data : for module_name in manifest_data [ 'modules' ] : if module_name in skippedNames : logger . debug ( 'Skipping module {0}' . format ( module_name ) ) continue module_dir = os . path . join ( doc_dir , module_name ) # validate that the module's documentation directory does exist if not os . path . isdir ( module_dir ) : message = 'module doc dir not found: {0}' . format ( module_dir ) logger . warning ( message ) continue module_dirs [ module_name ] = module_dir logger . debug ( 'Found module doc dir {0}' . format ( module_dir ) ) if 'package' in manifest_data : package_name = manifest_data [ 'package' ] full_package_dir = os . path . join ( doc_dir , package_name ) # validate the directory exists if os . path . isdir ( full_package_dir ) and package_name not in skippedNames : package_dirs [ package_name ] = full_package_dir logger . debug ( 'Found package doc dir {0}' . format ( full_package_dir ) ) else : logger . warning ( 'package doc dir excluded or not found: {0}' . format ( full_package_dir ) ) if 'statics' in manifest_data : for static_dirname in manifest_data [ 'statics' ] : full_static_dir = os . path . join ( doc_dir , static_dirname ) # validate the directory exists if not os . path . isdir ( full_static_dir ) : message = '_static doc dir not found: {0}' . format ( full_static_dir ) logger . 
warning ( message ) continue # Make a relative path to `_static` that's used as the # link source in the root docproject's _static/ directory relative_static_dir = os . path . relpath ( full_static_dir , os . path . join ( doc_dir , '_static' ) ) static_dirs [ relative_static_dir ] = full_static_dir logger . debug ( 'Found _static doc dir: {0}' . format ( full_static_dir ) ) Dirs = namedtuple ( 'Dirs' , [ 'module_dirs' , 'package_dirs' , 'static_dirs' ] ) return Dirs ( module_dirs = module_dirs , package_dirs = package_dirs , static_dirs = static_dirs )
Find documentation directories in a package using manifest . yaml .
740
12
11,850
def remove_existing_links ( root_dir ) : logger = logging . getLogger ( __name__ ) for name in os . listdir ( root_dir ) : full_name = os . path . join ( root_dir , name ) if os . path . islink ( full_name ) : logger . debug ( 'Deleting existing symlink {0}' . format ( full_name ) ) os . remove ( full_name )
Delete any symlinks present at the root of a directory .
98
12
11,851
def render_diagram ( out_base ) : import codecs import subprocess import sadisplay # generate class descriptions desc = sadisplay . describe ( list ( model_registry . values ( ) ) , show_methods = False , show_properties = True , show_indexes = True , ) # write description in DOT format with codecs . open ( out_base + '.dot' , 'w' , encoding = 'utf-8' ) as f : f . write ( sadisplay . dot ( desc ) ) # check existence of DOT_EXECUTABLE variable and file if not hasattr ( config , 'DOT_EXECUTABLE' ) : raise RuntimeError ( "Please configure the 'DOT_EXECUTABLE' variable in your 'project_config.py'" ) if not os . path . exists ( config . DOT_EXECUTABLE ) : raise IOError ( "Could not find file pointed to by 'DOT_EXECUTABLE': " + str ( config . DOT_EXECUTABLE ) ) # render to image using DOT # noinspection PyUnresolvedReferences subprocess . check_call ( [ config . DOT_EXECUTABLE , '-T' , 'png' , '-o' , out_base + '.png' , out_base + '.dot' ] )
Render a data model diagram
289
5
11,852
def get_max_id ( cls , session ) : # sqlalchemy allows only one level of inheritance, so just check this class and all its bases id_base = None for c in [ cls ] + list ( cls . __bases__ ) : for base_class in c . __bases__ : if base_class . __name__ == 'Base' : if id_base is None : # we found our base class for determining the ID id_base = c else : raise RuntimeError ( "Multiple base object classes for class " + cls . __name__ ) # this should never happen if id_base is None : raise RuntimeError ( "Error searching for base class of " + cls . __name__ ) # get its max ID max_id = session . query ( func . max ( id_base . id ) ) . scalar ( ) # if no object is present, None is returned if max_id is None : max_id = 0 return max_id
Get the current max value of the id column .
213
10
11,853
def truncate_to_field_length ( self , field , value ) : max_len = getattr ( self . __class__ , field ) . prop . columns [ 0 ] . type . length if value and len ( value ) > max_len : return value [ : max_len ] else : return value
Truncate the value of a string field to the field s max length .
67
16
11,854
def extern ( obj , timeout = 200 ) : global installed # Register it just once. if not installed : install_hook ( obj , timeout ) installed = True
Tell Tkinter to process untnwisted event loop . It registers just once the update handle .
34
20
11,855
def intern ( obj , timeout ) : core . gear . timeout = timeout core . gear . pool . append ( obj )
Tell untwisted to process an extern event loop .
25
12
11,856
def _make_ticket_node ( ticket_id , config , options = None ) : options = options or { } ref = config . jira_uri_template . format ( ticket = ticket_id ) link = nodes . reference ( text = ticket_id , refuri = ref , * * options ) return link
Construct a reference node for a JIRA ticket .
67
11
11,857
def _oxford_comma_separator ( i , length ) : if length == 1 : return None elif length < 3 and i == 0 : return ' and ' elif i < length - 2 : return ', ' elif i == length - 2 : return ', and ' else : return None
Make a separator for a prose - like list with between items except for and after the second to last item .
64
23
11,858
def jira_role ( name , rawtext , text , lineno , inliner , options = None , content = None , oxford_comma = True ) : options = options or { } content = content or [ ] config = inliner . document . settings . env . app . config ticket_ids = [ each . strip ( ) for each in utils . unescape ( text ) . split ( ',' ) ] n_tickets = len ( ticket_ids ) if oxford_comma : sep_factory = _oxford_comma_separator else : sep_factory = _comma_separator node_list = [ ] for i , ticket_id in enumerate ( ticket_ids ) : node = _make_ticket_node ( ticket_id , config , options = options ) node_list . append ( node ) sep_text = sep_factory ( i , n_tickets ) if sep_text is not None : sep = nodes . raw ( text = sep_text , format = 'html' ) node_list . append ( sep ) return node_list , [ ]
Sphinx role for referencing a JIRA ticket .
240
12
11,859
def jira_bracket_role ( name , rawtext , text , lineno , inliner , options = None , content = None , open_symbol = '[' , close_symbol = ']' ) : node_list , _ = jira_role ( name , rawtext , text , lineno , inliner , options = options , content = None , oxford_comma = False ) node_list = nodes . raw ( text = open_symbol , format = 'html' ) + node_list + nodes . raw ( text = close_symbol , format = 'html' ) return node_list , [ ]
Sphinx role for referencing a JIRA ticket with ticket numbers enclosed in braces . Useful for changelogs .
137
24
11,860
def jira_parens_role ( name , rawtext , text , lineno , inliner , options = None , content = None ) : return jira_bracket_role ( name , rawtext , text , lineno , inliner , options = None , content = None , open_symbol = '(' , close_symbol = ')' )
Sphinx role for referencing a JIRA ticket with ticket numbers enclosed in parentheses . Useful for changelogs .
77
24
11,861
def _method_call(self, method, category, **kwargs):
    """Call an API method: build the request URL, issue the request,
    parse the JSON response, and translate HTTP errors.

    :param method: API method URL template; ``{format}`` and keyword
        placeholders are filled from the response format and ``kwargs``.
    :param category: URL path segment naming the API category.
    :param kwargs: values substituted into the ``method`` template.
    :raises FantasyDataError: if the API host is unreachable, the API key
        is invalid (401), or any other non-200 status is returned.
    """
    session = requests.Session()
    # Connectivity probe: a plain GET against the API host before the real
    # request, purely to surface connection errors with a clear message.
    try:
        response = session.get("http://" + self._api_address)
    except requests.exceptions.ConnectionError:
        raise FantasyDataError('Error: Cannot connect to the FantasyData API')
    method = method.format(format=self._response_format, **kwargs)
    request_url = "/v3/{game_type}/{category}/{format}/{method}?{get_params}".format(
        game_type=self.game_type,
        category=category,
        format=self._response_format,
        method=method,
        get_params=self._get_params)
    response = session.get(self._api_schema + self._api_address + request_url,
                           headers=self._headers)
    result = response.json()
    # NOTE(review): ``response.status_code`` is truthy for every real HTTP
    # status, so this condition effectively means "result is a dict"
    # (error payloads come back as dicts; successful lists skip the check).
    if isinstance(result, dict) and response.status_code:
        if response.status_code == 401:
            raise FantasyDataError('Error: Invalid API key')
        elif response.status_code == 200:
            # for NBA everything is ok here.
            pass
        else:
            raise FantasyDataError('Error: Failed to get response')
    return result
Call API method . Generate request . Parse response . Process errors method str API method url for request . Contains parameters params dict parameters for method url
277
30
11,862
def get_projected_player_game_stats_by_player(self, season, week, player_id):
    """Projected Player Game Stats by Player."""
    return self._method_call(
        "PlayerGameProjectionStatsByPlayerID/{season}/{week}/{player_id}",
        "projections", season=season, week=week, player_id=player_id)
Projected Player Game Stats by Player
83
7
11,863
def get_projected_player_game_stats_by_team(self, season, week, team_id):
    """Projected Player Game Stats by Team."""
    return self._method_call(
        "PlayerGameProjectionStatsByTeam/{season}/{week}/{team_id}",
        "projections", season=season, week=week, team_id=team_id)
Projected Player Game Stats by Team
82
7
11,864
def get_projected_player_game_stats_by_week(self, season, week):
    """Projected Player Game Stats by Week."""
    return self._method_call(
        "PlayerGameProjectionStatsByWeek/{season}/{week}",
        "projections", season=season, week=week)
Projected Player Game Stats by Week
64
7
11,865
def get_projected_fantasy_defense_game_stats_by_week(self, season, week):
    """Projected Fantasy Defense Game Stats by Week."""
    return self._method_call(
        "FantasyDefenseProjectionsByGame/{season}/{week}",
        "projections", season=season, week=week)
Projected Fantasy Defense Game Stats by Week
67
8
11,866
def get_injuries(self, season, week):
    """Injuries by week."""
    return self._method_call("Injuries/{season}/{week}", "stats",
                             season=season, week=week)
Injuries by week
48
4
11,867
def get_injuries_by_team(self, season, week, team_id):
    """Injuries by week and team."""
    return self._method_call("Injuries/{season}/{week}/{team_id}", "stats",
                             season=season, week=week, team_id=team_id)
Injuries by week and team
70
6
11,868
def get_box_score_by_team(self, season, week, team_id):
    """Box score by week and team."""
    return self._method_call("BoxScoreV3/{season}/{week}/{team_id}", "stats",
                             season=season, week=week, team_id=team_id)
Box score by week and team
73
6
11,869
def authenticate(self, password):
    """Authenticate against the LDAP directory.

    Returns the corresponding ``User`` object on success, ``None`` on
    failure. LDAP errors are first offered to ``ldap_error`` signal
    receivers and only logged when no receiver responded; unexpected
    exceptions are logged and re-raised.
    """
    user = None
    try:
        self._authenticate_user_dn(password)  # bind as the user's DN
        self._check_requirements()            # group/DN requirement checks
        self._get_or_create_user()            # sync the Django user object
        user = self._user
    except self.AuthenticationFailed as e:
        logger.debug(u"Authentication failed for %s: %s" % (self._username, e))
    except ldap.LDAPError as e:
        # Give signal receivers a chance to handle the LDAP error; only
        # log a warning if nobody was listening.
        results = ldap_error.send(self.backend.__class__,
                                  context='authenticate', exception=e)
        if len(results) == 0:
            logger.warning(u"Caught LDAPError while authenticating %s: %s",
                           self._username, pprint.pformat(e))
    except Exception:
        logger.exception(u"Caught Exception while authenticating %s",
                         self._username)
        raise
    return user
Authenticates against the LDAP directory and returns the corresponding User object if successful . Returns None on failure .
198
21
11,870
def get_group_permissions(self):
    """Return the set of Django permissions granted via the user's LDAP
    group memberships, if ``FIND_GROUP_PERMS`` is enabled (an empty set
    otherwise).

    The result is computed once and cached on the instance.
    """
    if self._group_permissions is None:
        self._group_permissions = set()
        if self.settings.FIND_GROUP_PERMS:
            try:
                self._load_group_permissions()
            except ldap.LDAPError as e:
                # Offer the error to signal receivers first; warn only
                # when no receiver handled it.
                results = ldap_error.send(self.backend.__class__,
                                          context='get_group_permissions',
                                          exception=e)
                if len(results) == 0:
                    logger.warning(
                        "Caught LDAPError loading group permissions: %s",
                        pprint.pformat(e))
    return self._group_permissions
If allowed by the configuration this returns the set of permissions defined by the user s LDAP group memberships .
146
22
11,871
def _populate_user(self):
    """Populate our User object with information from the LDAP directory.

    Steps run in a fixed order: attribute mapping, group-membership
    flags, DN-regex flags, then negated DN-regex flags.
    """
    populate_steps = (
        self._populate_user_from_attributes,
        self._populate_user_from_group_memberships,
        self._populate_user_from_dn_regex,
        self._populate_user_from_dn_regex_negation,
    )
    for step in populate_steps:
        step()
Populates our User object with information from the LDAP directory .
75
13
11,872
def _populate_and_save_user_profile(self):
    """Populate a Django user-profile object with fields from the LDAP
    directory and save it if anything changed.

    Locates the profile model via the (deprecated) ``AUTH_PROFILE_MODULE``
    setting; quietly does nothing when the setting is invalid
    (``LookupError``) or the user has no profile (``ObjectDoesNotExist``).
    """
    try:
        app_label, class_name = django.conf.settings.AUTH_PROFILE_MODULE.split('.')
        profile_model = apps.get_model(app_label, class_name)
        profile, created = profile_model.objects.get_or_create(user=self._user)
        save_profile = False
        logger.debug("Populating Django user profile for %s",
                     get_user_username(self._user))
        # Each populate step reports whether it modified the profile;
        # the ``or`` keeps save_profile sticky once any step returns True.
        save_profile = self._populate_profile_from_attributes(profile) or save_profile
        save_profile = self._populate_profile_flags_from_dn_regex(profile) or save_profile
        save_profile = self._populate_profile_from_group_memberships(profile) or save_profile
        # Any signal receiver present counts as a modification, to be safe.
        signal_responses = populate_user_profile.send(self.backend.__class__,
                                                      profile=profile,
                                                      ldap_user=self)
        if len(signal_responses) > 0:
            save_profile = True
        if save_profile:
            profile.save()
    except ObjectDoesNotExist:
        logger.debug("Django user %s does not have a profile to populate",
                     get_user_username(self._user))
    except LookupError:
        logger.debug('User Profile model defined in settings.AUTH_PROFILE_MODULE is invalid')
Populates a User profile object with fields from the LDAP directory .
321
14
11,873
def _populate_profile_from_attributes ( self , profile ) : save_profile = False for field , attr in self . settings . PROFILE_ATTR_MAP . items ( ) : try : # user_attrs is a hash of lists of attribute values setattr ( profile , field , self . attrs [ attr ] [ 0 ] ) save_profile = True except Exception : logger . warning ( "%s does not have a value for the attribute %s" , self . dn , attr ) return save_profile
Populate the given profile object from AUTH_LDAP_PROFILE_ATTR_MAP . Returns True if the profile was modified .
116
28
11,874
def _populate_profile_from_group_memberships(self, profile):
    """Populate the given profile object from
    AUTH_LDAP_PROFILE_FLAGS_BY_GROUP. Returns True if the profile was
    modified.
    """
    modified = False
    for flag_field, group_dns in self.settings.PROFILE_FLAGS_BY_GROUP.items():
        # A flag may be keyed to a single DN string or a list of DNs.
        if isinstance(group_dns, six.string_types):
            group_dns = [group_dns]
        is_member = any(self._get_groups().is_member_of(dn)
                        for dn in group_dns)
        setattr(profile, flag_field, is_member)
        modified = True
    return modified
Populate the given profile object from AUTH_LDAP_PROFILE_FLAGS_BY_GROUP . Returns True if the profile was modified .
126
30
11,875
def _load_group_permissions(self):
    """Populate ``self._group_permissions`` from Django ``Permission``
    rows attached to groups matching the user's LDAP group names.

    Each permission is stored in the usual "app_label.codename" form.
    """
    group_names = self._get_groups().get_group_names()
    perms = Permission.objects.filter(group__name__in=group_names)
    perms = perms.values_list('content_type__app_label', 'codename')
    perms = perms.order_by()  # drop any default ordering from Meta
    # Idiom fix: build the set directly instead of via a throwaway list.
    self._group_permissions = {"%s.%s" % (ct, name) for ct, name in perms}
Populates self . _group_permissions based on LDAP group membership and Django group permissions .
122
20
11,876
def get_task_id(self):
    """Return the ``task_id`` field of the JSON response (or ``None``),
    logging the request method and URL.
    """
    self.logger.info("%s\t%s" % (self.request_method, self.request_url))
    return self.json_response.get("task_id", None)
Method to get all department members .
62
7
11,877
def get_message_id(self):
    """Return the ``messageId`` field of the JSON response (or ``None``),
    logging the request method and URL.
    """
    self.logger.info("%s\t%s" % (self.request_method, self.request_url))
    return self.json_response.get("messageId", None)
Method to get messageId of group created .
61
9
11,878
def change_dir(directory):
    """Decorator factory: run the wrapped function with ``directory`` as
    the current working directory.

    Fixes over the naive version: the original working directory is
    restored even when the function raises (``try``/``finally``), and the
    wrapped function's return value is propagated to the caller.
    """
    def cd_decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            org_path = os.getcwd()
            os.chdir(directory)
            try:
                return func(*args, **kwargs)
            finally:
                # Always change back, even on exceptions.
                os.chdir(org_path)
        return wrapper
    return cd_decorator
Wraps a function to run in a given directory .
82
11
11,879
def build_css(minimize=True):
    """Build CSS from SASS; compressed output unless ``minimize`` is
    False (then nested output).
    """
    print('Build CSS')
    style = 'compressed' if minimize else 'nested'
    run(CMD_SASS.format(style=style))
Builds CSS from SASS .
57
7
11,880
def profile(func):
    """Decorator: run ``func`` under cProfile and print the top 20
    entries sorted by cumulative time, returning the function's result.

    Fix: apply ``functools.wraps`` so the wrapped function keeps its
    name, docstring, and other metadata (the original wrapper clobbered
    them).
    """
    from functools import wraps  # local import keeps the block self-contained

    @wraps(func)
    def _f(*args, **kwargs):
        print("\n<<<---")
        pr = cProfile.Profile()
        pr.enable()
        res = func(*args, **kwargs)
        p = pstats.Stats(pr)
        p.strip_dirs().sort_stats('cumtime').print_stats(20)
        print("\n--->>>")
        return res
    return _f
Decorator Execute cProfile
103
7
11,881
def total_size(obj, verbose=False):
    """Return the approximate total memory footprint of ``obj``,
    recursing into standard containers and ``__dict__`` attributes.

    Each object is counted at most once (tracked by ``id``), so shared
    sub-objects don't inflate the total.
    """
    visited = set()

    def _size(item):
        if id(item) in visited:
            return 0
        visited.add(id(item))
        total = sys.getsizeof(item, default=0)
        if verbose:
            print(total, type(item), repr(item))
        if isinstance(item, (tuple, list, set, frozenset, deque)):
            total += sum(_size(element) for element in item)
        elif isinstance(item, dict):
            total += sum(_size(part)
                         for part in chain.from_iterable(item.items()))
        elif "__dict__" in dir(item):
            total += sum(_size(part)
                         for part in chain.from_iterable(item.__dict__.items()))
        return total

    return _size(obj)
Returns approximate memory size
191
4
11,882
def mute(func):
    """Decorator: silence stdout while ``func`` runs.

    Fix over the naive version: stdout is restored (and the devnull
    handle closed) in a ``finally`` block, so an exception in ``func``
    no longer leaves ``sys.stdout`` pointing at a closed devnull stream.
    """
    def _f(*args, **kwargs):
        devnull = open(os.devnull, 'w')
        sys.stdout = devnull
        try:
            return func(*args, **kwargs)
        finally:
            sys.stdout = sys.__stdout__
            devnull.close()
    return _f
Decorator Make stdout silent
73
7
11,883
def _insert_common_sphinx_configs ( c , * , project_name ) : c [ 'project' ] = project_name # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: c [ 'source_suffix' ] = '.rst' # The encoding of source files. c [ 'source_encoding' ] = 'utf-8-sig' # The master toctree document. c [ 'master_doc' ] = 'index' # Configure figure numbering c [ 'numfig' ] = True c [ 'numfig_format' ] = { 'figure' : 'Figure %s' , 'table' : 'Table %s' , 'code-block' : 'Listing %s' } # The reST default role (used for this markup: `text`) c [ 'default_role' ] = 'obj' # This is added to the end of RST files - a good place to put substitutions # to be used globally. c [ 'rst_epilog' ] = """ .. _Astropy: http://astropy.org """ # A list of warning types to suppress arbitrary warning messages. We mean # to override directives in # astropy_helpers.sphinx.ext.autodoc_enhancements, thus need to ignore # those warning. This can be removed once the patch gets released in # upstream Sphinx (https://github.com/sphinx-doc/sphinx/pull/1843). # Suppress the warnings requires Sphinx v1.4.2 c [ 'suppress_warnings' ] = [ 'app.add_directive' , ] return c
Add common core Sphinx configurations to the state .
373
10
11,884
def _insert_breathe_configs ( c , * , project_name , doxygen_xml_dirname ) : if doxygen_xml_dirname is not None : c [ 'breathe_projects' ] = { project_name : doxygen_xml_dirname } c [ 'breathe_default_project' ] = project_name return c
Add breathe extension configurations to the state .
85
8
11,885
def _insert_automodapi_configs ( c ) : # Don't show summaries of the members in each class along with the # class' docstring c [ 'numpydoc_show_class_members' ] = False c [ 'autosummary_generate' ] = True c [ 'automodapi_toctreedirnm' ] = 'py-api' c [ 'automodsumm_inherited_members' ] = True # Docstrings for classes and methods are inherited from parents. c [ 'autodoc_inherit_docstrings' ] = True # Class documentation should only contain the class docstring and # ignore the __init__ docstring, account to LSST coding standards. # c['autoclass_content'] = "both" c [ 'autoclass_content' ] = "class" # Default flags for automodapi directives. Special members are dunder # methods. # NOTE: We want to used `inherited-members`, but it seems to be causing # documentation duplication in the automodapi listings. We're leaving # this out for now. See https://jira.lsstcorp.org/browse/DM-14782 for # additional notes. # NOTE: Without inherited members set, special-members doesn't need seem # to have an effect (even for special members where the docstrings are # directly written in the class, not inherited. # c['autodoc_default_flags'] = ['inherited-members'] c [ 'autodoc_default_flags' ] = [ 'show-inheritance' , 'special-members' ] return c
Add configurations related to automodapi autodoc and numpydoc to the state .
357
19
11,886
def _insert_matplotlib_configs ( c ) : if 'extensions' not in c : c [ 'extensions' ] = [ ] try : import matplotlib . sphinxext . plot_directive c [ 'extensions' ] += [ matplotlib . sphinxext . plot_directive . __name__ ] except ( ImportError , AttributeError ) : # AttributeError is checked here in case matplotlib is installed but # Sphinx isn't. Note that this module is imported by the config file # generator, even if we're not building the docs. warnings . warn ( "matplotlib's plot_directive could not be imported. " "Inline plots will not be included in the output." ) return c
Add configurations related to matplotlib s plot directive to the state .
163
14
11,887
def _insert_single_package_eups_version ( c , eups_version ) : c [ 'release_eups_tag' ] = 'current' c [ 'release_git_ref' ] = 'master' c [ 'version' ] = eups_version c [ 'release' ] = eups_version c [ 'scipipe_conda_ref' ] = 'master' c [ 'pipelines_demo_ref' ] = 'master' c [ 'newinstall_ref' ] = 'master' return c
Insert version information into the configuration namespace .
121
8
11,888
def _insert_eups_version ( c ) : # Attempt to get the eups tag from the build environment eups_tag = os . getenv ( 'EUPS_TAG' ) if eups_tag is None : eups_tag = 'd_latest' # Try to guess the git ref that corresponds to this tag if eups_tag in ( 'd_latest' , 'w_latest' , 'current' ) : git_ref = 'master' elif eups_tag . startswith ( 'd_' ) : # Daily EUPS tags are not tagged on git git_ref = 'master' elif eups_tag . startswith ( 'v' ) : # Major version or release candidate tag git_ref = eups_tag . lstrip ( 'v' ) . replace ( '_' , '.' ) elif eups_tag . startswith ( 'w_' ) : # Regular weekly tag git_ref = eups_tag . replace ( '_' , '.' ) else : # Ideally shouldn't get to this point git_ref = 'master' # Now set variables for the Jinja context c [ 'release_eups_tag' ] = eups_tag c [ 'release_git_ref' ] = git_ref c [ 'version' ] = eups_tag c [ 'release' ] = eups_tag c [ 'scipipe_conda_ref' ] = git_ref c [ 'pipelines_demo_ref' ] = git_ref c [ 'newinstall_ref' ] = git_ref return c
Insert information about the current EUPS tag into the configuration namespace .
351
13
11,889
def build_pipelines_lsst_io_configs(*, project_name, copyright=None):
    """Build a dict of Sphinx configurations that populate the conf.py of
    the main pipelines_lsst_io Sphinx project for LSST Science Pipelines
    documentation.

    :param project_name: project name used for the core and HTML configs.
    :param copyright: accepted but currently unused; the copyright string
        is computed below from the current year.
    :returns: the assembled configuration dict.
    """
    # Work around Sphinx bug related to large and highly-nested source files
    sys.setrecursionlimit(2000)
    c = {}
    c = _insert_common_sphinx_configs(c, project_name=project_name)
    # HTML theme
    c = _insert_html_configs(c, project_name=project_name,
                             short_project_name=project_name)
    # Sphinx extension modules
    c = _insert_extensions(c)
    # Intersphinx configuration
    c = _insert_intersphinx_mapping(c)
    # Breathe extension configuration
    # FIXME configure this for multiple sites
    # Automodapi and numpydoc configurations
    c = _insert_automodapi_configs(c)
    # Matplotlib configurations
    c = _insert_matplotlib_configs(c)
    # Graphviz configurations
    c = _insert_graphviz_configs(c)
    # Add versioning information
    c = _insert_eups_version(c)
    # Always use "now" as the date for the main site's docs because we can't
    # look at the Git history of each stack package.
    date = datetime.datetime.now()
    c['today'] = date.strftime('%Y-%m-%d')
    # Use this copyright for now. Ultimately we want to gather COPYRIGHT files
    # and build an integrated copyright that way.
    c['copyright'] = '2015-{year} LSST contributors'.format(year=date.year)
    # Hide todo directives in the "published" documentation on the main site.
    c['todo_include_todos'] = False
    # List of patterns, relative to source directory, that match files and
    # directories to ignore when looking for source files.
    c['exclude_patterns'] = [
        'README.rst',
        # Build products
        '_build',
        # Source for release notes (contents are included in built pages)
        'releases/note-source/*.rst',
        'releases/tickets-source/*.rst',
        # EUPS configuration directory
        'ups',
        # Recommended directory for pip installing doc eng Python packages
        '.pyvenv',
        # GitHub templates
        '.github',
        # This 'home' directory is created by the docubase image for the
        # sqre/infra/documenteer ci.lsst.codes Jenkins job. Ideally this
        # shouldn't be in the directory at all, but we certainly need to
        # ignore it while its here.
        'home',
    ]
    # Insert rst_epilog configuration
    c = _insert_rst_epilog(c)
    # Set up the context for the sphinx-jinja extension
    c = _insert_jinja_configuration(c)
    return c
Build a dict of Sphinx configurations that populate the conf . py of the main pipelines_lsst_io Sphinx project for LSST Science Pipelines documentation .
628
33
11,890
def setup(self, app):
    """Setup the plugin: coerce numeric configuration options to int."""
    super().setup(app)
    for option in ('port', 'db', 'poolsize'):
        setattr(self.cfg, option, int(getattr(self.cfg, option)))
Setup the plugin .
66
4
11,891
async def startup(self, app):
    """Connect to Redis.

    Honors the plugin configuration: a fakeredis connection when
    ``cfg.fake`` is set; otherwise a single connection or a pool
    (depending on ``cfg.poolsize``), each guarded by ``cfg.timeout``.
    With ``cfg.pubsub`` enabled, a dedicated pub/sub connection and a
    background reader task are also started.

    :raises PluginException: when fakeredis is missing or connecting
        times out.
    """
    if self.cfg.fake:
        if not FakeConnection:
            raise PluginException('Install fakeredis for fake connections.')
        self.conn = await FakeConnection.create()
        if self.cfg.pubsub:
            # Fake mode shares one connection for commands and pub/sub.
            self.pubsub_conn = self.conn
    else:
        try:
            if self.cfg.poolsize <= 1:
                self.conn = await asyncio.wait_for(
                    asyncio_redis.Connection.create(
                        host=self.cfg.host,
                        port=self.cfg.port,
                        password=self.cfg.password,
                        db=self.cfg.db,
                    ), self.cfg.timeout)
            else:
                self.conn = await asyncio.wait_for(
                    asyncio_redis.Pool.create(
                        host=self.cfg.host,
                        port=self.cfg.port,
                        password=self.cfg.password,
                        db=self.cfg.db,
                        poolsize=self.cfg.poolsize,
                    ), self.cfg.timeout)
            if self.cfg.pubsub:
                # Pub/sub gets its own connection: a subscribed Redis
                # connection cannot issue regular commands.
                self.pubsub_conn = await asyncio.wait_for(
                    asyncio_redis.Connection.create(
                        host=self.cfg.host,
                        port=self.cfg.port,
                        password=self.cfg.password,
                        db=self.cfg.db,
                    ), self.cfg.timeout)
        except asyncio.TimeoutError:
            raise PluginException('Muffin-redis connection timeout.')
    if self.cfg.pubsub:
        self.pubsub_subscription = await self.pubsub_conn.start_subscribe()
        # Background task fanning incoming messages out to subscribers.
        self.pubsub_reader = ensure_future(self._pubsub_reader_proc(),
                                           loop=self.app.loop)
Connect to Redis .
393
5
11,892
async def cleanup(self, app):
    """Close the plugin's Redis connections and stop the pub/sub reader."""
    self.conn.close()
    if self.pubsub_conn:
        self.pubsub_reader.cancel()
        self.pubsub_conn.close()
    # give connections a chance to actually terminate
    # TODO: use better method once it will be added,
    # see https://github.com/jonathanslenders/asyncio-redis/issues/56
    await asyncio.sleep(0)
Close self connections .
98
4
11,893
def set(self, key, value, *args, **kwargs):
    """Store the given value into Redis, jsonpickle-encoding it first
    when the plugin is configured to do so.
    """
    payload = jsonpickle.encode(value) if self.cfg.jsonpickle else value
    return self.conn.set(key, payload, *args, **kwargs)
Store the given value into Redis .
59
8
11,894
async def get(self, key):
    """Get a value by key, jsonpickle-decoding it (from bytes or str)
    when the plugin is configured to do so.
    """
    raw = await self.conn.get(key)
    if not self.cfg.jsonpickle:
        return raw
    if isinstance(raw, bytes):
        return jsonpickle.decode(raw.decode('utf-8'))
    if isinstance(raw, str):
        return jsonpickle.decode(raw)
    return raw
Decode the value .
78
5
11,895
def publish(self, channel, message):
    """Publish a message to a channel, jsonpickle-encoding it first when
    the plugin is configured to do so.
    """
    payload = jsonpickle.encode(message) if self.cfg.jsonpickle else message
    return self.conn.publish(channel, payload)
Publish message to channel .
41
6
11,896
def start_subscribe(self):
    """Create a new Subscription context manager.

    :raises ValueError: if not connected, or pub/sub is not enabled.
    """
    if not self.conn:
        raise ValueError('Not connected')
    if not self.pubsub_conn:
        raise ValueError('PubSub not enabled')
    # Each call hands out a fresh context manager.
    return Subscription(self)
Create a new Subscription context manager .
57
8
11,897
async def _subscribe(self, channels, is_mask):
    """Subscribe this subscription's queue to the given channels.

    Registers the queue under each ``(channel, is_mask)`` key in the
    plugin's subscription registry, and only issues a Redis
    (p)subscribe for channels that had no local subscriber yet.
    """
    news = []
    for channel in channels:
        key = channel, is_mask
        self._channels.append(key)
        if key in self._plugin._subscriptions:
            self._plugin._subscriptions[key].append(self._queue)
        else:
            # First local subscriber: subscribe on Redis too.
            self._plugin._subscriptions[key] = [self._queue]
            news.append(channel)
    if news:
        # psubscribe for pattern (mask) channels, subscribe otherwise.
        await getattr(self._sub, 'psubscribe' if is_mask else 'subscribe')(news)
Subscribe to given channel .
132
5
11,898
async def _unsubscribe(self, channels, is_mask):
    """Unsubscribe from the given channels (or from *all* of this
    subscription's channels when ``channels`` is empty).

    Issues a Redis (p)unsubscribe only for channels whose last local
    subscriber just went away.
    """
    vanished = []
    if channels:
        for channel in channels:
            key = channel, is_mask
            self._channels.remove(key)
            self._plugin._subscriptions[key].remove(self._queue)
            if not self._plugin._subscriptions[key]:
                # we were last sub?
                vanished.append(channel)
                del self._plugin._subscriptions[key]
    else:
        # No channels given: drain everything we are subscribed to.
        while self._channels:
            channel, is_mask = key = self._channels.pop()
            self._plugin._subscriptions[key].remove(self._queue)
            if not self._plugin._subscriptions[key]:
                vanished.append(channel)
                del self._plugin._subscriptions[key]
    if vanished:
        await getattr(self._sub,
                      'punsubscribe' if is_mask else 'unsubscribe')(vanished)
Unsubscribe from given channel .
217
7
11,899
def xor(a, b):
    """Bitwise XOR of two equal-length byte sequences, as a bytearray.

    (With unequal lengths, ``zip`` silently truncates to the shorter
    input.)
    """
    result = bytearray()
    for left, right in zip(a, b):
        result.append(left ^ right)
    return result
Bitwise xor on equal length bytearrays .
30
12