idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
45,600 | def getZoom ( self , resolution ) : "Return the zoom level for a given resolution" assert resolution in self . RESOLUTIONS return self . RESOLUTIONS . index ( resolution ) | Return the zoom level for a given resolution |
45,601 | def _getZoomLevelRange ( self , resolution , unit = 'meters' ) : "Return lower and higher zoom level given a resolution" assert unit in ( 'meters' , 'degrees' ) if unit == 'meters' and self . unit == 'degrees' : resolution = resolution / self . metersPerUnit elif unit == 'degrees' and self . unit == 'meters' : resolution = resolution * EPSG4326_METERS_PER_UNIT lo = 0 hi = len ( self . RESOLUTIONS ) while lo < hi : mid = ( lo + hi ) // 2 if resolution > self . RESOLUTIONS [ mid ] : hi = mid else : lo = mid + 1 return lo , hi | Return lower and higher zoom level given a resolution |
45,602 | def getScale ( self , zoom ) : if self . unit == 'degrees' : resolution = self . getResolution ( zoom ) * EPSG4326_METERS_PER_UNIT else : resolution = self . getResolution ( zoom ) return resolution / STANDARD_PIXEL_SIZE | Returns the scale at a given zoom level |
45,603 | def make_dict_config ( graph ) : formatters = { } handlers = { } loggers = { } formatters [ "ExtraFormatter" ] = make_extra_console_formatter ( graph ) handlers [ "console" ] = make_stream_handler ( graph , formatter = "ExtraFormatter" ) if enable_loggly ( graph ) : formatters [ "JSONFormatter" ] = make_json_formatter ( graph ) handlers [ "LogglyHTTPSHandler" ] = make_loggly_handler ( graph , formatter = "JSONFormatter" ) loggers [ "" ] = { "handlers" : handlers . keys ( ) , "level" : graph . config . logging . level , } loggers . update ( make_library_levels ( graph ) ) return dict ( version = 1 , disable_existing_loggers = False , formatters = formatters , handlers = handlers , loggers = loggers , ) | Build a dictionary configuration from conventions and configuration . |
45,604 | def make_json_formatter ( graph ) : return { "()" : graph . config . logging . json_formatter . formatter , "fmt" : graph . config . logging . json_required_keys , } | Create the default json formatter . |
45,605 | def make_loggly_handler ( graph , formatter ) : base_url = graph . config . logging . loggly . base_url loggly_url = "{}/inputs/{}/tag/{}" . format ( base_url , graph . config . logging . loggly . token , "," . join ( [ graph . metadata . name , graph . config . logging . loggly . environment , ] ) , ) return { "class" : graph . config . logging . https_handler . class_ , "formatter" : formatter , "level" : graph . config . logging . level , "url" : loggly_url , } | Create the loggly handler . |
45,606 | def make_library_levels ( graph ) : levels = { } for level in [ "DEBUG" , "INFO" , "WARN" , "ERROR" ] : levels . update ( { component : { "level" : level , } for component in graph . config . logging . levels . default [ level . lower ( ) ] } ) for level in [ "DEBUG" , "INFO" , "WARN" , "ERROR" ] : levels . update ( { component : { "level" : level , } for component in graph . config . logging . levels . override [ level . lower ( ) ] } ) return levels | Create third party library logging level configurations . |
45,607 | def process_request ( self , request ) : site = request . site cache_key = '{prefix}-{site}' . format ( prefix = settings . REDIRECT_CACHE_KEY_PREFIX , site = site . domain ) redirects = cache . get ( cache_key ) if redirects is None : redirects = { redirect . old_path : redirect . new_path for redirect in Redirect . objects . filter ( site = site ) } cache . set ( cache_key , redirects , settings . REDIRECT_CACHE_TIMEOUT ) redirect_to = redirects . get ( request . path ) if redirect_to : return redirect ( redirect_to , permanent = True ) | Redirects the current request if there is a matching Redirect model with the current request URL as the old_path field . |
45,608 | def context_logger ( context_func , func , parent = None ) : if parent is None : parent = func . __self__ def wrapped ( * args , ** kwargs ) : parent . logger = ContextLogger ( getattr ( parent , 'logger' , getLogger ( parent . __class__ . __name__ ) ) , context_func ( * args , ** kwargs ) or dict ( ) , ) try : result = func ( * args , ** kwargs ) return result finally : parent . logger = parent . logger . logger return wrapped | The results of context_func will be executed and applied to a ContextLogger instance for the execution of func . The resulting ContextLogger instance will be available on parent . logger for the duration of func . |
45,609 | def str_brief ( obj , lim = 20 , dots = '...' , use_repr = True ) : if isinstance ( obj , basestring ) or not use_repr : full = str ( obj ) else : full = repr ( obj ) postfix = [ ] CLOSERS = { '(' : ')' , '{' : '}' , '[' : ']' , '"' : '"' , "'" : "'" , '<' : '>' } for i , c in enumerate ( full ) : if i >= lim + len ( postfix ) : return full [ : i ] + dots + '' . join ( reversed ( postfix ) ) if postfix and postfix [ - 1 ] == c : postfix . pop ( - 1 ) continue closer = CLOSERS . get ( c , None ) if closer is not None : postfix . append ( closer ) return full | Truncates a string starting from lim chars . The given object can be a string or something that can be casted to a string . |
45,610 | def get_mirror_resources_by_name_map ( self , scope = None ) : volumes_mirrors_by_name = dict ( ) cgs_mirrors_by_name = dict ( ) if ( ( scope is None ) or ( scope . lower ( ) == 'volume' ) ) : mirror_list = self . xcli_client . cmd . mirror_list ( scope = 'Volume' ) for xcli_mirror in mirror_list : name = MirroredEntities . get_mirrored_object_name ( xcli_mirror ) volumes_mirrors_by_name [ name ] = xcli_mirror if ( ( scope is None ) or ( scope . lower ( ) == CG ) ) : for xcli_mirror in self . xcli_client . cmd . mirror_list ( scope = 'CG' ) : name = MirroredEntities . get_mirrored_object_name ( xcli_mirror ) cgs_mirrors_by_name [ name ] = xcli_mirror res = Bunch ( volumes = volumes_mirrors_by_name , cgs = cgs_mirrors_by_name ) return res | returns a map volume_name - > volume cg_name - > cg scope is either None or CG or Volume |
45,611 | def get_host_port_names ( self , host_name ) : port_names = list ( ) host = self . get_hosts_by_name ( host_name ) fc_ports = host . fc_ports iscsi_ports = host . iscsi_ports port_names . extend ( fc_ports . split ( ',' ) if fc_ports != '' else [ ] ) port_names . extend ( iscsi_ports . split ( ',' ) if iscsi_ports != '' else [ ] ) return port_names | return a list of the port names of XIV host |
45,612 | def get_cluster_port_names ( self , cluster_name ) : port_names = list ( ) for host_name in self . get_hosts_by_clusters ( ) [ cluster_name ] : port_names . extend ( self . get_hosts_by_name ( host_name ) ) return port_names | return a list of the port names under XIV CLuster |
45,613 | def flush ( self ) : now = time . time ( ) to_remove = [ ] for k , entry in self . pool . items ( ) : if entry . timestamp < now : entry . client . close ( ) to_remove . append ( k ) for k in to_remove : del self . pool [ k ] | remove all stale clients from pool |
45,614 | def get ( self , user , password , endpoints ) : now = time . time ( ) if isinstance ( endpoints , str ) : endpoints = [ endpoints ] for ep in endpoints : if ep not in self . pool : continue entry = self . pool [ ep ] if ( not entry . client . is_connected ( ) or entry . timestamp + self . time_to_live < now ) : xlog . debug ( "XCLIClientPool: clearing stale client %s" , ep ) del self . pool [ ep ] entry . client . close ( ) continue user_client = entry . user_clients . get ( user , None ) if not user_client or not user_client . is_connected ( ) : user_client = entry . client . get_user_client ( user , password ) entry . user_clients [ user ] = user_client return user_client xlog . debug ( "XCLIClientPool: connecting to %s" , endpoints ) client = self . connector ( None , None , endpoints ) user_client = { user : client . get_user_client ( user , password ) } for ep in endpoints : self . pool [ ep ] = PoolEntry ( client , now , user_client ) return user_client [ user ] | Gets an existing connection or opens a new one |
45,615 | def _create_mirror ( self , resource_type , resource_name , target_name , mirror_type , slave_resource_name , create_slave = 'no' , remote_pool = None , rpo = None , remote_rpo = None , schedule = None , remote_schedule = None , activate_mirror = 'no' ) : kwargs = { resource_type : resource_name , 'target' : target_name , 'type' : mirror_type , 'slave_' + resource_type : slave_resource_name , 'create_slave' : create_slave , 'remote_pool' : remote_pool , 'rpo' : rpo , 'remote_rpo' : remote_rpo , 'schedule' : schedule , 'remote_schedule' : remote_schedule } if mirror_type == 'sync' : kwargs [ 'type' ] = 'sync_best_effort' kwargs [ 'rpo' ] = None else : kwargs [ 'type' ] = 'async_interval' if kwargs [ 'remote_schedule' ] is None : kwargs [ 'remote_schedule' ] = kwargs [ 'schedule' ] keys = set ( kwargs . keys ( ) ) . copy ( ) for k in keys : if kwargs [ k ] is None : kwargs . pop ( k ) logger . info ( 'creating mirror with arguments: %s' % kwargs ) self . xcli_client . cmd . mirror_create ( ** kwargs ) if activate_mirror == 'yes' : logger . info ( 'Activating mirror %s' % resource_name ) self . activate_mirror ( resource_name ) return self . get_mirror_resources ( ) [ resource_name ] | creates a mirror and returns a mirror object . resource_type must be vol or cg target name must be a valid target from target_list mirror type must be sync or async slave_resource_name would be the slave_vol or slave_cg name |
45,616 | def get_user ( self , username , * , mode = OsuMode . osu , event_days = 31 ) : return self . _make_req ( endpoints . USER , dict ( k = self . key , u = username , type = _username_type ( username ) , m = mode . value , event_days = event_days ) , JsonList ( User ) ) | Get a user profile . |
45,617 | def get_user_best ( self , username , * , mode = OsuMode . osu , limit = 50 ) : return self . _make_req ( endpoints . USER_BEST , dict ( k = self . key , u = username , type = _username_type ( username ) , m = mode . value , limit = limit ) , JsonList ( SoloScore ) ) | Get a user s best scores . |
45,618 | def get_user_recent ( self , username , * , mode = OsuMode . osu , limit = 10 ) : return self . _make_req ( endpoints . USER_RECENT , dict ( k = self . key , u = username , type = _username_type ( username ) , m = mode . value , limit = limit ) , JsonList ( RecentScore ) ) | Get a user s most recent scores within the last 24 hours . |
45,619 | def get_scores ( self , beatmap_id , * , username = None , mode = OsuMode . osu , mods = None , limit = 50 ) : return self . _make_req ( endpoints . SCORES , dict ( k = self . key , b = beatmap_id , u = username , type = _username_type ( username ) , m = mode . value , mods = mods . value if mods else None , limit = limit ) , JsonList ( BeatmapScore ) ) | Get the top scores for a given beatmap . |
45,620 | def get_beatmaps ( self , * , since = None , beatmapset_id = None , beatmap_id = None , username = None , mode = None , include_converted = False , beatmap_hash = None , limit = 500 ) : return self . _make_req ( endpoints . BEATMAPS , dict ( k = self . key , s = beatmapset_id , b = beatmap_id , u = username , since = "{:%Y-%m-%d %H:%M:%S}" . format ( since ) if since is not None else None , type = _username_type ( username ) , m = mode . value if mode else None , a = int ( include_converted ) , h = beatmap_hash , limit = limit ) , JsonList ( Beatmap ) ) | Get beatmaps . |
45,621 | def get_match ( self , match_id ) : return self . _make_req ( endpoints . MATCH , dict ( k = self . key , mp = match_id ) , Match ) | Get a multiplayer match . |
45,622 | def get_user_client ( self , user , password , populate = True ) : return XCLIClientForUser ( weakproxy ( self ) , user , password , populate = populate ) | Returns a new client for the given user . This is a lightweight client that only uses different credentials and shares the transport with the underlying client |
45,623 | def get_remote_client ( self , target_name , user = None , password = None ) : if user : base = self . get_user_client ( user , password , populate = False ) else : base = weakproxy ( self ) return RemoteXCLIClient ( base , target_name , populate = True ) | Returns a new client for the remote target . This is a lightweight client that only uses different credentials and shares the transport with the underlying client |
45,624 | def as_user ( self , user , password ) : with self . options ( user = user , password = password ) : yield self | A context - manager for get_user_client . Allows the execution of commands as a different user with ease . |
45,625 | def accuracy ( self , mode : OsuMode ) : if mode is OsuMode . osu : return ( ( 6 * self . count300 + 2 * self . count100 + self . count50 ) / ( 6 * ( self . count300 + self . count100 + self . count50 + self . countmiss ) ) ) if mode is OsuMode . taiko : return ( ( self . count300 + self . countgeki + ( 0.5 * ( self . count100 + self . countkatu ) ) ) / ( self . count300 + self . countgeki + self . count100 + self . countkatu + self . countmiss ) ) if mode is OsuMode . mania : return ( ( 6 * ( self . countgeki + self . count300 ) + 4 * self . countkatu + 2 * self . count100 + self . count50 ) / ( 6 * ( self . countgeki + self . count300 + self . countkatu + self . count100 + self . count50 + self . countmiss ) ) ) if mode is OsuMode . ctb : return ( ( self . count50 + self . count100 + self . count300 ) / ( self . count50 + self . count100 + self . count300 + self . countmiss + self . countkatu ) ) | Calculated accuracy . |
45,626 | def create_mirror ( self , resource_name , target_name , mirror_type , slave_resource_name , rpo = None , remote_rpo = None , schedule = None , remote_schedule = None , activate_mirror = 'no' ) : return self . _create_mirror ( 'cg' , resource_name , target_name , mirror_type , slave_resource_name , rpo = rpo , remote_rpo = remote_rpo , schedule = schedule , remote_schedule = remote_schedule , activate_mirror = activate_mirror ) | creates a mirror and returns a mirror object . target name must be a valid target from target_list mirror type must be sync or async slave_resource_name would be the slave_cg name |
45,627 | def get_cg_volumes ( self , group_id ) : for volume in self . xcli_client . cmd . vol_list ( cg = group_id ) : if volume . snapshot_of == '' : yield volume . name | return all non snapshots volumes in cg |
45,628 | def _certificate_required ( cls , hostname , port = XCLI_DEFAULT_PORT , ca_certs = None , validate = None ) : if not ca_certs : return False xlog . debug ( "CONNECT SSL %s:%s, cert_file=%s" , hostname , port , ca_certs ) certificate = ssl . get_server_certificate ( ( hostname , port ) , ca_certs = None ) if validate : return not validate ( certificate ) return True | returns true if connection should verify certificate |
45,629 | def _populate_bunch_with_element ( element ) : if 'value' in element . attrib : return element . get ( 'value' ) current_bunch = Bunch ( ) if element . get ( 'id' ) : current_bunch [ 'nextra_element_id' ] = element . get ( 'id' ) for subelement in element . getchildren ( ) : current_bunch [ subelement . tag ] = _populate_bunch_with_element ( subelement ) return current_bunch | Helper function to recursively populates a Bunch from an XML tree . Returns leaf XML elements as a simple value branch elements are returned as Bunches containing their subelements as value or recursively generated Bunch members . |
45,630 | def as_single_element ( self ) : if self . as_return_etree is None : return None if len ( self . as_return_etree . getchildren ( ) ) == 1 : return _populate_bunch_with_element ( self . as_return_etree . getchildren ( ) [ 0 ] ) return _populate_bunch_with_element ( self . as_return_etree ) | Processes the response as a single - element response like config_get or system_counters_get . If there is more then one element in the response or no elements this raises a ResponseError |
45,631 | def enabled_flags ( self ) : if not self . value : yield self . __flags_members__ [ 0 ] return val = self . value while val : lowest_bit = val & - val val ^= lowest_bit yield self . __flags_members__ [ lowest_bit ] | Return the objects for each individual set flag . |
45,632 | def contains_any ( self , other ) : return self . value == other . value or self . value & other . value | Check if any flags are set . |
45,633 | def get_statistics ( self ) : return { 'cumulative_elapsed_time' : self . get_cumulative_elapsed_time ( ) , 'percentage' : self . get_percentage ( ) , 'n_splits' : self . get_n_splits ( ) , 'mean_per_split' : self . get_mean_per_split ( ) , } | Get all statistics as a dictionary . |
45,634 | def conform_query ( cls , query ) : query = parse_qs ( query , keep_blank_values = True ) for key , vals in query . items ( ) : query [ key ] = yaml . load ( vals [ 0 ] or 'true' , Loader = yaml . FullLoader ) for key , val in cls . default_query . items ( ) : if key not in query : query [ key ] = val return query | Converts the query string from a target uri uses cls . default_query to populate default arguments . |
45,635 | def hog ( concurrency , requests , limit , timeout , params , paramfile , headers , headerfile , method , url ) : params = parse_from_list_and_file ( params , paramfile ) headers = parse_from_list_and_file ( headers , headerfile ) click . echo ( HR ) click . echo ( "Hog is running with {} threads, " . format ( concurrency ) + "{} requests " . format ( requests ) + "and timeout in {} second(s)." . format ( timeout ) ) if limit != 0 : click . echo ( ">>> Limit: {} request(s) per second." . format ( limit ) ) click . echo ( HR ) result = Hog ( callback ) . run ( url , params , headers , method , timeout , concurrency , requests , limit ) sys . stdout . write ( "\n" ) print_result ( result ) | Sending multiple HTTP requests ON GREEN thread |
45,636 | def _append_dict ( self , value , _file ) : _tabs = '\t' * self . _tctr _labs = '{tabs}<dict>\n' . format ( tabs = _tabs ) _file . write ( _labs ) self . _tctr += 1 for ( _item , _text ) in value . items ( ) : if _text is None : continue _tabs = '\t' * self . _tctr _keys = '{tabs}<key>{item}</key>\n' . format ( tabs = _tabs , item = _item ) _file . write ( _keys ) _text = self . object_hook ( _text ) _type = type ( _text ) . __name__ _MAGIC_TYPES [ _type ] ( self , _text , _file ) self . _tctr -= 1 _tabs = '\t' * self . _tctr _labs = '{tabs}</dict>\n' . format ( tabs = _tabs ) _file . write ( _labs ) | Call this function to write dict contents . |
45,637 | def _append_data ( self , value , _file ) : _tabs = '\t' * self . _tctr _text = base64 . b64encode ( value ) . decode ( ) _labs = '{tabs}<data>{text}</data>\n' . format ( tabs = _tabs , text = _text ) _file . write ( _labs ) | Call this function to write data contents . |
45,638 | def _append_integer ( self , value , _file ) : _tabs = '\t' * self . _tctr _text = value _labs = '{tabs}<integer>{text}</integer>\n' . format ( tabs = _tabs , text = _text ) _file . write ( _labs ) | Call this function to write integer contents . |
45,639 | def register_output_name ( self , input_folder , rel_name , rel_output_name ) : self . improper_input_file_mapping [ rel_name ] . add ( rel_output_name ) self . proper_input_file_mapping [ os . path . join ( input_folder , rel_name ) ] = rel_output_name self . proper_input_file_mapping [ rel_output_name ] = rel_output_name | Register proper and improper file mappings . |
45,640 | def _discover ( self ) : for ep in pkg_resources . iter_entry_points ( 'yamlsettings10' ) : ext = ep . load ( ) if callable ( ext ) : ext = ext ( ) self . add ( ext ) | Find and install all extensions |
45,641 | def get_extension ( self , protocol ) : if protocol not in self . registry : raise NoProtocolError ( "No protocol for %s" % protocol ) index = self . registry [ protocol ] return self . extensions [ index ] | Retrieve extension for the given protocol |
45,642 | def add ( self , extension ) : index = len ( self . extensions ) self . extensions [ index ] = extension for protocol in extension . protocols : self . registry [ protocol ] = index | Adds an extension to the registry |
45,643 | def _load_first ( self , target_uris , load_method , ** kwargs ) : if isinstance ( target_uris , string_types ) : target_uris = [ target_uris ] for target_uri in target_uris : target = urlsplit ( target_uri , scheme = self . default_protocol ) extension = self . get_extension ( target . scheme ) query = extension . conform_query ( target . query ) try : yaml_dict = extension . load_target ( target . scheme , target . path , target . fragment , target . username , target . password , target . hostname , target . port , query , load_method , ** kwargs ) return yaml_dict except extension . not_found_exception : pass raise IOError ( "unable to load: {0}" . format ( target_uris ) ) | Load first yamldict target found in uri list . |
45,644 | def load ( self , target_uris , fields = None , ** kwargs ) : yaml_dict = self . _load_first ( target_uris , yamlsettings . yamldict . load , ** kwargs ) if fields : yaml_dict . limit ( fields ) return yaml_dict | Load first yamldict target found in uri . |
45,645 | def load_all ( stream ) : loader = YAMLDictLoader ( stream ) try : while loader . check_data ( ) : yield loader . get_data ( ) finally : loader . dispose ( ) | Parse all YAML documents in a stream and produce corresponding YAMLDict objects . |
45,646 | def rebase ( self , yaml_dict ) : base = yaml_dict . clone ( ) base . update ( self ) self . clear ( ) self . update ( base ) | Use yaml_dict as self s new base and update with existing reverse of update . |
45,647 | def limit ( self , keys ) : if not isinstance ( keys , list ) and not isinstance ( keys , tuple ) : keys = [ keys ] remove_keys = [ k for k in self . keys ( ) if k not in keys ] for k in remove_keys : self . pop ( k ) | Remove all keys other than the keys specified . |
45,648 | def save ( yaml_dict , filepath ) : yamldict . dump ( yaml_dict , open ( filepath , 'w' ) , default_flow_style = False ) | Save YAML settings to the specified file path . |
45,649 | def update_from_file ( yaml_dict , filepaths ) : yaml_dict . update ( registry . load ( filepaths , list ( yaml_dict ) ) ) | Override YAML settings with loaded values from filepaths . |
45,650 | def update_from_env ( yaml_dict , prefix = None ) : prefix = prefix or "" def _set_env_var ( path , node ) : env_path = "{0}{1}{2}" . format ( prefix . upper ( ) , '_' if prefix else '' , '_' . join ( [ str ( key ) . upper ( ) for key in path ] ) ) env_val = os . environ . get ( env_path , None ) if env_val is not None : env_dict = yamldict . load ( 'val: {0}' . format ( env_val ) ) return env_dict . val else : return None yaml_dict . traverse ( _set_env_var ) | Override YAML settings with values from the environment variables . |
45,651 | def circle ( rad = 0.5 ) : _ctx = _state [ "ctx" ] _ctx . arc ( 0 , 0 , rad , 0 , 2 * math . pi ) _ctx . set_line_width ( 0 ) _ctx . stroke_preserve ( ) _ctx . fill ( ) | Draw a circle |
45,652 | def triangle ( rad = 0.5 ) : ctx = _state [ "ctx" ] side = 3 * rad / math . sqrt ( 3 ) ctx . move_to ( 0 , - rad / 2 ) ctx . line_to ( - side / 2 , - rad / 2 ) ctx . line_to ( 0 , rad ) ctx . line_to ( side / 2 , - rad / 2 ) ctx . close_path ( ) ctx . fill ( ) | Draw a triangle |
45,653 | def box ( side = 1 ) : half_side = side / 2 _state [ "ctx" ] . rectangle ( - half_side , - half_side , side , side ) _state [ "ctx" ] . fill ( ) | Draw a box |
45,654 | def _dump_header ( self ) : with open ( self . _file , 'w' ) as _file : _file . write ( self . _hsrt ) self . _sptr = _file . tell ( ) _file . write ( self . _hend ) | Initially dump file heads and tails . |
45,655 | def find_version ( package_name : str , version_module_name : str = '_version' , version_variable_name : str = 'VERSION' ) -> str : version_module = importlib . import_module ( '{}.{}' . format ( package_name . replace ( '-' , '_' ) , version_module_name ) ) return getattr ( version_module , version_variable_name ) | Simulate behaviour of from package_name . _version import VERSION and return VERSION . |
45,656 | def find_packages ( root_directory : str = '.' ) -> t . List [ str ] : exclude = [ 'test*' , 'test.*' ] if ( 'bdist_wheel' in sys . argv or 'bdist' in sys . argv ) else [ ] packages_list = setuptools . find_packages ( root_directory , exclude = exclude ) return packages_list | Find packages to pack . |
45,657 | def parse_requirements ( requirements_path : str = 'requirements.txt' ) -> t . List [ str ] : requirements = [ ] with HERE . joinpath ( requirements_path ) . open ( ) as reqs_file : for requirement in [ line . strip ( ) for line in reqs_file . read ( ) . splitlines ( ) ] : if not requirement or requirement . startswith ( '#' ) : continue requirements . append ( requirement ) return requirements | Read contents of requirements . txt file and return data from its relevant lines . |
45,658 | def partition_version_classifiers ( classifiers : t . Sequence [ str ] , version_prefix : str = 'Programming Language :: Python :: ' , only_suffix : str = ' :: Only' ) -> t . Tuple [ t . List [ str ] , t . List [ str ] ] : versions_min , versions_only = [ ] , [ ] for classifier in classifiers : version = classifier . replace ( version_prefix , '' ) versions = versions_min if version . endswith ( only_suffix ) : version = version . replace ( only_suffix , '' ) versions = versions_only try : versions . append ( tuple ( [ int ( _ ) for _ in version . split ( '.' ) ] ) ) except ValueError : pass return versions_min , versions_only | Find version number classifiers in given list and partition them into 2 groups . |
45,659 | def find_required_python_version ( classifiers : t . Sequence [ str ] , version_prefix : str = 'Programming Language :: Python :: ' , only_suffix : str = ' :: Only' ) -> t . Optional [ str ] : versions_min , versions_only = partition_version_classifiers ( classifiers , version_prefix , only_suffix ) if len ( versions_only ) > 1 : raise ValueError ( 'more than one "{}" version encountered in {}' . format ( only_suffix , versions_only ) ) only_version = None if len ( versions_only ) == 1 : only_version = versions_only [ 0 ] for version in versions_min : if version [ : len ( only_version ) ] != only_version : raise ValueError ( 'the "{}" version {} is inconsistent with version {}' . format ( only_suffix , only_version , version ) ) min_supported_version = None for version in versions_min : if min_supported_version is None or ( len ( version ) >= len ( min_supported_version ) and version < min_supported_version ) : min_supported_version = version if min_supported_version is None : if only_version is not None : return '.' . join ( [ str ( _ ) for _ in only_version ] ) else : return '>=' + '.' . join ( [ str ( _ ) for _ in min_supported_version ] ) return None | Determine the minimum required Python version . |
45,660 | def parse_rst ( text : str ) -> docutils . nodes . document : parser = docutils . parsers . rst . Parser ( ) components = ( docutils . parsers . rst . Parser , ) settings = docutils . frontend . OptionParser ( components = components ) . get_default_values ( ) document = docutils . utils . new_document ( '<rst-doc>' , settings = settings ) parser . parse ( text , document ) return document | Parse text assuming it s an RST markup . |
45,661 | def resolve_relative_rst_links ( text : str , base_link : str ) : document = parse_rst ( text ) visitor = SimpleRefCounter ( document ) document . walk ( visitor ) for target in visitor . references : name = target . attributes [ 'name' ] uri = target . attributes [ 'refuri' ] new_link = '`{} <{}{}>`_' . format ( name , base_link , uri ) if name == uri : text = text . replace ( '`<{}>`_' . format ( uri ) , new_link ) else : text = text . replace ( '`{} <{}>`_' . format ( name , uri ) , new_link ) return text | Resolve all relative links in a given RST document . |
45,662 | def visit_reference ( self , node : docutils . nodes . reference ) -> None : path = pathlib . Path ( node . attributes [ 'refuri' ] ) try : if path . is_absolute ( ) : return resolved_path = path . resolve ( ) except FileNotFoundError : return try : resolved_path . relative_to ( HERE ) except ValueError : return if not path . is_file ( ) : return assert node . attributes [ 'name' ] == node . children [ 0 ] . astext ( ) self . references . append ( node ) | Called for reference nodes . |
45,663 | def try_fields ( cls , * names ) -> t . Optional [ t . Any ] : for name in names : if hasattr ( cls , name ) : return getattr ( cls , name ) raise AttributeError ( ( cls , names ) ) | Return first existing of given class field names . |
45,664 | def parse_readme ( cls , readme_path : str = 'README.rst' , encoding : str = 'utf-8' ) -> str : with HERE . joinpath ( readme_path ) . open ( encoding = encoding ) as readme_file : long_description = readme_file . read ( ) if readme_path . endswith ( '.rst' ) and cls . download_url . startswith ( 'https://github.com/' ) : base_url = '{}/blob/v{}/' . format ( cls . download_url , cls . version ) long_description = resolve_relative_rst_links ( long_description , base_url ) return long_description | Parse readme and resolve relative links in it if it is feasible . |
45,665 | def prepare ( cls ) -> None : if cls . version is None : cls . version = find_version ( cls . name ) if cls . long_description is None : cls . long_description = cls . parse_readme ( ) if cls . packages is None : cls . packages = find_packages ( cls . root_directory ) if cls . install_requires is None : cls . install_requires = parse_requirements ( ) if cls . python_requires is None : cls . python_requires = find_required_python_version ( cls . classifiers ) | Fill in possibly missing package metadata . |
45,666 | def surface_to_image ( surface ) : from IPython . display import Image buf = BytesIO ( ) surface . write_to_png ( buf ) data = buf . getvalue ( ) buf . close ( ) return Image ( data = data ) | Renders current buffer surface to IPython image |
45,667 | def check_limits ( user_rule ) : def wrapper ( * args , ** kwargs ) : global _state _state [ "cnt_elements" ] += 1 _state [ "depth" ] += 1 matrix = _state [ "ctx" ] . get_matrix ( ) if _state [ "depth" ] >= MAX_DEPTH : logger . info ( "stop recursion by reaching max depth {}" . format ( MAX_DEPTH ) ) else : min_size_scaled = SIZE_MIN_FEATURE / min ( WIDTH , HEIGHT ) current_scale = max ( [ abs ( matrix [ i ] ) for i in range ( 2 ) ] ) if ( current_scale < min_size_scaled ) : logger . info ( "stop recursion by reaching min feature size" ) else : if _state [ "cnt_elements" ] > MAX_ELEMENTS : logger . info ( "stop recursion by reaching max elements" ) else : user_rule ( * args , ** kwargs ) _state [ "depth" ] -= 1 return wrapper | Stop recursion if resolution is too low on number of components is too high |
45,668 | def init ( canvas_size = ( 512 , 512 ) , max_depth = 12 , face_color = None , background_color = None ) : global _background_color _background_color = background_color global _ctx global cnt_elements global MAX_DEPTH global WIDTH global HEIGHT _init_state ( ) sys . setrecursionlimit ( 20000 ) MAX_DEPTH = max_depth WIDTH , HEIGHT = canvas_size if face_color is not None : r , g , b = htmlcolor_to_rgb ( face_color ) _state [ "ctx" ] . set_source_rgb ( r , g , b ) hue , saturation , brightness = colorsys . rgb_to_hsv ( r , g , b ) _state [ "color" ] = ( hue , saturation , brightness , 1 ) logger . debug ( "Init done" ) | Initializes global state |
45,669 | def htmlcolor_to_rgb ( str_color ) : if not ( str_color . startswith ( '#' ) and len ( str_color ) == 7 ) : raise ValueError ( "Bad html color format. Expected: '#RRGGBB' " ) result = [ 1.0 * int ( n , 16 ) / 255 for n in ( str_color [ 1 : 3 ] , str_color [ 3 : 5 ] , str_color [ 5 : ] ) ] return result | function to convert HTML - styly color string to RGB values |
45,670 | def compile_string ( self , data , source_path = None , is_two_file = True , post = None , lang = None ) : if not is_two_file : _ , data = self . split_metadata ( data , None , lang ) new_data , shortcodes = sc . extract_shortcodes ( data ) path_templates = os . path . join ( self . plugin_path , "tempaltes" ) LOGGER . info ( f"set path tempaltes to {path_templates}" ) with tempfile . TemporaryDirectory ( ) as tmpdir : subprocess . check_call ( [ 'pdoc' , '--html' , '--html-no-source' , '--html-dir' , tmpdir , "--template-dir" , path_templates ] + shlex . split ( new_data . strip ( ) ) ) fname = os . listdir ( tmpdir ) [ 0 ] tmd_subdir = os . path . join ( tmpdir , fname ) fname = os . listdir ( tmd_subdir ) [ 0 ] LOGGER . info ( f"tmpdir = {tmd_subdir}, fname = {fname}" ) with open ( os . path . join ( tmd_subdir , fname ) , 'r' , encoding = 'utf8' ) as inf : output = inf . read ( ) return self . site . apply_shortcodes_uuid ( output , shortcodes , filename = source_path , extra_context = { 'post' : post } ) | Compile docstrings into HTML strings with shortcode support . |
45,671 | def compile ( self , source , dest , is_two_file = True , post = None , lang = None ) : makedirs ( os . path . dirname ( dest ) ) with io . open ( dest , "w+" , encoding = "utf8" ) as out_file : with io . open ( source , "r" , encoding = "utf8" ) as in_file : data = in_file . read ( ) data , shortcode_deps = self . compile_string ( data , source , is_two_file , post , lang ) out_file . write ( data ) if post is None : if shortcode_deps : self . logger . error ( "Cannot save dependencies for post {0} (post unknown)" , source ) else : post . _depfile [ dest ] += shortcode_deps return True | Compile the docstring into HTML and save as dest . |
45,672 | def create_post ( self , path , ** kw ) : content = kw . pop ( 'content' , None ) onefile = kw . pop ( 'onefile' , False ) kw . pop ( 'is_page' , False ) metadata = { } metadata . update ( self . default_metadata ) metadata . update ( kw ) makedirs ( os . path . dirname ( path ) ) if not content . endswith ( '\n' ) : content += '\n' with io . open ( path , "w+" , encoding = "utf8" ) as fd : if onefile : fd . write ( write_metadata ( metadata , comment_wrap = False , site = self . site , compiler = self ) ) fd . write ( content ) | Create a new post . |
45,673 | def _append_branch ( self , value , _file ) : if not value : return self . _tctr += 1 _vlen = len ( value ) for ( _vctr , ( _item , _text ) ) in enumerate ( value . items ( ) ) : _text = self . object_hook ( _text ) _type = type ( _text ) . __name__ flag_dict = ( _type == 'dict' ) flag_list = ( _type == 'list' and ( len ( _text ) > 1 or ( len ( _text ) == 1 and type ( _text [ 0 ] ) . __name__ == 'dict' ) ) ) flag_tuple = ( _type == 'tuple' and ( len ( _text ) > 1 or ( len ( _text ) == 1 and type ( _text [ 0 ] ) . __name__ == 'dict' ) ) ) flag_bytes = ( _type == 'bytes' and len ( _text ) > 16 ) if any ( ( flag_dict , flag_list , flag_tuple , flag_bytes ) ) : _pref = '\n' else : _pref = ' ->' _labs = '' for _ in range ( self . _tctr ) : _labs += _TEMP_SPACES if self . _bctr [ _ ] else _TEMP_BRANCH _keys = '{labs} |-- {item}{pref}' . format ( labs = _labs , item = _item , pref = _pref ) _file . write ( _keys ) if _vctr == _vlen - 1 : self . _bctr [ self . _tctr ] = 1 _MAGIC_TYPES [ _type ] ( self , _text , _file ) _suff = '' if _type == 'dict' else '\n' _file . write ( _suff ) self . _bctr [ self . _tctr ] = 0 self . _tctr -= 1 | Call this function to write branch contents . |
45,674 | def _append_number ( self , value , _file ) : _text = value _labs = ' {text}' . format ( text = _text ) _file . write ( _labs ) | Call this function to write number contents . |
45,675 | def _append_object ( self , value , _file ) : _labs = ' {' _file . write ( _labs ) self . _tctr += 1 for ( _item , _text ) in value . items ( ) : _tabs = '\t' * self . _tctr _cmma = ',' if self . _vctr [ self . _tctr ] else '' _keys = '{cmma}\n{tabs}"{item}" :' . format ( cmma = _cmma , tabs = _tabs , item = _item ) _file . write ( _keys ) self . _vctr [ self . _tctr ] += 1 _text = self . object_hook ( _text ) _type = type ( _text ) . __name__ _MAGIC_TYPES [ _type ] ( self , _text , _file ) self . _vctr [ self . _tctr ] = 0 self . _tctr -= 1 _tabs = '\t' * self . _tctr _labs = '\n{tabs}{}' . format ( '}' , tabs = _tabs ) _file . write ( _labs ) | Call this function to write object contents . |
45,676 | def _append_string ( self , value , _file ) : _text = str ( value ) . replace ( '"' , '\\"' ) _labs = ' "{text}"' . format ( text = _text ) _file . write ( _labs ) | Call this function to write string contents . |
45,677 | def start ( self , verbose = None , end_in_new_line = None ) : if self . _start_time is not None and self . _end_time is None : return self if verbose is None : verbose = self . verbose_start if verbose : if end_in_new_line is None : end_in_new_line = self . end_in_new_line if end_in_new_line : self . log ( self . description ) else : self . log ( self . description , end = "" , flush = True ) self . _end_time = None self . _start_time = datetime . datetime . now ( ) return self | Start the stopwatch if it is paused . |
45,678 | def pause ( self ) : if self . _end_time is not None : return self . _end_time = datetime . datetime . now ( ) self . _elapsed_time += self . _end_time - self . _start_time | Pause the stopwatch . |
45,679 | def get_elapsed_time ( self ) : if self . _start_time is None or self . _end_time is not None : return self . _elapsed_time return self . _elapsed_time + ( datetime . datetime . now ( ) - self . _start_time ) | Get the elapsed time of the current split . |
45,680 | def split ( self , verbose = None , end_in_new_line = None ) : elapsed_time = self . get_elapsed_time ( ) self . split_elapsed_time . append ( elapsed_time ) self . _cumulative_elapsed_time += elapsed_time self . _elapsed_time = datetime . timedelta ( ) if verbose is None : verbose = self . verbose_end if verbose : if end_in_new_line is None : end_in_new_line = self . end_in_new_line if end_in_new_line : self . log ( "{} done in {}" . format ( self . description , elapsed_time ) ) else : self . log ( " done in {}" . format ( elapsed_time ) ) self . _start_time = datetime . datetime . now ( ) | Save the elapsed time of the current split and restart the stopwatch . |
45,681 | def reset ( self ) : self . _start_time = None self . _end_time = None self . _elapsed_time = datetime . timedelta ( ) self . _cumulative_elapsed_time = datetime . timedelta ( ) self . split_elapsed_time = [ ] | Reset the stopwatch . |
45,682 | def DownloadAccount ( self , next_page_token = None , max_results = None ) : param = { } if next_page_token : param [ 'nextPageToken' ] = next_page_token if max_results : param [ 'maxResults' ] = max_results response = self . _InvokeGitkitApi ( 'downloadAccount' , param ) return response . get ( 'nextPageToken' , None ) , response . get ( 'users' , { } ) | Downloads multiple accounts from Gitkit server . |
45,683 | def UploadAccount ( self , hash_algorithm , hash_key , accounts ) : param = { 'hashAlgorithm' : hash_algorithm , 'signerKey' : hash_key , 'users' : accounts } return self . _InvokeGitkitApi ( 'uploadAccount' , param ) | Uploads multiple accounts to Gitkit server . |
45,684 | def GetPublicCert ( self ) : cert_url = self . google_api_url + 'publicKeys' resp , content = self . http . request ( cert_url ) if resp . status == 200 : return simplejson . loads ( content ) else : raise errors . GitkitServerError ( 'Error response for cert url: %s' % content ) | Download Gitkit public cert . |
45,685 | def _InvokeGitkitApi ( self , method , params = None , need_service_account = True ) : body = simplejson . dumps ( params ) if params else None req = urllib_request . Request ( self . google_api_url + method ) req . add_header ( 'Content-type' , 'application/json' ) if need_service_account : if self . credentials : access_token = self . credentials . get_access_token ( ) . access_token elif self . service_account_email and self . service_account_key : access_token = self . _GetAccessToken ( ) else : raise errors . GitkitClientError ( 'Missing service account credentials' ) req . add_header ( 'Authorization' , 'Bearer ' + access_token ) try : binary_body = body . encode ( 'utf-8' ) if body else None raw_response = urllib_request . urlopen ( req , binary_body ) . read ( ) except urllib_request . HTTPError as err : if err . code == 400 : raw_response = err . read ( ) else : raise return self . _CheckGitkitError ( raw_response ) | Invokes Gitkit API with optional access token for service account . |
45,686 | def _GetAccessToken ( self ) : d = { 'assertion' : self . _GenerateAssertion ( ) , 'grant_type' : 'urn:ietf:params:oauth:grant-type:jwt-bearer' , } try : body = parse . urlencode ( d ) except AttributeError : body = urllib . urlencode ( d ) req = urllib_request . Request ( RpcHelper . TOKEN_ENDPOINT ) req . add_header ( 'Content-type' , 'application/x-www-form-urlencoded' ) binary_body = body . encode ( 'utf-8' ) raw_response = urllib_request . urlopen ( req , binary_body ) return simplejson . loads ( raw_response . read ( ) ) [ 'access_token' ] | Gets oauth2 access token for Gitkit API using service account . |
45,687 | def _GenerateAssertion ( self ) : now = int ( time . time ( ) ) payload = { 'aud' : RpcHelper . TOKEN_ENDPOINT , 'scope' : 'https://www.googleapis.com/auth/identitytoolkit' , 'iat' : now , 'exp' : now + RpcHelper . MAX_TOKEN_LIFETIME_SECS , 'iss' : self . service_account_email } return crypt . make_signed_jwt ( crypt . Signer . from_string ( self . service_account_key ) , payload ) | Generates the signed assertion that will be used in the request . |
45,688 | def _CheckGitkitError ( self , raw_response ) : try : response = simplejson . loads ( raw_response ) if 'error' not in response : return response else : error = response [ 'error' ] if 'code' in error : code = error [ 'code' ] if str ( code ) . startswith ( '4' ) : raise errors . GitkitClientError ( error [ 'message' ] ) else : raise errors . GitkitServerError ( error [ 'message' ] ) except simplejson . JSONDecodeError : pass raise errors . GitkitServerError ( 'null error code from Gitkit server' ) | Raises error if API invocation failed . |
45,689 | def FromDictionary ( cls , dictionary ) : if 'user_id' in dictionary : raise errors . GitkitClientError ( 'use localId instead' ) if 'localId' not in dictionary : raise errors . GitkitClientError ( 'must specify localId' ) if 'email' not in dictionary : raise errors . GitkitClientError ( 'must specify email' ) return cls ( decode = False , ** dictionary ) | Initializes from user specified dictionary . |
45,690 | def ToRequest ( self ) : param = { } if self . email : param [ 'email' ] = self . email if self . user_id : param [ 'localId' ] = self . user_id if self . name : param [ 'displayName' ] = self . name if self . photo_url : param [ 'photoUrl' ] = self . photo_url if self . email_verified is not None : param [ 'emailVerified' ] = self . email_verified if self . password_hash : param [ 'passwordHash' ] = base64 . urlsafe_b64encode ( self . password_hash ) if self . salt : param [ 'salt' ] = base64 . urlsafe_b64encode ( self . salt ) if self . provider_info : param [ 'providerUserInfo' ] = self . provider_info return param | Converts to gitkit api request parameter dict . |
45,691 | def VerifyGitkitToken ( self , jwt ) : certs = self . rpc_helper . GetPublicCert ( ) crypt . MAX_TOKEN_LIFETIME_SECS = 30 * 86400 parsed = None for aud in filter ( lambda x : x is not None , [ self . project_id , self . client_id ] ) : try : parsed = crypt . verify_signed_jwt_with_certs ( jwt , certs , aud ) except crypt . AppIdentityError as e : if "Wrong recipient" not in e . message : return None if parsed : return GitkitUser . FromToken ( parsed ) return None | Verifies a Gitkit token string . |
45,692 | def GetUserByEmail ( self , email ) : user = self . rpc_helper . GetAccountInfoByEmail ( email ) return GitkitUser . FromApiResponse ( user ) | Gets user info by email . |
45,693 | def GetUserById ( self , local_id ) : user = self . rpc_helper . GetAccountInfoById ( local_id ) return GitkitUser . FromApiResponse ( user ) | Gets user info by id . |
45,694 | def UploadUsers ( self , hash_algorithm , hash_key , accounts ) : return self . rpc_helper . UploadAccount ( hash_algorithm , base64 . urlsafe_b64encode ( hash_key ) , [ GitkitUser . ToRequest ( i ) for i in accounts ] ) | Uploads multiple users to Gitkit server . |
45,695 | def GetAllUsers ( self , pagination_size = 10 ) : next_page_token , accounts = self . rpc_helper . DownloadAccount ( None , pagination_size ) while accounts : for account in accounts : yield GitkitUser . FromApiResponse ( account ) next_page_token , accounts = self . rpc_helper . DownloadAccount ( next_page_token , pagination_size ) | Gets all user info from Gitkit server . |
45,696 | def _BuildOobLink ( self , param , mode ) : code = self . rpc_helper . GetOobCode ( param ) if code : parsed = list ( parse . urlparse ( self . widget_url ) ) query = dict ( parse . parse_qsl ( parsed [ 4 ] ) ) query . update ( { 'mode' : mode , 'oobCode' : code } ) try : parsed [ 4 ] = parse . urlencode ( query ) except AttributeError : parsed [ 4 ] = urllib . urlencode ( query ) return code , parse . urlunparse ( parsed ) raise errors . GitkitClientError ( 'invalid request' ) | Builds out - of - band URL . |
45,697 | def char_code ( columns , name = None ) : if name is None : name = 'Char Code Field (' + str ( columns ) + ' columns)' if columns <= 0 : raise BaseException ( ) char_sets = None for char_set in _tables . get_data ( 'character_set' ) : regex = '[ ]{' + str ( 15 - len ( char_set ) ) + '}' + char_set if char_sets is None : char_sets = regex else : char_sets += '|' + regex _character_sets = pp . Regex ( char_sets ) _unicode_1_16b = pp . Regex ( 'U\+0[0-8,A-F]{3}[ ]{' + str ( columns - 6 ) + '}' ) _unicode_2_21b = pp . Regex ( 'U\+0[0-8,A-F]{4}[ ]{' + str ( columns - 7 ) + '}' ) char_code_field = ( _character_sets | _unicode_1_16b | _unicode_2_21b ) char_code_field = char_code_field . setParseAction ( lambda s : s [ 0 ] . strip ( ) ) char_code_field . setName ( name ) return char_code_field | Character set code field . |
45,698 | def make_choice_validator ( choices , default_key = None , normalizer = None ) : def normalize_all ( _choices ) : if normalizer : _choices = [ ( normalizer ( key ) , value ) for key , value in choices ] return _choices choices = normalize_all ( choices ) def choice_validator ( value ) : if normalizer : value = normalizer ( value ) if not value and default_key : value = choices [ default_key ] [ 0 ] results = [ ] for choice , mapped in choices : if value == choice : return mapped if choice . startswith ( value ) : results . append ( ( choice , mapped ) ) if len ( results ) == 1 : return results [ 0 ] [ 1 ] elif not results : raise ValueError ( 'Invalid choice.' ) else : raise ValueError ( 'Choice ambiguous between (%s)' % ', ' . join ( k for k , v in normalize_all ( results ) ) ) return choice_validator | Returns a callable that accepts the choices provided . |
45,699 | def prompt ( question , validator = None , choices = None , default_key = NotImplemented , normalizer = str . lower , _stdin = None , _stdout = None ) : def write_choices ( choice_keys , default_key ) : _stdout . write ( '(' ) _stdout . write ( '/' . join ( choice_keys ) ) _stdout . write ( ') ' ) if default_key is not NotImplemented : _stdout . write ( '[' ) _stdout . write ( choice_keys [ default_key ] ) _stdout . write ( '] ' ) if _stdin is None : _stdin = sys . stdin if _stdout is None : _stdout = sys . stdout _stdout . write ( question ) _stdout . write ( ' ' ) if not check_interactive ( ) : if choices and default_key is not NotImplemented : choice_keys = [ choice for choice , mapped in choices ] write_choices ( choice_keys , default_key ) display , answer = choices [ default_key ] _stdout . write ( display ) _stdout . write ( '\n' ) logger . warning ( 'non-interactive mode; auto-selected default option [%s]' , display ) return answer logger . warning ( 'interactive code triggered within non-interactive session' ) _stdout . write ( 'Aborted.\n' ) return None choice_keys = [ ] if validator is None : if choices : validator = make_choice_validator ( choices , default_key , normalizer ) choice_keys = [ choice for choice , mapped in choices ] else : validator = null_validator answer = NotImplemented while answer is NotImplemented : if choice_keys : write_choices ( choice_keys , default_key ) _stdout . flush ( ) try : answer = validator ( _stdin . readline ( ) . strip ( ) . encode ( locale ) . decode ( locale ) ) except ValueError as e : _stdout . write ( '%s\n' % e ) _stdout . write ( question . splitlines ( ) [ - 1 ] ) _stdout . write ( ' ' ) except KeyboardInterrupt : _stdout . write ( 'Aborted.\n' ) answer = None return answer | Prompt user for question maybe choices and get answer . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.