idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
8,000
def publish(self, topic, dct):
    """Send a dict with internal routing key *topic* to the exchange."""
    body = json.dumps(dct)
    get_logger().info(
        "Publishing message {} on routing key "
        "{}...".format(dct, topic))
    self._channel.basic_publish(
        exchange=self.exchange, routing_key=topic, body=body)
Send a dict with internal routing key to the exchange .
73
11
8,001
def _callback(self, ch, method, properties, body):
    """Internal handler invoked for each received message.

    Decodes the JSON payload and dispatches it to every registered listener.
    """
    get_logger().info("Message received! Calling listeners...")
    routing_key = method.routing_key
    payload = json.loads(body.decode('utf-8'))
    for listener in self.listeners:
        listener(self, routing_key, payload)
Internal method that will be called when receiving message .
73
10
8,002
def _handle_ping ( client , topic , dct ) : if dct [ 'type' ] == 'request' : resp = { 'type' : 'answer' , 'name' : client . name , 'source' : dct } client . publish ( 'ping' , resp )
Internal method that will be called when receiving ping message .
63
11
8,003
def _create_argument_value_pairs ( func , * args , * * kwargs ) : # Capture parameters that have been explicitly specified in function call try : arg_dict = signature ( func ) . bind_partial ( * args , * * kwargs ) . arguments except TypeError : return dict ( ) # Capture parameters that have not been explicitly specified # but have default values arguments = signature ( func ) . parameters for arg_name in arguments : if ( arguments [ arg_name ] . default != Parameter . empty ) and ( arguments [ arg_name ] . name not in arg_dict ) : arg_dict [ arguments [ arg_name ] . name ] = arguments [ arg_name ] . default return arg_dict
Create dictionary with argument names as keys and their passed values as values .
156
14
8,004
def _get_contract_exception_dict ( contract_msg ) : # A pcontract-defined custom exception message is wrapped in a string # that starts with '[START CONTRACT MSG:' and ends with # '[STOP CONTRACT MSG]'. This is done to easily detect if an # exception raised is from a custom contract and thus be able # to easily retrieve the actual exception message start_token = "[START CONTRACT MSG: " stop_token = "[STOP CONTRACT MSG]" # No custom contract if contract_msg . find ( start_token ) == - 1 : return { "num" : 0 , "msg" : "Argument `*[argument_name]*` is not valid" , "type" : RuntimeError , "field" : "argument_name" , } # Custom contract msg_start = contract_msg . find ( start_token ) + len ( start_token ) contract_msg = contract_msg [ msg_start : ] contract_name = contract_msg [ : contract_msg . find ( "]" ) ] contract_msg = contract_msg [ contract_msg . find ( "]" ) + 1 : contract_msg . find ( stop_token ) ] exdict = _CUSTOM_CONTRACTS [ contract_name ] for exvalue in exdict . values ( ) : # pragma: no branch if exvalue [ "msg" ] == contract_msg : return exvalue
Generate message for exception .
307
6
8,005
def _get_custom_contract ( param_contract ) : if not isinstance ( param_contract , str ) : return None for custom_contract in _CUSTOM_CONTRACTS : if re . search ( r"\b{0}\b" . format ( custom_contract ) , param_contract ) : return custom_contract return None
Return True if parameter contract is a custom contract False otherwise .
75
12
8,006
def _get_replacement_token ( msg ) : return ( None if not re . search ( r"\*\[[\w|\W]+\]\*" , msg ) else re . search ( r"\*\[[\w|\W]+\]\*" , msg ) . group ( ) [ 2 : - 2 ] )
Extract replacement token from exception message .
77
8
8,007
def _get_type_name ( type_ ) : # type: (type) -> str name = repr ( type_ ) if name . startswith ( "<" ) : name = getattr ( type_ , "__qualname__" , getattr ( type_ , "__name__" , "" ) ) return name . rsplit ( "." , 1 ) [ - 1 ] or repr ( type_ )
Return a displayable name for the type .
89
9
8,008
def _get_class_frame_source(class_name):
    # type: (str) -> Optional[str]
    """Return source, globals and locals of *class_name* from the stack.

    Walks the frame stack looking for the file defining the class and
    re-tokenizes it to capture the complete class body.
    """
    for frame_info in inspect.stack():
        try:
            with open(frame_info[1]) as fp:
                src = "".join(fp.readlines()[frame_info[2] - 1:])
        except IOError:
            continue
        if not re.search(r"\bclass\b\s+\b{}\b".format(class_name), src):
            continue
        reader = six.StringIO(src).readline
        collected = []
        depth = 0
        base_depth = 0
        seen_base = False
        for tok_type, tok_value, _, _, _ in tokenize.generate_tokens(reader):  # type: ignore
            collected.append((tok_type, tok_value))
            if tok_type == tokenize.INDENT:
                depth += 1
            elif tok_type == tokenize.DEDENT:
                depth -= 1
                # Once back at (or below) the base level, the class is done
                if seen_base and depth <= base_depth:
                    return (
                        tokenize.untokenize(collected),
                        frame_info[0].f_globals,
                        frame_info[0].f_locals,
                    )
            elif not seen_base:
                # First ordinary token fixes the reference indent level
                seen_base = True
                base_depth = depth
    raise TypeError(
        'Unable to retrieve source for class "{}"'.format(class_name))
Return the source code for a class by checking the frame stack .
338
13
8,009
def _is_propertyable ( names , # type: List[str] attrs , # type: Dict[str, Any] annotations , # type: Dict[str, type] attr , # Dict[str, Any] ) : # type: (...) -> bool return ( attr in annotations and not attr . startswith ( "_" ) and not attr . isupper ( ) and "__{}" . format ( attr ) not in names and not isinstance ( getattr ( attrs , attr , None ) , types . MethodType ) )
Determine if an attribute can be replaced with a property .
125
13
8,010
def _create_typed_object_meta(get_fset):
    # type: (Callable[[str, str, Type[_T]], Callable[[_T], None]]) -> type
    """Create a metaclass for typed objects.

    Args:
        get_fset: Callable building a property setter for an attribute
            given its name, private storage name, and annotated type.

    Returns:
        A metaclass that replaces annotated attributes with typed properties.
    """
    def _get_fget(attr, private_attr, type_):
        # type: (str, str, Type[_T]) -> Callable[[], Any]
        """Create a property getter method for an attribute.

        Args:
            attr: The name of the attribute that will be retrieved.
            private_attr: The name of the attribute that will store any
                data related to the attribute.
            type_: The annotated type defining what values can be stored
                in the attribute.

        Returns:
            A function that takes self and retrieves the private attribute
            from self.
        """
        def _fget(self):
            # type: (...) -> Any
            """Get attribute from self without revealing the private name."""
            try:
                return getattr(self, private_attr)
            except AttributeError:
                raise AttributeError(
                    "'{}' object has no attribute '{}'".format(
                        _get_type_name(type_), attr))
        return _fget

    class _AnnotatedObjectMeta(type):
        """A metaclass that reads annotations from a class definition."""

        def __new__(mcs,    # type: Type[_AnnotatedObjectMeta]
                    name,   # type: str
                    bases,  # type: List[type]
                    attrs,  # type: Dict[str, Any]
                    **kwargs  # type: Dict[str, Any]
                    ):
            # type: (...) -> type
            """Create a class whose annotated attrs become typed properties.

            Annotated, public, non-constant, non-method attributes are
            replaced by property objects validating the annotated type.
            """
            annotations = attrs.get("__annotations__", {})
            use_comment_type_hints = (
                not annotations and attrs.get("__module__") != __name__)
            if use_comment_type_hints:
                # Fall back to comment-style type hints read from source
                frame_source = _get_class_frame_source(name)
                annotations = get_type_hints(*frame_source)
            names = list(attrs) + list(annotations)
            typed_attrs = {}
            for attr in names:
                typed_attrs[attr] = attrs.get(attr)
                if _is_propertyable(names, attrs, annotations, attr):
                    private_attr = "__{}".format(attr)
                    if attr in attrs:
                        typed_attrs[private_attr] = attrs[attr]
                    # A None default widens the annotation to Optional
                    type_ = (
                        Optional[annotations[attr]]
                        if not use_comment_type_hints
                        and attr in attrs
                        and attrs[attr] is None
                        else annotations[attr]
                    )
                    typed_attrs[attr] = property(
                        _get_fget(attr, private_attr, type_),
                        get_fset(attr, private_attr, type_),
                    )
            properties = [
                attr for attr in annotations
                if _is_propertyable(names, attrs, annotations, attr)
            ]
            typed_attrs["_tp__typed_properties"] = properties
            typed_attrs["_tp__required_typed_properties"] = [
                attr for attr in properties
                if (attr not in attrs
                    or attrs[attr] is None and use_comment_type_hints)
                and NoneType not in getattr(annotations[attr], "__args__", ())
            ]
            return super(_AnnotatedObjectMeta, mcs).__new__(  # type: ignore
                mcs, name, bases, typed_attrs, **kwargs)

    return _AnnotatedObjectMeta
Create a metaclass for typed objects .
886
9
8,011
def _tp__get_typed_properties ( self ) : try : return tuple ( getattr ( self , p ) for p in self . _tp__typed_properties ) except AttributeError : raise NotImplementedError
Return a tuple of typed attrs that can be used for comparisons .
50
14
8,012
def run(cls, routes, *args, **kwargs):  # pragma: no cover
    """Run a web application, taking HOST/PORT from the environment."""
    app = init(cls, routes, *args, **kwargs)
    host = os.getenv('HOST', '0.0.0.0')
    port = int(os.getenv('PORT', 8000))
    aiohttp.web.run_app(app, port=port, host=host)
Run a web application .
104
5
8,013
def add(self, vector, InterventionAnophelesParams=None):
    """Add a vector to the entomology section.

    *vector* is an xml snippet describing the mosquito.
    """
    # TODO (preserved from original):
    # 1. If there are GVI interventions, for every GVI add an anophelesParams
    #    section (gvi_anophelesParams field in AnophelesSnippets models)
    # 2. If there are ITN interventions, for every ITN add an anophelesParams
    #    section (itn_anophelesParams field in AnophelesSnippets models)
    # 3. If there are IRS interventions, for every IRS section add an
    #    anophelesParams section (irs_anophelesParams field in
    #    AnophelesSnippets models)
    assert isinstance(vector, six.string_types)
    et = ElementTree.fromstring(vector)
    # Parse into a Vector to validate the snippet before inserting it
    mosquito = Vector(et)
    assert isinstance(mosquito.mosquito, str)
    assert isinstance(mosquito.propInfected, float)
    assert len(mosquito.seasonality.monthlyValues) == 12
    position = len(self.et.findall("anopheles"))
    self.et.insert(position, et)
Add a vector to entomology section . vector is either ElementTree or xml snippet
243
17
8,014
def _format_msg ( text , width , indent = 0 , prefix = "" ) : text = repr ( text ) . replace ( "`" , "\\`" ) . replace ( "\\n" , " ``\\n`` " ) sindent = " " * indent if not prefix else prefix wrapped_text = textwrap . wrap ( text , width , subsequent_indent = sindent ) # [1:-1] eliminates quotes generated by repr in first line return ( "\n" . join ( wrapped_text ) ) [ 1 : - 1 ] . rstrip ( )
r Format exception message .
125
5
8,015
def _validate_fname ( fname , arg_name ) : if fname is not None : msg = "Argument `{0}` is not valid" . format ( arg_name ) if ( not isinstance ( fname , str ) ) or ( isinstance ( fname , str ) and ( "\0" in fname ) ) : raise RuntimeError ( msg ) try : if not os . path . exists ( fname ) : os . access ( fname , os . W_OK ) except ( TypeError , ValueError ) : # pragma: no cover raise RuntimeError ( msg )
Validate that a string is a valid file name .
131
11
8,016
def _build_ex_tree(self):
    """Construct the exception tree from trace data."""
    sep = self._exh_obj.callables_separator
    data = self._exh_obj.exceptions_db
    if not data:
        raise RuntimeError("Exceptions database is empty")
    # Prefix every callable with a synthetic root node, needed when tracing
    # is done through a test runner excluded from the callable path
    for item in data:
        item["name"] = "root{sep}{name}".format(sep=sep, name=item["name"])
    self._tobj = ptrie.Trie(sep)
    try:
        self._tobj.add_nodes(data)
    except ValueError as eobj:
        if str(eobj).startswith("Illegal node name"):
            raise RuntimeError("Exceptions do not have a common callable")
        raise
    # Walk down single-child, data-free nodes: the node closest to the
    # first multi-leaf branching (or first callable with exceptions)
    # becomes the new root
    node = self._tobj.root_name
    while (len(self._tobj.get_children(node)) == 1) and (
            not self._tobj.get_data(node)):
        node = self._tobj.get_children(node)[0]
    if not self._tobj.is_root(node):  # pragma: no branch
        self._tobj.make_root(node)
        nsep = self._tobj.node_separator
        prefix = nsep.join(node.split(self._tobj.node_separator)[:-1])
        self._tobj.delete_prefix(prefix)
    self._print_ex_tree()
Construct exception tree from trace .
379
6
8,017
def _build_module_db ( self ) : tdict = collections . defaultdict ( lambda : [ ] ) for callable_name , callable_dict in self . _exh_obj . callables_db . items ( ) : fname , line_no = callable_dict [ "code_id" ] cname = ( "{cls_name}.__init__" . format ( cls_name = callable_name ) if callable_dict [ "type" ] == "class" else callable_name ) tdict [ fname ] . append ( { "name" : cname , "line" : line_no } ) for fname in tdict . keys ( ) : self . _module_obj_db [ fname ] = sorted ( tdict [ fname ] , key = lambda idict : idict [ "line" ] )
Build database of module callables sorted by line number .
190
11
8,018
def _process_exlist ( self , exc , raised ) : if ( not raised ) or ( raised and exc . endswith ( "*" ) ) : return exc [ : - 1 ] if exc . endswith ( "*" ) else exc return None
Remove raised info from exception message and create separate list for it .
57
13
8,019
def _set_depth ( self , depth ) : if depth and ( ( not isinstance ( depth , int ) ) or ( isinstance ( depth , int ) and ( depth < 0 ) ) ) : raise RuntimeError ( "Argument `depth` is not valid" ) self . _depth = depth
Depth setter .
64
4
8,020
def _set_exclude ( self , exclude ) : if exclude and ( ( not isinstance ( exclude , list ) ) or ( isinstance ( exclude , list ) and any ( [ not isinstance ( item , str ) for item in exclude ] ) ) ) : raise RuntimeError ( "Argument `exclude` is not valid" ) self . _exclude = exclude
Exclude setter .
79
5
8,021
def get_sphinx_autodoc(self, depth=None, exclude=None, width=72,
                       error=False, raised=False, no_comment=False):
    r"""Return exception list in reST, auto-determining the callable name."""
    # cog encodes the code file name as "<module file>+<line number>", so
    # split the caller's file name on the last '+' to recover both parts
    frame = sys._getframe(1)
    plus_index = frame.f_code.co_filename.rfind("+")
    fname = os.path.abspath(frame.f_code.co_filename[:plus_index])
    line_num = int(frame.f_code.co_filename[plus_index + 1:])
    # Locate the callable containing that line, then delegate to
    # get_sphinx_doc() with its name
    module_db = self._module_obj_db[fname]
    names = [entry["name"] for entry in module_db]
    line_nums = [entry["line"] for entry in module_db]
    name = names[bisect.bisect(line_nums, line_num) - 1]
    return self.get_sphinx_doc(
        name=name,
        depth=depth,
        exclude=exclude,
        width=width,
        error=error,
        raised=raised,
        no_comment=no_comment,
    )
r Return exception list in reStructuredText _ auto - determining callable name .
316
17
8,022
def resize(self, size):
    """Grow this array to *size* elements; shrinking is not allowed.

    Raises:
        ValueError: If *size* is smaller than the current length.
    """
    if size < len(self):
        raise ValueError("Value is out of bound. Array can't be shrinked")
    # Pad with zero-valued bins up to the requested size
    for _ in range(size - self.__size):
        self.__array.append(WBinArray(0, self.__class__.byte_size))
    self.__size = size
Grow this array to the specified length. This array can't be shrunk.
85
15
8,023
def swipe(self):
    """Return a new array with this array's bytes in reverse order.

    This array itself is left unchanged.
    """
    length = len(self)
    mirrored = WFixedSizeByteArray(length)
    for offset in range(length):
        mirrored[length - offset - 1] = self[offset]
    return mirrored
Mirror the current array value in reverse: bytes that had a greater index will have a lesser index and vice versa. This method doesn't change this array; it creates a new one and returns it as the result.
48
43
8,024
def mime_type(filename):
    """Guess the MIME type for the given file name."""
    # TODO: write lock-free mime_type function
    try:
        __mime_lock.acquire()
        extension = filename.split(".")[-1]
        # Types libmagic commonly misreports are special-cased first
        if extension == "woff2":
            return "application/font-woff2"
        if extension == "css":
            return "text/css"
        detected = magic.from_file(filename, mime=True)
        # Compatibility fix: some versions return bytes, some str
        if isinstance(detected, bytes):
            detected = detected.decode()
        if detected == "text/plain":
            # Let the extension-based guesser refine text/plain (js-detection)
            guessed_type = mimetypes.guess_type(filename)[0]
            if guessed_type:
                return guessed_type
        return detected
    finally:
        __mime_lock.release()
Guess mime type for the given file name
182
10
8,025
def _validate_type ( self , item , name ) : if item is None : # don't validate None items, since they'll be caught by the portion # of the validator responsible for handling `required`ness return if not isinstance ( item , self . allowed_types ) : item_class_name = item . __class__ . __name__ raise ArgumentError ( name , "Expected one of %s, but got `%s`" % ( self . allowed_types , item_class_name ) )
Validate the item against allowed_types .
112
9
8,026
def _validate_required ( self , item , name ) : if self . required is True and item is None : raise ArgumentError ( name , "This argument is required." )
Validate that the item is present if it s required .
38
12
8,027
def doc_dict(self):
    """Return the documentation dictionary for this argument."""
    doc = {
        'type': self.__class__.__name__,
        'description': self.description,
        'default': self.default,
        'required': self.required,
    }
    # Optional long-form description
    if hasattr(self, 'details'):
        doc['detailed_description'] = self.details
    return doc
Returns the documentation dictionary for this argument .
75
8
8,028
def validate_items(self, input_list):
    """Validate that every item in *input_list* has the configured type.

    Returns the list of validated (possibly coerced) items.
    """
    # NOTE: this may lead to confusing error messages; validation and
    # error handling here deserve a better design, but this is workable
    return [
        self.list_item_type.validate(item, self.item_name)
        for item in input_list
    ]
Validates that items in the list are of the type specified .
103
13
8,029
def startserver(self, hostname="localhost", port=8080, daemon=False,
                handle_sigint=True):
    """Start the json-rpc service."""
    if daemon:
        # TODO: start as daemon similar to bitcoind
        print("Sorry daemon server not supported just yet.")
        return
    print("Starting %s json-rpc service at http://%s:%s"
          % (self.__class__.__name__, hostname, port))
    self._http_server = HTTPServer(
        server_address=(hostname, int(port)),
        RequestHandlerClass=self.get_http_request_handler())
    if handle_sigint:
        def sigint_handler(signum, frame):
            # Shut down cleanly on Ctrl-C
            self._post_shutdown()
            sys.exit(0)
        signal.signal(signal.SIGINT, sigint_handler)
    self._http_server.serve_forever()
Start json - rpc service .
198
7
8,030
def _get_asym_hel ( self , d ) : # get data 1+ 2+ 1- 2- d0 = d [ 0 ] d1 = d [ 2 ] d2 = d [ 1 ] d3 = d [ 3 ] # pre-calcs denom1 = d0 + d1 denom2 = d2 + d3 # check for div by zero denom1 [ denom1 == 0 ] = np . nan denom2 [ denom2 == 0 ] = np . nan # asymmetries in both helicities asym_hel = [ ( d0 - d1 ) / denom1 , ( d2 - d3 ) / denom2 ] # errors # https://www.wolframalpha.com/input/?i=%E2%88%9A(F*(derivative+of+((F-B)%2F(F%2BB))+with+respect+to+F)%5E2+%2B+B*(derivative+of+((F-B)%2F(F%2BB))+with+respect+to+B)%5E2) asym_hel_err = [ 2 * np . sqrt ( d0 * d1 / np . power ( denom1 , 3 ) ) , 2 * np . sqrt ( d2 * d3 / np . power ( denom2 , 3 ) ) ] # remove nan for i in range ( 2 ) : asym_hel [ i ] [ np . isnan ( asym_hel [ i ] ) ] = 0. asym_hel_err [ i ] [ np . isnan ( asym_hel_err [ i ] ) ] = 0. # exit return [ [ asym_hel [ 1 ] , asym_hel_err [ 1 ] ] , # something wrong with file? [ asym_hel [ 0 ] , asym_hel_err [ 0 ] ] ]
Find the asymmetry of each helicity .
425
9
8,031
def _get_asym_comb ( self , d ) : # get data d0 = d [ 0 ] d1 = d [ 2 ] d2 = d [ 1 ] d3 = d [ 3 ] # pre-calcs r_denom = d0 * d3 r_denom [ r_denom == 0 ] = np . nan r = np . sqrt ( ( d1 * d2 / r_denom ) ) r [ r == - 1 ] = np . nan # combined asymmetry asym_comb = ( r - 1 ) / ( r + 1 ) # check for div by zero d0 [ d0 == 0 ] = np . nan d1 [ d1 == 0 ] = np . nan d2 [ d2 == 0 ] = np . nan d3 [ d3 == 0 ] = np . nan # error in combined asymmetry asym_comb_err = r * np . sqrt ( 1 / d1 + 1 / d0 + 1 / d3 + 1 / d2 ) / np . square ( r + 1 ) # replace nan with zero asym_comb [ np . isnan ( asym_comb ) ] = 0. asym_comb_err [ np . isnan ( asym_comb_err ) ] = 0. return [ asym_comb , asym_comb_err ]
Find the combined asymmetry for slr runs . Elegant 4 - counter method .
291
18
8,032
def _get_1f_sum_scans ( self , d , freq ) : # combine scans: values with same frequency unique_freq = np . unique ( freq ) sum_scans = [ [ ] for i in range ( len ( d ) ) ] for f in unique_freq : tag = freq == f for i in range ( len ( d ) ) : sum_scans [ i ] . append ( np . sum ( d [ i ] [ tag ] ) ) return ( np . array ( unique_freq ) , np . array ( sum_scans ) )
Sum counts in each frequency bin over 1f scans .
129
11
8,033
def get_pulse_s(self):
    """Return the pulse duration in seconds for pulsed measurements.

    Raises:
        AttributeError: If ppg dwelltime or beam_on were not logged.
    """
    try:
        dwelltime = self.ppg.dwelltime.mean
        beam_on = self.ppg.beam_on.mean
    except AttributeError:
        raise AttributeError("Missing logged ppg parameter: dwelltime "
                             "or beam_on")
    # /1000 presumably converts ms to s -- TODO confirm units
    return dwelltime * beam_on / 1000.
Get pulse duration in seconds for pulsed measurements .
77
10
8,034
def extract_endpoints(api_module):
    """Return instantiated endpoints from an API implementation module.

    Raises:
        ValueError: If *api_module* lacks a usable `endpoints` attribute.
    """
    if not hasattr(api_module, 'endpoints'):
        raise ValueError(("pale.extract_endpoints expected the passed in "
            "api_module to have an `endpoints` attribute, but it didn't!"))
    endpoints = api_module.endpoints
    if isinstance(endpoints, types.ModuleType):
        classes = [v for (k, v) in inspect.getmembers(endpoints,
                                                      inspect.isclass)]
    elif isinstance(endpoints, (list, tuple)):
        classes = endpoints
    else:
        raise ValueError("Endpoints is not a module or list type!")
    instances = []
    for cls in classes:
        # Skip the abstract base endpoint classes themselves, and anything
        # that is not an Endpoint subclass
        if cls in (Endpoint, PatchEndpoint, PutResourceEndpoint):
            continue
        if Endpoint not in inspect.getmro(cls):
            continue
        source_code = inspect.getsource(cls)
        # Record a declared permission (from the @requires_permission
        # decorator) on the class before instantiating it
        if "@requires_permission" in source_code:
            permission_match = re.search(
                r"@requires_permission\(\[?[\'\"]+(\w+)[\'\"]+",
                source_code)
            # was `!= None`; identity test is the correct idiom
            if permission_match is not None:
                cls._requires_permission = permission_match.group(1)
        instances.append(cls())
    return instances
Return the endpoints from an API implementation module .
287
10
8,035
def extract_resources(api_module):
    """Return the unique resource classes used by an API module."""
    resource_classes = {
        endpoint._returns.__class__
        for endpoint in extract_endpoints(api_module)
    }
    return list(resource_classes)
Return the resources from an API implementation module .
52
9
8,036
def load_template_source(self, template_name, template_dirs=None):
    """Template loader that loads templates from zipped modules."""
    log.error("Calling zip loader")
    # Check every app template folder that lives inside a .zip module
    for folder in app_template_dirs:
        if ".zip/" not in folder.replace("\\", "/"):
            continue
        lib_file, relative_folder = get_zip_file_and_relative_path(folder)
        log.error(lib_file, relative_folder)
        try:
            z = zipfile.ZipFile(lib_file)
            log.error(relative_folder + template_name)
            template_path_in_zip = os.path.join(
                relative_folder, template_name).replace("\\", "/")
            source = z.read(template_path_in_zip)
        except (IOError, KeyError) as e:
            import traceback
            log.error(traceback.format_exc())
            # Best-effort close; archive may never have opened
            try:
                z.close()
            except:
                pass
            continue
        z.close()
        # We found a template, so return the source.
        template_path = "%s:%s" % (lib_file, template_path_in_zip)
        return (source, template_path)
    # If we reach here, the template couldn't be loaded
    raise TemplateDoesNotExist(template_name)
Template loader that loads templates from zipped modules .
279
10
8,037
def fetch(self, start=None, stop=None):
    """Fetch log records in [start, stop) and return them as a list."""
    # Default to the full range when indices are falsy (note: an explicit
    # 0 for `stop` therefore also means "to the end")
    if not start:
        start = 0
    if not stop:
        stop = len(self.log)
    # Clamp indices to the valid range
    start = max(start, 0)
    stop = min(stop, len(self.log))
    # Clear the fetch flag; emit() sets it again when new data arrives
    self.waitForFetch = False
    return self.log[start:stop]
Fetch log records and return them as a list .
127
11
8,038
def bind_blueprint(pale_api_module, flask_blueprint):
    """Bind an implemented pale API module to a Flask Blueprint."""
    if not isinstance(flask_blueprint, Blueprint):
        raise TypeError(("pale.flask_adapter.bind_blueprint expected the "
            "passed in flask_blueprint to be an instance of "
            "Blueprint, but it was an instance of %s instead.")
            % (type(flask_blueprint),))
    if not pale.is_pale_module(pale_api_module):
        raise TypeError(("pale.flask_adapter.bind_blueprint expected the "
            "passed in pale_api_module to be a module, and to "
            "have a _module_type defined to equal "
            "pale.ImplementationModule, but it was an instance of "
            "%s instead.") % (type(pale_api_module),))
    # Register each endpoint's handler on the blueprint
    for endpoint in pale.extract_endpoints(pale_api_module):
        endpoint._set_response_class(RESPONSE_CLASS)
        flask_blueprint.add_url_rule(
            endpoint._uri,
            endpoint._route_name,
            view_func=ContextualizedHandler(endpoint._execute),
            methods=[endpoint._http_method])
Binds an implemented pale API module to a Flask Blueprint .
290
12
8,039
def cookie_name_check(cookie_name):
    """Check a cookie name for validity; return True if the name is valid."""
    # Valid names are non-empty and match no non-compliant pattern
    bad_match = WHTTPCookie.cookie_name_non_compliance_re.match(
        cookie_name.encode('us-ascii'))
    return len(cookie_name) > 0 and bad_match is None
Check cookie name for validity . Return True if name is valid
64
12
8,040
def cookie_attr_value_check(attr_name, attr_value):
    """Check a cookie attribute value; return True if the value is valid."""
    # Raises UnicodeEncodeError early for non-ascii values
    attr_value.encode('us-ascii')
    pattern = WHTTPCookie.cookie_attr_value_compliance[attr_name]
    return pattern.match(attr_value) is not None
Check cookie attribute value for validity . Return True if value is valid
66
13
8,041
def __attr_name ( self , name ) : if name not in self . cookie_attr_value_compliance . keys ( ) : suggested_name = name . replace ( '_' , '-' ) . lower ( ) if suggested_name not in self . cookie_attr_value_compliance . keys ( ) : raise ValueError ( 'Invalid attribute name is specified' ) name = suggested_name return name
Return a suitable and valid attribute name. This method replaces underscore characters with dashes. If the name is invalid, a ValueError exception is raised.
87
24
8,042
def remove_cookie(self, cookie_name):
    """Remove a cookie by its name; unknown names are ignored.

    Raises:
        RuntimeError: If this cookie jar is read-only.
    """
    if self.__ro_flag:
        raise RuntimeError('Read-only cookie-jar changing attempt')
    # pop with default avoids a separate membership check
    self.__cookies.pop(cookie_name, None)
Remove cookie by its name
62
5
8,043
def ro(self):
    """Return a read-only copy of this cookie jar."""
    ro_jar = WHTTPCookieJar()
    # Copy every cookie as a read-only cookie
    for cookie in self.__cookies.values():
        ro_jar.add_cookie(cookie.ro())
    ro_jar.__ro_flag = True
    return ro_jar
Return read - only copy
59
5
8,044
def import_simple_cookie(cls, simple_cookie):
    """Create a cookie jar from a SimpleCookie object."""
    cookie_jar = WHTTPCookieJar()
    for cookie_name in simple_cookie.keys():
        morsel = simple_cookie[cookie_name]
        # Collect only the attributes that are actually set
        cookie_attrs = {
            attr_name: morsel[attr_name]
            for attr_name in WHTTPCookie.cookie_attr_value_compliance.keys()
            if morsel[attr_name] != ''
        }
        cookie_jar.add_cookie(
            WHTTPCookie(cookie_name, morsel.value, **cookie_attrs))
    return cookie_jar
Create cookie jar from SimpleCookie object
154
8
8,045
def is_prime(n):
    """Check whether *n* is a prime number.

    Fixes the original behavior, which reported 0, 1 and negative
    numbers as prime.
    """
    if n < 2:
        # 0, 1 and negatives are not prime
        return False
    if n % 2 == 0:
        return n == 2
    # Trial division by odd numbers up to sqrt(n)
    return all(n % i for i in range(3, int(math.sqrt(n)) + 1, 2))
Check if n is a prime number
50
7
8,046
def loadFile(self, fileName):
    """Display the file associated with the appletID."""
    # Assign QFile object with the current name.
    self.file = QtCore.QFile(fileName)
    if self.file.exists():
        # Context manager closes the handle (the original leaked the
        # file object returned by open())
        with open(fileName) as fobj:
            self.qteText.append(fobj.read())
    else:
        # NOTE(review): the message interpolates the applet ID, not the
        # missing file name -- preserved as-is, but looks unintentional
        msg = "File <b>{}</b> does not exist".format(self.qteAppletID())
        self.qteLogger.info(msg)
Display the file associated with the appletID .
103
10
8,047
def _encode(self):
    """Generate a recursive JSON-safe representation of the ent."""
    obj = {}
    # Public attributes with safe (JSON-serializable) types
    for key, value in self.__dict__.items():
        if not key.startswith('_') and type(value) in SAFE_TYPES:
            obj[key] = value
    # Nested ents encode themselves recursively (no underscore filter,
    # matching the original behavior)
    for key, value in self.__dict__.items():
        if isinstance(value, Ent):
            obj[key] = value._encode()
    return obj
Generate a recursive JSON representation of the ent .
93
10
8,048
def merge(cls, *args, **kwargs):
    """Create a new Ent from one or more existing Ents.

    Keys in later Ents overwrite keys of earlier ones.  Later keys whose
    type differs from the earlier value are ignored; with newkeys=True,
    keys absent from the first Ent are added.
    """
    newkeys = bool(kwargs.get('newkeys', False))
    ignore = kwargs.get('ignore', list())
    if len(args) < 1:
        raise ValueError('no ents given to Ent.merge()')
    elif not all(isinstance(s, Ent) for s in args):
        raise ValueError('all positional arguments to Ent.merge() must '
                         'be instances of Ent')
    data = cls.load(args[0])
    for ent in args[1:]:
        for key, value in ent.__dict__.items():
            if key in ignore:
                continue
            if key in data.__dict__:
                existing = data.__dict__[key]
                # Only same-typed values overwrite; ents merge recursively
                if type(value) == type(existing):
                    if isinstance(existing, Ent):
                        data.__dict__[key] = cls.merge(
                            existing, value, **kwargs)
                    else:
                        data.__dict__[key] = cls.load(value)
            elif newkeys:
                data.__dict__[key] = value
    return data
Create a new Ent from one or more existing Ents . Keys in the later Ent objects will overwrite the keys of the previous Ents . Later keys of different type than in earlier Ents will be bravely ignored .
264
44
8,049
def diff(cls, *args, **kwargs):
    """Create a new Ent holding the differences between two or more Ents.

    Keys in later Ents whose values differ from earlier ones appear in
    the result with the latest value seen.  Later keys of different type
    are ignored; newkeys=True also records keys absent earlier.
    """
    newkeys = bool(kwargs.get('newkeys', False))
    ignore = kwargs.get('ignore', list())
    if len(args) < 2:
        raise ValueError('less than two ents given to Ent.diff()')
    elif not all(isinstance(s, Ent) for s in args):
        raise ValueError('all positional arguments to Ent.diff() must '
                         'be instances of Ent')
    previous = args[0]
    differences = Ent()
    for current in args[1:]:
        for key, value in current.__dict__.items():
            if key in ignore:
                continue
            if key in previous.__dict__:
                old = previous.__dict__[key]
                if type(value) == type(old):
                    if isinstance(old, Ent):
                        # Nested ents diff recursively; record only
                        # non-empty deltas
                        delta = cls.diff(old, value, **kwargs)
                        if len(delta.__dict__):
                            differences.__dict__[key] = delta
                    elif old != value:
                        differences.__dict__[key] = cls.load(value)
            elif newkeys:
                differences.__dict__[key] = cls.load(value)
        previous = current
    return differences
Create a new Ent representing the differences in two or more existing Ents . Keys in the later Ents with values that differ from the earlier Ents will be present in the final Ent with the latest value seen for that key . Later keys of different type than in earlier Ents will be bravely ignored .
294
62
8,050
def subclasses(cls):
    """Return a set of all subclasses of *cls*, recursively."""
    seen = set()
    pending = {cls}
    while pending:
        current = pending.pop()
        seen.add(current)
        # Queue any subclasses that have not been visited yet
        pending.update(
            sub for sub in current.__subclasses__() if sub not in seen)
    # The starting class itself is not one of its own subclasses
    seen.remove(cls)
    return seen
Return a set of all Ent subclasses recursively .
74
12
8,051
def base_url(self):
    """Return protocol + hostname for this location."""
    if self.location in self.known_locations:
        return self.known_locations[self.location]
    # Dotted names and localhost are taken as complete hostnames
    if '.' in self.location or self.location == 'localhost':
        return 'https://' + self.location
    # Bare names get the API host suffix appended
    return 'https://' + self.location + API_HOST_SUFFIX
Protocol + hostname
78
5
8,052
def _build_exclusion_list ( exclude ) : mod_files = [ ] if exclude : for mod in exclude : mdir = None mod_file = None for token in mod . split ( "." ) : try : mfile , mdir , _ = imp . find_module ( token , mdir and [ mdir ] ) if mfile : mod_file = mfile . name mfile . close ( ) except ImportError : msg = "Source for module {mod_name} could not be found" raise ValueError ( msg . format ( mod_name = mod ) ) if mod_file : mod_files . append ( mod_file . replace ( ".pyc" , ".py" ) ) return mod_files
Build file names list of modules to exclude from exception handling .
156
12
8,053
def _invalid_frame(fobj):
    """Return True when a stack frame should not be processed."""
    fin = fobj.f_code.co_filename
    # Frames from known-internal modules or non-file sources are invalid
    in_invalid_module = any(
        fin.endswith(item) for item in _INVALID_MODULES_LIST)
    return in_invalid_module or not os.path.isfile(fin)
Select valid stack frame to process .
70
7
8,054
def _sorted_keys_items ( dobj ) : keys = sorted ( dobj . keys ( ) ) for key in keys : yield key , dobj [ key ]
Return dictionary items sorted by key .
37
7
8,055
def addex(extype, exmsg, condition=None, edata=None):
    r"""Add an exception to the global exception handler.

    Returns the bound ``craise`` method of the registered exception object.
    """
    ex_obj = _ExObj(extype, exmsg, condition, edata)
    return ex_obj.craise
r Add an exception in the global exception handler .
39
10
8,056
def addai ( argname , condition = None ) : # pylint: disable=C0123 if not isinstance ( argname , str ) : raise RuntimeError ( "Argument `argname` is not valid" ) if ( condition is not None ) and ( type ( condition ) != bool ) : raise RuntimeError ( "Argument `condition` is not valid" ) obj = _ExObj ( RuntimeError , "Argument `{0}` is not valid" . format ( argname ) , condition ) return obj . craise
r Add an AI exception in the global exception handler .
116
11
8,057
def get_or_create_exh_obj ( full_cname = False , exclude = None , callables_fname = None ) : if not hasattr ( __builtin__ , "_EXH" ) : set_exh_obj ( ExHandle ( full_cname = full_cname , exclude = exclude , callables_fname = callables_fname ) ) return get_exh_obj ( )
r Return global exception handler if set otherwise create a new one and return it .
94
16
8,058
def _flatten_ex_dict ( self ) : odict = { } for _ , fdict in self . _ex_dict . items ( ) : for ( extype , exmsg ) , value in fdict . items ( ) : key = value [ "name" ] odict [ key ] = copy . deepcopy ( value ) del odict [ key ] [ "name" ] odict [ key ] [ "type" ] = extype odict [ key ] [ "msg" ] = exmsg return odict
Flatten structure of exceptions dictionary .
113
7
8,059
def _format_msg ( self , msg , edata ) : edata = edata if isinstance ( edata , list ) else [ edata ] for fdict in edata : if "*[{token}]*" . format ( token = fdict [ "field" ] ) not in msg : raise RuntimeError ( "Field {token} not in exception message" . format ( token = fdict [ "field" ] ) ) msg = msg . replace ( "*[{token}]*" . format ( token = fdict [ "field" ] ) , "{value}" ) . format ( value = fdict [ "value" ] ) return msg
Substitute parameters in exception message .
143
8
8,060
def _get_exceptions_db ( self ) : template = "{extype} ({exmsg}){raised}" if not self . _full_cname : # When full callable name is not used the calling path is # irrelevant and there is no function associated with an # exception ret = [ ] for _ , fdict in self . _ex_dict . items ( ) : for key in fdict . keys ( ) : ret . append ( { "name" : fdict [ key ] [ "name" ] , "data" : template . format ( extype = _ex_type_str ( key [ 0 ] ) , exmsg = key [ 1 ] , raised = "*" if fdict [ key ] [ "raised" ] [ 0 ] else "" , ) , } ) return ret # When full callable name is used, all calling paths are saved ret = [ ] for fdict in self . _ex_dict . values ( ) : for key in fdict . keys ( ) : for func_name in fdict [ key ] [ "function" ] : rindex = fdict [ key ] [ "function" ] . index ( func_name ) raised = fdict [ key ] [ "raised" ] [ rindex ] ret . append ( { "name" : self . decode_call ( func_name ) , "data" : template . format ( extype = _ex_type_str ( key [ 0 ] ) , exmsg = key [ 1 ] , raised = "*" if raised else "" , ) , } ) return ret
Return a list of dictionaries suitable to be used with ptrie module .
334
15
8,061
def _get_ex_data ( self ) : func_id , func_name = self . _get_callable_path ( ) if self . _full_cname : func_name = self . encode_call ( func_name ) return func_id , func_name
Return hierarchical function name .
61
5
8,062
def _property_search ( self , fobj ) : # Get class object scontext = fobj . f_locals . get ( "self" , None ) class_obj = scontext . __class__ if scontext is not None else None if not class_obj : del fobj , scontext , class_obj return None # Get class properties objects class_props = [ ( member_name , member_obj ) for member_name , member_obj in inspect . getmembers ( class_obj ) if isinstance ( member_obj , property ) ] if not class_props : del fobj , scontext , class_obj return None class_file = inspect . getfile ( class_obj ) . replace ( ".pyc" , ".py" ) class_name = self . _callables_obj . get_callable_from_line ( class_file , inspect . getsourcelines ( class_obj ) [ 1 ] ) # Get properties actions prop_actions_dicts = { } for prop_name , prop_obj in class_props : prop_dict = { "fdel" : None , "fget" : None , "fset" : None } for action in prop_dict : action_obj = getattr ( prop_obj , action ) if action_obj : # Unwrap action object. Contracts match the wrapped # code object while exceptions registered in the # body of the function/method which has decorators # match the unwrapped object prev_func_obj , next_func_obj = ( action_obj , getattr ( action_obj , "__wrapped__" , None ) , ) while next_func_obj : prev_func_obj , next_func_obj = ( next_func_obj , getattr ( next_func_obj , "__wrapped__" , None ) , ) prop_dict [ action ] = [ id ( _get_func_code ( action_obj ) ) , id ( _get_func_code ( prev_func_obj ) ) , ] prop_actions_dicts [ prop_name ] = prop_dict # Create properties directory func_id = id ( fobj . f_code ) desc_dict = { "fget" : "getter" , "fset" : "setter" , "fdel" : "deleter" } for prop_name , prop_actions_dict in prop_actions_dicts . items ( ) : for action_name , action_id_list in prop_actions_dict . items ( ) : if action_id_list and ( func_id in action_id_list ) : prop_name = "." . join ( [ class_name , prop_name ] ) del fobj , scontext , class_obj , class_props return "{prop_name}({prop_action})" . 
format ( prop_name = prop_name , prop_action = desc_dict [ action_name ] ) return None
Return full name if object is a class property otherwise return None .
647
13
8,063
def _raise_exception ( self , eobj , edata = None ) : _ , _ , tbobj = sys . exc_info ( ) if edata : emsg = self . _format_msg ( eobj [ "msg" ] , edata ) _rwtb ( eobj [ "type" ] , emsg , tbobj ) else : _rwtb ( eobj [ "type" ] , eobj [ "msg" ] , tbobj )
Raise exception by name .
105
6
8,064
def _unwrap_obj ( self , fobj , fun ) : try : prev_func_obj , next_func_obj = ( fobj . f_globals [ fun ] , getattr ( fobj . f_globals [ fun ] , "__wrapped__" , None ) , ) while next_func_obj : prev_func_obj , next_func_obj = ( next_func_obj , getattr ( next_func_obj , "__wrapped__" , None ) , ) return ( prev_func_obj , inspect . getfile ( prev_func_obj ) . replace ( ".pyc" , "py" ) ) except ( KeyError , AttributeError , TypeError ) : # KeyErrror: fun not in fobj.f_globals # AttributeError: fobj.f_globals does not have # a __wrapped__ attribute # TypeError: pref_func_obj does not have a file associated with it return None , None
Unwrap decorators .
220
5
8,065
def _validate_edata ( self , edata ) : # pylint: disable=R0916 if edata is None : return True if not ( isinstance ( edata , dict ) or _isiterable ( edata ) ) : return False edata = [ edata ] if isinstance ( edata , dict ) else edata for edict in edata : if ( not isinstance ( edict , dict ) ) or ( isinstance ( edict , dict ) and ( ( "field" not in edict ) or ( "field" in edict and ( not isinstance ( edict [ "field" ] , str ) ) ) or ( "value" not in edict ) ) ) : return False return True
Validate edata argument of raise_exception_if method .
158
14
8,066
def add_exception ( self , exname , extype , exmsg ) : if not isinstance ( exname , str ) : raise RuntimeError ( "Argument `exname` is not valid" ) number = True try : int ( exname ) except ValueError : number = False if number : raise RuntimeError ( "Argument `exname` is not valid" ) if not isinstance ( exmsg , str ) : raise RuntimeError ( "Argument `exmsg` is not valid" ) msg = "" try : raise extype ( exmsg ) except Exception as eobj : msg = _get_ex_msg ( eobj ) if msg != exmsg : raise RuntimeError ( "Argument `extype` is not valid" ) # A callable that defines an exception can be accessed by # multiple functions or paths, therefore the callable # dictionary key 'function' is a list func_id , func_name = self . _get_ex_data ( ) if func_id not in self . _ex_dict : self . _ex_dict [ func_id ] = { } key = ( extype , exmsg ) exname = "{0}{1}{2}" . format ( func_id , self . _callables_separator , exname ) entry = self . _ex_dict [ func_id ] . get ( key , { "function" : [ ] , "name" : exname , "raised" : [ ] } ) if func_name not in entry [ "function" ] : entry [ "function" ] . append ( func_name ) entry [ "raised" ] . append ( False ) self . _ex_dict [ func_id ] [ key ] = entry return ( func_id , key , func_name )
r Add an exception to the handler .
380
8
8,067
def decode_call ( self , call ) : # Callable name is None when callable is part of exclude list if call is None : return None itokens = call . split ( self . _callables_separator ) odict = { } for key , value in self . _clut . items ( ) : if value in itokens : odict [ itokens [ itokens . index ( value ) ] ] = key return self . _callables_separator . join ( [ odict [ itoken ] for itoken in itokens ] )
Replace callable tokens with callable names .
122
10
8,068
def encode_call ( self , call ) : # Callable name is None when callable is part of exclude list if call is None : return None itokens = call . split ( self . _callables_separator ) otokens = [ ] for itoken in itokens : otoken = self . _clut . get ( itoken , None ) if not otoken : otoken = str ( len ( self . _clut ) ) self . _clut [ itoken ] = otoken otokens . append ( otoken ) return self . _callables_separator . join ( otokens )
Replace callables with tokens to reduce object memory footprint .
134
12
8,069
def default ( self , obj ) : try : if isinstance ( obj , datetime . datetime ) : # do the datetime thing, or encoded = arrow . get ( obj ) . isoformat ( ) else : # try the normal encoder encoded = json . JSONEncoder . default ( self , obj ) except TypeError as e : # if that fails, check for the to_dict method, if hasattr ( obj , 'to_dict' ) and callable ( obj . to_dict ) : # and use it! encoded = obj . to_dict ( ) else : raise e return encoded
Default JSON encoding .
128
4
8,070
def _fix_up_fields ( cls ) : cls . _arguments = dict ( ) if cls . __module__ == __name__ : # skip the classes in this file return for name in set ( dir ( cls ) ) : attr = getattr ( cls , name , None ) if isinstance ( attr , BaseArgument ) : if name . startswith ( '_' ) : raise TypeError ( "Endpoint argument %s cannot begin with " "an underscore, as these attributes are reserved " "for instance variables of the endpoint object, " "rather than for arguments to your HTTP Endpoint." % name ) attr . _fix_up ( cls , name ) cls . _arguments [ attr . name ] = attr
Add names to all of the Endpoint s Arguments .
167
12
8,071
def _execute ( self , request , * * kwargs ) : try : self . _create_context ( request ) self . _authenticate ( ) context = get_current_context ( ) self . _parse_args ( ) if hasattr ( self , '_before_handlers' ) and isinstance ( self . _before_handlers , ( list , tuple ) ) : for handler in self . _before_handlers : handler ( context ) context . handler_result = self . _handle ( context ) if hasattr ( self , '_after_handlers' ) and isinstance ( self . _after_handlers , ( list , tuple ) ) : for handler in self . _after_handlers : handler ( context ) self . _render ( ) response = context . response # After calling ._render(), the response is ready to go, so we # shouldn't need to handle any other exceptions beyond this point. except AuthenticationError as e : if hasattr ( e , 'message' ) and e . message is not None : message = e . message else : message = "You don't have permission to do that." err = APIError . Forbidden ( message ) response = self . _response_class ( * err . response ) response . headers [ "Content-Type" ] = 'application/json' except ArgumentError as e : err = APIError . UnprocessableEntity ( e . message ) response = self . _response_class ( * err . response ) response . headers [ "Content-Type" ] = 'application/json' except APIError as e : response = self . _response_class ( * e . response ) response . headers [ "Content-Type" ] = 'application/json' except PaleRaisedResponse as r : response = self . _response_class ( * r . response ) response . headers [ "Content-Type" ] = 'application/json' except Exception as e : logging . exception ( "Failed to handle Pale Endpoint %s: %r" , self . __class__ . __name__ , e ) err = APIError . Exception ( repr ( e ) ) response = self . _response_class ( * err . response ) response . headers [ "Content-Type" ] = 'application/json' allow_cors = getattr ( self , "_allow_cors" , None ) if allow_cors is True : response . 
headers [ 'Access-Control-Allow-Origin' ] = '*' elif isinstance ( allow_cors , basestring ) : response . headers [ 'Access-Control-Allow-Origin' ] = allow_cors context . response = response try : if hasattr ( self , '_after_response_handlers' ) and isinstance ( self . _after_response_handlers , ( list , tuple ) ) : for handler in self . _after_response_handlers : handler ( context , response ) except Exception as e : logging . exception ( "Failed to process _after_response_handlers for Endpoint %s" , self . __class__ . __name__ ) raise return response
The top - level execute function for the endpoint .
665
10
8,072
def construct_concierge_header ( self , url ) : concierge_request_header = ( etree . Element ( etree . QName ( XHTML_NAMESPACE , "ConciergeRequestHeader" ) , nsmap = { 'sch' : XHTML_NAMESPACE } ) ) if self . session_id : session = ( etree . SubElement ( concierge_request_header , etree . QName ( XHTML_NAMESPACE , "SessionId" ) ) ) session . text = self . session_id access_key = ( etree . SubElement ( concierge_request_header , etree . QName ( XHTML_NAMESPACE , "AccessKeyId" ) ) ) access_key . text = self . access_key association_id = ( etree . SubElement ( concierge_request_header , etree . QName ( XHTML_NAMESPACE , "AssociationId" ) ) ) association_id . text = self . association_id signature = ( etree . SubElement ( concierge_request_header , etree . QName ( XHTML_NAMESPACE , "Signature" ) ) ) signature . text = self . get_hashed_signature ( url = url ) return concierge_request_header
Constructs the Concierge Request Header lxml object to be used as the _soapheaders argument for WSDL methods .
294
27
8,073
def options_string_builder ( option_mapping , args ) : options_string = "" for option , flag in option_mapping . items ( ) : if option in args : options_string += str ( " %s %s" % ( flag , str ( args [ option ] ) ) ) return options_string
Return arguments for CLI invocation of kal .
68
9
8,074
def build_kal_scan_band_string ( kal_bin , band , args ) : option_mapping = { "gain" : "-g" , "device" : "-d" , "error" : "-e" } if not sanity . scan_band_is_valid ( band ) : err_txt = "Unsupported band designation: %" % band raise ValueError ( err_txt ) base_string = "%s -v -s %s" % ( kal_bin , band ) base_string += options_string_builder ( option_mapping , args ) return ( base_string )
Return string for CLI invocation of kal for band scan .
134
12
8,075
def build_kal_scan_channel_string ( kal_bin , channel , args ) : option_mapping = { "gain" : "-g" , "device" : "-d" , "error" : "-e" } base_string = "%s -v -c %s" % ( kal_bin , channel ) base_string += options_string_builder ( option_mapping , args ) return ( base_string )
Return string for CLI invocation of kal for channel scan .
97
12
8,076
def determine_final_freq ( base , direction , modifier ) : result = 0 if direction == "+" : result = base + modifier elif direction == "-" : result = base - modifier return ( result )
Return integer for frequency .
45
5
8,077
def to_eng ( num_in ) : x = decimal . Decimal ( str ( num_in ) ) eng_not = x . normalize ( ) . to_eng_string ( ) return ( eng_not )
Return number in engineering notation .
48
6
8,078
def determine_device ( kal_out ) : device = "" while device == "" : for line in kal_out . splitlines ( ) : if "Using device " in line : device = str ( line . split ( ' ' , 2 ) [ - 1 ] ) if device == "" : device = None return device
Extract and return device from scan results .
68
9
8,079
def extract_value_from_output ( canary , split_offset , kal_out ) : retval = "" while retval == "" : for line in kal_out . splitlines ( ) : if canary in line : retval = str ( line . split ( ) [ split_offset ] ) if retval == "" : retval = None return retval
Return value parsed from output .
80
6
8,080
def determine_chan_detect_threshold ( kal_out ) : channel_detect_threshold = "" while channel_detect_threshold == "" : for line in kal_out . splitlines ( ) : if "channel detect threshold: " in line : channel_detect_threshold = str ( line . split ( ) [ - 1 ] ) if channel_detect_threshold == "" : print ( "Unable to parse sample rate" ) channel_detect_threshold = None return channel_detect_threshold
Return channel detect threshold from kal output .
119
9
8,081
def determine_band_channel ( kal_out ) : band = "" channel = "" tgt_freq = "" while band == "" : for line in kal_out . splitlines ( ) : if "Using " in line and " channel " in line : band = str ( line . split ( ) [ 1 ] ) channel = str ( line . split ( ) [ 3 ] ) tgt_freq = str ( line . split ( ) [ 4 ] ) . replace ( "(" , "" ) . replace ( ")" , "" ) if band == "" : band = None return ( band , channel , tgt_freq )
Return band channel target frequency from kal output .
136
10
8,082
def parse_kal_scan ( kal_out ) : kal_data = [ ] scan_band = determine_scan_band ( kal_out ) scan_gain = determine_scan_gain ( kal_out ) scan_device = determine_device ( kal_out ) sample_rate = determine_sample_rate ( kal_out ) chan_detect_threshold = determine_chan_detect_threshold ( kal_out ) for line in kal_out . splitlines ( ) : if "chan:" in line : p_line = line . split ( ' ' ) chan = str ( p_line [ 1 ] ) modifier = str ( p_line [ 3 ] ) power = str ( p_line [ 5 ] ) mod_raw = str ( p_line [ 4 ] ) . replace ( ')\tpower:' , '' ) base_raw = str ( ( p_line [ 2 ] ) . replace ( '(' , '' ) ) mod_freq = herz_me ( mod_raw ) base_freq = herz_me ( base_raw ) final_freq = to_eng ( determine_final_freq ( base_freq , modifier , mod_freq ) ) kal_run = { "channel" : chan , "base_freq" : base_freq , "mod_freq" : mod_freq , "modifier" : modifier , "final_freq" : final_freq , "power" : power , "band" : scan_band , "gain" : scan_gain , "device" : scan_device , "sample_rate" : sample_rate , "channel_detect_threshold" : chan_detect_threshold } kal_data . append ( kal_run . copy ( ) ) return kal_data
Parse kal band scan output .
408
8
8,083
def parse_kal_channel ( kal_out ) : scan_band , scan_channel , tgt_freq = determine_band_channel ( kal_out ) kal_data = { "device" : determine_device ( kal_out ) , "sample_rate" : determine_sample_rate ( kal_out ) , "gain" : determine_scan_gain ( kal_out ) , "band" : scan_band , "channel" : scan_channel , "frequency" : tgt_freq , "avg_absolute_error" : determine_avg_absolute_error ( kal_out ) , "measurements" : get_measurements_from_kal_scan ( kal_out ) , "raw_scan_result" : kal_out } return kal_data
Parse kal channel scan output .
186
8
8,084
def get_measurements_from_kal_scan ( kal_out ) : result = [ ] for line in kal_out . splitlines ( ) : if "offset " in line : p_line = line . split ( ' ' ) result . append ( p_line [ - 1 ] ) return result
Return a list of all measurements from kalibrate channel scan .
69
14
8,085
def render ( self , obj , name , context ) : if self . value_lambda is not None : val = self . value_lambda ( obj ) else : attr_name = name if self . property_name is not None : attr_name = self . property_name if isinstance ( obj , dict ) : val = obj . get ( attr_name , None ) else : val = getattr ( obj , attr_name , None ) if callable ( val ) : try : val = val ( ) except : logging . exception ( "Attempted to call `%s` on obj of type %s." , attr_name , type ( obj ) ) raise return val
The default field renderer .
148
6
8,086
def doc_dict ( self ) : doc = { 'type' : self . value_type , 'description' : self . description , 'extended_description' : self . details } return doc
Generate the documentation for this field .
42
8
8,087
def capability ( self , cap_name ) : if cap_name in self . __class_capabilities__ : function_name = self . __class_capabilities__ [ cap_name ] return getattr ( self , function_name )
Return capability by its name
51
5
8,088
def has_capabilities ( self , * cap_names ) : for name in cap_names : if name not in self . __class_capabilities__ : return False return True
Check if class has all of the specified capabilities
38
9
8,089
def add_entity_errors ( self , property_name , direct_errors = None , schema_errors = None ) : if direct_errors is None and schema_errors is None : return self # direct errors if direct_errors is not None : if property_name not in self . errors : self . errors [ property_name ] = dict ( ) if 'direct' not in self . errors [ property_name ] : self . errors [ property_name ] [ 'direct' ] = [ ] if type ( direct_errors ) is not list : direct_errors = [ direct_errors ] for error in direct_errors : if not isinstance ( error , Error ) : err = 'Error must be of type {}' raise x . InvalidErrorType ( err . format ( Error ) ) self . errors [ property_name ] [ 'direct' ] . append ( error ) # schema errors if schema_errors is not None : if isinstance ( schema_errors , Result ) : schema_errors = schema_errors . errors if not schema_errors : return self if property_name not in self . errors : self . errors [ property_name ] = dict ( ) if 'schema' not in self . errors [ property_name ] : self . errors [ property_name ] [ 'schema' ] = schema_errors else : self . errors [ property_name ] [ 'schema' ] = self . merge_errors ( self . errors [ property_name ] [ 'schema' ] , schema_errors ) return self
Attach nested entity errors Accepts a list errors coming from validators attached directly or a dict of errors produced by a nested schema .
323
26
8,090
def add_collection_errors ( self , property_name , direct_errors = None , collection_errors = None ) : if direct_errors is None and collection_errors is None : return self # direct errors if direct_errors is not None : if type ( direct_errors ) is not list : direct_errors = [ direct_errors ] if property_name not in self . errors : self . errors [ property_name ] = dict ( ) if 'direct' not in self . errors [ property_name ] : self . errors [ property_name ] [ 'direct' ] = [ ] for error in direct_errors : if not isinstance ( error , Error ) : err = 'Error must be of type {}' raise x . InvalidErrorType ( err . format ( Error ) ) self . errors [ property_name ] [ 'direct' ] . append ( error ) # collection errors if collection_errors : enum = enumerate ( collection_errors ) errors_dict = { i : e for i , e in enum if not bool ( e ) } if not errors_dict : return self if property_name not in self . errors : self . errors [ property_name ] = dict ( ) if 'collection' not in self . errors [ property_name ] : self . errors [ property_name ] [ 'collection' ] = errors_dict else : local = self . errors [ property_name ] [ 'collection' ] remote = errors_dict for index , result in remote . items ( ) : if index not in local : self . errors [ property_name ] [ 'collection' ] [ index ] = result else : merged = self . merge_errors ( local [ index ] . errors , remote [ index ] . errors ) self . errors [ property_name ] [ 'collection' ] [ index ] = merged return self
Add collection errors Accepts a list errors coming from validators attached directly or a list of schema results for each item in the collection .
385
27
8,091
def merge_errors ( self , errors_local , errors_remote ) : for prop in errors_remote : # create if doesn't exist if prop not in errors_local : errors_local [ prop ] = errors_remote [ prop ] continue local = errors_local [ prop ] local = local . errors if isinstance ( local , Result ) else local remote = errors_remote [ prop ] remote = remote . errors if isinstance ( remote , Result ) else remote # check compatibility if not isinstance ( local , type ( remote ) ) : msg = 'Type mismatch on property [{}] when merging errors. ' msg += 'Unable to merge [{}] into [{}]' raise x . UnableToMergeResultsType ( msg . format ( prop , type ( errors_remote [ prop ] ) , type ( self . errors [ prop ] ) ) ) mismatch = 'Unable to merge nested entity errors with nested ' mismatch += 'collection errors on property [{}]' if 'schema' in local and 'collection' in remote : raise x . UnableToMergeResultsType ( mismatch . format ( prop ) ) if 'collection' in local and 'schema' in remote : raise x . UnableToMergeResultsType ( mismatch . format ( prop ) ) # merge simple & state if type ( remote ) is list : errors_local [ prop ] . extend ( remote ) continue # merge direct errors on nested entities and collection if 'direct' in remote and 'direct' in local : errors_local [ prop ] [ 'direct' ] . extend ( remote [ 'direct' ] ) # merge nested schema errors if 'schema' in remote and 'schema' in local : errors_local [ prop ] [ 'schema' ] = self . merge_errors ( errors_local [ prop ] [ 'schema' ] , remote [ 'schema' ] ) # merge nested collections errors if 'collection' in remote and 'collection' in local : for index , result in remote [ 'collection' ] . items ( ) : if index not in local [ 'collection' ] : errors_local [ prop ] [ 'collection' ] [ index ] = result else : merged = self . merge_errors ( errors_local [ prop ] [ 'collection' ] [ index ] . errors , errors_remote [ prop ] [ 'collection' ] [ index ] . 
errors , ) errors_local [ prop ] [ 'collection' ] [ index ] = merged # and return return errors_local
Merge errors Recursively traverses error graph to merge remote errors into local errors to return a new joined graph .
525
24
8,092
def merge ( self , another ) : if isinstance ( another , Result ) : another = another . errors self . errors = self . merge_errors ( self . errors , another )
Merges another validation result graph into itself
38
8
8,093
def get_messages ( self , locale = None ) : if locale is None : locale = self . locale if self . translator : def translate ( error ) : return self . translator . translate ( error , locale ) else : def translate ( error ) : return error errors = deepcopy ( self . errors ) errors = self . _translate_errors ( errors , translate ) return errors
Get a dictionary of translated messages
80
6
8,094
def _translate_errors ( self , errors , translate ) : for prop in errors : prop_errors = errors [ prop ] # state and simple if type ( prop_errors ) is list : for index , error in enumerate ( prop_errors ) : message = translate ( error . message ) message = self . format_error ( message , error . kwargs ) errors [ prop ] [ index ] = message # entity and collection direct if type ( prop_errors ) is dict and 'direct' in prop_errors : for index , error in enumerate ( prop_errors [ 'direct' ] ) : message = translate ( error . message ) message = self . format_error ( message , error . kwargs ) errors [ prop ] [ 'direct' ] [ index ] = message # entity schema if type ( prop_errors ) is dict and 'schema' in prop_errors : errors [ prop ] [ 'schema' ] = self . _translate_errors ( prop_errors [ 'schema' ] , translate ) # collection schema if type ( prop_errors ) is dict and 'collection' in prop_errors : translated = dict ( ) for index , result in prop_errors [ 'collection' ] . items ( ) : translated [ index ] = self . _translate_errors ( result . errors , translate ) errors [ prop ] [ 'collection' ] = translated return errors
Recursively apply translate callback to each error message
295
10
8,095
def make_url ( self , path , api_root = u'/v2/' ) : return urljoin ( urljoin ( self . url , api_root ) , path )
Gets a full URL from just path .
39
9
8,096
def make_key_url ( self , key ) : if type ( key ) is bytes : key = key . decode ( 'utf-8' ) buf = io . StringIO ( ) buf . write ( u'keys' ) if not key . startswith ( u'/' ) : buf . write ( u'/' ) buf . write ( key ) return self . make_url ( buf . getvalue ( ) )
Gets a URL for a key .
90
8
8,097
def get ( self , key , recursive = False , sorted = False , quorum = False , wait = False , wait_index = None , timeout = None ) : url = self . make_key_url ( key ) params = self . build_args ( { 'recursive' : ( bool , recursive or None ) , 'sorted' : ( bool , sorted or None ) , 'quorum' : ( bool , quorum or None ) , 'wait' : ( bool , wait or None ) , 'waitIndex' : ( int , wait_index ) , } ) if timeout is None : # Try again when :exc:`TimedOut` thrown. while True : try : try : res = self . session . get ( url , params = params ) except : self . erred ( ) except ( TimedOut , ChunkedEncodingError ) : continue else : break else : try : res = self . session . get ( url , params = params , timeout = timeout ) except ChunkedEncodingError : raise TimedOut except : self . erred ( ) return self . wrap_response ( res )
Requests to get a node by the given key .
240
11
8,098
def delete ( self , key , dir = False , recursive = False , prev_value = None , prev_index = None , timeout = None ) : url = self . make_key_url ( key ) params = self . build_args ( { 'dir' : ( bool , dir or None ) , 'recursive' : ( bool , recursive or None ) , 'prevValue' : ( six . text_type , prev_value ) , 'prevIndex' : ( int , prev_index ) , } ) try : res = self . session . delete ( url , params = params , timeout = timeout ) except : self . erred ( ) return self . wrap_response ( res )
Requests to delete a node by the given key .
147
11
8,099
def login_to_portal ( username , password , client , retries = 2 , delay = 0 ) : if not client . session_id : client . request_session ( ) concierge_request_header = client . construct_concierge_header ( url = ( "http://membersuite.com/contracts/IConciergeAPIService/" "LoginToPortal" ) ) attempts = 0 while attempts < retries : if attempts : time . sleep ( delay ) result = client . client . service . LoginToPortal ( _soapheaders = [ concierge_request_header ] , portalUserName = username , portalPassword = password ) login_to_portal_result = result [ "body" ] [ "LoginToPortalResult" ] if login_to_portal_result [ "Success" ] : portal_user = login_to_portal_result [ "ResultValue" ] [ "PortalUser" ] session_id = get_session_id ( result = result ) return PortalUser ( membersuite_object_data = portal_user , session_id = session_id ) else : attempts += 1 try : error_code = login_to_portal_result [ "Errors" ] [ "ConciergeError" ] [ 0 ] [ "Code" ] except IndexError : # Not a ConciergeError continue else : if attempts < retries and error_code == "GeneralException" : continue raise LoginToPortalError ( result = result )
Log username into the MemberSuite Portal .
333
9