idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
23,000
def resolve(self, from_email, resolution=None):
    """Resolve an incident using a valid requester email address.

    Args:
        from_email: email of the user resolving the incident (required,
            sent as the ``From`` header the PagerDuty API demands).
        resolution: optional free-form resolution message.

    Raises:
        MissingFromEmail: if ``from_email`` is None or not a string.
    """
    if from_email is None or not isinstance(from_email, six.string_types):
        raise MissingFromEmail(from_email)
    endpoint = '/'.join((self.endpoint, self.id,))
    add_headers = {'from': from_email, }
    data = {
        'incident': {
            'type': 'incident',
            'status': 'resolved',
        }
    }
    if resolution is not None:
        # BUG FIX: the PagerDuty v2 API expects `resolution` inside the
        # `incident` object; the original put it at the top level of the
        # request body, where the API ignores it.
        data['incident']['resolution'] = resolution
    result = self.request('PUT',
                          endpoint=endpoint,
                          add_headers=add_headers,
                          data=data,)
    return result
Resolve an incident using a valid email address .
23,001
def reassign(self, from_email, user_ids):
    """Reassign this incident to other users.

    Raises MissingFromEmail when ``from_email`` is not a string, and
    InvalidArguments when ``user_ids`` is not a list of id strings.
    """
    endpoint = '/'.join((self.endpoint, self.id,))
    if from_email is None or not isinstance(from_email, six.string_types):
        raise MissingFromEmail(from_email)
    if user_ids is None or not isinstance(user_ids, list):
        raise InvalidArguments(user_ids)
    if not all([isinstance(uid, six.string_types) for uid in user_ids]):
        raise InvalidArguments(user_ids)

    # Build one user_reference assignment per id.
    assignees = [
        {'assignee': {'id': uid, 'type': 'user_reference', }}
        for uid in user_ids
    ]
    add_headers = {'from': from_email, }
    payload = {
        'incident': {
            'type': 'incident',
            'assignments': assignees,
        }
    }
    return self.request('PUT',
                        endpoint=endpoint,
                        add_headers=add_headers,
                        data=payload,)
Reassign an incident to other users using a valid email address .
23,002
def log_entries(self, time_zone='UTC', is_overview=False, include=None,
                fetch_all=True):
    """Query the log entries recorded against this incident.

    `is_overview` is JSON-encoded because the API expects a lowercase
    boolean literal in the query string.
    """
    endpoint = '/'.join((self.endpoint, self.id, 'log_entries'))
    params = {
        'time_zone': time_zone,
        'is_overview': json.dumps(is_overview),
    }
    if include:
        params['include'] = include
    return self.logEntryFactory.find(
        endpoint=endpoint,
        api_key=self.api_key,
        fetch_all=fetch_all,
        **params
    )
Query for log entries on an incident instance .
23,003
def notes(self):
    """Return the notes attached to this incident."""
    endpoint = '/'.join((self.endpoint, self.id, 'notes'))
    return self.noteFactory.find(
        endpoint=endpoint,
        api_key=self.api_key,
    )
Query for notes attached to this incident .
23,004
def create_note(self, from_email, content):
    """Create a note on this incident.

    Raises MissingFromEmail when ``from_email`` is not a string.
    """
    if from_email is None or not isinstance(from_email, six.string_types):
        raise MissingFromEmail(from_email)
    endpoint = '/'.join((self.endpoint, self.id, 'notes'))
    headers = {'from': from_email, }
    return self.noteFactory.create(
        endpoint=endpoint,
        api_key=self.api_key,
        add_headers=headers,
        data={'content': content},
    )
Create a note for this incident .
23,005
def snooze(self, from_email, duration):
    """Snooze this incident for ``duration`` seconds.

    Raises MissingFromEmail when ``from_email`` is not a string.
    """
    if from_email is None or not isinstance(from_email, six.string_types):
        raise MissingFromEmail(from_email)
    endpoint = '/'.join((self.endpoint, self.id, 'snooze'))
    headers = {'from': from_email, }
    return self.__class__.create(
        endpoint=endpoint,
        api_key=self.api_key,
        add_headers=headers,
        data_key='duration',
        data=duration,
    )
Snooze this incident for duration seconds .
23,006
def merge(self, from_email, source_incidents):
    """Merge ``source_incidents`` (Entities or id strings) into this one.

    Raises MissingFromEmail when ``from_email`` is not a string.
    """
    if from_email is None or not isinstance(from_email, six.string_types):
        raise MissingFromEmail(from_email)
    headers = {'from': from_email, }
    endpoint = '/'.join((self.endpoint, self.id, 'merge'))
    # Accept either Entity instances or raw id strings.
    references = []
    for entity in source_incidents:
        incident_id = entity['id'] if isinstance(entity, Entity) else entity
        references.append({'type': 'incident_reference', 'id': incident_id})
    return self.__class__.create(
        endpoint=endpoint,
        api_key=self.api_key,
        add_headers=headers,
        data_key='source_incidents',
        data=references,
        method='PUT',
    )
Merge other incidents into this incident .
23,007
def alerts(self):
    """Return the alerts attached to this incident."""
    endpoint = '/'.join((self.endpoint, self.id, 'alerts'))
    return self.alertFactory.find(
        endpoint=endpoint,
        api_key=self.api_key,
    )
Query for alerts attached to this incident .
23,008
def find(cls, *args, **kwargs):
    """Find entities within an at-most-30-day [since, until] window.

    Defaults ``until`` to now and ``since`` to 30 days earlier; raises
    InvalidArguments when the requested window exceeds 30 days.
    """
    window_seconds = 60 * 60 * 24 * 30  # thirty days
    until = kwargs.pop('until', None)
    since = kwargs.pop('since', None)
    if until is None:
        until = datetime.datetime.now()
    if since is None:
        since = until - datetime.timedelta(seconds=window_seconds)
    if until - since > datetime.timedelta(seconds=window_seconds):
        raise InvalidArguments(until, since)
    kwargs['since'] = since.isoformat()
    kwargs['until'] = until.isoformat()
    return getattr(Entity, 'find').__func__(cls, *args, **kwargs)
Find notifications .
23,009
def services(self):
    """Fetch full Service instances for every service referenced here."""
    reference_ids = [ref['id'] for ref in self['services']]
    return [Service.fetch(service_id) for service_id in reference_ids]
Fetch all instances of services for this EP .
23,010
def fetch(cls, id, incident=None, endpoint=None, *args, **kwargs):
    """Fetch a single alert; nested, so it needs an incident or endpoint.

    Raises InvalidArguments when neither ``incident`` nor ``endpoint``
    is supplied.
    """
    if incident is None and endpoint is None:
        raise InvalidArguments(incident, endpoint)
    if endpoint is None:
        # Accept either an Entity or a bare incident id string.
        if isinstance(incident, Entity):
            iid = incident['id']
        else:
            iid = incident
        endpoint = 'incidents/{0}/alerts'.format(iid)
    return getattr(Entity, 'fetch').__func__(cls, id, endpoint=endpoint,
                                             *args, **kwargs)
Customize fetch because this is a nested resource .
23,011
def resolve(self, from_email):
    """Resolve this alert using a valid email address.

    Raises MissingFromEmail when ``from_email`` is not a string.
    """
    if from_email is None or not isinstance(from_email, six.string_types):
        raise MissingFromEmail(from_email)
    parent_incident_id = self['incident']['id']
    endpoint = 'incidents/{0}/alerts/{1}'.format(parent_incident_id,
                                                 self['id'])
    headers = {'from': from_email, }
    payload = {
        'alert': {
            'id': self['id'],
            'type': 'alert',
            'status': 'resolved',
        }
    }
    return self.request('PUT',
                        endpoint=endpoint,
                        add_headers=headers,
                        data=payload,)
Resolve an alert using a valid email address .
23,012
def associate(self, from_email, new_parent_incident=None):
    """Move this alert under a different parent incident.

    Raises MissingFromEmail for a bad ``from_email`` and
    InvalidArguments when ``new_parent_incident`` is omitted.
    """
    if from_email is None or not isinstance(from_email, six.string_types):
        raise MissingFromEmail(from_email)
    if new_parent_incident is None:
        raise InvalidArguments(new_parent_incident)
    current_parent_id = self['incident']['id']
    endpoint = 'incidents/{0}/alerts/{1}'.format(current_parent_id,
                                                 self['id'])
    # Accept either an Entity or a bare incident id string.
    if isinstance(new_parent_incident, Entity):
        new_parent_id = new_parent_incident['id']
    else:
        new_parent_id = new_parent_incident
    headers = {'from': from_email, }
    payload = {
        'alert': {
            'id': self['id'],
            'type': 'alert',
            'incident': {
                'type': 'incident',
                'id': new_parent_id,
            }
        }
    }
    return self.request('PUT',
                        endpoint=endpoint,
                        add_headers=headers,
                        data=payload,)
Associate an alert with an incident using a valid email address .
23,013
def fetch(cls, id, service=None, endpoint=None, *args, **kwargs):
    """Fetch a single integration from its service-scoped endpoint.

    Raises InvalidArguments when neither ``service`` nor ``endpoint``
    is supplied.
    """
    if service is None and endpoint is None:
        raise InvalidArguments(service, endpoint)
    if endpoint is None:
        if isinstance(service, Entity):
            sid = service['id']
        else:
            sid = service
        endpoint = 'services/{0}/integrations'.format(sid)
    return getattr(Entity, 'fetch').__func__(cls, id, endpoint=endpoint,
                                             *args, **kwargs)
Customize fetch because it lives on a special endpoint .
23,014
def create(cls, service=None, endpoint=None, data=None, *args, **kwargs):
    """Create an integration scoped to a service.

    Validates ``data`` first; raises InvalidArguments when neither
    ``service`` nor ``endpoint`` is supplied.
    """
    cls.validate(data)
    if service is None and endpoint is None:
        raise InvalidArguments(service, endpoint)
    if endpoint is None:
        if isinstance(service, Entity):
            sid = service['id']
        else:
            sid = service
        endpoint = 'services/{0}/integrations'.format(sid)
    return getattr(Entity, 'create').__func__(cls, endpoint=endpoint,
                                              data=data, *args, **kwargs)
Create an integration within the scope of a service.
23,015
def get_oncall(self, **kwargs):
    """Retrieve the users currently on call for this schedule."""
    endpoint = '/'.join((self.endpoint, self.id, 'users'))
    return self.request('GET', endpoint=endpoint, query_params=kwargs)
Retrieve this schedule's on-call users.
23,016
def _do_request(self, method, *args, **kwargs):
    """Perform a single HTTP request via ``requests`` and handle the response.

    Logs the method, URL, headers and JSON payload at DEBUG level before
    dispatching.
    """
    message = 'Doing HTTP [{3}] request: {0} - headers: {1} - payload: {2}'
    log(
        message.format(
            args[0],
            kwargs.get('headers'),
            kwargs.get('json'),
            method,
        ),
        level=logging.DEBUG,
    )
    http_call = getattr(requests, method)
    return self._handle_response(http_call(*args, **kwargs))
Modularized because API was broken .
23,017
def sanitize_ep(endpoint, plural=False):
    """Return *endpoint* coerced to its plural or singular English form.

    Pluralizing maps a trailing 'y' to 'ies' and otherwise appends 's';
    singularizing reverses both transformations.
    """
    if plural:
        if endpoint.endswith('y'):
            return endpoint[:-1] + 'ies'
        if endpoint.endswith('s'):
            return endpoint
        return endpoint + 's'
    if endpoint.endswith('ies'):
        return endpoint[:-3] + 'y'
    if endpoint.endswith('s'):
        return endpoint[:-1]
    return endpoint
Sanitize an endpoint to a singular or plural form .
23,018
def get_endpoint(cls):
    """Return the REST endpoint, deriving it from the class name if unset.

    CamelCase class names are converted to snake_case and pluralized,
    e.g. ``EscalationPolicy`` -> ``escalation_policies``.
    """
    if cls.endpoint is not None:
        return cls.endpoint
    # Two-pass CamelCase -> snake_case conversion.
    partial = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', cls.__name__)
    snake = re.sub('([a-z0-9])([A-Z])', r'\1_\2', partial).lower()
    return cls.sanitize_ep(snake, plural=True)
Accessor method to enable omission of the endpoint name.
23,019
def _fetch_all(cls, api_key, endpoint=None, offset=0, limit=25, **kwargs):
    """Page through ``cls._fetch_page`` until the API reports no more data.

    ``limit`` is clamped to [1, 100]; an optional ``maximum`` kwarg caps
    both the page size and the total number of results gathered.
    """
    results = []
    query = kwargs.copy()
    limit = max(1, min(100, limit))
    maximum = kwargs.get('maximum')
    query['limit'] = min(limit, maximum) if maximum is not None else limit
    query['offset'] = offset
    more, total = None, None

    while True:
        entities, options = cls._fetch_page(
            api_key=api_key, endpoint=endpoint, **query
        )
        results += entities
        more = options.get('more')
        limit = options.get('limit')
        offset = options.get('offset')
        total = options.get('total')

        if more is None:
            # Older API responses omit 'more'; derive it when possible.
            if total is None or offset is None:
                break
            more = (limit + offset) < total

        if not more or (maximum is not None and len(results) >= maximum):
            break

        query['limit'] = limit
        query['offset'] = offset + limit

    return results
Call self . _fetch_page for as many pages as exist .
23,020
def _fetch_page(cls, api_key, endpoint=None, page_index=0, offset=None,
                limit=25, **kwargs):
    """Fetch one page of at most ``limit`` entities.

    An explicit ``offset`` overrides ``page_index``; ``limit`` is
    clamped to [1, MAX_LIMIT_VALUE] and further capped by ``maximum``.
    Returns ``(entities, remaining_response_metadata)``.
    """
    if offset is not None:
        page_index = int(offset / limit)
    limit = max(1, min(cls.MAX_LIMIT_VALUE, limit))
    inst = cls(api_key=api_key)
    kwargs['offset'] = int(page_index * limit)
    maximum = kwargs.pop('maximum', None)
    kwargs['limit'] = min(limit, maximum) if maximum is not None else limit

    # The pluralized endpoint name doubles as the response parse key.
    ep = parse_key = cls.sanitize_ep(cls.get_endpoint(), plural=True)
    if endpoint is not None:
        ep = endpoint

    response = inst.request('GET', endpoint=ep, query_params=kwargs)
    datas = cls._parse(response, key=parse_key)
    response.pop(parse_key, None)
    entities = [cls(api_key=api_key, _data=d) for d in datas]
    return entities, response
Fetch a single page of limit number of results .
23,021
def fetch(cls, id, api_key=None, endpoint=None, add_headers=None, **kwargs):
    """Fetch a single entity by id and return a populated instance."""
    if endpoint is None:
        endpoint = cls.get_endpoint()
    inst = cls(api_key=api_key)
    # Parse key is the singular form of the last endpoint segment.
    parse_key = cls.sanitize_ep(endpoint).split("/")[-1]
    endpoint = '/'.join((endpoint, id))
    response = inst.request('GET',
                            endpoint=endpoint,
                            add_headers=add_headers,
                            query_params=kwargs)
    inst._set(cls._parse(response, key=parse_key))
    return inst
Fetch a single entity from the API endpoint .
23,022
def translate_query_params(cls, **kwargs):
    """Fold any TRANSLATE_QUERY_PARAM keyword into the 'query' parameter.

    An explicit ``query`` kwarg always wins; otherwise the first value
    found among the translatable parameters is promoted to ``query``.
    """
    collected = []
    output = kwargs.copy()
    query = kwargs.pop('query', None)
    for param in (cls.TRANSLATE_QUERY_PARAM or []):
        value = output.pop(param, None)
        if value is not None:
            collected.append(value)
    if query is not None:
        output['query'] = query
        return output
    try:
        output['query'] = next(iter(collected))
    except StopIteration:
        pass  # nothing translatable was supplied
    return output
Translate an arbitrary keyword argument to the expected query.
23,023
def find(cls, api_key=None, fetch_all=True, endpoint=None, maximum=None,
         **kwargs):
    """Find entities, optionally paging through every result.

    An ``exclude`` kwarg (string or list) filters out matching entities
    via ``cls._find_exclude_filter``.
    """
    exclude = kwargs.pop('exclude', None)
    if isinstance(exclude, six.string_types):
        exclude = [exclude, ]
    query_params = cls.translate_query_params(**kwargs)
    if endpoint is None:
        endpoint = cls.get_endpoint()

    if fetch_all:
        fetcher = cls._fetch_all
    else:
        fetcher = cls._fetch_page
    result = fetcher(api_key=api_key, endpoint=endpoint, maximum=maximum,
                     **query_params)

    return [r for r in result if not cls._find_exclude_filter(exclude, r)]
Find some entities from the API endpoint .
23,024
def create(cls, data=None, api_key=None, endpoint=None, add_headers=None,
           data_key=None, response_data_key=None, method='POST', **kwargs):
    """Create an entity by POSTing (or ``method``-ing) to the API.

    ``data_key`` wraps the request body; ``response_data_key`` selects
    the entity from the response. Both default to the singular endpoint
    name. Returns the populated instance.
    """
    inst = cls(api_key=api_key)
    singular = cls.sanitize_ep(cls.get_endpoint())
    if data_key is None:
        data_key = singular
    if response_data_key is None:
        response_data_key = singular
    body = {data_key: data}
    if endpoint is None:
        endpoint = cls.get_endpoint()
    response = inst.request(method,
                            endpoint=endpoint,
                            data=body,
                            query_params=kwargs,
                            add_headers=add_headers,)
    inst._set(cls._parse(response, key=response_data_key))
    return inst
Create an instance of the Entity model by calling to the API endpoint .
23,025
def _parse(cls, data, key=None):
    """Extract entity data from a raw response.

    ``cls.parse`` may be a callable (applied to the data) or a string
    (treated as a marker to index ``data`` by ``key``); anything else
    is an error.
    """
    parse = cls.parse if cls.parse is not None else cls.get_endpoint()
    if callable(parse):
        return parse(data)
    if isinstance(parse, str):
        return data[key]
    raise Exception('"parse" should be a callable or string got, {0}'.format(
        parse))
Parse a set of data to extract entity - only data .
23,026
def log(*args, **kwargs):
    """Log through the module-level logger; ``level`` defaults to INFO."""
    severity = kwargs.pop('level', logging.INFO)
    logger.log(severity, *args, **kwargs)
Log things with the global logger .
23,027
def create(cls, data=None, api_key=None, endpoint=None, add_headers=None,
           **kwargs):
    """Create an event on the PagerDuty account.

    The events API lives at the service root, so any supplied endpoint
    is deliberately replaced with the empty string.
    """
    cls.validate(data)
    inst = cls(api_key=api_key)
    endpoint = ''
    return inst.request('POST',
                        endpoint=endpoint,
                        data=data,
                        query_params=kwargs,
                        add_headers=add_headers,)
Create an event on your PagerDuty account .
23,028
def remove_escalation_policy(self, escalation_policy, **kwargs):
    """Remove an escalation policy (Entity or id string) from this team."""
    if isinstance(escalation_policy, Entity):
        escalation_policy = escalation_policy['id']
    assert isinstance(escalation_policy, six.string_types)
    endpoint = '{0}/{1}/escalation_policies/{2}'.format(
        self.endpoint,
        self['id'],
        escalation_policy,
    )
    return self.request('DELETE', endpoint=endpoint, query_params=kwargs)
Remove an escalation policy from this team .
23,029
def remove_user(self, user, **kwargs):
    """Remove a user (Entity or id string) from this team."""
    if isinstance(user, Entity):
        user = user['id']
    assert isinstance(user, six.string_types)
    endpoint = '{0}/{1}/users/{2}'.format(
        self.endpoint,
        self['id'],
        user,
    )
    return self.request('DELETE', endpoint=endpoint, query_params=kwargs)
Remove a user from this team .
23,030
def add_user(self, user, **kwargs):
    """Add a user (User entity or id string) to this team."""
    if isinstance(user, User):
        user = user['id']
    assert isinstance(user, six.string_types)
    endpoint = '{0}/{1}/users/{2}'.format(
        self.endpoint,
        self['id'],
        user,
    )
    return self.request('PUT', endpoint=endpoint, query_params=kwargs)
Add a user to this team .
23,031
def create_integration(self, integration_info, **kwargs):
    """Create an integration for this service.

    Validates any embedded 'service' and 'vendor' sub-documents before
    delegating to the integration factory.
    """
    service_info = integration_info.get('service')
    vendor_info = integration_info.get('vendor')
    if service_info is not None:
        self.__class__.validate(service_info)
    if vendor_info is not None:
        self.vendorFactory.validate(vendor_info)
    endpoint = '{0}/{1}/integrations'.format(
        self.endpoint,
        self['id'],
    )
    return self.integrationFactory.create(
        endpoint=endpoint,
        api_key=self.api_key,
        data=integration_info,
        query_params=kwargs
    )
Create an integration for this service .
23,032
def integrations(self, **kwargs):
    """Fetch full Integration instances for each integration reference."""
    reference_ids = [ref['id'] for ref in self['integrations']]
    return [
        Integration.fetch(integration_id, service=self, query_params=kwargs)
        for integration_id in reference_ids
    ]
Retrieve all of this service's integrations.
23,033
def get_integration(self, id, **kwargs):
    """Retrieve a single integration of this service by id."""
    return Integration.fetch(id, service=self, query_params=kwargs)
Retrieve a single integration by id .
23,034
def contact_methods(self, **kwargs):
    """Return the list of contact methods for this user."""
    endpoint = '{0}/{1}/contact_methods'.format(
        self.endpoint,
        self['id'],
    )
    response = self.request('GET', endpoint=endpoint, query_params=kwargs)
    return response['contact_methods']
Get all contact methods for this user .
23,035
def delete_contact_method(self, id, **kwargs):
    """Delete one of this user's contact methods by id."""
    endpoint = '{0}/{1}/contact_methods/{2}'.format(
        self.endpoint,
        self['id'],
        id,
    )
    return self.request('DELETE', endpoint=endpoint, query_params=kwargs)
Delete a contact method for this user .
23,036
def get_contact_method(self, id, **kwargs):
    """Return one of this user's contact methods by id."""
    endpoint = '{0}/{1}/contact_methods/{2}'.format(
        self.endpoint,
        self['id'],
        id,
    )
    response = self.request('GET', endpoint=endpoint, query_params=kwargs)
    return response['contact_method']
Get a contact method for this user .
23,037
def notification_rules(self, **kwargs):
    """Return all notification rules for this user."""
    endpoint = '{0}/{1}/notification_rules'.format(
        self.endpoint,
        self['id'],
    )
    response = self.request('GET', endpoint=endpoint, query_params=kwargs)
    return response['notification_rules']
Get all notification rules for this user .
23,038
def create_notification_rule(self, data, **kwargs):
    """Create a notification rule for this user.

    The new rule is also appended to the locally cached entity data so
    the instance stays in sync with the server.
    """
    payload = {'notification_rule': data, }
    endpoint = '{0}/{1}/notification_rules'.format(
        self.endpoint,
        self['id'],
    )
    result = self.request('POST', endpoint=endpoint, data=payload,
                          query_params=kwargs)
    self._data['notification_rules'].append(result['notification_rule'])
    return result
Create a notification rule for this user .
23,039
def delete_notification_rule(self, id, **kwargs):
    """Delete one of this user's notification rules by id."""
    endpoint = '{0}/{1}/notification_rules/{2}'.format(
        self.endpoint,
        self['id'],
        id,
    )
    return self.request('DELETE', endpoint=endpoint, query_params=kwargs)
Delete a notification rule for this user.
23,040
def install(cls, type_, name, src, *args, **kwargs):
    """Install an add-on on this account.

    Builds the default 'addon' payload from ``type_``/``name``/``src``
    unless an explicit ``data`` kwarg is supplied, then delegates to
    ``cls.create``.

    Returns the created entity (previously the result was silently
    discarded, leaving callers no handle on the new add-on).
    """
    data = kwargs.pop('data', None)
    if data is None:
        data = {
            'addon': {
                'type': type_,
                'name': name,
                'src': src,
            }
        }
    return cls.create(data=data, *args, **kwargs)
Install an add - on to this account .
23,041
def create(cls, data=None, *args, **kwargs):
    """Validate ``data`` and create a Vendor entity.

    Returns the created instance (previously the result of
    ``Entity.create`` was discarded, so callers always received None).
    """
    cls.validate(data)
    return getattr(Entity, 'create').__func__(cls, data=data, *args,
                                              **kwargs)
Validate and then create a Vendor entity .
23,042
def unstruct_strat(self):
    """Report the configured default unstructuring strategy."""
    uses_dict = self._unstructure_attrs == self.unstructure_attrs_asdict
    if uses_dict:
        return UnstructureStrategy.AS_DICT
    return UnstructureStrategy.AS_TUPLE
The default way of unstructuring attrs classes .
23,043
def register_structure_hook(self, cl, func):
    """Register a primitive-to-class converter function for a type.

    Union types go to the dedicated union registry; everything else is
    registered with the class-based structure dispatcher.
    """
    if is_union_type(cl):
        self._union_registry[cl] = func
        return
    self._structure_func.register_cls_list([(cl, func)])
Register a primitive - to - class converter function for a type .
23,044
def structure(self, obj, cl):
    """Convert unstructured data ``obj`` into an instance of ``cl``."""
    handler = self._structure_func.dispatch(cl)
    return handler(obj, cl)
Convert unstructured Python data structures to structured data .
23,045
def unstructure_attrs_asdict(self, obj):
    """Unstructure an attrs instance into a dict, recursing through us."""
    dispatch = self._unstructure_func.dispatch
    result = self._dict_factory()
    for attribute in obj.__class__.__attrs_attrs__:
        value = getattr(obj, attribute.name)
        result[attribute.name] = dispatch(value.__class__)(value)
    return result
Our version of attrs . asdict so we can call back to us .
23,046
def unstructure_attrs_astuple(self, obj):
    """Unstructure an attrs instance into a tuple, recursing through us."""
    attributes = obj.__class__.__attrs_attrs__
    return tuple(
        self.unstructure(getattr(obj, attribute.name))
        for attribute in attributes
    )
Our version of attrs . astuple so we can call back to us .
23,047
def _unstructure_mapping(self, mapping):
    """Unstructure every key and value, preserving the mapping's type."""
    dispatch = self._unstructure_func.dispatch
    pairs = (
        (dispatch(k.__class__)(k), dispatch(v.__class__)(v))
        for k, v in mapping.items()
    )
    return mapping.__class__(pairs)
Convert a mapping of attr classes to primitive equivalents .
23,048
def _structure_default(self, obj, cl):
    """Fallthrough structuring: pass through for Any/Optional, else fail."""
    if cl is Any or cl is Optional:
        return obj
    raise ValueError(
        "Unsupported type: {0}. Register a structure hook for "
        "it.".format(cl)
    )
This is the fallthrough case . Everything is a subclass of Any .
23,049
def _structure_unicode(self, obj, cl):
    """Return text unchanged; coerce non-text values via str then cl."""
    if isinstance(obj, (bytes, unicode)):
        return obj
    return cl(str(obj))
Just call cl with the given obj
23,050
def _structure_attr_from_tuple(self, a, name, value):
    """Structure one attrs attribute value; untyped attributes pass through."""
    attribute_type = a.type
    if attribute_type is None:
        # No type metadata: nothing to convert to.
        return value
    converter = self._structure_func.dispatch(attribute_type)
    return converter(value, attribute_type)
Handle an individual attrs attribute .
23,051
def _structure_list(self, obj, cl):
    """Structure an iterable into a (possibly generic) list."""
    if is_bare(cl) or cl.__args__[0] is Any:
        # Untyped list: shallow-copy the elements as-is.
        return [e for e in obj]
    elem_type = cl.__args__[0]
    convert = self._structure_func.dispatch(elem_type)
    return [convert(e, elem_type) for e in obj]
Convert an iterable to a potentially generic list .
23,052
def _structure_set(self, obj, cl):
    """Structure an iterable into a (possibly generic) set."""
    if is_bare(cl) or cl.__args__[0] is Any:
        return set(obj)
    elem_type = cl.__args__[0]
    convert = self._structure_func.dispatch(elem_type)
    return {convert(e, elem_type) for e in obj}
Convert an iterable into a potentially generic set .
23,053
def _structure_frozenset(self, obj, cl):
    """Structure an iterable into a (possibly generic) frozenset."""
    if is_bare(cl) or cl.__args__[0] is Any:
        return frozenset(obj)
    elem_type = cl.__args__[0]
    convert = self._structure_func.dispatch(elem_type)
    return frozenset(convert(e, elem_type) for e in obj)
Convert an iterable into a potentially generic frozenset .
23,054
def _structure_dict(self, obj, cl):
    """Structure a mapping into a (possibly generic) dict.

    Skips conversion entirely for bare / Any-Any dicts, and converts
    only the typed side when one of key/value is Any.
    """
    if is_bare(cl) or cl.__args__ == (Any, Any):
        return dict(obj)
    key_type, val_type = cl.__args__
    if key_type is Any:
        convert_val = self._structure_func.dispatch(val_type)
        return {k: convert_val(v, val_type) for k, v in obj.items()}
    if val_type is Any:
        convert_key = self._structure_func.dispatch(key_type)
        return {convert_key(k, key_type): v for k, v in obj.items()}
    convert_key = self._structure_func.dispatch(key_type)
    convert_val = self._structure_func.dispatch(val_type)
    return {
        convert_key(k, key_type): convert_val(v, val_type)
        for k, v in obj.items()
    }
Convert a mapping into a potentially generic dict .
23,055
def _structure_union(self, obj, union):
    """Structure a value against a Union type.

    Optional[X] is short-circuited; otherwise a registered union hook
    or a generated disambiguation function picks the concrete class.
    """
    union_params = union.__args__
    if NoneType in union_params:
        if obj is None:
            return None
        if len(union_params) == 2:
            # Optional[X]: structure directly as X.
            if union_params[1] is NoneType:
                other = union_params[0]
            else:
                other = union_params[1]
            return self._structure_func.dispatch(other)(obj, other)
    handler = self._union_registry.get(union)
    if handler is not None:
        return handler(obj, union)
    cl = self._dis_func_cache(union)(obj)
    return self._structure_func.dispatch(cl)(obj, cl)
Deal with converting a union .
23,056
def _structure_tuple(self, obj, tup):
    """Structure an iterable into a typed tuple.

    Handles homogeneous Tuple[X, ...] and heterogeneous Tuple[X, Y, Z];
    untyped or Tuple[Any, ...] inputs are copied as-is.
    """
    tup_params = tup.__args__
    has_ellipsis = tup_params and tup_params[-1] is Ellipsis
    if tup_params is None or (has_ellipsis and tup_params[0] is Any):
        return tuple(obj)
    if has_ellipsis:
        # Homogeneous tuple: one element type for everything.
        tup_type = tup_params[0]
        convert = self._structure_func.dispatch(tup_type)
        return tuple(convert(e, tup_type) for e in obj)
    # Heterogeneous tuple: pair each element with its declared type.
    return tuple(
        self._structure_func.dispatch(t)(e, t)
        for t, e in zip(tup_params, obj)
    )
Deal with converting to a tuple .
23,057
def _get_dis_func(self, union):
    """Create a disambiguation function for a union of attrs classes.

    NoneType members are stripped first; raises ValueError when any
    remaining member is not an attrs class.
    """
    union_types = union.__args__
    if NoneType in union_types:
        union_types = tuple(e for e in union_types if e is not NoneType)
    if not all(hasattr(e, "__attrs_attrs__") for e in union_types):
        raise ValueError(
            "Only unions of attr classes supported "
            "currently. Register a loads hook manually."
        )
    return create_uniq_field_dis_func(*union_types)
Fetch or try creating a disambiguation function for a union .
23,058
def create_uniq_field_dis_func(*classes):
    """Build a function that picks a class from a mapping's unique field.

    Each class must expose at least one attribute no later-sorted class
    has; the class with the fewest attributes becomes the fallback.
    Raises ValueError for fewer than two classes, multiple empty
    classes, or a class with no usable unique attribute.
    """
    if len(classes) < 2:
        raise ValueError("At least two classes required.")
    cls_and_attrs = [
        (cl, set(at.name for at in fields(cl))) for cl in classes
    ]
    empty = [attrs for _, attrs in cls_and_attrs if len(attrs) == 0]
    if len(empty) > 1:
        raise ValueError("At least two classes have no attributes.")

    uniq_attrs_dict = OrderedDict()
    # Largest attribute sets first so the fallback is the smallest.
    cls_and_attrs.sort(key=lambda c_a: -len(c_a[1]))

    fallback = None
    for i, (cl, cl_reqs) in enumerate(cls_and_attrs):
        other_classes = cls_and_attrs[i + 1:]
        if other_classes:
            other_reqs = reduce(or_, (c_a[1] for c_a in other_classes))
            uniq = cl_reqs - other_reqs
            if not uniq:
                m = "{} has no usable unique attributes.".format(cl)
                raise ValueError(m)
            uniq_attrs_dict[next(iter(uniq))] = cl
        else:
            fallback = cl

    def dis_func(data):
        # Pick the first class whose unique attribute appears in data.
        if not isinstance(data, Mapping):
            raise ValueError("Only input mappings are supported.")
        for k, v in uniq_attrs_dict.items():
            if k in data:
                return v
        return fallback

    return dis_func
Given attr classes generate a disambiguation function .
23,059
def _dispatch(self, typ):
    """Return the first registered handler whose predicate accepts ``typ``.

    Predicate exceptions are swallowed so one broken predicate cannot
    block the rest; raises KeyError when nothing matches.
    """
    for can_handle, handler in self._handler_pairs:
        try:
            if can_handle(typ):
                return handler
        except Exception:
            # A failing predicate simply doesn't match.
            pass
    raise KeyError("unable to find handler for {0}".format(typ))
returns the appropriate handler for the object passed .
23,060
def register_cls_list(self, cls_and_handler):
    """Register (class, handler) pairs with singledispatch; clear cache."""
    for klass, handler in cls_and_handler:
        self._single_dispatch.register(klass, handler)
    self.dispatch.cache_clear()
register a class to singledispatch
23,061
def register_func_list(self, func_and_handler):
    """Register (predicate, handler) pairs; clear the dispatch cache."""
    for predicate, handler in func_and_handler:
        self._function_dispatch.register(predicate, handler)
    self.dispatch.cache_clear()
register a function to determine if the handle should be used for the type
23,062
def resource_uri(self):
    """Return the canonical URI for this resource instance."""
    pk_value = getattr(self, self.primary_key(), None)
    return '/{}/{}'.format(self.endpoint(), pk_value)
Return the URI at which the resource can be found .
23,063
def primary_key(cls):
    """Return the column name of the table's (first) primary key."""
    if cls.__from_class__:
        # Delegate to the wrapped model class when present.
        cls = cls.__from_class__
    return cls.__table__.primary_key.columns.values()[0].name
Return the name of the table's primary key.
23,064
def links(self):
    """Return hyperlink dicts for this resource and its foreign keys.

    Each populated foreign key yields a 'related' link; a 'self' link
    is always appended last.
    """
    links = []
    for foreign_key in self.__table__.foreign_keys:
        column = foreign_key.column.name
        column_value = getattr(self, column, None)
        if column_value:
            table = foreign_key.column.table.name
            with app.app_context():
                endpoint = current_app.class_references[table]
            links.append({
                'rel': 'related',
                'uri': '/{}/{}'.format(endpoint.__name__, column_value)
            })
    links.append({'rel': 'self', 'uri': self.resource_uri()})
    return links
Return a list of links for endpoints related to the resource .
23,065
def as_dict(self, depth=0):
    """Serialize this instance's database columns to a dict.

    Decimal values are stringified for JSON safety. Populated foreign
    keys are either embedded recursively (depth > 0) or rendered as
    '<name>_url' references. Includes 'links' and 'self' entries.
    """
    result = {}
    for column in self.__table__.columns.keys():
        result[column] = getattr(self, column, None)
        if isinstance(result[column], Decimal):
            # Decimal is not JSON-serializable; stringify it.
            result[column] = str(result[column])
    result['links'] = self.links()

    for foreign_key in self.__table__.foreign_keys:
        column_name = foreign_key.column.name
        column_value = getattr(self, column_name, None)
        if not column_value:
            continue
        table = foreign_key.column.table.name
        with app.app_context():
            endpoint = current_app.class_references[table]
        session = db.session()
        resource = session.query(endpoint).get(column_value)
        if depth > 0:
            # Embed the related resource one level shallower.
            result.update({
                'rel': endpoint.__name__,
                endpoint.__name__.lower(): resource.as_dict(depth - 1)
            })
        else:
            result[endpoint.__name__.lower() + '_url'] = '/{}/{}'.format(
                endpoint.__name__, column_value)

    result['self'] = self.resource_uri()
    return result
Return a dictionary containing only the attributes which map to an instance's database columns.
23,066
def meta(cls):
    """Return {class name: {column: lowercase type string}} metadata."""
    if getattr(cls, '__from_class__', None) is not None:
        cls = cls.__from_class__
    attribute_info = {
        name: str(value.type).lower()
        for name, value in cls.__table__.columns.items()
    }
    return {cls.__name__: attribute_info}
Return a dictionary containing meta - information about the given resource .
23,067
def print_version(ctx, value):
    """Click callback: print the installed sandman version and exit.

    No-op unless ``value`` is truthy (i.e. --version was passed).
    pkg_resources is imported lazily and dropped immediately after use.
    """
    if not value:
        return
    import pkg_resources
    version = None
    try:
        version = pkg_resources.get_distribution('sandman').version
    finally:
        del pkg_resources
    click.echo(version)
    ctx.exit()
Print the current version of sandman and exit .
23,068
def _get_acceptable_response_type():
    """Decide JSON vs HTML from the request's Accept header.

    Defaults to JSON when no Accept header (or a wildcard) is given;
    raises InvalidAPIUsage(406) for unsupported content types.
    """
    accept = request.headers.get('Accept')
    if accept is None or accept in ALL_CONTENT_TYPES:
        return JSON

    offered = set(request.headers['ACCEPT'].strip().split(','))
    if offered & HTML_CONTENT_TYPES:
        return HTML
    if offered & JSON_CONTENT_TYPES:
        return JSON
    raise InvalidAPIUsage(406)
Return the mimetype for this request .
23,069
def handle_exception(error):
    """Render an InvalidAPIUsage error as JSON or HTML per the request.

    If content negotiation itself fails, fall back to a 415 JSON body.
    """
    try:
        if _get_acceptable_response_type() == JSON:
            response = jsonify(error.to_dict())
            response.status_code = error.code
            return response
        return error.abort()
    except InvalidAPIUsage:
        # Negotiation failed: unsupported media type.
        response = jsonify(error.to_dict())
        response.status_code = 415
        return response
Return a response with the appropriate status code message and content type when an InvalidAPIUsage exception is raised .
23,070
def _single_attribute_html_response(resource, name, value):
    """Render one attribute of a resource as an HTML response."""
    rendered = render_template('attribute.html',
                               resource=resource,
                               name=name,
                               value=value)
    return make_response(rendered)
Return the HTML representation of a single attribute of a resource.
23,071
def put_resource(collection, key):
    """Replace the resource at (collection, key) with the request body.

    Validates the PUT against the endpoint class and maps database
    integrity errors to a 422 InvalidAPIUsage.
    """
    resource = retrieve_resource(collection, key)
    _validate(endpoint_class(collection), request.method, resource)
    resource.replace(get_resource_data(request))
    try:
        _perform_database_action('add', resource)
    except IntegrityError as exception:
        raise InvalidAPIUsage(
            422, FORWARDED_EXCEPTION_MESSAGE.format(exception))
    return no_content_response()
Replace the resource identified by the given key and return the appropriate response .
23,072
def index():
    """Describe every registered resource type and how to reach it.

    Returns JSON metadata or the HTML index page depending on the
    request's Accept header.
    """
    classes = []
    with app.app_context():
        classes = set(current_app.class_references.values())
    if _get_acceptable_response_type() == JSON:
        meta_data = {}
        for cls in classes:
            meta_data[cls.endpoint()] = {
                'link': '/' + cls.endpoint(),
                'meta': '/' + cls.endpoint() + '/meta'
            }
        return jsonify(meta_data)
    return render_template('index.html', classes=classes)
Return information about each type of resource and how it can be accessed .
23,073
def get_meta(collection):
    """Return the JSON meta-description of one resource collection."""
    resource_class = endpoint_class(collection)
    return jsonify(resource_class.meta())
Return the meta - description of a given resource .
23,074
def abort(self):
    """Render this exception as an HTML error page response."""
    body = render_template('error.html',
                           error=self.code,
                           message=self.message)
    return make_response(body, self.code)
Return an HTML Response representation of the exception .
23,075
def generate_endpoint_classes(db, generate_pks=False):
    """Create and register a Model class for each reflected database table.

    Tables that already have an explicitly registered class are skipped.
    When ``generate_pks`` is True, tables lacking a primary key get one
    synthesized for them; otherwise such tables are mapped as-is.

    :param db: Flask-SQLAlchemy database handle to reflect
    :param bool generate_pks: add a primary key to tables lacking one
    """
    # Tables the user registered by hand; don't generate classes for those.
    seen_classes = set()
    for cls in current_app.class_references.values():
        seen_classes.add(cls.__tablename__)
    with app.app_context():
        # Populate db.metadata with every table in the database.
        db.metadata.reflect(bind=db.engine)
        for name, table in db.metadata.tables.items():
            if not name in seen_classes:
                seen_classes.add(name)
                if not table.primary_key and generate_pks:
                    cls = add_pk_if_required(db, table, name)
                else:
                    # Dynamically build a model class mapped to this table.
                    cls = type(
                        str(name),
                        (sandman_model, db.Model),
                        {'__tablename__': name})
                register(cls)
Return a list of model classes generated for each reflected database table .
23,076
def add_pk_if_required(db, table, name):
    """Return a Model class for *table*, synthesizing a primary key if needed.

    SQLAlchemy requires every mapped table to have a primary key.  For a
    reflected table without one, every column is promoted into a composite
    primary key before the class is created.

    :param db: Flask-SQLAlchemy database handle
    :param table: reflected SQLAlchemy ``Table``
    :param str name: table name, also used as the generated class name
    """
    db.metadata.reflect(bind=db.engine)
    cls_dict = {'__tablename__': name}
    if not table.primary_key:
        # No PK at all: mark every column as part of a composite key.
        for column in table.columns:
            column.primary_key = True
        # Re-register the (now keyed) table definition with the metadata.
        Table(name, db.metadata, *table.columns, extend_existing=True)
        cls_dict['__table__'] = table
        db.metadata.create_all(bind=db.engine)
    return type(str(name), (sandman_model, db.Model), cls_dict)
Return a class deriving from our Model class as well as the SQLAlchemy model .
23,077
def prepare_relationships(db, known_tables):
    """Attach SQLAlchemy relationships between the registered Model classes.

    Foreign keys are discovered by inspecting the live database; for each
    one that points at another known table, a ``relationship`` (plus a
    backref) is attached so related tables are picked up by the admin.

    :param db: Flask-SQLAlchemy database handle
    :param dict known_tables: mapping of table name to Model class
    """
    inspector = reflection.Inspector.from_engine(db.engine)
    for cls in set(known_tables.values()):
        for foreign_key in inspector.get_foreign_keys(cls.__tablename__):
            if foreign_key['referred_table'] in known_tables:
                other = known_tables[foreign_key['referred_table']]
                constrained_column = foreign_key['constrained_columns']
                # Only wire each pair of tables once, and never relate a
                # table to itself.
                if other not in cls.__related_tables__ and cls not in (
                        other.__related_tables__) and other != cls:
                    cls.__related_tables__.add(other)
                    setattr(
                        cls,
                        other.__table__.name,
                        relationship(
                            other.__name__,
                            backref=db.backref(cls.__name__.lower()),
                            foreign_keys=str(cls.__name__) + '.' + ''.join(
                                constrained_column)))
Enrich the registered Models with SQLAlchemy relationships so that related tables are correctly picked up by the admin.
23,078
def register_classes_for_admin(db_session, show_pks=True, name='admin'):
    """Register every admin-enabled Model class with a Flask-Admin view.

    :param db_session: SQLAlchemy session handed to each ModelView
    :param bool show_pks: expose primary-key columns in the edit forms
    :param str name: display name of the generated admin interface
    """
    with app.app_context():
        admin_view = Admin(current_app, name=name)
        for cls in set(
                cls for cls in current_app.class_references.values()
                if cls.use_admin):
            # Show every column of the table in the admin form.
            column_list = [
                column.name for column in cls.__table__.columns.values()]
            if hasattr(cls, '__view__'):
                # The model supplied its own view class; subclass it so the
                # generated form still lists all columns.
                admin_view_class = type(
                    'AdminView',
                    (cls.__view__,),
                    {'form_columns': column_list})
            elif show_pks:
                admin_view_class = type(
                    'AdminView',
                    (AdminModelViewWithPK,),
                    {'form_columns': column_list})
            else:
                admin_view_class = ModelView
            admin_view.add_view(admin_view_class(cls, db_session))
Registers classes for the Admin view that ultimately creates the admin interface .
23,079
def _get_classes(self, xml_document, src_path):
    """Return the ``class`` nodes of *xml_document* that describe *src_path*.

    Coverage reports may record a file name relative to one of the
    ``sources/source`` roots, as an absolute path, or relative to the git
    root, so each form is tried in turn and the first strategy that
    matches anything wins.
    """
    src_rel_path = self._to_unix_path(GitPathTool.relative_path(src_path))
    src_abs_path = self._to_unix_path(GitPathTool.absolute_path(src_path))
    sources = xml_document.findall('sources/source')
    # Empty <source/> elements carry no text; skip them.
    sources = [source.text for source in sources if source.text]
    classes = [
        class_tree
        for class_tree in xml_document.findall(".//class") or []]
    # Match by: source-root + filename, then absolute filename, then
    # git-relative filename (short-circuiting `or` chain).
    classes = (
        [clazz for clazz in classes
         if src_abs_path in [
             self._to_unix_path(
                 os.path.join(source.strip(), clazz.get('filename')))
             for source in sources]]
        or [clazz for clazz in classes
            if self._to_unix_path(clazz.get('filename')) == src_abs_path]
        or [clazz for clazz in classes
            if self._to_unix_path(clazz.get('filename')) == src_rel_path])
    return classes
Given a path and a parsed xml_document, return the class nodes containing the relevant lines.
23,080
def _cache_file(self, src_path):
    """Load coverage data for *src_path* from the XML reports, once.

    Populates ``self._info_cache[src_path]`` with ``(violations,
    measured)``, where *violations* is a set of uncovered-line Violations
    and *measured* is the set of line numbers the reports measured at
    all.  Handles Clover, JaCoCo, and Cobertura report formats,
    distinguished by the root element's attributes.
    """
    if src_path not in self._info_cache:
        violations = None
        measured = set()
        for xml_document in self._xml_roots:
            # Pick the per-format line element attribute names.
            if xml_document.findall('.[@clover]'):
                line_nodes = self._get_src_path_line_nodes_clover(
                    xml_document, src_path)
                _number = 'num'
                _hits = 'count'
            elif xml_document.findall('.[@name]'):
                line_nodes = self._get_src_path_line_nodes_jacoco(
                    xml_document, src_path)
                _number = 'nr'
                _hits = 'ci'
            else:
                line_nodes = self._get_src_path_line_nodes_cobertura(
                    xml_document, src_path)
                _number = 'number'
                _hits = 'hits'
            if line_nodes is None:
                continue
            # A line is a violation only if EVERY report that measured it
            # shows zero hits (set intersection across reports).
            if violations is None:
                violations = set(
                    Violation(int(line.get(_number)), None)
                    for line in line_nodes
                    if int(line.get(_hits, 0)) == 0)
            else:
                violations = violations & set(
                    Violation(int(line.get(_number)), None)
                    for line in line_nodes
                    if int(line.get(_hits, 0)) == 0)
            # A line counts as measured if ANY report measured it (union).
            measured = measured | set(
                int(line.get(_number)) for line in line_nodes)
        if violations is None:
            violations = set()
        self._info_cache[src_path] = (violations, measured)
Load the data from self . _xml_roots for src_path if it hasn t been already .
23,081
def _process_dupe_code_violation ( self , lines , current_line , message ) : src_paths = [ ] message_match = self . dupe_code_violation_regex . match ( message ) if message_match : for _ in range ( int ( message_match . group ( 1 ) ) ) : current_line += 1 match = self . multi_line_violation_regex . match ( lines [ current_line ] ) src_path , l_number = match . groups ( ) src_paths . append ( ( '%s.py' % src_path , l_number ) ) return src_paths
The duplicate code violation is a multi line error . This pulls out all the relevant files
23,082
def set_cwd(cls, cwd):
    """Set the working directory used to manipulate paths.

    Falls back to the process's current directory when *cwd* is falsy,
    normalizes byte strings to text, and refreshes the cached git root.
    """
    if not cwd:
        # os.getcwdu only exists on Python 2; fall back for Python 3.
        try:
            cwd = os.getcwdu()
        except AttributeError:
            cwd = os.getcwd()
    if isinstance(cwd, six.binary_type):
        cwd = cwd.decode(sys.getdefaultencoding())
    cls._cwd = cwd
    cls._root = cls._git_root()
Set the cwd that is used to manipulate paths .
23,083
def relative_path(cls, git_diff_path):
    """Return *git_diff_path* (git-root-relative) rewritten relative to cwd."""
    cwd_from_root = os.path.relpath(cls._cwd, cls._root)
    return os.path.relpath(git_diff_path, cwd_from_root)
Returns git_diff_path relative to cwd .
23,084
def _is_path_excluded ( self , path ) : exclude = self . _exclude if not exclude : return False basename = os . path . basename ( path ) if self . _fnmatch ( basename , exclude ) : return True absolute_path = os . path . abspath ( path ) match = self . _fnmatch ( absolute_path , exclude ) return match
Check if a path is excluded .
23,085
def src_paths_changed(self):
    """Return the changed source paths, sorted case-insensitively."""
    changed = self._git_diff()
    return sorted(changed, key=lambda path: path.lower())
See base class docstring .
23,086
def _get_included_diff_results ( self ) : included = [ self . _git_diff_tool . diff_committed ( self . _compare_branch ) ] if not self . _ignore_staged : included . append ( self . _git_diff_tool . diff_staged ( ) ) if not self . _ignore_unstaged : included . append ( self . _git_diff_tool . diff_unstaged ( ) ) return included
Return a list of stages to be included in the diff results .
23,087
def _git_diff(self):
    """Run git diff and return ``{src_path: [changed line numbers]}``.

    Combines the committed/staged/unstaged stages, applies the exclude
    and extension filters, drops lines a later stage deleted, and
    de-dupes/sorts the line numbers.  The result is cached after the
    first call.
    """
    if self._diff_dict is None:
        result_dict = dict()
        for diff_str in self._get_included_diff_results():
            diff_dict = self._parse_diff_str(diff_str)
            for src_path in diff_dict.keys():
                if self._is_path_excluded(src_path):
                    continue
                # If no supported extensions are configured, accept all.
                root, extension = os.path.splitext(src_path)
                extension = extension[1:].lower()
                if not self._supported_extensions or extension in self._supported_extensions:
                    added_lines, deleted_lines = diff_dict[src_path]
                    # Remove lines that a later diff stage deleted, then
                    # append the lines this stage added.
                    result_dict[src_path] = [
                        line for line in result_dict.get(src_path, [])
                        if not line in deleted_lines
                    ] + added_lines
        # Eliminate repeats and order the line numbers.
        for (src_path, lines) in result_dict.items():
            result_dict[src_path] = self._unique_ordered_lines(lines)
        self._diff_dict = result_dict
    return self._diff_dict
Run git diff and returns a dict in which the keys are changed file paths and the values are lists of line numbers .
23,088
def _parse_source_sections(self, diff_str):
    """Split git diff output into ``{src_path: [hunk lines]}``.

    Lines before the first hunk marker of each file (mode changes,
    index lines, etc.) are skipped.  Raises GitDiffError if a hunk
    appears before any source-file header.
    """
    source_dict = dict()
    src_path = None
    found_hunk = False
    for line in diff_str.split('\n'):
        # "diff --git" starts an ordinary file section; "diff --cc"
        # starts a merge-conflict section.
        if line.startswith('diff --git') or line.startswith('diff --cc'):
            src_path = self._parse_source_line(line)
            if src_path not in source_dict:
                source_dict[src_path] = []
            found_hunk = False
        else:
            # Only collect lines once the first "@@" hunk marker for
            # this file has been seen.
            if found_hunk or line.startswith('@@'):
                found_hunk = True
                if src_path is not None:
                    source_dict[src_path].append(line)
                else:
                    # A hunk with no preceding file header: malformed diff.
                    if line.startswith("@@"):
                        msg = "Hunk has no source file: '{}'".format(line)
                        raise GitDiffError(msg)
    return source_dict
Given the output of git diff return a dictionary with keys that are source file paths .
23,089
def _parse_source_line ( self , line ) : if '--git' in line : regex = self . SRC_FILE_RE elif '--cc' in line : regex = self . MERGE_CONFLICT_RE else : msg = "Do not recognize format of source in line '{}'" . format ( line ) raise GitDiffError ( msg ) groups = regex . findall ( line ) if len ( groups ) == 1 : return groups [ 0 ] else : msg = "Could not parse source path in line '{}'" . format ( line ) raise GitDiffError ( msg )
Given a source line in git diff output return the path to the source file .
23,090
def _parse_hunk_line ( self , line ) : components = line . split ( '@@' ) if len ( components ) >= 2 : hunk_info = components [ 1 ] groups = self . HUNK_LINE_RE . findall ( hunk_info ) if len ( groups ) == 1 : try : return int ( groups [ 0 ] ) except ValueError : msg = "Could not parse '{}' as a line number" . format ( groups [ 0 ] ) raise GitDiffError ( msg ) else : msg = "Could not find start of hunk in line '{}'" . format ( line ) raise GitDiffError ( msg ) else : msg = "Could not parse hunk in line '{}'" . format ( line ) raise GitDiffError ( msg )
Given a hunk line in git diff output return the line number at the start of the hunk . A hunk is a segment of code that contains changes .
23,091
def _unique_ordered_lines ( line_numbers ) : if len ( line_numbers ) == 0 : return [ ] line_set = set ( line_numbers ) return sorted ( [ line for line in line_set ] )
Given a list of line numbers return a list in which each line number is included once and the lines are ordered sequentially .
23,092
def src_paths(self):
    """Return the set of diff source files that have coverage information."""
    measured = set()
    for path, summary in self._diff_violations().items():
        # Files with no measured lines have no coverage info at all.
        if summary.measured_lines:
            measured.add(path)
    return measured
Return a list of source files in the diff for which we have coverage information .
23,093
def percent_covered(self, src_path):
    """Return the percent (float) of measured lines covered in *src_path*.

    Returns None when the file is not in the diff or none of its lines
    were measured.
    """
    summary = self._diff_violations().get(src_path)
    if summary is None:
        return None
    measured_count = len(summary.measured_lines)
    if measured_count == 0:
        return None
    uncovered_count = len(summary.lines)
    return 100 - float(uncovered_count) / measured_count * 100
Return a float percent of lines covered for the source in src_path .
23,094
def total_num_lines(self):
    """Return the total count of measured diff lines across all files."""
    return sum(
        len(summary.measured_lines)
        for summary in self._diff_violations().values())
Return the total number of lines in the diff for which we have coverage info .
23,095
def total_num_violations(self):
    """Return how many diff lines are in violation, across all files."""
    total = 0
    for summary in self._diff_violations().values():
        total += len(summary.lines)
    return total
Returns the total number of lines in the diff that are in violation .
23,096
def generate_report(self, output_file):
    """Render the report template and write it (as bytes) to *output_file*.

    No-op when the reporter defines no template.  *output_file* must be
    a file handle that accepts bytes.
    """
    if self.TEMPLATE_NAME is None:
        return
    template = TEMPLATE_ENV.get_template(self.TEMPLATE_NAME)
    rendered = template.render(self._context())
    # Jinja returns text; the output handle expects bytes.
    if isinstance(rendered, six.string_types):
        rendered = rendered.encode('utf-8')
    output_file.write(rendered)
See base class . output_file must be a file handler that takes in bytes!
23,097
def generate_css(self, output_file):
    """Render the stylesheet template and write it (as bytes) to *output_file*.

    No-op when the reporter defines no CSS template.
    """
    if self.CSS_TEMPLATE_NAME is None:
        return
    template = TEMPLATE_ENV.get_template(self.CSS_TEMPLATE_NAME)
    rendered = template.render(self._context())
    # Jinja returns text; the output handle expects bytes.
    if isinstance(rendered, six.string_types):
        rendered = rendered.encode('utf-8')
    output_file.write(rendered)
Generate an external style sheet file .
23,098
def _context(self):
    """Build the template-rendering context for the report.

    Includes per-file stats, the aggregate totals, and (when snippets
    are enabled) the snippet CSS style definitions.
    """
    # Per-file statistics for every measured source path.
    src_stats = {src: self._src_path_stats(src) for src in self.src_paths()}
    # Include snippet style info only if we display source snippets.
    if self.INCLUDE_SNIPPETS:
        snippet_style = Snippet.style_defs()
    else:
        snippet_style = None
    return {
        'css_url': self.css_url,
        'report_name': self.coverage_report_name(),
        'diff_name': self.diff_report_name(),
        'src_stats': src_stats,
        'total_num_lines': self.total_num_lines(),
        'total_num_violations': self.total_num_violations(),
        'total_percent_covered': self.total_percent_covered(),
        'snippet_style': snippet_style
    }
Return the context to pass to the template .
23,099
def combine_adjacent_lines(line_numbers):
    """Collapse sorted line numbers into strings, merging adjacent runs.

    E.g. ``[1, 2, 3, 5]`` becomes ``['1-3', '5']``.

    Fixes a defect in the previous version, which appended a ``None``
    sentinel directly onto the caller's list, mutating the argument as a
    side effect.

    :param line_numbers: sorted collection of line numbers
    :return: list of strings, one per run ("start-end") or lone number
    """
    combined = []
    if not line_numbers:
        return combined
    start = line_numbers[0]
    end = None
    # Iterate over a copy with a trailing sentinel so the final run is
    # flushed without mutating the input list.
    for number in list(line_numbers[1:]) + [None]:
        if number is not None and (end if end else start) + 1 == number:
            end = number
        else:
            # Run broken (or sentinel reached): emit the completed run.
            combined.append(
                "{0}-{1}".format(start, end) if end else str(start))
            start = number
            end = None
    return combined
Given a sorted collection of line numbers this will turn them to strings and combine adjacent values