idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
26,000
def build(self, tool):
    """Build the project with every validated tool.

    Returns 0 when all builds succeed, -1 when validation fails or any
    single build fails (remaining tools are still attempted).
    """
    tools = self._validate_tools(tool)
    if tools == -1:
        return -1
    result = 0
    for build_tool in tools:
        builder = ToolsSupported().get_tool(build_tool)  # None is an error
        if builder is None:
            # Bug fix: log the requested tool name, not the None builder object.
            logger.debug("Tool: %s was not found" % build_tool)
            result = -1
            continue
        logger.debug("Building for tool: %s", build_tool)
        logger.debug(self.generated_files)
        if builder(self.generated_files[build_tool], self.settings).build_project() == -1:
            # if one fails, set to -1 to report
            result = -1
    return result
build the project
157
3
26,001
def get_generated_project_files(self, tool):
    """Get generated project files; the content depends on the tool implementation."""
    exporter_class = ToolsSupported().get_tool(tool)
    exporter = exporter_class(self.generated_files[tool], self.settings)
    return exporter.get_generated_project_files()
Get generated project files the content depends on a tool . Look at tool implementation
55
15
26,002
def export_project(self):
    """Render the armcc Makefile for the workspace.

    Returns a copy of self.generated_projects with 'path' and
    'files'['makefile'] filled in from the rendered jinja template.
    """
    generated_projects = deepcopy(self.generated_projects)
    self.process_data_for_makefile(self.workspace)
    # gen_file_jinja returns (output_path, generated_file)
    generated_projects['path'], generated_projects['files']['makefile'] = self.gen_file_jinja(
        'makefile_armcc.tmpl', self.workspace, 'Makefile', self.workspace['output_dir']['path'])
    return generated_projects
Processes misc options specific for GCC ARM and run generator
109
11
26,003
def _parse_specific_options ( self , data ) : data [ 'common_flags' ] = [ ] data [ 'ld_flags' ] = [ ] data [ 'c_flags' ] = [ ] data [ 'cxx_flags' ] = [ ] data [ 'asm_flags' ] = [ ] for k , v in data [ 'misc' ] . items ( ) : if type ( v ) is list : if k not in data : data [ k ] = [ ] data [ k ] . extend ( v ) else : if k not in data : data [ k ] = '' data [ k ] = v
Parse all specific settings.
135
8
26,004
def export_project(self):
    """Render the GCC Makefile and a Sublime Text project for the workspace.

    Returns a copy of self.generated_project with 'path' and the generated
    'makefile'/'sublimetext' file entries filled in.
    """
    output = copy.deepcopy(self.generated_project)
    self.process_data_for_makefile(self.workspace)
    self._fix_sublime_paths(self.workspace)
    # NOTE(review): linker_options is cleared here — presumably already
    # consumed by process_data_for_makefile; confirm before changing.
    self.workspace['linker_options'] = []
    output['path'], output['files']['makefile'] = self.gen_file_jinja(
        'makefile_gcc.tmpl', self.workspace, 'Makefile', self.workspace['output_dir']['path'])
    # tell the sublime template how to drive the build
    self.workspace['buildsys_name'] = 'Make'
    self.workspace['buildsys_cmd'] = 'make all'
    path, output['files']['sublimetext'] = self.gen_file_jinja(
        'sublimetext.sublime-project.tmpl', self.workspace,
        '%s.sublime-project' % self.workspace['name'], self.workspace['output_dir']['path'])
    generated_projects = output
    return generated_projects
Processes misc options specific for GCC ARM and run generator .
250
12
26,005
def fix_paths(project_data, rel_path, extensions):
    """Normalize paths stored under each key in extensions, in place.

    Each listed key of project_data may hold a dict of path lists, a plain
    list of paths, or a single path; every path is joined with rel_path and
    normalized. Fixes: named helper instead of a lambda assignment (E731),
    isinstance instead of `type(...) is` checks.
    """
    def normalize(path):
        return os.path.normpath(os.path.join(rel_path, path))

    for key in extensions:
        entry = project_data[key]
        if isinstance(entry, dict):
            for group, paths in entry.items():
                entry[group] = [normalize(p) for p in paths]
        elif isinstance(entry, list):
            project_data[key] = [normalize(p) for p in entry]
        else:
            project_data[key] = normalize(entry)
Fix paths for extension list
158
5
26,006
def _make_cls ( self , value ) : if isinstance ( value , self . _cls ) : return value return self . _cls ( value )
If value is not instance of self . _cls converts and returns it . Otherwise returns value .
36
20
26,007
def _send_request(self, request):
    """Establish a connection and return an LRSResponse for the given HTTPRequest.

    Builds headers (version, optional auth, request-specific), encodes the
    query parameters, resolves the target URL (absolute resource or
    endpoint-relative), performs the HTTP(S) request, and wraps the result.
    Success is any 2xx status, or 404 when the request opts in via ignore404.
    """
    headers = {"X-Experience-API-Version": self.version}
    if self.auth is not None:
        headers["Authorization"] = self.auth
    headers.update(request.headers)
    params = request.query_params
    # Python 2 code path: unicode()/urllib.urlencode/httplib throughout
    params = {k: unicode(params[k]).encode('utf-8') for k in params.keys()}
    params = urllib.urlencode(params)
    if request.resource.startswith('http'):
        url = request.resource
    else:
        url = self.endpoint
        url += request.resource
    parsed = urlparse(url)
    if parsed.scheme == "https":
        web_req = httplib.HTTPSConnection(parsed.hostname, parsed.port)
    else:
        web_req = httplib.HTTPConnection(parsed.hostname, parsed.port)
    path = parsed.path
    # NOTE(review): this appends "?" whenever a path exists, even with no
    # query or params, and concatenates query and params with no "&"
    # between them — looks suspect; confirm against callers before changing.
    if parsed.query or parsed.path:
        path += "?"
        if parsed.query:
            path += parsed.query
        if params:
            path += params
    if hasattr(request, "content") and request.content is not None:
        web_req.request(
            method=request.method,
            url=path,
            body=request.content,
            headers=headers,
        )
    else:
        web_req.request(
            method=request.method,
            url=path,
            headers=headers,
        )
    response = web_req.getresponse()
    data = response.read()
    web_req.close()
    if (200 <= response.status < 300
            or (response.status == 404 and hasattr(request, "ignore404")
                and request.ignore404)):
        success = True
    else:
        success = False
    return LRSResponse(
        success=success,
        request=request,
        response=response,
        data=data,
    )
Establishes connection and returns http response based off of request .
385
13
26,008
def about(self):
    """Fetch the LRS "about" resource; on success, content holds an About object."""
    response = self._send_request(HTTPRequest(method="GET", resource="about"))
    if response.success:
        response.content = About.from_json(response.data)
    return response
Gets about response from LRS
70
7
26,009
def save_statement(self, statement):
    """Save a statement to the LRS, assigning the server id when it had none.

    Coerces plain values to Statement, POSTs to "statements" (or PUTs when
    the statement already has an id), and on success exposes the saved
    Statement as the response content.
    """
    if not isinstance(statement, Statement):
        statement = Statement(statement)
    request = HTTPRequest(
        method="POST",
        resource="statements"
    )
    if statement.id is not None:
        request.method = "PUT"
        request.query_params["statementId"] = statement.id
    request.headers["Content-Type"] = "application/json"
    request.content = statement.to_json(self.version)
    lrs_response = self._send_request(request)
    if lrs_response.success:
        if statement.id is None:
            # a POST response body is a JSON list of new statement ids
            statement.id = json.loads(lrs_response.data)[0]
        lrs_response.content = statement
    return lrs_response
Save statement to LRS and update statement id if necessary
164
11
26,010
def save_statements(self, statements):
    """Save a batch of statements; on success each one receives its server id.

    Coerces to StatementList, POSTs the JSON body, then zips the returned
    id list onto the statements and exposes them as content.
    """
    if not isinstance(statements, StatementList):
        statements = StatementList(statements)
    request = HTTPRequest(
        method="POST",
        resource="statements"
    )
    request.headers["Content-Type"] = "application/json"
    request.content = statements.to_json()
    lrs_response = self._send_request(request)
    if lrs_response.success:
        id_list = json.loads(lrs_response.data)
        for s, statement_id in zip(statements, id_list):
            s.id = statement_id
        lrs_response.content = statements
    return lrs_response
Save statements to LRS and update their statement ids
147
11
26,011
def retrieve_statement(self, statement_id):
    """Retrieve a single statement from the LRS by its id."""
    req = HTTPRequest(method="GET", resource="statements")
    req.query_params["statementId"] = statement_id
    result = self._send_request(req)
    if result.success:
        result.content = Statement.from_json(result.data)
    return result
Retrieve a statement from the server from its id
92
10
26,012
def query_statements(self, query):
    """Query the LRS "statements" resource with the given parameter dict.

    agent is serialized to JSON, verb/activity collapse to their ids, only
    recognized simple keys are passed through, and None values are dropped.
    On success, content is a StatementsResult.
    """
    params = {}
    param_keys = [
        "registration",
        "since",
        "until",
        "limit",
        "ascending",
        "related_activities",
        "related_agents",
        "format",
        "attachments",
    ]
    # Python 2 dict iteration
    for k, v in query.iteritems():
        if v is not None:
            if k == "agent":
                params[k] = v.to_json(self.version)
            elif k == "verb" or k == "activity":
                params[k] = v.id
            elif k in param_keys:
                params[k] = v
    request = HTTPRequest(method="GET", resource="statements")
    request.query_params = params
    lrs_response = self._send_request(request)
    if lrs_response.success:
        lrs_response.content = StatementsResult.from_json(lrs_response.data)
    return lrs_response
Query the LRS for statements with specified parameters
214
9
26,013
def more_statements(self, more_url):
    """Fetch the next page of statements from a StatementsResult or its "more" URL."""
    if isinstance(more_url, StatementsResult):
        more_url = more_url.more
    request = HTTPRequest(
        method="GET",
        resource=self.get_endpoint_server_root() + more_url,
    )
    lrs_response = self._send_request(request)
    if lrs_response.success:
        lrs_response.content = StatementsResult.from_json(lrs_response.data)
    return lrs_response
Query the LRS for more statements
119
7
26,014
def retrieve_state_ids(self, activity, agent, registration=None, since=None):
    """Retrieve the state ids stored for an activity/agent pair.

    Coerces activity/agent, adds optional registration and since filters,
    and on success parses the JSON id list into content.
    """
    if not isinstance(activity, Activity):
        activity = Activity(activity)
    if not isinstance(agent, Agent):
        agent = Agent(agent)
    request = HTTPRequest(method="GET", resource="activities/state")
    request.query_params = {
        "activityId": activity.id,
        "agent": agent.to_json(self.version)
    }
    if registration is not None:
        request.query_params["registration"] = registration
    if since is not None:
        request.query_params["since"] = since
    lrs_response = self._send_request(request)
    if lrs_response.success:
        lrs_response.content = json.loads(lrs_response.data)
    return lrs_response
Retrieve state ids from the LRS with the provided parameters
187
13
26,015
def retrieve_state(self, activity, agent, state_id, registration=None):
    """Retrieve a single state document; a 404 is treated as a successful miss.

    On success wraps the body in a StateDocument and copies the
    lastModified/contentType/etag response headers onto it.
    """
    if not isinstance(activity, Activity):
        activity = Activity(activity)
    if not isinstance(agent, Agent):
        agent = Agent(agent)
    request = HTTPRequest(method="GET", resource="activities/state", ignore404=True)
    request.query_params = {
        "activityId": activity.id,
        "agent": agent.to_json(self.version),
        "stateId": state_id
    }
    if registration is not None:
        request.query_params["registration"] = registration
    lrs_response = self._send_request(request)
    if lrs_response.success:
        doc = StateDocument(
            id=state_id,
            content=lrs_response.data,
            activity=activity,
            agent=agent
        )
        if registration is not None:
            doc.registration = registration
        # NOTE(review): httplib's getheaders() returns a list of
        # (name, value) pairs, so these "in"/subscript checks look like they
        # never match — confirm intended behavior before changing.
        headers = lrs_response.response.getheaders()
        if "lastModified" in headers and headers["lastModified"] is not None:
            doc.timestamp = headers["lastModified"]
        if "contentType" in headers and headers["contentType"] is not None:
            doc.content_type = headers["contentType"]
        if "etag" in headers and headers["etag"] is not None:
            doc.etag = headers["etag"]
        lrs_response.content = doc
    return lrs_response
Retrieve state from LRS with the provided parameters
319
10
26,016
def save_state(self, state):
    """Save a state document to the LRS; returns the response with content set.

    Bug fix: the original sent the request twice — once to build
    lrs_response, then again via `return self._send_request(request)`,
    discarding the response that carried the content. The request is now
    sent exactly once.
    """
    request = HTTPRequest(
        method="PUT",
        resource="activities/state",
        content=state.content,
    )
    if state.content_type is not None:
        request.headers["Content-Type"] = state.content_type
    else:
        request.headers["Content-Type"] = "application/octet-stream"
    if state.etag is not None:
        request.headers["If-Match"] = state.etag
    request.query_params = {
        "stateId": state.id,
        "activityId": state.activity.id,
        "agent": state.agent.to_json(self.version)
    }
    lrs_response = self._send_request(request)
    lrs_response.content = state
    return lrs_response
Save a state doc to the LRS
184
8
26,017
def _delete_state(self, activity, agent, state_id=None, registration=None, etag=None):
    """Delete state document(s) for an activity/agent pair.

    With state_id deletes a single document, without it all documents for
    the pair; etag (If-Match) and registration are optional filters.
    """
    if not isinstance(activity, Activity):
        activity = Activity(activity)
    if not isinstance(agent, Agent):
        agent = Agent(agent)
    request = HTTPRequest(method="DELETE", resource="activities/state")
    if etag is not None:
        request.headers["If-Match"] = etag
    request.query_params = {
        "activityId": activity.id,
        "agent": agent.to_json(self.version)
    }
    if state_id is not None:
        request.query_params["stateId"] = state_id
    if registration is not None:
        request.query_params["registration"] = registration
    lrs_response = self._send_request(request)
    return lrs_response
Private method to delete a specified state from the LRS
194
11
26,018
def delete_state(self, state):
    """Delete the given state document from the LRS."""
    kwargs = {
        "activity": state.activity,
        "agent": state.agent,
        "state_id": state.id,
        "etag": state.etag,
    }
    return self._delete_state(**kwargs)
Delete a specified state from the LRS
46
8
26,019
def retrieve_activity_profile(self, activity, profile_id):
    """Retrieve an activity profile document; a 404 counts as a successful miss.

    On success wraps the body in an ActivityProfileDocument and copies the
    lastModified/contentType/etag response headers onto it.
    """
    if not isinstance(activity, Activity):
        activity = Activity(activity)
    request = HTTPRequest(method="GET", resource="activities/profile", ignore404=True)
    request.query_params = {
        "profileId": profile_id,
        "activityId": activity.id
    }
    lrs_response = self._send_request(request)
    if lrs_response.success:
        doc = ActivityProfileDocument(
            id=profile_id,
            content=lrs_response.data,
            activity=activity
        )
        # NOTE(review): getheaders() yields (name, value) tuples, so these
        # membership tests look ineffective — confirm intended behavior.
        headers = lrs_response.response.getheaders()
        if "lastModified" in headers and headers["lastModified"] is not None:
            doc.timestamp = headers["lastModified"]
        if "contentType" in headers and headers["contentType"] is not None:
            doc.content_type = headers["contentType"]
        if "etag" in headers and headers["etag"] is not None:
            doc.etag = headers["etag"]
        lrs_response.content = doc
    return lrs_response
Retrieve activity profile with the specified parameters
251
8
26,020
def save_activity_profile(self, profile):
    """Save an activity profile document to the LRS; returns the response with content set."""
    request = HTTPRequest(
        method="PUT",
        resource="activities/profile",
        content=profile.content
    )
    # default to a binary content type when the document does not declare one
    if profile.content_type is not None:
        request.headers["Content-Type"] = profile.content_type
    else:
        request.headers["Content-Type"] = "application/octet-stream"
    if profile.etag is not None:
        request.headers["If-Match"] = profile.etag
    request.query_params = {
        "profileId": profile.id,
        "activityId": profile.activity.id
    }
    lrs_response = self._send_request(request)
    lrs_response.content = profile
    return lrs_response
Save an activity profile doc to the LRS
163
9
26,021
def delete_activity_profile(self, profile):
    """Delete an activity profile document from the LRS."""
    req = HTTPRequest(method="DELETE", resource="activities/profile")
    req.query_params = {
        "profileId": profile.id,
        "activityId": profile.activity.id,
    }
    etag = profile.etag
    if etag is not None:
        req.headers["If-Match"] = etag
    return self._send_request(req)
Delete activity profile doc from LRS
95
7
26,022
def retrieve_agent_profile(self, agent, profile_id):
    """Retrieve an agent profile document; a 404 counts as a successful miss.

    On success wraps the body in an AgentProfileDocument and copies the
    lastModified/contentType/etag response headers onto it.
    """
    if not isinstance(agent, Agent):
        agent = Agent(agent)
    request = HTTPRequest(method="GET", resource="agents/profile", ignore404=True)
    request.query_params = {
        "profileId": profile_id,
        "agent": agent.to_json(self.version)
    }
    lrs_response = self._send_request(request)
    if lrs_response.success:
        doc = AgentProfileDocument(
            id=profile_id,
            content=lrs_response.data,
            agent=agent
        )
        # NOTE(review): getheaders() yields (name, value) tuples, so these
        # membership tests look ineffective — confirm intended behavior.
        headers = lrs_response.response.getheaders()
        if "lastModified" in headers and headers["lastModified"] is not None:
            doc.timestamp = headers["lastModified"]
        if "contentType" in headers and headers["contentType"] is not None:
            doc.content_type = headers["contentType"]
        if "etag" in headers and headers["etag"] is not None:
            doc.etag = headers["etag"]
        lrs_response.content = doc
    return lrs_response
Retrieve agent profile with the specified parameters
256
8
26,023
def save_agent_profile(self, profile):
    """Save an agent profile document to the LRS; returns the response with content set."""
    request = HTTPRequest(
        method="PUT",
        resource="agents/profile",
        content=profile.content,
    )
    # default to a binary content type when the document does not declare one
    if profile.content_type is not None:
        request.headers["Content-Type"] = profile.content_type
    else:
        request.headers["Content-Type"] = "application/octet-stream"
    if profile.etag is not None:
        request.headers["If-Match"] = profile.etag
    request.query_params = {
        "profileId": profile.id,
        "agent": profile.agent.to_json(self.version)
    }
    lrs_response = self._send_request(request)
    lrs_response.content = profile
    return lrs_response
Save an agent profile doc to the LRS
169
9
26,024
def delete_agent_profile(self, profile):
    """Delete an agent profile document from the LRS."""
    req = HTTPRequest(method="DELETE", resource="agents/profile")
    req.query_params = {
        "profileId": profile.id,
        "agent": profile.agent.to_json(self.version),
    }
    etag = profile.etag
    if etag is not None:
        req.headers["If-Match"] = etag
    return self._send_request(req)
Delete agent profile doc from LRS
100
7
26,025
def get_endpoint_server_root(self):
    """Return scheme://host[:port] parsed from this LRS's endpoint."""
    parsed = urlparse(self._endpoint)
    parts = [parsed.scheme, "://", parsed.hostname]
    if parsed.port is not None:
        parts.append(":")
        parts.append(unicode(parsed.port))
    return "".join(parts)
Parses the RemoteLRS object's endpoint and returns its root
60
13
26,026
def from_json(cls, json_data):
    """Deserialize json_data and build an instance of cls from the parsed value.

    Runs the instance's _from_json post-processing hook when one exists.
    """
    parsed = json.loads(json_data)
    instance = cls(parsed)
    if hasattr(instance, "_from_json"):
        instance._from_json()
    return instance
Tries to convert a JSON representation to an object of the same type as self
53
16
26,027
def to_json(self, version=Version.latest):
    """Serialize this object to a JSON string for the given xAPI spec version."""
    return json.dumps(self.as_version(version))
Tries to convert an object into a JSON representation and return the resulting string
28
15
26,028
def as_version(self, version=Version.latest):
    """Return a plain dict/list representation adjusted for the given version.

    Recurses into nested SerializableBase values and lists, converts UUIDs,
    timedeltas and datetimes to JSON-friendly strings, applies the
    _props_corrected name mapping, and drops None-valued entries.
    """
    if not isinstance(self, list):
        result = {}
        # Python 2 iteration: dict-like objects iterate their items,
        # other objects iterate their instance attributes
        for k, v in self.iteritems() if isinstance(self, dict) else vars(self).iteritems():
            k = self._props_corrected.get(k, k)
            if isinstance(v, SerializableBase):
                result[k] = v.as_version(version)
            elif isinstance(v, list):
                result[k] = []
                for val in v:
                    if isinstance(val, SerializableBase):
                        result[k].append(val.as_version(version))
                    else:
                        result[k].append(val)
            elif isinstance(v, uuid.UUID):
                result[k] = unicode(v)
            elif isinstance(v, datetime.timedelta):
                result[k] = jsonify_timedelta(v)
            elif isinstance(v, datetime.datetime):
                result[k] = jsonify_datetime(v)
            else:
                result[k] = v
        result = self._filter_none(result)
    else:
        result = []
        for v in self:
            if isinstance(v, SerializableBase):
                result.append(v.as_version(version))
            else:
                result.append(v)
    return result
Returns a dict that has been modified based on versioning in order to be represented in JSON properly
303
19
26,029
def _filter_none ( obj ) : result = { } for k , v in obj . iteritems ( ) : if v is not None : if k . startswith ( '_' ) : k = k [ 1 : ] result [ k ] = v return result
Filters out attributes set to None prior to serialization and returns a new object without those attributes . This saves the serializer from sending empty bytes over the network . This method also fixes the keys to look as expected by ignoring a leading _ if it is present .
58
53
26,030
def jsonify_timedelta(value):
    """Convert a datetime.timedelta to an ISO 8601 duration string.

    Larger units are emitted only when a bigger unit is present
    (e.g. PT05S, PT01M30S, P1DT00H00M00S); fractional seconds keep up to
    six digits with trailing zeros stripped.
    """
    assert isinstance(value, datetime.timedelta)

    # break total seconds down into day/hour/minute/second components
    total = value.total_seconds()
    minutes, seconds = divmod(total, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)
    days, hours, minutes = int(days), int(hours), int(minutes)
    seconds = round(seconds, 6)

    date_part = '%sD' % days if days else ''

    time_part = u'T'
    show_hours = date_part or hours
    if show_hours:
        time_part += '{:02}H'.format(hours)
    show_minutes = show_hours or minutes
    if show_minutes:
        time_part += '{:02}M'.format(minutes)

    if seconds.is_integer():
        sec_text = '{:02}'.format(int(seconds))
    else:
        # 9 chars wide with a leading zero, 6 digits after the decimal
        # point, then drop trailing zeros
        sec_text = ('%09.6f' % seconds).rstrip('0')
    time_part += '{}S'.format(sec_text)

    return u'P' + date_part + time_part
Converts a datetime . timedelta to an ISO 8601 duration string for JSON - ification .
277
21
26,031
def zip_dicts(left, right, prefix=()):
    """Walk two nested dicts in parallel, yielding leaf-level matches.

    Yields (path, left_dict, left_value, right_dict, right_value) for every
    non-dict leaf of left; branches missing on the right yield None values.
    """
    for key in left:
        left_value = left[key]
        right_value = right.get(key)
        path = prefix + (key,)
        if not isinstance(left_value, dict):
            yield path, left, left_value, right, right_value
        else:
            # descend in parallel, substituting {} for a missing right branch
            for item in zip_dicts(left_value, right_value or {}, path):
                yield item
Modified zip through two dictionaries .
94
8
26,032
def configure(defaults, metadata, loader):
    """Build a fresh Configuration: seed with defaults, merge loaded values, validate."""
    loaded = loader(metadata)
    config = Configuration(defaults)
    config.merge(loaded)
    validate(defaults, metadata, config)
    return config
Build a fresh configuration .
35
5
26,033
def boolean(value):
    """Configuration-friendly boolean converter.

    Real booleans pass through unchanged, the empty string maps to False,
    and any other value is delegated to strtobool.
    """
    if isinstance(value, bool):
        return value
    return False if value == "" else strtobool(value)
Configuration - friendly boolean type converter .
32
7
26,034
def _load_from_environ(metadata, value_func=None):
    """Load configuration from environment variables matching the service name.

    Only envvars whose first double-underscore-separated part equals the
    uppercased metadata name are considered; value_func optionally converts
    each value.
    """
    # We'll match the envvar name against the metadata's name. The envvar
    # name must be uppercase and hyphens in names converted to underscores.
    #
    # | envar       | name    | matches? |
    # +-------------+---------+----------+
    # | FOO_BAR     | foo     | yes      |
    # | FOO_BAR     | bar     | no       |
    # | foo_bar     | bar     | no       |
    # | FOO_BAR_BAZ | foo_bar | yes      |
    # | FOO_BAR_BAZ | foo-bar | yes      |
    # +-------------+---------+----------+
    prefix = metadata.name.upper().replace("-", "_")
    return expand_config(
        environ,
        separator="__",
        skip_to=1,
        key_parts_filter=lambda key_parts: len(key_parts) > 1 and key_parts[0] == prefix,
        value_func=lambda value: value_func(value) if value_func else value,
    )
Load configuration from environment variables .
237
6
26,035
def load_from_dict(dct=None, **kwargs):
    """Return a loader that yields configuration from a fixed dictionary.

    Bug fix: the original called dct.update(kwargs) directly on the
    caller's dictionary, mutating it as a side effect; the input is now
    copied before kwargs are applied.
    """
    merged = dict(dct or {})
    merged.update(kwargs)

    def _load_from_dict(metadata):
        # fresh copy per call so callers cannot corrupt the source
        return dict(merged)

    return _load_from_dict
Load configuration from a dictionary .
61
6
26,036
def binding(key, registry=None):
    """Create a decorator that binds a factory function to key in a registry.

    Falls back to the module-level registry when none is supplied.
    """
    target_registry = _registry if registry is None else registry

    def decorator(func):
        target_registry.bind(key, func)
        return func

    return decorator
Creates a decorator that binds a factory function to a key .
40
14
26,037
def defaults(**kwargs):
    """Create a decorator that saves the given kwargs as defaults for a factory.

    The values are stored on the decorated function under the DEFAULTS
    attribute key; the function itself is returned unchanged.
    """
    def decorator(func):
        setattr(func, DEFAULTS, kwargs)
        return func
    return decorator
Creates a decorator that saves the provided kwargs as defaults for a factory function .
35
19
26,038
def _invoke_hook ( hook_name , target ) : try : for value in getattr ( target , hook_name ) : func , args , kwargs = value func ( target , * args , * * kwargs ) except AttributeError : # no hook defined pass except ( TypeError , ValueError ) : # hook not properly defined (might be a mock) pass
Generic hook invocation .
81
4
26,039
def _register_hook ( hook_name , target , func , * args , * * kwargs ) : call = ( func , args , kwargs ) try : getattr ( target , hook_name ) . append ( call ) except AttributeError : setattr ( target , hook_name , [ call ] )
Generic hook registration .
69
4
26,040
def on_resolve(target, func, *args, **kwargs):
    """Register func(target, *args, **kwargs) to run when target is resolved."""
    return _register_hook(ON_RESOLVE, target, func, *args, **kwargs)
Register a resolution hook .
45
5
26,041
def create_cache(name):
    """Instantiate the Cache subclass whose name() matches, defaulting to NaiveCache."""
    by_name = {}
    for subclass in Cache.__subclasses__():
        by_name[subclass.name()] = subclass
    cache_cls = by_name.get(name, NaiveCache)
    return cache_cls()
Create a cache by name .
43
6
26,042
def get_config_filename(metadata):
    """Return the config file path from the <NAME>__SETTINGS envvar, or None if unset."""
    envvar = "{}__SETTINGS".format(underscore(metadata.name).upper())
    return environ.get(envvar)
Derive a configuration file name from the FOO_SETTINGS environment variable .
49
17
26,043
def _load_from_file(metadata, load_func):
    """Load configuration from the metadata-derived config file, or {} when none is set."""
    config_filename = get_config_filename(metadata)
    if config_filename is None:
        return dict()
    with open(config_filename, "r") as fileobj:
        parsed = load_func(fileobj.read())
    return dict(parsed)
Load configuration from a file .
71
6
26,044
def get_scoped_config(self, graph):
    """Compute this factory's configuration using the current scope.

    Defaults come from the factory function; loaded values come from
    graph.config, narrowed to the current scope when one is active.
    """
    def loader(metadata):
        if not self.current_scope:
            target = graph.config
        else:
            target = graph.config.get(self.current_scope, {})
        return {
            self.key: target.get(self.key, {}),
        }
    defaults = {
        self.key: get_defaults(self.func),
    }
    return configure(defaults, graph.metadata, loader)
Compute a configuration using the current scope .
99
9
26,045
def resolve(self, graph):
    """Resolve this scoped component, consulting the graph cache first."""
    existing = graph.get(self.scoped_key)
    if existing:
        return existing
    created = self.create(graph)
    graph.assign(self.scoped_key, created)
    return created
Resolve a scoped component respecting the graph cache .
49
11
26,046
def create(self, graph):
    """Create a new component inside a graph scoped to this factory's configuration."""
    config = self.get_scoped_config(graph)
    return self.func(ScopedGraph(graph, config))
Create a new scoped component .
50
7
26,047
def infect(cls, graph, key, default_scope=None):
    """Forcibly convert an entry-point based factory for key into a ScopedFactory."""
    func = graph.factory_for(key)
    if isinstance(func, cls):
        # unwrap an already-scoped factory so we don't nest them
        func = func.func
    factory = cls(key, func, default_scope)
    graph._registry.factories[key] = factory
    return factory
Forcibly convert an entry - point based factory to a ScopedFactory .
69
16
26,048
def load_each(*loaders):
    """Loader factory that combines a series of loaders into one."""
    def _load_each(metadata):
        partials = (loader(metadata) for loader in loaders)
        return merge(partials)
    return _load_each
Loader factory that combines a series of loaders .
37
10
26,049
def all(self):
    """Return a synthetic dictionary of all factories: entry points plus explicit bindings."""
    combined = chain(self.entry_points.items(), self.factories.items())
    return {key: value for key, value in combined}
Return a synthetic dictionary of all factories .
37
8
26,050
def defaults(self):
    """Return a nested dictionary of all registered factory defaults."""
    return {
        key: get_defaults(factory)
        for key, factory in self.all.items()
    }
Return a nested dictionary of all registered factory defaults.
30
12
26,051
def bind(self, key, factory):
    """Bind factory to key, refusing to overwrite an existing binding."""
    if key in self.factories:
        raise AlreadyBoundError(key)
    self.factories[key] = factory
Bind a factory to a key .
34
7
26,052
def resolve(self, key):
    """Resolve a key to a factory.

    Explicit bindings take precedence; when the key is not bound, fall
    back to entry-point discovery.
    """
    try:
        return self._resolve_from_binding(key)
    except NotBoundError:
        return self._resolve_from_entry_point(key)
Resolve a key to a factory .
43
8
26,053
def expand_config(dct,
                  separator='.',
                  skip_to=0,
                  key_func=lambda key: key.lower(),
                  key_parts_filter=lambda key_parts: True,
                  value_func=lambda value: value):
    """Expand a flat mapping into nested dicts by splitting keys on separator.

    separator may be a callable computing the separator per key; skip_to
    drops leading key parts (e.g. an envvar prefix); key_func normalizes
    each part; key_parts_filter skips unwanted keys; value_func converts
    the stored values.
    """
    expanded = {}
    for raw_key, raw_value in dct.items():
        sep = separator(raw_key) if callable(separator) else separator
        parts = raw_key.split(sep)
        if not key_parts_filter(parts):
            continue
        # walk/create the nested dicts down to the leaf's parent,
        # skipping the prefix
        node = expanded
        for part in parts[skip_to:-1]:
            node = node.setdefault(key_func(part), dict())
        node[key_func(parts[-1])] = value_func(raw_value)
    return expanded
Expand a dictionary recursively by splitting keys along the separator .
194
15
26,054
def create_object_graph(name,
                        debug=False,
                        testing=False,
                        import_name=None,
                        root_path=None,
                        loader=load_from_environ,
                        registry=_registry,
                        profiler=None,
                        cache=None):
    """Create a new object graph.

    Builds Metadata from the identity arguments, computes configuration
    from the registry's defaults plus the loader, and supplies a no-op
    profiler and a by-name cache when none are given.
    """
    metadata = Metadata(
        name=name,
        debug=debug,
        testing=testing,
        import_name=import_name,
        root_path=root_path,
    )
    defaults = registry.defaults
    config = configure(defaults, metadata, loader)
    if profiler is None:
        profiler = NoopProfiler()
    if cache is None or isinstance(cache, str):
        # accept either a cache instance or a cache name (or None)
        cache = create_cache(cache)
    return ObjectGraph(
        metadata=metadata,
        config=config,
        registry=registry,
        profiler=profiler,
        cache=cache,
        loader=loader,
    )
Create a new object graph .
170
6
26,055
def _reserve(self, key):
    """Temporarily reserve a component's binding while it is being created.

    Assigns the RESERVED sentinel under key, yields, and always removes
    the cache entry afterwards so the real component can be assigned.
    Generator with yield/finally — presumably wrapped with
    @contextmanager; confirm at the definition site.
    """
    self.assign(key, RESERVED)
    try:
        yield
    finally:
        del self._cache[key]
Reserve a component's binding temporarily.
34
8
26,056
def _resolve_key(self, key):
    """Attempt to lazily create, hook, and cache the component for key.

    The RESERVED placeholder set by _reserve is held during creation; the
    profiler context wraps the factory call; the resolve hook fires before
    the finished component is assigned into the graph.
    """
    with self._reserve(key):
        factory = self.factory_for(key)
        with self._profiler(key):
            component = factory(self)
        invoke_resolve_hook(component)
    # assign only after the reservation has been cleared
    return self.assign(key, component)
Attempt to lazily create a component .
66
8
26,057
def scoped_to(self, scope):
    """Context manager that switches the factory's current scope, restoring it on exit."""
    factory = self.__factory__
    saved = factory.current_scope
    try:
        factory.current_scope = scope
        yield
    finally:
        factory.current_scope = saved
Context manager to switch scopes .
56
7
26,058
def scoped(self, func):
    """Decorator that runs func inside the scope named by its "scope" kwarg.

    Falls back to the factory's default scope when the kwarg is absent;
    the kwarg itself is passed through to func untouched.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        selected = kwargs.get("scope", self.__factory__.default_scope)
        with self.scoped_to(selected):
            return func(*args, **kwargs)
    return wrapper
Decorator to switch scopes .
75
8
26,059
def merge(self, dct=None, **kwargs):
    """Recursively merge a dictionary and/or kwargs into this configuration.

    Nested Configuration values merge recursively (unless they opt out via
    __merge__ = False), list values append, everything else overwrites.

    Bug fix: the original called dct.update(**kwargs) on the caller's
    dictionary, mutating it as a side effect; kwargs are now applied to a
    copy.
    """
    if dct is None:
        dct = {}
    if kwargs:
        # copy before applying kwargs so the caller's dict is untouched
        dct = dict(dct)
        dct.update(**kwargs)
    for key, value in dct.items():
        if all((
            isinstance(value, dict),
            isinstance(self.get(key), Configuration),
            getattr(self.get(key), "__merge__", True),
        )):
            # recursively merge
            self[key].merge(value)
        elif isinstance(value, list) and isinstance(self.get(key), list):
            # append
            self[key] += value
        else:
            # set the new value
            self[key] = value
Recursively merge a dictionary or kwargs into the current dict .
162
15
26,060
def validate(self, metadata, path, value):
    """Validate this requirement at path, returning the converted value.

    A value that is still a Requirement was never configured: fall back to
    the mock value (testing only), then the default, then None when the
    requirement is optional, else fail. The final value is passed through
    the requirement's type converter.
    """
    if isinstance(value, Requirement):
        # if the RHS is still a Requirement object, it was not set
        if metadata.testing and self.mock_value is not None:
            value = self.mock_value
        elif self.default_value is not None:
            value = self.default_value
        elif not value.required:
            return None
        else:
            raise ValidationError(
                f"Missing required configuration for: {'.'.join(path)}"
            )
    try:
        return self.type(value)
    except ValueError:
        # NOTE(review): this message says "Missing" for what is actually a
        # malformed/unconvertible value — consider rewording.
        raise ValidationError(
            f"Missing required configuration for: {'.'.join(path)}: {value}"
        )
Validate this requirement .
148
5
26,061
def decode_conjure_union_type ( cls , obj , conjure_type ) : type_of_union = obj [ "type" ] # type: str for attr , conjure_field in conjure_type . _options ( ) . items ( ) : if conjure_field . identifier == type_of_union : attribute = attr conjure_field_definition = conjure_field break else : raise ValueError ( "unknown union type {0} for {1}" . format ( type_of_union , conjure_type ) ) deserialized = { } # type: Dict[str, Any] if type_of_union not in obj or obj [ type_of_union ] is None : cls . check_null_field ( obj , deserialized , conjure_field_definition ) else : value = obj [ type_of_union ] field_type = conjure_field_definition . field_type deserialized [ attribute ] = cls . do_decode ( value , field_type ) return conjure_type ( * * deserialized )
Decodes json into a conjure union type .
241
10
26,062
def decode_conjure_enum_type(cls, obj, conjure_type):
    """Decode a json string into a conjure enum member, mapping unknown names to UNKNOWN."""
    is_text = isinstance(obj, str) or str(type(obj)) == "<type 'unicode'>"
    if not is_text:
        raise Exception(
            'Expected to find str type but found {} instead'.format(type(obj)))
    if obj in conjure_type.__members__:
        return conjure_type[obj]
    return conjure_type["UNKNOWN"]
Decodes json into a conjure enum type .
107
10
26,063
def decode_list(cls, obj, element_type):
    # type: (List[Any], ConjureTypeType) -> List[Any]
    """Decode a json list, converting each element via do_decode."""
    if not isinstance(obj, list):
        raise Exception("expected a python list")
    return [cls.do_decode(element, element_type) for element in obj]
Decodes json into a list handling conversion of the elements .
79
12
26,064
def do_decode(cls, obj, obj_type):
    # type: (Any, ConjureTypeType) -> Any
    """Decode json into the specified conjure type, dispatching on obj_type."""
    if inspect.isclass(obj_type) and issubclass(  # type: ignore
            obj_type, ConjureBeanType):
        return cls.decode_conjure_bean_type(obj, obj_type)  # type: ignore
    elif inspect.isclass(obj_type) and issubclass(  # type: ignore
            obj_type, ConjureUnionType):
        return cls.decode_conjure_union_type(obj, obj_type)
    elif inspect.isclass(obj_type) and issubclass(  # type: ignore
            obj_type, ConjureEnumType):
        return cls.decode_conjure_enum_type(obj, obj_type)
    elif isinstance(obj_type, DictType):
        return cls.decode_dict(obj, obj_type.key_type, obj_type.value_type)
    elif isinstance(obj_type, ListType):
        return cls.decode_list(obj, obj_type.item_type)
    elif isinstance(obj_type, OptionalType):
        return cls.decode_optional(obj, obj_type.item_type)
    # anything else is treated as a primitive
    return cls.decode_primitive(obj, obj_type)
Decodes json into the specified type
301
7
26,065
def encode_conjure_bean_type ( cls , obj ) : # type: (ConjureBeanType) -> Any encoded = { } # type: Dict[str, Any] for attribute_name , field_definition in obj . _fields ( ) . items ( ) : encoded [ field_definition . identifier ] = cls . do_encode ( getattr ( obj , attribute_name ) ) return encoded
Encodes a conjure bean into json
93
8
26,066
def encode_conjure_union_type ( cls , obj ) : # type: (ConjureUnionType) -> Any encoded = { } # type: Dict[str, Any] encoded [ "type" ] = obj . type for attr , field_definition in obj . _options ( ) . items ( ) : if field_definition . identifier == obj . type : attribute = attr break else : raise ValueError ( "could not find attribute for union " + "member {0} of type {1}" . format ( obj . type , obj . __class__ ) ) defined_field_definition = obj . _options ( ) [ attribute ] encoded [ defined_field_definition . identifier ] = cls . do_encode ( getattr ( obj , attribute ) ) return encoded
Encodes a conjure union into json
171
8
26,067
def do_encode ( cls , obj ) : # type: (Any) -> Any if isinstance ( obj , ConjureBeanType ) : return cls . encode_conjure_bean_type ( obj ) elif isinstance ( obj , ConjureUnionType ) : return cls . encode_conjure_union_type ( obj ) elif isinstance ( obj , ConjureEnumType ) : return obj . value elif isinstance ( obj , list ) : return list ( map ( cls . do_encode , obj ) ) elif isinstance ( obj , dict ) : return { cls . do_encode ( key ) : cls . do_encode ( value ) for key , value in obj . items ( ) } else : return cls . encode_primitive ( obj )
Encodes the passed object into json
179
7
26,068
def radians_to ( self , other ) : d2r = math . pi / 180.0 lat1rad = self . latitude * d2r long1rad = self . longitude * d2r lat2rad = other . latitude * d2r long2rad = other . longitude * d2r delta_lat = lat1rad - lat2rad delta_long = long1rad - long2rad sin_delta_lat_div2 = math . sin ( delta_lat / 2.0 ) sin_delta_long_div2 = math . sin ( delta_long / 2.0 ) a = ( ( sin_delta_lat_div2 * sin_delta_lat_div2 ) + ( math . cos ( lat1rad ) * math . cos ( lat2rad ) * sin_delta_long_div2 * sin_delta_long_div2 ) ) a = min ( 1.0 , a ) return 2 * math . asin ( math . sqrt ( a ) )
Returns the distance from this GeoPoint to another in radians .
225
13
26,069
def append ( self , item ) : self . beginInsertRows ( QtCore . QModelIndex ( ) , self . rowCount ( ) , self . rowCount ( ) ) self . items . append ( item ) self . endInsertRows ( )
Append item to end of model
54
7
26,070
def on_item_toggled ( self , index , state = None ) : if not index . data ( model . IsIdle ) : return self . info ( "Cannot toggle" ) if not index . data ( model . IsOptional ) : return self . info ( "This item is mandatory" ) if state is None : state = not index . data ( model . IsChecked ) index . model ( ) . setData ( index , state , model . IsChecked ) # Withdraw option to publish if no instances are toggled play = self . findChild ( QtWidgets . QWidget , "Play" ) validate = self . findChild ( QtWidgets . QWidget , "Validate" ) any_instances = any ( index . data ( model . IsChecked ) for index in self . data [ "models" ] [ "instances" ] ) play . setEnabled ( any_instances ) validate . setEnabled ( any_instances ) # Emit signals if index . data ( model . Type ) == "instance" : instance = self . data [ "models" ] [ "instances" ] . items [ index . row ( ) ] util . defer ( 100 , lambda : self . controller . emit_ ( signal = "instanceToggled" , kwargs = { "new_value" : state , "old_value" : not state , "instance" : instance } ) ) if index . data ( model . Type ) == "plugin" : util . defer ( 100 , lambda : self . controller . emit_ ( signal = "pluginToggled" , kwargs = { "new_value" : state , "old_value" : not state , "plugin" : index . data ( model . Object ) } ) )
An item is requesting to be toggled
382
9
26,071
def on_comment_entered ( self ) : text_edit = self . findChild ( QtWidgets . QWidget , "CommentBox" ) comment = text_edit . text ( ) # Store within context context = self . controller . context context . data [ "comment" ] = comment placeholder = self . findChild ( QtWidgets . QLabel , "CommentPlaceholder" ) placeholder . setVisible ( not comment )
The user has typed a comment
93
6
26,072
def on_finished ( self ) : self . controller . is_running = False error = self . controller . current_error if error is not None : self . info ( self . tr ( "Stopped due to error(s), see Terminal." ) ) else : self . info ( self . tr ( "Finished successfully!" ) )
Finished signal handler
71
4
26,073
def reset ( self ) : self . info ( self . tr ( "About to reset.." ) ) models = self . data [ "models" ] models [ "instances" ] . store_checkstate ( ) models [ "plugins" ] . store_checkstate ( ) # Reset current ids to secure no previous instances get mixed in. models [ "instances" ] . ids = [ ] for m in models . values ( ) : m . reset ( ) for b in self . data [ "buttons" ] . values ( ) : b . hide ( ) comment_box = self . findChild ( QtWidgets . QWidget , "CommentBox" ) comment_box . hide ( ) util . defer ( 500 , self . controller . reset )
Prepare GUI for reset
163
5
26,074
def closeEvent ( self , event ) : # Make it snappy, but take care to clean it all up. # TODO(marcus): Enable GUI to return on problem, such # as asking whether or not the user really wants to quit # given there are things currently running. self . hide ( ) if self . data [ "state" ] [ "is_closing" ] : # Explicitly clear potentially referenced data self . info ( self . tr ( "Cleaning up models.." ) ) for v in self . data [ "views" ] . values ( ) : v . model ( ) . deleteLater ( ) v . setModel ( None ) self . info ( self . tr ( "Cleaning up terminal.." ) ) for item in self . data [ "models" ] [ "terminal" ] . items : del ( item ) self . info ( self . tr ( "Cleaning up controller.." ) ) self . controller . cleanup ( ) self . info ( self . tr ( "All clean!" ) ) self . info ( self . tr ( "Good bye" ) ) return super ( Window , self ) . closeEvent ( event ) self . info ( self . tr ( "Closing.." ) ) def on_problem ( ) : self . heads_up ( "Warning" , "Had trouble closing down. " "Please tell someone and try again." ) self . show ( ) if self . controller . is_running : self . info ( self . tr ( "..as soon as processing is finished.." ) ) self . controller . is_running = False self . finished . connect ( self . close ) util . defer ( 2000 , on_problem ) return event . ignore ( ) self . data [ "state" ] [ "is_closing" ] = True util . defer ( 200 , self . close ) return event . ignore ( )
Perform post - flight checks before closing
396
8
26,075
def reject ( self ) : if self . controller . is_running : self . info ( self . tr ( "Stopping.." ) ) self . controller . is_running = False
Handle ESC key
39
3
26,076
def info ( self , message ) : info = self . findChild ( QtWidgets . QLabel , "Info" ) info . setText ( message ) # Include message in terminal self . data [ "models" ] [ "terminal" ] . append ( { "label" : message , "type" : "info" } ) animation = self . data [ "animation" ] [ "display_info" ] animation . stop ( ) animation . start ( ) # TODO(marcus): Should this be configurable? Do we want # the shell to fill up with these messages? util . u_print ( message )
Print user - facing information
134
5
26,077
def rowsInserted ( self , parent , start , end ) : super ( LogView , self ) . rowsInserted ( parent , start , end ) # IMPORTANT: This must be done *after* the superclass to get # an accurate value of the delegate's height. self . scrollToBottom ( )
Automatically scroll to bottom on each new item added
66
10
26,078
def reset ( self ) : self . context = pyblish . api . Context ( ) self . plugins = pyblish . api . discover ( ) self . was_discovered . emit ( ) self . pair_generator = None self . current_pair = ( None , None ) self . current_error = None self . processing = { "nextOrder" : None , "ordersWithError" : set ( ) } self . _load ( ) self . _run ( until = pyblish . api . CollectorOrder , on_finished = self . was_reset . emit )
Discover plug - ins and run collection
124
7
26,079
def _load ( self ) : self . is_running = True self . pair_generator = self . _iterator ( self . plugins , self . context ) self . current_pair = next ( self . pair_generator , ( None , None ) ) self . current_error = None self . is_running = False
Initiate new generator and load first pair
69
9
26,080
def _process ( self , plugin , instance = None ) : self . processing [ "nextOrder" ] = plugin . order try : result = pyblish . plugin . process ( plugin , self . context , instance ) except Exception as e : raise Exception ( "Unknown error: %s" % e ) else : # Make note of the order at which the # potential error error occured. has_error = result [ "error" ] is not None if has_error : self . processing [ "ordersWithError" ] . add ( plugin . order ) return result
Produce result from plugin and instance
119
7
26,081
def _run ( self , until = float ( "inf" ) , on_finished = lambda : None ) : def on_next ( ) : if self . current_pair == ( None , None ) : return util . defer ( 100 , on_finished_ ) # The magic number 0.5 is the range between # the various CVEI processing stages; # e.g. # - Collection is 0 +- 0.5 (-0.5 - 0.5) # - Validation is 1 +- 0.5 (0.5 - 1.5) # # TODO(marcus): Make this less magical # order = self . current_pair [ 0 ] . order if order > ( until + 0.5 ) : return util . defer ( 100 , on_finished_ ) self . about_to_process . emit ( * self . current_pair ) util . defer ( 10 , on_process ) def on_process ( ) : try : result = self . _process ( * self . current_pair ) if result [ "error" ] is not None : self . current_error = result [ "error" ] self . was_processed . emit ( result ) except Exception as e : stack = traceback . format_exc ( e ) return util . defer ( 500 , lambda : on_unexpected_error ( error = stack ) ) # Now that processing has completed, and context potentially # modified with new instances, produce the next pair. # # IMPORTANT: This *must* be done *after* processing of # the current pair, otherwise data generated at that point # will *not* be included. try : self . current_pair = next ( self . pair_generator ) except StopIteration : # All pairs were processed successfully! self . current_pair = ( None , None ) return util . defer ( 500 , on_finished_ ) except Exception as e : # This is a bug stack = traceback . format_exc ( e ) self . current_pair = ( None , None ) return util . defer ( 500 , lambda : on_unexpected_error ( error = stack ) ) util . defer ( 10 , on_next ) def on_unexpected_error ( error ) : util . u_print ( u"An unexpected error occurred:\n %s" % error ) return util . defer ( 500 , on_finished_ ) def on_finished_ ( ) : on_finished ( ) self . was_finished . emit ( ) self . is_running = True util . defer ( 10 , on_next )
Process current pair and store next pair for next process
542
10
26,082
def _iterator ( self , plugins , context ) : test = pyblish . logic . registered_test ( ) for plug , instance in pyblish . logic . Iterator ( plugins , context ) : if not plug . active : continue if instance is not None and instance . data . get ( "publish" ) is False : continue self . processing [ "nextOrder" ] = plug . order if not self . is_running : raise StopIteration ( "Stopped" ) if test ( * * self . processing ) : raise StopIteration ( "Stopped due to %s" % test ( * * self . processing ) ) yield plug , instance
Yield next plug - in and instance to process .
139
11
26,083
def cleanup ( self ) : for instance in self . context : del ( instance ) for plugin in self . plugins : del ( plugin )
Forcefully delete objects from memory
28
6
26,084
def get_root_uri ( uri ) : chunks = urlsplit ( uri ) return urlunsplit ( ( chunks . scheme , chunks . netloc , chunks . path , '' , '' ) )
Return root URI - strip query and fragment .
46
9
26,085
def collect ( since , to , top = DEFAULT_TOP ) : summary = CspReportSummary ( since , to , top = top ) queryset = CSPReport . objects . filter ( created__range = ( since , to ) ) valid_queryset = queryset . filter ( is_valid = True ) invalid_queryset = queryset . filter ( is_valid = False ) summary . total_count = queryset . count ( ) summary . valid_count = valid_queryset . count ( ) # Collect sources sources = { } for report in valid_queryset : root_uri = get_root_uri ( report . document_uri ) info = sources . setdefault ( root_uri , ViolationInfo ( root_uri ) ) info . append ( report ) summary . sources = sorted ( sources . values ( ) , key = attrgetter ( 'count' ) , reverse = True ) [ : top ] # Collect blocks blocks = { } for report in valid_queryset : root_uri = get_root_uri ( report . blocked_uri ) info = blocks . setdefault ( root_uri , ViolationInfo ( root_uri ) ) info . append ( report ) summary . blocks = sorted ( blocks . values ( ) , key = attrgetter ( 'count' ) , reverse = True ) [ : top ] # Collect invalid reports summary . invalid_count = invalid_queryset . count ( ) summary . invalid_reports = tuple ( invalid_queryset [ : top ] ) return summary
Collect the CSP report .
335
6
26,086
def append ( self , report ) : assert report not in self . examples self . count += 1 if len ( self . examples ) < self . top : self . examples . append ( report )
Append a new CSP report .
40
8
26,087
def render ( self ) : engine = Engine ( ) return engine . from_string ( SUMMARY_TEMPLATE ) . render ( Context ( self . __dict__ ) )
Render the summary .
39
4
26,088
def _parse_date_input ( date_input , default_offset = 0 ) : if date_input : try : return parse_date_input ( date_input ) except ValueError as err : raise CommandError ( force_text ( err ) ) else : return get_midnight ( ) - timedelta ( days = default_offset )
Parses a date input .
73
7
26,089
def nice_report ( self ) : if not self . json : return '[no CSP report data]' try : data = json . loads ( self . json ) except ValueError : return "Invalid CSP report: '{}'" . format ( self . json ) if 'csp-report' not in data : return 'Invalid CSP report: ' + json . dumps ( data , indent = 4 , sort_keys = True , separators = ( ',' , ': ' ) ) return json . dumps ( data [ 'csp-report' ] , indent = 4 , sort_keys = True , separators = ( ',' , ': ' ) )
Return a nicely formatted original report .
141
7
26,090
def from_message ( cls , message ) : self = cls ( json = message ) try : decoded_data = json . loads ( message ) except ValueError : # Message is not a valid JSON. Return as invalid. return self try : report_data = decoded_data [ 'csp-report' ] except KeyError : # Message is not a valid CSP report. Return as invalid. return self # Extract individual fields for report_name , field_name in REQUIRED_FIELD_MAP + OPTIONAL_FIELD_MAP : setattr ( self , field_name , report_data . get ( report_name ) ) # Extract integer fields for report_name , field_name in INTEGER_FIELD_MAP : value = report_data . get ( report_name ) field = self . _meta . get_field ( field_name ) min_value , max_value = connection . ops . integer_field_range ( field . get_internal_type ( ) ) if min_value is None : min_value = 0 # All these fields are possitive. Value can't be negative. min_value = max ( min_value , 0 ) if value is not None and min_value <= value and ( max_value is None or value <= max_value ) : setattr ( self , field_name , value ) # Extract disposition disposition = report_data . get ( 'disposition' ) if disposition in dict ( DISPOSITIONS ) . keys ( ) : self . disposition = disposition # Check if report is valid is_valid = True for field_name in dict ( REQUIRED_FIELD_MAP ) . values ( ) : if getattr ( self , field_name ) is None : is_valid = False break self . is_valid = is_valid return self
Creates an instance from CSP report message .
386
10
26,091
def data ( self ) : try : data = self . _data except AttributeError : data = self . _data = json . loads ( self . json ) return data
Returns self . json loaded as a python object .
36
10
26,092
def json_as_html ( self ) : # To avoid circular import from cspreports import utils formatted_json = utils . format_report ( self . json ) return mark_safe ( "<pre>\n%s</pre>" % escape ( formatted_json ) )
Print out self . json in a nice way .
60
10
26,093
def process_report ( request ) : if config . EMAIL_ADMINS : email_admins ( request ) if config . LOG : log_report ( request ) if config . SAVE : save_report ( request ) if config . ADDITIONAL_HANDLERS : run_additional_handlers ( request )
Given the HTTP request of a CSP violation report log it in the required ways .
71
17
26,094
def get_additional_handlers ( ) : global _additional_handlers if not isinstance ( _additional_handlers , list ) : handlers = [ ] for name in config . ADDITIONAL_HANDLERS : module_name , function_name = name . rsplit ( '.' , 1 ) function = getattr ( import_module ( module_name ) , function_name ) handlers . append ( function ) _additional_handlers = handlers return _additional_handlers
Returns the actual functions from the dotted paths specified in ADDITIONAL_HANDLERS .
108
19
26,095
def parse_date_input ( value ) : try : limit = parse_date ( value ) except ValueError : limit = None if limit is None : raise ValueError ( "'{}' is not a valid date." . format ( value ) ) limit = datetime ( limit . year , limit . month , limit . day ) if settings . USE_TZ : limit = make_aware ( limit ) return limit
Return datetime based on the user s input .
87
10
26,096
def get_midnight ( ) : limit = now ( ) if settings . USE_TZ : limit = localtime ( limit ) return limit . replace ( hour = 0 , minute = 0 , second = 0 , microsecond = 0 )
Return last midnight in localtime as datetime .
50
10
26,097
def python_job ( self , function , parameters = None ) : if not callable ( function ) : raise utils . StimelaCabRuntimeError ( 'Object given as function is not callable' ) if self . name is None : self . name = function . __name__ self . job = { 'function' : function , 'parameters' : parameters , } return 0
Run python function
81
3
26,098
def pull ( image , store_path , docker = True ) : if docker : fp = "docker://{0:s}" . format ( image ) else : fp = image utils . xrun ( "singularity" , [ "pull" , "--force" , "--name" , store_path , fp ] ) return 0
pull an image
75
3
26,099
def start ( self , * args ) : if self . volumes : volumes = " --bind " + " --bind " . join ( self . volumes ) else : volumes = "" self . _print ( "Instantiating container [{0:s}]. Timeout set to {1:d}. The container ID is printed below." . format ( self . name , self . time_out ) ) utils . xrun ( "singularity instance.start" , list ( args ) + [ volumes , # "-c", self . image , self . name ] ) self . status = "created" return 0
Create a singularity container instance
128
6