idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
39,700
def quotes_by_instrument_urls(cls, client, urls):
    """Fetch marketdata quotes for the given instrument URLs.

    The URLs are joined into a comma-separated ``instruments`` query
    parameter; every page of results is accumulated into one list.
    """
    endpoint = "https://api.robinhood.com/marketdata/quotes/"
    data = client.get(endpoint, params={"instruments": ",".join(urls)})
    quotes = data["results"]
    # Follow the pagination cursor until the API stops returning one.
    while data.get("next"):
        data = client.get(data["next"])
        quotes.extend(data["results"])
    return quotes
fetch and return results
39,701
def all(cls, client, **kwargs):
    """Fetch all option positions, following pagination.

    Keyword args:
        max_date: stop once the last result's ``updated_at`` date exceeds
            this value (compared via ``is_max_date_gt``).
        max_fetches: stop after this many page fetches.
    """
    max_date = kwargs.get("max_date")
    max_fetches = kwargs.get("max_fetches")
    url = "https://api.robinhood.com/options/positions/"
    data = client.get(url, params={})
    results = data["results"]

    def _reached_max_date():
        # Guard against an empty results page before peeking at the last row
        # (the original crashed with IndexError on an empty page).
        return bool(results) and is_max_date_gt(
            max_date, results[-1]["updated_at"][0:10]
        )

    if _reached_max_date():
        return results
    if max_fetches == 1:
        return results
    fetches = 1
    while data.get("next"):
        fetches += 1
        data = client.get(data["next"])
        results.extend(data["results"])
        if _reached_max_date():
            return results
        if max_fetches and fetches >= max_fetches:
            return results
    return results
fetch all option positions
39,702
def mergein_marketdata_list(cls, client, option_positions):
    """Return option positions merged with their matching marketdata quotes.

    Each position dict is combined with the first quote whose ``instrument``
    matches the position's ``option`` URL (quote keys win on collision).
    """
    instrument_ids = cls._extract_ids(option_positions)
    marketdatas = OptionMarketdata.quotes_by_instrument_ids(client, instrument_ids)
    merged = []
    for position in option_positions:
        matches = [md for md in marketdatas if md["instrument"] == position["option"]]
        merged.append({**position, **matches[0]})
    return merged
Fetch and merge in Marketdata for each option position
39,703
def evaluateRawString(self, escaped):
    """Evaluate a raw Python-style string the way ``ast.literal_eval`` would,
    expanding backslash escapes (via ``self.ESCAPE_CHARS``) and ``\\xNN`` hex
    escapes, returning the character codes as a list of ints.
    """
    codes = []
    # hex_buf states: None = not in \xNN; '' = expect first hex digit;
    # one char = expect second hex digit.
    hex_buf = None
    pending_escape = False
    for ch in escaped:
        code = ord(ch)
        if hex_buf is not None:
            if hex_buf:
                # Second hex digit: combine both nibbles into one byte.
                code = (int(hex_buf, 16) << 4) + int(ch, 16)
                hex_buf = None
            else:
                hex_buf = ch
                continue
        if pending_escape:
            pending_escape = False
            try:
                code = self.ESCAPE_CHARS[code]
            except KeyError:
                if code == 120:  # 'x' introduces a \xNN hex escape
                    hex_buf = ''
                    continue
                raise ValueError('Unknown escape character %c' % ch)
        elif code == 92:  # backslash starts an escape sequence
            pending_escape = True
            continue
        codes.append(code)
    return codes
Evaluates raw Python string like ast . literal_eval does
39,704
def subnode_parse(self, node, pieces=None, indent=0, ignore=(), restrict=None):
    """Parse the subnodes of *node*.

    Subnodes with tags in *ignore* are skipped. If *pieces* is given it is
    used as the target for the parse results instead of ``self.pieces``.
    All lines are indented by *indent* (an int is converted to spaces); the
    initial content in *pieces* is not indented. The result is in any case
    added to ``self.pieces``.

    Fix: the ``ignore`` default was a mutable ``[]``; it is only read here,
    but a tuple default avoids the shared-mutable-default trap.
    """
    if pieces is not None:
        old_pieces, self.pieces = self.pieces, pieces
    else:
        old_pieces = []
    if type(indent) is int:
        indent = indent * ' '
    if len(indent) > 0:
        pieces = ''.join(self.pieces)
        i_piece = pieces[:len(indent)]
        # Strip the leading indent off the existing content; a trailing ''
        # piece is a marker that must be preserved.
        if self.pieces[-1:] == ['']:
            self.pieces = [pieces[len(indent):]] + ['']
        elif self.pieces != []:
            self.pieces = [pieces[len(indent):]]
        self.indent += len(indent)
    for n in node.childNodes:
        if restrict is not None:
            if n.nodeType == n.ELEMENT_NODE and n.tagName in restrict:
                self.parse(n)
        elif n.nodeType != n.ELEMENT_NODE or n.tagName not in ignore:
            self.parse(n)
    if len(indent) > 0:
        self.pieces = shift(self.pieces, indent, i_piece)
        self.indent -= len(indent)
    old_pieces.extend(self.pieces)
    self.pieces = old_pieces
Parse the subnodes of a given node . Subnodes with tags in the ignore list are ignored . If pieces is given use this as target for the parse results instead of self . pieces . Indent all lines by the amount given in indent . Note that the initial content in pieces is not indented . The final result is in any case added to self . pieces .
39,705
def surround_parse(self, node, pre_char, post_char):
    """Parse the subnodes of *node*, prepending ``pre_char`` and appending
    ``post_char`` to the output collected in ``self.pieces``.
    """
    self.add_text(pre_char)
    self.subnode_parse(node)
    self.add_text(post_char)
Parse the subnodes of a given node . Subnodes with tags in the ignore list are ignored . Prepend pre_char and append post_char to the output in self . pieces .
39,706
def get_specific_subnodes(self, node, name, recursive=0):
    """Return child ELEMENT_NODEs of *node* whose tagName equals *name*,
    descending up to *recursive* additional levels.
    """
    elements = [child for child in node.childNodes
                if child.nodeType == child.ELEMENT_NODE]
    found = [child for child in elements if child.tagName == name]
    if recursive > 0:
        for child in elements:
            found.extend(self.get_specific_subnodes(child, name, recursive - 1))
    return found
Given a node and a name return a list of child ELEMENT_NODEs that have a tagName matching the name . Search recursively for recursive levels .
39,707
def get_specific_nodes(self, node, names):
    """Map each name in *names* to the child ELEMENT_NODE of *node* with that
    tagName (the last child wins if several share a tag).
    """
    found = {}
    for child in node.childNodes:
        if child.nodeType == child.ELEMENT_NODE and child.tagName in names:
            found[child.tagName] = child
    return found
Given a node and a sequence of strings in names return a dictionary containing the names as keys and child ELEMENT_NODEs that have a tagName equal to the name .
39,708
def add_text(self, value):
    """Add *value* to ``self.pieces`` — extending for a list/tuple of pieces,
    appending for a single piece.
    """
    if not isinstance(value, (list, tuple)):
        self.pieces.append(value)
    else:
        self.pieces.extend(value)
Adds text corresponding to value into self . pieces .
39,709
def start_new_paragraph(self):
    """Make sure the output ends with an empty line.

    A trailing '' piece is the special marker meaning the previous text
    suppressed the paragraph break — in that case nothing is done.
    """
    if self.pieces[-1:] == ['']:
        return
    if not self.pieces:
        self.pieces = ['\n']
    elif not self.pieces[-1].endswith('\n'):
        self.pieces.extend([' \n', '\n'])
    else:
        self.pieces.append('\n')
Make sure to create an empty line . This is overridden if the previous text ends with the special marker . In that case nothing is done .
39,710
def add_line_with_subsequent_indent ( self , line , indent = 4 ) : if isinstance ( line , ( list , tuple ) ) : line = '' . join ( line ) line = line . strip ( ) width = self . textwidth - self . indent - indent wrapped_lines = textwrap . wrap ( line [ indent : ] , width = width ) for i in range ( len ( wrapped_lines ) ) : if wrapped_lines [ i ] != '' : wrapped_lines [ i ] = indent * ' ' + wrapped_lines [ i ] self . pieces . append ( line [ : indent ] + '\n' . join ( wrapped_lines ) [ indent : ] + ' \n' )
Add line of text and wrap such that subsequent lines are indented by indent spaces .
39,711
def extract_text(self, node):
    """Parse *node* (or a list of nodes) and return the result as a string
    instead of adding it to ``self.pieces`` (which is left untouched).
    This allows extracting text even for nodes in the ignore list.
    """
    nodes = node if isinstance(node, (list, tuple)) else [node]
    saved_pieces = self.pieces
    self.pieces = ['']
    for item in nodes:
        for child in item.childNodes:
            self.parse(child)
    text = ''.join(self.pieces)
    self.pieces = saved_pieces
    return text
Return the string representation of the node or list of nodes by parsing the subnodes but returning the result as a string instead of adding it to self . pieces . Note that this allows extracting text even if the node is in the ignore list .
39,712
def get_function_signature(self, node):
    """Return the backtick-quoted function signature string for a memberdef
    node, with an ``-> type`` suffix for non-void return types.
    """
    name = self.extract_text(self.get_specific_subnodes(node, 'name'))
    if self.with_type_info:
        argsstring = self.extract_text(
            self.get_specific_subnodes(node, 'argsstring'))
    else:
        # Rebuild the argument list from <param> subnodes, inventing
        # 'argN' names for parameters without a declname.
        args = []
        param_id = 1
        for n_param in self.get_specific_subnodes(node, 'param'):
            declname = self.extract_text(
                self.get_specific_subnodes(n_param, 'declname'))
            if not declname:
                declname = 'arg' + str(param_id)
            defval = self.extract_text(
                self.get_specific_subnodes(n_param, 'defval'))
            if defval:
                defval = '=' + defval
            args.append(declname + defval)
            param_id += 1
        argsstring = '(' + ', '.join(args) + ')'
    # Renamed from `type` to avoid shadowing the builtin.
    result_type = self.extract_text(self.get_specific_subnodes(node, 'type'))
    signature = name + argsstring
    if result_type != '' and result_type != 'void':
        signature = signature + ' -> ' + result_type
    return '`' + signature + '` '
Returns the function signature string for memberdef nodes .
39,713
def handle_typical_memberdefs_no_overload(self, signature, memberdef_nodes):
    """Emit a standard %feature("docstring") entry for each memberdef node."""
    for node in memberdef_nodes:
        self.add_text(['\n', '%feature("docstring") ', signature, ' "', '\n'])
        if self.with_function_signature:
            self.add_line_with_subsequent_indent(self.get_function_signature(node))
        self.subnode_parse(node, pieces=[], ignore=['definition', 'name'])
        self.add_text(['";', '\n'])
Produce standard documentation for memberdef_nodes .
39,714
def handle_typical_memberdefs(self, signature, memberdef_nodes):
    """Produce docstring entries for (possibly overloaded) memberdef nodes.

    With several overloads and ``self.with_overloaded_functions`` set, one
    entry with an "Overloaded function" section is produced; otherwise each
    node gets a standard entry.
    """
    if len(memberdef_nodes) == 1 or not self.with_overloaded_functions:
        self.handle_typical_memberdefs_no_overload(signature, memberdef_nodes)
        return
    self.add_text(['\n', '%feature("docstring") ', signature, ' "', '\n'])
    if self.with_function_signature:
        for node in memberdef_nodes:
            self.add_line_with_subsequent_indent(self.get_function_signature(node))
    self.add_text('\n')
    self.add_text(['Overloaded function', '\n', '-------------------'])
    for node in memberdef_nodes:
        self.add_text('\n')
        self.add_line_with_subsequent_indent('* ' + self.get_function_signature(node))
        self.subnode_parse(node, pieces=[], indent=4, ignore=['definition', 'name'])
    self.add_text(['";', '\n'])
Produces docstring entries containing an Overloaded function section with the documentation for each overload if the function is overloaded and self . with_overloaded_functions is set . Else produce normal documentation .
39,715
def do_memberdef(self, node):
    """Handle memberdefs outside of class/struct/file/namespace compounds.

    The common compound kinds are handled elsewhere; anything else that is
    public and documentable gets a flagged docstring entry here.

    Fix: dropped the unused local ``id`` (which also shadowed the builtin).
    """
    prot = node.attributes['prot'].value
    kind = node.attributes['kind'].value
    tmp = node.parentNode.parentNode.parentNode
    compdef = tmp.getElementsByTagName('compounddef')[0]
    cdef_kind = compdef.attributes['kind'].value
    if cdef_kind in ('file', 'namespace', 'class', 'struct'):
        # Handled by the compound-level handlers.
        return
    if prot != 'public':
        return
    first = self.get_specific_nodes(node, ('definition', 'name'))
    name = self.extract_text(first['name'])
    if name[:8] == 'operator':
        return
    if 'definition' not in first or kind in ['variable', 'typedef']:
        return
    data = self.extract_text(first['definition'])
    self.add_text('\n')
    self.add_text(['/* where did this entry come from??? */', '\n'])
    self.add_text('%feature("docstring") %s "\n%s' % (data, data))
    for n in node.childNodes:
        if n not in first.values():
            self.parse(n)
    self.add_text(['";', '\n'])
Handle cases outside of class struct file or namespace . These are now dealt with by handle_overloaded_memberfunction . Do these even exist???
39,716
def do_header(self, node):
    """Emit a user-defined section header as a comment, plus its optional
    <description> sibling (which is consumed so it is not rendered twice).

    Fix: the bounds check was ``len(...) >= idx + 2``, which allowed
    ``childNodes[idx + 2]`` to raise IndexError when len == idx + 2; the
    index only exists when len > idx + 2.
    """
    data = self.extract_text(node)
    self.add_text('\n/*\n %s \n*/\n' % data)
    parent = node.parentNode
    idx = parent.childNodes.index(node)
    # The matching <description> node, when present, sits two siblings ahead.
    if len(parent.childNodes) > idx + 2:
        nd = parent.childNodes[idx + 2]
        if nd.nodeName == 'description':
            nd = parent.removeChild(nd)
            self.add_text('\n/*')
            self.subnode_parse(nd)
            self.add_text('\n*/\n')
For a user defined section def a header field is present which should not be printed as such so we comment it in the output .
39,717
def visiblename(name, all=None, obj=None):
    """Decide whether to show documentation on a variable.

    Returns a boolean (previously a mix of 0/1/bool); ``all`` — the module's
    __all__, if any — takes precedence for names that pass the filters.
    """
    # Standard module metadata is never documented.
    if name in {'__author__', '__builtins__', '__cached__', '__credits__',
                '__date__', '__doc__', '__file__', '__spec__', '__loader__',
                '__module__', '__name__', '__package__', '__path__',
                '__qualname__', '__slots__', '__version__'}:
        return False
    # SWIG machinery is noise.
    if name.endswith("_swigregister") or name.startswith("__swig"):
        return False
    # Other dunders (e.g. __init__) are shown.
    if name.startswith('__') and name.endswith('__'):
        return True
    # Private names of namedtuple-like objects are part of the API.
    if name.startswith('_') and hasattr(obj, '_fields'):
        return True
    if all is not None:
        return name in all
    return not name.startswith('_')
Decide whether to show documentation on a variable .
39,718
def _download_file(uri, bulk_api):
    """Stream the bulk API result file at *uri* into a temporary file,
    rewind it, and yield it (context-manager style).
    """
    resp = requests.get(uri, headers=bulk_api.headers(), stream=True)
    with tempfile.TemporaryFile("w+b") as f:
        # Spool the whole response to disk before handing it to the caller.
        for chunk in resp.iter_content(chunk_size=None):
            f.write(chunk)
        f.seek(0)
        yield f
Download the bulk API result file for a single batch
39,719
def _load_mapping ( self , mapping ) : mapping [ "oid_as_pk" ] = bool ( mapping . get ( "fields" , { } ) . get ( "Id" ) ) job_id , local_ids_for_batch = self . _create_job ( mapping ) result = self . _wait_for_job ( job_id ) self . _store_inserted_ids ( mapping , job_id , local_ids_for_batch ) return result
Load data for a single step .
39,720
def _create_job ( self , mapping ) : job_id = self . bulk . create_insert_job ( mapping [ "sf_object" ] , contentType = "CSV" ) self . logger . info ( " Created bulk job {}" . format ( job_id ) ) local_ids_for_batch = { } for batch_file , local_ids in self . _get_batches ( mapping ) : batch_id = self . bulk . post_batch ( job_id , batch_file ) local_ids_for_batch [ batch_id ] = local_ids self . logger . info ( " Uploaded batch {}" . format ( batch_id ) ) self . bulk . close_job ( job_id ) return job_id , local_ids_for_batch
Initiate a bulk insert and upload batches to run in parallel .
39,721
def _get_batches(self, mapping, batch_size=10000):
    """Yield ``(csv_file, local_ids)`` batches of up to *batch_size* rows for
    a bulk insert of the given mapping, pulling rows from the local db.
    """
    action = mapping.get("action", "insert")
    fields = mapping.get("fields", {}).copy()
    static = mapping.get("static", {})
    lookups = mapping.get("lookups", {})
    record_type = mapping.get("record_type")

    # Salesforce assigns the Id on insert, so never send one.
    if action == "insert" and "Id" in fields:
        del fields["Id"]

    columns = []
    columns.extend(fields.keys())
    columns.extend(lookups.keys())
    columns.extend(static.keys())
    if record_type:
        columns.append("RecordTypeId")
        # BUG FIX: the two string fragments used to concatenate without a
        # separating space, producing "...'{0}'AND DeveloperName..." —
        # invalid SOQL.
        query = (
            "SELECT Id FROM RecordType WHERE SObjectType='{0}' "
            "AND DeveloperName = '{1}' LIMIT 1"
        )
        record_type_id = self.sf.query(
            query.format(mapping.get("sf_object"), record_type)
        )["records"][0]["Id"]

    query = self._query_db(mapping)

    total_rows = 0
    batch_num = 1

    def start_batch():
        # Fresh CSV buffer with a header row for each batch.
        batch_file = io.BytesIO()
        writer = unicodecsv.writer(batch_file)
        writer.writerow(columns)
        batch_ids = []
        return batch_file, writer, batch_ids

    batch_file, writer, batch_ids = start_batch()
    for row in query.yield_per(batch_size):
        total_rows += 1
        # First column is the local primary key; the rest are data columns.
        pkey = row[0]
        row = list(row[1:]) + list(static.values())
        if record_type:
            row.append(record_type_id)
        writer.writerow([self._convert(value) for value in row])
        batch_ids.append(pkey)
        if not total_rows % batch_size:
            batch_file.seek(0)
            self.logger.info(" Processing batch {}".format(batch_num))
            yield batch_file, batch_ids
            batch_file, writer, batch_ids = start_batch()
            batch_num += 1
    # Flush the final, partially-filled batch.
    if batch_ids:
        batch_file.seek(0)
        yield batch_file, batch_ids
    self.logger.info(
        " Prepared {} rows for import to {}".format(
            total_rows, mapping["sf_object"]
        )
    )
Get data from the local db
39,722
def _query_db(self, mapping):
    """Build and return the SQLAlchemy query that feeds rows for *mapping*."""
    model = self.models[mapping.get("table")]
    fields = mapping.get("fields", {}).copy()
    if mapping["oid_as_pk"]:
        del fields["Id"]

    # Primary key column first, then the mapped field columns.
    id_column = model.__table__.primary_key.columns.keys()[0]
    columns = [getattr(model, id_column)]
    for f in fields.values():
        columns.append(model.__table__.columns[f])

    # One aliased join target per lookup, selecting the resolved sf_id.
    lookups = mapping.get("lookups", {}).copy()
    for lookup in lookups.values():
        lookup["aliased_table"] = aliased(
            self.metadata.tables["{}_sf_ids".format(lookup["table"])]
        )
        columns.append(lookup["aliased_table"].columns.sf_id)

    query = self.session.query(*columns)
    if "record_type" in mapping and hasattr(model, "record_type"):
        query = query.filter(model.record_type == mapping["record_type"])
    if "filters" in mapping:
        query = query.filter(*[text(f) for f in mapping["filters"]])

    for sf_field, lookup in lookups.items():
        key_field = get_lookup_key_field(lookup, sf_field)
        value_column = getattr(model, key_field)
        query = query.outerjoin(
            lookup["aliased_table"],
            lookup["aliased_table"].columns.id == value_column,
        )
        query = query.order_by(getattr(model, key_field))

    self.logger.info(str(query))
    return query
Build a query to retrieve data from the local db .
39,723
def _store_inserted_ids(self, mapping, job_id, local_ids_for_batch):
    """Download each batch's job results and record the inserted SF Ids."""
    id_table_name = self._reset_id_table(mapping)
    conn = self.session.connection()
    for batch_id, local_ids in local_ids_for_batch.items():
        try:
            results_url = "{}/job/{}/batch/{}/result".format(
                self.bulk.endpoint, job_id, batch_id
            )
            with _download_file(results_url, self.bulk) as f:
                self.logger.info(
                    " Downloaded results for batch {}".format(batch_id)
                )
                self._store_inserted_ids_for_batch(
                    f, local_ids, id_table_name, conn
                )
                self.logger.info(
                    " Updated {} for batch {}".format(id_table_name, batch_id)
                )
        except Exception:
            # Best effort per batch: log the failure and move on.
            self.logger.error(
                "Could not download batch results: {}".format(batch_id)
            )
            continue
    self.session.commit()
Get the job results and store inserted SF Ids in a new table
39,724
def _reset_id_table(self, mapping):
    """Create (or recreate, at most once per run) the table that maps local
    ids to inserted SF Ids, and return its name.
    """
    if not hasattr(self, "_initialized_id_tables"):
        self._initialized_id_tables = set()
    id_table_name = "{}_sf_ids".format(mapping["table"])
    if id_table_name not in self._initialized_id_tables:
        # Drop any stale definition/table before creating a fresh one.
        if id_table_name in self.metadata.tables:
            self.metadata.remove(self.metadata.tables[id_table_name])
        id_table = Table(
            id_table_name,
            self.metadata,
            Column("id", Unicode(255), primary_key=True),
            Column("sf_id", Unicode(18)),
        )
        if id_table.exists():
            id_table.drop()
        id_table.create()
        self._initialized_id_tables.add(id_table_name)
    return id_table_name
Create an empty table to hold the inserted SF Ids
39,725
def _get_mapping_for_table ( self , table ) : for mapping in self . mappings . values ( ) : if mapping [ "table" ] == table : return mapping
Returns the first mapping for a table name
39,726
def _load_config(self):
    """Load the project configuration from YAML, unless an override config
    was already passed in at construction time.
    """
    if self.config:
        return
    repo_root = self.repo_root
    if not repo_root:
        raise NotInProject(
            "No git repository was found in the current path. You must be in a git repository to set up and use CCI for a project."
        )
    if not self.config_project_path:
        raise ProjectConfigNotFound(
            "The file {} was not found in the repo root: {}. Are you in a CumulusCI Project directory?".format(
                self.config_filename, repo_root
            )
        )
    # Project-level config.
    with open(self.config_project_path, "r") as f_config:
        project_config = ordered_yaml_load(f_config)
    if project_config:
        self.config_project.update(project_config)
    # Machine-local project overrides.
    if self.config_project_local_path:
        with open(self.config_project_local_path, "r") as f_local_config:
            local_config = ordered_yaml_load(f_local_config)
        if local_config:
            self.config_project_local.update(local_config)
    # Extra YAML passed in directly.
    if self.additional_yaml:
        additional_yaml_config = ordered_yaml_load(self.additional_yaml)
        if additional_yaml_config:
            self.config_additional_yaml.update(additional_yaml_config)
    self.config = merge_config(
        OrderedDict(
            [
                ("global_config", self.config_global),
                ("global_local", self.config_global_local),
                ("project_config", self.config_project),
                ("project_local_config", self.config_project_local),
                ("additional_yaml", self.config_additional_yaml),
            ]
        )
    )
Loads the configuration from YAML if no override config was passed in initially .
39,727
def init_sentry(self):
    """Initialize sentry.io error logging for this session.

    No-op unless ``self.use_sentry`` is set; tags describe the repo, branch,
    commit, and CCI version, with overrides from the "sentry_tags" config.
    """
    if not self.use_sentry:
        return
    sentry_config = self.keychain.get_service("sentry")
    tags = {
        "repo": self.repo_name,
        "branch": self.repo_branch,
        "commit": self.repo_commit,
        "cci version": cumulusci.__version__,
    }
    tags.update(self.config.get("sentry_tags", {}))
    self.sentry = raven.Client(
        dsn=sentry_config.dsn,
        environment=self.config.get("sentry_environment", "CumulusCI CLI"),
        tags=tags,
        processors=("raven.processors.SanitizePasswordsProcessor",),
    )
Initializes sentry . io error logging for this session
39,728
def get_previous_version(self):
    """Query GitHub releases to find the previous production release.

    Releases come back newest-first, so the previous release is the second
    one whose tag carries the production prefix. Returns None when fewer
    than two matching releases exist.
    """
    gh = self.get_github_api()
    repo = gh.repository(self.repo_owner, self.repo_name)
    seen_latest = False
    for release in repo.releases():
        if not release.tag_name.startswith(self.project__git__prefix_release):
            continue
        if seen_latest:
            return LooseVersion(self.get_version_for_tag(release.tag_name))
        seen_latest = True
Query GitHub releases to find the previous production release
39,729
def get_static_dependencies(self, dependencies=None, include_beta=None):
    """Resolve the project's dependencies into static ones.

    Dynamic GitHub references are expanded via ``process_github_dependency``;
    everything else passes through unchanged.
    """
    if not dependencies:
        dependencies = self.project__dependencies
    if not dependencies:
        return []
    static_dependencies = []
    for dependency in dependencies:
        if "github" in dependency:
            static_dependencies.extend(
                self.process_github_dependency(dependency, include_beta=include_beta)
            )
        else:
            static_dependencies.append(dependency)
    return static_dependencies
Resolves the project - > dependencies section of cumulusci . yml to convert dynamic github dependencies into static dependencies by inspecting the referenced repositories
39,730
def _init_logger ( self ) : if self . flow : self . logger = self . flow . logger . getChild ( self . __class__ . __name__ ) else : self . logger = logging . getLogger ( __name__ )
Initializes self . logger
39,731
def _init_options ( self , kwargs ) : self . options = self . task_config . options if self . options is None : self . options = { } if kwargs : self . options . update ( kwargs ) for option , value in list ( self . options . items ( ) ) : try : if value . startswith ( "$project_config." ) : attr = value . replace ( "$project_config." , "" , 1 ) self . options [ option ] = getattr ( self . project_config , attr , None ) except AttributeError : pass
Initializes self . options
39,732
def _log_begin ( self ) : self . logger . info ( "Beginning task: %s" , self . __class__ . __name__ ) if self . salesforce_task and not self . flow : self . logger . info ( "%15s %s" , "As user:" , self . org_config . username ) self . logger . info ( "%15s %s" , "In org:" , self . org_config . org_id ) self . logger . info ( "" )
Log the beginning of the task execution
39,733
def _poll ( self ) : while True : self . poll_count += 1 self . _poll_action ( ) if self . poll_complete : break time . sleep ( self . poll_interval_s ) self . _poll_update_interval ( )
poll for a result in a loop
39,734
def _poll_update_interval(self):
    """Back off the polling interval: every third poll, grow it by a second."""
    if old_div(self.poll_count, 3) > self.poll_interval_level:
        self.poll_interval_level += 1
        self.poll_interval_s += 1
        self.logger.info(
            "Increased polling interval to %d seconds", self.poll_interval_s
        )
update the polling interval to be used next iteration
39,735
def get_project_config(self, *args, **kwargs):
    """Return a ProjectConfig for the given project.

    Pending deprecation — retained for backwards compatibility.
    """
    warnings.warn(
        "BaseGlobalConfig.get_project_config is pending deprecation",
        DeprecationWarning,
    )
    return self.project_config_class(self, *args, **kwargs)
Returns a ProjectConfig for the given project
39,736
def _load_config(self):
    """Load the global (and optional machine-local) configuration from YAML.

    Fix: the machine-local file was opened without ever being closed; it is
    now read through a context manager so the handle is released promptly.
    """
    with open(self.config_global_path, "r") as f_config:
        config = ordered_yaml_load(f_config)
    self.config_global = config
    if self.config_global_local_path:
        with open(self.config_global_local_path, "r") as f_local:
            config = ordered_yaml_load(f_local)
        self.config_global_local = config
    self.config = merge_config(
        OrderedDict(
            [
                ("global_config", self.config_global),
                ("global_local", self.config_global_local),
            ]
        )
    )
Loads the local configuration
39,737
def username(self):
    """Username for the org connection, falling back to the preferred
    username from the userinfo endpoint when none is configured.
    """
    return self.config.get("username") or self.userinfo__preferred_username
Username for the org connection .
39,738
def timestamp_file():
    """Open (creating it on first use) the file tracking the time of the
    last version check, and yield the open handle.
    """
    config_dir = os.path.join(
        os.path.expanduser("~"), BaseGlobalConfig.config_local_dir
    )
    if not os.path.exists(config_dir):
        os.mkdir(config_dir)
    timestamp_file = os.path.join(config_dir, "cumulus_timestamp")
    try:
        with open(timestamp_file, "r+") as f:
            yield f
    except IOError:
        # First run: the file doesn't exist yet, so create it.
        with open(timestamp_file, "w+") as f:
            yield f
Opens a file for tracking the time of the last version check
39,739
def pass_config(func=None, **config_kw):
    """Decorator that loads the CCI config and passes it as the first
    argument to a click command. Usable bare (``@pass_config``) or with
    keyword options (``@pass_config(...)``).
    """
    def decorate(func):
        def new_func(*args, **kw):
            config = load_config(**config_kw)
            func(config, *args, **kw)
        return functools.update_wrapper(new_func, func)

    return decorate if func is None else decorate(func)
Decorator which passes the CCI config object as the first arg to a click command .
39,740
def list_commands(self, ctx):
    """Return the sorted names of services that can be configured."""
    config = load_config(**self.load_config_kwargs)
    services = self._get_services_config(config)
    return sorted(services.keys())
list the services that can be configured
39,741
def parse_api_datetime(value):
    """Parse a datetime string returned from the Salesforce API.

    Only UTC offsets ("+0000" or "Z") are accepted; anything else raises.

    Fix: the validation used a bare ``assert``, which is stripped under
    ``python -O``; it now raises explicitly (keeping AssertionError so any
    existing callers' handling is unchanged).
    """
    dt = datetime.strptime(value[0:DATETIME_LEN], API_DATE_FORMAT)
    offset_str = value[DATETIME_LEN:]
    if offset_str not in ("+0000", "Z"):
        raise AssertionError("The Salesforce API returned a weird timezone.")
    return dt
parse a datetime returned from the salesforce API .
39,742
def removeXmlElement(name, directory, file_pattern, logger=None):
    """Recursively walk *directory* and strip XML elements named *name* from
    every file matching *file_pattern*.
    """
    for path, _dirs, files in os.walk(os.path.abspath(directory)):
        for filename in fnmatch.filter(files, file_pattern):
            remove_xml_element_file(name, os.path.join(path, filename))
Recursively walk a directory and remove XML elements
39,743
def remove_xml_element_file(name, path):
    """Remove all elements named *name* from the XML file at *path*, in place."""
    ET.register_namespace("", "http://soap.sforce.com/2006/04/metadata")
    tree = remove_xml_element(name, elementtree_parse_file(path))
    return tree.write(path, encoding=UTF8, xml_declaration=True)
Remove XML elements from a single file
39,744
def remove_xml_element_string(name, content):
    """Return *content* (an XML string) with all elements named *name* removed."""
    ET.register_namespace("", "http://soap.sforce.com/2006/04/metadata")
    root = ET.fromstring(content)
    root = remove_xml_element(name, root)
    return ET.tostring(root, encoding=UTF8)
Remove XML elements from a string
39,745
def remove_xml_element(name, tree):
    """Remove every element named *name* from an ElementTree tree, in place,
    and return the tree.
    """
    # "{{...}}" in the format string yields literal braces, i.e. the
    # Clark-notation namespace prefix for the Salesforce metadata namespace.
    doomed = tree.findall(
        ".//{{http://soap.sforce.com/2006/04/metadata}}{}".format(name)
    )
    if not doomed:
        return tree
    # ElementTree has no parent pointers, so build a child->parent map first.
    parents = {child: parent for parent in tree.iter() for child in parent}
    for element in doomed:
        parents[element].remove(element)
    return tree
Removes XML elements from an ElementTree content tree
39,746
def temporary_dir():
    """Create a temporary directory, chdir into it for the duration, and
    remove it afterwards (context-manager body).
    """
    d = tempfile.mkdtemp()
    try:
        with cd(d):
            yield d
    finally:
        # The directory may already be gone (e.g. removed by the caller).
        if os.path.exists(d):
            shutil.rmtree(d)
Context manager that creates a temporary directory and chdirs to it .
39,747
def in_directory(filepath, dirpath):
    """Return True when *filepath* is *dirpath* itself or lies beneath it.

    Both paths are resolved with realpath before comparison, so symlinks
    are followed.
    """
    real_file = os.path.realpath(filepath)
    real_dir = os.path.realpath(dirpath)
    if real_file == real_dir:
        return True
    # Join with "" appends a trailing separator, preventing /a/bc from
    # matching /a/b.
    return real_file.startswith(os.path.join(real_dir, ""))
Returns a boolean for whether filepath is contained in dirpath .
39,748
def log_progress(iterable, logger, batch_size=10000,
                 progress_message="Processing... ({})",
                 done_message="Done! (Total: {})"):
    """Yield items from *iterable*, logging progress every *batch_size*
    items and a final total when the iterable is exhausted.
    """
    count = 0
    for item in iterable:
        yield item
        count += 1
        if count % batch_size == 0:
            logger.info(progress_message.format(count))
    logger.info(done_message.format(count))
Log progress while iterating .
39,749
def login_url(self, org=None):
    """Return the start URL that automatically logs into the target org.

    With no *org* argument the default org is used; otherwise the named org
    is looked up in the keychain.
    """
    target = self.org if org is None else self.keychain.get_org(org)
    return target.start_url
Returns the login url which will automatically log into the target Salesforce org . By default the org_name passed to the library constructor is used but this can be overridden with the org option to log into a different org .
39,750
def run_task(self, task_name, **options):
    """Run a named CumulusCI task for the current project, with optional
    task-option overrides passed as keyword arguments.
    """
    task_config = self.project_config.get_task(task_name)
    class_path = task_config.class_path
    logger.console("\n")
    task_class, task_config = self._init_task(class_path, options, task_config)
    return self._run_task(task_class, task_config)
Runs a named CumulusCI task for the current project with optional support for overriding task options via kwargs .
39,751
def run_task_class(self, class_path, **options):
    """Run a CumulusCI task class directly, with options given as kwargs."""
    logger.console("\n")
    task_class, task_config = self._init_task(class_path, options, TaskConfig())
    return self._run_task(task_class, task_config)
Runs a CumulusCI task class with task options via kwargs .
39,752
def get_task(self, name):
    """Return the TaskConfig for *name*; raise TaskNotFoundError if absent."""
    config = getattr(self, "tasks__{}".format(name))
    if not config:
        raise TaskNotFoundError("Task not found: {}".format(name))
    return TaskConfig(config)
Returns a TaskConfig
39,753
def get_flow(self, name):
    """Return the FlowConfig for *name*; raise FlowNotFoundError if absent."""
    config = getattr(self, "flows__{}".format(name))
    if not config:
        raise FlowNotFoundError("Flow not found: {}".format(name))
    return FlowConfig(config)
Returns a FlowConfig
39,754
def render(self):
    """Return the combined release notes rendered by all parsers.

    Parsers that render None are skipped; sections are joined with blank
    CRLF lines.
    """
    sections = [parser.render() for parser in self.parsers]
    return u"\r\n\r\n".join(s for s in sections if s is not None)
Returns the rendered release notes from all parsers as a string
39,755
# Merge existing and new release content.
#
# Walks the existing release body line by line; when a line matches a
# parser's rendered header, that section's old lines are dropped and the
# parser's freshly rendered content is substituted (parser.replaced is set
# so the section is not appended again at the end). Sections for parsers
# that never matched are appended after the body. If the release has no
# body, the incoming ``content`` is returned unchanged.
#
# NOTE(review): the nested for/else drives ``is_start_line`` and the
# line-consumption logic is highly order-sensitive, so the code is left
# byte-identical here.
def _update_release_content ( self , release , content ) : if release . body : new_body = [ ] current_parser = None is_start_line = False for parser in self . parsers : parser . replaced = False for line in release . body . splitlines ( ) : if current_parser : if current_parser . _is_end_line ( current_parser . _process_line ( line ) ) : parser_content = current_parser . render ( ) if parser_content : new_body . append ( parser_content + "\r\n" ) current_parser = None for parser in self . parsers : if ( parser . _render_header ( ) . strip ( ) == parser . _process_line ( line ) . strip ( ) ) : parser . replaced = True current_parser = parser is_start_line = True break else : is_start_line = False if is_start_line : continue if current_parser : continue else : new_body . append ( line . strip ( ) ) if current_parser : new_body . append ( current_parser . render ( ) ) for parser in self . parsers : parser_content = parser . render ( ) if parser_content and not parser . replaced : new_body . append ( parser_content + "\r\n" ) content = u"\r\n" . join ( new_body ) return content
Merge existing and new release content .
39,756
def get_flow(self, name, options=None):
    """Return a primed, ready-to-run FlowCoordinator for the named flow."""
    flow_config = self.project_config.get_flow(name)
    return FlowCoordinator(
        self.project_config,
        flow_config,
        name=name,
        options=options,
        skip=None,
        callbacks=self.callback_class(),
    )
Get a primed and ready-to-go flow coordinator.
39,757
def _get_env ( self ) : env = { } for k , v in os . environ . items ( ) : k = k . decode ( ) if isinstance ( k , bytes ) else k v = v . decode ( ) if isinstance ( v , bytes ) else v env [ k ] = v return list ( env . items ( ) )
loads the environment variables as unicode if ascii
39,758
def import_class(path):
    """Import and return a class given a dotted "module.Class" path string."""
    module_name, _, class_name = path.rpartition(".")
    mod = __import__(module_name, fromlist=[native_str(class_name)])
    return getattr(mod, native_str(class_name))
Import a class from a string module class path
39,759
def parse_datetime(dt_str, format):
    """Create a timezone-aware (UTC) datetime from *dt_str* per *format*.

    Fix: ``struct_time[6]`` is the weekday, and it was being passed as the
    datetime's ``microsecond`` argument; microseconds are now zero and the
    timezone is passed explicitly as tzinfo.
    """
    t = time.strptime(dt_str, format)
    return datetime(t[0], t[1], t[2], t[3], t[4], t[5], 0, pytz.UTC)
Create a timezone - aware datetime object from a datetime string .
39,760
def process_list_arg(arg):
    """Parse a comma-separated string into a list of whitespace-stripped
    parts; lists pass through unchanged (anything else yields None).
    """
    if isinstance(arg, list):
        return arg
    elif isinstance(arg, basestring):
        return [part.strip() for part in arg.split(",")]
Parse a string into a list separated by commas with whitespace stripped
39,761
def decode_to_unicode(content):
    """Decode ISO-8859-1 byte content from the SF API to text; falsy values
    and existing str instances pass through untouched.
    """
    if not content or isinstance(content, str):
        return content
    try:
        return content.decode("ISO-8859-1")
    except UnicodeEncodeError:
        # Defensive fallback kept from the original: return as-is.
        return content
decode ISO - 8859 - 1 to unicode when using sf api
39,762
def _find_or_create_version(self, product):
    """Create a Version in MetaDeploy if one doesn't already exist.

    Looks up the version by product id and label (derived from the tag);
    creates it when the lookup comes back empty.
    """
    tag = self.options["tag"]
    label = self.project_config.get_version_for_tag(tag)
    existing = self._call_api(
        "GET", "/versions", params={"product": product["id"], "label": label}
    )
    if existing["data"]:
        version = existing["data"][0]
        self.logger.info("Found {}".format(version["url"]))
        return version
    version = self._call_api(
        "POST",
        "/versions",
        json={
            "product": product["url"],
            "label": label,
            "description": self.options.get("description", ""),
            "is_production": True,
            "commit_ish": tag,
            "is_listed": False,
        },
    )
    self.logger.info("Created {}".format(version["url"]))
    return version
Create a Version in MetaDeploy if it doesn't already exist.
39,763
def _render_html(self, libraries):
    """Generate the HTML documentation page.

    ``libraries`` is a list of LibraryDocumentation objects rendered into
    the package's ``template.html`` with the bundled stylesheet inlined.
    """
    title = self.options.get("title", "Keyword Documentation")
    date = time.strftime("%A %B %d, %I:%M %p")
    cci_version = cumulusci.__version__
    here = os.path.dirname(__file__)
    with open(os.path.join(here, "stylesheet.css")) as f:
        stylesheet = f.read()
    jinjaenv = jinja2.Environment(
        loader=jinja2.FileSystemLoader(here), autoescape=False
    )
    jinjaenv.filters["robot_html"] = robot.utils.html_format
    return jinjaenv.get_template("template.html").render(
        libraries=libraries,
        title=title,
        cci_version=cci_version,
        stylesheet=stylesheet,
        date=date,
    )
Generate the html . libraries is a list of LibraryDocumentation objects
39,764
def generate_password(self):
    """Generate an org user password with the sfdx CLI.

    Failures are logged and recorded (so later calls are skipped) rather
    than raised -- password generation is best-effort.
    """
    if self.password_failed:
        self.logger.warning("Skipping resetting password since last attempt failed")
        return

    command = sarge.shell_format(
        "sfdx force:user:password:generate -u {0}", self.username
    )
    self.logger.info(
        "Generating scratch org user password with command {}".format(command)
    )
    proc = sarge.Command(
        command,
        stdout=sarge.Capture(buffer_size=-1),
        stderr=sarge.Capture(buffer_size=-1),
        shell=True,
    )
    proc.run()

    stderr = io.TextIOWrapper(proc.stderr).readlines()
    stdout = io.TextIOWrapper(proc.stdout).readlines()
    if proc.returncode:
        # Remember the failure so we don't retry on every task run.
        self.config["password_failed"] = True
        self.logger.warning(
            "Failed to set password: \n{}\n{}".format(
                "\n".join(stdout), "\n".join(stderr)
            )
        )
Generates an org password with the sfdx utility .
39,765
def _convert_connected_app(self):
    """Convert a deprecated Connected App config into a keychain service."""
    if self.services and "connected_app" in self.services:
        # Already stored as a service; nothing to convert.
        return
    connected_app = self.get_connected_app()
    if not connected_app:
        return
    self.logger.warning(
        "Reading Connected App info from deprecated config."
        " Connected App should be changed to a service."
        " If using environment keychain, update the environment variable."
        " Otherwise, it has been handled automatically and you should not"
        " see this message again."
    )
    self.set_service(
        "connected_app",
        ServiceConfig(
            {
                "callback_url": connected_app.callback_url,
                "client_id": connected_app.client_id,
                "client_secret": connected_app.client_secret,
            }
        ),
    )
Convert Connected App to service
39,766
def _load_scratch_orgs(self):
    """Create a keychain scratch org config for each project scratch org
    definition that doesn't already have a keychain org of the same name."""
    existing = self.list_orgs()
    scratch_defs = self.project_config.orgs__scratch
    if not scratch_defs:
        return
    for config_name in scratch_defs.keys():
        if config_name not in existing:
            self.create_scratch_org(config_name, config_name)
Creates all scratch org configs for the project in the keychain if a keychain org doesn't already exist.
39,767
def change_key(self, key):
    """Re-encrypt all stored services and orgs with a new key."""
    # Decrypt everything while the old key is still active.
    services = {name: self.get_service(name) for name in self.list_services()}
    orgs = {name: self.get_org(name) for name in self.list_orgs()}

    self.key = key

    # Re-store (and thereby re-encrypt) under the new key.
    for org_config in orgs.values():
        self.set_org(org_config)
    for service_name, service_config in services.items():
        self.set_service(service_name, service_config)

    self._convert_connected_app()
re - encrypt stored services and orgs with the new key
39,768
def get_default_org(self):
    """Return ``(name, config)`` of the default org, or ``(None, None)``."""
    for name in self.list_orgs():
        org_config = self.get_org(name)
        if org_config.default:
            return name, org_config
    return None, None
retrieve the name and configuration of the default org
39,769
def set_default_org(self, name):
    """Mark the org with the given name as the default org for tasks."""
    org_config = self.get_org(name)
    # Clear any previous default first so only one org carries the flag.
    self.unset_default_org()
    org_config.config["default"] = True
    self.set_org(org_config)
set the default org for tasks by name key
39,770
def unset_default_org(self):
    """Clear the default flag from every org in the keychain."""
    for name in self.list_orgs():
        org_config = self.get_org(name)
        if org_config.default:
            del org_config.config["default"]
            self.set_org(org_config)
unset the default orgs for tasks
39,771
def get_org(self, name):
    """Retrieve an org configuration by name, raising if it is unknown."""
    if name not in self.orgs:
        self._raise_org_not_found(name)
    return self._get_org(name)
retrieve an org configuration by name key
39,772
def list_orgs(self):
    """Return the names of all orgs in the keychain, sorted."""
    return sorted(self.orgs)
list the orgs configured in the keychain
39,773
def set_service(self, name, service_config, project=False):
    """Validate and store a ServiceConfig in the keychain."""
    valid_services = self.project_config.services
    if not valid_services or name not in valid_services:
        self._raise_service_not_valid(name)
    self._validate_service(name, service_config)
    self._set_service(name, service_config, project)
    # Refresh the in-memory service cache.
    self._load_services()
Store a ServiceConfig in the keychain
39,774
def get_service(self, name):
    """Retrieve a stored ServiceConfig from the keychain.

    Raises if the name isn't a valid project service or isn't configured.
    """
    self._convert_connected_app()
    valid_services = self.project_config.services
    if not valid_services or name not in valid_services:
        self._raise_service_not_valid(name)
    if name not in self.services:
        self._raise_service_not_configured(name)
    return self._get_service(name)
Retrieve a stored ServiceConfig from the keychain or exception
39,775
def list_services(self):
    """Return the names of all configured services, sorted."""
    return sorted(self.services)
list the services configured in the keychain
39,776
def set_pdb_trace(pm=False):
    """Start the Python debugger while robotframework is running.

    Robot redirects the standard streams, so the real ones are restored
    before handing control to pdb.  With ``pm=True`` the debugger attaches
    to the exception currently being handled.
    """
    import sys
    import pdb

    for name in ("stdin", "stdout", "stderr"):
        setattr(sys, name, getattr(sys, "__%s__" % name))
    (pdb.post_mortem if pm else pdb.set_trace)()
Start the Python debugger when robotframework is running .
39,777
def selenium_retry(target=None, retry=True):
    """Decorator to turn on automatic retries of flaky selenium failures.

    May decorate a class (mixes in RetryingSeleniumLibraryMixin) or a
    single method, and may be used bare (``@selenium_retry``) or with a
    boolean argument (``@selenium_retry(False)``).

    Fixes: the wrapper set ``self.retry`` but saved/restored
    ``self.retry_selenium``, so the retry setting never took effect for
    the wrapped call; a stray ``set_pdb_trace()`` debugging call has been
    removed.
    """
    if isinstance(target, bool):
        # Called as @selenium_retry(True/False).
        retry = target
        target = None

    def decorate(target):
        if isinstance(target, type):
            # Decorating a class: mix in retry support.
            cls = target
            return type(
                cls.__name__,
                (cls, RetryingSeleniumLibraryMixin),
                {"retry_selenium": retry, "__doc__": cls.__doc__},
            )

        # Decorating a method: toggle the retry flag for the call duration.
        func = target

        @functools.wraps(func)
        def run_with_retry(self, *args, **kwargs):
            old_retry = self.retry_selenium
            self.retry_selenium = retry
            try:
                return func(self, *args, **kwargs)
            finally:
                self.retry_selenium = old_retry

        run_with_retry.is_selenium_retry_decorator = True
        return run_with_retry

    if target is None:
        return decorate
    else:
        return decorate(target)
Decorator to turn on automatic retries of flaky selenium failures .
39,778
def selenium_execute_with_retry(self, execute, command, params):
    """Run a single selenium command, retrying once on known-flaky failures."""
    try:
        return execute(command, params)
    except Exception as e:
        retriable = isinstance(e, ALWAYS_RETRY_EXCEPTIONS) or (
            isinstance(e, WebDriverException)
            and "Other element would receive the click" in str(e)
        )
        if not retriable:
            raise
        self.builtin.log("Retrying {} command".format(command), level="WARN")
        # Give the page a moment to settle before the single retry.
        time.sleep(2)
        return execute(command, params)
Run a single selenium command and retry once .
39,779
def _get_last_tag(self):
    """Return the most recent prod release tag before the current tag,
    or None if there is no earlier prod release."""
    current_version = LooseVersion(
        self._get_version_from_tag(self.release_notes_generator.current_tag)
    )
    prefix = self.github_info["prefix_prod"]
    earlier_versions = []
    for tag in self.repo.tags():
        if not tag.name.startswith(prefix):
            continue
        version = LooseVersion(self._get_version_from_tag(tag.name))
        if version < current_version:
            earlier_versions.append(version)
    if earlier_versions:
        return "{}{}".format(prefix, max(earlier_versions))
Gets the last release tag before self . current_tag
39,780
def _get_pull_requests ( self ) : for pull in self . repo . pull_requests ( state = "closed" , base = self . github_info [ "master_branch" ] , direction = "asc" ) : if self . _include_pull_request ( pull ) : yield pull
Gets all pull requests from the repo, since we can't do a filtered date-merged search.
39,781
def _include_pull_request(self, pull_request):
    """Return True if the pull request was merged to the default branch
    in the window between self.start_date and self.end_date."""
    merged_date = pull_request.merged_at
    if not merged_date:
        # Closed without merging.
        return False
    if self.last_tag:
        last_tag_sha = self.last_tag_info["commit"].sha
        if pull_request.merge_commit_sha == last_tag_sha:
            # This PR's merge commit *is* the previous release tag.
            return False
    current_tag_sha = self.current_tag_info["commit"].sha
    if pull_request.merge_commit_sha == current_tag_sha:
        # This PR's merge commit is the current release tag itself.
        return True
    if merged_date <= self.start_date:
        if self.end_date:
            # NOTE(review): `last_tag_sha` is only bound when self.last_tag
            # is truthy; if this branch is reached with no last tag it
            # raises UnboundLocalError -- confirm whether that combination
            # can occur.
            if (
                merged_date > self.end_date
                and pull_request.merge_commit_sha != last_tag_sha
            ):
                return True
        else:
            return True
    return False
Checks if the given pull_request was merged to the default branch between self . start_date and self . end_date
39,782
def patch_statusreporter():
    """Monkey-patch robotframework's StatusReporter so a post-mortem
    debugger starts whenever a keyword raises an exception."""
    from robot.running.statusreporter import StatusReporter

    original_exit = StatusReporter.__exit__

    def debugging_exit(self, exc_type, exc_val, exc_tb):
        if exc_val and isinstance(exc_val, Exception):
            set_pdb_trace(pm=True)
        return original_exit(self, exc_type, exc_val, exc_tb)

    StatusReporter.__exit__ = debugging_exit
Monkey patch robotframework to do postmortem debugging
39,783
def get_item_name(self, item, parent):
    """Return the text of the first name element inside ``item``,
    prefixed with the parent's item-name prefix when one exists.

    Raises MissingNameElementError if no name element is found.
    """
    names = self.get_name_elements(item)
    if not names:
        raise MissingNameElementError
    prefix = self.item_name_prefix(parent)
    name = names[0].text
    return prefix + name if prefix else name
Returns the value of the first name element found inside of element
39,784
def for_display(self):
    """Return step details formatted for logging output."""
    parts = ["{}: {}".format(self.step_num, self.path)]
    if self.skip:
        parts.append(" [SKIP]")
    description = self.task_config.get("description")
    if description:
        parts.append(": {}".format(description))
    return "".join(parts)
Step details formatted for logging output .
39,785
def run_step(self):
    """Run a step.

    Instantiates the step's task class against a copy of its config
    (with any ``^^task.return_value`` option references resolved), runs
    it, and wraps the outcome in a StepResult.  Exceptions raised while
    running are logged and captured in the result rather than propagated.
    """
    # Copy so option resolution doesn't mutate the shared step config.
    task_config = self.step.task_config.copy()
    task_config["options"] = task_config["options"].copy()
    self.flow.resolve_return_value_options(task_config["options"])
    exc = None
    try:
        task = self.step.task_class(
            self.project_config,
            TaskConfig(task_config),
            org_config=self.org_config,
            name=self.step.task_name,
            stepnum=self.step.step_num,
            flow=self.flow,
        )
        self._log_options(task)
        task()
    except Exception as e:
        self.flow.logger.exception(
            "Exception in task {}".format(self.step.task_name)
        )
        exc = e
    # NOTE(review): if the task constructor itself raises, `task` is never
    # bound and the attribute accesses below raise UnboundLocalError --
    # confirm whether construction errors are possible here.
    return StepResult(
        self.step.step_num,
        self.step.task_name,
        self.step.path,
        task.result,
        task.return_values,
        exc,
    )
Run a step .
39,786
def _init_steps ( self , ) : self . _check_old_yaml_format ( ) config_steps = self . flow_config . steps self . _check_infinite_flows ( config_steps ) steps = [ ] for number , step_config in config_steps . items ( ) : specs = self . _visit_step ( number , step_config ) steps . extend ( specs ) return sorted ( steps , key = attrgetter ( "step_num" ) )
Given the flow config and everything else create a list of steps to run sorted by step number .
39,787
def _check_infinite_flows ( self , steps , flows = None ) : if flows is None : flows = [ ] for step in steps . values ( ) : if "flow" in step : flow = step [ "flow" ] if flow == "None" : continue if flow in flows : raise FlowInfiniteLoopError ( "Infinite flows detected with flow {}" . format ( flow ) ) flows . append ( flow ) flow_config = self . project_config . get_flow ( flow ) self . _check_infinite_flows ( flow_config . steps , flows )
Recursively loop through the flow_config and check if there are any cycles .
39,788
def _init_org(self):
    """Verify/refresh OAuth credentials for the org and persist the org
    back to the keychain if its config changed during refresh."""
    self.logger.info(
        "Verifying and refreshing credentials for the specified org: {}.".format(
            self.org_config.name
        )
    )
    config_before = self.org_config.config.copy()
    self.org_config.refresh_oauth_token(self.project_config.keychain)
    if self.org_config.config != config_before:
        self.logger.info("Org info has changed, updating org in keychain")
        self.project_config.keychain.set_org(self.org_config)
Test and refresh credentials to the org specified .
39,789
def resolve_return_value_options(self, options):
    """Resolve dynamic option values of the form ``^^task_name.attr``
    against return values from previously-run steps, in place."""
    for key, value in options.items():
        if not (
            isinstance(value, str)
            and value.startswith(RETURN_VALUE_OPTION_PREFIX)
        ):
            continue
        reference = value[len(RETURN_VALUE_OPTION_PREFIX):]
        task_path, attribute = reference.rsplit(".", 1)
        result = self._find_result_by_path(task_path)
        options[key] = result.return_values.get(attribute)
Handle dynamic option value lookups in the format ^^task_name . attr
39,790
def init_logger(log_requests=False):
    """Initialize the top-level package logger with colored output.

    :param log_requests: when True, also log urllib3 request activity.

    Fixes: the original removed handlers while iterating ``logger.handlers``
    directly, which skips every other handler; iterate a copy instead.
    """
    logger = logging.getLogger(__name__.split(".")[0])
    # Iterate over a copy: removing from the live list while iterating
    # skips handlers.
    for handler in list(logger.handlers):
        logger.removeHandler(handler)

    formatter = coloredlogs.ColoredFormatter(fmt="%(asctime)s: %(message)s")
    handler = logging.StreamHandler()
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    logger.propagate = False

    if log_requests:
        requests.packages.urllib3.add_stderr_logger()
Initialize the logger
39,791
def register_new_node(suffix_node_id=None):
    """Factory: create, publish, and return a new Node entity."""
    created = Node.Created(
        originator_id=uuid4(), suffix_node_id=suffix_node_id
    )
    node = Node.mutate(event=created)
    publish(created)
    return node
Factory method registers new node .
39,792
def register_new_edge(edge_id, first_char_index, last_char_index, source_node_id, dest_node_id):
    """Factory: create, publish, and return a new Edge entity."""
    created = Edge.Created(
        originator_id=edge_id,
        first_char_index=first_char_index,
        last_char_index=last_char_index,
        source_node_id=source_node_id,
        dest_node_id=dest_node_id,
    )
    edge = Edge.mutate(event=created)
    publish(created)
    return edge
Factory method registers new edge .
39,793
def register_new_suffix_tree(case_insensitive=False):
    """Factory: create, publish, and return a new SuffixTree with its root node."""
    assert isinstance(case_insensitive, bool)
    root_node = register_new_node()
    created = SuffixTree.Created(
        originator_id=uuid4(),
        root_node_id=root_node.id,
        case_insensitive=case_insensitive,
    )
    tree = SuffixTree.mutate(event=created)
    assert isinstance(tree, SuffixTree)
    # Seed the node cache with the root so traversal can start immediately.
    tree.nodes[root_node.id] = root_node
    publish(created)
    return tree
Factory method returns new suffix tree object .
39,794
def find_substring(substring, suffix_tree, edge_repo):
    """Return the start index of ``substring`` in the tree's string, or -1."""
    assert isinstance(substring, str)
    assert isinstance(suffix_tree, SuffixTree)
    assert isinstance(edge_repo, EventSourcedRepository)
    if not substring:
        return -1
    if suffix_tree.case_insensitive:
        substring = substring.lower()
    curr_node_id = suffix_tree.root_node_id
    i = 0
    while i < len(substring):
        # Follow the edge whose label starts with the next character.
        edge_id = make_edge_id(curr_node_id, substring[i])
        try:
            edge = edge_repo[edge_id]
        except RepositoryKeyError:
            # No outgoing edge for this character: not a substring.
            return -1
        # Compare only as much of the edge label as the substring still covers.
        ln = min(edge.length + 1, len(substring) - i)
        if substring[i:i + ln] != suffix_tree.string[edge.first_char_index:edge.first_char_index + ln]:
            return -1
        i += edge.length + 1
        curr_node_id = edge.dest_node_id
    # Start index = end of the final matched edge segment minus length matched.
    return edge.first_char_index - len(substring) + ln
Returns the index if substring in tree otherwise - 1 .
39,795
def _add_prefix(self, last_char_index):
    """The core construction method (Ukkonen-style extension).

    Extends the tree with the prefix ending at ``last_char_index``,
    splitting edges and creating nodes as needed while following suffix
    links via the active point.
    """
    last_parent_node_id = None
    while True:
        parent_node_id = self.active.source_node_id
        if self.active.explicit():
            edge_id = make_edge_id(self.active.source_node_id, self.string[last_char_index])
            if edge_id in self.edges:
                # Character already present from this node: rule 3, stop.
                break
        else:
            edge_id = make_edge_id(self.active.source_node_id, self.string[self.active.first_char_index])
            e = self.edges[edge_id]
            if self.string[e.first_char_index + self.active.length + 1] == self.string[last_char_index]:
                # Next character on the edge matches: rule 3, stop.
                break
            # Mismatch inside an edge: split it and insert from the new node.
            parent_node_id = self._split_edge(e, self.active)
        # Rule 2: add a new leaf edge for the new character.
        node = register_new_node()
        self.nodes[node.id] = node
        edge_id = make_edge_id(parent_node_id, self.string[last_char_index])
        e = register_new_edge(
            edge_id=edge_id,
            first_char_index=last_char_index,
            last_char_index=self.N,
            source_node_id=parent_node_id,
            dest_node_id=node.id,
        )
        self._insert_edge(e)
        # Wire up the suffix link created in the previous iteration.
        if last_parent_node_id is not None:
            self.nodes[last_parent_node_id].suffix_node_id = parent_node_id
        last_parent_node_id = parent_node_id
        # Move the active point to the next shorter suffix.
        if self.active.source_node_id == self.root_node_id:
            self.active.first_char_index += 1
        else:
            self.active.source_node_id = self.nodes[self.active.source_node_id].suffix_node_id
        self._canonize_suffix(self.active)
    if last_parent_node_id is not None:
        self.nodes[last_parent_node_id].suffix_node_id = parent_node_id
    self.active.last_char_index += 1
    self._canonize_suffix(self.active)
The core construction method .
39,796
def _canonize_suffix(self, suffix):
    """Canonize ``suffix``: walk it down the tree, consuming whole edges,
    until it is explicit or the next edge can't be fully consumed.

    Iterative equivalent of the original tail recursion.
    """
    while not suffix.explicit():
        edge_id = make_edge_id(suffix.source_node_id, self.string[suffix.first_char_index])
        edge = self.edges[edge_id]
        if edge.length > suffix.length:
            break
        suffix.first_char_index += edge.length + 1
        suffix.source_node_id = edge.dest_node_id
This canonizes the suffix walking along its suffix string until it is explicit or there are no more matched nodes .
39,797
def entity_from_snapshot(snapshot):
    """Reconstruct a domain entity from the given snapshot.

    Returns None when the snapshot state is None (a recorded deletion).
    """
    assert isinstance(snapshot, AbstractSnapshop), type(snapshot)
    if snapshot.state is None:
        return None
    entity_class = resolve_topic(snapshot.topic)
    return reconstruct_object(entity_class, snapshot.state)
Reconstructs domain entity from given snapshot .
39,798
def get_snapshot(self, entity_id, lt=None, lte=None):
    """Return the most recent snapshot for the entity, optionally bounded
    by version (``lt``/``lte``), or None when no snapshot exists."""
    snapshots = self.snapshot_store.get_domain_events(
        entity_id, lt=lt, lte=lte, limit=1, is_ascending=False
    )
    if len(snapshots) == 1:
        return snapshots[0]
Gets the last snapshot for entity optionally until a particular version number .
39,799
def take_snapshot(self, entity_id, entity, last_event_version):
    """Create a Snapshot of ``entity`` at ``last_event_version``, append it
    to the snapshot store, and return it.

    A None entity is recorded with None state (marks a deletion).
    """
    state = None if entity is None else deepcopy(entity.__dict__)
    snapshot = Snapshot(
        originator_id=entity_id,
        originator_version=last_event_version,
        topic=get_topic(entity.__class__),
        state=state,
    )
    self.snapshot_store.store(snapshot)
    return snapshot
Creates a Snapshot from the given state and appends it to the snapshot store .