idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
23,500 | def get_cookbook_dirs ( self , base_dir = None ) : if base_dir is None : base_dir = self . env_root cookbook_dirs = [ ] dirs_to_skip = set ( [ '.git' ] ) for root , dirs , files in os . walk ( base_dir ) : dirs [ : ] = [ d for d in dirs if d not in dirs_to_skip ] for name in files : if name == 'metadata.rb' : if 'cookbook' in os . path . basename ( os . path . dirname ( root ) ) : cookbook_dirs . append ( root ) return cookbook_dirs | Find cookbook directories . |
23,501 | def path_only_contains_dirs ( self , path ) : pathlistdir = os . listdir ( path ) if pathlistdir == [ ] : return True if any ( os . path . isfile ( os . path . join ( path , i ) ) for i in pathlistdir ) : return False return all ( self . path_only_contains_dirs ( os . path . join ( path , i ) ) for i in pathlistdir ) | Return boolean on whether a path only contains directories . |
23,502 | def get_empty_dirs ( self , path ) : empty_dirs = [ ] for i in os . listdir ( path ) : child_path = os . path . join ( path , i ) if i == '.git' or os . path . isfile ( child_path ) or os . path . islink ( child_path ) : continue if self . path_only_contains_dirs ( child_path ) : empty_dirs . append ( i ) return empty_dirs | Return a list of empty directories in path . |
23,503 | def parse_runway_config ( self ) : if not os . path . isfile ( self . runway_config_path ) : LOGGER . error ( "Runway config file was not found (looking for " "%s)" , self . runway_config_path ) sys . exit ( 1 ) with open ( self . runway_config_path ) as data_file : return yaml . safe_load ( data_file ) | Read and parse runway . yml . |
23,504 | def runway_config ( self ) : if not self . _runway_config : self . _runway_config = self . parse_runway_config ( ) return self . _runway_config | Return parsed runway . yml . |
23,505 | def purge_bucket ( context , provider , ** kwargs ) : session = get_session ( provider . region ) if kwargs . get ( 'bucket_name' ) : bucket_name = kwargs [ 'bucket_name' ] else : if kwargs . get ( 'bucket_output_lookup' ) : value = kwargs [ 'bucket_output_lookup' ] handler = OutputLookup . handle elif kwargs . get ( 'bucket_rxref_lookup' ) : value = kwargs [ 'bucket_rxref_lookup' ] handler = RxrefLookup . handle elif kwargs . get ( 'bucket_xref_lookup' ) : value = kwargs [ 'bucket_xref_lookup' ] handler = XrefLookup . handle else : LOGGER . fatal ( 'No bucket name/source provided.' ) return False try : session . client ( 'cloudformation' ) . describe_stacks ( StackName = context . get_fqn ( value . split ( '::' ) [ 0 ] ) ) except ClientError as exc : if 'does not exist' in exc . response [ 'Error' ] [ 'Message' ] : LOGGER . info ( 'S3 bucket stack appears to have already been ' 'deleted...' ) return True raise bucket_name = handler ( value , provider = provider , context = context ) s3_resource = session . resource ( 's3' ) try : s3_resource . meta . client . head_bucket ( Bucket = bucket_name ) except ClientError as exc : if exc . response [ 'Error' ] [ 'Code' ] == '404' : LOGGER . info ( "%s S3 bucket appears to have already been deleted..." , bucket_name ) return True raise bucket = s3_resource . Bucket ( bucket_name ) bucket . object_versions . delete ( ) return True | Delete objects in bucket . |
23,506 | def execute ( self ) : logging . getLogger ( 'runway' ) . setLevel ( logging . ERROR ) if not os . path . isfile ( 'runway.yml' ) : self . env_root = os . path . dirname ( os . getcwd ( ) ) self . runway_config_path = os . path . join ( self . env_root , 'runway.yml' ) print ( get_env ( self . env_root , self . runway_config . get ( 'ignore_git_branch' , False ) ) ) | Output environment name . |
23,507 | def update_api_endpoint ( ) : environment = subprocess . check_output ( [ 'pipenv' , 'run' , 'runway' , 'whichenv' ] ) . decode ( ) . strip ( ) environment_file = os . path . join ( os . path . dirname ( os . path . realpath ( __file__ ) ) , 'src' , 'environments' , 'environment.prod.ts' if environment == 'prod' else 'environment.ts' ) cloudformation = boto3 . resource ( 'cloudformation' ) stack = cloudformation . Stack ( STACK_PREFIX + environment ) endpoint = [ i [ 'OutputValue' ] for i in stack . outputs if i [ 'OutputKey' ] == 'ServiceEndpoint' ] [ 0 ] with open ( environment_file , 'r' ) as stream : content = stream . read ( ) content = re . sub ( r'api_url: \'.*\'$' , "api_url: '%s/api'" % endpoint , content , flags = re . M ) with open ( environment_file , 'w' ) as stream : stream . write ( content ) | Update app environment file with backend endpoint . |
23,508 | def change_dir ( newdir ) : prevdir = os . getcwd ( ) os . chdir ( os . path . expanduser ( newdir ) ) try : yield finally : os . chdir ( prevdir ) | Change directory . |
23,509 | def ensure_file_is_executable ( path ) : if platform . system ( ) != 'Windows' and ( not stat . S_IXUSR & os . stat ( path ) [ stat . ST_MODE ] ) : print ( "Error: File %s is not executable" % path ) sys . exit ( 1 ) | Exit if file is not executable . |
23,510 | def merge_dicts ( dict1 , dict2 , deep_merge = True ) : if deep_merge : if isinstance ( dict1 , list ) and isinstance ( dict2 , list ) : return dict1 + dict2 if not isinstance ( dict1 , dict ) or not isinstance ( dict2 , dict ) : return dict2 for key in dict2 : dict1 [ key ] = merge_dicts ( dict1 [ key ] , dict2 [ key ] ) if key in dict1 else dict2 [ key ] return dict1 dict3 = dict1 . copy ( ) dict3 . update ( dict2 ) return dict3 | Merge dict2 into dict1 . |
23,511 | def extract_boto_args_from_env ( env_vars ) : boto_args = { } for i in [ 'aws_access_key_id' , 'aws_secret_access_key' , 'aws_session_token' ] : if env_vars . get ( i . upper ( ) ) : boto_args [ i ] = env_vars [ i . upper ( ) ] return boto_args | Return boto3 client args dict with environment creds . |
23,512 | def flatten_path_lists ( env_dict , env_root = None ) : for ( key , val ) in env_dict . items ( ) : if isinstance ( val , list ) : env_dict [ key ] = os . path . join ( env_root , os . path . join ( * val ) ) if ( env_root and not os . path . isabs ( os . path . join ( * val ) ) ) else os . path . join ( * val ) return env_dict | Join paths in environment dict down to strings . |
23,513 | def merge_nested_environment_dicts ( env_dicts , env_name = None , env_root = None ) : if all ( isinstance ( val , ( six . string_types , list ) ) for ( _key , val ) in env_dicts . items ( ) ) : return flatten_path_lists ( env_dicts , env_root ) if env_name is None : if env_dicts . get ( '*' ) : return flatten_path_lists ( env_dicts . get ( '*' ) , env_root ) raise AttributeError ( "Provided config key:val pairs %s aren't usable with no environment provided" % env_dicts ) if not env_dicts . get ( '*' ) and not env_dicts . get ( env_name ) : raise AttributeError ( "Provided config key:val pairs %s aren't usable with environment %s" % ( env_dicts , env_name ) ) combined_dicts = merge_dicts ( env_dicts . get ( '*' , { } ) , env_dicts . get ( env_name , { } ) ) return flatten_path_lists ( combined_dicts , env_root ) | Return single - level dictionary from dictionary of dictionaries . |
23,514 | def get_embedded_lib_path ( ) : return os . path . join ( os . path . dirname ( os . path . abspath ( __file__ ) ) , 'embedded' ) | Return path of embedded libraries . |
23,515 | def get_hash_for_filename ( filename , hashfile_path ) : filehash = '' with open ( hashfile_path , 'r' ) as stream : for _cnt , line in enumerate ( stream ) : if line . rstrip ( ) . endswith ( filename ) : filehash = re . match ( r'^[A-Za-z0-9]*' , line ) . group ( 0 ) break if filehash : return filehash raise AttributeError ( "Filename %s not found in hash file" % filename ) | Return hash for filename in the hashfile . |
23,516 | def fix_windows_command_list ( commands ) : fully_qualified_cmd_path = which ( commands [ 0 ] ) if fully_qualified_cmd_path and ( not which ( commands [ 0 ] , add_win_suffixes = False ) ) : commands [ 0 ] = os . path . basename ( fully_qualified_cmd_path ) return commands | Return command list with working Windows commands . |
23,517 | def run_commands ( commands , directory , env = None ) : if env is None : env = os . environ . copy ( ) for step in commands : if isinstance ( step , ( list , six . string_types ) ) : execution_dir = directory raw_command = step elif step . get ( 'command' ) : execution_dir = os . path . join ( directory , step . get ( 'cwd' ) ) if step . get ( 'cwd' ) else directory raw_command = step [ 'command' ] else : raise AttributeError ( "Invalid command step: %s" % step ) command_list = raw_command . split ( ' ' ) if isinstance ( raw_command , six . string_types ) else raw_command if platform . system ( ) . lower ( ) == 'windows' : command_list = fix_windows_command_list ( command_list ) with change_dir ( execution_dir ) : check_call ( command_list , env = env ) | Run list of commands . |
23,518 | def sha256sum ( filename ) : sha256 = hashlib . sha256 ( ) mem_view = memoryview ( bytearray ( 128 * 1024 ) ) with open ( filename , 'rb' , buffering = 0 ) as stream : for i in iter ( lambda : stream . readinto ( mem_view ) , 0 ) : sha256 . update ( mem_view [ : i ] ) return sha256 . hexdigest ( ) | Return SHA256 hash of file . |
23,519 | def use_embedded_pkgs ( embedded_lib_path = None ) : if embedded_lib_path is None : embedded_lib_path = get_embedded_lib_path ( ) old_sys_path = list ( sys . path ) sys . path . insert ( 1 , embedded_lib_path ) try : yield finally : sys . path = old_sys_path | Temporarily prepend embedded packages to sys . path . |
23,520 | def which ( program , add_win_suffixes = True ) : def is_exe ( fpath ) : return os . path . isfile ( fpath ) and os . access ( fpath , os . X_OK ) fpath , fname = os . path . split ( program ) if add_win_suffixes and platform . system ( ) . lower ( ) == 'windows' and not ( fname . endswith ( '.exe' ) or fname . endswith ( '.cmd' ) ) : fnames = [ fname + '.exe' , fname + '.cmd' ] else : fnames = [ fname ] for i in fnames : if fpath : exe_file = os . path . join ( fpath , i ) if is_exe ( exe_file ) : return exe_file else : for path in os . environ [ 'PATH' ] . split ( os . pathsep ) : exe_file = os . path . join ( path , i ) if is_exe ( exe_file ) : return exe_file return None | Mimic which command behavior . |
23,521 | def create_config_backend_options ( module_opts , env_name , env_vars ) : backend_opts = { } if module_opts . get ( 'terraform_backend_config' ) : backend_opts [ 'config' ] = merge_nested_environment_dicts ( module_opts . get ( 'terraform_backend_config' ) , env_name ) if module_opts . get ( 'terraform_backend_cfn_outputs' ) : if not backend_opts . get ( 'config' ) : backend_opts [ 'config' ] = { } if not backend_opts [ 'config' ] . get ( 'region' ) : backend_opts [ 'config' ] [ 'region' ] = env_vars [ 'AWS_DEFAULT_REGION' ] boto_args = extract_boto_args_from_env ( env_vars ) cfn_client = boto3 . client ( 'cloudformation' , region_name = backend_opts [ 'config' ] [ 'region' ] , ** boto_args ) for ( key , val ) in merge_nested_environment_dicts ( module_opts . get ( 'terraform_backend_cfn_outputs' ) , env_name ) . items ( ) : backend_opts [ 'config' ] [ key ] = find_cfn_output ( val . split ( '::' ) [ 1 ] , cfn_client . describe_stacks ( StackName = val . split ( '::' ) [ 0 ] ) [ 'Stacks' ] [ 0 ] [ 'Outputs' ] ) return backend_opts | Return backend options defined in module options . |
23,522 | def get_backend_init_list ( backend_vals ) : cmd_list = [ ] for ( key , val ) in backend_vals . items ( ) : cmd_list . append ( '-backend-config' ) cmd_list . append ( key + '=' + val ) return cmd_list | Turn backend config dict into command line items . |
23,523 | def get_backend_tfvars_file ( path , environment , region ) : backend_filenames = gen_backend_tfvars_files ( environment , region ) for name in backend_filenames : if os . path . isfile ( os . path . join ( path , name ) ) : return name return backend_filenames [ - 1 ] | Determine Terraform backend file . |
23,524 | def get_module_defined_tf_var ( terraform_version_opts , env_name ) : if isinstance ( terraform_version_opts , six . string_types ) : return terraform_version_opts if terraform_version_opts . get ( env_name ) : return terraform_version_opts . get ( env_name ) if terraform_version_opts . get ( '*' ) : return terraform_version_opts . get ( '*' ) return None | Return version of Terraform requested in module options . |
23,525 | def get_workspace_tfvars_file ( path , environment , region ) : for name in gen_workspace_tfvars_files ( environment , region ) : if os . path . isfile ( os . path . join ( path , name ) ) : return name return "%s.tfvars" % environment | Determine Terraform workspace - specific tfvars file name . |
23,526 | def reinit_on_backend_changes ( tf_bin , module_path , backend_options , env_name , env_region , env_vars ) : terraform_dir = os . path . join ( module_path , '.terraform' ) local_tfstate_path = os . path . join ( terraform_dir , 'terraform.tfstate' ) current_backend_config = { } desired_backend_config = { } LOGGER . debug ( 'Comparing previous & desired Terraform backend configs' ) if os . path . isfile ( local_tfstate_path ) : with open ( local_tfstate_path , 'r' ) as stream : current_backend_config = hcl . load ( stream ) . get ( 'backend' , { } ) . get ( 'config' , { } ) if backend_options . get ( 'config' ) : desired_backend_config = backend_options . get ( 'config' ) elif os . path . isfile ( os . path . join ( module_path , backend_options . get ( 'filename' ) ) ) : with open ( os . path . join ( module_path , backend_options . get ( 'filename' ) ) , 'r' ) as stream : desired_backend_config = hcl . load ( stream ) for filename in [ 'main.tf' ] + glob . glob ( os . path . join ( module_path , '*.tf' ) ) : if os . path . isfile ( filename ) : with open ( filename , 'r' ) as stream : tf_config = hcl . load ( stream ) if tf_config . get ( 'terraform' , { } ) . get ( 'backend' ) : [ ( _s3key , tffile_backend_config ) ] = tf_config [ 'terraform' ] [ 'backend' ] . items ( ) desired_backend_config = merge_dicts ( desired_backend_config , tffile_backend_config ) break if current_backend_config != desired_backend_config : LOGGER . info ( "Desired and previously initialized TF backend config is " "out of sync; trashing local TF state directory %s" , terraform_dir ) send2trash ( terraform_dir ) run_terraform_init ( tf_bin = tf_bin , module_path = module_path , backend_options = backend_options , env_name = env_name , env_region = env_region , env_vars = env_vars ) | Clean terraform directory and run init if necessary . |
23,527 | def run_terraform_init ( tf_bin , module_path , backend_options , env_name , env_region , env_vars ) : init_cmd = [ tf_bin , 'init' ] cmd_opts = { 'env_vars' : env_vars , 'exit_on_error' : False } if backend_options . get ( 'config' ) : LOGGER . info ( 'Using provided backend values "%s"' , str ( backend_options . get ( 'config' ) ) ) cmd_opts [ 'cmd_list' ] = init_cmd + get_backend_init_list ( backend_options . get ( 'config' ) ) elif os . path . isfile ( os . path . join ( module_path , backend_options . get ( 'filename' ) ) ) : LOGGER . info ( 'Using backend config file %s' , backend_options . get ( 'filename' ) ) cmd_opts [ 'cmd_list' ] = init_cmd + [ '-backend-config=%s' % backend_options . get ( 'filename' ) ] else : LOGGER . info ( "No backend tfvars file found -- looking for one " "of \"%s\" (proceeding with bare 'terraform " "init')" , ', ' . join ( gen_backend_tfvars_files ( env_name , env_region ) ) ) cmd_opts [ 'cmd_list' ] = init_cmd try : run_module_command ( ** cmd_opts ) except subprocess . CalledProcessError as shelloutexc : if os . path . isdir ( os . path . join ( module_path , '.terraform' ) ) : with open ( os . path . join ( module_path , '.terraform' , FAILED_INIT_FILENAME ) , 'w' ) as stream : stream . write ( '1' ) sys . exit ( shelloutexc . returncode ) | Run Terraform init . |
23,528 | def find_command_class ( possible_command_names ) : for command_name in possible_command_names : if hasattr ( ALL_COMMANDS_MODULE , command_name ) : command_module = getattr ( ALL_COMMANDS_MODULE , command_name ) command_class_hierarchy = getmembers ( command_module , isclass ) command_class_tuple = list ( filter ( _not_base_class , command_class_hierarchy ) ) [ 0 ] return command_class_tuple [ 1 ] return None | Try to find a class for one of the given command names . |
23,529 | def generate_sample_module ( module_dir ) : if os . path . isdir ( module_dir ) : LOGGER . error ( "Error generating sample module -- directory %s " "already exists!" , module_dir ) sys . exit ( 1 ) os . mkdir ( module_dir ) | Generate skeleton sample module . |
23,530 | def generate_sample_sls_module ( env_root , module_dir = None ) : if module_dir is None : module_dir = os . path . join ( env_root , 'sampleapp.sls' ) generate_sample_module ( module_dir ) for i in [ 'config-dev-us-east-1.json' , 'handler.py' , 'package.json' , 'serverless.yml' ] : shutil . copyfile ( os . path . join ( ROOT , 'templates' , 'serverless' , i ) , os . path . join ( module_dir , i ) , ) LOGGER . info ( "Sample Serverless module created at %s" , module_dir ) | Generate skeleton Serverless sample module . |
23,531 | def generate_sample_sls_tsc_module ( env_root , module_dir = None ) : if module_dir is None : module_dir = os . path . join ( env_root , 'sampleapp.sls' ) generate_sample_module ( module_dir ) for i in [ 'package.json' , 'serverless.yml' , 'tsconfig.json' , 'webpack.config.js' ] : shutil . copyfile ( os . path . join ( ROOT , 'templates' , 'sls-tsc' , i ) , os . path . join ( module_dir , i ) , ) os . mkdir ( os . path . join ( module_dir , 'src' ) ) for i in [ 'handler.spec.ts' , 'handler.ts' ] : shutil . copyfile ( os . path . join ( ROOT , 'templates' , 'sls-tsc' , 'src' , i ) , os . path . join ( module_dir , 'src' , i ) , ) LOGGER . info ( "Sample Serverless TypeScript module created at %s" , module_dir ) | Generate skeleton Serverless TypeScript sample module . |
23,532 | def generate_sample_cdk_tsc_module ( env_root , module_dir = None ) : if module_dir is None : module_dir = os . path . join ( env_root , 'sampleapp.cdk' ) generate_sample_module ( module_dir ) for i in [ '.npmignore' , 'cdk.json' , 'package.json' , 'runway.module.yml' , 'tsconfig.json' , 'README.md' ] : shutil . copyfile ( os . path . join ( ROOT , 'templates' , 'cdk-tsc' , i ) , os . path . join ( module_dir , i ) , ) for i in [ [ 'bin' , 'sample.ts' ] , [ 'lib' , 'sample-stack.ts' ] ] : os . mkdir ( os . path . join ( module_dir , i [ 0 ] ) ) shutil . copyfile ( os . path . join ( ROOT , 'templates' , 'cdk-tsc' , i [ 0 ] , i [ 1 ] ) , os . path . join ( module_dir , i [ 0 ] , i [ 1 ] ) , ) with open ( os . path . join ( module_dir , '.gitignore' ) , 'w' ) as stream : stream . write ( '*.js\n' ) stream . write ( '*.d.ts\n' ) stream . write ( 'node_modules\n' ) LOGGER . info ( "Sample CDK module created at %s" , module_dir ) LOGGER . info ( 'To finish its setup, change to the %s directory and execute ' '"npm install" to generate its lockfile.' , module_dir ) | Generate skeleton CDK TS sample module . |
23,533 | def generate_sample_cdk_py_module ( env_root , module_dir = None ) : if module_dir is None : module_dir = os . path . join ( env_root , 'sampleapp.cdk' ) generate_sample_module ( module_dir ) for i in [ 'app.py' , 'cdk.json' , 'lambda-index.py' , 'package.json' , 'runway.module.yml' , 'Pipfile' ] : shutil . copyfile ( os . path . join ( ROOT , 'templates' , 'cdk-py' , i ) , os . path . join ( module_dir , i ) , ) with open ( os . path . join ( module_dir , '.gitignore' ) , 'w' ) as stream : stream . write ( 'node_modules' ) LOGGER . info ( "Sample CDK module created at %s" , module_dir ) LOGGER . info ( 'To finish its setup, change to the %s directory and execute ' '"npm install" and "pipenv update -d --three" to generate its ' 'lockfiles.' , module_dir ) | Generate skeleton CDK python sample module . |
23,534 | def generate_sample_cfn_module ( env_root , module_dir = None ) : if module_dir is None : module_dir = os . path . join ( env_root , 'sampleapp.cfn' ) generate_sample_module ( module_dir ) for i in [ 'stacks.yaml' , 'dev-us-east-1.env' ] : shutil . copyfile ( os . path . join ( ROOT , 'templates' , 'cfn' , i ) , os . path . join ( module_dir , i ) ) os . mkdir ( os . path . join ( module_dir , 'templates' ) ) with open ( os . path . join ( module_dir , 'templates' , 'tf_state.yml' ) , 'w' ) as stream : stream . write ( cfn_flip . flip ( check_output ( [ sys . executable , os . path . join ( ROOT , 'templates' , 'stacker' , 'tfstate_blueprints' , 'tf_state.py' ) ] ) ) ) LOGGER . info ( "Sample CloudFormation module created at %s" , module_dir ) | Generate skeleton CloudFormation sample module . |
23,535 | def generate_sample_stacker_module ( env_root , module_dir = None ) : if module_dir is None : module_dir = os . path . join ( env_root , 'runway-sample-tfstate.cfn' ) generate_sample_module ( module_dir ) for i in [ 'stacks.yaml' , 'dev-us-east-1.env' ] : shutil . copyfile ( os . path . join ( ROOT , 'templates' , 'stacker' , i ) , os . path . join ( module_dir , i ) ) os . mkdir ( os . path . join ( module_dir , 'tfstate_blueprints' ) ) for i in [ '__init__.py' , 'tf_state.py' ] : shutil . copyfile ( os . path . join ( ROOT , 'templates' , 'stacker' , 'tfstate_blueprints' , i ) , os . path . join ( module_dir , 'tfstate_blueprints' , i ) ) os . chmod ( os . path . join ( module_dir , 'tfstate_blueprints' , 'tf_state.py' ) , os . stat ( os . path . join ( module_dir , 'tfstate_blueprints' , 'tf_state.py' ) ) . st_mode | 0o0111 ) LOGGER . info ( "Sample Stacker module created at %s" , module_dir ) | Generate skeleton Stacker sample module . |
23,536 | def generate_sample_tf_module ( env_root , module_dir = None ) : if module_dir is None : module_dir = os . path . join ( env_root , 'sampleapp.tf' ) generate_sample_module ( module_dir ) for i in [ 'backend-us-east-1.tfvars' , 'dev-us-east-1.tfvars' , 'main.tf' ] : shutil . copyfile ( os . path . join ( ROOT , 'templates' , 'terraform' , i ) , os . path . join ( module_dir , i ) , ) tf_ver_template = os . path . join ( ROOT , 'templates' , 'terraform' , '.terraform-version' ) if os . path . isfile ( tf_ver_template ) : shutil . copyfile ( tf_ver_template , os . path . join ( module_dir , '.terraform-version' ) , ) else : latest_tf_ver = get_latest_tf_version ( ) with open ( os . path . join ( module_dir , '.terraform-version' ) , 'w' ) as stream : stream . write ( latest_tf_ver ) LOGGER . info ( "Sample Terraform app created at %s" , module_dir ) | Generate skeleton Terraform sample module . |
23,537 | def execute ( self ) : if self . _cli_arguments [ 'cfn' ] : generate_sample_cfn_module ( self . env_root ) elif self . _cli_arguments [ 'sls' ] : generate_sample_sls_module ( self . env_root ) elif self . _cli_arguments [ 'sls-tsc' ] : generate_sample_sls_tsc_module ( self . env_root ) elif self . _cli_arguments [ 'stacker' ] : generate_sample_stacker_module ( self . env_root ) elif self . _cli_arguments [ 'tf' ] : generate_sample_tf_module ( self . env_root ) elif self . _cli_arguments [ 'cdk-tsc' ] : generate_sample_cdk_tsc_module ( self . env_root ) elif self . _cli_arguments [ 'cdk-py' ] : generate_sample_cdk_py_module ( self . env_root ) elif self . _cli_arguments [ 'cdk-csharp' ] : generate_sample_cdk_cs_module ( self . env_root ) | Run selected module generator . |
23,538 | def execute ( self ) : if os . path . isfile ( 'runway.yml' ) : print ( 'Runway config already present' ) sys . exit ( 1 ) with open ( 'runway.yml' , 'w' ) as stream : stream . write ( ) print ( 'runway.yml generated' ) print ( 'See additional getting started information at ' 'https://docs.onica.com/projects/runway/en/latest/how_to_use.html' ) | Generate runway . yml . |
23,539 | def does_s3_object_exist ( bucket_name , key , session = None ) : if session : s3_resource = session . resource ( 's3' ) else : s3_resource = boto3 . resource ( 's3' ) try : s3_resource . Object ( bucket_name , key ) . load ( ) except ClientError as exc : if exc . response [ 'Error' ] [ 'Code' ] == '404' : return False raise return True | Determine if object exists on s3 . |
23,540 | def download_and_extract_to_mkdtemp ( bucket , key , session = None ) : if session : s3_client = session . client ( 's3' ) else : s3_client = boto3 . client ( 's3' ) transfer = S3Transfer ( s3_client ) filedes , temp_file = tempfile . mkstemp ( ) os . close ( filedes ) transfer . download_file ( bucket , key , temp_file ) output_dir = tempfile . mkdtemp ( ) zip_ref = zipfile . ZipFile ( temp_file , 'r' ) zip_ref . extractall ( output_dir ) zip_ref . close ( ) os . remove ( temp_file ) return output_dir | Download zip archive and extract it to temporary directory . |
23,541 | def zip_and_upload ( app_dir , bucket , key , session = None ) : if session : s3_client = session . client ( 's3' ) else : s3_client = boto3 . client ( 's3' ) transfer = S3Transfer ( s3_client ) filedes , temp_file = tempfile . mkstemp ( ) os . close ( filedes ) LOGGER . info ( "staticsite: archiving app at %s to s3://%s/%s" , app_dir , bucket , key ) with zipfile . ZipFile ( temp_file , 'w' , zipfile . ZIP_DEFLATED ) as filehandle : with change_dir ( app_dir ) : for dirname , _subdirs , files in os . walk ( './' ) : if dirname != './' : filehandle . write ( dirname ) for filename in files : filehandle . write ( os . path . join ( dirname , filename ) ) transfer . upload_file ( temp_file , bucket , key ) os . remove ( temp_file ) | Zip built static site and upload to S3 . |
23,542 | def build ( context , provider , ** kwargs ) : session = get_session ( provider . region ) options = kwargs . get ( 'options' , { } ) context_dict = { } context_dict [ 'artifact_key_prefix' ] = "%s-%s-" % ( options [ 'namespace' ] , options [ 'name' ] ) default_param_name = "%shash" % context_dict [ 'artifact_key_prefix' ] if options . get ( 'build_output' ) : build_output = os . path . join ( options [ 'path' ] , options [ 'build_output' ] ) else : build_output = options [ 'path' ] context_dict [ 'artifact_bucket_name' ] = RxrefLookup . handle ( kwargs . get ( 'artifact_bucket_rxref_lookup' ) , provider = provider , context = context ) if options . get ( 'pre_build_steps' ) : run_commands ( options [ 'pre_build_steps' ] , options [ 'path' ] ) context_dict [ 'hash' ] = get_hash_of_files ( root_path = options [ 'path' ] , directories = options . get ( 'source_hashing' , { } ) . get ( 'directories' ) ) if options . get ( 'source_hashing' , { } ) . get ( 'enabled' , True ) : context_dict [ 'hash_tracking_parameter' ] = options . get ( 'source_hashing' , { } ) . get ( 'parameter' , default_param_name ) ssm_client = session . client ( 'ssm' ) try : old_parameter_value = ssm_client . get_parameter ( Name = context_dict [ 'hash_tracking_parameter' ] ) [ 'Parameter' ] [ 'Value' ] except ssm_client . exceptions . ParameterNotFound : old_parameter_value = None else : context_dict [ 'hash_tracking_disabled' ] = True old_parameter_value = None context_dict [ 'current_archive_filename' ] = ( context_dict [ 'artifact_key_prefix' ] + context_dict [ 'hash' ] + '.zip' ) if old_parameter_value : context_dict [ 'old_archive_filename' ] = ( context_dict [ 'artifact_key_prefix' ] + old_parameter_value + '.zip' ) if old_parameter_value == context_dict [ 'hash' ] : LOGGER . info ( "staticsite: skipping build; app hash %s already deployed " "in this environment" , context_dict [ 'hash' ] ) context_dict [ 'deploy_is_current' ] = True return context_dict if does_s3_object_exist ( context_dict [ 'artifact_bucket_name' ] , context_dict [ 'current_archive_filename' ] , session ) : context_dict [ 'app_directory' ] = download_and_extract_to_mkdtemp ( context_dict [ 'artifact_bucket_name' ] , context_dict [ 'current_archive_filename' ] , session ) else : if options . get ( 'build_steps' ) : LOGGER . info ( 'staticsite: executing build commands' ) run_commands ( options [ 'build_steps' ] , options [ 'path' ] ) zip_and_upload ( build_output , context_dict [ 'artifact_bucket_name' ] , context_dict [ 'current_archive_filename' ] , session ) context_dict [ 'app_directory' ] = build_output context_dict [ 'deploy_is_current' ] = False return context_dict | Build static site . |
23,543 | def ensure_valid_environment_config ( module_name , config ) : if not config . get ( 'namespace' ) : LOGGER . fatal ( "staticsite: module %s's environment configuration is " "missing a namespace definition!" , module_name ) sys . exit ( 1 ) | Exit if config is invalid . |
23,544 | def plan ( self ) : if self . options . get ( 'environments' , { } ) . get ( self . context . env_name ) : self . setup_website_module ( command = 'plan' ) else : LOGGER . info ( "Skipping staticsite plan of %s; no environment " "config found for this environment/region" , self . options [ 'path' ] ) | Create website CFN module and run stacker diff . |
23,545 | def format_npm_command_for_logging ( command ) : if platform . system ( ) . lower ( ) == 'windows' : if command [ 0 ] == 'npx.cmd' and command [ 1 ] == '-c' : return "npx.cmd -c \"%s\"" % " " . join ( command [ 2 : ] ) return " " . join ( command ) return " " . join ( command ) . replace ( '\'\'' , '\'' ) | Convert npm command list to string for display to user . |
23,546 | def generate_node_command ( command , command_opts , path ) : if which ( NPX_BIN ) : LOGGER . debug ( "Using npx to invoke %s." , command ) if platform . system ( ) . lower ( ) == 'windows' : cmd_list = [ NPX_BIN , '-c' , "%s %s" % ( command , ' ' . join ( command_opts ) ) ] else : cmd_list = [ NPX_BIN , '-c' , "''%s %s''" % ( command , ' ' . join ( command_opts ) ) ] else : LOGGER . debug ( 'npx not found; falling back invoking %s shell script ' 'directly.' , command ) cmd_list = [ os . path . join ( path , 'node_modules' , '.bin' , command ) ] + command_opts return cmd_list | Return node bin command list for subprocess execution . |
23,547 | def run_module_command ( cmd_list , env_vars , exit_on_error = True ) : if exit_on_error : try : subprocess . check_call ( cmd_list , env = env_vars ) except subprocess . CalledProcessError as shelloutexc : sys . exit ( shelloutexc . returncode ) else : subprocess . check_call ( cmd_list , env = env_vars ) | Shell out to provisioner command . |
23,548 | def use_npm_ci ( path ) : with open ( os . devnull , 'w' ) as fnull : if ( ( os . path . isfile ( os . path . join ( path , 'package-lock.json' ) ) or os . path . isfile ( os . path . join ( path , 'npm-shrinkwrap.json' ) ) ) and subprocess . call ( [ NPM_BIN , 'ci' , '-h' ] , stdout = fnull , stderr = subprocess . STDOUT ) == 0 ) : return True return False | Return true if npm ci should be used in lieu of npm install . |
23,549 | def cdk_module_matches_env ( env_name , env_config , env_vars ) : if env_config . get ( env_name ) : current_env_config = env_config [ env_name ] if isinstance ( current_env_config , type ( True ) ) and current_env_config : return True if isinstance ( current_env_config , six . string_types ) : ( account_id , region ) = current_env_config . split ( '/' ) if region == env_vars [ 'AWS_DEFAULT_REGION' ] : boto_args = extract_boto_args_from_env ( env_vars ) sts_client = boto3 . client ( 'sts' , region_name = env_vars [ 'AWS_DEFAULT_REGION' ] , ** boto_args ) if sts_client . get_caller_identity ( ) [ 'Account' ] == account_id : return True if isinstance ( current_env_config , dict ) : return True return False | Return bool on whether cdk command should continue in current env . |
23,550 | def get_cdk_stacks ( module_path , env_vars , context_opts ) : LOGGER . debug ( 'Listing stacks in the CDK app prior to ' 'diff' ) return subprocess . check_output ( generate_node_command ( command = 'cdk' , command_opts = [ 'list' ] + context_opts , path = module_path ) , env = env_vars ) . strip ( ) . split ( '\n' ) | Return list of CDK stacks . |
23,551 | def assume_role ( role_arn , session_name = None , duration_seconds = None , region = 'us-east-1' , env_vars = None ) : if session_name is None : session_name = 'runway' assume_role_opts = { 'RoleArn' : role_arn , 'RoleSessionName' : session_name } if duration_seconds : assume_role_opts [ 'DurationSeconds' ] = int ( duration_seconds ) boto_args = { } if env_vars : for i in [ 'aws_access_key_id' , 'aws_secret_access_key' , 'aws_session_token' ] : if env_vars . get ( i . upper ( ) ) : boto_args [ i ] = env_vars [ i . upper ( ) ] sts_client = boto3 . client ( 'sts' , region_name = region , ** boto_args ) LOGGER . info ( "Assuming role %s..." , role_arn ) response = sts_client . assume_role ( ** assume_role_opts ) return { 'AWS_ACCESS_KEY_ID' : response [ 'Credentials' ] [ 'AccessKeyId' ] , 'AWS_SECRET_ACCESS_KEY' : response [ 'Credentials' ] [ 'SecretAccessKey' ] , 'AWS_SESSION_TOKEN' : response [ 'Credentials' ] [ 'SessionToken' ] } | Assume IAM role . |
23,552 | def determine_module_class ( path , class_path ) : if not class_path : basename = os . path . basename ( path ) if basename . endswith ( '.sls' ) : class_path = 'runway.module.serverless.Serverless' elif basename . endswith ( '.tf' ) : class_path = 'runway.module.terraform.Terraform' elif basename . endswith ( '.cdk' ) : class_path = 'runway.module.cdk.CloudDevelopmentKit' elif basename . endswith ( '.cfn' ) : class_path = 'runway.module.cloudformation.CloudFormation' if not class_path : if os . path . isfile ( os . path . join ( path , 'serverless.yml' ) ) : class_path = 'runway.module.serverless.Serverless' elif glob . glob ( os . path . join ( path , '*.tf' ) ) : class_path = 'runway.module.terraform.Terraform' elif os . path . isfile ( os . path . join ( path , 'cdk.json' ) ) and os . path . isfile ( os . path . join ( path , 'package.json' ) ) : class_path = 'runway.module.cdk.CloudDevelopmentKit' elif glob . glob ( os . path . join ( path , '*.env' ) ) or ( glob . glob ( os . path . join ( path , '*.yaml' ) ) ) or ( glob . glob ( os . path . join ( path , '*.yml' ) ) ) : class_path = 'runway.module.cloudformation.CloudFormation' if not class_path : LOGGER . error ( 'No module class found for %s' , os . path . basename ( path ) ) sys . exit ( 1 ) return load_object_from_string ( class_path ) | Determine type of module and return deployment module class . |
23,553 | def load_module_opts_from_file ( path , module_options ) : module_options_file = os . path . join ( path , 'runway.module.yml' ) if os . path . isfile ( module_options_file ) : with open ( module_options_file , 'r' ) as stream : module_options = merge_dicts ( module_options , yaml . safe_load ( stream ) ) return module_options | Update module_options with any options defined in module path . |
23,554 | def post_deploy_assume_role ( assume_role_config , context ) : if isinstance ( assume_role_config , dict ) : if assume_role_config . get ( 'post_deploy_env_revert' ) : context . restore_existing_iam_env_vars ( ) | Revert to previous credentials if necessary . |
23,555 | def validate_account_alias ( iam_client , account_alias ) : current_account_aliases = [ ] paginator = iam_client . get_paginator ( 'list_account_aliases' ) response_iterator = paginator . paginate ( ) for page in response_iterator : current_account_aliases . extend ( page . get ( 'AccountAliases' , [ ] ) ) if account_alias in current_account_aliases : LOGGER . info ( 'Verified current AWS account alias matches required ' 'alias %s.' , account_alias ) else : LOGGER . error ( 'Current AWS account aliases "%s" do not match ' 'required account alias %s in Runway config.' , ',' . join ( current_account_aliases ) , account_alias ) sys . exit ( 1 ) | Exit if list_account_aliases doesn t include account_alias . |
23,556 | def validate_account_id ( sts_client , account_id ) : resp = sts_client . get_caller_identity ( ) if 'Account' in resp : if resp [ 'Account' ] == account_id : LOGGER . info ( 'Verified current AWS account matches required ' 'account id %s.' , account_id ) else : LOGGER . error ( 'Current AWS account %s does not match ' 'required account %s in Runway config.' , resp [ 'Account' ] , account_id ) sys . exit ( 1 ) else : LOGGER . error ( 'Error checking current account ID' ) sys . exit ( 1 ) | Exit if get_caller_identity doesn t match account_id . |
23,557 | def validate_account_credentials ( deployment , context ) : boto_args = { 'region_name' : context . env_vars [ 'AWS_DEFAULT_REGION' ] } for i in [ 'aws_access_key_id' , 'aws_secret_access_key' , 'aws_session_token' ] : if context . env_vars . get ( i . upper ( ) ) : boto_args [ i ] = context . env_vars [ i . upper ( ) ] if isinstance ( deployment . get ( 'account-id' ) , ( int , six . string_types ) ) : account_id = str ( deployment [ 'account-id' ] ) elif deployment . get ( 'account-id' , { } ) . get ( context . env_name ) : account_id = str ( deployment [ 'account-id' ] [ context . env_name ] ) else : account_id = None if account_id : validate_account_id ( boto3 . client ( 'sts' , ** boto_args ) , account_id ) if isinstance ( deployment . get ( 'account-alias' ) , six . string_types ) : account_alias = deployment [ 'account-alias' ] elif deployment . get ( 'account-alias' , { } ) . get ( context . env_name ) : account_alias = deployment [ 'account-alias' ] [ context . env_name ] else : account_alias = None if account_alias : validate_account_alias ( boto3 . client ( 'iam' , ** boto_args ) , account_alias ) | Exit if requested deployment account doesn t match credentials . |
23,558 | def echo_detected_environment ( env_name , env_vars ) : env_override_name = 'DEPLOY_ENVIRONMENT' LOGGER . info ( "" ) if env_override_name in env_vars : LOGGER . info ( "Environment \"%s\" was determined from the %s environment variable." , env_name , env_override_name ) LOGGER . info ( "If this is not correct, update " "the value (or unset it to fall back to the name of " "the current git branch or parent directory)." ) else : LOGGER . info ( "Environment \"%s\" was determined from the current " "git branch or parent directory." , env_name ) LOGGER . info ( "If this is not the environment name, update the branch/folder name or " "set an override value via the %s environment variable" , env_override_name ) LOGGER . info ( "" ) | Print a helper note about how the environment was determined . |
23,559 | def _module_menu_entry ( module , environment_name ) : name = _module_name_for_display ( module ) if isinstance ( module , dict ) : environment_config = module . get ( 'environments' , { } ) . get ( environment_name ) if environment_config : return "%s (%s)" % ( name , environment_config ) return "%s" % ( name ) | Build a string to display in the select module menu . |
23,560 | def _deployment_menu_entry ( deployment ) : paths = ", " . join ( [ _module_name_for_display ( module ) for module in deployment [ 'modules' ] ] ) regions = ", " . join ( deployment . get ( 'regions' , [ ] ) ) return "%s - %s (%s)" % ( deployment . get ( 'name' ) , paths , regions ) | Build a string to display in the select deployment menu . |
23,561 | def select_deployment_to_run ( env_name , deployments = None , command = 'build' ) : if deployments is None or not deployments : return [ ] deployments_to_run = [ ] num_deployments = len ( deployments ) if num_deployments == 1 : selected_deployment_index = 1 else : print ( '' ) print ( 'Configured deployments:' ) for i , deployment in enumerate ( deployments ) : print ( " %d: %s" % ( i + 1 , _deployment_menu_entry ( deployment ) ) ) print ( '' ) print ( '' ) if command == 'destroy' : print ( '(Operating in destroy mode -- "all" will destroy all ' 'deployments in reverse order)' ) selected_deployment_index = input ( 'Enter number of deployment to run (or "all"): ' ) if selected_deployment_index == 'all' : return deployments if selected_deployment_index == '' : LOGGER . error ( 'Please select a valid number (or "all")' ) sys . exit ( 1 ) selected_deployment = deployments [ int ( selected_deployment_index ) - 1 ] if selected_deployment . get ( 'current_dir' , False ) : deployments_to_run . append ( selected_deployment ) elif not selected_deployment . get ( 'modules' , [ ] ) : LOGGER . error ( 'No modules configured in selected deployment' ) sys . exit ( 1 ) elif len ( selected_deployment [ 'modules' ] ) == 1 : if command == 'destroy' : LOGGER . info ( '(only one deployment detected; all modules ' 'automatically selected for termination)' ) if not strtobool ( input ( 'Proceed?: ' ) ) : sys . exit ( 0 ) deployments_to_run . append ( selected_deployment ) else : modules = selected_deployment [ 'modules' ] print ( '' ) print ( 'Configured modules in deployment \'%s\':' % selected_deployment . get ( 'name' ) ) for i , module in enumerate ( modules ) : print ( " %s: %s" % ( i + 1 , _module_menu_entry ( module , env_name ) ) ) print ( '' ) print ( '' ) if command == 'destroy' : print ( '(Operating in destroy mode -- "all" will destroy all ' 'deployments in reverse order)' ) selected_module_index = input ( 'Enter number of module to run (or "all"): ' ) if selected_module_index == 'all' : deployments_to_run . append ( selected_deployment ) elif selected_module_index == '' or ( not selected_module_index . isdigit ( ) or ( not 0 < int ( selected_module_index ) <= len ( modules ) ) ) : LOGGER . error ( 'Please select a valid number (or "all")' ) sys . exit ( 1 ) else : selected_deployment [ 'modules' ] = [ modules [ int ( selected_module_index ) - 1 ] ] deployments_to_run . append ( selected_deployment ) LOGGER . debug ( 'Selected deployment is %s...' , deployments_to_run ) return deployments_to_run | Query user for deployments to run . |
23,562 | def gen_sls_config_files ( stage , region ) : names = [ ] for ext in [ 'yml' , 'json' ] : names . append ( os . path . join ( 'env' , "%s-%s.%s" % ( stage , region , ext ) ) ) names . append ( "config-%s-%s.%s" % ( stage , region , ext ) ) names . append ( os . path . join ( 'env' , "%s.%s" % ( stage , ext ) ) ) names . append ( "config-%s.%s" % ( stage , ext ) ) return names | Generate possible SLS config files names . |
23,563 | def get_sls_config_file ( path , stage , region ) : for name in gen_sls_config_files ( stage , region ) : if os . path . isfile ( os . path . join ( path , name ) ) : return name return "config-%s.json" % stage | Determine Serverless config file name . |
23,564 | def run_sls_remove ( sls_cmd , env_vars ) : sls_process = subprocess . Popen ( sls_cmd , stdout = subprocess . PIPE , env = env_vars ) stdoutdata , _stderrdata = sls_process . communicate ( ) sls_return = sls_process . wait ( ) print ( stdoutdata ) if sls_return != 0 and ( sls_return == 1 and not ( re . search ( r"Stack '.*' does not exist" , stdoutdata ) ) ) : sys . exit ( sls_return ) | Run sls remove command . |
23,565 | def run_serverless ( self , command = 'deploy' ) : response = { 'skipped_configs' : False } sls_opts = [ command ] if not which ( 'npm' ) : LOGGER . error ( '"npm" not found in path or is not executable; ' 'please ensure it is installed correctly.' ) sys . exit ( 1 ) if 'CI' in self . context . env_vars and command != 'remove' : sls_opts . append ( '--conceal' ) if 'DEBUG' in self . context . env_vars : sls_opts . append ( '-v' ) warn_on_boto_env_vars ( self . context . env_vars ) sls_opts . extend ( [ '-r' , self . context . env_region ] ) sls_opts . extend ( [ '--stage' , self . context . env_name ] ) sls_env_file = get_sls_config_file ( self . path , self . context . env_name , self . context . env_region ) sls_cmd = generate_node_command ( command = 'sls' , command_opts = sls_opts , path = self . path ) if ( not self . options . get ( 'environments' ) and os . path . isfile ( os . path . join ( self . path , sls_env_file ) ) ) or ( self . options . get ( 'environments' , { } ) . get ( self . context . env_name ) ) : if os . path . isfile ( os . path . join ( self . path , 'package.json' ) ) : with change_dir ( self . path ) : run_npm_install ( self . path , self . options , self . context ) LOGGER . info ( "Running sls %s on %s (\"%s\")" , command , os . path . basename ( self . path ) , format_npm_command_for_logging ( sls_cmd ) ) if command == 'remove' : run_sls_remove ( sls_cmd , self . context . env_vars ) else : run_module_command ( cmd_list = sls_cmd , env_vars = self . context . env_vars ) else : LOGGER . warning ( "Skipping serverless %s of %s; no \"package.json\" " "file was found (need a package file specifying " "serverless in devDependencies)" , command , os . path . basename ( self . path ) ) else : response [ 'skipped_configs' ] = True LOGGER . info ( "Skipping serverless %s of %s; no config file for " "this stage/region found (looking for one of \"%s\")" , command , os . path . basename ( self . path ) , ', ' . join ( gen_sls_config_files ( self . context . env_name , self . context . env_region ) ) ) return response | Run Serverless . |
23,566 | def aws_cli ( * cmd ) : old_env = dict ( os . environ ) try : env = os . environ . copy ( ) env [ 'LC_CTYPE' ] = u'en_US.UTF' os . environ . update ( env ) exit_code = create_clidriver ( ) . main ( * cmd ) if exit_code > 0 : raise RuntimeError ( 'AWS CLI exited with code {}' . format ( exit_code ) ) finally : os . environ . clear ( ) os . environ . update ( old_env ) | Invoke aws command . |
23,567 | def get_archives_to_prune ( archives , hook_data ) : files_to_skip = [ ] for i in [ 'current_archive_filename' , 'old_archive_filename' ] : if hook_data . get ( i ) : files_to_skip . append ( hook_data [ i ] ) archives . sort ( key = itemgetter ( 'LastModified' ) , reverse = False ) return [ i [ 'Key' ] for i in archives [ : - 15 ] if i [ 'Key' ] not in files_to_skip ] | Return list of keys to delete . |
23,568 | def sync ( context , provider , ** kwargs ) : session = get_session ( provider . region ) bucket_name = OutputLookup . handle ( kwargs . get ( 'bucket_output_lookup' ) , provider = provider , context = context ) if context . hook_data [ 'staticsite' ] [ 'deploy_is_current' ] : LOGGER . info ( 'staticsite: skipping upload; latest version already ' 'deployed' ) else : distribution_id = OutputLookup . handle ( kwargs . get ( 'distributionid_output_lookup' ) , provider = provider , context = context ) distribution_domain = OutputLookup . handle ( kwargs . get ( 'distributiondomain_output_lookup' ) , provider = provider , context = context ) aws_cli ( [ 's3' , 'sync' , context . hook_data [ 'staticsite' ] [ 'app_directory' ] , "s3://%s/" % bucket_name , '--delete' ] ) cf_client = session . client ( 'cloudfront' ) cf_client . create_invalidation ( DistributionId = distribution_id , InvalidationBatch = { 'Paths' : { 'Quantity' : 1 , 'Items' : [ '/*' ] } , 'CallerReference' : str ( time . time ( ) ) } ) LOGGER . info ( "staticsite: sync & CF invalidation of %s (domain %s) " "complete" , distribution_id , distribution_domain ) if not context . hook_data [ 'staticsite' ] . get ( 'hash_tracking_disabled' ) : LOGGER . info ( "staticsite: updating environment SSM parameter %s " "with hash %s" , context . hook_data [ 'staticsite' ] [ 'hash_tracking_parameter' ] , context . hook_data [ 'staticsite' ] [ 'hash' ] ) ssm_client = session . client ( 'ssm' ) ssm_client . put_parameter ( Name = context . hook_data [ 'staticsite' ] [ 'hash_tracking_parameter' ] , Description = 'Hash of currently deployed static website source' , Value = context . hook_data [ 'staticsite' ] [ 'hash' ] , Type = 'String' , Overwrite = True ) LOGGER . info ( "staticsite: cleaning up old site archives..." ) archives = [ ] s3_client = session . client ( 's3' ) list_objects_v2_paginator = s3_client . get_paginator ( 'list_objects_v2' ) response_iterator = list_objects_v2_paginator . paginate ( Bucket = context . hook_data [ 'staticsite' ] [ 'artifact_bucket_name' ] , Prefix = context . hook_data [ 'staticsite' ] [ 'artifact_key_prefix' ] ) for page in response_iterator : archives . extend ( page . get ( 'Contents' , [ ] ) ) archives_to_prune = get_archives_to_prune ( archives , context . hook_data [ 'staticsite' ] ) for objects in [ archives_to_prune [ i : i + 1000 ] for i in range ( 0 , len ( archives_to_prune ) , 1000 ) ] : s3_client . delete_objects ( Bucket = context . hook_data [ 'staticsite' ] [ 'artifact_bucket_name' ] , Delete = { 'Objects' : [ { 'Key' : i } for i in objects ] } ) return True | Sync static website to S3 bucket . |
23,569 | def ensure_stacker_compat_config ( config_filename ) : try : with open ( config_filename , 'r' ) as stream : yaml . safe_load ( stream ) except yaml . constructor . ConstructorError as yaml_error : if yaml_error . problem . startswith ( 'could not determine a constructor for the tag \'!' ) : LOGGER . error ( '"%s" appears to be a CloudFormation template, ' 'but is located in the top level of a module ' 'alongside the CloudFormation config files (i.e. ' 'the file or files indicating the stack names & ' 'parameters). Please move the template to a ' 'subdirectory.' , config_filename ) sys . exit ( 1 ) | Ensure config file can be loaded by Stacker . |
23,570 | def get_stacker_env_file ( path , environment , region ) : for name in gen_stacker_env_files ( environment , region ) : if os . path . isfile ( os . path . join ( path , name ) ) : return name return "%s-%s.env" % ( environment , region ) | Determine Stacker environment file name . |
23,571 | def make_stacker_cmd_string ( args , lib_path ) : if platform . system ( ) . lower ( ) == 'windows' : lib_path = lib_path . replace ( '\\' , '/' ) return ( "import sys;" "sys.argv = ['stacker'] + {args};" "sys.path.insert(1, '{lib_path}');" "from stacker.logger import setup_logging;" "from stacker.commands import Stacker;" "stacker = Stacker(setup_logging=setup_logging);" "args = stacker.parse_args({args});" "stacker.configure(args);args.run(args)" . format ( args = str ( args ) , lib_path = lib_path ) ) | Generate stacker invocation script from command line arg list . |
23,572 | def run_stacker ( self , command = 'diff' ) : response = { 'skipped_configs' : False } stacker_cmd = [ command , "--region=%s" % self . context . env_region ] if command == 'destroy' : stacker_cmd . append ( '--force' ) elif command == 'build' : if 'CI' in self . context . env_vars : stacker_cmd . append ( '--recreate-failed' ) else : stacker_cmd . append ( '--interactive' ) if 'DEBUG' in self . context . env_vars : stacker_cmd . append ( '--verbose' ) stacker_env_file = get_stacker_env_file ( self . path , self . context . env_name , self . context . env_region ) stacker_env_file_present = os . path . isfile ( os . path . join ( self . path , stacker_env_file ) ) if isinstance ( self . options . get ( 'environments' , { } ) . get ( self . context . env_name ) , dict ) : for ( key , val ) in self . options [ 'environments' ] [ self . context . env_name ] . items ( ) : stacker_cmd . extend ( [ '-e' , "%s=%s" % ( key , val ) ] ) if stacker_env_file_present : stacker_cmd . append ( stacker_env_file ) if not ( stacker_env_file_present or self . options . get ( 'environments' , { } ) . get ( self . context . env_name ) ) : response [ 'skipped_configs' ] = True LOGGER . info ( "Skipping stacker %s; no environment " "file found for this environment/region " "(looking for one of \"%s\")" , command , ', ' . join ( gen_stacker_env_files ( self . context . env_name , self . context . env_region ) ) ) else : with change_dir ( self . path ) : for _root , _dirs , files in os . walk ( self . path ) : sorted_files = sorted ( files ) if command == 'destroy' : sorted_files = reversed ( sorted_files ) for name in sorted_files : if re . match ( r"runway(\..*)?\.yml" , name ) or ( name . startswith ( '.' ) ) : continue if os . path . splitext ( name ) [ 1 ] in [ '.yaml' , '.yml' ] : ensure_stacker_compat_config ( os . path . join ( self . path , name ) ) LOGGER . info ( "Running stacker %s on %s in region %s" , command , name , self . context . env_region ) stacker_cmd_str = make_stacker_cmd_string ( stacker_cmd + [ name ] , get_embedded_lib_path ( ) ) stacker_cmd_list = [ sys . executable , '-c' ] LOGGER . debug ( "Stacker command being executed: %s \"%s\"" , ' ' . join ( stacker_cmd_list ) , stacker_cmd_str ) run_module_command ( cmd_list = stacker_cmd_list + [ stacker_cmd_str ] , env_vars = self . context . env_vars ) break return response | Run Stacker . |
23,573 | def get_cf_distribution_class ( ) : if LooseVersion ( troposphere . __version__ ) == LooseVersion ( '2.4.0' ) : cf_dist = cloudfront . Distribution cf_dist . props [ 'DistributionConfig' ] = ( DistributionConfig , True ) return cf_dist return cloudfront . Distribution | Return the correct troposphere CF distribution class . |
23,574 | def get_s3_origin_conf_class ( ) : if LooseVersion ( troposphere . __version__ ) > LooseVersion ( '2.4.0' ) : return cloudfront . S3OriginConfig if LooseVersion ( troposphere . __version__ ) == LooseVersion ( '2.4.0' ) : return S3OriginConfig return cloudfront . S3Origin | Return the correct S3 Origin Config class for troposphere . |
23,575 | def set_available ( self , show = None ) : show = self . state . show if show is None else show self . set_presence ( PresenceState ( available = True , show = show ) ) | Sets the agent availability to True . |
23,576 | def set_unavailable ( self ) : show = PresenceShow . NONE self . set_presence ( PresenceState ( available = False , show = show ) ) | Sets the agent availability to False . |
23,577 | def set_presence ( self , state = None , status = None , priority = None ) : state = state if state is not None else self . state status = status if status is not None else self . status priority = priority if priority is not None else self . priority self . presenceserver . set_presence ( state , status , priority ) | Change the presence broadcast by the client . If the client is currently connected the new presence is broadcast immediately . |
23,578 | def get_contacts ( self ) : for jid , item in self . roster . items . items ( ) : try : self . _contacts [ jid . bare ( ) ] . update ( item . export_as_json ( ) ) except KeyError : self . _contacts [ jid . bare ( ) ] = item . export_as_json ( ) return self . _contacts | Returns list of contacts |
23,579 | def get_contact ( self , jid ) : try : return self . get_contacts ( ) [ jid . bare ( ) ] except KeyError : raise ContactNotFound except AttributeError : raise AttributeError ( "jid must be an aioxmpp.JID object" ) | Returns a contact |
23,580 | def subscribe ( self , peer_jid ) : self . roster . subscribe ( aioxmpp . JID . fromstr ( peer_jid ) . bare ( ) ) | Asks for subscription |
23,581 | def unsubscribe ( self , peer_jid ) : self . roster . unsubscribe ( aioxmpp . JID . fromstr ( peer_jid ) . bare ( ) ) | Asks for unsubscription |
23,582 | def approve ( self , peer_jid ) : self . roster . approve ( aioxmpp . JID . fromstr ( peer_jid ) . bare ( ) ) | Approve a subscription request from jid |
23,583 | def append ( self , event , category = None ) : date = datetime . datetime . now ( ) self . store . insert ( 0 , ( date , event , category ) ) if len ( self . store ) > self . size : del self . store [ - 1 ] | Adds a new event to the trace store . The event may hava a category |
23,584 | def filter ( self , limit = None , to = None , category = None ) : if category and not to : msg_slice = itertools . islice ( ( x for x in self . store if x [ 2 ] == category ) , limit ) elif to and not category : to = JID . fromstr ( to ) msg_slice = itertools . islice ( ( x for x in self . store if _agent_in_msg ( to , x [ 1 ] ) ) , limit ) elif to and category : to = JID . fromstr ( to ) msg_slice = itertools . islice ( ( x for x in self . store if _agent_in_msg ( to , x [ 1 ] ) and x [ 2 ] == category ) , limit ) else : msg_slice = self . all ( limit = limit ) return msg_slice return list ( msg_slice ) [ : : - 1 ] | Returns the events that match the filters |
23,585 | def start ( self , auto_register = True ) : return self . container . start_agent ( agent = self , auto_register = auto_register ) | Tells the container to start this agent . It returns a coroutine or a future depending on whether it is called from a coroutine or a synchronous method . |
23,586 | async def _async_connect ( self ) : try : self . conn_coro = self . client . connected ( ) aenter = type ( self . conn_coro ) . __aenter__ ( self . conn_coro ) self . stream = await aenter logger . info ( f"Agent {str(self.jid)} connected and authenticated." ) except aiosasl . AuthenticationFailure : raise AuthenticationFailure ( "Could not authenticate the agent. Check user and password or use auto_register=True" ) | connect and authenticate to the XMPP server . Async mode . |
23,587 | async def _async_register ( self ) : metadata = aioxmpp . make_security_layer ( None , no_verify = not self . verify_security ) query = ibr . Query ( self . jid . localpart , self . password ) _ , stream , features = await aioxmpp . node . connect_xmlstream ( self . jid , metadata , loop = self . loop ) await ibr . register ( stream , query ) | Register the agent in the XMPP server from a coroutine . |
23,588 | def build_avatar_url ( jid ) : digest = md5 ( str ( jid ) . encode ( "utf-8" ) ) . hexdigest ( ) return "http://www.gravatar.com/avatar/{md5}?d=monsterid" . format ( md5 = digest ) | Static method to build a gravatar url with the agent s JID |
23,589 | def add_behaviour ( self , behaviour , template = None ) : behaviour . set_agent ( self ) if issubclass ( type ( behaviour ) , FSMBehaviour ) : for _ , state in behaviour . get_states ( ) . items ( ) : state . set_agent ( self ) behaviour . set_template ( template ) self . behaviours . append ( behaviour ) if self . is_alive ( ) : behaviour . start ( ) | Adds and starts a behaviour to the agent . If template is not None it is used to match new messages and deliver them to the behaviour . |
23,590 | def remove_behaviour ( self , behaviour ) : if not self . has_behaviour ( behaviour ) : raise ValueError ( "This behaviour is not registered" ) index = self . behaviours . index ( behaviour ) self . behaviours [ index ] . kill ( ) self . behaviours . pop ( index ) | Removes a behaviour from the agent . The behaviour is first killed . |
23,591 | async def _async_stop ( self ) : if self . presence : self . presence . set_unavailable ( ) for behav in self . behaviours : behav . kill ( ) if self . web . is_started ( ) : await self . web . runner . cleanup ( ) if self . is_alive ( ) : self . client . stop ( ) aexit = self . conn_coro . __aexit__ ( * sys . exc_info ( ) ) await aexit logger . info ( "Client disconnected." ) self . _alive . clear ( ) | Stops an agent and kills all its behaviours . |
23,592 | def _message_received ( self , msg ) : msg = Message . from_node ( msg ) return self . dispatch ( msg ) | Callback run when an XMPP Message is reveived . This callback delivers the message to every behaviour that is waiting for it . First the aioxmpp . Message is converted to spade . message . Message |
23,593 | def dispatch ( self , msg ) : logger . debug ( f"Got message: {msg}" ) futures = [ ] matched = False for behaviour in ( x for x in self . behaviours if x . match ( msg ) ) : futures . append ( self . submit ( behaviour . enqueue ( msg ) ) ) logger . debug ( f"Message enqueued to behaviour: {behaviour}" ) self . traces . append ( msg , category = str ( behaviour ) ) matched = True if not matched : logger . warning ( f"No behaviour matched for message: {msg}" ) self . traces . append ( msg ) return futures | Dispatch the message to every behaviour that is waiting for it using their templates match . |
23,594 | def from_node ( cls , node ) : if not isinstance ( node , aioxmpp . stanza . Message ) : raise AttributeError ( "node must be a aioxmpp.stanza.Message instance" ) msg = cls ( ) msg . _to = node . to msg . _sender = node . from_ if None in node . body : msg . body = node . body [ None ] else : for key in node . body . keys ( ) : msg . body = node . body [ key ] break for data in node . xep0004_data : if data . title == SPADE_X_METADATA : for field in data . fields : if field . var != "_thread_node" : msg . set_metadata ( field . var , field . values [ 0 ] ) else : msg . thread = field . values [ 0 ] return msg | Creates a new spade . message . Message from an aixoxmpp . stanza . Message |
23,595 | def to ( self , jid : str ) : if jid is not None and not isinstance ( jid , str ) : raise TypeError ( "'to' MUST be a string" ) self . _to = aioxmpp . JID . fromstr ( jid ) if jid is not None else None | Set jid of the receiver . |
23,596 | def sender ( self , jid : str ) : if jid is not None and not isinstance ( jid , str ) : raise TypeError ( "'sender' MUST be a string" ) self . _sender = aioxmpp . JID . fromstr ( jid ) if jid is not None else None | Set jid of the sender |
23,597 | def thread ( self , value : str ) : if value is not None and not isinstance ( value , str ) : raise TypeError ( "'thread' MUST be a string" ) self . _thread = value | Set thread id of the message |
23,598 | def set_metadata ( self , key : str , value : str ) : if not isinstance ( key , str ) or not isinstance ( value , str ) : raise TypeError ( "'key' and 'value' of metadata MUST be strings" ) self . metadata [ key ] = value | Add a new metadata to the message |
23,599 | def get_metadata ( self , key ) -> str : return self . metadata [ key ] if key in self . metadata else None | Get the value of a metadata . Returns None if metadata does not exist . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.